Unverified Commit d026b31c authored by 老广's avatar 老广 Committed by GitHub

Bugfix (#2346)

* [Update] 修改command Post导致的output错误和定时任务创建问题

* [Update] 修改celery 日志

* [Update] 修改task日志方式

* [Update] 修改Docker file
parent 50c1b3ed
...@@ -6,7 +6,7 @@ RUN useradd jumpserver ...@@ -6,7 +6,7 @@ RUN useradd jumpserver
COPY ./requirements /tmp/requirements COPY ./requirements /tmp/requirements
RUN yum -y install epel-release && cd /tmp/requirements && \ RUN yum -y install epel-release openldap-clients telnet && cd /tmp/requirements && \
yum -y install $(cat rpm_requirements.txt) yum -y install $(cat rpm_requirements.txt)
RUN cd /tmp/requirements && pip install -r requirements.txt RUN cd /tmp/requirements && pip install -r requirements.txt
......
...@@ -87,6 +87,7 @@ class AdminUserTestConnectiveApi(generics.RetrieveAPIView): ...@@ -87,6 +87,7 @@ class AdminUserTestConnectiveApi(generics.RetrieveAPIView):
""" """
queryset = AdminUser.objects.all() queryset = AdminUser.objects.all()
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
admin_user = self.get_object() admin_user = self.get_object()
......
...@@ -113,6 +113,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView): ...@@ -113,6 +113,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView):
""" """
queryset = Asset.objects.all() queryset = Asset.objects.all()
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
asset_id = kwargs.get('pk') asset_id = kwargs.get('pk')
...@@ -124,6 +125,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView): ...@@ -124,6 +125,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView):
class AssetGatewayApi(generics.RetrieveAPIView): class AssetGatewayApi(generics.RetrieveAPIView):
queryset = Asset.objects.all() queryset = Asset.objects.all()
permission_classes = (IsOrgAdminOrAppUser,) permission_classes = (IsOrgAdminOrAppUser,)
serializer_class = serializers.GatewayWithAuthSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
asset_id = kwargs.get('pk') asset_id = kwargs.get('pk')
......
...@@ -117,6 +117,7 @@ class SystemUserAssetsListView(generics.ListAPIView): ...@@ -117,6 +117,7 @@ class SystemUserAssetsListView(generics.ListAPIView):
class SystemUserPushToAssetApi(generics.RetrieveAPIView): class SystemUserPushToAssetApi(generics.RetrieveAPIView):
queryset = SystemUser.objects.all() queryset = SystemUser.objects.all()
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
system_user = self.get_object() system_user = self.get_object()
...@@ -129,6 +130,7 @@ class SystemUserPushToAssetApi(generics.RetrieveAPIView): ...@@ -129,6 +130,7 @@ class SystemUserPushToAssetApi(generics.RetrieveAPIView):
class SystemUserTestAssetConnectivityApi(generics.RetrieveAPIView): class SystemUserTestAssetConnectivityApi(generics.RetrieveAPIView):
queryset = SystemUser.objects.all() queryset = SystemUser.objects.all()
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
system_user = self.get_object() system_user = self.get_object()
......
...@@ -58,7 +58,7 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer): ...@@ -58,7 +58,7 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer):
管理用户更新关联到的集群 管理用户更新关联到的集群
""" """
nodes = serializers.PrimaryKeyRelatedField( nodes = serializers.PrimaryKeyRelatedField(
many=True, queryset = Node.objects.all() many=True, queryset=Node.objects.all()
) )
class Meta: class Meta:
...@@ -66,4 +66,5 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer): ...@@ -66,4 +66,5 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer):
fields = ['id', 'nodes'] fields = ['id', 'nodes']
class TaskIDSerializer(serializers.Serializer):
    """Serialize the id of a fired background (celery) task."""
    # Read-only: the id is produced by the server when the task is launched.
    task = serializers.CharField(read_only=True)
# ~*~ coding: utf-8 ~*~ # ~*~ coding: utf-8 ~*~
import json import json
import re import re
import time
import os import os
from celery import shared_task from celery import shared_task
from django.utils.translation import ugettext as _ from django.utils.translation import ugettext as _
from django.core.cache import cache from django.core.cache import cache
from common.utils import capacity_convert, \ from common.utils import (
sum_capacity, encrypt_password, get_logger capacity_convert, sum_capacity, encrypt_password, get_logger
from ops.celery.utils import register_as_period_task, after_app_shutdown_clean )
from ops.celery.decorator import (
register_as_period_task, after_app_shutdown_clean_periodic
)
from .models import SystemUser, AdminUser, Asset from .models import SystemUser, AdminUser, Asset
from . import const from . import const
...@@ -132,7 +134,7 @@ def update_assets_hardware_info_util(assets, task_name=None): ...@@ -132,7 +134,7 @@ def update_assets_hardware_info_util(assets, task_name=None):
@shared_task @shared_task
def update_asset_hardware_info_manual(asset): def update_asset_hardware_info_manual(asset):
task_name = _("Update asset hardware info: {}").format(asset.hostname) task_name = _("Update asset hardware info: {}").format(asset.hostname)
return update_assets_hardware_info_util( update_assets_hardware_info_util(
[asset], task_name=task_name [asset], task_name=task_name
) )
...@@ -221,6 +223,7 @@ def test_admin_user_connectivity_period(): ...@@ -221,6 +223,7 @@ def test_admin_user_connectivity_period():
for admin_user in admin_users: for admin_user in admin_users:
task_name = _("Test admin user connectivity period: {}").format(admin_user.name) task_name = _("Test admin user connectivity period: {}").format(admin_user.name)
test_admin_user_connectivity_util(admin_user, task_name) test_admin_user_connectivity_util(admin_user, task_name)
cache.set(key, 1, 60*40)
@shared_task @shared_task
...@@ -394,13 +397,13 @@ def push_system_user_to_assets(system_user, assets): ...@@ -394,13 +397,13 @@ def push_system_user_to_assets(system_user, assets):
@shared_task @shared_task
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def test_system_user_connectability_period(): def test_system_user_connectability_period():
pass pass
@shared_task @shared_task
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def test_admin_user_connectability_period(): def test_admin_user_connectability_period():
pass pass
...@@ -408,7 +411,7 @@ def test_admin_user_connectability_period(): ...@@ -408,7 +411,7 @@ def test_admin_user_connectability_period():
# @shared_task # @shared_task
# @register_as_period_task(interval=3600) # @register_as_period_task(interval=3600)
# @after_app_ready_start # @after_app_ready_start
# # @after_app_shutdown_clean # @after_app_shutdown_clean_periodic
# def push_system_user_period(): # def push_system_user_period():
# for system_user in SystemUser.objects.all(): # for system_user in SystemUser.objects.all():
# push_system_user_related_nodes(system_user) # push_system_user_related_nodes(system_user)
......
...@@ -4,15 +4,20 @@ ...@@ -4,15 +4,20 @@
import os import os
import json import json
import jms_storage import jms_storage
import uuid
from rest_framework.views import Response, APIView from rest_framework.views import Response, APIView
from rest_framework import generics
from ldap3 import Server, Connection from ldap3 import Server, Connection
from django.core.mail import get_connection, send_mail from django.core.mail import send_mail
from django.core.cache import cache
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
from django.conf import settings from django.conf import settings
from .permissions import IsOrgAdmin, IsSuperUser from .permissions import IsOrgAdmin, IsSuperUser
from .serializers import MailTestSerializer, LDAPTestSerializer from .serializers import (
MailTestSerializer, LDAPTestSerializer, OutputSerializer
)
from .models import Setting from .models import Setting
...@@ -189,4 +194,39 @@ class DjangoSettingsAPI(APIView): ...@@ -189,4 +194,39 @@ class DjangoSettingsAPI(APIView):
return Response(data) return Response(data)
class LogTailApi(generics.RetrieveAPIView):
    """Tail a log file across repeated GET requests.

    Each response carries a ``mark`` token; the client echoes it back on the
    next request and the view resumes reading from the file offset cached
    under that token.  Subclasses must implement ``get_log_path()``.
    """
    permission_classes = ()
    # Maximum number of characters returned per request.
    buff_size = 1024 * 10
    serializer_class = OutputSerializer
    # Becomes True once the writer is finished and the file is drained.
    end = False

    def is_file_finish_write(self):
        # Hook for subclasses: report whether the log writer has finished.
        # Default assumes the file is complete.
        return True

    def get_log_path(self):
        # Subclasses must return the filesystem path of the log to tail.
        raise NotImplementedError()

    def get(self, request, *args, **kwargs):
        mark = request.query_params.get("mark") or str(uuid.uuid4())
        log_path = self.get_log_path()
        if not log_path or not os.path.isfile(log_path):
            if self.is_file_finish_write():
                # Writer already finished but no file exists: tell the
                # client to stop polling.
                return Response({
                    "data": 'Not found the log',
                    'end': True,
                    'mark': mark}
                )
            else:
                # File not created yet: client should keep polling.
                return Response({"data": _("Waiting ...\n")}, status=200)
        with open(log_path, 'r') as f:
            offset = cache.get(mark, 0)
            f.seek(offset)
            # '\n' -> '\r\n': output is presumably rendered in a web
            # terminal emulator — confirm with the frontend consumer.
            data = f.read(self.buff_size).replace('\n', '\r\n')
            mark = str(uuid.uuid4())
            # Remember the new offset for 5 seconds under a fresh mark.
            cache.set(mark, f.tell(), 5)
            if data == '' and self.is_file_finish_write():
                self.end = True
            return Response({"data": data, 'end': self.end, 'mark': mark})
...@@ -19,3 +19,8 @@ class LDAPTestSerializer(serializers.Serializer): ...@@ -19,3 +19,8 @@ class LDAPTestSerializer(serializers.Serializer):
AUTH_LDAP_USER_ATTR_MAP = serializers.CharField() AUTH_LDAP_USER_ATTR_MAP = serializers.CharField()
AUTH_LDAP_START_TLS = serializers.BooleanField(required=False) AUTH_LDAP_START_TLS = serializers.BooleanField(required=False)
class OutputSerializer(serializers.Serializer):
    """Serialize one chunk of tailed log output.

    NOTE(review): LogTailApi actually responds with keys 'data'/'end'/'mark',
    not 'output'/'is_end'/'mark' — verify the intended field names.
    """
    output = serializers.CharField()
    is_end = serializers.BooleanField()
    mark = serializers.CharField()
...@@ -18,19 +18,21 @@ logger = get_logger(__file__) ...@@ -18,19 +18,21 @@ logger = get_logger(__file__)
@receiver(post_save, sender=Setting, dispatch_uid="my_unique_identifier") @receiver(post_save, sender=Setting, dispatch_uid="my_unique_identifier")
def refresh_settings_on_changed(sender, instance=None, **kwargs): def refresh_settings_on_changed(sender, instance=None, **kwargs):
logger.debug("Receive setting item change")
logger.debug(" - refresh setting: {}".format(instance.name))
if instance: if instance:
instance.refresh_setting() instance.refresh_setting()
@receiver(django_ready, dispatch_uid="my_unique_identifier") @receiver(django_ready, dispatch_uid="my_unique_identifier")
def refresh_all_settings_on_django_ready(sender, **kwargs): def monkey_patch_settings(sender, **kwargs):
logger.debug("Receive django ready signal")
logger.debug(" - fresh all settings")
cache_key_prefix = '_SETTING_' cache_key_prefix = '_SETTING_'
uncached_settings = [
'CACHES', 'DEBUG', 'SECRET_KEY', 'INSTALLED_APPS',
'ROOT_URLCONF', 'TEMPLATES', 'DATABASES', '_wrapped',
'CELERY_LOG_DIR'
]
def monkey_patch_getattr(self, name): def monkey_patch_getattr(self, name):
if name not in uncached_settings:
key = cache_key_prefix + name key = cache_key_prefix + name
cached = cache.get(key) cached = cache.get(key)
if cached is not None: if cached is not None:
...@@ -66,8 +68,11 @@ def refresh_all_settings_on_django_ready(sender, **kwargs): ...@@ -66,8 +68,11 @@ def refresh_all_settings_on_django_ready(sender, **kwargs):
@receiver(django_ready) @receiver(django_ready)
def auto_generate_terminal_host_key(sender, **kwargs): def auto_generate_terminal_host_key(sender, **kwargs):
try:
if Setting.objects.filter(name='TERMINAL_HOST_KEY').exists(): if Setting.objects.filter(name='TERMINAL_HOST_KEY').exists():
return return
except ProgrammingError:
return
private_key, public_key = ssh_key_gen() private_key, public_key = ssh_key_gen()
value = json.dumps(private_key) value = json.dumps(private_key)
Setting.objects.create(name='TERMINAL_HOST_KEY', value=value) Setting.objects.create(name='TERMINAL_HOST_KEY', value=value)
......
...@@ -406,24 +406,6 @@ def get_replay_storage_setting(): ...@@ -406,24 +406,6 @@ def get_replay_storage_setting():
return value return value
class TeeObj:
origin_stdout = sys.stdout
def __init__(self, file_obj):
self.file_obj = file_obj
def write(self, msg):
self.origin_stdout.write(msg)
self.file_obj.write(msg.replace('*', ''))
def flush(self):
self.origin_stdout.flush()
self.file_obj.flush()
def close(self):
self.file_obj.close()
def with_cache(func): def with_cache(func):
cache = {} cache = {}
key = "_{}.{}".format(func.__module__, func.__name__) key = "_{}.{}".format(func.__module__, func.__name__)
......
...@@ -412,6 +412,9 @@ RADIUS_SECRET = CONFIG.RADIUS_SECRET ...@@ -412,6 +412,9 @@ RADIUS_SECRET = CONFIG.RADIUS_SECRET
if AUTH_RADIUS: if AUTH_RADIUS:
AUTHENTICATION_BACKENDS.insert(0, AUTH_RADIUS_BACKEND) AUTHENTICATION_BACKENDS.insert(0, AUTH_RADIUS_BACKEND)
# Dump all celery log to here
CELERY_LOG_DIR = os.path.join(PROJECT_DIR, 'data', 'celery')
# Celery using redis as broker # Celery using redis as broker
CELERY_BROKER_URL = 'redis://:%(password)s@%(host)s:%(port)s/%(db)s' % { CELERY_BROKER_URL = 'redis://:%(password)s@%(host)s:%(port)s/%(db)s' % {
'password': CONFIG.REDIS_PASSWORD, 'password': CONFIG.REDIS_PASSWORD,
...@@ -425,14 +428,16 @@ CELERY_RESULT_BACKEND = CELERY_BROKER_URL ...@@ -425,14 +428,16 @@ CELERY_RESULT_BACKEND = CELERY_BROKER_URL
CELERY_ACCEPT_CONTENT = ['json', 'pickle'] CELERY_ACCEPT_CONTENT = ['json', 'pickle']
CELERY_RESULT_EXPIRES = 3600 CELERY_RESULT_EXPIRES = 3600
# CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s' # CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_WORKER_LOG_FORMAT = '%(message)s' # CELERY_WORKER_LOG_FORMAT = '%(message)s'
# CELERY_WORKER_TASK_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s' CELERY_WORKER_TASK_LOG_FORMAT = '%(task_id)s %(task_name)s %(message)s'
CELERY_WORKER_TASK_LOG_FORMAT = '%(message)s' # CELERY_WORKER_TASK_LOG_FORMAT = '%(message)s'
# CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s' # CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_WORKER_LOG_FORMAT = '%(message)s'
CELERY_TASK_EAGER_PROPAGATES = True CELERY_TASK_EAGER_PROPAGATES = True
CELERY_REDIRECT_STDOUTS = True CELERY_WORKER_REDIRECT_STDOUTS = True
CELERY_REDIRECT_STDOUTS_LEVEL = "INFO" CELERY_WORKER_REDIRECT_STDOUTS_LEVEL = "INFO"
CELERY_WORKER_HIJACK_ROOT_LOGGER = False # CELERY_WORKER_HIJACK_ROOT_LOGGER = False
CELERY_WORKER_MAX_TASKS_PER_CHILD = 40
# Cache use redis # Cache use redis
CACHES = { CACHES = {
......
# -*- coding: utf-8 -*-
#
import sys
class TeeObj:
    """File-like object duplicating every write to stdout and a file.

    The copy written to the file has all '*' characters removed —
    presumably to strip masking characters from console output (confirm
    with callers).
    """
    # Captured once at class-creation time, so installing a TeeObj as
    # sys.stdout later cannot recurse into itself.
    origin_stdout = sys.stdout

    def __init__(self, file_obj):
        self.file_obj = file_obj

    def write(self, msg):
        self.origin_stdout.write(msg)
        sanitized = msg.replace('*', '')
        self.file_obj.write(sanitized)

    def flush(self):
        self.origin_stdout.flush()
        self.file_obj.flush()
...@@ -9,10 +9,10 @@ from ansible.parsing.dataloader import DataLoader ...@@ -9,10 +9,10 @@ from ansible.parsing.dataloader import DataLoader
from ansible.executor.playbook_executor import PlaybookExecutor from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.playbook.play import Play from ansible.playbook.play import Play
import ansible.constants as C import ansible.constants as C
from ansible.utils.display import Display
from .callback import AdHocResultCallback, PlaybookResultCallBack, \ from .callback import (
CommandResultCallback AdHocResultCallback, PlaybookResultCallBack, CommandResultCallback
)
from common.utils import get_logger from common.utils import get_logger
from .exceptions import AnsibleError from .exceptions import AnsibleError
...@@ -22,13 +22,6 @@ C.HOST_KEY_CHECKING = False ...@@ -22,13 +22,6 @@ C.HOST_KEY_CHECKING = False
logger = get_logger(__name__) logger = get_logger(__name__)
class CustomDisplay(Display):
def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False):
pass
display = CustomDisplay()
Options = namedtuple('Options', [ Options = namedtuple('Options', [
'listtags', 'listtasks', 'listhosts', 'syntax', 'connection', 'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
'module_path', 'forks', 'remote_user', 'private_key_file', 'timeout', 'module_path', 'forks', 'remote_user', 'private_key_file', 'timeout',
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
import uuid
import os
import os
from celery.result import AsyncResult from celery.result import AsyncResult
from django.core.cache import cache
from django.utils.translation import ugettext as _
from rest_framework import generics from rest_framework import generics
from rest_framework.views import Response
from common.permissions import IsOrgAdmin, IsValidUser from common.permissions import IsValidUser
from common.api import LogTailApi
from ..models import CeleryTask from ..models import CeleryTask
from ..serializers import CeleryResultSerializer from ..serializers import CeleryResultSerializer
from ..celery.utils import get_celery_task_log_path
__all__ = ['CeleryTaskLogApi', 'CeleryResultApi'] __all__ = ['CeleryTaskLogApi', 'CeleryResultApi']
class CeleryTaskLogApi(generics.RetrieveAPIView): class CeleryTaskLogApi(LogTailApi):
permission_classes = (IsValidUser,) permission_classes = (IsValidUser,)
buff_size = 1024 * 10 task = None
end = False task_id = ''
queryset = CeleryTask.objects.all()
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
mark = request.query_params.get("mark") or str(uuid.uuid4()) self.task_id = str(kwargs.get('pk'))
task = self.get_object() self.task = AsyncResult(self.task_id)
log_path = task.full_log_path return super().get(request, *args, **kwargs)
if not log_path or not os.path.isfile(log_path): def get_log_path(self):
return Response({"data": _("Waiting ...")}, status=203) new_path = get_celery_task_log_path(self.task_id)
if new_path and os.path.isfile(new_path):
with open(log_path, 'r', encoding="utf8") as f: return new_path
offset = cache.get(mark, 0) try:
f.seek(offset) task = CeleryTask.objects.get(id=self.task_id)
data = f.read(self.buff_size).replace('\n', '\r\n') except CeleryTask.DoesNotExist:
mark = str(uuid.uuid4()) return None
cache.set(mark, f.tell(), 5) return task.full_log_path
if data == '' and task.is_finished(): def is_file_finish_write(self):
self.end = True return self.task.ready()
return Response({"data": data, 'end': self.end, 'mark': mark})
class CeleryResultApi(generics.RetrieveAPIView): class CeleryResultApi(generics.RetrieveAPIView):
......
...@@ -10,6 +10,5 @@ class OpsConfig(AppConfig): ...@@ -10,6 +10,5 @@ class OpsConfig(AppConfig):
from orgs.models import Organization from orgs.models import Organization
from orgs.utils import set_current_org from orgs.utils import set_current_org
set_current_org(Organization.root()) set_current_org(Organization.root())
super().ready()
from .celery import signal_handler from .celery import signal_handler
super().ready()
# -*- coding: utf-8 -*-
#
from functools import wraps
# ---------------------------------------------------------------------------
# Module-level registries: filled at import time by the decorators below and
# drained by the celery worker signal handlers.
# ---------------------------------------------------------------------------
_need_registered_period_tasks = []
_after_app_ready_start_tasks = []
_after_app_shutdown_clean_periodic_tasks = []


def add_register_period_task(task):
    """Queue a periodic-task spec (a dict keyed by the task's dotted name)."""
    _need_registered_period_tasks.append(task)


def get_register_period_tasks():
    """Return every queued periodic-task spec."""
    return _need_registered_period_tasks


def add_after_app_shutdown_clean_task(name):
    """Record a task name whose periodic entry is removed at worker shutdown."""
    _after_app_shutdown_clean_periodic_tasks.append(name)


def get_after_app_shutdown_clean_tasks():
    """Return task names to clean up when the worker shuts down."""
    return _after_app_shutdown_clean_periodic_tasks


def add_after_app_ready_task(name):
    """Record a task name to be fired once the worker reports ready."""
    _after_app_ready_start_tasks.append(name)


def get_after_app_ready_tasks():
    """Return task names to fire when the worker becomes ready."""
    return _after_app_ready_start_tasks
def register_as_period_task(crontab=None, interval=None):
    """
    Queue the decorated task for registration as a celery periodic task.

    Warning: the task must not require any args or kwargs.
    :param crontab: crontab expression string, e.g. "* * * * *"
    :param interval: period in seconds, e.g. 60 * 60
    :return: the decorator
    :raises SyntaxError: if neither crontab nor interval is given
    """
    # Validate once, at decoration-definition time.  (The original code
    # repeated this check inside ``decorate``, where it could never fire.)
    if crontab is None and interval is None:
        raise SyntaxError("Must set crontab or interval one")

    def decorate(func):
        # When this decorator runs the celery task object does not exist
        # yet, so func.name is unavailable — build the dotted name by hand.
        name = '{func.__module__}.{func.__name__}'.format(func=func)
        add_register_period_task({
            name: {
                'task': name,
                'interval': interval,
                'crontab': crontab,
                'args': (),
                'enabled': True,
            }
        })

        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)

        return wrapper

    return decorate
def after_app_ready_start(func):
    """Schedule the decorated task to fire once the celery worker is ready."""
    # The celery task is not created yet when the decorator executes,
    # so derive its registered name from the module and function names.
    task_name = '{0.__module__}.{0.__name__}'.format(func)
    if task_name not in _after_app_ready_start_tasks:
        add_after_app_ready_task(task_name)

    @wraps(func)
    def decorate(*args, **kwargs):
        return func(*args, **kwargs)

    return decorate
def after_app_shutdown_clean_periodic(func):
    """Mark the decorated periodic task for cleanup at worker shutdown."""
    # The celery task is not created yet when the decorator executes,
    # so derive its registered name from the module and function names.
    task_name = '{0.__module__}.{0.__name__}'.format(func)
    if task_name not in _after_app_shutdown_clean_periodic_tasks:
        add_after_app_shutdown_clean_task(task_name)

    @wraps(func)
    def decorate(*args, **kwargs):
        return func(*args, **kwargs)

    return decorate
from logging import StreamHandler
from django.conf import settings
from celery import current_task
from celery.signals import task_prerun, task_postrun
from kombu import Connection, Exchange, Queue, Producer
from kombu.mixins import ConsumerMixin
from .utils import get_celery_task_log_path
# Routing key shared by the log producer and consumer below.
routing_key = 'celery_log'
# Direct exchange dedicated to celery task-log traffic.
celery_log_exchange = Exchange('celery_log_exchange', type='direct')
celery_log_queue = [Queue('celery_log', celery_log_exchange, routing_key=routing_key)]
class CeleryLoggerConsumer(ConsumerMixin):
    """Consume task-log messages published to the celery_log queue.

    Subclasses override the ``handle_*`` hooks to persist or forward logs.
    """

    def __init__(self):
        self.connection = Connection(settings.CELERY_LOG_BROKER_URL)

    def get_consumers(self, Consumer, channel):
        consumer = Consumer(
            queues=celery_log_queue,
            accept=['pickle', 'json'],
            callbacks=[self.process_task],
        )
        return [consumer]

    def handle_task_start(self, task_id, message):
        # Hook: a task began producing log output.
        pass

    def handle_task_end(self, task_id, message):
        # Hook: a task finished.
        pass

    def handle_task_log(self, task_id, msg, message):
        # Hook: one log line from a running task.
        pass

    def process_task(self, body, message):
        # Dispatch on the action code set by CeleryLoggerProducer.
        action = body.get('action')
        task_id = body.get('task_id')
        msg = body.get('msg')
        if action == CeleryLoggerProducer.ACTION_TASK_LOG:
            self.handle_task_log(task_id, msg, message)
        elif action == CeleryLoggerProducer.ACTION_TASK_START:
            self.handle_task_start(task_id, message)
        elif action == CeleryLoggerProducer.ACTION_TASK_END:
            self.handle_task_end(task_id, message)
class CeleryLoggerProducer:
    """Publish task-log events (start / log line / end) to the log exchange."""
    # Action codes carried in each published payload.
    ACTION_TASK_START, ACTION_TASK_LOG, ACTION_TASK_END = range(3)

    def __init__(self):
        self.connection = Connection(settings.CELERY_LOG_BROKER_URL)

    @property
    def producer(self):
        # A fresh kombu Producer per publish keeps this object stateless.
        return Producer(self.connection)

    def publish(self, payload):
        self.producer.publish(
            payload, serializer='json', exchange=celery_log_exchange,
            declare=[celery_log_exchange], routing_key=routing_key
        )

    def task_start(self, task_id):
        return self.publish({'task_id': task_id, 'action': self.ACTION_TASK_START})

    def log(self, task_id, msg):
        return self.publish({'task_id': task_id, 'msg': msg, 'action': self.ACTION_TASK_LOG})

    def task_end(self, task_id):
        return self.publish({'task_id': task_id, 'action': self.ACTION_TASK_END})

    def read(self):
        # File-like no-op so the producer can stand in for a stream.
        pass

    def flush(self):
        # File-like no-op so the producer can stand in for a stream.
        pass
class CeleryTaskLoggerHandler(StreamHandler):
    """Logging handler that routes records to per-task destinations.

    On creation it connects celery's task_prerun/task_postrun signals so
    subclasses can open and close a destination for each task run.
    """
    terminator = '\r\n'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        task_prerun.connect(self.on_task_start)
        task_postrun.connect(self.on_start_end)

    @staticmethod
    def get_current_task_id():
        # root_id groups a whole task tree under the originating task's id.
        if not current_task:
            return
        return current_task.request.root_id

    def on_task_start(self, sender, task_id, **kwargs):
        return self.handle_task_start(task_id)

    def on_start_end(self, sender, task_id, **kwargs):
        return self.handle_task_end(task_id)

    def after_task_publish(self, sender, body, **kwargs):
        pass

    def emit(self, record):
        current_id = self.get_current_task_id()
        if not current_id:
            # Record was emitted outside any task run: nothing to do.
            return
        try:
            self.write_task_log(current_id, record)
            self.flush()
        except Exception:
            self.handleError(record)

    # --- hooks for subclasses ---------------------------------------------

    def write_task_log(self, task_id, msg):
        pass

    def handle_task_start(self, task_id):
        pass

    def handle_task_end(self, task_id):
        pass
class CeleryTaskMQLoggerHandler(CeleryTaskLoggerHandler):
    """Forward task log records to the message queue via CeleryLoggerProducer."""

    def __init__(self):
        self.producer = CeleryLoggerProducer()
        super().__init__(stream=None)

    def write_task_log(self, task_id, record):
        self.producer.log(task_id, self.format(record))

    def flush(self):
        self.producer.flush()
class CeleryTaskFileHandler(CeleryTaskLoggerHandler):
    """Write each task's log records to its own file on disk."""

    def __init__(self):
        # Current task's open log file; set in handle_task_start.
        self.f = None
        super().__init__(stream=None)

    def emit(self, record):
        msg = self.format(record)
        if not self.f:
            # No task file open (record emitted outside a task run).
            return
        self.f.write(msg)
        self.f.write(self.terminator)
        self.flush()

    def flush(self):
        self.f and self.f.flush()

    def handle_task_start(self, task_id):
        log_path = get_celery_task_log_path(task_id)
        # Explicit utf-8: log lines may contain non-ASCII text and the
        # platform default encoding is not guaranteed to handle it.
        self.f = open(log_path, 'a', encoding='utf-8')

    def handle_task_end(self, task_id):
        self.f and self.f.close()
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
import os import logging
import datetime
import sys
import time
from django.conf import settings
from django.utils import timezone
from django.core.cache import cache from django.core.cache import cache
from django.db import transaction
from celery import subtask from celery import subtask
from celery.signals import worker_ready, worker_shutdown, task_prerun, \ from celery.signals import (
task_postrun, after_task_publish worker_ready, worker_shutdown, after_setup_logger
)
from django_celery_beat.models import PeriodicTask from django_celery_beat.models import PeriodicTask
from common.utils import get_logger, TeeObj, get_object_or_none from common.utils import get_logger
from common.const import celery_task_pre_key from .decorator import get_after_app_ready_tasks, get_after_app_shutdown_clean_tasks
from .utils import get_after_app_ready_tasks, get_after_app_shutdown_clean_tasks from .logger import CeleryTaskFileHandler
from ..models import CeleryTask
logger = get_logger(__file__) logger = get_logger(__file__)
@worker_ready.connect @worker_ready.connect
def on_app_ready(sender=None, headers=None, body=None, **kwargs): def on_app_ready(sender=None, headers=None, **kwargs):
if cache.get("CELERY_APP_READY", 0) == 1: if cache.get("CELERY_APP_READY", 0) == 1:
return return
cache.set("CELERY_APP_READY", 1, 10) cache.set("CELERY_APP_READY", 1, 10)
tasks = get_after_app_ready_tasks() tasks = get_after_app_ready_tasks()
logger.debug("Start need start task: [{}]".format( logger.debug("Work ready signal recv")
", ".join(tasks)) logger.debug("Start need start task: [{}]".format(", ".join(tasks)))
)
for task in tasks: for task in tasks:
subtask(task).delay() subtask(task).delay()
@worker_shutdown.connect @worker_shutdown.connect
def after_app_shutdown(sender=None, headers=None, body=None, **kwargs): def after_app_shutdown_periodic_tasks(sender=None, **kwargs):
if cache.get("CELERY_APP_SHUTDOWN", 0) == 1: if cache.get("CELERY_APP_SHUTDOWN", 0) == 1:
return return
cache.set("CELERY_APP_SHUTDOWN", 1, 10) cache.set("CELERY_APP_SHUTDOWN", 1, 10)
tasks = get_after_app_shutdown_clean_tasks() tasks = get_after_app_shutdown_clean_tasks()
logger.debug("App shutdown signal recv") logger.debug("Worker shutdown signal recv")
logger.debug("Clean need cleaned period tasks: [{}]".format( logger.debug("Clean period tasks: [{}]".format(', '.join(tasks)))
', '.join(tasks))
)
PeriodicTask.objects.filter(name__in=tasks).delete() PeriodicTask.objects.filter(name__in=tasks).delete()
@after_task_publish.connect @after_setup_logger.connect
def after_task_publish_signal_handler(sender, headers=None, **kwargs): def add_celery_logger_handler(sender=None, logger=None, loglevel=None, format=None, **kwargs):
CeleryTask.objects.create( if not logger:
id=headers["id"], status=CeleryTask.WAITING, name=headers["task"]
)
cache.set(headers["id"], True, 3600)
@task_prerun.connect
def pre_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
time.sleep(0.1)
for i in range(5):
if cache.get(task_id, False):
break
else:
time.sleep(0.1)
continue
t = get_object_or_none(CeleryTask, id=task_id)
if t is None:
logger.warn("Not get the task: {}".format(task_id))
return
now = datetime.datetime.now().strftime("%Y-%m-%d")
log_path = os.path.join(now, task_id + '.log')
full_path = os.path.join(CeleryTask.LOG_DIR, log_path)
if not os.path.exists(os.path.dirname(full_path)):
os.makedirs(os.path.dirname(full_path))
with transaction.atomic():
t.date_start = timezone.now()
t.status = CeleryTask.RUNNING
t.log_path = log_path
t.save()
f = open(full_path, 'w', encoding="utf-8")
tee = TeeObj(f)
sys.stdout = tee
task.log_f = tee
@task_postrun.connect
def post_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
t = get_object_or_none(CeleryTask, id=task_id)
if t is None:
logger.warn("Not get the task: {}".format(task_id))
return return
with transaction.atomic(): handler = CeleryTaskFileHandler()
t.status = CeleryTask.FINISHED handler.setLevel(loglevel)
t.date_finished = timezone.now() formatter = logging.Formatter(format)
t.save() handler.setFormatter(formatter)
task.log_f.flush() logger.addHandler(handler)
sys.stdout = task.log_f.origin_stdout
task.log_f.close()
# @after_task_publish.connect
# def after_task_publish_signal_handler(sender, headers=None, **kwargs):
# CeleryTask.objects.create(
# id=headers["id"], status=CeleryTask.WAITING, name=headers["task"]
# )
# cache.set(headers["id"], True, 3600)
#
#
# @task_prerun.connect
# def pre_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
# time.sleep(0.1)
# for i in range(5):
# if cache.get(task_id, False):
# break
# else:
# time.sleep(0.1)
# continue
#
# t = get_object_or_none(CeleryTask, id=task_id)
# if t is None:
# logger.warn("Not get the task: {}".format(task_id))
# return
# now = datetime.datetime.now().strftime("%Y-%m-%d")
# log_path = os.path.join(now, task_id + '.log')
# full_path = os.path.join(CeleryTask.LOG_DIR, log_path)
#
# if not os.path.exists(os.path.dirname(full_path)):
# os.makedirs(os.path.dirname(full_path))
# with transaction.atomic():
# t.date_start = timezone.now()
# t.status = CeleryTask.RUNNING
# t.log_path = log_path
# t.save()
# f = open(full_path, 'w', encoding="utf-8")
# tee = TeeObj(f)
# sys.stdout = tee
# task.log_f = tee
#
#
# @task_postrun.connect
# def post_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
# t = get_object_or_none(CeleryTask, id=task_id)
# if t is None:
# logger.warn("Not get the task: {}".format(task_id))
# return
# with transaction.atomic():
# t.status = CeleryTask.FINISHED
# t.date_finished = timezone.now()
# t.save()
# task.log_f.flush()
# sys.stdout = task.log_f.origin_stdout
# task.log_f.close()
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
import json import json
from functools import wraps import os
from django.conf import settings
from django.db.utils import ProgrammingError, OperationalError from django.db.utils import ProgrammingError, OperationalError
from django.core.cache import cache
from django_celery_beat.models import PeriodicTask, IntervalSchedule, CrontabSchedule from django_celery_beat.models import PeriodicTask, IntervalSchedule, CrontabSchedule
def add_register_period_task(name):
key = "__REGISTER_PERIODIC_TASKS"
value = cache.get(key, [])
value.append(name)
cache.set(key, value)
def get_register_period_tasks():
key = "__REGISTER_PERIODIC_TASKS"
return cache.get(key, [])
def add_after_app_shutdown_clean_task(name):
key = "__AFTER_APP_SHUTDOWN_CLEAN_TASKS"
value = cache.get(key, [])
value.append(name)
cache.set(key, value)
def get_after_app_shutdown_clean_tasks():
key = "__AFTER_APP_SHUTDOWN_CLEAN_TASKS"
return cache.get(key, [])
def add_after_app_ready_task(name):
key = "__AFTER_APP_READY_RUN_TASKS"
value = cache.get(key, [])
value.append(name)
cache.set(key, value)
def get_after_app_ready_tasks():
key = "__AFTER_APP_READY_RUN_TASKS"
return cache.get(key, [])
def create_or_update_celery_periodic_tasks(tasks): def create_or_update_celery_periodic_tasks(tasks):
""" """
:param tasks: { :param tasks: {
...@@ -123,63 +87,10 @@ def delete_celery_periodic_task(task_name): ...@@ -123,63 +87,10 @@ def delete_celery_periodic_task(task_name):
PeriodicTask.objects.filter(name=task_name).delete() PeriodicTask.objects.filter(name=task_name).delete()
def register_as_period_task(crontab=None, interval=None): def get_celery_task_log_path(task_id):
""" task_id = str(task_id)
Warning: Task must be have not any args and kwargs rel_path = os.path.join(task_id[0], task_id[1], task_id + '.log')
:param crontab: "* * * * *" path = os.path.join(settings.CELERY_LOG_DIR, rel_path)
:param interval: 60*60*60 os.makedirs(os.path.dirname(path), exist_ok=True)
:return: return path
"""
if crontab is None and interval is None:
raise SyntaxError("Must set crontab or interval one")
def decorate(func):
if crontab is None and interval is None:
raise SyntaxError("Interval and crontab must set one")
# Because when this decorator run, the task was not created,
# So we can't use func.name
name = '{func.__module__}.{func.__name__}'.format(func=func)
if name not in get_register_period_tasks():
create_or_update_celery_periodic_tasks({
name: {
'task': name,
'interval': interval,
'crontab': crontab,
'args': (),
'enabled': True,
}
})
add_register_period_task(name)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
return decorate
def after_app_ready_start(func):
# Because when this decorator run, the task was not created,
# So we can't use func.name
name = '{func.__module__}.{func.__name__}'.format(func=func)
if name not in get_after_app_ready_tasks():
add_after_app_ready_task(name)
@wraps(func)
def decorate(*args, **kwargs):
return func(*args, **kwargs)
return decorate
def after_app_shutdown_clean(func):
# Because when this decorator run, the task was not created,
# So we can't use func.name
name = '{func.__module__}.{func.__name__}'.format(func=func)
if name not in get_after_app_shutdown_clean_tasks():
add_after_app_shutdown_clean_task(name)
@wraps(func)
def decorate(*args, **kwargs):
return func(*args, **kwargs)
return decorate
...@@ -8,6 +8,8 @@ from django.utils.translation import ugettext_lazy as _ ...@@ -8,6 +8,8 @@ from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext from django.utils.translation import ugettext
from django.db import models from django.db import models
from orgs.models import Organization
from ..ansible.runner import CommandRunner from ..ansible.runner import CommandRunner
from ..inventory import JMSInventory from ..inventory import JMSInventory
...@@ -53,6 +55,8 @@ class CommandExecution(models.Model): ...@@ -53,6 +55,8 @@ class CommandExecution(models.Model):
def run(self): def run(self):
print('-'*10 + ' ' + ugettext('Task start') + ' ' + '-'*10) print('-'*10 + ' ' + ugettext('Task start') + ' ' + '-'*10)
org = Organization.get_instance(self.run_as.org_id)
org.change_to()
self.date_start = timezone.now() self.date_start = timezone.now()
ok, msg = self.run_as.is_command_can_run(self.command) ok, msg = self.run_as.is_command_can_run(self.command)
if ok: if ok:
......
...@@ -5,7 +5,11 @@ from celery import shared_task, subtask ...@@ -5,7 +5,11 @@ from celery import shared_task, subtask
from django.utils import timezone from django.utils import timezone
from common.utils import get_logger, get_object_or_none from common.utils import get_logger, get_object_or_none
from .celery.utils import register_as_period_task, after_app_shutdown_clean from .celery.decorator import (
register_as_period_task, after_app_shutdown_clean_periodic,
after_app_ready_start
)
from .celery.utils import create_or_update_celery_periodic_tasks
from .models import Task, CommandExecution, CeleryTask from .models import Task, CommandExecution, CeleryTask
logger = get_logger(__file__) logger = get_logger(__file__)
...@@ -39,8 +43,8 @@ def run_command_execution(cid, **kwargs): ...@@ -39,8 +43,8 @@ def run_command_execution(cid, **kwargs):
@shared_task @shared_task
@after_app_shutdown_clean_periodic
@register_as_period_task(interval=3600*24) @register_as_period_task(interval=3600*24)
@after_app_shutdown_clean
def clean_tasks_adhoc_period(): def clean_tasks_adhoc_period():
logger.debug("Start clean task adhoc and run history") logger.debug("Start clean task adhoc and run history")
tasks = Task.objects.all() tasks = Task.objects.all()
...@@ -52,8 +56,8 @@ def clean_tasks_adhoc_period(): ...@@ -52,8 +56,8 @@ def clean_tasks_adhoc_period():
@shared_task @shared_task
@after_app_shutdown_clean_periodic
@register_as_period_task(interval=3600*24) @register_as_period_task(interval=3600*24)
@after_app_shutdown_clean
def clean_celery_tasks_period(): def clean_celery_tasks_period():
logger.debug("Start clean celery task history") logger.debug("Start clean celery task history")
one_month_ago = timezone.now() - timezone.timedelta(days=30) one_month_ago = timezone.now() - timezone.timedelta(days=30)
...@@ -69,11 +73,19 @@ def clean_celery_tasks_period(): ...@@ -69,11 +73,19 @@ def clean_celery_tasks_period():
tasks.delete() tasks.delete()
@shared_task
@after_app_ready_start
def create_or_update_registered_periodic_tasks():
from .celery.decorator import get_register_period_tasks
for task in get_register_period_tasks():
create_or_update_celery_periodic_tasks(task)
@shared_task @shared_task
def hello(name, callback=None): def hello(name, callback=None):
import time
time.sleep(10)
print("Hello {}".format(name)) print("Hello {}".format(name))
if callback is not None:
subtask(callback).delay("Guahongwei")
@shared_task @shared_task
......
{% load static %} {% load static %}
{% load i18n %}
<head> <head>
<title>term.js</title> <title>{% trans 'Task log' %}</title>
<script src="{% static 'js/jquery-2.1.1.js' %}"></script> <script src="{% static 'js/jquery-2.1.1.js' %}"></script>
<script src="{% static 'js/plugins/xterm/xterm.js' %}"></script> <script src="{% static 'js/plugins/xterm/xterm.js' %}"></script>
<link rel="stylesheet" href="{% static 'js/plugins/xterm/xterm.css' %}" /> <link rel="stylesheet" href="{% static 'js/plugins/xterm/xterm.css' %}" />
...@@ -15,14 +16,14 @@ ...@@ -15,14 +16,14 @@
} }
</style> </style>
</head> </head>
<div id="term" style="height: 100%;width: 100%"> <div id="term" style="height: 100%;width: 100%">
</div> </div>
<script> <script>
var rowHeight = 18; var rowHeight = 18;
var colWidth = 10; var colWidth = 10;
var mark = ''; var mark = '';
var url = "{% url 'api-ops:celery-task-log' pk=object.id %}"; var url = "{% url 'api-ops:celery-task-log' pk=task_id %}";
var term; var term;
var end = false; var end = false;
var error = false; var error = false;
...@@ -35,9 +36,9 @@ ...@@ -35,9 +36,9 @@
{#colWidth = 1.00 * t.width() / 6;#} {#colWidth = 1.00 * t.width() / 6;#}
} }
function resize() { function resize() {
var rows = Math.floor(window.innerHeight / rowHeight) - 1; {#var rows = Math.floor(window.innerHeight / rowHeight) - 1;#}
var cols = Math.floor(window.innerWidth / colWidth) - 2; {#var cols = Math.floor(window.innerWidth / colWidth) - 2;#}
term.resize(cols, rows); {#term.resize(cols, rows);#}
} }
function requestAndWrite() { function requestAndWrite() {
if (!end && success) { if (!end && success) {
...@@ -73,7 +74,7 @@ ...@@ -73,7 +74,7 @@
disableStdin: true disableStdin: true
}); });
term.open(document.getElementById('term')); term.open(document.getElementById('term'));
term.resize(80, 24); term.resize(90, 32);
resize(); resize();
term.on('data', function (data) { term.on('data', function (data) {
{#term.write(data.replace('\r', '\r\n'))#} {#term.write(data.replace('\r', '\r\n'))#}
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
from django.views.generic import DetailView from django.views.generic import DetailView, TemplateView
from common.permissions import AdminUserRequiredMixin from common.permissions import AdminUserRequiredMixin
from ..models import CeleryTask from ..models import CeleryTask
...@@ -9,6 +9,10 @@ from ..models import CeleryTask ...@@ -9,6 +9,10 @@ from ..models import CeleryTask
__all__ = ['CeleryTaskLogView'] __all__ = ['CeleryTaskLogView']
class CeleryTaskLogView(AdminUserRequiredMixin, DetailView): class CeleryTaskLogView(AdminUserRequiredMixin, TemplateView):
template_name = 'ops/celery_task_log.html' template_name = 'ops/celery_task_log.html'
model = CeleryTask
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'task_id': self.kwargs.get('pk')})
return context
...@@ -122,3 +122,7 @@ class Organization(models.Model): ...@@ -122,3 +122,7 @@ class Organization(models.Model):
return True return True
else: else:
return False return False
def change_to(self):
from .utils import set_current_org
set_current_org(self)
...@@ -33,16 +33,19 @@ class SessionViewSet(BulkModelViewSet): ...@@ -33,16 +33,19 @@ class SessionViewSet(BulkModelViewSet):
permission_classes = (IsOrgAdminOrAppUser,) permission_classes = (IsOrgAdminOrAppUser,)
def get_queryset(self): def get_queryset(self):
queryset = super().get_queryset()
terminal_id = self.kwargs.get("terminal", None) terminal_id = self.kwargs.get("terminal", None)
if terminal_id: if terminal_id:
terminal = get_object_or_404(Terminal, id=terminal_id) terminal = get_object_or_404(Terminal, id=terminal_id)
self.queryset = terminal.session_set.all() queryset = queryset.filter(terminal=terminal)
return self.queryset.all() return queryset
return queryset
def perform_create(self, serializer): def perform_create(self, serializer):
if hasattr(self.request.user, 'terminal'): if hasattr(self.request.user, 'terminal'):
serializer.validated_data["terminal"] = self.request.user.terminal serializer.validated_data["terminal"] = self.request.user.terminal
sid = serializer.validated_data["system_user"] sid = serializer.validated_data["system_user"]
# guacamole提交的是id
if is_uuid(sid): if is_uuid(sid):
_system_user = SystemUser.get_system_user_by_id_or_cached(sid) _system_user = SystemUser.get_system_user_by_id_or_cached(sid)
if _system_user: if _system_user:
......
...@@ -100,52 +100,15 @@ class StatusViewSet(viewsets.ModelViewSet): ...@@ -100,52 +100,15 @@ class StatusViewSet(viewsets.ModelViewSet):
task_serializer_class = serializers.TaskSerializer task_serializer_class = serializers.TaskSerializer
def create(self, request, *args, **kwargs): def create(self, request, *args, **kwargs):
from_gua = self.request.query_params.get("from_guacamole", None)
if not from_gua:
self.handle_sessions()
super().create(request, *args, **kwargs) super().create(request, *args, **kwargs)
self.handle_sessions()
tasks = self.request.user.terminal.task_set.filter(is_finished=False) tasks = self.request.user.terminal.task_set.filter(is_finished=False)
serializer = self.task_serializer_class(tasks, many=True) serializer = self.task_serializer_class(tasks, many=True)
return Response(serializer.data, status=201) return Response(serializer.data, status=201)
def handle_sessions(self): def handle_sessions(self):
sessions_active = [] sessions_id = self.request.data.get('sessions', [])
for session_data in self.request.data.get("sessions", []): Session.set_active_sessions(sessions_id)
self.create_or_update_session(session_data)
if not session_data["is_finished"]:
sessions_active.append(session_data["id"])
sessions_in_db_active = Session.objects.filter(
is_finished=False,
terminal=self.request.user.terminal.id
)
for session in sessions_in_db_active:
if str(session.id) not in sessions_active:
session.is_finished = True
session.date_end = timezone.now()
session.save()
def create_or_update_session(self, session_data):
session_data["terminal"] = self.request.user.terminal.id
_id = session_data["id"]
session = get_object_or_none(Session, id=_id)
if session:
serializer = serializers.SessionSerializer(
data=session_data, instance=session
)
else:
serializer = serializers.SessionSerializer(data=session_data)
if serializer.is_valid():
session = serializer.save()
return session
else:
msg = "session data is not valid {}: {}".format(
serializer.errors, str(serializer.data)
)
logger.error(msg)
return None
def get_queryset(self): def get_queryset(self):
terminal_id = self.kwargs.get("terminal", None) terminal_id = self.kwargs.get("terminal", None)
......
# ~*~ coding: utf-8 ~*~ # ~*~ coding: utf-8 ~*~
import datetime import datetime
from django.db import transaction
from django.utils import timezone from django.utils import timezone
from django.db.utils import OperationalError
from .base import CommandBase from .base import CommandBase
...@@ -35,7 +37,25 @@ class CommandStore(CommandBase): ...@@ -35,7 +37,25 @@ class CommandStore(CommandBase):
input=c["input"], output=c["output"], session=c["session"], input=c["input"], output=c["output"], session=c["session"],
org_id=c["org_id"], timestamp=c["timestamp"] org_id=c["org_id"], timestamp=c["timestamp"]
)) ))
return self.model.objects.bulk_create(_commands) error = False
try:
with transaction.atomic():
self.model.objects.bulk_create(_commands)
except OperationalError:
error = True
except:
return False
if not error:
return True
for command in _commands:
try:
with transaction.atomic():
command.save()
except OperationalError:
command.output = str(command.output.encode())
command.save()
return True
@staticmethod @staticmethod
def make_filter_kwargs( def make_filter_kwargs(
......
...@@ -8,6 +8,7 @@ from django.utils.translation import ugettext_lazy as _ ...@@ -8,6 +8,7 @@ from django.utils.translation import ugettext_lazy as _
from django.utils import timezone from django.utils import timezone
from django.conf import settings from django.conf import settings
from django.core.files.storage import default_storage from django.core.files.storage import default_storage
from django.core.cache import cache
from users.models import User from users.models import User
from orgs.mixins import OrgModelMixin from orgs.mixins import OrgModelMixin
...@@ -153,6 +154,7 @@ class Session(OrgModelMixin): ...@@ -153,6 +154,7 @@ class Session(OrgModelMixin):
date_end = models.DateTimeField(verbose_name=_("Date end"), null=True) date_end = models.DateTimeField(verbose_name=_("Date end"), null=True)
upload_to = 'replay' upload_to = 'replay'
ACTIVE_CACHE_KEY_PREFIX = 'SESSION_ACTIVE_{}'
def get_rel_replay_path(self, version=2): def get_rel_replay_path(self, version=2):
""" """
...@@ -182,6 +184,17 @@ class Session(OrgModelMixin): ...@@ -182,6 +184,17 @@ class Session(OrgModelMixin):
except OSError as e: except OSError as e:
return None, e return None, e
@classmethod
def set_active_sessions(cls, sessions_id):
data = {cls.ACTIVE_CACHE_KEY_PREFIX.format(i): i for i in sessions_id}
cache.set_many(data, timeout=5*60)
def is_active(self):
if self.protocol in ['ssh', 'telnet']:
key = self.ACTIVE_CACHE_KEY_PREFIX.format(self.id)
return bool(cache.get(key))
return True
class Meta: class Meta:
db_table = "terminal_session" db_table = "terminal_session"
ordering = ["-date_start"] ordering = ["-date_start"]
......
...@@ -69,6 +69,6 @@ class TaskSerializer(BulkSerializerMixin, serializers.ModelSerializer): ...@@ -69,6 +69,6 @@ class TaskSerializer(BulkSerializerMixin, serializers.ModelSerializer):
class ReplaySerializer(serializers.Serializer): class ReplaySerializer(serializers.Serializer):
file = serializers.FileField() file = serializers.FileField(allow_empty_file=True)
...@@ -10,8 +10,9 @@ from django.conf import settings ...@@ -10,8 +10,9 @@ from django.conf import settings
from django.core.files.storage import default_storage from django.core.files.storage import default_storage
from ops.celery.utils import register_as_period_task, after_app_ready_start, \ from ops.celery.decorator import (
after_app_shutdown_clean register_as_period_task, after_app_ready_start, after_app_shutdown_clean_periodic
)
from .models import Status, Session, Command from .models import Status, Session, Command
...@@ -23,28 +24,30 @@ logger = get_task_logger(__name__) ...@@ -23,28 +24,30 @@ logger = get_task_logger(__name__)
@shared_task @shared_task
@register_as_period_task(interval=3600) @register_as_period_task(interval=3600)
@after_app_ready_start @after_app_ready_start
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def delete_terminal_status_period(): def delete_terminal_status_period():
yesterday = timezone.now() - datetime.timedelta(days=1) yesterday = timezone.now() - datetime.timedelta(days=1)
Status.objects.filter(date_created__lt=yesterday).delete() Status.objects.filter(date_created__lt=yesterday).delete()
@shared_task @shared_task
@register_as_period_task(interval=3600) @register_as_period_task(interval=600)
@after_app_ready_start @after_app_ready_start
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def clean_orphan_session(): def clean_orphan_session():
active_sessions = Session.objects.filter(is_finished=False) active_sessions = Session.objects.filter(is_finished=False)
for session in active_sessions: for session in active_sessions:
if not session.terminal or not session.terminal.is_active: if not session.is_active():
continue
session.is_finished = True session.is_finished = True
session.date_end = timezone.now()
session.save() session.save()
@shared_task @shared_task
@register_as_period_task(interval=3600*24) @register_as_period_task(interval=3600*24)
@after_app_ready_start @after_app_ready_start
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def clean_expired_session_period(): def clean_expired_session_period():
logger.info("Start clean expired session record, commands and replay") logger.info("Start clean expired session record, commands and replay")
days = settings.TERMINAL_SESSION_KEEP_DURATION days = settings.TERMINAL_SESSION_KEEP_DURATION
...@@ -64,3 +67,4 @@ def clean_expired_session_period(): ...@@ -64,3 +67,4 @@ def clean_expired_session_period():
default_storage.delete(_local_path) default_storage.delete(_local_path)
# 删除session记录 # 删除session记录
session.delete() session.delete()
...@@ -3,10 +3,8 @@ ...@@ -3,10 +3,8 @@
from celery import shared_task from celery import shared_task
from ops.celery.utils import ( from ops.celery.utils import create_or_update_celery_periodic_tasks
create_or_update_celery_periodic_tasks, from ops.celery.decorator import after_app_ready_start
after_app_ready_start
)
from .models import User from .models import User
from common.utils import get_logger from common.utils import get_logger
from .utils import write_login_log, send_password_expiration_reminder_mail from .utils import write_login_log, send_password_expiration_reminder_mail
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment