Commit c8d007f9 authored by ibuler's avatar ibuler

Merge branch 'dev' of github.com:jumpserver/jumpserver into dev

parents 2fde6cfe d8069f47
......@@ -17,6 +17,7 @@ dump.rdb
.idea/
db.sqlite3
config.py
config.yml
*.log
host_rsa_key
*.bat
......
......@@ -6,13 +6,13 @@ RUN useradd jumpserver
COPY ./requirements /tmp/requirements
RUN yum -y install epel-release && cd /tmp/requirements && \
RUN yum -y install epel-release openldap-clients telnet && cd /tmp/requirements && \
yum -y install $(cat rpm_requirements.txt)
RUN cd /tmp/requirements && pip install -r requirements.txt
COPY . /opt/jumpserver
COPY config_docker.py /opt/jumpserver/config.py
RUN echo > config.yml
VOLUME /opt/jumpserver/data
VOLUME /opt/jumpserver/logs
......
......@@ -87,6 +87,7 @@ class AdminUserTestConnectiveApi(generics.RetrieveAPIView):
"""
queryset = AdminUser.objects.all()
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs):
admin_user = self.get_object()
......
......@@ -113,6 +113,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView):
"""
queryset = Asset.objects.all()
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs):
asset_id = kwargs.get('pk')
......@@ -124,6 +125,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView):
class AssetGatewayApi(generics.RetrieveAPIView):
queryset = Asset.objects.all()
permission_classes = (IsOrgAdminOrAppUser,)
serializer_class = serializers.GatewayWithAuthSerializer
def retrieve(self, request, *args, **kwargs):
asset_id = kwargs.get('pk')
......
......@@ -117,6 +117,7 @@ class SystemUserAssetsListView(generics.ListAPIView):
class SystemUserPushToAssetApi(generics.RetrieveAPIView):
queryset = SystemUser.objects.all()
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs):
system_user = self.get_object()
......@@ -129,6 +130,7 @@ class SystemUserPushToAssetApi(generics.RetrieveAPIView):
class SystemUserTestAssetConnectivityApi(generics.RetrieveAPIView):
queryset = SystemUser.objects.all()
permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs):
system_user = self.get_object()
......
......@@ -58,7 +58,7 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer):
管理用户更新关联到的集群
"""
nodes = serializers.PrimaryKeyRelatedField(
many=True, queryset = Node.objects.all()
many=True, queryset=Node.objects.all()
)
class Meta:
......@@ -66,4 +66,5 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer):
fields = ['id', 'nodes']
class TaskIDSerializer(serializers.Serializer):
    """Read-only serializer exposing the id of a dispatched background task."""
    task = serializers.CharField(read_only=True)
# ~*~ coding: utf-8 ~*~
import json
import re
import time
import os
from celery import shared_task
from django.utils.translation import ugettext as _
from django.core.cache import cache
from common.utils import capacity_convert, \
sum_capacity, encrypt_password, get_logger
from ops.celery.utils import register_as_period_task, after_app_shutdown_clean
from common.utils import (
capacity_convert, sum_capacity, encrypt_password, get_logger
)
from ops.celery.decorator import (
register_as_period_task, after_app_shutdown_clean_periodic
)
from .models import SystemUser, AdminUser, Asset
from . import const
......@@ -132,7 +134,7 @@ def update_assets_hardware_info_util(assets, task_name=None):
@shared_task
def update_asset_hardware_info_manual(asset):
task_name = _("Update asset hardware info: {}").format(asset.hostname)
return update_assets_hardware_info_util(
update_assets_hardware_info_util(
[asset], task_name=task_name
)
......@@ -221,12 +223,14 @@ def test_admin_user_connectivity_period():
for admin_user in admin_users:
task_name = _("Test admin user connectivity period: {}").format(admin_user.name)
test_admin_user_connectivity_util(admin_user, task_name)
cache.set(key, 1, 60*40)
@shared_task
def test_admin_user_connectivity_manual(admin_user):
task_name = _("Test admin user connectivity: {}").format(admin_user.name)
return test_admin_user_connectivity_util(admin_user, task_name)
test_admin_user_connectivity_util(admin_user, task_name)
return True
## System user connective ##
......@@ -394,13 +398,13 @@ def push_system_user_to_assets(system_user, assets):
@shared_task
@after_app_shutdown_clean
@after_app_shutdown_clean_periodic
def test_system_user_connectability_period():
pass
@shared_task
@after_app_shutdown_clean
@after_app_shutdown_clean_periodic
def test_admin_user_connectability_period():
pass
......@@ -408,7 +412,7 @@ def test_admin_user_connectability_period():
# @shared_task
# @register_as_period_task(interval=3600)
# @after_app_ready_start
# # @after_app_shutdown_clean
# @after_app_shutdown_clean_periodic
# def push_system_user_period():
# for system_user in SystemUser.objects.all():
# push_system_user_related_nodes(system_user)
......
# -*- coding: utf-8 -*-
#
from django.contrib.auth import get_user_model
from radiusauth.backends import RADIUSBackend, RADIUSRealmBackend
from django.conf import settings
User = get_user_model()
class CreateUserMixin:
    """Mixin that maps an external (RADIUS) login to a local Django user.

    Looks the user up by username and, when missing, auto-provisions a
    local account, deriving an email address from the configured suffix.
    """

    def get_django_user(self, username, password=None):
        # The RADIUS library may hand us a bytes username; normalise first.
        if isinstance(username, bytes):
            username = username.decode()
        try:
            return User.objects.get(username=username)
        except User.DoesNotExist:
            pass
        # First login: create a local account for this external user.
        if '@' in username:
            email = username
        else:
            email = '{}@{}'.format(username, settings.EMAIL_SUFFIX)
        user = User(username=username, name=username, email=email)
        user.source = user.SOURCE_RADIUS
        user.save()
        return user
class RadiusBackend(CreateUserMixin, RADIUSBackend):
    """RADIUS auth backend that auto-creates missing local users."""
    pass
class RadiusRealmBackend(CreateUserMixin, RADIUSRealmBackend):
    """Realm-aware RADIUS auth backend that auto-creates missing local users."""
    pass
......@@ -4,15 +4,20 @@
import os
import json
import jms_storage
import uuid
from rest_framework.views import Response, APIView
from rest_framework import generics
from ldap3 import Server, Connection
from django.core.mail import get_connection, send_mail
from django.core.mail import send_mail
from django.core.cache import cache
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from .permissions import IsOrgAdmin, IsSuperUser
from .serializers import MailTestSerializer, LDAPTestSerializer
from .serializers import (
MailTestSerializer, LDAPTestSerializer, OutputSerializer
)
from .models import Setting
......@@ -189,4 +194,39 @@ class DjangoSettingsAPI(APIView):
return Response(data)
class LogTailApi(generics.RetrieveAPIView):
    """Generic "tail -f"-style API: each GET returns the next chunk of a log file.

    The client echoes back the ``mark`` token from the previous response; the
    file offset for that token is kept in the cache, so successive requests
    stream the file incrementally.  Subclasses must implement
    ``get_log_path()`` and may override ``is_file_finish_write()``.
    """
    permission_classes = ()
    # Maximum number of characters returned per request (10 KiB).
    buff_size = 1024 * 10
    serializer_class = OutputSerializer
    # Becomes True once the writer has finished and no data is left to read.
    end = False

    def is_file_finish_write(self):
        # Subclasses override this to report whether the producer is done
        # writing; the default assumes the file is complete.
        return True

    def get_log_path(self):
        # Subclasses must return the filesystem path of the log to tail.
        raise NotImplementedError()

    def get(self, request, *args, **kwargs):
        """Return the next chunk as {"data": ..., "end": bool, "mark": token}."""
        # "mark" identifies the client's read position; a fresh token means
        # start from offset 0 (cache.get below defaults to 0).
        mark = request.query_params.get("mark") or str(uuid.uuid4())
        log_path = self.get_log_path()
        if not log_path or not os.path.isfile(log_path):
            if self.is_file_finish_write():
                # Writer finished but no file exists: nothing to stream.
                return Response({
                    "data": 'Not found the log',
                    'end': True,
                    'mark': mark}
                )
            else:
                # File not created yet; tell the client to poll again.
                return Response({"data": _("Waiting ...\n")}, status=200)
        with open(log_path, 'r') as f:
            offset = cache.get(mark, 0)
            f.seek(offset)
            # CRLF-normalise for terminal-style rendering on the front end.
            data = f.read(self.buff_size).replace('\n', '\r\n')
            # Issue a new token for the next request; the offset lives only
            # 5 seconds, so the client is expected to poll promptly.
            mark = str(uuid.uuid4())
            cache.set(mark, f.tell(), 5)
            if data == '' and self.is_file_finish_write():
                self.end = True
            return Response({"data": data, 'end': self.end, 'mark': mark})
......@@ -19,3 +19,8 @@ class LDAPTestSerializer(serializers.Serializer):
AUTH_LDAP_USER_ATTR_MAP = serializers.CharField()
AUTH_LDAP_START_TLS = serializers.BooleanField(required=False)
class OutputSerializer(serializers.Serializer):
    """Schema of one log-tail response chunk.

    NOTE(review): LogTailApi actually responds with keys data/end/mark,
    not output/is_end/mark — confirm whether these field names are right.
    """
    output = serializers.CharField()
    is_end = serializers.BooleanField()
    mark = serializers.CharField()
# -*- coding: utf-8 -*-
#
import json
from django.dispatch import receiver
from django.db.models.signals import post_save, pre_save
from django.conf import LazySettings, empty
......@@ -8,7 +10,7 @@ from django.core.cache import cache
from jumpserver.utils import current_request
from .models import Setting
from .utils import get_logger
from .utils import get_logger, ssh_key_gen
from .signals import django_ready
logger = get_logger(__file__)
......@@ -16,23 +18,25 @@ logger = get_logger(__file__)
@receiver(post_save, sender=Setting, dispatch_uid="my_unique_identifier")
def refresh_settings_on_changed(sender, instance=None, **kwargs):
logger.debug("Receive setting item change")
logger.debug(" - refresh setting: {}".format(instance.name))
if instance:
instance.refresh_setting()
@receiver(django_ready, dispatch_uid="my_unique_identifier")
def refresh_all_settings_on_django_ready(sender, **kwargs):
logger.debug("Receive django ready signal")
logger.debug(" - fresh all settings")
def monkey_patch_settings(sender, **kwargs):
cache_key_prefix = '_SETTING_'
uncached_settings = [
'CACHES', 'DEBUG', 'SECRET_KEY', 'INSTALLED_APPS',
'ROOT_URLCONF', 'TEMPLATES', 'DATABASES', '_wrapped',
'CELERY_LOG_DIR'
]
def monkey_patch_getattr(self, name):
key = cache_key_prefix + name
cached = cache.get(key)
if cached is not None:
return cached
if name not in uncached_settings:
key = cache_key_prefix + name
cached = cache.get(key)
if cached is not None:
return cached
if self._wrapped is empty:
self._setup(name)
val = getattr(self._wrapped, name)
......@@ -62,6 +66,18 @@ def refresh_all_settings_on_django_ready(sender, **kwargs):
pass
@receiver(django_ready)
def auto_generate_terminal_host_key(sender, **kwargs):
    """On app start, create the TERMINAL_HOST_KEY setting if it is missing."""
    try:
        if Setting.objects.filter(name='TERMINAL_HOST_KEY').exists():
            return
    except ProgrammingError:
        # Settings table not migrated yet (fresh install) — skip quietly.
        return
    private_key, public_key = ssh_key_gen()
    # Setting values are stored JSON-encoded, hence dumps() on the key string.
    value = json.dumps(private_key)
    Setting.objects.create(name='TERMINAL_HOST_KEY', value=value)
@receiver(pre_save, dispatch_uid="my_unique_identifier")
def on_create_set_created_by(sender, instance=None, **kwargs):
if getattr(instance, '_ignore_auto_created_by', False) is True:
......
......@@ -406,24 +406,6 @@ def get_replay_storage_setting():
return value
class TeeObj:
    """File-like object that duplicates writes to stdout and a file object."""
    # Captured at class-definition time so output still reaches the real
    # console even after sys.stdout is rebound to a TeeObj instance.
    origin_stdout = sys.stdout

    def __init__(self, file_obj):
        self.file_obj = file_obj

    def write(self, msg):
        self.origin_stdout.write(msg)
        # '*' characters are stripped from the file copy — presumably
        # masking characters; TODO confirm intent.
        self.file_obj.write(msg.replace('*', ''))

    def flush(self):
        self.origin_stdout.flush()
        self.file_obj.flush()

    def close(self):
        self.file_obj.close()
def with_cache(func):
cache = {}
key = "_{}.{}".format(func.__module__, func.__name__)
......
......@@ -193,14 +193,16 @@ class Config(dict):
if self.root_path:
filename = os.path.join(self.root_path, filename)
try:
with open(filename) as json_file:
obj = yaml.load(json_file)
with open(filename) as f:
obj = yaml.load(f)
except IOError as e:
if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return False
e.strerror = 'Unable to load configuration file (%s)' % e.strerror
raise
return self.from_mapping(obj)
if obj:
return self.from_mapping(obj)
return True
def from_mapping(self, *mapping, **kwargs):
"""Updates the config like :meth:`update` ignoring items with non-upper
......@@ -286,8 +288,8 @@ class Config(dict):
defaults = {
'SECRET_KEY': '2vym+ky!997d5kkcc64mnz06y1mmui3lut#(^wd=%s_qj$1%x',
'BOOTSTRAP_TOKEN': 'PleaseChangeMe',
'SECRET_KEY': '',
'BOOTSTRAP_TOKEN': '',
'DEBUG': True,
'SITE_URL': 'http://localhost',
'LOG_LEVEL': 'DEBUG',
......@@ -312,6 +314,7 @@ defaults = {
'SESSION_COOKIE_AGE': 3600 * 24,
'SESSION_EXPIRE_AT_BROWSER_CLOSE': False,
'AUTH_OPENID': False,
'OTP_VALID_WINDOW': 0,
'OTP_ISSUER_NAME': 'Jumpserver',
'EMAIL_SUFFIX': 'jumpserver.org',
'TERMINAL_PASSWORD_AUTH': True,
......@@ -320,6 +323,7 @@ defaults = {
'TERMINAL_ASSET_LIST_SORT_BY': 'hostname',
'TERMINAL_ASSET_LIST_PAGE_SIZE': 'auto',
'TERMINAL_SESSION_KEEP_DURATION': 9999,
'TERMINAL_HOST_KEY': '',
'SECURITY_MFA_AUTH': False,
'SECURITY_LOGIN_LIMIT_COUNT': 7,
'SECURITY_LOGIN_LIMIT_TIME': 30,
......@@ -330,21 +334,48 @@ defaults = {
'SECURITY_PASSWORD_LOWER_CASE': False,
'SECURITY_PASSWORD_NUMBER': False,
'SECURITY_PASSWORD_SPECIAL_CHAR': False,
'AUTH_RADIUS': False,
'RADIUS_SERVER': 'localhost',
'RADIUS_PORT': 1812,
'RADIUS_SECRET': '',
'HTTP_BIND_HOST': '0.0.0.0',
'HTTP_LISTEN_PORT': 8080,
}
def load_user_config():
sys.path.insert(0, PROJECT_DIR)
config = Config(PROJECT_DIR, defaults)
def load_from_object(config):
try:
from config import config as c
config.from_object(c)
return True
except ImportError:
pass
return False
def load_from_yml(config):
for i in ['config.yml', 'config.yaml']:
if not os.path.isfile(os.path.join(config.root_path, i)):
continue
loaded = config.from_yaml(i)
if loaded:
return True
return False
def load_user_config():
sys.path.insert(0, PROJECT_DIR)
config = Config(PROJECT_DIR, defaults)
loaded = load_from_object(config)
if not loaded:
loaded = load_from_yml(config)
if not loaded:
msg = """
Error: No config file found.
You can run `cp config_example.py config.py`, and edit it.
You can run `cp config_example.yml config.yml`, and edit it.
"""
raise ImportError(msg)
return config
......@@ -12,6 +12,7 @@ https://docs.djangoproject.com/en/1.10/ref/settings/
import os
import sys
import socket
import ldap
# from django_auth_ldap.config import LDAPSearch, LDAPSearchUnion
......@@ -23,6 +24,13 @@ from .conf import load_user_config
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_DIR = os.path.dirname(BASE_DIR)
CONFIG = load_user_config()
LOG_DIR = os.path.join(PROJECT_DIR, 'logs')
HOSTNAME = socket.gethostname()
JUMPSERVER_LOG_FILE = os.path.join(LOG_DIR, 'jumpserver-{}.log'.format(HOSTNAME))
ANSIBLE_LOG_FILE = os.path.join(LOG_DIR, 'ansible-{}.log'.format(HOSTNAME))
if not os.path.isdir(LOG_DIR):
os.makedirs(LOG_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
......@@ -209,19 +217,21 @@ LOGGING = {
'formatter': 'main'
},
'file': {
'encoding': 'utf8',
'level': 'DEBUG',
'class': 'logging.handlers.TimedRotatingFileHandler',
'when': "D",
'interval': 1,
"backupCount": 7,
'formatter': 'main',
'filename': os.path.join(PROJECT_DIR, 'logs', 'jumpserver.log')
'filename': JUMPSERVER_LOG_FILE,
},
'ansible_logs': {
'encoding': 'utf8',
'level': 'DEBUG',
'class': 'logging.FileHandler',
'formatter': 'main',
'filename': os.path.join(PROJECT_DIR, 'logs', 'ansible.log')
'filename': ANSIBLE_LOG_FILE,
},
},
'loggers': {
......@@ -400,6 +410,19 @@ if AUTH_OPENID:
AUTHENTICATION_BACKENDS.insert(0, AUTH_OPENID_BACKENDS[0])
AUTHENTICATION_BACKENDS.insert(0, AUTH_OPENID_BACKENDS[1])
# Radius Auth
AUTH_RADIUS = CONFIG.AUTH_RADIUS
AUTH_RADIUS_BACKEND = 'authentication.radius.backends.RadiusBackend'
RADIUS_SERVER = CONFIG.RADIUS_SERVER
RADIUS_PORT = CONFIG.RADIUS_PORT
RADIUS_SECRET = CONFIG.RADIUS_SECRET
if AUTH_RADIUS:
AUTHENTICATION_BACKENDS.insert(0, AUTH_RADIUS_BACKEND)
# Dump all celery log to here
CELERY_LOG_DIR = os.path.join(PROJECT_DIR, 'data', 'celery')
# Celery using redis as broker
CELERY_BROKER_URL = 'redis://:%(password)s@%(host)s:%(port)s/%(db)s' % {
'password': CONFIG.REDIS_PASSWORD,
......@@ -413,14 +436,16 @@ CELERY_RESULT_BACKEND = CELERY_BROKER_URL
CELERY_ACCEPT_CONTENT = ['json', 'pickle']
CELERY_RESULT_EXPIRES = 3600
# CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_WORKER_LOG_FORMAT = '%(message)s'
# CELERY_WORKER_TASK_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_WORKER_TASK_LOG_FORMAT = '%(message)s'
# CELERY_WORKER_LOG_FORMAT = '%(message)s'
CELERY_WORKER_TASK_LOG_FORMAT = '%(task_id)s %(task_name)s %(message)s'
# CELERY_WORKER_TASK_LOG_FORMAT = '%(message)s'
# CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_WORKER_LOG_FORMAT = '%(message)s'
CELERY_TASK_EAGER_PROPAGATES = True
CELERY_REDIRECT_STDOUTS = True
CELERY_REDIRECT_STDOUTS_LEVEL = "INFO"
CELERY_WORKER_HIJACK_ROOT_LOGGER = False
CELERY_WORKER_REDIRECT_STDOUTS = True
CELERY_WORKER_REDIRECT_STDOUTS_LEVEL = "INFO"
# CELERY_WORKER_HIJACK_ROOT_LOGGER = False
CELERY_WORKER_MAX_TASKS_PER_CHILD = 40
# Cache use redis
CACHES = {
......@@ -492,6 +517,7 @@ TERMINAL_HEARTBEAT_INTERVAL = CONFIG.TERMINAL_HEARTBEAT_INTERVAL
TERMINAL_ASSET_LIST_SORT_BY = CONFIG.TERMINAL_ASSET_LIST_SORT_BY
TERMINAL_ASSET_LIST_PAGE_SIZE = CONFIG.TERMINAL_ASSET_LIST_PAGE_SIZE
TERMINAL_SESSION_KEEP_DURATION = CONFIG.TERMINAL_SESSION_KEEP_DURATION
TERMINAL_HOST_KEY = CONFIG.TERMINAL_HOST_KEY
# Django bootstrap3 setting, more see http://django-bootstrap3.readthedocs.io/en/latest/settings.html
BOOTSTRAP3 = {
......
This diff is collapsed.
# -*- coding: utf-8 -*-
#
import sys
class TeeObj:
    """File-like object that duplicates writes to stdout and a file object."""
    # Captured at class-definition time so output still reaches the real
    # console even after sys.stdout is rebound to a TeeObj instance.
    origin_stdout = sys.stdout

    def __init__(self, file_obj):
        self.file_obj = file_obj

    def write(self, msg):
        self.origin_stdout.write(msg)
        # '*' characters are stripped from the file copy — presumably
        # masking characters; TODO confirm intent.
        self.file_obj.write(msg.replace('*', ''))

    def flush(self):
        self.origin_stdout.flush()
        self.file_obj.flush()
......@@ -9,10 +9,10 @@ from ansible.parsing.dataloader import DataLoader
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.playbook.play import Play
import ansible.constants as C
from ansible.utils.display import Display
from .callback import AdHocResultCallback, PlaybookResultCallBack, \
CommandResultCallback
from .callback import (
AdHocResultCallback, PlaybookResultCallBack, CommandResultCallback
)
from common.utils import get_logger
from .exceptions import AnsibleError
......@@ -22,13 +22,6 @@ C.HOST_KEY_CHECKING = False
logger = get_logger(__name__)
class CustomDisplay(Display):
    """Ansible Display subclass that discards all output."""
    def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False):
        # Intentionally drop everything ansible would print.
        pass


# NOTE(review): module-level `display` is presumably picked up by ansible's
# display machinery to silence runner output — confirm against ansible docs.
display = CustomDisplay()
Options = namedtuple('Options', [
'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
'module_path', 'forks', 'remote_user', 'private_key_file', 'timeout',
......
# -*- coding: utf-8 -*-
#
import uuid
import os
import os
from celery.result import AsyncResult
from django.core.cache import cache
from django.utils.translation import ugettext as _
from rest_framework import generics
from rest_framework.views import Response
from common.permissions import IsOrgAdmin, IsValidUser
from common.permissions import IsValidUser
from common.api import LogTailApi
from ..models import CeleryTask
from ..serializers import CeleryResultSerializer
from ..celery.utils import get_celery_task_log_path
__all__ = ['CeleryTaskLogApi', 'CeleryResultApi']
class CeleryTaskLogApi(generics.RetrieveAPIView):
class CeleryTaskLogApi(LogTailApi):
permission_classes = (IsValidUser,)
buff_size = 1024 * 10
end = False
queryset = CeleryTask.objects.all()
task = None
task_id = ''
def get(self, request, *args, **kwargs):
mark = request.query_params.get("mark") or str(uuid.uuid4())
task = self.get_object()
log_path = task.full_log_path
if not log_path or not os.path.isfile(log_path):
return Response({"data": _("Waiting ...")}, status=203)
with open(log_path, 'r') as f:
offset = cache.get(mark, 0)
f.seek(offset)
data = f.read(self.buff_size).replace('\n', '\r\n')
mark = str(uuid.uuid4())
cache.set(mark, f.tell(), 5)
if data == '' and task.is_finished():
self.end = True
return Response({"data": data, 'end': self.end, 'mark': mark})
self.task_id = str(kwargs.get('pk'))
self.task = AsyncResult(self.task_id)
return super().get(request, *args, **kwargs)
def get_log_path(self):
new_path = get_celery_task_log_path(self.task_id)
if new_path and os.path.isfile(new_path):
return new_path
try:
task = CeleryTask.objects.get(id=self.task_id)
except CeleryTask.DoesNotExist:
return None
return task.full_log_path
def is_file_finish_write(self):
return self.task.ready()
class CeleryResultApi(generics.RetrieveAPIView):
......
......@@ -10,6 +10,5 @@ class OpsConfig(AppConfig):
from orgs.models import Organization
from orgs.utils import set_current_org
set_current_org(Organization.root())
super().ready()
from .celery import signal_handler
super().ready()
# -*- coding: utf-8 -*-
#
from functools import wraps
# Module-level registries filled at import time by the decorators below;
# consumed by the celery worker ready/shutdown signal handlers.
_need_registered_period_tasks = []
_after_app_ready_start_tasks = []
_after_app_shutdown_clean_periodic_tasks = []
def add_register_period_task(task):
    """Queue a periodic-task definition dict for registration at app startup."""
    # Dropped dead commented-out cache-based implementation (it even
    # referenced an undefined name).
    _need_registered_period_tasks.append(task)
def get_register_period_tasks():
    """Return the queued periodic-task definitions."""
    return _need_registered_period_tasks
def add_after_app_shutdown_clean_task(name):
    """Record a periodic task name to be removed when the worker shuts down."""
    _after_app_shutdown_clean_periodic_tasks.append(name)
def get_after_app_shutdown_clean_tasks():
    """Return the task names to clean up after the worker shuts down."""
    return _after_app_shutdown_clean_periodic_tasks
def add_after_app_ready_task(name):
    """Record a task name to be kicked off once the worker is ready."""
    _after_app_ready_start_tasks.append(name)
def get_after_app_ready_tasks():
    """Return the task names to start after the worker is ready."""
    return _after_app_ready_start_tasks
def register_as_period_task(crontab=None, interval=None):
    """Mark the decorated celery task for registration as a periodic task.

    The decorated task must take no args or kwargs.

    :param crontab: crontab expression, e.g. "* * * * *"
    :param interval: period in seconds, e.g. 60 * 60
    :return: decorator that records the task and returns it unchanged
    :raises SyntaxError: if neither crontab nor interval is given
    """
    if crontab is None and interval is None:
        raise SyntaxError("Must set crontab or interval one")

    def decorate(func):
        # (Removed a second, unreachable copy of the crontab/interval check:
        # the module-level check above already raised before we get here.)
        # The celery task object does not exist yet when the decorator runs,
        # so derive the task name from the function's module path.
        name = '{func.__module__}.{func.__name__}'.format(func=func)
        add_register_period_task({
            name: {
                'task': name,
                'interval': interval,
                'crontab': crontab,
                'args': (),
                'enabled': True,
            }
        })

        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)

        return wrapper

    return decorate
def after_app_ready_start(func):
    """Register *func* to be launched once the celery worker is ready."""
    # The task is not created yet at decoration time, so build the task
    # name from the function's module path instead of func.name.
    task_name = '{func.__module__}.{func.__name__}'.format(func=func)
    if task_name not in _after_app_ready_start_tasks:
        add_after_app_ready_task(task_name)

    @wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    return wrapper
def after_app_shutdown_clean_periodic(func):
    """Register *func*'s periodic entry for removal at worker shutdown."""
    # The task is not created yet at decoration time, so build the task
    # name from the function's module path instead of func.name.
    task_name = '{func.__module__}.{func.__name__}'.format(func=func)
    if task_name not in _after_app_shutdown_clean_periodic_tasks:
        add_after_app_shutdown_clean_task(task_name)

    @wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)

    return wrapper
from logging import StreamHandler
from django.conf import settings
from celery import current_task
from celery.signals import task_prerun, task_postrun
from kombu import Connection, Exchange, Queue, Producer
from kombu.mixins import ConsumerMixin
from .utils import get_celery_task_log_path
# MQ wiring for celery task logs: one direct exchange, one queue bound
# with the same routing key, shared by producer and consumer below.
routing_key = 'celery_log'
celery_log_exchange = Exchange('celery_log_exchange', type='direct')
celery_log_queue = [Queue('celery_log', celery_log_exchange, routing_key=routing_key)]
class CeleryLoggerConsumer(ConsumerMixin):
    """Consume celery task log messages published on the log queue.

    Subclasses override the ``handle_task_*`` hooks to react to the three
    message kinds (task start / log line / task end).
    """

    def __init__(self):
        self.connection = Connection(settings.CELERY_LOG_BROKER_URL)

    def get_consumers(self, Consumer, channel):
        consumer = Consumer(
            queues=celery_log_queue,
            accept=['pickle', 'json'],
            callbacks=[self.process_task],
        )
        return [consumer]

    def handle_task_start(self, task_id, message):
        pass

    def handle_task_end(self, task_id, message):
        pass

    def handle_task_log(self, task_id, msg, message):
        pass

    def process_task(self, body, message):
        """Dispatch one queue message to the handler matching its action."""
        action = body.get('action')
        task_id = body.get('task_id')
        msg = body.get('msg')
        if action == CeleryLoggerProducer.ACTION_TASK_LOG:
            self.handle_task_log(task_id, msg, message)
        elif action == CeleryLoggerProducer.ACTION_TASK_START:
            self.handle_task_start(task_id, message)
        elif action == CeleryLoggerProducer.ACTION_TASK_END:
            self.handle_task_end(task_id, message)
class CeleryLoggerProducer:
    """Publish celery task log events (start / log line / end) to the MQ."""

    # Message kinds understood by CeleryLoggerConsumer.
    ACTION_TASK_START, ACTION_TASK_LOG, ACTION_TASK_END = range(3)

    def __init__(self):
        self.connection = Connection(settings.CELERY_LOG_BROKER_URL)

    @property
    def producer(self):
        # A fresh Producer is built on every access (see publish()).
        return Producer(self.connection)

    def publish(self, payload):
        self.producer.publish(
            payload, serializer='json', exchange=celery_log_exchange,
            declare=[celery_log_exchange], routing_key=routing_key
        )

    def log(self, task_id, msg):
        """Publish one log line for *task_id*."""
        return self.publish({
            'task_id': task_id, 'msg': msg, 'action': self.ACTION_TASK_LOG,
        })

    def read(self):
        # File-like stub: the producer side only writes.
        pass

    def flush(self):
        # File-like stub: publishing is immediate, nothing to flush.
        pass

    def task_end(self, task_id):
        """Publish the end-of-task marker."""
        return self.publish({'task_id': task_id, 'action': self.ACTION_TASK_END})

    def task_start(self, task_id):
        """Publish the start-of-task marker."""
        return self.publish({'task_id': task_id, 'action': self.ACTION_TASK_START})
class CeleryTaskLoggerHandler(StreamHandler):
    """Base logging handler that routes log records to the current celery task.

    Connects to celery's task_prerun/task_postrun signals so subclasses can
    open and close a per-task sink via handle_task_start/handle_task_end.
    Records emitted outside a running task are dropped.
    """
    # CRLF line endings for terminal-style front-end display.
    terminator = '\r\n'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        task_prerun.connect(self.on_task_start)
        task_postrun.connect(self.on_start_end)

    @staticmethod
    def get_current_task_id():
        """Return the root task id of the task running in this worker, or None."""
        if not current_task:
            return
        task_id = current_task.request.root_id
        return task_id

    def on_task_start(self, sender, task_id, **kwargs):
        # Signal adapter: celery task_prerun -> subclass hook.
        return self.handle_task_start(task_id)

    def on_start_end(self, sender, task_id, **kwargs):
        # Signal adapter: celery task_postrun -> subclass hook.
        return self.handle_task_end(task_id)

    def after_task_publish(self, sender, body, **kwargs):
        pass

    def emit(self, record):
        # Only records emitted while a task is running are kept.
        task_id = self.get_current_task_id()
        if not task_id:
            return
        try:
            self.write_task_log(task_id, record)
            self.flush()
        except Exception:
            self.handleError(record)

    def write_task_log(self, task_id, msg):
        # Subclasses write one record to their per-task sink.
        pass

    def handle_task_start(self, task_id):
        pass

    def handle_task_end(self, task_id):
        pass
class CeleryTaskMQLoggerHandler(CeleryTaskLoggerHandler):
    """Logging handler that ships task log records to the message queue."""

    def __init__(self):
        self.producer = CeleryLoggerProducer()
        super().__init__(stream=None)

    def write_task_log(self, task_id, record):
        # Format the record once and hand it straight to the MQ producer.
        self.producer.log(task_id, self.format(record))

    def flush(self):
        self.producer.flush()
class CeleryTaskFileHandler(CeleryTaskLoggerHandler):
    """Logging handler that writes each task's records to its own log file.

    A file is opened on task start (path from get_celery_task_log_path) and
    closed on task end; records emitted while no file is open are dropped.
    """

    def __init__(self):
        # Current task's log file; None while no task is running.
        self.f = None
        super().__init__(stream=None)

    def emit(self, record):
        msg = self.format(record)
        # Guard against both "no file yet" and "file already closed": the
        # original truthiness check let emit() write to a closed file after
        # handle_task_end, raising ValueError.
        if not self.f or self.f.closed:
            return
        self.f.write(msg)
        self.f.write(self.terminator)
        self.flush()

    def flush(self):
        if self.f and not self.f.closed:
            self.f.flush()

    def handle_task_start(self, task_id):
        log_path = get_celery_task_log_path(task_id)
        self.f = open(log_path, 'a')

    def handle_task_end(self, task_id):
        if self.f:
            self.f.close()
        # Reset so a stale, closed file object is never reused.
        self.f = None
# -*- coding: utf-8 -*-
#
import os
import datetime
import sys
import time
import logging
from django.conf import settings
from django.utils import timezone
from django.core.cache import cache
from django.db import transaction
from celery import subtask
from celery.signals import worker_ready, worker_shutdown, task_prerun, \
task_postrun, after_task_publish
from celery.signals import (
worker_ready, worker_shutdown, after_setup_logger
)
from django_celery_beat.models import PeriodicTask
from common.utils import get_logger, TeeObj, get_object_or_none
from common.const import celery_task_pre_key
from .utils import get_after_app_ready_tasks, get_after_app_shutdown_clean_tasks
from ..models import CeleryTask
from common.utils import get_logger
from .decorator import get_after_app_ready_tasks, get_after_app_shutdown_clean_tasks
from .logger import CeleryTaskFileHandler
logger = get_logger(__file__)
@worker_ready.connect
def on_app_ready(sender=None, headers=None, body=None, **kwargs):
def on_app_ready(sender=None, headers=None, **kwargs):
if cache.get("CELERY_APP_READY", 0) == 1:
return
cache.set("CELERY_APP_READY", 1, 10)
tasks = get_after_app_ready_tasks()
logger.debug("Start need start task: [{}]".format(
", ".join(tasks))
)
logger.debug("Work ready signal recv")
logger.debug("Start need start task: [{}]".format(", ".join(tasks)))
for task in tasks:
subtask(task).delay()
@worker_shutdown.connect
def after_app_shutdown(sender=None, headers=None, body=None, **kwargs):
def after_app_shutdown_periodic_tasks(sender=None, **kwargs):
if cache.get("CELERY_APP_SHUTDOWN", 0) == 1:
return
cache.set("CELERY_APP_SHUTDOWN", 1, 10)
tasks = get_after_app_shutdown_clean_tasks()
logger.debug("App shutdown signal recv")
logger.debug("Clean need cleaned period tasks: [{}]".format(
', '.join(tasks))
)
logger.debug("Worker shutdown signal recv")
logger.debug("Clean period tasks: [{}]".format(', '.join(tasks)))
PeriodicTask.objects.filter(name__in=tasks).delete()
@after_task_publish.connect
def after_task_publish_signal_handler(sender, headers=None, **kwargs):
CeleryTask.objects.create(
id=headers["id"], status=CeleryTask.WAITING, name=headers["task"]
)
cache.set(headers["id"], True, 3600)
@task_prerun.connect
def pre_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
time.sleep(0.1)
for i in range(5):
if cache.get(task_id, False):
break
else:
time.sleep(0.1)
continue
t = get_object_or_none(CeleryTask, id=task_id)
if t is None:
logger.warn("Not get the task: {}".format(task_id))
return
now = datetime.datetime.now().strftime("%Y-%m-%d")
log_path = os.path.join(now, task_id + '.log')
full_path = os.path.join(CeleryTask.LOG_DIR, log_path)
if not os.path.exists(os.path.dirname(full_path)):
os.makedirs(os.path.dirname(full_path))
with transaction.atomic():
t.date_start = timezone.now()
t.status = CeleryTask.RUNNING
t.log_path = log_path
t.save()
f = open(full_path, 'w')
tee = TeeObj(f)
sys.stdout = tee
task.log_f = tee
@task_postrun.connect
def post_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
t = get_object_or_none(CeleryTask, id=task_id)
if t is None:
logger.warn("Not get the task: {}".format(task_id))
@after_setup_logger.connect
def add_celery_logger_handler(sender=None, logger=None, loglevel=None, format=None, **kwargs):
if not logger:
return
with transaction.atomic():
t.status = CeleryTask.FINISHED
t.date_finished = timezone.now()
t.save()
task.log_f.flush()
sys.stdout = task.log_f.origin_stdout
task.log_f.close()
handler = CeleryTaskFileHandler()
handler.setLevel(loglevel)
formatter = logging.Formatter(format)
handler.setFormatter(formatter)
logger.addHandler(handler)
# @after_task_publish.connect
# def after_task_publish_signal_handler(sender, headers=None, **kwargs):
# CeleryTask.objects.create(
# id=headers["id"], status=CeleryTask.WAITING, name=headers["task"]
# )
# cache.set(headers["id"], True, 3600)
#
#
# @task_prerun.connect
# def pre_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
# time.sleep(0.1)
# for i in range(5):
# if cache.get(task_id, False):
# break
# else:
# time.sleep(0.1)
# continue
#
# t = get_object_or_none(CeleryTask, id=task_id)
# if t is None:
# logger.warn("Not get the task: {}".format(task_id))
# return
# now = datetime.datetime.now().strftime("%Y-%m-%d")
# log_path = os.path.join(now, task_id + '.log')
# full_path = os.path.join(CeleryTask.LOG_DIR, log_path)
#
# if not os.path.exists(os.path.dirname(full_path)):
# os.makedirs(os.path.dirname(full_path))
# with transaction.atomic():
# t.date_start = timezone.now()
# t.status = CeleryTask.RUNNING
# t.log_path = log_path
# t.save()
# f = open(full_path, 'w', encoding="utf-8")
# tee = TeeObj(f)
# sys.stdout = tee
# task.log_f = tee
#
#
# @task_postrun.connect
# def post_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
# t = get_object_or_none(CeleryTask, id=task_id)
# if t is None:
# logger.warn("Not get the task: {}".format(task_id))
# return
# with transaction.atomic():
# t.status = CeleryTask.FINISHED
# t.date_finished = timezone.now()
# t.save()
# task.log_f.flush()
# sys.stdout = task.log_f.origin_stdout
# task.log_f.close()
# -*- coding: utf-8 -*-
#
import json
from functools import wraps
import os
from django.conf import settings
from django.db.utils import ProgrammingError, OperationalError
from django.core.cache import cache
from django_celery_beat.models import PeriodicTask, IntervalSchedule, CrontabSchedule
def add_register_period_task(name):
    """Append *name* to the cached list of registered periodic tasks."""
    key = "__REGISTER_PERIODIC_TASKS"
    tasks = cache.get(key, [])
    tasks.append(name)
    cache.set(key, tasks)
def get_register_period_tasks():
    """Return the cached list of registered periodic task names."""
    return cache.get("__REGISTER_PERIODIC_TASKS", [])
def add_after_app_shutdown_clean_task(name):
    """Append *name* to the cached list of tasks cleaned at worker shutdown."""
    key = "__AFTER_APP_SHUTDOWN_CLEAN_TASKS"
    tasks = cache.get(key, [])
    tasks.append(name)
    cache.set(key, tasks)
def get_after_app_shutdown_clean_tasks():
    """Return the cached list of task names to clean after app shutdown."""
    return cache.get("__AFTER_APP_SHUTDOWN_CLEAN_TASKS", [])
def add_after_app_ready_task(name):
    """Record *name* in the shared cache list of app-ready startup tasks."""
    key = "__AFTER_APP_READY_RUN_TASKS"
    names = cache.get(key, [])
    cache.set(key, names + [name])
def get_after_app_ready_tasks():
    """Return the cached list of task names to run once the app is ready."""
    return cache.get("__AFTER_APP_READY_RUN_TASKS", [])
def create_or_update_celery_periodic_tasks(tasks):
"""
:param tasks: {
......@@ -123,63 +87,10 @@ def delete_celery_periodic_task(task_name):
PeriodicTask.objects.filter(name=task_name).delete()
def register_as_period_task(crontab=None, interval=None):
    """
    Register the decorated task as a django-celery-beat periodic task.

    Warning: the task must not take any args or kwargs.
    :param crontab: crontab expression, e.g. "* * * * *"
    :param interval: period in seconds, e.g. 60 * 60
    :return: the decorator
    :raises SyntaxError: if neither crontab nor interval is given
        (kept as SyntaxError for backward compatibility with callers).
    """
    if crontab is None and interval is None:
        raise SyntaxError("Must set crontab or interval one")

    def decorate(func):
        # FIX: removed a duplicated, unreachable crontab/interval check here —
        # the module-level check above already raised before `decorate` ran.
        # The Celery task object does not exist yet at decoration time,
        # so build the dotted name by hand instead of using func.name.
        name = '{func.__module__}.{func.__name__}'.format(func=func)
        if name not in get_register_period_tasks():
            create_or_update_celery_periodic_tasks({
                name: {
                    'task': name,
                    'interval': interval,
                    'crontab': crontab,
                    'args': (),
                    'enabled': True,
                }
            })
            add_register_period_task(name)

        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)
        return wrapper
    return decorate
def after_app_ready_start(func):
    """Register *func* to be executed once after the application is ready."""
    # The task is not created yet when this decorator runs, so derive
    # the dotted path manually rather than via func.name.
    task_name = '{0.__module__}.{0.__name__}'.format(func)
    if task_name not in get_after_app_ready_tasks():
        add_after_app_ready_task(task_name)

    @wraps(func)
    def wrapper(*args, **kwargs):
        return func(*args, **kwargs)
    return wrapper
def after_app_shutdown_clean(func):
    """Register *func* to be run as cleanup when the application shuts down.

    FIX: a bad merge interleaved this decorator with get_celery_task_log_path,
    leaving the wrapper/return displaced so the decorator returned None and
    destroyed the decorated function. The original tail is restored here.
    """
    # The task is not created yet at decoration time, so build the
    # dotted name by hand instead of using func.name.
    name = '{func.__module__}.{func.__name__}'.format(func=func)
    if name not in get_after_app_shutdown_clean_tasks():
        add_after_app_shutdown_clean_task(name)

    @wraps(func)
    def decorate(*args, **kwargs):
        return func(*args, **kwargs)
    return decorate


def get_celery_task_log_path(task_id):
    """Return the absolute log-file path for a celery task.

    Logs are sharded as <c0>/<c1>/<task_id>.log under
    settings.CELERY_LOG_DIR; missing directories are created.
    """
    task_id = str(task_id)
    rel_path = os.path.join(task_id[0], task_id[1], task_id + '.log')
    path = os.path.join(settings.CELERY_LOG_DIR, rel_path)
    os.makedirs(os.path.dirname(path), exist_ok=True)
    return path
......@@ -8,6 +8,8 @@ from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext
from django.db import models
from orgs.models import Organization
from ..ansible.runner import CommandRunner
from ..inventory import JMSInventory
......@@ -53,6 +55,8 @@ class CommandExecution(models.Model):
def run(self):
print('-'*10 + ' ' + ugettext('Task start') + ' ' + '-'*10)
org = Organization.get_instance(self.run_as.org_id)
org.change_to()
self.date_start = timezone.now()
ok, msg = self.run_as.is_command_can_run(self.command)
if ok:
......
# coding: utf-8
import os
import subprocess
from django.conf import settings
from celery import shared_task, subtask
from django.utils import timezone
from common.utils import get_logger, get_object_or_none
from .celery.utils import register_as_period_task, after_app_shutdown_clean
from .models import Task, CommandExecution
from .celery.decorator import (
register_as_period_task, after_app_shutdown_clean_periodic,
after_app_ready_start
)
from .celery.utils import create_or_update_celery_periodic_tasks
from .models import Task, CommandExecution, CeleryTask
logger = get_logger(__file__)
......@@ -36,8 +45,8 @@ def run_command_execution(cid, **kwargs):
@shared_task
@after_app_shutdown_clean_periodic
@register_as_period_task(interval=3600*24)
@after_app_shutdown_clean
def clean_tasks_adhoc_period():
logger.debug("Start clean task adhoc and run history")
tasks = Task.objects.all()
......@@ -48,11 +57,42 @@ def clean_tasks_adhoc_period():
ad.delete()
@shared_task
@after_app_shutdown_clean_periodic
@register_as_period_task(interval=3600*24)
def clean_celery_tasks_period():
    """Daily cleanup of celery task history.

    Deletes CeleryTask rows older than ``expire_days`` (removing their log
    files), drops rows that never started, then prunes any remaining stale
    ``*.log`` files under CELERY_LOG_DIR.
    """
    expire_days = 30
    logger.debug("Start clean celery task history")
    one_month_ago = timezone.now() - timezone.timedelta(days=expire_days)
    tasks = CeleryTask.objects.filter(date_start__lt=one_month_ago)
    for task in tasks:
        if os.path.isfile(task.full_log_path):
            try:
                os.remove(task.full_log_path)
            except (FileNotFoundError, PermissionError):
                # Best-effort removal; the DB row is deleted regardless.
                pass
        task.delete()
    tasks = CeleryTask.objects.filter(date_start__isnull=True)
    tasks.delete()
    # FIX: pass an argument list instead of a shell-interpolated string so
    # CELERY_LOG_DIR can never be interpreted by a shell. Equivalent to:
    # find DIR -mtime +N -name '*.log' -type f -exec rm -f {} ;
    subprocess.call([
        'find', settings.CELERY_LOG_DIR,
        '-mtime', '+{}'.format(expire_days),
        '-name', '*.log', '-type', 'f',
        '-exec', 'rm', '-f', '{}', ';',
    ])
@shared_task
@after_app_ready_start
def create_or_update_registered_periodic_tasks():
    """Sync every decorator-registered periodic task into the beat schedule."""
    # Imported lazily to avoid a circular import at module load time.
    from .celery.decorator import get_register_period_tasks
    for registered in get_register_period_tasks():
        create_or_update_celery_periodic_tasks(registered)
@shared_task
def hello(name, callback=None):
    """Demo task: sleep ten seconds, greet *name*, then fire *callback* if any."""
    import time
    time.sleep(10)
    print("Hello {}".format(name))
    if callback is None:
        return
    subtask(callback).delay("Guahongwei")
@shared_task
......
{% load static %}
{% load i18n %}
<head>
<title>term.js</title>
<title>{% trans 'Task log' %}</title>
<script src="{% static 'js/jquery-2.1.1.js' %}"></script>
<script src="{% static 'js/plugins/xterm/xterm.js' %}"></script>
<link rel="stylesheet" href="{% static 'js/plugins/xterm/xterm.css' %}" />
......@@ -15,14 +16,14 @@
}
</style>
</head>
<div id="term" style="height: 100%;width: 100%">
</div>
<div id="term" style="height: 100%;width: 100%">
</div>
<script>
var rowHeight = 18;
var colWidth = 10;
var mark = '';
var url = "{% url 'api-ops:celery-task-log' pk=object.id %}";
var url = "{% url 'api-ops:celery-task-log' pk=task_id %}";
var term;
var end = false;
var error = false;
......@@ -35,9 +36,9 @@
{#colWidth = 1.00 * t.width() / 6;#}
}
function resize() {
var rows = Math.floor(window.innerHeight / rowHeight) - 1;
var cols = Math.floor(window.innerWidth / colWidth) - 2;
term.resize(cols, rows);
{#var rows = Math.floor(window.innerHeight / rowHeight) - 1;#}
{#var cols = Math.floor(window.innerWidth / colWidth) - 2;#}
{#term.resize(cols, rows);#}
}
function requestAndWrite() {
if (!end && success) {
......@@ -73,7 +74,7 @@
disableStdin: true
});
term.open(document.getElementById('term'));
term.resize(80, 24);
term.resize(90, 32);
resize();
term.on('data', function (data) {
{#term.write(data.replace('\r', '\r\n'))#}
......
# -*- coding: utf-8 -*-
#
from django.views.generic import DetailView
from django.views.generic import DetailView, TemplateView
from common.permissions import AdminUserRequiredMixin
from ..models import CeleryTask
......@@ -9,6 +9,10 @@ from ..models import CeleryTask
__all__ = ['CeleryTaskLogView']
class CeleryTaskLogView(AdminUserRequiredMixin, TemplateView):
    """Render the celery task log page.

    The template polls the celery-task-log API using ``task_id`` from the URL.
    FIX: a duplicate class header (the removed DetailView variant) left by a
    bad merge above this class was dropped.
    """
    template_name = 'ops/celery_task_log.html'
    # NOTE(review): TemplateView does not use `model`; looks like a leftover
    # from the DetailView version — TODO confirm it can be removed.
    model = CeleryTask

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context.update({'task_id': self.kwargs.get('pk')})
        return context
......@@ -66,3 +66,4 @@ class OrgMembershipUsersViewSet(OrgMembershipModelViewSetMixin, BulkModelViewSet
serializer_class = OrgMembershipUserSerializer
membership_class = Organization.users.through
permission_classes = (IsSuperUserOrAppUser, )
......@@ -4,7 +4,7 @@
from werkzeug.local import Local
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import redirect
from django.shortcuts import redirect, get_object_or_404
from django.forms import ModelForm
from django.http.response import HttpResponseForbidden
from django.core.exceptions import ValidationError
......@@ -191,7 +191,7 @@ class OrgMembershipModelViewSetMixin:
http_method_names = ['get', 'post', 'delete', 'head', 'options']
def dispatch(self, request, *args, **kwargs):
self.org = Organization.objects.get(pk=kwargs.get('org_id'))
self.org = get_object_or_404(Organization, pk=kwargs.get('org_id'))
return super().dispatch(request, *args, **kwargs)
def get_serializer_context(self):
......@@ -200,4 +200,5 @@ class OrgMembershipModelViewSetMixin:
return context
def get_queryset(self):
return self.membership_class.objects.filter(organization=self.org)
queryset = self.membership_class.objects.filter(organization=self.org)
return queryset
......@@ -122,3 +122,7 @@ class Organization(models.Model):
return True
else:
return False
def change_to(self):
    """Make this organization the current one for the running context."""
    # Imported lazily to avoid a circular import between models and utils.
    from .utils import set_current_org
    set_current_org(self)
......@@ -9,11 +9,16 @@ from .. import api
app_name = 'orgs'
router = DefaultRouter()

# Deprecated routes (singular "org/" prefix): will be removed.
router.register(r'org/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/admins',
                api.OrgMembershipAdminsViewSet, 'membership-admins')
# NOTE(review): the trailing comma below makes this statement a 1-tuple —
# harmless, but presumably unintended; TODO confirm and drop it.
router.register(r'org/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/users',
                api.OrgMembershipUsersViewSet, 'membership-users'),
# Replacement routes (plural "orgs/" prefix).
router.register(r'orgs/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/admins',
                api.OrgMembershipAdminsViewSet, 'membership-admins-2')
router.register(r'orgs/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/users',
                api.OrgMembershipUsersViewSet, 'membership-users-2'),
router.register(r'orgs', api.OrgViewSet, 'org')
......
......@@ -2,21 +2,26 @@
#
from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.db.models import Q
from rest_framework.views import APIView, Response
from rest_framework.generics import ListAPIView, get_object_or_404, \
RetrieveUpdateAPIView
from rest_framework.generics import (
ListAPIView, get_object_or_404, RetrieveUpdateAPIView
)
from rest_framework import viewsets
from rest_framework.pagination import LimitOffsetPagination
from common.utils import set_or_append_attr_bulk
from common.permissions import IsValidUser, IsOrgAdmin, IsOrgAdminOrAppUser
from common.tree import TreeNode, TreeNodeSerializer
from common.utils import get_object_or_none
from orgs.mixins import RootOrgViewMixin
from orgs.utils import set_to_root_org
from .utils import AssetPermissionUtil
from .models import AssetPermission
from .hands import AssetGrantedSerializer, User, UserGroup, Asset, Node, \
from .hands import (
AssetGrantedSerializer, User, UserGroup, Asset, Node,
SystemUser, NodeSerializer
)
from . import serializers
from .mixins import AssetsFilterMixin
......@@ -38,6 +43,7 @@ class AssetPermissionViewSet(viewsets.ModelViewSet):
queryset = AssetPermission.objects.all()
serializer_class = serializers.AssetPermissionCreateUpdateSerializer
pagination_class = LimitOffsetPagination
filter_fields = ['name']
permission_classes = (IsOrgAdmin,)
def get_serializer_class(self):
......@@ -45,36 +51,122 @@ class AssetPermissionViewSet(viewsets.ModelViewSet):
return serializers.AssetPermissionListSerializer
return self.serializer_class
def get_queryset(self):
queryset = super().get_queryset().all()
search = self.request.query_params.get('search')
asset_id = self.request.query_params.get('asset')
node_id = self.request.query_params.get('node')
inherit_nodes = set()
def filter_valid(self, queryset):
    """Filter permissions by validity when `is_valid` is in the query string.

    '0'/'N'/'false'/'False' select invalid permissions; any other value
    selects valid ones; an absent param leaves the queryset untouched.
    """
    raw = self.request.query_params.get('is_valid', None)
    if raw is None:
        return queryset
    want_valid = raw not in ('0', 'N', 'false', 'False')
    now = timezone.now()
    if want_valid:
        return queryset.filter(is_active=True).filter(
            date_start__lt=now, date_expired__gt=now,
        )
    return queryset.filter(
        Q(is_active=False) |
        Q(date_start__gt=now) |
        Q(date_expired__lt=now)
    )
if search:
queryset = queryset.filter(name__icontains=search)
def filter_system_user(self, queryset):
    """Narrow permissions to those granting a given system user.

    Accepts `system_user_id` or `system_user` (name); an unknown system
    user empties the queryset, absent params leave it untouched.
    """
    su_id = self.request.query_params.get('system_user_id')
    su_name = self.request.query_params.get('system_user')
    if su_id:
        system_user = get_object_or_none(SystemUser, pk=su_id)
    elif su_name:
        system_user = get_object_or_none(SystemUser, name=su_name)
    else:
        return queryset
    if not system_user:
        return queryset.none()
    return queryset.filter(system_users=system_user)
if not asset_id and not node_id:
def filter_node(self, queryset):
    """Narrow permissions to those covering a node directly or via an ancestor.

    Accepts `node_id` or `node` (name); an unknown node empties the
    queryset, absent params leave it untouched.
    """
    node_id = self.request.query_params.get('node_id')
    node_name = self.request.query_params.get('node')
    if node_id:
        node = get_object_or_none(Node, pk=node_id)
    elif node_name:
        node = get_object_or_none(Node, name=node_name)
    else:
        return queryset
    if not node:
        return queryset.none()
    ancestors = node.get_ancestor(with_self=True)
    return queryset.filter(nodes__in=ancestors)
permissions = set()
def filter_asset(self, queryset):
    """Narrow permissions to those covering an asset.

    Accepts `asset_id`, `hostname` or `ip`; matches permissions that
    reference the asset directly or any ancestor of its nodes.
    FIX: stale pre-refactor statements interleaved into this method by a
    bad merge (the old per-permission set building) were removed.
    """
    asset_id = self.request.query_params.get('asset_id')
    hostname = self.request.query_params.get('hostname')
    ip = self.request.query_params.get('ip')
    if asset_id:
        assets = Asset.objects.filter(pk=asset_id)
    elif hostname:
        assets = Asset.objects.filter(hostname=hostname)
    elif ip:
        assets = Asset.objects.filter(ip=ip)
    else:
        return queryset
    if not assets:
        return queryset.none()
    # Collect every ancestor node (inclusive) of the matched assets so
    # node-granted permissions are also found.
    inherit_nodes = set()
    for asset in assets:
        for node in asset.nodes.all():
            inherit_nodes.update(set(node.get_ancestor(with_self=True)))
    queryset = queryset.filter(Q(assets__in=assets) | Q(nodes__in=inherit_nodes))
    return queryset
def filter_user(self, queryset):
    """Narrow permissions to those granted directly to a user.

    Accepts `user_id` or `username`; an unknown user empties the
    queryset, absent params leave it untouched.
    """
    user_id = self.request.query_params.get('user_id')
    username = self.request.query_params.get('username')
    if user_id:
        user = get_object_or_none(User, pk=user_id)
    elif username:
        user = get_object_or_none(User, username=username)
    else:
        return queryset
    if not user:
        return queryset.none()
    # BUG FIX: the original fell off the end here (implicitly returning
    # None) once a user matched. TODO confirm whether group-inherited
    # permissions (user_groups__users=user) should also be included.
    queryset = queryset.filter(users=user)
    return queryset

def filter_user_group(self, queryset):
    """Narrow permissions to those granted to a user group.

    Accepts `user_group_id` or `user_group` (name).
    """
    group_id = self.request.query_params.get('user_group_id')
    group_name = self.request.query_params.get('user_group')
    if group_id:
        group = get_object_or_none(UserGroup, pk=group_id)
    elif group_name:
        group = get_object_or_none(UserGroup, name=group_name)
    else:
        return queryset
    if not group:
        return queryset.none()
    queryset = queryset.filter(user_groups=group)
    return queryset

def filter_keyword(self, queryset):
    """Case-insensitive substring match on the permission name (`search`)."""
    keyword = self.request.query_params.get('search')
    if not keyword:
        return queryset
    queryset = queryset.filter(name__icontains=keyword)
    return queryset

def filter_queryset(self, queryset):
    """Apply DRF's filtering, then each query-param filter in turn.

    Each step either leaves the queryset untouched (param absent),
    narrows it, or empties it (param present but unmatched).
    FIX: leftover pre-refactor statements interleaved here by a bad merge
    were removed, and filter_user — defined but never chained — is now
    applied so ?user_id=/?username= take effect.
    """
    queryset = super().filter_queryset(queryset)
    queryset = self.filter_valid(queryset)
    queryset = self.filter_keyword(queryset)
    queryset = self.filter_asset(queryset)
    queryset = self.filter_node(queryset)
    queryset = self.filter_system_user(queryset)
    queryset = self.filter_user_group(queryset)
    queryset = self.filter_user(queryset)
    return queryset

def get_queryset(self):
    """Return a fresh copy of the base queryset."""
    return self.queryset.all()
class UserGrantedAssetsApi(AssetsFilterMixin, ListAPIView):
......
......@@ -51,9 +51,15 @@ class AssetPermission(OrgModelMixin):
def id_str(self):
return str(self.id)
@property
def is_expired(self):
    """True unless 'now' falls strictly inside the validity window."""
    now = timezone.now()
    return not (self.date_start < now < self.date_expired)
@property
def is_valid(self):
    """Active and currently inside the validity window.

    FIX: a stale pre-refactor condition line interleaved here by a bad
    merge was removed; the refactored check via is_expired is kept.
    """
    if not self.is_expired and self.is_active:
        return True
    return False
......
......@@ -28,19 +28,13 @@ class AssetPermissionListSerializer(serializers.ModelSerializer):
assets = StringManyToManyField(many=True, read_only=True)
nodes = StringManyToManyField(many=True, read_only=True)
system_users = StringManyToManyField(many=True, read_only=True)
inherit = serializers.SerializerMethodField()
is_valid = serializers.BooleanField()
is_expired = serializers.BooleanField()
class Meta:
model = AssetPermission
fields = '__all__'
@staticmethod
def get_inherit(obj):
if hasattr(obj, 'inherit'):
return obj.inherit
else:
return None
class AssetPermissionUpdateUserSerializer(serializers.ModelSerializer):
......
......@@ -56,7 +56,7 @@
<th class="text-center">{% trans 'Asset' %}</th>
<th class="text-center">{% trans 'Node'%}</th>
<th class="text-center">{% trans 'System user' %}</th>
<th class="text-center">{% trans 'Active' %}</th>
<th class="text-center">{% trans 'Validity' %}</th>
<th class="text-center" >{% trans 'Action' %}</th>
</tr>
</thead>
......@@ -67,6 +67,17 @@
</div>
</div>
</div>
<ul class="dropdown-menu search-help">
<li><a class="search-item" data-value="name">{% trans 'Name' %}</a></li>
<li><a class="search-item" data-value="is_valid">{% trans 'Validity' %}</a></li>
<li><a class="search-item" data-value="username">{% trans 'Username' %}</a></li>
<li><a class="search-item" data-value="user_group">{% trans 'User group' %}</a></li>
<li><a class="search-item" data-value="ip">IP</a></li>
<li><a class="search-item" data-value="hostname">{% trans 'Hostname' %}</a></li>
<li><a class="search-item" data-value="node">{% trans 'Node' %}</a></li>
<li><a class="search-item" data-value="system_user">{% trans 'System user' %}</a></li>
</ul>
{% endblock %}
{% block custom_foot_js %}
......@@ -79,11 +90,11 @@ function onSelected(event, treeNode) {
setCookie('node_selected', treeNode.id);
var url = table.ajax.url();
if (treeNode.meta.type === 'node') {
url = setUrlParam(url, 'asset', "");
url = setUrlParam(url, 'node', treeNode.meta.node.id)
url = setUrlParam(url, 'asset_id', "");
url = setUrlParam(url, 'node_id', treeNode.meta.node.id)
} else {
url = setUrlParam(url, 'node', "");
url = setUrlParam(url, 'asset', treeNode.meta.asset.id)
url = setUrlParam(url, 'node_id', "");
url = setUrlParam(url, 'asset_id', treeNode.meta.asset.id)
}
setCookie('node_selected', treeNode.node_id);
table.ajax.url(url);
......@@ -178,7 +189,7 @@ function initTable() {
{data: "id"}, {data: "name"}, {data: "users"},
{data: "user_groups"}, {data: "assets"},
{data: "nodes"}, {data: "system_users"},
{data: "is_active", orderable: false}, {data: "id", orderable: false}
{data: "is_valid", orderable: false}, {data: "id", orderable: false}
],
select: {},
op_html: $('#actions').html()
......@@ -231,6 +242,7 @@ function toggle() {
$(document).ready(function(){
initTable();
initTree();
})
.on('click', '.btn-del', function () {
var $this = $(this);
......@@ -279,6 +291,28 @@ $(document).ready(function(){
}
}
}).on('click', '#permission_list_table_filter input', function (e) {
e.preventDefault();
e.stopPropagation();
var position = $('#permission_list_table_filter input').offset();
var y = position['top'];
var x = position['left'];
x -= 220;
y += 30;
$('.search-help').css({"top":y+"px", "left":x+"px", "position": "absolute"});
$('.dropdown-menu.search-help').show();
}).on('click', '.search-item', function (e) {
e.preventDefault();
e.stopPropagation();
var value = $(this).data('value');
var old_value = $('#permission_list_table_filter input').val();
var new_value = old_value + ' ' + value + ':';
$('#permission_list_table_filter input').val(new_value.trim());
$('.dropdown-menu.search-help').hide();
$('#permission_list_table_filter input').focus()
}).on('click', 'body', function (e) {
$('.dropdown-menu.search-help').hide()
})
</script>
......
......@@ -161,6 +161,87 @@ function activeNav() {
}
}
// Generic AJAX form submitter: serializes the form (or uses props.data),
// sends it as JSON, and on HTTP 400 renders DRF-style field errors into
// the form's .help-block / .alert-danger elements.
function formSubmit(props) {
    /*
    {
        "form": $("form"),
        "url": "",
        "method": "POST",
        "redirect_to": "",
        "success": function(data, textStatue, jqXHR){},
        "error": function(jqXHR, textStatus, errorThrown) {}
    }
    */
    props = props || {};
    var data = props.data || props.form.serializeObject();
    var redirect_to = props.redirect_to;
    $.ajax({
        url: props.url,
        type: props.method || 'POST',
        data: JSON.stringify(data),
        contentType: props.content_type || "application/json; charset=utf-8",
        dataType: props.data_type || "json"
    }).done(function (data, textState, jqXHR) {
        // A redirect target wins over the success callback when both are given.
        if (redirect_to) {
            location.href = redirect_to;
        } else if (typeof props.success === 'function') {
            return props.success(data, textState, jqXHR);
        }
    }).fail(function(jqXHR, textStatus, errorThrown) {
        // A custom error handler short-circuits all default error rendering.
        if (typeof props.error === 'function') {
            return props.error(jqXHR, textStatus, errorThrown)
        }
        // Without a form to render into, fall back to a raw alert.
        if (!props.form) {
            alert(jqXHR.responseText);
            return
        }
        if (jqXHR.status === 400) {
            var errors = jqXHR.responseJSON;
            // Ensure exactly one non-field error banner exists at the top of the form.
            var noneFieldErrorRef = props.form.children('.alert-danger');
            if (noneFieldErrorRef.length !== 1) {
                props.form.prepend('<div class="alert alert-danger" style="display: none"></div>');
                noneFieldErrorRef = props.form.children('.alert-danger');
            }
            var noneFieldErrorMsg = "";
            noneFieldErrorRef.css("display", "none");
            noneFieldErrorRef.html("");
            // Clear error state left over from a previous submission.
            props.form.find(".help-block.error").html("");
            props.form.find(".form-group.has-error").removeClass("has-error");

            // A non-object payload is a single global error message.
            if (typeof errors !== "object") {
                noneFieldErrorMsg = errors;
                if (noneFieldErrorRef.length === 1) {
                    noneFieldErrorRef.css('display', 'block');
                    noneFieldErrorRef.html(noneFieldErrorMsg);
                }
                return
            }
            // Map {field: [messages]} onto matching inputs; messages without
            // a matching input accumulate into the global banner.
            $.each(errors, function (k, v) {
                var fieldRef = props.form.find('input[name="' + k + '"]');
                var formGroupRef = fieldRef.parents('.form-group');
                var parentRef = fieldRef.parent();
                var helpBlockRef = parentRef.children('.help-block.error');
                if (helpBlockRef.length === 0) {
                    parentRef.append('<div class="help-block error"></div>');
                    helpBlockRef = parentRef.children('.help-block.error');
                }
                if (fieldRef.length === 1 && formGroupRef.length === 1) {
                    formGroupRef.addClass('has-error');
                    var help_msg = v.join("<br/>") ;
                    helpBlockRef.html(help_msg);
                } else {
                    noneFieldErrorMsg += v + '<br/>';
                }
            });
            if (noneFieldErrorRef.length === 1 && noneFieldErrorMsg !== '') {
                noneFieldErrorRef.css('display', 'block');
                noneFieldErrorRef.html(noneFieldErrorMsg);
            }
        }
    })
}
function APIUpdateAttr(props) {
// props = {url: .., body: , success: , error: , method: ,}
props = props || {};
......@@ -195,9 +276,6 @@ function APIUpdateAttr(props) {
}).fail(function(jqXHR, textStatus, errorThrown) {
if (flash_message) {
var msg = "";
console.log(jqXHR);
console.log(textStatus);
console.log(errorThrown);
if (user_fail_message) {
msg = user_fail_message;
} else if (jqXHR.responseJSON) {
......@@ -213,6 +291,7 @@ function APIUpdateAttr(props) {
toastr.error(msg);
}
if (typeof props.error === 'function') {
console.log(jqXHR);
return props.error(jqXHR.responseText, jqXHR.status);
}
});
......@@ -478,7 +557,7 @@ jumpserver.initServerSideDataTable = function (options) {
url: options.ajax_url ,
data: function (data) {
delete data.columns;
if (data.length !== null ){
if (data.length !== null){
data.limit = data.length;
delete data.length;
}
......@@ -525,7 +604,7 @@ jumpserver.initServerSideDataTable = function (options) {
columns: options.columns || [],
select: options.select || select,
language: jumpserver.language,
lengthMenu: [[10, 15, 25, 50], [10, 15, 25, 50]]
lengthMenu: [[15, 25, 50, 9999], [15, 25, 50, 'All']]
});
table.selected = [];
table.selected_rows = [];
......
{% load i18n %}
<strong>Copyright</strong> {% trans ' Beijing Duizhan Tech, Inc. ' %} &copy; 2014-2018
\ No newline at end of file
<strong>Copyright</strong> {% trans ' Beijing Duizhan Tech, Inc. ' %} &copy; 2014-2019
\ No newline at end of file
......@@ -5,6 +5,6 @@
<!--<img style="display: none" src="http://www.jumpserver.org/img/evaluate_avatar1.jpg">-->
</div>
<div>
<strong>Copyright</strong> {% trans ' Beijing Duizhan Tech, Inc. ' %}&copy; 2014-2018
<strong>Copyright</strong> {% trans ' Beijing Duizhan Tech, Inc. ' %}&copy; 2014-2019
</div>
</div>
......@@ -54,7 +54,7 @@
{% include '_copyright.html' %}
</div>
<div class="col-md-6 text-right">
<small>2014-2018</small>
<small>2014-2019</small>
</div>
</div>
</div>
......
......@@ -33,16 +33,19 @@ class SessionViewSet(BulkModelViewSet):
permission_classes = (IsOrgAdminOrAppUser,)
def get_queryset(self):
queryset = super().get_queryset()
terminal_id = self.kwargs.get("terminal", None)
if terminal_id:
terminal = get_object_or_404(Terminal, id=terminal_id)
self.queryset = terminal.session_set.all()
return self.queryset.all()
queryset = queryset.filter(terminal=terminal)
return queryset
return queryset
def perform_create(self, serializer):
if hasattr(self.request.user, 'terminal'):
serializer.validated_data["terminal"] = self.request.user.terminal
sid = serializer.validated_data["system_user"]
# guacamole提交的是id
if is_uuid(sid):
_system_user = SystemUser.get_system_user_by_id_or_cached(sid)
if _system_user:
......
......@@ -100,52 +100,15 @@ class StatusViewSet(viewsets.ModelViewSet):
task_serializer_class = serializers.TaskSerializer
def create(self, request, *args, **kwargs):
from_gua = self.request.query_params.get("from_guacamole", None)
if not from_gua:
self.handle_sessions()
super().create(request, *args, **kwargs)
self.handle_sessions()
tasks = self.request.user.terminal.task_set.filter(is_finished=False)
serializer = self.task_serializer_class(tasks, many=True)
return Response(serializer.data, status=201)
def handle_sessions(self):
sessions_active = []
for session_data in self.request.data.get("sessions", []):
self.create_or_update_session(session_data)
if not session_data["is_finished"]:
sessions_active.append(session_data["id"])
sessions_in_db_active = Session.objects.filter(
is_finished=False,
terminal=self.request.user.terminal.id
)
for session in sessions_in_db_active:
if str(session.id) not in sessions_active:
session.is_finished = True
session.date_end = timezone.now()
session.save()
def create_or_update_session(self, session_data):
session_data["terminal"] = self.request.user.terminal.id
_id = session_data["id"]
session = get_object_or_none(Session, id=_id)
if session:
serializer = serializers.SessionSerializer(
data=session_data, instance=session
)
else:
serializer = serializers.SessionSerializer(data=session_data)
if serializer.is_valid():
session = serializer.save()
return session
else:
msg = "session data is not valid {}: {}".format(
serializer.errors, str(serializer.data)
)
logger.error(msg)
return None
sessions_id = self.request.data.get('sessions', [])
Session.set_active_sessions(sessions_id)
def get_queryset(self):
terminal_id = self.kwargs.get("terminal", None)
......
# -*- coding: utf-8 -*-
#
from rest_framework import viewsets
from rest_framework import viewsets, generics
from rest_framework import status
from rest_framework.response import Response
from common.permissions import IsSuperUser, WithBootstrapToken
from ...models import Terminal
from ...serializers import v2 as serializers
__all__ = ['TerminalViewSet', 'TerminalRegistrationViewSet']
__all__ = ['TerminalViewSet', 'TerminalRegistrationApi']
class TerminalViewSet(viewsets.ModelViewSet):
......@@ -15,8 +19,19 @@ class TerminalViewSet(viewsets.ModelViewSet):
permission_classes = [IsSuperUser]
class TerminalRegistrationViewSet(viewsets.ModelViewSet):
queryset = Terminal.objects.filter(is_deleted=False)
class TerminalRegistrationApi(generics.CreateAPIView):
    """Register a terminal component, authenticated by a bootstrap token.

    POST-only. Returns the submitted terminal fields plus the service
    account created for the new terminal.
    """
    serializer_class = serializers.TerminalRegistrationSerializer
    permission_classes = [WithBootstrapToken]
    http_method_names = ['post']

    def create(self, request, *args, **kwargs):
        # Copy request.data so the service-account payload can be appended
        # to the response below.
        data = {k: v for k, v in request.data.items()}
        # NOTE(review): validation/creation goes through TerminalSerializer,
        # not the declared serializer_class — confirm this is intentional.
        serializer = serializers.TerminalSerializer(
            data=data, context={'request': request}
        )
        serializer.is_valid(raise_exception=True)
        terminal = serializer.save()
        # The serializer's save() also creates/attaches the terminal's
        # service account; serialize it into the response.
        sa_serializer = serializer.sa_serializer_class(instance=terminal.user)
        data['service_account'] = sa_serializer.data
        return Response(data, status=status.HTTP_201_CREATED)
# ~*~ coding: utf-8 ~*~
import datetime
from django.db import transaction
from django.utils import timezone
from django.db.utils import OperationalError
from .base import CommandBase
......@@ -35,7 +37,25 @@ class CommandStore(CommandBase):
input=c["input"], output=c["output"], session=c["session"],
org_id=c["org_id"], timestamp=c["timestamp"]
))
return self.model.objects.bulk_create(_commands)
error = False
try:
with transaction.atomic():
self.model.objects.bulk_create(_commands)
except OperationalError:
error = True
except:
return False
if not error:
return True
for command in _commands:
try:
with transaction.atomic():
command.save()
except OperationalError:
command.output = str(command.output.encode())
command.save()
return True
@staticmethod
def make_filter_kwargs(
......
......@@ -39,5 +39,3 @@ class TerminalForm(forms.ModelForm):
'name', 'remote_addr', 'comment',
'command_storage', 'replay_storage',
]
help_texts = {
}
......@@ -8,6 +8,7 @@ from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
from django.conf import settings
from django.core.files.storage import default_storage
from django.core.cache import cache
from users.models import User
from orgs.mixins import OrgModelMixin
......@@ -61,8 +62,9 @@ class Terminal(models.Model):
def config(self):
configs = {}
for k in dir(settings):
if k.startswith('TERMINAL'):
configs[k] = getattr(settings, k)
if not k.startswith('TERMINAL'):
continue
configs[k] = getattr(settings, k)
configs.update(self.get_common_storage())
configs.update(self.get_replay_storage())
configs.update({
......@@ -152,6 +154,7 @@ class Session(OrgModelMixin):
date_end = models.DateTimeField(verbose_name=_("Date end"), null=True)
upload_to = 'replay'
ACTIVE_CACHE_KEY_PREFIX = 'SESSION_ACTIVE_{}'
def get_rel_replay_path(self, version=2):
"""
......@@ -181,6 +184,17 @@ class Session(OrgModelMixin):
except OSError as e:
return None, e
@classmethod
def set_active_sessions(cls, sessions_id):
    """Mark each given session id as active in the cache for 5 minutes."""
    mapping = {}
    for sid in sessions_id:
        mapping[cls.ACTIVE_CACHE_KEY_PREFIX.format(sid)] = sid
    cache.set_many(mapping, timeout=5*60)
def is_active(self):
    """ssh/telnet sessions are active only while their cache entry exists;
    every other protocol is always treated as active."""
    if self.protocol not in ['ssh', 'telnet']:
        return True
    key = self.ACTIVE_CACHE_KEY_PREFIX.format(self.id)
    return bool(cache.get(key))
class Meta:
db_table = "terminal_session"
ordering = ["-date_start"]
......
......@@ -31,8 +31,6 @@ class TerminalSerializer(serializers.ModelSerializer):
return cache.get(key)
class SessionSerializer(BulkSerializerMixin, serializers.ModelSerializer):
command_amount = serializers.SerializerMethodField()
command_store = get_multi_command_storage()
......@@ -69,6 +67,6 @@ class TaskSerializer(BulkSerializerMixin, serializers.ModelSerializer):
class ReplaySerializer(serializers.Serializer):
file = serializers.FileField()
file = serializers.FileField(allow_empty_file=True)
......@@ -3,7 +3,7 @@
from rest_framework import serializers
from common.utils import get_request_ip
from users.serializers.v2 import ServiceAccountRegistrationSerializer
from users.serializers.v2 import ServiceAccountSerializer
from ..models import Terminal
......@@ -11,36 +11,48 @@ __all__ = ['TerminalSerializer', 'TerminalRegistrationSerializer']
class TerminalSerializer(serializers.ModelSerializer):
class Meta:
model = Terminal
fields = [
'id', 'name', 'remote_addr', 'comment',
]
read_only_fields = ['id', 'remote_addr']
class TerminalRegistrationSerializer(serializers.ModelSerializer):
service_account = ServiceAccountRegistrationSerializer(read_only=True)
service_account_serializer = None
sa_serializer_class = ServiceAccountSerializer
sa_serializer = None
class Meta:
model = Terminal
fields = [
'id', 'name', 'remote_addr', 'comment', 'service_account'
'id', 'name', 'remote_addr', 'command_storage',
'replay_storage', 'user', 'is_accepted', 'is_deleted',
'date_created', 'comment'
]
read_only_fields = ['id', 'remote_addr', 'service_account']
def validate(self, attrs):
self.service_account_serializer = ServiceAccountRegistrationSerializer(data=attrs)
self.service_account_serializer.is_valid(raise_exception=True)
return attrs
read_only_fields = ['id', 'remote_addr', 'user', 'date_created']
def is_valid(self, raise_exception=False):
    """Validate the terminal fields, then the nested service-account data.

    Prepares a ServiceAccountSerializer (stored on self.sa_serializer)
    from the terminal name so save() can create or update the backing
    service account. The service-account validation always raises on
    error, even when the caller requested soft validation.
    """
    valid = super().is_valid(raise_exception=raise_exception)
    if not valid:
        return valid
    data = {'name': self.validated_data.get('name')}
    kwargs = {'data': data}
    # Reuse the existing service account when updating a terminal.
    if self.instance and self.instance.user:
        kwargs['instance'] = self.instance.user
    self.sa_serializer = ServiceAccountSerializer(**kwargs)
    valid = self.sa_serializer.is_valid(raise_exception=True)
    return valid
def save(self, **kwargs):
    """Save the terminal, then create/update and attach its service account."""
    instance = super().save(**kwargs)
    # sa_serializer is prepared by is_valid(); save it and link the account.
    sa = self.sa_serializer.save()
    instance.user = sa
    instance.save()
    return instance
def create(self, validated_data):
request = self.context.get('request')
sa = self.service_account_serializer.save()
instance = super().create(validated_data)
instance.is_accepted = True
instance.user = sa
instance.remote_addr = get_request_ip(request)
if request:
instance.remote_addr = get_request_ip(request)
instance.save()
return instance
class TerminalRegistrationSerializer(serializers.Serializer):
    # Payload a terminal submits when registering itself.
    name = serializers.CharField(max_length=128)
    comment = serializers.CharField(max_length=128)
    # Generated server side and only echoed back in the response
    # (read_only), presumably carrying the new account's access key
    # per ServiceAccountSerializer — confirm against the viewset.
    service_account = ServiceAccountSerializer(read_only=True)
......@@ -10,8 +10,9 @@ from django.conf import settings
from django.core.files.storage import default_storage
from ops.celery.utils import register_as_period_task, after_app_ready_start, \
after_app_shutdown_clean
from ops.celery.decorator import (
register_as_period_task, after_app_ready_start, after_app_shutdown_clean_periodic
)
from .models import Status, Session, Command
......@@ -23,28 +24,30 @@ logger = get_task_logger(__name__)
@shared_task
@register_as_period_task(interval=3600)
@after_app_ready_start
@after_app_shutdown_clean_periodic
def delete_terminal_status_period():
    """Hourly periodic task: delete Terminal Status (heartbeat) rows older
    than one day.

    Merge residue removed: the obsolete @after_app_shutdown_clean decorator
    and a superseded `days=3` cutoff assignment were dropped.
    """
    yesterday = timezone.now() - datetime.timedelta(days=1)
    Status.objects.filter(date_created__lt=yesterday).delete()
@shared_task
@register_as_period_task(interval=600)
@after_app_ready_start
@after_app_shutdown_clean_periodic
def clean_orphan_session():
    """Every 10 minutes, close out orphan sessions: unfinished sessions
    whose owning terminal no longer reports them alive.

    Merge residue removed: the old terminal-based check and duplicate
    shutdown decorator. NOTE(review): the residue read
    `if not session.is_active(): continue`, which would have finished
    *live* sessions and skipped the orphans; the guard polarity here is
    restored to match the pre-change behavior (finish inactive sessions)
    and the task's name — confirm against Session.is_active().
    """
    active_sessions = Session.objects.filter(is_finished=False)
    for session in active_sessions:
        if session.is_active():
            # Terminal still reports this session as running; leave it.
            continue
        session.is_finished = True
        session.date_end = timezone.now()
        session.save()
@shared_task
@register_as_period_task(interval=3600*24)
@after_app_ready_start
@after_app_shutdown_clean
@after_app_shutdown_clean_periodic
def clean_expired_session_period():
logger.info("Start clean expired session record, commands and replay")
days = settings.TERMINAL_SESSION_KEEP_DURATION
......@@ -64,3 +67,4 @@ def clean_expired_session_period():
default_storage.delete(_local_path)
# 删除session记录
session.delete()
......@@ -33,8 +33,6 @@
<h3>{% trans 'Info' %}</h3>
{% bootstrap_field form.name layout="horizontal" %}
{% bootstrap_field form.remote_addr layout="horizontal" %}
{# {% bootstrap_field form.ssh_port layout="horizontal" %}#}
{# {% bootstrap_field form.http_port layout="horizontal" %}#}
{% bootstrap_field form.command_storage layout="horizontal" %}
{% bootstrap_field form.replay_storage layout="horizontal" %}
......@@ -60,14 +58,14 @@
<script>
$(document).ready(function () {
$('.select2').select2();
$('.input-group.date').datepicker({
format: "yyyy-mm-dd",
todayBtn: "linked",
keyboardNavigation: false,
forceParse: false,
calendarWeeks: true,
autoclose: true
}).on('submit', 'form', function (e) {
e.preventDefault();
var form = $('form');
formSubmit({
'url': '{% url 'api-terminal-v2:terminal-detail' pk=DEFAULT_PK %}'.replace('{{ DEFAULT_PK }}', '{{ object.id }}'),
'form': form,
'method': 'PUT',
'redirect_to': '{% url "terminal:terminal-list" %}'
});
})
</script>
......
......@@ -11,10 +11,11 @@ app_name = 'terminal'
router = BulkRouter()
router.register(r'terminal', api.TerminalViewSet, 'terminal')
router.register(r'terminal-registrations', api.TerminalRegistrationViewSet, 'terminal-registration')
urlpatterns = [
path('terminal-registrations/', api.TerminalRegistrationApi.as_view(),
name='terminal-registration')
]
urlpatterns += router.urls
......@@ -19,6 +19,7 @@ from orgs.utils import current_org
from ..serializers import UserSerializer, UserPKUpdateSerializer, \
UserUpdateGroupSerializer, ChangeUserPasswordSerializer
from ..models import User
from ..signals import post_user_create
logger = get_logger(__name__)
......@@ -37,6 +38,10 @@ class UserViewSet(IDInFilterMixin, BulkModelViewSet):
permission_classes = (IsOrgAdmin,)
pagination_class = LimitOffsetPagination
def perform_create(self, serializer):
    """Persist the new user, then broadcast the post-create signal so
    listeners (e.g. welcome-mail hooks) can react."""
    created_user = serializer.save()
    post_user_create.send(self.__class__, user=created_user)
def get_queryset(self):
    """Restrict the viewset to users belonging to the current organization."""
    return current_org.get_org_users()
......
......@@ -7,6 +7,6 @@ from ...serializers import v2 as serializers
class ServiceAccountRegistrationViewSet(viewsets.ModelViewSet):
    """POST-only endpoint for components to register a service account,
    authenticated by the pre-shared bootstrap token.

    Merge residue removed: a duplicate `serializer_class` assignment
    referencing the renamed ServiceAccountRegistrationSerializer; only the
    current ServiceAccountSerializer binding is kept.
    """
    serializer_class = serializers.ServiceAccountSerializer
    permission_classes = (WithBootstrapToken,)
    http_method_names = ['post']
# Generated by Django 2.1.4 on 2019-01-07 11:12
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds 'radius' to the User.source choices (Local / LDAP/AD / OpenID /
    # Radius). Only the choice list changes; column type, default and
    # max_length are untouched, so this is metadata-only for most backends.

    dependencies = [
        ('users', '0017_auto_20181123_1113'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='source',
            field=models.CharField(choices=[('local', 'Local'), ('ldap', 'LDAP/AD'), ('openid', 'OpenID'), ('radius', 'Radius')], default='local', max_length=30, verbose_name='Source'),
        ),
    ]
......@@ -41,10 +41,12 @@ class User(AbstractUser):
SOURCE_LOCAL = 'local'
SOURCE_LDAP = 'ldap'
SOURCE_OPENID = 'openid'
SOURCE_RADIUS = 'radius'
SOURCE_CHOICES = (
(SOURCE_LOCAL, 'Local'),
(SOURCE_LDAP, 'LDAP/AD'),
(SOURCE_OPENID, 'OpenID'),
(SOURCE_RADIUS, 'Radius'),
)
id = models.UUIDField(default=uuid.uuid4, primary_key=True)
username = models.CharField(
......
# -*- coding: utf-8 -*-
#
from django.utils.translation import ugettext as _
from rest_framework import serializers
from ..models import User, AccessKey
......@@ -12,7 +13,7 @@ class AccessKeySerializer(serializers.ModelSerializer):
read_only_fields = ['id', 'secret']
class ServiceAccountRegistrationSerializer(serializers.ModelSerializer):
class ServiceAccountSerializer(serializers.ModelSerializer):
access_key = AccessKeySerializer(read_only=True)
class Meta:
......@@ -30,15 +31,22 @@ class ServiceAccountRegistrationSerializer(serializers.ModelSerializer):
def validate_name(self, name):
    """Reject the name if the derived email/username would collide with any
    other user; on update, the account being edited is excluded.

    Merge residue removed: the older check that scanned all users (and so
    failed on updates against the instance itself). `.exists()` is used to
    avoid materializing the querysets just for a truthiness test.
    """
    email = self.get_email()
    username = self.get_username()
    if self.instance:
        others = User.objects.exclude(id=self.instance.id)
    else:
        others = User.objects.all()
    if others.filter(email=email).exists() or \
            others.filter(username=username).exists():
        raise serializers.ValidationError(_('name not unique'), code='unique')
    return name
def create(self, validated_data):
    """Create an app-role user from the validated name, deriving email and
    username, and issue its first access key.

    Merge residue removed: the superseded `save()` override that injected
    the same derived fields via validated_data; the current revision does
    it in `create()` and additionally provisions the access key.
    """
    validated_data['email'] = self.get_email()
    validated_data['username'] = self.get_username()
    validated_data['role'] = User.ROLE_APP
    instance = super().create(validated_data)
    instance.create_access_key()
    return instance
......@@ -3,10 +3,8 @@
from celery import shared_task
from ops.celery.utils import (
create_or_update_celery_periodic_tasks,
after_app_ready_start
)
from ops.celery.utils import create_or_update_celery_periodic_tasks
from ops.celery.decorator import after_app_ready_start
from .models import User
from common.utils import get_logger
from .utils import write_login_log, send_password_expiration_reminder_mail
......
"""
jumpserver.config
~~~~~~~~~~~~~~~~~
Jumpserver project setting file
:copyright: (c) 2014-2017 by Jumpserver Team
:license: GPL v2, see LICENSE for more details.
"""
import os
import json
BASE_DIR = os.path.dirname(os.path.abspath(__file__))


class Config:
    """
    Jumpserver base settings.

    Values are plain class attributes; a few fall back to environment
    variables. Any setting not defined here resolves to ``None`` via
    ``__getattr__`` instead of raising AttributeError.
    """
    # Used to encrypt or decrypt data.
    # SECURITY WARNING: keep the secret key used in production secret!
    SECRET_KEY = os.environ.get('SECRET_KEY') or '2vym+ky!997d5kkcc64mnz06y1mmui3lut#(^wd=%s_qj$1%x'

    # How many lines to display per page with the Django pager.
    DISPLAY_PER_PAGE = 25

    # Absolute site URL, HTTP_PROTOCOL://HOST[:PORT]. Needed to build full
    # links (e.g. http://jms.jumpserver.org/login/) in outgoing mail, since
    # only the path (/login/) is known at send time.
    SITE_URL = 'http://localhost'

    # Django security setting; tighten this when DEBUG is disabled.
    ALLOWED_HOSTS = ['*']

    # Development: enable for full tracebacks. Disable in production.
    DEBUG = True

    # DEBUG, INFO, WARNING, ERROR or CRITICAL.
    # See https://docs.djangoproject.com/en/1.10/topics/logging/
    LOG_LEVEL = 'DEBUG'
    LOG_DIR = os.path.join(BASE_DIR, 'logs')

    # Database settings. sqlite3, mysql, postgres ... are supported.
    # See https://docs.djangoproject.com/en/1.10/ref/settings/#databases
    # SQLite setting:
    DB_ENGINE = 'sqlite3'
    DB_NAME = os.path.join(BASE_DIR, 'data', 'db.sqlite3')

    # MySQL or postgres setting like:
    # DB_ENGINE = 'mysql'
    # DB_HOST = '127.0.0.1'
    # DB_PORT = 3306
    # DB_USER = 'root'
    # DB_PASSWORD = ''
    # DB_NAME = 'jumpserver'

    # Host and port Django binds on startup
    # (./manage.py runserver 127.0.0.1:8080).
    HTTP_BIND_HOST = '0.0.0.0'
    HTTP_LISTEN_PORT = 8080

    # Redis is the broker for celery and web sockets.
    REDIS_HOST = '127.0.0.1'
    REDIS_PORT = 6379
    REDIS_PASSWORD = ''
    BROKER_URL = 'redis://%(password)s%(host)s:%(port)s/3' % {
        'password': REDIS_PASSWORD,
        'host': REDIS_HOST,
        'port': REDIS_PORT,
    }

    # API token lifetime in seconds; Jumpserver refreshes it on each request.
    TOKEN_EXPIRATION = 3600

    # Browser session / csrf cookie lifetime (24 hours).
    SESSION_COOKIE_AGE = 3600 * 24

    # Email SMTP settings; only SMTP delivery is supported.
    EMAIL_HOST = 'smtp.163.com'
    EMAIL_PORT = 25
    EMAIL_HOST_USER = ''
    EMAIL_HOST_PASSWORD = ''  # Caution: some SMTP servers require an `Authorization Code` instead of the password
    # Direct boolean comparisons replace the old `True if X else False` form.
    EMAIL_USE_SSL = EMAIL_PORT == 465  # SSL is conventional on port 465
    EMAIL_USE_TLS = EMAIL_PORT == 587  # TLS is conventional on port 587
    EMAIL_SUBJECT_PREFIX = '[Jumpserver] '

    CAPTCHA_TEST_MODE = False

    # Usage-guide URL users are redirected to after the first-run wizard.
    USER_GUIDE_URL = ''

    # LDAP auth settings (disabled by default).
    AUTH_LDAP = False
    AUTH_LDAP_SERVER_URI = 'ldap://localhost:389'
    AUTH_LDAP_BIND_DN = 'cn=admin,dc=jumpserver,dc=org'
    AUTH_LDAP_BIND_PASSWORD = ''
    AUTH_LDAP_SEARCH_OU = 'ou=tech,dc=jumpserver,dc=org'
    AUTH_LDAP_SEARCH_FILTER = '(cn=%(user)s)'
    AUTH_LDAP_USER_ATTR_MAP = {
        "username": "cn",
        "name": "sn",
        "email": "mail"
    }
    AUTH_LDAP_START_TLS = False

    #
    # OTP_VALID_WINDOW = 0

    def __init__(self):
        pass

    def __getattr__(self, item):
        # Unknown settings resolve to None rather than raising.
        return None
class DockerConfig(Config):
    """
    Docker deployment config: every setting is read from an environment
    variable first and falls back to the default shown here.
    """
    # Key used to encrypt data. Change it — and keep it safe: losing it
    # makes previously encrypted data unrecoverable.
    # SECRET_KEY = "SOME_KEY_NO_ONE_GUESS"
    SECRET_KEY = os.environ.get("SECRET_KEY") or "MD923lkSDi8213kl),3()&^%aM2q1mz;223lkM0o1"

    # Public site URL, format http[s]://host[:port]
    # SITE_URL = "http://jumpserver.fit2cloud.com"
    SITE_URL = os.environ.get("SITE_URL") or 'http://localhost'

    # Enable DEBUG only for an explicit truthy value. Fixes the previous
    # `bool(os.environ.get("DEBUG"))`, which treated DEBUG=false (any
    # non-empty string) as True.
    DEBUG = os.environ.get("DEBUG", "").strip().lower() in ("1", "true", "yes", "on")

    # Log level, default INFO
    # LOG_LEVEL = WARN
    LOG_LEVEL = os.environ.get("LOG_LEVEL") or "INFO"

    # Database backend; leave the defaults unless using an external server.
    # DB_ENGINE = "oracle" | "postgre" | "mysql" | "sqlite3"
    DB_ENGINE = os.environ.get("DB_ENGINE") or 'mysql'
    # DB_HOST = "192.168.1.1"
    DB_HOST = os.environ.get("DB_HOST") or 'mysql'
    # Environment values arrive as strings; normalize ports to int.
    DB_PORT = int(os.environ.get("DB_PORT") or 3306)
    # DB_USER = "jumpserver"
    DB_USER = os.environ.get("DB_USER") or 'root'
    # DB_PASSWORD = "db_jumpserver_password"
    DB_PASSWORD = os.environ.get("DB_PASSWORD") or ''
    # DB_NAME = "jumpserver"
    DB_NAME = os.environ.get("DB_NAME") or 'jumpserver'

    # Redis settings; leave the defaults unless using an external Redis.
    # REDIS_HOST = "192.168.1.1"
    REDIS_HOST = os.environ.get("REDIS_HOST") or 'redis'
    # REDIS_PORT = 6380
    REDIS_PORT = int(os.environ.get("REDIS_PORT") or 6379)
    # REDIS_PASSWORD = "redis_password"
    REDIS_PASSWORD = os.environ.get("REDIS_PASSWORD") or ''

    # SMTP mail settings; see your mail provider's documentation.
    # EMAIL_HOST = 'smtp.qq.com'
    EMAIL_HOST = 'smtp.163.com'
    # EMAIL_PORT = 465
    EMAIL_PORT = 25
    # EMAIL_HOST_USER = "noreply@jumpserver.org"
    EMAIL_HOST_USER = ''
    # Note: some providers require an authorization code instead of the
    # account password for SMTP.
    EMAIL_HOST_PASSWORD = ''
    EMAIL_USE_SSL = EMAIL_PORT == 465  # SSL is conventional on port 465
    EMAIL_USE_TLS = EMAIL_PORT == 587  # TLS is conventional on port 587
    # Mail subject prefix
    EMAIL_SUBJECT_PREFIX = '[Jumpserver] '

    # LDAP authentication (disabled by default).
    # AUTH_LDAP = True
    AUTH_LDAP = False
    # LDAP server address
    AUTH_LDAP_SERVER_URI = 'ldap://localhost:389'
    # Bind DN used for lookups
    AUTH_LDAP_BIND_DN = 'cn=admin,dc=jumpserver,dc=org'
    AUTH_LDAP_BIND_PASSWORD = ''
    # OU the users live in
    AUTH_LDAP_SEARCH_OU = 'ou=tech,dc=jumpserver,dc=org'
    # Search filter; only the leading attribute (cn or uid — the login
    # field) should normally be changed.
    # AUTH_LDAP_SEARCH_FILTER = '(uid=%(user)s)'
    AUTH_LDAP_SEARCH_FILTER = '(cn=%(user)s)'
    # Map LDAP attributes onto Jumpserver user fields.
    AUTH_LDAP_USER_ATTR_MAP = {
        "username": "cn",  # LDAP `cn`   -> username
        "name": "sn",      # LDAP `sn`   -> name
        "email": "mail"    # LDAP `mail` -> email
    }
    # Enable TLS on the LDAP connection
    AUTH_LDAP_START_TLS = False

    # One-time-password tolerance window
    OTP_VALID_WINDOW = int(os.environ.get("OTP_VALID_WINDOW")) if os.environ.get("OTP_VALID_WINDOW") else 0


# Default using Config settings, you can write if/else for different env
config = DockerConfig()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
jumpserver.config
~~~~~~~~~~~~~~~~~
Jumpserver project setting file
:copyright: (c) 2014-2017 by Jumpserver Team
:license: GPL v2, see LICENSE for more details.
"""
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class Config:
    """
    Jumpserver config file.

    Jumpserver uses this config to drive the Django framework. Each value
    can be set here or exported as an environment variable of the same
    name. Lookup order: file => environment => default.
    """
    # SECURITY WARNING: keep the secret key used in production secret!
    # Replace with a random string in production and never leak it.
    SECRET_KEY = '2vym+ky!997d5kkcc64mnz06y1mmui3lut#(^wd=%s_qj$1%x'

    # SECURITY WARNING: keep the bootstrap token used in production secret!
    # Pre-shared token coco and guacamole use to register their service
    # accounts, replacing the old register-then-accept mechanism.
    BOOTSTRAP_TOKEN = 'PleaseChangeMe'

    # Development env open this, when error occur display the full process track, Production disable it
    # DEBUG = True

    # DEBUG, INFO, WARNING, ERROR, CRITICAL can set. See https://docs.djangoproject.com/en/1.10/topics/logging/
    # LOG_LEVEL = 'DEBUG'
    # LOG_DIR = os.path.join(BASE_DIR, 'logs')

    # Session expiration setting, default 24 hours; can also be set to
    # expire when the browser closes.
    # SESSION_COOKIE_AGE = 3600 * 24
    # SESSION_EXPIRE_AT_BROWSER_CLOSE = False

    # Database setting, Support sqlite3, mysql, postgres ....
    # See https://docs.djangoproject.com/en/1.10/ref/settings/#databases
    # SQLite setting (single-file database):
    # DB_ENGINE = 'sqlite3'
    # DB_NAME = os.path.join(BASE_DIR, 'data', 'db.sqlite3')

    # MySQL or postgres setting like:
    DB_ENGINE = 'mysql'
    DB_HOST = '127.0.0.1'
    DB_PORT = 3306
    DB_USER = 'jumpserver'
    DB_PASSWORD = ''
    DB_NAME = 'jumpserver'

    # Host and port Django binds on startup
    # (./manage.py runserver 127.0.0.1:8080).
    HTTP_BIND_HOST = '0.0.0.0'
    HTTP_LISTEN_PORT = 8080

    # Use Redis as broker for celery and web socket
    REDIS_HOST = '127.0.0.1'
    REDIS_PORT = 6379
    # REDIS_PASSWORD = ''
    # REDIS_DB_CELERY = 3
    # REDIS_DB_CACHE = 4

    # OpenID authorization settings
    # BASE_SITE_URL = 'http://localhost:8080'
    # AUTH_OPENID = False  # True or False
    # AUTH_OPENID_SERVER_URL = 'https://openid-auth-server.com/'
    # AUTH_OPENID_REALM_NAME = 'realm-name'
    # AUTH_OPENID_CLIENT_ID = 'client-id'
    # AUTH_OPENID_CLIENT_SECRET = 'client-secret'

    #
    # OTP_VALID_WINDOW = 0

    def __init__(self):
        pass

    def __getattr__(self, item):
        # Unknown settings resolve to None rather than raising
        # AttributeError.
        return None
class DevelopmentConfig(Config):
    # Development overrides (none yet); inherits everything from Config.
    pass


class TestConfig(Config):
    # Test-environment overrides (none yet).
    pass


class ProductionConfig(Config):
    # Production overrides (none yet).
    pass


# Default using Config settings, you can write if/else for different env
config = DevelopmentConfig()
# SECURITY WARNING: keep the secret key used in production secret!
# 加密秘钥 生产环境中请修改为随机字符串,请勿外泄, 可使用命令生成
# $ cat /dev/urandom | tr -dc A-Za-z0-9 | head -c 49;echo
SECRET_KEY:
# SECURITY WARNING: keep the bootstrap token used in production secret!
# 预共享Token coco和guacamole用来注册服务账号,不再使用原来的注册接收机制
BOOTSTRAP_TOKEN:
# Development env open this, when error occur display the full process track, Production disable it
# DEBUG 模式 开启DEBUG后遇到错误时可以看到更多日志
# DEBUG: true
# DEBUG, INFO, WARNING, ERROR, CRITICAL can set. See https://docs.djangoproject.com/en/1.10/topics/logging/
# 日志级别
# LOG_LEVEL: DEBUG
# LOG_DIR:
# Session expiration setting, default 24 hours; can also be set to expire on browser close
# 浏览器Session过期时间,默认24小时, 也可以设置浏览器关闭则过期
# SESSION_COOKIE_AGE: 3600 * 24
# SESSION_EXPIRE_AT_BROWSER_CLOSE: False
# Database setting, Support sqlite3, mysql, postgres ....
# 数据库设置
# See https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# SQLite setting:
# 使用单文件sqlite数据库
# DB_ENGINE: sqlite3
# DB_NAME:
# MySQL or postgres setting like:
# 使用Mysql作为数据库
DB_ENGINE: mysql
DB_HOST: 127.0.0.1
DB_PORT: 3306
DB_USER: jumpserver
DB_PASSWORD:
DB_NAME: jumpserver
# When Django start it will bind this host and port
# ./manage.py runserver 127.0.0.1:8080
# 运行时绑定端口
HTTP_BIND_HOST: 0.0.0.0
HTTP_LISTEN_PORT: 8080
# Use Redis as broker for celery and web socket
# Redis配置
REDIS_HOST: 127.0.0.1
REDIS_PORT: 6379
# REDIS_PASSWORD:
# REDIS_DB_CELERY: 3
# REDIS_DB_CACHE: 4
# Use OpenID authorization
# 使用OpenID 来进行认证设置
# BASE_SITE_URL: http://localhost:8080
# AUTH_OPENID: false # True or False
# AUTH_OPENID_SERVER_URL: https://openid-auth-server.com/
# AUTH_OPENID_REALM_NAME: realm-name
# AUTH_OPENID_CLIENT_ID: client-id
# AUTH_OPENID_CLIENT_SECRET: client-secret
# OTP settings
# OTP/MFA 配置
# OTP_VALID_WINDOW: 0
# OTP_ISSUER_NAME: Jumpserver
......@@ -15,9 +15,10 @@ sys.path.append(BASE_DIR)
from apps import __version__
# Load user configuration, exiting with guidance when it is missing.
# Merge residue removed: the obsolete `from config import config as CONFIG`
# line (its result was immediately overwritten) and the old
# `config_example.py` hint message.
try:
    from apps.jumpserver.conf import load_user_config
    CONFIG = load_user_config()
except ImportError:
    print("Could not find config file, `cp config_example.yml config.yml`")
    sys.exit(1)
os.environ["PYTHONIOENCODING"] = "UTF-8"
......@@ -107,8 +108,7 @@ def is_running(s, unlink=True):
pid_file = get_pid_file_path(s)
if os.path.isfile(pid_file):
with open(pid_file, 'r') as f:
pid = get_pid(s)
pid = get_pid(s)
if check_pid(pid):
return True
......
......@@ -78,3 +78,4 @@ python-keycloak-client==0.1.3
rest_condition==1.0.3
python-ldap==3.1.0
tencentcloud-sdk-python==3.0.40
django-radius==1.3.3
......@@ -17,13 +17,13 @@ class UserCreation:
self.domain = domain
def auth(self):
    """Log in via /api/users/v1/auth/ and cache the Bearer token in
    self.headers for subsequent requests.

    Merge residue removed: a superseded token-endpoint URL and an old
    Keyword/Token Authorization entry that duplicated the dict key (and
    broke the literal's syntax).
    """
    url = "{}/api/users/v1/auth/".format(self.domain)
    data = {"username": self.username, "password": self.password}
    resp = requests.post(url, data=data)
    if resp.status_code == 200:
        data = resp.json()
        self.headers.update({
            'Authorization': '{} {}'.format('Bearer', data['token'])
        })
    else:
        print("用户名 或 密码 或 地址 不对")
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment