Unverified Commit 3b56027e authored by 老广's avatar 老广 Committed by GitHub

Merge pull request #2364 from jumpserver/dev

Dev
parents ed1dc7a9 82856100
...@@ -17,6 +17,7 @@ dump.rdb ...@@ -17,6 +17,7 @@ dump.rdb
.idea/ .idea/
db.sqlite3 db.sqlite3
config.py config.py
config.yml
*.log *.log
host_rsa_key host_rsa_key
*.bat *.bat
......
...@@ -6,13 +6,13 @@ RUN useradd jumpserver ...@@ -6,13 +6,13 @@ RUN useradd jumpserver
COPY ./requirements /tmp/requirements COPY ./requirements /tmp/requirements
RUN yum -y install epel-release && cd /tmp/requirements && \ RUN yum -y install epel-release openldap-clients telnet && cd /tmp/requirements && \
yum -y install $(cat rpm_requirements.txt) yum -y install $(cat rpm_requirements.txt)
RUN cd /tmp/requirements && pip install -r requirements.txt RUN cd /tmp/requirements && pip install -r requirements.txt
COPY . /opt/jumpserver COPY . /opt/jumpserver
COPY config_docker.py /opt/jumpserver/config.py RUN echo > config.yml
VOLUME /opt/jumpserver/data VOLUME /opt/jumpserver/data
VOLUME /opt/jumpserver/logs VOLUME /opt/jumpserver/logs
......
...@@ -87,6 +87,7 @@ class AdminUserTestConnectiveApi(generics.RetrieveAPIView): ...@@ -87,6 +87,7 @@ class AdminUserTestConnectiveApi(generics.RetrieveAPIView):
""" """
queryset = AdminUser.objects.all() queryset = AdminUser.objects.all()
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
admin_user = self.get_object() admin_user = self.get_object()
......
...@@ -113,6 +113,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView): ...@@ -113,6 +113,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView):
""" """
queryset = Asset.objects.all() queryset = Asset.objects.all()
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
asset_id = kwargs.get('pk') asset_id = kwargs.get('pk')
...@@ -124,6 +125,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView): ...@@ -124,6 +125,7 @@ class AssetAdminUserTestApi(generics.RetrieveAPIView):
class AssetGatewayApi(generics.RetrieveAPIView): class AssetGatewayApi(generics.RetrieveAPIView):
queryset = Asset.objects.all() queryset = Asset.objects.all()
permission_classes = (IsOrgAdminOrAppUser,) permission_classes = (IsOrgAdminOrAppUser,)
serializer_class = serializers.GatewayWithAuthSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
asset_id = kwargs.get('pk') asset_id = kwargs.get('pk')
......
...@@ -43,6 +43,23 @@ class NodeViewSet(viewsets.ModelViewSet): ...@@ -43,6 +43,23 @@ class NodeViewSet(viewsets.ModelViewSet):
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
serializer_class = serializers.NodeSerializer serializer_class = serializers.NodeSerializer
def perform_create(self, serializer):
    # Nodes created through this endpoint are attached directly under
    # the root node: grab root's next free child key before saving.
    serializer.validated_data["key"] = Node.root().get_next_child_key()
    serializer.save()
def update(self, request, *args, **kwargs):
    """Reject renames of the root node; delegate all other updates."""
    node = self.get_object()
    # Only the root node's name (``value``) is protected from change.
    if node.is_root() and request.data.get('value') != node.value:
        return Response(
            {"msg": _("You can't update the root node name")},
            status=400
        )
    return super().update(request, *args, **kwargs)
class NodeListAsTreeApi(generics.ListAPIView): class NodeListAsTreeApi(generics.ListAPIView):
""" """
...@@ -259,7 +276,7 @@ class RefreshNodeHardwareInfoApi(APIView): ...@@ -259,7 +276,7 @@ class RefreshNodeHardwareInfoApi(APIView):
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
node_id = kwargs.get('pk') node_id = kwargs.get('pk')
node = get_object_or_404(self.model, id=node_id) node = get_object_or_404(self.model, id=node_id)
assets = node.assets.all() assets = node.get_all_assets()
# task_name = _("更新节点资产硬件信息: {}".format(node.name)) # task_name = _("更新节点资产硬件信息: {}".format(node.name))
task_name = _("Update node asset hardware information: {}").format(node.name) task_name = _("Update node asset hardware information: {}").format(node.name)
task = update_assets_hardware_info_util.delay(assets, task_name=task_name) task = update_assets_hardware_info_util.delay(assets, task_name=task_name)
...@@ -273,7 +290,7 @@ class TestNodeConnectiveApi(APIView): ...@@ -273,7 +290,7 @@ class TestNodeConnectiveApi(APIView):
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
node_id = kwargs.get('pk') node_id = kwargs.get('pk')
node = get_object_or_404(self.model, id=node_id) node = get_object_or_404(self.model, id=node_id)
assets = node.assets.all() assets = node.get_all_assets()
# task_name = _("测试节点下资产是否可连接: {}".format(node.name)) # task_name = _("测试节点下资产是否可连接: {}".format(node.name))
task_name = _("Test if the assets under the node are connectable: {}".format(node.name)) task_name = _("Test if the assets under the node are connectable: {}".format(node.name))
task = test_asset_connectivity_util.delay(assets, task_name=task_name) task = test_asset_connectivity_util.delay(assets, task_name=task_name)
......
...@@ -117,6 +117,7 @@ class SystemUserAssetsListView(generics.ListAPIView): ...@@ -117,6 +117,7 @@ class SystemUserAssetsListView(generics.ListAPIView):
class SystemUserPushToAssetApi(generics.RetrieveAPIView): class SystemUserPushToAssetApi(generics.RetrieveAPIView):
queryset = SystemUser.objects.all() queryset = SystemUser.objects.all()
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
system_user = self.get_object() system_user = self.get_object()
...@@ -129,6 +130,7 @@ class SystemUserPushToAssetApi(generics.RetrieveAPIView): ...@@ -129,6 +130,7 @@ class SystemUserPushToAssetApi(generics.RetrieveAPIView):
class SystemUserTestAssetConnectivityApi(generics.RetrieveAPIView): class SystemUserTestAssetConnectivityApi(generics.RetrieveAPIView):
queryset = SystemUser.objects.all() queryset = SystemUser.objects.all()
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
serializer_class = serializers.TaskIDSerializer
def retrieve(self, request, *args, **kwargs): def retrieve(self, request, *args, **kwargs):
system_user = self.get_object() system_user = self.get_object()
......
...@@ -185,7 +185,7 @@ class Asset(OrgModelMixin): ...@@ -185,7 +185,7 @@ class Asset(OrgModelMixin):
@property @property
def connectivity(self): def connectivity(self):
if not self.is_unixlike(): if not self.is_unixlike():
return self.UNKNOWN return self.REACHABLE
key = self.CONNECTIVITY_CACHE_KEY.format(str(self.id)) key = self.CONNECTIVITY_CACHE_KEY.format(str(self.id))
cached = cache.get(key, None) cached = cache.get(key, None)
return cached if cached is not None else self.UNKNOWN return cached if cached is not None else self.UNKNOWN
......
...@@ -58,7 +58,7 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer): ...@@ -58,7 +58,7 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer):
管理用户更新关联到的集群 管理用户更新关联到的集群
""" """
nodes = serializers.PrimaryKeyRelatedField( nodes = serializers.PrimaryKeyRelatedField(
many=True, queryset = Node.objects.all() many=True, queryset=Node.objects.all()
) )
class Meta: class Meta:
...@@ -66,4 +66,5 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer): ...@@ -66,4 +66,5 @@ class ReplaceNodeAdminUserSerializer(serializers.ModelSerializer):
fields = ['id', 'nodes'] fields = ['id', 'nodes']
class TaskIDSerializer(serializers.Serializer):
    """Response schema with a single read-only ``task`` field.

    Used as ``serializer_class`` by the API views in this app that
    enqueue a background task and return its id to the client.
    """
    task = serializers.CharField(read_only=True)
# ~*~ coding: utf-8 ~*~ # ~*~ coding: utf-8 ~*~
import json import json
import re import re
import time
import os import os
from celery import shared_task from celery import shared_task
from django.utils.translation import ugettext as _ from django.utils.translation import ugettext as _
from django.core.cache import cache from django.core.cache import cache
from common.utils import capacity_convert, \ from common.utils import (
sum_capacity, encrypt_password, get_logger capacity_convert, sum_capacity, encrypt_password, get_logger
from ops.celery.utils import register_as_period_task, after_app_shutdown_clean )
from ops.celery.decorator import (
register_as_period_task, after_app_shutdown_clean_periodic
)
from .models import SystemUser, AdminUser, Asset from .models import SystemUser, AdminUser, Asset
from . import const from . import const
...@@ -132,7 +134,7 @@ def update_assets_hardware_info_util(assets, task_name=None): ...@@ -132,7 +134,7 @@ def update_assets_hardware_info_util(assets, task_name=None):
@shared_task @shared_task
def update_asset_hardware_info_manual(asset): def update_asset_hardware_info_manual(asset):
task_name = _("Update asset hardware info: {}").format(asset.hostname) task_name = _("Update asset hardware info: {}").format(asset.hostname)
return update_assets_hardware_info_util( update_assets_hardware_info_util(
[asset], task_name=task_name [asset], task_name=task_name
) )
...@@ -221,12 +223,14 @@ def test_admin_user_connectivity_period(): ...@@ -221,12 +223,14 @@ def test_admin_user_connectivity_period():
for admin_user in admin_users: for admin_user in admin_users:
task_name = _("Test admin user connectivity period: {}").format(admin_user.name) task_name = _("Test admin user connectivity period: {}").format(admin_user.name)
test_admin_user_connectivity_util(admin_user, task_name) test_admin_user_connectivity_util(admin_user, task_name)
cache.set(key, 1, 60*40)
@shared_task @shared_task
def test_admin_user_connectivity_manual(admin_user): def test_admin_user_connectivity_manual(admin_user):
task_name = _("Test admin user connectivity: {}").format(admin_user.name) task_name = _("Test admin user connectivity: {}").format(admin_user.name)
return test_admin_user_connectivity_util(admin_user, task_name) test_admin_user_connectivity_util(admin_user, task_name)
return True
## System user connective ## ## System user connective ##
...@@ -394,13 +398,13 @@ def push_system_user_to_assets(system_user, assets): ...@@ -394,13 +398,13 @@ def push_system_user_to_assets(system_user, assets):
@shared_task @shared_task
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def test_system_user_connectability_period(): def test_system_user_connectability_period():
pass pass
@shared_task @shared_task
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def test_admin_user_connectability_period(): def test_admin_user_connectability_period():
pass pass
...@@ -408,7 +412,7 @@ def test_admin_user_connectability_period(): ...@@ -408,7 +412,7 @@ def test_admin_user_connectability_period():
# @shared_task # @shared_task
# @register_as_period_task(interval=3600) # @register_as_period_task(interval=3600)
# @after_app_ready_start # @after_app_ready_start
# # @after_app_shutdown_clean # @after_app_shutdown_clean_periodic
# def push_system_user_period(): # def push_system_user_period():
# for system_user in SystemUser.objects.all(): # for system_user in SystemUser.objects.all():
# push_system_user_related_nodes(system_user) # push_system_user_related_nodes(system_user)
......
# -*- coding: utf-8 -*-
#
from django.contrib.auth import get_user_model
from radiusauth.backends import RADIUSBackend, RADIUSRealmBackend
from django.conf import settings
User = get_user_model()
class CreateUserMixin:
    """Mixin for RADIUS auth backends that maps a RADIUS login to a
    local Django user, provisioning the account on first login.
    """

    def get_django_user(self, username, password=None):
        """Return the local user for *username*, creating it if absent.

        ``password`` is accepted for interface compatibility but is not
        used here.
        """
        if isinstance(username, bytes):
            username = username.decode()
        try:
            return User.objects.get(username=username)
        except User.DoesNotExist:
            pass
        # First RADIUS login: provision a local account. Reuse the
        # username as the email when it already looks like an address.
        if '@' in username:
            email = username
        else:
            email = '{}@{}'.format(username, settings.EMAIL_SUFFIX)
        user = User(username=username, name=username, email=email)
        user.source = user.SOURCE_RADIUS
        user.save()
        return user
class RadiusBackend(CreateUserMixin, RADIUSBackend):
    """RADIUS backend that auto-creates local users on first login."""
    pass
class RadiusRealmBackend(CreateUserMixin, RADIUSRealmBackend):
    """Realm-aware RADIUS backend with the same auto-create behavior."""
    pass
...@@ -4,15 +4,20 @@ ...@@ -4,15 +4,20 @@
import os import os
import json import json
import jms_storage import jms_storage
import uuid
from rest_framework.views import Response, APIView from rest_framework.views import Response, APIView
from rest_framework import generics
from ldap3 import Server, Connection from ldap3 import Server, Connection
from django.core.mail import get_connection, send_mail from django.core.mail import send_mail
from django.core.cache import cache
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
from django.conf import settings from django.conf import settings
from .permissions import IsOrgAdmin, IsSuperUser from .permissions import IsOrgAdmin, IsSuperUser
from .serializers import MailTestSerializer, LDAPTestSerializer from .serializers import (
MailTestSerializer, LDAPTestSerializer, OutputSerializer
)
from .models import Setting from .models import Setting
...@@ -189,4 +194,39 @@ class DjangoSettingsAPI(APIView): ...@@ -189,4 +194,39 @@ class DjangoSettingsAPI(APIView):
return Response(data) return Response(data)
class LogTailApi(generics.RetrieveAPIView):
    """Poll-based API that streams a log file to the client in chunks.

    Every response carries a ``mark`` token. The reader's file offset is
    stored in the cache under that token, and the client echoes the mark
    back on its next poll to resume from where it left off.
    """
    permission_classes = ()
    buff_size = 1024 * 10     # max bytes returned per poll
    serializer_class = OutputSerializer
    end = False               # set True once the log is fully consumed

    def is_file_finish_write(self):
        # Subclasses may report whether the producer has stopped writing.
        return True

    def get_log_path(self):
        # Subclasses must supply the path of the log file to tail.
        raise NotImplementedError()

    def get(self, request, *args, **kwargs):
        mark = request.query_params.get("mark") or str(uuid.uuid4())
        log_path = self.get_log_path()

        if not log_path or not os.path.isfile(log_path):
            # File does not exist yet: either the writer is still working
            # (ask the client to keep polling) or nothing will ever appear.
            if not self.is_file_finish_write():
                return Response({"data": "Waiting...\r\n"}, status=200)
            return Response({
                "data": 'Not found the log',
                'end': True,
                'mark': mark
            })

        with open(log_path, 'r') as f:
            f.seek(cache.get(mark, 0))
            data = f.read(self.buff_size).replace('\n', '\r\n')
            # Hand out a fresh mark and remember the new offset briefly.
            mark = str(uuid.uuid4())
            cache.set(mark, f.tell(), 5)
            if data == '' and self.is_file_finish_write():
                self.end = True
            return Response({"data": data, 'end': self.end, 'mark': mark})
...@@ -19,3 +19,8 @@ class LDAPTestSerializer(serializers.Serializer): ...@@ -19,3 +19,8 @@ class LDAPTestSerializer(serializers.Serializer):
AUTH_LDAP_USER_ATTR_MAP = serializers.CharField() AUTH_LDAP_USER_ATTR_MAP = serializers.CharField()
AUTH_LDAP_START_TLS = serializers.BooleanField(required=False) AUTH_LDAP_START_TLS = serializers.BooleanField(required=False)
class OutputSerializer(serializers.Serializer):
    """Declared schema for streamed log/output responses."""
    # NOTE(review): these field names (output/is_end) do not match the
    # keys the tail API actually returns (data/end) — confirm intended.
    output = serializers.CharField()
    is_end = serializers.BooleanField()
    mark = serializers.CharField()
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
import json
from django.dispatch import receiver from django.dispatch import receiver
from django.db.models.signals import post_save, pre_save from django.db.models.signals import post_save, pre_save
from django.conf import LazySettings, empty from django.conf import LazySettings, empty
...@@ -8,7 +10,7 @@ from django.core.cache import cache ...@@ -8,7 +10,7 @@ from django.core.cache import cache
from jumpserver.utils import current_request from jumpserver.utils import current_request
from .models import Setting from .models import Setting
from .utils import get_logger from .utils import get_logger, ssh_key_gen
from .signals import django_ready from .signals import django_ready
logger = get_logger(__file__) logger = get_logger(__file__)
...@@ -16,23 +18,25 @@ logger = get_logger(__file__) ...@@ -16,23 +18,25 @@ logger = get_logger(__file__)
@receiver(post_save, sender=Setting, dispatch_uid="my_unique_identifier") @receiver(post_save, sender=Setting, dispatch_uid="my_unique_identifier")
def refresh_settings_on_changed(sender, instance=None, **kwargs): def refresh_settings_on_changed(sender, instance=None, **kwargs):
logger.debug("Receive setting item change")
logger.debug(" - refresh setting: {}".format(instance.name))
if instance: if instance:
instance.refresh_setting() instance.refresh_setting()
@receiver(django_ready, dispatch_uid="my_unique_identifier") @receiver(django_ready, dispatch_uid="my_unique_identifier")
def refresh_all_settings_on_django_ready(sender, **kwargs): def monkey_patch_settings(sender, **kwargs):
logger.debug("Receive django ready signal")
logger.debug(" - fresh all settings")
cache_key_prefix = '_SETTING_' cache_key_prefix = '_SETTING_'
uncached_settings = [
'CACHES', 'DEBUG', 'SECRET_KEY', 'INSTALLED_APPS',
'ROOT_URLCONF', 'TEMPLATES', 'DATABASES', '_wrapped',
'CELERY_LOG_DIR'
]
def monkey_patch_getattr(self, name): def monkey_patch_getattr(self, name):
key = cache_key_prefix + name if name not in uncached_settings:
cached = cache.get(key) key = cache_key_prefix + name
if cached is not None: cached = cache.get(key)
return cached if cached is not None:
return cached
if self._wrapped is empty: if self._wrapped is empty:
self._setup(name) self._setup(name)
val = getattr(self._wrapped, name) val = getattr(self._wrapped, name)
...@@ -62,6 +66,18 @@ def refresh_all_settings_on_django_ready(sender, **kwargs): ...@@ -62,6 +66,18 @@ def refresh_all_settings_on_django_ready(sender, **kwargs):
pass pass
@receiver(django_ready)
def auto_generate_terminal_host_key(sender, **kwargs):
    """On Django startup, create the TERMINAL_HOST_KEY setting if missing.

    Best effort: any failure (e.g. the database is not migrated yet when
    the signal fires) is swallowed so startup is never blocked.
    """
    try:
        if Setting.objects.filter(name='TERMINAL_HOST_KEY').exists():
            return
        private_key, public_key = ssh_key_gen()
        value = json.dumps(private_key)
        Setting.objects.create(name='TERMINAL_HOST_KEY', value=value)
    except Exception:
        # Narrowed from a bare ``except:`` which would also trap
        # SystemExit/KeyboardInterrupt; keep the best-effort intent.
        pass
@receiver(pre_save, dispatch_uid="my_unique_identifier") @receiver(pre_save, dispatch_uid="my_unique_identifier")
def on_create_set_created_by(sender, instance=None, **kwargs): def on_create_set_created_by(sender, instance=None, **kwargs):
if getattr(instance, '_ignore_auto_created_by', False) is True: if getattr(instance, '_ignore_auto_created_by', False) is True:
......
...@@ -35,7 +35,7 @@ ...@@ -35,7 +35,7 @@
<option value ="s3">s3</option> <option value ="s3">s3</option>
<option value="oss">oss</option> <option value="oss">oss</option>
<option value ="azure">azure</option> <option value ="azure">azure</option>
<option value="ceph">ceph</option> {# <option value="ceph">ceph</option>#}
</select> </select>
</div> </div>
</div> </div>
...@@ -108,15 +108,21 @@ ...@@ -108,15 +108,21 @@
<label class="col-md-2 control-label" for="id_endpoint">{% trans "Endpoint" %}</label> <label class="col-md-2 control-label" for="id_endpoint">{% trans "Endpoint" %}</label>
<div class="col-md-9"> <div class="col-md-9">
<input id="id_endpoint" class="form-control" type="text" name="ENDPOINT" value="" placeholder="Endpoint"> <input id="id_endpoint" class="form-control" type="text" name="ENDPOINT" value="" placeholder="Endpoint">
<div class="help-block">
<span class="oss">
{% trans 'OSS: http://{REGION_NAME}.aliyuncs.com' %}
<br>
{% trans 'Example: http://oss-cn-hangzhou.aliyuncs.com' %}
</span>
<span class="s3">{% trans 'S3: http://s3.{REGION_NAME}.amazonaws.com' %}<br></span>
<span class="s3">{% trans 'S3(China): http://s3.{REGION_NAME}.amazonaws.com.cn' %}<br></span>
<span class="s3">{% trans 'Example: http://s3.cn-north-1.amazonaws.com.cn' %}<br></span>
</div>
</div> </div>
</div> </div>
<div class="form-group" style="display: none;" > <div class="form-group" style="display: none;" >
<label class="col-md-2 control-label" for="id_endpoint_suffix">{% trans "Endpoint suffix" %}</label> <label class="col-md-2 control-label" for="id_endpoint_suffix">{% trans "Endpoint suffix" %}</label>
{# <div class="col-md-9">#}
{# <input id="id_endpoint_suffix" class="form-control" type="text" name="ENDPOINT_SUFFIX" value="">#}
{# <div class="help-block">{% trans '' %}</div>#}
{# </div>#}
<div class="col-md-9"> <div class="col-md-9">
<select id="id_endpoint_suffix" name="ENDPOINT_SUFFIX" class="endpoint-suffix-selector form-control"> <select id="id_endpoint_suffix" name="ENDPOINT_SUFFIX" class="endpoint-suffix-selector form-control">
<option value="core.chinacloudapi.cn" selected="selected">core.chinacloudapi.cn</option> <option value="core.chinacloudapi.cn" selected="selected">core.chinacloudapi.cn</option>
...@@ -129,6 +135,13 @@ ...@@ -129,6 +135,13 @@
<label class="col-md-2 control-label" for="id_region">{% trans "Region" %}</label> <label class="col-md-2 control-label" for="id_region">{% trans "Region" %}</label>
<div class="col-md-9"> <div class="col-md-9">
<input id="id_region" class="form-control" type="text" name="REGION" value="" placeholder=""> <input id="id_region" class="form-control" type="text" name="REGION" value="" placeholder="">
<div class="help-block">
<span class="s3">
{% trans 'Beijing: cn-north-1' %}
{% trans 'Ningxia: cn-northwest-1' %}
<a href="https://docs.aws.amazon.com/zh_cn/general/latest/gr/rande.html">{% trans 'More' %}</a>
</span>
</div>
</div> </div>
</div> </div>
...@@ -166,7 +179,6 @@ function hiddenField(field){ ...@@ -166,7 +179,6 @@ function hiddenField(field){
} }
function getFieldByType(type){ function getFieldByType(type){
if(type === 'server'){ if(type === 'server'){
return need_get_field_of_server return need_get_field_of_server
} }
...@@ -211,15 +223,17 @@ $(document).ready(function() { ...@@ -211,15 +223,17 @@ $(document).ready(function() {
field_of_all = [name_id, host_id, port_id, bucket_id, access_key_id, secret_key_id, container_name_id, account_name_id, account_key_id, endpoint_id, endpoint_suffix_id, region_id]; field_of_all = [name_id, host_id, port_id, bucket_id, access_key_id, secret_key_id, container_name_id, account_name_id, account_key_id, endpoint_id, endpoint_suffix_id, region_id];
need_get_field_of_server = [name_id]; need_get_field_of_server = [name_id];
need_get_field_of_s3 = [name_id, bucket_id, access_key_id, secret_key_id, region_id]; need_get_field_of_s3 = [name_id, bucket_id, access_key_id, secret_key_id, region_id, endpoint_id];
need_get_field_of_oss = [name_id, bucket_id, access_key_id, secret_key_id, endpoint_id]; need_get_field_of_oss = [name_id, bucket_id, access_key_id, secret_key_id, endpoint_id];
need_get_field_of_azure = [name_id, container_name_id, account_name_id, account_key_id, endpoint_suffix_id]; need_get_field_of_azure = [name_id, container_name_id, account_name_id, account_key_id, endpoint_suffix_id];
need_get_field_of_ceph = [name_id, host_id, port_id, bucket_id, access_key_id, secret_key_id, region_id]; need_get_field_of_ceph = [name_id, host_id, port_id, bucket_id, access_key_id, secret_key_id, region_id];
}) })
.on('change', '.selector', function(){ .on('change', '.selector', function(){
var type = $('.selector').val(); var type = $('.selector').val();
console.log(type); $("." + type).show();
hiddenField(field_of_all); hiddenField(field_of_all);
$('.help-block').children().hide();
$('.help-block ' + '.' + type).show();
var field = getFieldByType(type); var field = getFieldByType(type);
showField(field) showField(field)
}) })
......
...@@ -406,24 +406,6 @@ def get_replay_storage_setting(): ...@@ -406,24 +406,6 @@ def get_replay_storage_setting():
return value return value
class TeeObj:
origin_stdout = sys.stdout
def __init__(self, file_obj):
self.file_obj = file_obj
def write(self, msg):
self.origin_stdout.write(msg)
self.file_obj.write(msg.replace('*', ''))
def flush(self):
self.origin_stdout.flush()
self.file_obj.flush()
def close(self):
self.file_obj.close()
def with_cache(func): def with_cache(func):
cache = {} cache = {}
key = "_{}.{}".format(func.__module__, func.__name__) key = "_{}.{}".format(func.__module__, func.__name__)
......
...@@ -193,14 +193,16 @@ class Config(dict): ...@@ -193,14 +193,16 @@ class Config(dict):
if self.root_path: if self.root_path:
filename = os.path.join(self.root_path, filename) filename = os.path.join(self.root_path, filename)
try: try:
with open(filename) as json_file: with open(filename) as f:
obj = yaml.load(json_file) obj = yaml.load(f)
except IOError as e: except IOError as e:
if silent and e.errno in (errno.ENOENT, errno.EISDIR): if silent and e.errno in (errno.ENOENT, errno.EISDIR):
return False return False
e.strerror = 'Unable to load configuration file (%s)' % e.strerror e.strerror = 'Unable to load configuration file (%s)' % e.strerror
raise raise
return self.from_mapping(obj) if obj:
return self.from_mapping(obj)
return True
def from_mapping(self, *mapping, **kwargs): def from_mapping(self, *mapping, **kwargs):
"""Updates the config like :meth:`update` ignoring items with non-upper """Updates the config like :meth:`update` ignoring items with non-upper
...@@ -278,6 +280,8 @@ class Config(dict): ...@@ -278,6 +280,8 @@ class Config(dict):
return value return value
value = os.environ.get(item, None) value = os.environ.get(item, None)
if value is not None: if value is not None:
if value.isdigit():
value = int(value)
return value return value
return self.defaults.get(item) return self.defaults.get(item)
...@@ -286,8 +290,8 @@ class Config(dict): ...@@ -286,8 +290,8 @@ class Config(dict):
defaults = { defaults = {
'SECRET_KEY': '2vym+ky!997d5kkcc64mnz06y1mmui3lut#(^wd=%s_qj$1%x', 'SECRET_KEY': '',
'BOOTSTRAP_TOKEN': 'PleaseChangeMe', 'BOOTSTRAP_TOKEN': '',
'DEBUG': True, 'DEBUG': True,
'SITE_URL': 'http://localhost', 'SITE_URL': 'http://localhost',
'LOG_LEVEL': 'DEBUG', 'LOG_LEVEL': 'DEBUG',
...@@ -312,6 +316,7 @@ defaults = { ...@@ -312,6 +316,7 @@ defaults = {
'SESSION_COOKIE_AGE': 3600 * 24, 'SESSION_COOKIE_AGE': 3600 * 24,
'SESSION_EXPIRE_AT_BROWSER_CLOSE': False, 'SESSION_EXPIRE_AT_BROWSER_CLOSE': False,
'AUTH_OPENID': False, 'AUTH_OPENID': False,
'OTP_VALID_WINDOW': 0,
'OTP_ISSUER_NAME': 'Jumpserver', 'OTP_ISSUER_NAME': 'Jumpserver',
'EMAIL_SUFFIX': 'jumpserver.org', 'EMAIL_SUFFIX': 'jumpserver.org',
'TERMINAL_PASSWORD_AUTH': True, 'TERMINAL_PASSWORD_AUTH': True,
...@@ -320,6 +325,7 @@ defaults = { ...@@ -320,6 +325,7 @@ defaults = {
'TERMINAL_ASSET_LIST_SORT_BY': 'hostname', 'TERMINAL_ASSET_LIST_SORT_BY': 'hostname',
'TERMINAL_ASSET_LIST_PAGE_SIZE': 'auto', 'TERMINAL_ASSET_LIST_PAGE_SIZE': 'auto',
'TERMINAL_SESSION_KEEP_DURATION': 9999, 'TERMINAL_SESSION_KEEP_DURATION': 9999,
'TERMINAL_HOST_KEY': '',
'SECURITY_MFA_AUTH': False, 'SECURITY_MFA_AUTH': False,
'SECURITY_LOGIN_LIMIT_COUNT': 7, 'SECURITY_LOGIN_LIMIT_COUNT': 7,
'SECURITY_LOGIN_LIMIT_TIME': 30, 'SECURITY_LOGIN_LIMIT_TIME': 30,
...@@ -330,21 +336,48 @@ defaults = { ...@@ -330,21 +336,48 @@ defaults = {
'SECURITY_PASSWORD_LOWER_CASE': False, 'SECURITY_PASSWORD_LOWER_CASE': False,
'SECURITY_PASSWORD_NUMBER': False, 'SECURITY_PASSWORD_NUMBER': False,
'SECURITY_PASSWORD_SPECIAL_CHAR': False, 'SECURITY_PASSWORD_SPECIAL_CHAR': False,
'AUTH_RADIUS': False,
'RADIUS_SERVER': 'localhost',
'RADIUS_PORT': 1812,
'RADIUS_SECRET': '',
'HTTP_BIND_HOST': '0.0.0.0',
'HTTP_LISTEN_PORT': 8080,
} }
def load_user_config(): def load_from_object(config):
sys.path.insert(0, PROJECT_DIR)
config = Config(PROJECT_DIR, defaults)
try: try:
from config import config as c from config import config as c
config.from_object(c) config.from_object(c)
return True
except ImportError: except ImportError:
pass
return False
def load_from_yml(config):
for i in ['config.yml', 'config.yaml']:
if not os.path.isfile(os.path.join(config.root_path, i)):
continue
loaded = config.from_yaml(i)
if loaded:
return True
return False
def load_user_config():
sys.path.insert(0, PROJECT_DIR)
config = Config(PROJECT_DIR, defaults)
loaded = load_from_object(config)
if not loaded:
loaded = load_from_yml(config)
if not loaded:
msg = """ msg = """
Error: No config file found. Error: No config file found.
You can run `cp config_example.py config.py`, and edit it. You can run `cp config_example.yml config.yml`, and edit it.
""" """
raise ImportError(msg) raise ImportError(msg)
return config return config
...@@ -12,6 +12,7 @@ https://docs.djangoproject.com/en/1.10/ref/settings/ ...@@ -12,6 +12,7 @@ https://docs.djangoproject.com/en/1.10/ref/settings/
import os import os
import sys import sys
import socket
import ldap import ldap
# from django_auth_ldap.config import LDAPSearch, LDAPSearchUnion # from django_auth_ldap.config import LDAPSearch, LDAPSearchUnion
...@@ -23,6 +24,12 @@ from .conf import load_user_config ...@@ -23,6 +24,12 @@ from .conf import load_user_config
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_DIR = os.path.dirname(BASE_DIR) PROJECT_DIR = os.path.dirname(BASE_DIR)
CONFIG = load_user_config() CONFIG = load_user_config()
LOG_DIR = os.path.join(PROJECT_DIR, 'logs')
JUMPSERVER_LOG_FILE = os.path.join(LOG_DIR, 'jumpserver.log')
ANSIBLE_LOG_FILE = os.path.join(LOG_DIR, 'ansible.log')
if not os.path.isdir(LOG_DIR):
os.makedirs(LOG_DIR)
# Quick-start development settings - unsuitable for production # Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
...@@ -209,19 +216,21 @@ LOGGING = { ...@@ -209,19 +216,21 @@ LOGGING = {
'formatter': 'main' 'formatter': 'main'
}, },
'file': { 'file': {
'encoding': 'utf8',
'level': 'DEBUG', 'level': 'DEBUG',
'class': 'logging.handlers.TimedRotatingFileHandler', 'class': 'logging.handlers.TimedRotatingFileHandler',
'when': "D", 'when': "D",
'interval': 1, 'interval': 1,
"backupCount": 7, "backupCount": 7,
'formatter': 'main', 'formatter': 'main',
'filename': os.path.join(PROJECT_DIR, 'logs', 'jumpserver.log') 'filename': JUMPSERVER_LOG_FILE,
}, },
'ansible_logs': { 'ansible_logs': {
'encoding': 'utf8',
'level': 'DEBUG', 'level': 'DEBUG',
'class': 'logging.FileHandler', 'class': 'logging.FileHandler',
'formatter': 'main', 'formatter': 'main',
'filename': os.path.join(PROJECT_DIR, 'logs', 'ansible.log') 'filename': ANSIBLE_LOG_FILE,
}, },
}, },
'loggers': { 'loggers': {
...@@ -400,6 +409,19 @@ if AUTH_OPENID: ...@@ -400,6 +409,19 @@ if AUTH_OPENID:
AUTHENTICATION_BACKENDS.insert(0, AUTH_OPENID_BACKENDS[0]) AUTHENTICATION_BACKENDS.insert(0, AUTH_OPENID_BACKENDS[0])
AUTHENTICATION_BACKENDS.insert(0, AUTH_OPENID_BACKENDS[1]) AUTHENTICATION_BACKENDS.insert(0, AUTH_OPENID_BACKENDS[1])
# Radius Auth
AUTH_RADIUS = CONFIG.AUTH_RADIUS
AUTH_RADIUS_BACKEND = 'authentication.radius.backends.RadiusBackend'
RADIUS_SERVER = CONFIG.RADIUS_SERVER
RADIUS_PORT = CONFIG.RADIUS_PORT
RADIUS_SECRET = CONFIG.RADIUS_SECRET
if AUTH_RADIUS:
AUTHENTICATION_BACKENDS.insert(0, AUTH_RADIUS_BACKEND)
# Dump all celery log to here
CELERY_LOG_DIR = os.path.join(PROJECT_DIR, 'data', 'celery')
# Celery using redis as broker # Celery using redis as broker
CELERY_BROKER_URL = 'redis://:%(password)s@%(host)s:%(port)s/%(db)s' % { CELERY_BROKER_URL = 'redis://:%(password)s@%(host)s:%(port)s/%(db)s' % {
'password': CONFIG.REDIS_PASSWORD, 'password': CONFIG.REDIS_PASSWORD,
...@@ -413,14 +435,16 @@ CELERY_RESULT_BACKEND = CELERY_BROKER_URL ...@@ -413,14 +435,16 @@ CELERY_RESULT_BACKEND = CELERY_BROKER_URL
CELERY_ACCEPT_CONTENT = ['json', 'pickle'] CELERY_ACCEPT_CONTENT = ['json', 'pickle']
CELERY_RESULT_EXPIRES = 3600 CELERY_RESULT_EXPIRES = 3600
# CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s' # CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_WORKER_LOG_FORMAT = '%(message)s' # CELERY_WORKER_LOG_FORMAT = '%(message)s'
# CELERY_WORKER_TASK_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s' CELERY_WORKER_TASK_LOG_FORMAT = '%(task_id)s %(task_name)s %(message)s'
CELERY_WORKER_TASK_LOG_FORMAT = '%(message)s' # CELERY_WORKER_TASK_LOG_FORMAT = '%(message)s'
# CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s' # CELERY_WORKER_LOG_FORMAT = '%(asctime)s [%(module)s %(levelname)s] %(message)s'
CELERY_WORKER_LOG_FORMAT = '%(message)s'
CELERY_TASK_EAGER_PROPAGATES = True CELERY_TASK_EAGER_PROPAGATES = True
CELERY_REDIRECT_STDOUTS = True CELERY_WORKER_REDIRECT_STDOUTS = True
CELERY_REDIRECT_STDOUTS_LEVEL = "INFO" CELERY_WORKER_REDIRECT_STDOUTS_LEVEL = "INFO"
CELERY_WORKER_HIJACK_ROOT_LOGGER = False # CELERY_WORKER_HIJACK_ROOT_LOGGER = False
CELERY_WORKER_MAX_TASKS_PER_CHILD = 40
# Cache use redis # Cache use redis
CACHES = { CACHES = {
...@@ -492,6 +516,7 @@ TERMINAL_HEARTBEAT_INTERVAL = CONFIG.TERMINAL_HEARTBEAT_INTERVAL ...@@ -492,6 +516,7 @@ TERMINAL_HEARTBEAT_INTERVAL = CONFIG.TERMINAL_HEARTBEAT_INTERVAL
TERMINAL_ASSET_LIST_SORT_BY = CONFIG.TERMINAL_ASSET_LIST_SORT_BY TERMINAL_ASSET_LIST_SORT_BY = CONFIG.TERMINAL_ASSET_LIST_SORT_BY
TERMINAL_ASSET_LIST_PAGE_SIZE = CONFIG.TERMINAL_ASSET_LIST_PAGE_SIZE TERMINAL_ASSET_LIST_PAGE_SIZE = CONFIG.TERMINAL_ASSET_LIST_PAGE_SIZE
TERMINAL_SESSION_KEEP_DURATION = CONFIG.TERMINAL_SESSION_KEEP_DURATION TERMINAL_SESSION_KEEP_DURATION = CONFIG.TERMINAL_SESSION_KEEP_DURATION
TERMINAL_HOST_KEY = CONFIG.TERMINAL_HOST_KEY
# Django bootstrap3 setting, more see http://django-bootstrap3.readthedocs.io/en/latest/settings.html # Django bootstrap3 setting, more see http://django-bootstrap3.readthedocs.io/en/latest/settings.html
BOOTSTRAP3 = { BOOTSTRAP3 = {
......
This diff is collapsed.
...@@ -118,18 +118,6 @@ class AdHocResultCallback(CallbackMixin, CallbackModule, CMDCallBackModule): ...@@ -118,18 +118,6 @@ class AdHocResultCallback(CallbackMixin, CallbackModule, CMDCallBackModule):
self.gather_result("unreachable", result) self.gather_result("unreachable", result)
super().v2_runner_on_unreachable(result) super().v2_runner_on_unreachable(result)
def on_playbook_start(self, name):
date_start = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.display(
"{} Start task: {}\r\n".format(date_start, name)
)
def on_playbook_end(self, name):
date_finished = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
self.display(
"{} Task finish\r\n".format(date_finished)
)
def display_skipped_hosts(self): def display_skipped_hosts(self):
pass pass
......
# -*- coding: utf-8 -*-
#
import sys
class TeeObj:
    """File-like object that tees every write to the real stdout and a file.

    The copy written to ``file_obj`` has all ``*`` characters stripped
    (presumably to drop ansible's banner padding -- confirm before relying
    on it); the stdout copy is written verbatim.
    """

    # Captured once at class-definition time so it can be restored later
    # via ``tee.origin_stdout`` even after sys.stdout is rebound.
    origin_stdout = sys.stdout

    def __init__(self, file_obj):
        # Secondary sink receiving the '*'-stripped copy of each write.
        self.file_obj = file_obj

    def write(self, msg):
        cleaned = msg.replace('*', '')
        self.origin_stdout.write(msg)
        self.file_obj.write(cleaned)

    def flush(self):
        for stream in (self.origin_stdout, self.file_obj):
            stream.flush()
...@@ -9,10 +9,10 @@ from ansible.parsing.dataloader import DataLoader ...@@ -9,10 +9,10 @@ from ansible.parsing.dataloader import DataLoader
from ansible.executor.playbook_executor import PlaybookExecutor from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.playbook.play import Play from ansible.playbook.play import Play
import ansible.constants as C import ansible.constants as C
from ansible.utils.display import Display
from .callback import AdHocResultCallback, PlaybookResultCallBack, \ from .callback import (
CommandResultCallback AdHocResultCallback, PlaybookResultCallBack, CommandResultCallback
)
from common.utils import get_logger from common.utils import get_logger
from .exceptions import AnsibleError from .exceptions import AnsibleError
...@@ -22,13 +22,6 @@ C.HOST_KEY_CHECKING = False ...@@ -22,13 +22,6 @@ C.HOST_KEY_CHECKING = False
logger = get_logger(__name__) logger = get_logger(__name__)
class CustomDisplay(Display):
def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False):
pass
display = CustomDisplay()
Options = namedtuple('Options', [ Options = namedtuple('Options', [
'listtags', 'listtasks', 'listhosts', 'syntax', 'connection', 'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
'module_path', 'forks', 'remote_user', 'private_key_file', 'timeout', 'module_path', 'forks', 'remote_user', 'private_key_file', 'timeout',
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
import uuid
import os
import os
from celery.result import AsyncResult from celery.result import AsyncResult
from django.core.cache import cache
from django.utils.translation import ugettext as _
from rest_framework import generics from rest_framework import generics
from rest_framework.views import Response
from common.permissions import IsOrgAdmin, IsValidUser from common.permissions import IsValidUser
from common.api import LogTailApi
from ..models import CeleryTask from ..models import CeleryTask
from ..serializers import CeleryResultSerializer from ..serializers import CeleryResultSerializer
from ..celery.utils import get_celery_task_log_path
__all__ = ['CeleryTaskLogApi', 'CeleryResultApi'] __all__ = ['CeleryTaskLogApi', 'CeleryResultApi']
class CeleryTaskLogApi(generics.RetrieveAPIView): class CeleryTaskLogApi(LogTailApi):
permission_classes = (IsValidUser,) permission_classes = (IsValidUser,)
buff_size = 1024 * 10 task = None
end = False task_id = ''
queryset = CeleryTask.objects.all()
def get(self, request, *args, **kwargs): def get(self, request, *args, **kwargs):
mark = request.query_params.get("mark") or str(uuid.uuid4()) self.task_id = str(kwargs.get('pk'))
task = self.get_object() self.task = AsyncResult(self.task_id)
log_path = task.full_log_path return super().get(request, *args, **kwargs)
if not log_path or not os.path.isfile(log_path): def get_log_path(self):
return Response({"data": _("Waiting ...")}, status=203) new_path = get_celery_task_log_path(self.task_id)
if new_path and os.path.isfile(new_path):
with open(log_path, 'r') as f: return new_path
offset = cache.get(mark, 0) try:
f.seek(offset) task = CeleryTask.objects.get(id=self.task_id)
data = f.read(self.buff_size).replace('\n', '\r\n') except CeleryTask.DoesNotExist:
mark = str(uuid.uuid4()) return None
cache.set(mark, f.tell(), 5) return task.full_log_path
if data == '' and task.is_finished(): def is_file_finish_write(self):
self.end = True return self.task.ready()
return Response({"data": data, 'end': self.end, 'mark': mark})
class CeleryResultApi(generics.RetrieveAPIView): class CeleryResultApi(generics.RetrieveAPIView):
......
...@@ -10,6 +10,5 @@ class OpsConfig(AppConfig): ...@@ -10,6 +10,5 @@ class OpsConfig(AppConfig):
from orgs.models import Organization from orgs.models import Organization
from orgs.utils import set_current_org from orgs.utils import set_current_org
set_current_org(Organization.root()) set_current_org(Organization.root())
super().ready()
from .celery import signal_handler from .celery import signal_handler
super().ready()
# -*- coding: utf-8 -*-
#
from functools import wraps
# Module-level registries filled at import time by the decorators below and
# drained later by startup/shutdown handlers.
_need_registered_period_tasks = []
_after_app_ready_start_tasks = []
_after_app_shutdown_clean_periodic_tasks = []


def add_register_period_task(task):
    """Queue a periodic-task definition dict for later registration."""
    _need_registered_period_tasks.append(task)


def get_register_period_tasks():
    """Return every queued periodic-task definition (shared list)."""
    return _need_registered_period_tasks


def add_after_app_shutdown_clean_task(name):
    """Record a task name whose periodic entry is removed at shutdown."""
    _after_app_shutdown_clean_periodic_tasks.append(name)


def get_after_app_shutdown_clean_tasks():
    """Return task names to clean up after worker shutdown (shared list)."""
    return _after_app_shutdown_clean_periodic_tasks


def add_after_app_ready_task(name):
    """Record a task name to run once the worker is ready."""
    _after_app_ready_start_tasks.append(name)


def get_after_app_ready_tasks():
    """Return task names to run once the worker is ready (shared list)."""
    return _after_app_ready_start_tasks
def register_as_period_task(crontab=None, interval=None):
    """Decorator: queue the decorated task for periodic registration.

    Warning: the decorated task must accept no args and no kwargs.
    :param crontab: crontab expression, e.g. "* * * * *"
    :param interval: period in seconds, e.g. 60 * 60
    :return: the decorating function
    """
    if crontab is None and interval is None:
        raise SyntaxError("Must set crontab or interval one")

    def decorate(func):
        # Redundant with the check above (kept for safety / symmetry).
        if crontab is None and interval is None:
            raise SyntaxError("Interval and crontab must set one")
        # The celery task object does not exist yet when this decorator
        # runs, so derive the dotted task name from the function itself.
        task_name = '{func.__module__}.{func.__name__}'.format(func=func)
        add_register_period_task({
            task_name: {
                'task': task_name,
                'interval': interval,
                'crontab': crontab,
                'args': (),
                'enabled': True,
            }
        })

        @wraps(func)
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs)

        return wrapper

    return decorate
def after_app_ready_start(func):
    """Decorator: run the decorated task once after the worker is ready."""
    # The celery task is not created yet at decoration time, so we cannot
    # use func.name; build the dotted path by hand instead.
    task_name = '{func.__module__}.{func.__name__}'.format(func=func)
    if task_name not in _after_app_ready_start_tasks:
        add_after_app_ready_task(task_name)

    @wraps(func)
    def decorate(*args, **kwargs):
        return func(*args, **kwargs)

    return decorate
def after_app_shutdown_clean_periodic(func):
    """Decorator: delete this task's periodic entry at worker shutdown."""
    # The celery task is not created yet at decoration time, so we cannot
    # use func.name; build the dotted path by hand instead.
    task_name = '{func.__module__}.{func.__name__}'.format(func=func)
    if task_name not in _after_app_shutdown_clean_periodic_tasks:
        add_after_app_shutdown_clean_task(task_name)

    @wraps(func)
    def decorate(*args, **kwargs):
        return func(*args, **kwargs)

    return decorate
from logging import StreamHandler
from django.conf import settings
from celery import current_task
from celery.signals import task_prerun, task_postrun
from kombu import Connection, Exchange, Queue, Producer
from kombu.mixins import ConsumerMixin
from .utils import get_celery_task_log_path
# Direct exchange/queue pair used to ship celery task log lines between
# the producer (worker side) and any CeleryLoggerConsumer.
routing_key = 'celery_log'
celery_log_exchange = Exchange('celery_log_exchange', type='direct')
celery_log_queue = [Queue('celery_log', celery_log_exchange, routing_key=routing_key)]
class CeleryLoggerConsumer(ConsumerMixin):
    """Consume celery task log events published to the celery_log queue.

    Subclasses override the ``handle_task_*`` hooks; the base
    implementations are no-ops.
    """

    def __init__(self):
        self.connection = Connection(settings.CELERY_LOG_BROKER_URL)

    def get_consumers(self, Consumer, channel):
        # Accept both pickle and json payloads; every message goes
        # through process_task for dispatch.
        return [Consumer(queues=celery_log_queue,
                         accept=['pickle', 'json'],
                         callbacks=[self.process_task])
                ]

    def handle_task_start(self, task_id, message):
        pass

    def handle_task_end(self, task_id, message):
        pass

    def handle_task_log(self, task_id, msg, message):
        pass

    def process_task(self, body, message):
        # Dispatch on the action code set by CeleryLoggerProducer.
        action = body.get('action')
        task_id = body.get('task_id')
        msg = body.get('msg')
        if action == CeleryLoggerProducer.ACTION_TASK_LOG:
            self.handle_task_log(task_id, msg, message)
        elif action == CeleryLoggerProducer.ACTION_TASK_START:
            self.handle_task_start(task_id, message)
        elif action == CeleryLoggerProducer.ACTION_TASK_END:
            self.handle_task_end(task_id, message)
class CeleryLoggerProducer:
    """Publish celery task log events to the celery_log exchange."""

    # Action codes carried in each payload's 'action' field; mirrored by
    # CeleryLoggerConsumer.process_task.
    ACTION_TASK_START, ACTION_TASK_LOG, ACTION_TASK_END = range(3)

    def __init__(self):
        self.connection = Connection(settings.CELERY_LOG_BROKER_URL)

    @property
    def producer(self):
        # Fresh kombu Producer per access; the connection is reused.
        return Producer(self.connection)

    def publish(self, payload):
        self.producer.publish(
            payload, serializer='json', exchange=celery_log_exchange,
            declare=[celery_log_exchange], routing_key=routing_key
        )

    def log(self, task_id, msg):
        # Publish one log line for task_id.
        payload = {'task_id': task_id, 'msg': msg, 'action': self.ACTION_TASK_LOG}
        return self.publish(payload)

    def read(self):
        pass

    def flush(self):
        # No buffering; present for file-like compatibility.
        pass

    def task_end(self, task_id):
        payload = {'task_id': task_id, 'action': self.ACTION_TASK_END}
        return self.publish(payload)

    def task_start(self, task_id):
        payload = {'task_id': task_id, 'action': self.ACTION_TASK_START}
        return self.publish(payload)
class CeleryTaskLoggerHandler(StreamHandler):
    """Base logging handler that routes log records to per-celery-task sinks.

    Connects to celery's task_prerun/task_postrun signals so subclasses can
    open and close a per-task sink via handle_task_start/handle_task_end.
    """

    # Task logs are rendered in a web terminal (xterm), hence CRLF.
    terminator = '\r\n'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        task_prerun.connect(self.on_task_start)
        # NOTE(review): name says "start_end" but this is wired to
        # task_postrun, i.e. task end.
        task_postrun.connect(self.on_start_end)

    @staticmethod
    def get_current_task_id():
        # Uses root_id, so records emitted from subtasks presumably land in
        # the root task's sink -- confirm against callers.
        if not current_task:
            return
        task_id = current_task.request.root_id
        return task_id

    def on_task_start(self, sender, task_id, **kwargs):
        return self.handle_task_start(task_id)

    def on_start_end(self, sender, task_id, **kwargs):
        return self.handle_task_end(task_id)

    def after_task_publish(self, sender, body, **kwargs):
        pass

    def emit(self, record):
        # Only emit while inside a celery task; otherwise drop the record.
        task_id = self.get_current_task_id()
        if not task_id:
            return
        try:
            self.write_task_log(task_id, record)
            self.flush()
        except Exception:
            self.handleError(record)

    # Hooks for subclasses; the base implementations are no-ops.

    def write_task_log(self, task_id, msg):
        pass

    def handle_task_start(self, task_id):
        pass

    def handle_task_end(self, task_id):
        pass
class CeleryTaskMQLoggerHandler(CeleryTaskLoggerHandler):
    """Handler variant that ships task log lines to a message queue."""

    def __init__(self):
        self.producer = CeleryLoggerProducer()
        # stream=None: records never go through StreamHandler's own stream;
        # emit() in the base class routes them to write_task_log instead.
        super().__init__(stream=None)

    def write_task_log(self, task_id, record):
        msg = self.format(record)
        self.producer.log(task_id, msg)

    def flush(self):
        self.producer.flush()
class CeleryTaskFileHandler(CeleryTaskLoggerHandler):
    """Handler variant that writes each celery task's log to its own file.

    A file is opened on task start and closed on task end; records emitted
    outside of a running task are silently dropped.
    """

    def __init__(self):
        # File object for the currently running task; None when no task
        # is active (or after the last one has ended).
        self.f = None
        super().__init__(stream=None)

    def emit(self, record):
        msg = self.format(record)
        # Guard against both "no task started yet" and "task already
        # ended": writing to a closed file would raise ValueError.
        if not self.f or self.f.closed:
            return
        self.f.write(msg)
        self.f.write(self.terminator)
        self.flush()

    def flush(self):
        self.f and self.f.flush()

    def handle_task_start(self, task_id):
        # Append mode so a retried task id keeps its earlier output.
        log_path = get_celery_task_log_path(task_id)
        self.f = open(log_path, 'a')

    def handle_task_end(self, task_id):
        if self.f and not self.f.closed:
            self.f.close()
        # Reset so stray emits after task end are dropped instead of
        # hitting a closed file object (bug in the original: self.f kept
        # the closed file, which is truthy).
        self.f = None
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
import os import logging
import datetime
import sys
import time
from django.conf import settings
from django.utils import timezone
from django.core.cache import cache from django.core.cache import cache
from django.db import transaction
from celery import subtask from celery import subtask
from celery.signals import worker_ready, worker_shutdown, task_prerun, \ from celery.signals import (
task_postrun, after_task_publish worker_ready, worker_shutdown, after_setup_logger
)
from kombu.utils.encoding import safe_str
from django_celery_beat.models import PeriodicTask from django_celery_beat.models import PeriodicTask
from common.utils import get_logger, TeeObj, get_object_or_none from common.utils import get_logger
from common.const import celery_task_pre_key from .decorator import get_after_app_ready_tasks, get_after_app_shutdown_clean_tasks
from .utils import get_after_app_ready_tasks, get_after_app_shutdown_clean_tasks from .logger import CeleryTaskFileHandler
from ..models import CeleryTask
logger = get_logger(__file__) logger = get_logger(__file__)
safe_str = lambda x: x
@worker_ready.connect @worker_ready.connect
def on_app_ready(sender=None, headers=None, body=None, **kwargs): def on_app_ready(sender=None, headers=None, **kwargs):
if cache.get("CELERY_APP_READY", 0) == 1: if cache.get("CELERY_APP_READY", 0) == 1:
return return
cache.set("CELERY_APP_READY", 1, 10) cache.set("CELERY_APP_READY", 1, 10)
tasks = get_after_app_ready_tasks() tasks = get_after_app_ready_tasks()
logger.debug("Start need start task: [{}]".format( logger.debug("Work ready signal recv")
", ".join(tasks)) logger.debug("Start need start task: [{}]".format(", ".join(tasks)))
)
for task in tasks: for task in tasks:
subtask(task).delay() subtask(task).delay()
@worker_shutdown.connect @worker_shutdown.connect
def after_app_shutdown(sender=None, headers=None, body=None, **kwargs): def after_app_shutdown_periodic_tasks(sender=None, **kwargs):
if cache.get("CELERY_APP_SHUTDOWN", 0) == 1: if cache.get("CELERY_APP_SHUTDOWN", 0) == 1:
return return
cache.set("CELERY_APP_SHUTDOWN", 1, 10) cache.set("CELERY_APP_SHUTDOWN", 1, 10)
tasks = get_after_app_shutdown_clean_tasks() tasks = get_after_app_shutdown_clean_tasks()
logger.debug("App shutdown signal recv") logger.debug("Worker shutdown signal recv")
logger.debug("Clean need cleaned period tasks: [{}]".format( logger.debug("Clean period tasks: [{}]".format(', '.join(tasks)))
', '.join(tasks))
)
PeriodicTask.objects.filter(name__in=tasks).delete() PeriodicTask.objects.filter(name__in=tasks).delete()
@after_task_publish.connect @after_setup_logger.connect
def after_task_publish_signal_handler(sender, headers=None, **kwargs): def add_celery_logger_handler(sender=None, logger=None, loglevel=None, format=None, **kwargs):
CeleryTask.objects.create( if not logger:
id=headers["id"], status=CeleryTask.WAITING, name=headers["task"]
)
cache.set(headers["id"], True, 3600)
@task_prerun.connect
def pre_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
time.sleep(0.1)
for i in range(5):
if cache.get(task_id, False):
break
else:
time.sleep(0.1)
continue
t = get_object_or_none(CeleryTask, id=task_id)
if t is None:
logger.warn("Not get the task: {}".format(task_id))
return
now = datetime.datetime.now().strftime("%Y-%m-%d")
log_path = os.path.join(now, task_id + '.log')
full_path = os.path.join(CeleryTask.LOG_DIR, log_path)
if not os.path.exists(os.path.dirname(full_path)):
os.makedirs(os.path.dirname(full_path))
with transaction.atomic():
t.date_start = timezone.now()
t.status = CeleryTask.RUNNING
t.log_path = log_path
t.save()
f = open(full_path, 'w')
tee = TeeObj(f)
sys.stdout = tee
task.log_f = tee
@task_postrun.connect
def post_run_task_signal_handler(sender, task_id=None, task=None, **kwargs):
t = get_object_or_none(CeleryTask, id=task_id)
if t is None:
logger.warn("Not get the task: {}".format(task_id))
return return
with transaction.atomic(): handler = CeleryTaskFileHandler()
t.status = CeleryTask.FINISHED handler.setLevel(loglevel)
t.date_finished = timezone.now() formatter = logging.Formatter(format)
t.save() handler.setFormatter(formatter)
task.log_f.flush() logger.addHandler(handler)
sys.stdout = task.log_f.origin_stdout
task.log_f.close()
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
import json import json
from functools import wraps import os
from django.conf import settings
from django.db.utils import ProgrammingError, OperationalError from django.db.utils import ProgrammingError, OperationalError
from django.core.cache import cache
from django_celery_beat.models import PeriodicTask, IntervalSchedule, CrontabSchedule from django_celery_beat.models import PeriodicTask, IntervalSchedule, CrontabSchedule
def add_register_period_task(name):
    # Append *name* to the cache-backed registry of periodic tasks.
    # NOTE(review): get/append/set is not atomic; concurrent writers can
    # lose entries.
    key = "__REGISTER_PERIODIC_TASKS"
    value = cache.get(key, [])
    value.append(name)
    cache.set(key, value)
def get_register_period_tasks():
    # Names recorded via add_register_period_task; [] when unset.
    key = "__REGISTER_PERIODIC_TASKS"
    return cache.get(key, [])
def add_after_app_shutdown_clean_task(name):
    # Record a task name to clean up after app shutdown (cache-backed).
    # NOTE(review): get/append/set is not atomic; concurrent writers can
    # lose entries.
    key = "__AFTER_APP_SHUTDOWN_CLEAN_TASKS"
    value = cache.get(key, [])
    value.append(name)
    cache.set(key, value)
def get_after_app_shutdown_clean_tasks():
    # Names recorded via add_after_app_shutdown_clean_task; [] when unset.
    key = "__AFTER_APP_SHUTDOWN_CLEAN_TASKS"
    return cache.get(key, [])
def add_after_app_ready_task(name):
    # Record a task name to run after the app is ready (cache-backed).
    # NOTE(review): get/append/set is not atomic; concurrent writers can
    # lose entries.
    key = "__AFTER_APP_READY_RUN_TASKS"
    value = cache.get(key, [])
    value.append(name)
    cache.set(key, value)
def get_after_app_ready_tasks():
    # Names recorded via add_after_app_ready_task; [] when unset.
    key = "__AFTER_APP_READY_RUN_TASKS"
    return cache.get(key, [])
def create_or_update_celery_periodic_tasks(tasks): def create_or_update_celery_periodic_tasks(tasks):
""" """
:param tasks: { :param tasks: {
...@@ -123,63 +87,10 @@ def delete_celery_periodic_task(task_name): ...@@ -123,63 +87,10 @@ def delete_celery_periodic_task(task_name):
PeriodicTask.objects.filter(name=task_name).delete() PeriodicTask.objects.filter(name=task_name).delete()
def register_as_period_task(crontab=None, interval=None): def get_celery_task_log_path(task_id):
""" task_id = str(task_id)
Warning: Task must be have not any args and kwargs rel_path = os.path.join(task_id[0], task_id[1], task_id + '.log')
:param crontab: "* * * * *" path = os.path.join(settings.CELERY_LOG_DIR, rel_path)
:param interval: 60*60*60 os.makedirs(os.path.dirname(path), exist_ok=True)
:return: return path
"""
if crontab is None and interval is None:
raise SyntaxError("Must set crontab or interval one")
def decorate(func):
if crontab is None and interval is None:
raise SyntaxError("Interval and crontab must set one")
# Because when this decorator run, the task was not created,
# So we can't use func.name
name = '{func.__module__}.{func.__name__}'.format(func=func)
if name not in get_register_period_tasks():
create_or_update_celery_periodic_tasks({
name: {
'task': name,
'interval': interval,
'crontab': crontab,
'args': (),
'enabled': True,
}
})
add_register_period_task(name)
@wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
return decorate
def after_app_ready_start(func):
    """Decorator: schedule the decorated task to run after worker start."""
    # The celery task object is not created yet when this decorator runs,
    # so we cannot use func.name; build the dotted path manually.
    name = '{func.__module__}.{func.__name__}'.format(func=func)
    if name not in get_after_app_ready_tasks():
        add_after_app_ready_task(name)

    @wraps(func)
    def decorate(*args, **kwargs):
        return func(*args, **kwargs)

    return decorate
def after_app_shutdown_clean(func):
    """Decorator: mark the decorated task for cleanup at worker shutdown."""
    # The celery task object is not created yet when this decorator runs,
    # so we cannot use func.name; build the dotted path manually.
    name = '{func.__module__}.{func.__name__}'.format(func=func)
    if name not in get_after_app_shutdown_clean_tasks():
        add_after_app_shutdown_clean_task(name)

    @wraps(func)
    def decorate(*args, **kwargs):
        return func(*args, **kwargs)

    return decorate
...@@ -220,10 +220,10 @@ class AdHoc(models.Model): ...@@ -220,10 +220,10 @@ class AdHoc(models.Model):
time_start = time.time() time_start = time.time()
try: try:
date_start = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') date_start = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print("{} Start task: {}\r\n".format(date_start, self.task.name)) print(_("{} Start task: {}").format(date_start, self.task.name))
raw, summary = self._run_only() raw, summary = self._run_only()
date_end = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S') date_end = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
print("\r\n{} Task finished".format(date_end)) print(_("{} Task finish").format(date_end))
history.is_finished = True history.is_finished = True
if summary.get('dark'): if summary.get('dark'):
history.is_success = False history.is_success = False
...@@ -235,7 +235,6 @@ class AdHoc(models.Model): ...@@ -235,7 +235,6 @@ class AdHoc(models.Model):
except Exception as e: except Exception as e:
return {}, {"dark": {"all": str(e)}, "contacted": []} return {}, {"dark": {"all": str(e)}, "contacted": []}
finally: finally:
# f.close()
history.date_finished = timezone.now() history.date_finished = timezone.now()
history.timedelta = time.time() - time_start history.timedelta = time.time() - time_start
history.save() history.save()
......
...@@ -8,6 +8,8 @@ from django.utils.translation import ugettext_lazy as _ ...@@ -8,6 +8,8 @@ from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext from django.utils.translation import ugettext
from django.db import models from django.db import models
from orgs.models import Organization
from ..ansible.runner import CommandRunner from ..ansible.runner import CommandRunner
from ..inventory import JMSInventory from ..inventory import JMSInventory
...@@ -53,6 +55,8 @@ class CommandExecution(models.Model): ...@@ -53,6 +55,8 @@ class CommandExecution(models.Model):
def run(self): def run(self):
print('-'*10 + ' ' + ugettext('Task start') + ' ' + '-'*10) print('-'*10 + ' ' + ugettext('Task start') + ' ' + '-'*10)
org = Organization.get_instance(self.run_as.org_id)
org.change_to()
self.date_start = timezone.now() self.date_start = timezone.now()
ok, msg = self.run_as.is_command_can_run(self.command) ok, msg = self.run_as.is_command_can_run(self.command)
if ok: if ok:
......
# coding: utf-8 # coding: utf-8
import os
import subprocess
from django.conf import settings
from celery import shared_task, subtask from celery import shared_task, subtask
from django.utils import timezone
from common.utils import get_logger, get_object_or_none from common.utils import get_logger, get_object_or_none
from .celery.utils import register_as_period_task, after_app_shutdown_clean from .celery.decorator import (
from .models import Task, CommandExecution register_as_period_task, after_app_shutdown_clean_periodic,
after_app_ready_start
)
from .celery.utils import create_or_update_celery_periodic_tasks
from .models import Task, CommandExecution, CeleryTask
logger = get_logger(__file__) logger = get_logger(__file__)
...@@ -36,8 +45,8 @@ def run_command_execution(cid, **kwargs): ...@@ -36,8 +45,8 @@ def run_command_execution(cid, **kwargs):
@shared_task @shared_task
@after_app_shutdown_clean_periodic
@register_as_period_task(interval=3600*24) @register_as_period_task(interval=3600*24)
@after_app_shutdown_clean
def clean_tasks_adhoc_period(): def clean_tasks_adhoc_period():
logger.debug("Start clean task adhoc and run history") logger.debug("Start clean task adhoc and run history")
tasks = Task.objects.all() tasks = Task.objects.all()
...@@ -48,11 +57,42 @@ def clean_tasks_adhoc_period(): ...@@ -48,11 +57,42 @@ def clean_tasks_adhoc_period():
ad.delete() ad.delete()
@shared_task
@after_app_shutdown_clean_periodic
@register_as_period_task(interval=3600*24)
def clean_celery_tasks_period():
    """Daily periodic task: purge CeleryTask records (and their log files)
    older than ``expire_days``, plus tasks that never started."""
    expire_days = 30
    logger.debug("Start clean celery task history")
    one_month_ago = timezone.now() - timezone.timedelta(days=expire_days)
    tasks = CeleryTask.objects.filter(date_start__lt=one_month_ago)
    for task in tasks:
        if os.path.isfile(task.full_log_path):
            try:
                os.remove(task.full_log_path)
            except (FileNotFoundError, PermissionError):
                # Best effort: the DB record is deleted either way.
                pass
        task.delete()
    # Tasks that never started carry no useful history; drop them too.
    tasks = CeleryTask.objects.filter(date_start__isnull=True)
    tasks.delete()
    # Sweep orphaned .log files on disk older than expire_days.
    # NOTE(review): shell string built from trusted settings, but keep
    # CELERY_LOG_DIR free of shell metacharacters.
    command = "find %s -mtime +%s -name '*.log' -type f -exec rm -f {} \\;" % (
        settings.CELERY_LOG_DIR, expire_days
    )
    subprocess.call(command, shell=True)
@shared_task
@after_app_ready_start
def create_or_update_registered_periodic_tasks():
    """Run once after worker start: persist every periodic-task definition
    queued by the register_as_period_task decorator."""
    # Imported lazily to avoid a circular import at module load time.
    from .celery.decorator import get_register_period_tasks
    for task in get_register_period_tasks():
        create_or_update_celery_periodic_tasks(task)
@shared_task @shared_task
def hello(name, callback=None): def hello(name, callback=None):
import time
time.sleep(10)
print("Hello {}".format(name)) print("Hello {}".format(name))
if callback is not None:
subtask(callback).delay("Guahongwei")
@shared_task @shared_task
......
{% load static %} {% load static %}
{% load i18n %}
<head> <head>
<title>term.js</title> <title>{% trans 'Task log' %}</title>
<script src="{% static 'js/jquery-2.1.1.js' %}"></script> <script src="{% static 'js/jquery-2.1.1.js' %}"></script>
<script src="{% static 'js/plugins/xterm/xterm.js' %}"></script> <script src="{% static 'js/plugins/xterm/xterm.js' %}"></script>
<link rel="stylesheet" href="{% static 'js/plugins/xterm/xterm.css' %}" /> <link rel="stylesheet" href="{% static 'js/plugins/xterm/xterm.css' %}" />
...@@ -15,14 +16,14 @@ ...@@ -15,14 +16,14 @@
} }
</style> </style>
</head> </head>
<div id="term" style="height: 100%;width: 100%"> <div id="term" style="height: 100%;width: 100%">
</div> </div>
<script> <script>
var rowHeight = 18; var rowHeight = 18;
var colWidth = 10; var colWidth = 10;
var mark = ''; var mark = '';
var url = "{% url 'api-ops:celery-task-log' pk=object.id %}"; var url = "{% url 'api-ops:celery-task-log' pk=task_id %}";
var term; var term;
var end = false; var end = false;
var error = false; var error = false;
...@@ -35,9 +36,9 @@ ...@@ -35,9 +36,9 @@
{#colWidth = 1.00 * t.width() / 6;#} {#colWidth = 1.00 * t.width() / 6;#}
} }
function resize() { function resize() {
var rows = Math.floor(window.innerHeight / rowHeight) - 1; {#var rows = Math.floor(window.innerHeight / rowHeight) - 1;#}
var cols = Math.floor(window.innerWidth / colWidth) - 2; {#var cols = Math.floor(window.innerWidth / colWidth) - 2;#}
term.resize(cols, rows); {#term.resize(cols, rows);#}
} }
function requestAndWrite() { function requestAndWrite() {
if (!end && success) { if (!end && success) {
...@@ -73,7 +74,7 @@ ...@@ -73,7 +74,7 @@
disableStdin: true disableStdin: true
}); });
term.open(document.getElementById('term')); term.open(document.getElementById('term'));
term.resize(80, 24); term.resize(90, 32);
resize(); resize();
term.on('data', function (data) { term.on('data', function (data) {
{#term.write(data.replace('\r', '\r\n'))#} {#term.write(data.replace('\r', '\r\n'))#}
......
...@@ -141,10 +141,10 @@ function onCheck(e, treeId, treeNode) { ...@@ -141,10 +141,10 @@ function onCheck(e, treeId, treeNode) {
var nodes_names = nodes.map(function (node) { var nodes_names = nodes.map(function (node) {
return node.name; return node.name;
}); });
var message = "已选择资产: "; var message = "{% trans 'Selected assets' %}" + ': ';
message += nodes_names.join(", "); message += nodes_names.join(", ");
message += "\r\n"; message += "\r\n";
message += "总共: " + nodes_names.length + "个\r\n"; message += "{% trans 'In total' %}" + ': ' + nodes_names.length + "个\r\n";
term.clear(); term.clear();
term.write(message) term.write(message)
} }
...@@ -179,7 +179,7 @@ function initResultTerminal() { ...@@ -179,7 +179,7 @@ function initResultTerminal() {
} }
}); });
term.open(document.getElementById('term')); term.open(document.getElementById('term'));
term.write("选择左侧资产, 选择运行的系统用户,批量执行命令\r\n") term.write("{% trans 'Select the left asset, select the running system user, execute command in batch' %}" + "\r\n")
} }
function wrapperError(msg) { function wrapperError(msg) {
...@@ -197,15 +197,15 @@ function execute() { ...@@ -197,15 +197,15 @@ function execute() {
return node.id; return node.id;
}); });
if (hosts.length === 0) { if (hosts.length === 0) {
term.write(wrapperError('没有选中资产')); term.write(wrapperError("{% trans 'Unselected assets' %}"));
return return
} }
if (!command) { if (!command) {
term.write(wrapperError('没有输入命令')); term.write(wrapperError("{% trans 'No input command' %}"));
return return
} }
if (!run_as) { if (!run_as) {
term.write(wrapperError('没有选择运行用户')); term.write(wrapperError("{% trans 'No system user was selected' %}"));
return return
} }
var data = { var data = {
......
...@@ -48,7 +48,7 @@ def update_or_create_ansible_task( ...@@ -48,7 +48,7 @@ def update_or_create_ansible_task(
hosts_same = old_hosts == new_hosts hosts_same = old_hosts == new_hosts
if not adhoc or adhoc != new_adhoc or not hosts_same: if not adhoc or adhoc != new_adhoc or not hosts_same:
logger.info(_("Update task content: {}").format(task_name)) logger.debug(_("Update task content: {}").format(task_name))
new_adhoc.save() new_adhoc.save()
new_adhoc.hosts.set(hosts) new_adhoc.hosts.set(hosts)
task.latest_adhoc = new_adhoc task.latest_adhoc = new_adhoc
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
from django.views.generic import DetailView from django.views.generic import DetailView, TemplateView
from common.permissions import AdminUserRequiredMixin from common.permissions import AdminUserRequiredMixin
from ..models import CeleryTask from ..models import CeleryTask
...@@ -9,6 +9,10 @@ from ..models import CeleryTask ...@@ -9,6 +9,10 @@ from ..models import CeleryTask
__all__ = ['CeleryTaskLogView'] __all__ = ['CeleryTaskLogView']
class CeleryTaskLogView(AdminUserRequiredMixin, DetailView): class CeleryTaskLogView(AdminUserRequiredMixin, TemplateView):
template_name = 'ops/celery_task_log.html' template_name = 'ops/celery_task_log.html'
model = CeleryTask
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
context.update({'task_id': self.kwargs.get('pk')})
return context
...@@ -66,3 +66,4 @@ class OrgMembershipUsersViewSet(OrgMembershipModelViewSetMixin, BulkModelViewSet ...@@ -66,3 +66,4 @@ class OrgMembershipUsersViewSet(OrgMembershipModelViewSetMixin, BulkModelViewSet
serializer_class = OrgMembershipUserSerializer serializer_class = OrgMembershipUserSerializer
membership_class = Organization.users.through membership_class = Organization.users.through
permission_classes = (IsSuperUserOrAppUser, ) permission_classes = (IsSuperUserOrAppUser, )
...@@ -4,7 +4,7 @@ ...@@ -4,7 +4,7 @@
from werkzeug.local import Local from werkzeug.local import Local
from django.db import models from django.db import models
from django.utils.translation import ugettext_lazy as _ from django.utils.translation import ugettext_lazy as _
from django.shortcuts import redirect from django.shortcuts import redirect, get_object_or_404
from django.forms import ModelForm from django.forms import ModelForm
from django.http.response import HttpResponseForbidden from django.http.response import HttpResponseForbidden
from django.core.exceptions import ValidationError from django.core.exceptions import ValidationError
...@@ -191,7 +191,7 @@ class OrgMembershipModelViewSetMixin: ...@@ -191,7 +191,7 @@ class OrgMembershipModelViewSetMixin:
http_method_names = ['get', 'post', 'delete', 'head', 'options'] http_method_names = ['get', 'post', 'delete', 'head', 'options']
def dispatch(self, request, *args, **kwargs): def dispatch(self, request, *args, **kwargs):
self.org = Organization.objects.get(pk=kwargs.get('org_id')) self.org = get_object_or_404(Organization, pk=kwargs.get('org_id'))
return super().dispatch(request, *args, **kwargs) return super().dispatch(request, *args, **kwargs)
def get_serializer_context(self): def get_serializer_context(self):
...@@ -200,4 +200,5 @@ class OrgMembershipModelViewSetMixin: ...@@ -200,4 +200,5 @@ class OrgMembershipModelViewSetMixin:
return context return context
def get_queryset(self): def get_queryset(self):
return self.membership_class.objects.filter(organization=self.org) queryset = self.membership_class.objects.filter(organization=self.org)
return queryset
...@@ -122,3 +122,7 @@ class Organization(models.Model): ...@@ -122,3 +122,7 @@ class Organization(models.Model):
return True return True
else: else:
return False return False
def change_to(self):
    """Make this organization the current one for the active context."""
    # Imported lazily to avoid a circular import (utils imports models).
    from .utils import set_current_org
    set_current_org(self)
...@@ -9,11 +9,16 @@ from .. import api ...@@ -9,11 +9,16 @@ from .. import api
app_name = 'orgs' app_name = 'orgs'
router = DefaultRouter() router = DefaultRouter()
# 将会删除
router.register(r'org/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/admins', router.register(r'org/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/admins',
api.OrgMembershipAdminsViewSet, 'membership-admins') api.OrgMembershipAdminsViewSet, 'membership-admins')
router.register(r'org/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/users', router.register(r'org/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/users',
api.OrgMembershipUsersViewSet, 'membership-users'), api.OrgMembershipUsersViewSet, 'membership-users'),
# 替换为这个
router.register(r'orgs/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/admins',
api.OrgMembershipAdminsViewSet, 'membership-admins-2')
router.register(r'orgs/(?P<org_id>[0-9a-zA-Z\-]{36})/membership/users',
api.OrgMembershipUsersViewSet, 'membership-users-2'),
router.register(r'orgs', api.OrgViewSet, 'org') router.register(r'orgs', api.OrgViewSet, 'org')
......
...@@ -2,21 +2,26 @@ ...@@ -2,21 +2,26 @@
# #
from django.shortcuts import get_object_or_404 from django.shortcuts import get_object_or_404
from django.utils import timezone
from django.db.models import Q
from rest_framework.views import APIView, Response from rest_framework.views import APIView, Response
from rest_framework.generics import ListAPIView, get_object_or_404, \ from rest_framework.generics import (
RetrieveUpdateAPIView ListAPIView, get_object_or_404, RetrieveUpdateAPIView
)
from rest_framework import viewsets from rest_framework import viewsets
from rest_framework.pagination import LimitOffsetPagination from rest_framework.pagination import LimitOffsetPagination
from common.utils import set_or_append_attr_bulk
from common.permissions import IsValidUser, IsOrgAdmin, IsOrgAdminOrAppUser from common.permissions import IsValidUser, IsOrgAdmin, IsOrgAdminOrAppUser
from common.tree import TreeNode, TreeNodeSerializer from common.tree import TreeNode, TreeNodeSerializer
from common.utils import get_object_or_none
from orgs.mixins import RootOrgViewMixin from orgs.mixins import RootOrgViewMixin
from orgs.utils import set_to_root_org from orgs.utils import set_to_root_org
from .utils import AssetPermissionUtil from .utils import AssetPermissionUtil
from .models import AssetPermission from .models import AssetPermission
from .hands import AssetGrantedSerializer, User, UserGroup, Asset, Node, \ from .hands import (
AssetGrantedSerializer, User, UserGroup, Asset, Node,
SystemUser, NodeSerializer SystemUser, NodeSerializer
)
from . import serializers from . import serializers
from .mixins import AssetsFilterMixin from .mixins import AssetsFilterMixin
...@@ -38,6 +43,7 @@ class AssetPermissionViewSet(viewsets.ModelViewSet): ...@@ -38,6 +43,7 @@ class AssetPermissionViewSet(viewsets.ModelViewSet):
queryset = AssetPermission.objects.all() queryset = AssetPermission.objects.all()
serializer_class = serializers.AssetPermissionCreateUpdateSerializer serializer_class = serializers.AssetPermissionCreateUpdateSerializer
pagination_class = LimitOffsetPagination pagination_class = LimitOffsetPagination
filter_fields = ['name']
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
def get_serializer_class(self): def get_serializer_class(self):
...@@ -45,36 +51,122 @@ class AssetPermissionViewSet(viewsets.ModelViewSet): ...@@ -45,36 +51,122 @@ class AssetPermissionViewSet(viewsets.ModelViewSet):
return serializers.AssetPermissionListSerializer return serializers.AssetPermissionListSerializer
return self.serializer_class return self.serializer_class
def get_queryset(self): def filter_valid(self, queryset):
queryset = super().get_queryset().all() valid = self.request.query_params.get('is_valid', None)
search = self.request.query_params.get('search') if valid is None:
asset_id = self.request.query_params.get('asset') return queryset
node_id = self.request.query_params.get('node') if valid in ['0', 'N', 'false', 'False']:
inherit_nodes = set() valid = False
else:
valid = True
now = timezone.now()
if valid:
queryset = queryset.filter(is_active=True).filter(
date_start__lt=now, date_expired__gt=now,
)
else:
queryset = queryset.filter(
Q(is_active=False) |
Q(date_start__gt=now) |
Q(date_expired__lt=now)
)
return queryset
if search: def filter_system_user(self, queryset):
queryset = queryset.filter(name__icontains=search) system_user_id = self.request.query_params.get('system_user_id')
system_user_name = self.request.query_params.get('system_user')
if system_user_id:
system_user = get_object_or_none(SystemUser, pk=system_user_id)
elif system_user_name:
system_user = get_object_or_none(SystemUser, name=system_user_name)
else:
return queryset
if not system_user:
return queryset.none()
queryset = queryset.filter(system_users=system_user)
return queryset
if not asset_id and not node_id: def filter_node(self, queryset):
node_id = self.request.query_params.get('node_id')
node_name = self.request.query_params.get('node')
if node_id:
node = get_object_or_none(Node, pk=node_id)
elif node_name:
node = get_object_or_none(Node, name=node_name)
else:
return queryset return queryset
if not node:
return queryset.none()
nodes = node.get_ancestor(with_self=True)
queryset = queryset.filter(nodes__in=nodes)
return queryset
permissions = set() def filter_asset(self, queryset):
asset_id = self.request.query_params.get('asset_id')
hostname = self.request.query_params.get('hostname')
ip = self.request.query_params.get('ip')
if asset_id: if asset_id:
asset = get_object_or_404(Asset, pk=asset_id) assets = Asset.objects.filter(pk=asset_id)
permissions = set(queryset.filter(assets=asset)) elif hostname:
assets = Asset.objects.filter(hostname=hostname)
elif ip:
assets = Asset.objects.filter(ip=ip)
else:
return queryset
if not assets:
return queryset.none()
inherit_nodes = set()
for asset in assets:
for node in asset.nodes.all(): for node in asset.nodes.all():
inherit_nodes.update(set(node.get_ancestor(with_self=True))) inherit_nodes.update(set(node.get_ancestor(with_self=True)))
elif node_id: queryset = queryset.filter(Q(assets__in=assets) | Q(nodes__in=inherit_nodes))
node = get_object_or_404(Node, pk=node_id) return queryset
permissions = set(queryset.filter(nodes=node))
inherit_nodes = node.get_ancestor() def filter_user(self, queryset):
user_id = self.request.query_params.get('user_id')
username = self.request.query_params.get('username')
if user_id:
user = get_object_or_none(User, pk=user_id)
elif username:
user = get_object_or_none(User, username=username)
else:
return queryset
if not user:
return queryset.none()
def filter_user_group(self, queryset):
user_group_id = self.request.query_params.get('user_group_id')
user_group_name = self.request.query_params.get('user_group')
if user_group_id:
group = get_object_or_none(UserGroup, pk=user_group_id)
elif user_group_name:
group = get_object_or_none(UserGroup, name=user_group_name)
else:
return queryset
if not group:
return queryset.none()
queryset = queryset.filter(user_groups=group)
return queryset
for n in inherit_nodes: def filter_keyword(self, queryset):
_permissions = queryset.filter(nodes=n) keyword = self.request.query_params.get('search')
set_or_append_attr_bulk(_permissions, "inherit", n.value) if not keyword:
permissions.update(_permissions) return queryset
queryset = queryset.filter(name__icontains=keyword)
return queryset
return list(permissions) def filter_queryset(self, queryset):
queryset = super().filter_queryset(queryset)
queryset = self.filter_valid(queryset)
queryset = self.filter_keyword(queryset)
queryset = self.filter_asset(queryset)
queryset = self.filter_node(queryset)
queryset = self.filter_system_user(queryset)
queryset = self.filter_user_group(queryset)
return queryset
def get_queryset(self):
return self.queryset.all()
class UserGrantedAssetsApi(AssetsFilterMixin, ListAPIView): class UserGrantedAssetsApi(AssetsFilterMixin, ListAPIView):
......
...@@ -51,9 +51,15 @@ class AssetPermission(OrgModelMixin): ...@@ -51,9 +51,15 @@ class AssetPermission(OrgModelMixin):
def id_str(self): def id_str(self):
return str(self.id) return str(self.id)
@property
def is_expired(self):
if self.date_expired > timezone.now() > self.date_start:
return False
return True
@property @property
def is_valid(self): def is_valid(self):
if self.date_expired > timezone.now() > self.date_start and self.is_active: if not self.is_expired and self.is_active:
return True return True
return False return False
......
...@@ -28,19 +28,13 @@ class AssetPermissionListSerializer(serializers.ModelSerializer): ...@@ -28,19 +28,13 @@ class AssetPermissionListSerializer(serializers.ModelSerializer):
assets = StringManyToManyField(many=True, read_only=True) assets = StringManyToManyField(many=True, read_only=True)
nodes = StringManyToManyField(many=True, read_only=True) nodes = StringManyToManyField(many=True, read_only=True)
system_users = StringManyToManyField(many=True, read_only=True) system_users = StringManyToManyField(many=True, read_only=True)
inherit = serializers.SerializerMethodField() is_valid = serializers.BooleanField()
is_expired = serializers.BooleanField()
class Meta: class Meta:
model = AssetPermission model = AssetPermission
fields = '__all__' fields = '__all__'
@staticmethod
def get_inherit(obj):
if hasattr(obj, 'inherit'):
return obj.inherit
else:
return None
class AssetPermissionUpdateUserSerializer(serializers.ModelSerializer): class AssetPermissionUpdateUserSerializer(serializers.ModelSerializer):
......
...@@ -56,7 +56,7 @@ ...@@ -56,7 +56,7 @@
<th class="text-center">{% trans 'Asset' %}</th> <th class="text-center">{% trans 'Asset' %}</th>
<th class="text-center">{% trans 'Node'%}</th> <th class="text-center">{% trans 'Node'%}</th>
<th class="text-center">{% trans 'System user' %}</th> <th class="text-center">{% trans 'System user' %}</th>
<th class="text-center">{% trans 'Active' %}</th> <th class="text-center">{% trans 'Validity' %}</th>
<th class="text-center" >{% trans 'Action' %}</th> <th class="text-center" >{% trans 'Action' %}</th>
</tr> </tr>
</thead> </thead>
...@@ -67,6 +67,17 @@ ...@@ -67,6 +67,17 @@
</div> </div>
</div> </div>
</div> </div>
<ul class="dropdown-menu search-help">
<li><a class="search-item" data-value="name">{% trans 'Name' %}</a></li>
<li><a class="search-item" data-value="is_valid">{% trans 'Validity' %}</a></li>
<li><a class="search-item" data-value="username">{% trans 'Username' %}</a></li>
<li><a class="search-item" data-value="user_group">{% trans 'User group' %}</a></li>
<li><a class="search-item" data-value="ip">IP</a></li>
<li><a class="search-item" data-value="hostname">{% trans 'Hostname' %}</a></li>
<li><a class="search-item" data-value="node">{% trans 'Node' %}</a></li>
<li><a class="search-item" data-value="system_user">{% trans 'System user' %}</a></li>
</ul>
{% endblock %} {% endblock %}
{% block custom_foot_js %} {% block custom_foot_js %}
...@@ -79,11 +90,11 @@ function onSelected(event, treeNode) { ...@@ -79,11 +90,11 @@ function onSelected(event, treeNode) {
setCookie('node_selected', treeNode.id); setCookie('node_selected', treeNode.id);
var url = table.ajax.url(); var url = table.ajax.url();
if (treeNode.meta.type === 'node') { if (treeNode.meta.type === 'node') {
url = setUrlParam(url, 'asset', ""); url = setUrlParam(url, 'asset_id', "");
url = setUrlParam(url, 'node', treeNode.meta.node.id) url = setUrlParam(url, 'node_id', treeNode.meta.node.id)
} else { } else {
url = setUrlParam(url, 'node', ""); url = setUrlParam(url, 'node_id', "");
url = setUrlParam(url, 'asset', treeNode.meta.asset.id) url = setUrlParam(url, 'asset_id', treeNode.meta.asset.id)
} }
setCookie('node_selected', treeNode.node_id); setCookie('node_selected', treeNode.node_id);
table.ajax.url(url); table.ajax.url(url);
...@@ -178,7 +189,7 @@ function initTable() { ...@@ -178,7 +189,7 @@ function initTable() {
{data: "id"}, {data: "name"}, {data: "users"}, {data: "id"}, {data: "name"}, {data: "users"},
{data: "user_groups"}, {data: "assets"}, {data: "user_groups"}, {data: "assets"},
{data: "nodes"}, {data: "system_users"}, {data: "nodes"}, {data: "system_users"},
{data: "is_active", orderable: false}, {data: "id", orderable: false} {data: "is_valid", orderable: false}, {data: "id", orderable: false}
], ],
select: {}, select: {},
op_html: $('#actions').html() op_html: $('#actions').html()
...@@ -231,6 +242,7 @@ function toggle() { ...@@ -231,6 +242,7 @@ function toggle() {
$(document).ready(function(){ $(document).ready(function(){
initTable(); initTable();
initTree(); initTree();
}) })
.on('click', '.btn-del', function () { .on('click', '.btn-del', function () {
var $this = $(this); var $this = $(this);
...@@ -279,6 +291,28 @@ $(document).ready(function(){ ...@@ -279,6 +291,28 @@ $(document).ready(function(){
} }
} }
}).on('click', '#permission_list_table_filter input', function (e) {
e.preventDefault();
e.stopPropagation();
var position = $('#permission_list_table_filter input').offset();
var y = position['top'];
var x = position['left'];
x -= 220;
y += 30;
$('.search-help').css({"top":y+"px", "left":x+"px", "position": "absolute"});
$('.dropdown-menu.search-help').show();
}).on('click', '.search-item', function (e) {
e.preventDefault();
e.stopPropagation();
var value = $(this).data('value');
var old_value = $('#permission_list_table_filter input').val();
var new_value = old_value + ' ' + value + ':';
$('#permission_list_table_filter input').val(new_value.trim());
$('.dropdown-menu.search-help').hide();
$('#permission_list_table_filter input').focus()
}).on('click', 'body', function (e) {
$('.dropdown-menu.search-help').hide()
}) })
</script> </script>
......
...@@ -161,6 +161,87 @@ function activeNav() { ...@@ -161,6 +161,87 @@ function activeNav() {
} }
} }
function formSubmit(props) {
/*
{
"form": $("form"),
"url": "",
"method": "POST",
"redirect_to": "",
"success": function(data, textStatue, jqXHR){},
"error": function(jqXHR, textStatus, errorThrown) {}
}
*/
props = props || {};
var data = props.data || props.form.serializeObject();
var redirect_to = props.redirect_to;
$.ajax({
url: props.url,
type: props.method || 'POST',
data: JSON.stringify(data),
contentType: props.content_type || "application/json; charset=utf-8",
dataType: props.data_type || "json"
}).done(function (data, textState, jqXHR) {
if (redirect_to) {
location.href = redirect_to;
} else if (typeof props.success === 'function') {
return props.success(data, textState, jqXHR);
}
}).fail(function(jqXHR, textStatus, errorThrown) {
if (typeof props.error === 'function') {
return props.error(jqXHR, textStatus, errorThrown)
}
if (!props.form) {
alert(jqXHR.responseText);
return
}
if (jqXHR.status === 400) {
var errors = jqXHR.responseJSON;
var noneFieldErrorRef = props.form.children('.alert-danger');
if (noneFieldErrorRef.length !== 1) {
props.form.prepend('<div class="alert alert-danger" style="display: none"></div>');
noneFieldErrorRef = props.form.children('.alert-danger');
}
var noneFieldErrorMsg = "";
noneFieldErrorRef.css("display", "none");
noneFieldErrorRef.html("");
props.form.find(".help-block.error").html("");
props.form.find(".form-group.has-error").removeClass("has-error");
if (typeof errors !== "object") {
noneFieldErrorMsg = errors;
if (noneFieldErrorRef.length === 1) {
noneFieldErrorRef.css('display', 'block');
noneFieldErrorRef.html(noneFieldErrorMsg);
}
return
}
$.each(errors, function (k, v) {
var fieldRef = props.form.find('input[name="' + k + '"]');
var formGroupRef = fieldRef.parents('.form-group');
var parentRef = fieldRef.parent();
var helpBlockRef = parentRef.children('.help-block.error');
if (helpBlockRef.length === 0) {
parentRef.append('<div class="help-block error"></div>');
helpBlockRef = parentRef.children('.help-block.error');
}
if (fieldRef.length === 1 && formGroupRef.length === 1) {
formGroupRef.addClass('has-error');
var help_msg = v.join("<br/>") ;
helpBlockRef.html(help_msg);
} else {
noneFieldErrorMsg += v + '<br/>';
}
});
if (noneFieldErrorRef.length === 1 && noneFieldErrorMsg !== '') {
noneFieldErrorRef.css('display', 'block');
noneFieldErrorRef.html(noneFieldErrorMsg);
}
}
})
}
function APIUpdateAttr(props) { function APIUpdateAttr(props) {
// props = {url: .., body: , success: , error: , method: ,} // props = {url: .., body: , success: , error: , method: ,}
props = props || {}; props = props || {};
...@@ -195,9 +276,6 @@ function APIUpdateAttr(props) { ...@@ -195,9 +276,6 @@ function APIUpdateAttr(props) {
}).fail(function(jqXHR, textStatus, errorThrown) { }).fail(function(jqXHR, textStatus, errorThrown) {
if (flash_message) { if (flash_message) {
var msg = ""; var msg = "";
console.log(jqXHR);
console.log(textStatus);
console.log(errorThrown);
if (user_fail_message) { if (user_fail_message) {
msg = user_fail_message; msg = user_fail_message;
} else if (jqXHR.responseJSON) { } else if (jqXHR.responseJSON) {
...@@ -213,6 +291,7 @@ function APIUpdateAttr(props) { ...@@ -213,6 +291,7 @@ function APIUpdateAttr(props) {
toastr.error(msg); toastr.error(msg);
} }
if (typeof props.error === 'function') { if (typeof props.error === 'function') {
console.log(jqXHR);
return props.error(jqXHR.responseText, jqXHR.status); return props.error(jqXHR.responseText, jqXHR.status);
} }
}); });
...@@ -478,7 +557,7 @@ jumpserver.initServerSideDataTable = function (options) { ...@@ -478,7 +557,7 @@ jumpserver.initServerSideDataTable = function (options) {
url: options.ajax_url , url: options.ajax_url ,
data: function (data) { data: function (data) {
delete data.columns; delete data.columns;
if (data.length !== null ){ if (data.length !== null){
data.limit = data.length; data.limit = data.length;
delete data.length; delete data.length;
} }
...@@ -525,7 +604,7 @@ jumpserver.initServerSideDataTable = function (options) { ...@@ -525,7 +604,7 @@ jumpserver.initServerSideDataTable = function (options) {
columns: options.columns || [], columns: options.columns || [],
select: options.select || select, select: options.select || select,
language: jumpserver.language, language: jumpserver.language,
lengthMenu: [[10, 15, 25, 50], [10, 15, 25, 50]] lengthMenu: [[15, 25, 50, 9999], [15, 25, 50, 'All']]
}); });
table.selected = []; table.selected = [];
table.selected_rows = []; table.selected_rows = [];
......
{% load i18n %} {% load i18n %}
<strong>Copyright</strong> {% trans ' Beijing Duizhan Tech, Inc. ' %} &copy; 2014-2018 <strong>Copyright</strong> {% trans ' Beijing Duizhan Tech, Inc. ' %} &copy; 2014-2019
\ No newline at end of file \ No newline at end of file
...@@ -5,6 +5,6 @@ ...@@ -5,6 +5,6 @@
<!--<img style="display: none" src="http://www.jumpserver.org/img/evaluate_avatar1.jpg">--> <!--<img style="display: none" src="http://www.jumpserver.org/img/evaluate_avatar1.jpg">-->
</div> </div>
<div> <div>
<strong>Copyright</strong> {% trans ' Beijing Duizhan Tech, Inc. ' %}&copy; 2014-2018 <strong>Copyright</strong> {% trans ' Beijing Duizhan Tech, Inc. ' %}&copy; 2014-2019
</div> </div>
</div> </div>
...@@ -54,7 +54,7 @@ ...@@ -54,7 +54,7 @@
{% include '_copyright.html' %} {% include '_copyright.html' %}
</div> </div>
<div class="col-md-6 text-right"> <div class="col-md-6 text-right">
<small>2014-2018</small> <small>2014-2019</small>
</div> </div>
</div> </div>
</div> </div>
......
...@@ -33,16 +33,19 @@ class SessionViewSet(BulkModelViewSet): ...@@ -33,16 +33,19 @@ class SessionViewSet(BulkModelViewSet):
permission_classes = (IsOrgAdminOrAppUser,) permission_classes = (IsOrgAdminOrAppUser,)
def get_queryset(self): def get_queryset(self):
queryset = super().get_queryset()
terminal_id = self.kwargs.get("terminal", None) terminal_id = self.kwargs.get("terminal", None)
if terminal_id: if terminal_id:
terminal = get_object_or_404(Terminal, id=terminal_id) terminal = get_object_or_404(Terminal, id=terminal_id)
self.queryset = terminal.session_set.all() queryset = queryset.filter(terminal=terminal)
return self.queryset.all() return queryset
return queryset
def perform_create(self, serializer): def perform_create(self, serializer):
if hasattr(self.request.user, 'terminal'): if hasattr(self.request.user, 'terminal'):
serializer.validated_data["terminal"] = self.request.user.terminal serializer.validated_data["terminal"] = self.request.user.terminal
sid = serializer.validated_data["system_user"] sid = serializer.validated_data["system_user"]
# guacamole提交的是id
if is_uuid(sid): if is_uuid(sid):
_system_user = SystemUser.get_system_user_by_id_or_cached(sid) _system_user = SystemUser.get_system_user_by_id_or_cached(sid)
if _system_user: if _system_user:
......
...@@ -100,52 +100,18 @@ class StatusViewSet(viewsets.ModelViewSet): ...@@ -100,52 +100,18 @@ class StatusViewSet(viewsets.ModelViewSet):
task_serializer_class = serializers.TaskSerializer task_serializer_class = serializers.TaskSerializer
def create(self, request, *args, **kwargs): def create(self, request, *args, **kwargs):
from_gua = self.request.query_params.get("from_guacamole", None) self.handle_status(request)
if not from_gua: self.handle_sessions()
self.handle_sessions()
super().create(request, *args, **kwargs)
tasks = self.request.user.terminal.task_set.filter(is_finished=False) tasks = self.request.user.terminal.task_set.filter(is_finished=False)
serializer = self.task_serializer_class(tasks, many=True) serializer = self.task_serializer_class(tasks, many=True)
return Response(serializer.data, status=201) return Response(serializer.data, status=201)
def handle_sessions(self): def handle_status(self, request):
sessions_active = [] request.user.terminal.is_alive = True
for session_data in self.request.data.get("sessions", []):
self.create_or_update_session(session_data)
if not session_data["is_finished"]:
sessions_active.append(session_data["id"])
sessions_in_db_active = Session.objects.filter(
is_finished=False,
terminal=self.request.user.terminal.id
)
for session in sessions_in_db_active:
if str(session.id) not in sessions_active:
session.is_finished = True
session.date_end = timezone.now()
session.save()
def create_or_update_session(self, session_data):
session_data["terminal"] = self.request.user.terminal.id
_id = session_data["id"]
session = get_object_or_none(Session, id=_id)
if session:
serializer = serializers.SessionSerializer(
data=session_data, instance=session
)
else:
serializer = serializers.SessionSerializer(data=session_data)
if serializer.is_valid(): def handle_sessions(self):
session = serializer.save() sessions_id = self.request.data.get('sessions', [])
return session Session.set_sessions_active(sessions_id)
else:
msg = "session data is not valid {}: {}".format(
serializer.errors, str(serializer.data)
)
logger.error(msg)
return None
def get_queryset(self): def get_queryset(self):
terminal_id = self.kwargs.get("terminal", None) terminal_id = self.kwargs.get("terminal", None)
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
from rest_framework import viewsets from rest_framework import viewsets, generics
from rest_framework import status
from rest_framework.response import Response
from common.permissions import IsSuperUser, WithBootstrapToken from common.permissions import IsSuperUser, WithBootstrapToken
from ...models import Terminal from ...models import Terminal
from ...serializers import v2 as serializers from ...serializers import v2 as serializers
__all__ = ['TerminalViewSet', 'TerminalRegistrationViewSet'] __all__ = ['TerminalViewSet', 'TerminalRegistrationApi']
class TerminalViewSet(viewsets.ModelViewSet): class TerminalViewSet(viewsets.ModelViewSet):
...@@ -15,8 +19,19 @@ class TerminalViewSet(viewsets.ModelViewSet): ...@@ -15,8 +19,19 @@ class TerminalViewSet(viewsets.ModelViewSet):
permission_classes = [IsSuperUser] permission_classes = [IsSuperUser]
class TerminalRegistrationViewSet(viewsets.ModelViewSet): class TerminalRegistrationApi(generics.CreateAPIView):
queryset = Terminal.objects.filter(is_deleted=False)
serializer_class = serializers.TerminalRegistrationSerializer serializer_class = serializers.TerminalRegistrationSerializer
permission_classes = [WithBootstrapToken] permission_classes = [WithBootstrapToken]
http_method_names = ['post'] http_method_names = ['post']
def create(self, request, *args, **kwargs):
data = {k: v for k, v in request.data.items()}
serializer = serializers.TerminalSerializer(
data=data, context={'request': request}
)
serializer.is_valid(raise_exception=True)
terminal = serializer.save()
sa_serializer = serializer.sa_serializer_class(instance=terminal.user)
data['service_account'] = sa_serializer.data
return Response(data, status=status.HTTP_201_CREATED)
# ~*~ coding: utf-8 ~*~ # ~*~ coding: utf-8 ~*~
import datetime import datetime
from django.db import transaction
from django.utils import timezone from django.utils import timezone
from django.db.utils import OperationalError
from .base import CommandBase from .base import CommandBase
...@@ -35,7 +37,25 @@ class CommandStore(CommandBase): ...@@ -35,7 +37,25 @@ class CommandStore(CommandBase):
input=c["input"], output=c["output"], session=c["session"], input=c["input"], output=c["output"], session=c["session"],
org_id=c["org_id"], timestamp=c["timestamp"] org_id=c["org_id"], timestamp=c["timestamp"]
)) ))
return self.model.objects.bulk_create(_commands) error = False
try:
with transaction.atomic():
self.model.objects.bulk_create(_commands)
except OperationalError:
error = True
except:
return False
if not error:
return True
for command in _commands:
try:
with transaction.atomic():
command.save()
except OperationalError:
command.output = str(command.output.encode())
command.save()
return True
@staticmethod @staticmethod
def make_filter_kwargs( def make_filter_kwargs(
......
...@@ -39,5 +39,3 @@ class TerminalForm(forms.ModelForm): ...@@ -39,5 +39,3 @@ class TerminalForm(forms.ModelForm):
'name', 'remote_addr', 'comment', 'name', 'remote_addr', 'comment',
'command_storage', 'replay_storage', 'command_storage', 'replay_storage',
] ]
help_texts = {
}
...@@ -8,10 +8,12 @@ from django.utils.translation import ugettext_lazy as _ ...@@ -8,10 +8,12 @@ from django.utils.translation import ugettext_lazy as _
from django.utils import timezone from django.utils import timezone
from django.conf import settings from django.conf import settings
from django.core.files.storage import default_storage from django.core.files.storage import default_storage
from django.core.cache import cache
from users.models import User from users.models import User
from orgs.mixins import OrgModelMixin from orgs.mixins import OrgModelMixin
from common.utils import get_command_storage_setting, get_replay_storage_setting from common.utils import get_command_storage_setting, get_replay_storage_setting
from .backends import get_multi_command_storage
from .backends.command.models import AbstractSessionCommand from .backends.command.models import AbstractSessionCommand
...@@ -28,6 +30,17 @@ class Terminal(models.Model): ...@@ -28,6 +30,17 @@ class Terminal(models.Model):
is_deleted = models.BooleanField(default=False) is_deleted = models.BooleanField(default=False)
date_created = models.DateTimeField(auto_now_add=True) date_created = models.DateTimeField(auto_now_add=True)
comment = models.TextField(blank=True, verbose_name=_('Comment')) comment = models.TextField(blank=True, verbose_name=_('Comment'))
STATUS_KEY_PREFIX = 'terminal_status_'
@property
def is_alive(self):
key = self.STATUS_KEY_PREFIX + str(self.id)
return bool(cache.get(key))
@is_alive.setter
def is_alive(self, value):
key = self.STATUS_KEY_PREFIX + str(self.id)
cache.set(key, value, 60)
@property @property
def is_active(self): def is_active(self):
...@@ -41,7 +54,7 @@ class Terminal(models.Model): ...@@ -41,7 +54,7 @@ class Terminal(models.Model):
self.user.is_active = active self.user.is_active = active
self.user.save() self.user.save()
def get_common_storage(self): def get_command_storage_setting(self):
storage_all = get_command_storage_setting() storage_all = get_command_storage_setting()
if self.command_storage in storage_all: if self.command_storage in storage_all:
storage = storage_all.get(self.command_storage) storage = storage_all.get(self.command_storage)
...@@ -49,7 +62,7 @@ class Terminal(models.Model): ...@@ -49,7 +62,7 @@ class Terminal(models.Model):
storage = storage_all.get('default') storage = storage_all.get('default')
return {"TERMINAL_COMMAND_STORAGE": storage} return {"TERMINAL_COMMAND_STORAGE": storage}
def get_replay_storage(self): def get_replay_storage_setting(self):
storage_all = get_replay_storage_setting() storage_all = get_replay_storage_setting()
if self.replay_storage in storage_all: if self.replay_storage in storage_all:
storage = storage_all.get(self.replay_storage) storage = storage_all.get(self.replay_storage)
...@@ -61,10 +74,11 @@ class Terminal(models.Model): ...@@ -61,10 +74,11 @@ class Terminal(models.Model):
def config(self): def config(self):
configs = {} configs = {}
for k in dir(settings): for k in dir(settings):
if k.startswith('TERMINAL'): if not k.startswith('TERMINAL'):
configs[k] = getattr(settings, k) continue
configs.update(self.get_common_storage()) configs[k] = getattr(settings, k)
configs.update(self.get_replay_storage()) configs.update(self.get_command_storage_setting())
configs.update(self.get_replay_storage_setting())
configs.update({ configs.update({
'SECURITY_MAX_IDLE_TIME': settings.SECURITY_MAX_IDLE_TIME 'SECURITY_MAX_IDLE_TIME': settings.SECURITY_MAX_IDLE_TIME
}) })
...@@ -152,6 +166,7 @@ class Session(OrgModelMixin): ...@@ -152,6 +166,7 @@ class Session(OrgModelMixin):
date_end = models.DateTimeField(verbose_name=_("Date end"), null=True) date_end = models.DateTimeField(verbose_name=_("Date end"), null=True)
upload_to = 'replay' upload_to = 'replay'
ACTIVE_CACHE_KEY_PREFIX = 'SESSION_ACTIVE_{}'
def get_rel_replay_path(self, version=2): def get_rel_replay_path(self, version=2):
""" """
...@@ -181,6 +196,26 @@ class Session(OrgModelMixin): ...@@ -181,6 +196,26 @@ class Session(OrgModelMixin):
except OSError as e: except OSError as e:
return None, e return None, e
@classmethod
def set_sessions_active(cls, sessions_id):
data = {cls.ACTIVE_CACHE_KEY_PREFIX.format(i): i for i in sessions_id}
cache.set_many(data, timeout=5*60)
@classmethod
def get_active_sessions(cls):
return cls.objects.filter(is_finished=False)
def is_active(self):
if self.protocol in ['ssh', 'telnet']:
key = self.ACTIVE_CACHE_KEY_PREFIX.format(self.id)
return bool(cache.get(key))
return True
@property
def command_amount(self):
command_store = get_multi_command_storage()
return command_store.count(session=str(self.id))
class Meta: class Meta:
db_table = "terminal_session" db_table = "terminal_session"
ordering = ["-date_start"] ordering = ["-date_start"]
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
from django.core.cache import cache
from rest_framework import serializers from rest_framework import serializers
from rest_framework_bulk.serializers import BulkListSerializer from rest_framework_bulk.serializers import BulkListSerializer
from common.mixins import BulkSerializerMixin from common.mixins import BulkSerializerMixin
from ..models import Terminal, Status, Session, Task from ..models import Terminal, Status, Session, Task
from ..backends import get_multi_command_storage
class TerminalSerializer(serializers.ModelSerializer): class TerminalSerializer(serializers.ModelSerializer):
session_online = serializers.SerializerMethodField() session_online = serializers.SerializerMethodField()
is_alive = serializers.SerializerMethodField() is_alive = serializers.BooleanField(read_only=True)
class Meta: class Meta:
model = Terminal model = Terminal
...@@ -23,42 +21,23 @@ class TerminalSerializer(serializers.ModelSerializer): ...@@ -23,42 +21,23 @@ class TerminalSerializer(serializers.ModelSerializer):
@staticmethod @staticmethod
def get_session_online(obj): def get_session_online(obj):
return Session.objects.filter(terminal=obj.id, is_finished=False).count() return Session.objects.filter(terminal=obj, is_finished=False).count()
@staticmethod
def get_is_alive(obj):
key = StatusSerializer.CACHE_KEY_PREFIX + str(obj.id)
return cache.get(key)
class SessionSerializer(BulkSerializerMixin, serializers.ModelSerializer): class SessionSerializer(BulkSerializerMixin, serializers.ModelSerializer):
command_amount = serializers.SerializerMethodField() command_amount = serializers.IntegerField(read_only=True)
command_store = get_multi_command_storage()
class Meta: class Meta:
model = Session model = Session
list_serializer_class = BulkListSerializer list_serializer_class = BulkListSerializer
fields = '__all__' fields = '__all__'
def get_command_amount(self, obj):
return self.command_store.count(session=str(obj.id))
class StatusSerializer(serializers.ModelSerializer): class StatusSerializer(serializers.ModelSerializer):
CACHE_KEY_PREFIX = 'terminal_status_'
class Meta: class Meta:
fields = '__all__' fields = ['id', 'terminal']
model = Status model = Status
def create(self, validated_data):
terminal_id = str(validated_data['terminal'].id)
key = self.CACHE_KEY_PREFIX + terminal_id
cache.set(key, 1, 60)
return validated_data
class TaskSerializer(BulkSerializerMixin, serializers.ModelSerializer): class TaskSerializer(BulkSerializerMixin, serializers.ModelSerializer):
...@@ -69,6 +48,6 @@ class TaskSerializer(BulkSerializerMixin, serializers.ModelSerializer): ...@@ -69,6 +48,6 @@ class TaskSerializer(BulkSerializerMixin, serializers.ModelSerializer):
class ReplaySerializer(serializers.Serializer): class ReplaySerializer(serializers.Serializer):
file = serializers.FileField() file = serializers.FileField(allow_empty_file=True)
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
from rest_framework import serializers from rest_framework import serializers
from common.utils import get_request_ip from common.utils import get_request_ip
from users.serializers.v2 import ServiceAccountRegistrationSerializer from users.serializers.v2 import ServiceAccountSerializer
from ..models import Terminal from ..models import Terminal
...@@ -11,36 +11,48 @@ __all__ = ['TerminalSerializer', 'TerminalRegistrationSerializer'] ...@@ -11,36 +11,48 @@ __all__ = ['TerminalSerializer', 'TerminalRegistrationSerializer']
class TerminalSerializer(serializers.ModelSerializer): class TerminalSerializer(serializers.ModelSerializer):
class Meta: sa_serializer_class = ServiceAccountSerializer
model = Terminal sa_serializer = None
fields = [
'id', 'name', 'remote_addr', 'comment',
]
read_only_fields = ['id', 'remote_addr']
class TerminalRegistrationSerializer(serializers.ModelSerializer):
service_account = ServiceAccountRegistrationSerializer(read_only=True)
service_account_serializer = None
class Meta: class Meta:
model = Terminal model = Terminal
fields = [ fields = [
'id', 'name', 'remote_addr', 'comment', 'service_account' 'id', 'name', 'remote_addr', 'command_storage',
'replay_storage', 'user', 'is_accepted', 'is_deleted',
'date_created', 'comment'
] ]
read_only_fields = ['id', 'remote_addr', 'service_account'] read_only_fields = ['id', 'remote_addr', 'user', 'date_created']
def validate(self, attrs): def is_valid(self, raise_exception=False):
self.service_account_serializer = ServiceAccountRegistrationSerializer(data=attrs) valid = super().is_valid(raise_exception=raise_exception)
self.service_account_serializer.is_valid(raise_exception=True) if not valid:
return attrs return valid
data = {'name': self.validated_data.get('name')}
kwargs = {'data': data}
if self.instance and self.instance.user:
kwargs['instance'] = self.instance.user
self.sa_serializer = ServiceAccountSerializer(**kwargs)
valid = self.sa_serializer.is_valid(raise_exception=True)
return valid
def save(self, **kwargs):
instance = super().save(**kwargs)
sa = self.sa_serializer.save()
instance.user = sa
instance.save()
return instance
def create(self, validated_data): def create(self, validated_data):
request = self.context.get('request') request = self.context.get('request')
sa = self.service_account_serializer.save()
instance = super().create(validated_data) instance = super().create(validated_data)
instance.is_accepted = True instance.is_accepted = True
instance.user = sa if request:
instance.remote_addr = get_request_ip(request) instance.remote_addr = get_request_ip(request)
instance.save() instance.save()
return instance return instance
class TerminalRegistrationSerializer(serializers.Serializer):
name = serializers.CharField(max_length=128)
comment = serializers.CharField(max_length=128)
service_account = ServiceAccountSerializer(read_only=True)
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
from celery import shared_task
from django.core.cache import cache
from django.db.utils import ProgrammingError, OperationalError
from common.utils import get_logger
from .const import ASSETS_CACHE_KEY, USERS_CACHE_KEY, SYSTEM_USER_CACHE_KEY
# Module-level running flag; nothing in this chunk mutates it — TODO confirm it
# is still referenced elsewhere before relying on it.
RUNNING = False
# Logger named after this module's file path (get_logger is a project helper).
logger = get_logger(__file__)
def set_session_info_cache():
    """Snapshot the asset/user/system-user lists of sessions into the cache.

    Best-effort: database errors raised before migrations have run
    (ProgrammingError/OperationalError) are deliberately swallowed.
    """
    logger.debug("")
    # Imported inside the function — presumably to avoid an import-time cycle
    # with .utils; confirm before moving to module level.
    from .utils import get_session_asset_list, get_session_user_list, \
        get_session_system_user_list
    try:
        snapshots = (
            (ASSETS_CACHE_KEY, get_session_asset_list()),
            (USERS_CACHE_KEY, get_session_user_list()),
            (SYSTEM_USER_CACHE_KEY, get_session_system_user_list()),
        )
        for cache_key, value in snapshots:
            cache.set(cache_key, value)
    except (ProgrammingError, OperationalError):
        # Tables may not exist yet (fresh install, pre-migrate); skip quietly.
        pass
...@@ -10,8 +10,9 @@ from django.conf import settings ...@@ -10,8 +10,9 @@ from django.conf import settings
from django.core.files.storage import default_storage from django.core.files.storage import default_storage
from ops.celery.utils import register_as_period_task, after_app_ready_start, \ from ops.celery.decorator import (
after_app_shutdown_clean register_as_period_task, after_app_ready_start, after_app_shutdown_clean_periodic
)
from .models import Status, Session, Command from .models import Status, Session, Command
...@@ -23,28 +24,30 @@ logger = get_task_logger(__name__) ...@@ -23,28 +24,30 @@ logger = get_task_logger(__name__)
@shared_task @shared_task
@register_as_period_task(interval=3600) @register_as_period_task(interval=3600)
@after_app_ready_start @after_app_ready_start
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def delete_terminal_status_period(): def delete_terminal_status_period():
yesterday = timezone.now() - datetime.timedelta(days=3) yesterday = timezone.now() - datetime.timedelta(days=1)
Status.objects.filter(date_created__lt=yesterday).delete() Status.objects.filter(date_created__lt=yesterday).delete()
@shared_task @shared_task
@register_as_period_task(interval=3600) @register_as_period_task(interval=600)
@after_app_ready_start @after_app_ready_start
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def clean_orphan_session(): def clean_orphan_session():
active_sessions = Session.objects.filter(is_finished=False) active_sessions = Session.objects.filter(is_finished=False)
for session in active_sessions: for session in active_sessions:
if not session.terminal or not session.terminal.is_active: if not session.is_active():
session.is_finished = True continue
session.save() session.is_finished = True
session.date_end = timezone.now()
session.save()
@shared_task @shared_task
@register_as_period_task(interval=3600*24) @register_as_period_task(interval=3600*24)
@after_app_ready_start @after_app_ready_start
@after_app_shutdown_clean @after_app_shutdown_clean_periodic
def clean_expired_session_period(): def clean_expired_session_period():
logger.info("Start clean expired session record, commands and replay") logger.info("Start clean expired session record, commands and replay")
days = settings.TERMINAL_SESSION_KEEP_DURATION days = settings.TERMINAL_SESSION_KEEP_DURATION
...@@ -64,3 +67,4 @@ def clean_expired_session_period(): ...@@ -64,3 +67,4 @@ def clean_expired_session_period():
default_storage.delete(_local_path) default_storage.delete(_local_path)
# 删除session记录 # 删除session记录
session.delete() session.delete()
...@@ -94,7 +94,7 @@ ...@@ -94,7 +94,7 @@
<td class="text-center">{{ session.remote_addr|default:"" }}</td> <td class="text-center">{{ session.remote_addr|default:"" }}</td>
<td class="text-center">{{ session.protocol }}</td> <td class="text-center">{{ session.protocol }}</td>
<td class="text-center">{{ session.get_login_from_display }}</td> <td class="text-center">{{ session.get_login_from_display }}</td>
<td class="text-center">{{ session.id | get_session_command_amount }}</td> <td class="text-center">{{ session.command_amount }}</td>
<td class="text-center">{{ session.date_start }}</td> <td class="text-center">{{ session.date_start }}</td>
{# <td class="text-center">{{ session.date_last_active }}</td>#} {# <td class="text-center">{{ session.date_last_active }}</td>#}
......
...@@ -33,8 +33,6 @@ ...@@ -33,8 +33,6 @@
<h3>{% trans 'Info' %}</h3> <h3>{% trans 'Info' %}</h3>
{% bootstrap_field form.name layout="horizontal" %} {% bootstrap_field form.name layout="horizontal" %}
{% bootstrap_field form.remote_addr layout="horizontal" %} {% bootstrap_field form.remote_addr layout="horizontal" %}
{# {% bootstrap_field form.ssh_port layout="horizontal" %}#}
{# {% bootstrap_field form.http_port layout="horizontal" %}#}
{% bootstrap_field form.command_storage layout="horizontal" %} {% bootstrap_field form.command_storage layout="horizontal" %}
{% bootstrap_field form.replay_storage layout="horizontal" %} {% bootstrap_field form.replay_storage layout="horizontal" %}
...@@ -60,14 +58,14 @@ ...@@ -60,14 +58,14 @@
<script> <script>
$(document).ready(function () { $(document).ready(function () {
$('.select2').select2(); $('.select2').select2();
}).on('submit', 'form', function (e) {
$('.input-group.date').datepicker({ e.preventDefault();
format: "yyyy-mm-dd", var form = $('form');
todayBtn: "linked", formSubmit({
keyboardNavigation: false, 'url': '{% url 'api-terminal-v2:terminal-detail' pk=DEFAULT_PK %}'.replace('{{ DEFAULT_PK }}', '{{ object.id }}'),
forceParse: false, 'form': form,
calendarWeeks: true, 'method': 'PUT',
autoclose: true 'redirect_to': '{% url "terminal:terminal-list" %}'
}); });
}) })
</script> </script>
......
...@@ -11,10 +11,11 @@ app_name = 'terminal' ...@@ -11,10 +11,11 @@ app_name = 'terminal'
router = BulkRouter() router = BulkRouter()
router.register(r'terminal', api.TerminalViewSet, 'terminal') router.register(r'terminal', api.TerminalViewSet, 'terminal')
router.register(r'terminal-registrations', api.TerminalRegistrationViewSet, 'terminal-registration')
urlpatterns = [ urlpatterns = [
path('terminal-registrations/', api.TerminalRegistrationApi.as_view(),
name='terminal-registration')
] ]
urlpatterns += router.urls urlpatterns += router.urls
...@@ -19,6 +19,7 @@ from orgs.utils import current_org ...@@ -19,6 +19,7 @@ from orgs.utils import current_org
from ..serializers import UserSerializer, UserPKUpdateSerializer, \ from ..serializers import UserSerializer, UserPKUpdateSerializer, \
UserUpdateGroupSerializer, ChangeUserPasswordSerializer UserUpdateGroupSerializer, ChangeUserPasswordSerializer
from ..models import User from ..models import User
from ..signals import post_user_create
logger = get_logger(__name__) logger = get_logger(__name__)
...@@ -37,6 +38,10 @@ class UserViewSet(IDInFilterMixin, BulkModelViewSet): ...@@ -37,6 +38,10 @@ class UserViewSet(IDInFilterMixin, BulkModelViewSet):
permission_classes = (IsOrgAdmin,) permission_classes = (IsOrgAdmin,)
pagination_class = LimitOffsetPagination pagination_class = LimitOffsetPagination
def perform_create(self, serializer):
    """Persist the new user, then broadcast the post_user_create signal."""
    created_user = serializer.save()
    post_user_create.send(self.__class__, user=created_user)
def get_queryset(self): def get_queryset(self):
queryset = current_org.get_org_users() queryset = current_org.get_org_users()
return queryset return queryset
......
...@@ -7,6 +7,6 @@ from ...serializers import v2 as serializers ...@@ -7,6 +7,6 @@ from ...serializers import v2 as serializers
class ServiceAccountRegistrationViewSet(viewsets.ModelViewSet): class ServiceAccountRegistrationViewSet(viewsets.ModelViewSet):
serializer_class = serializers.ServiceAccountRegistrationSerializer serializer_class = serializers.ServiceAccountSerializer
permission_classes = (WithBootstrapToken,) permission_classes = (WithBootstrapToken,)
http_method_names = ['post'] http_method_names = ['post']
# Generated by Django 2.1.4 on 2019-01-07 11:12
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: do not hand-edit the field definition below.
    # Widens User.source choices with the new 'radius' option to match the
    # SOURCE_RADIUS constant added on the User model.

    dependencies = [
        ('users', '0017_auto_20181123_1113'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='source',
            field=models.CharField(choices=[('local', 'Local'), ('ldap', 'LDAP/AD'), ('openid', 'OpenID'), ('radius', 'Radius')], default='local', max_length=30, verbose_name='Source'),
        ),
    ]
...@@ -41,10 +41,12 @@ class User(AbstractUser): ...@@ -41,10 +41,12 @@ class User(AbstractUser):
SOURCE_LOCAL = 'local' SOURCE_LOCAL = 'local'
SOURCE_LDAP = 'ldap' SOURCE_LDAP = 'ldap'
SOURCE_OPENID = 'openid' SOURCE_OPENID = 'openid'
SOURCE_RADIUS = 'radius'
SOURCE_CHOICES = ( SOURCE_CHOICES = (
(SOURCE_LOCAL, 'Local'), (SOURCE_LOCAL, 'Local'),
(SOURCE_LDAP, 'LDAP/AD'), (SOURCE_LDAP, 'LDAP/AD'),
(SOURCE_OPENID, 'OpenID'), (SOURCE_OPENID, 'OpenID'),
(SOURCE_RADIUS, 'Radius'),
) )
id = models.UUIDField(default=uuid.uuid4, primary_key=True) id = models.UUIDField(default=uuid.uuid4, primary_key=True)
username = models.CharField( username = models.CharField(
......
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
# #
from django.utils.translation import ugettext as _
from rest_framework import serializers from rest_framework import serializers
from ..models import User, AccessKey from ..models import User, AccessKey
...@@ -12,7 +13,7 @@ class AccessKeySerializer(serializers.ModelSerializer): ...@@ -12,7 +13,7 @@ class AccessKeySerializer(serializers.ModelSerializer):
read_only_fields = ['id', 'secret'] read_only_fields = ['id', 'secret']
class ServiceAccountRegistrationSerializer(serializers.ModelSerializer): class ServiceAccountSerializer(serializers.ModelSerializer):
access_key = AccessKeySerializer(read_only=True) access_key = AccessKeySerializer(read_only=True)
class Meta: class Meta:
...@@ -30,15 +31,22 @@ class ServiceAccountRegistrationSerializer(serializers.ModelSerializer): ...@@ -30,15 +31,22 @@ class ServiceAccountRegistrationSerializer(serializers.ModelSerializer):
def validate_name(self, name): def validate_name(self, name):
email = self.get_email() email = self.get_email()
username = self.get_username() username = self.get_username()
if User.objects.filter(email=email) or \ if self.instance:
User.objects.filter(username=username): users = User.objects.exclude(id=self.instance.id)
raise serializers.ValidationError('name not unique', code='unique') else:
users = User.objects.all()
if users.filter(email=email) or \
users.filter(username=username):
raise serializers.ValidationError(_('name not unique'), code='unique')
return name return name
def save(self, **kwargs):
    """Inject the derived service-account identity fields, then delegate to DRF save."""
    # get_email()/get_username() derive the identity from the validated name.
    derived_identity = {
        'email': self.get_email(),
        'username': self.get_username(),
        'role': User.ROLE_APP,
    }
    self.validated_data.update(derived_identity)
    return super().save(**kwargs)
def create(self, validated_data): def create(self, validated_data):
validated_data['email'] = self.get_email()
validated_data['username'] = self.get_username()
validated_data['role'] = User.ROLE_APP
instance = super().create(validated_data) instance = super().create(validated_data)
instance.create_access_key() instance.create_access_key()
return instance return instance
...@@ -3,10 +3,8 @@ ...@@ -3,10 +3,8 @@
from celery import shared_task from celery import shared_task
from ops.celery.utils import ( from ops.celery.utils import create_or_update_celery_periodic_tasks
create_or_update_celery_periodic_tasks, from ops.celery.decorator import after_app_ready_start
after_app_ready_start
)
from .models import User from .models import User
from common.utils import get_logger from common.utils import get_logger
from .utils import write_login_log, send_password_expiration_reminder_mail from .utils import write_login_log, send_password_expiration_reminder_mail
......
"""
jumpserver.config
~~~~~~~~~~~~~~~~~
Jumpserver project setting file
:copyright: (c) 2014-2017 by Jumpserver Team
:license: GPL v2, see LICENSE for more details.
"""
import os
import json
BASE_DIR = os.path.dirname(os.path.abspath(__file__))


class Config:
    """Base configuration for the Jumpserver Django project.

    Subclasses override values per deployment. Unknown attributes resolve to
    ``None`` via ``__getattr__`` so callers can probe optional settings
    without an AttributeError.
    """
    # Used to encrypt or decrypt data.
    # SECURITY WARNING: keep the secret key used in production secret!
    SECRET_KEY = os.environ.get('SECRET_KEY') or '2vym+ky!997d5kkcc64mnz06y1mmui3lut#(^wd=%s_qj$1%x'

    # How many lines to display per page with the Django pager, default 25.
    DISPLAY_PER_PAGE = 25

    # Absolute site URL (HTTP_PROTOCOL://HOST[:PORT]), used when building
    # absolute links such as the login URL in outgoing mail.
    SITE_URL = 'http://localhost'

    # Django security setting; tighten this when DEBUG is disabled.
    ALLOWED_HOSTS = ['*']

    # Enable in development for full tracebacks; disable in production.
    DEBUG = True

    # DEBUG, INFO, WARNING, ERROR, CRITICAL.
    # See https://docs.djangoproject.com/en/1.10/topics/logging/
    LOG_LEVEL = 'DEBUG'
    LOG_DIR = os.path.join(BASE_DIR, 'logs')

    # Database settings; sqlite3, mysql, postgres ... are supported.
    # See https://docs.djangoproject.com/en/1.10/ref/settings/#databases
    # SQLite setting:
    DB_ENGINE = 'sqlite3'
    DB_NAME = os.path.join(BASE_DIR, 'data', 'db.sqlite3')

    # MySQL or postgres setting like:
    # DB_ENGINE = 'mysql'
    # DB_HOST = '127.0.0.1'
    # DB_PORT = 3306
    # DB_USER = 'root'
    # DB_PASSWORD = ''
    # DB_NAME = 'jumpserver'

    # Host and port Django binds on startup
    # ./manage.py runserver 127.0.0.1:8080
    HTTP_BIND_HOST = '0.0.0.0'
    HTTP_LISTEN_PORT = 8080

    # Redis is the broker for celery and web sockets.
    REDIS_HOST = '127.0.0.1'
    REDIS_PORT = 6379
    REDIS_PASSWORD = ''
    # BUG FIX: a non-empty password used to be glued directly onto the host
    # ('redis://pwhost:port'); the redis URL scheme requires ':password@'.
    # With the default empty password the resulting URL is unchanged.
    BROKER_URL = 'redis://%(auth)s%(host)s:%(port)s/3' % {
        'auth': ':%s@' % REDIS_PASSWORD if REDIS_PASSWORD else '',
        'host': REDIS_HOST,
        'port': REDIS_PORT,
    }

    # API token lifetime in seconds; Jumpserver refreshes it on each request.
    TOKEN_EXPIRATION = 3600

    # Session and csrf cookie lifetime.
    SESSION_COOKIE_AGE = 3600 * 24

    # Email SMTP settings; only SMTP sending is supported.
    EMAIL_HOST = 'smtp.163.com'
    EMAIL_PORT = 25
    EMAIL_HOST_USER = ''
    EMAIL_HOST_PASSWORD = ''  # Caution: some SMTP servers require an `Authorization Code` instead of the password
    # `True if X else False` is redundant — the comparison already yields a bool.
    EMAIL_USE_SSL = EMAIL_PORT == 465
    EMAIL_USE_TLS = EMAIL_PORT == 587
    EMAIL_SUBJECT_PREFIX = '[Jumpserver] '

    CAPTCHA_TEST_MODE = False

    # Optional usage-guide URL shown after the setup wizard.
    USER_GUIDE_URL = ''

    # LDAP auth settings (disabled by default).
    AUTH_LDAP = False
    AUTH_LDAP_SERVER_URI = 'ldap://localhost:389'
    AUTH_LDAP_BIND_DN = 'cn=admin,dc=jumpserver,dc=org'
    AUTH_LDAP_BIND_PASSWORD = ''
    AUTH_LDAP_SEARCH_OU = 'ou=tech,dc=jumpserver,dc=org'
    AUTH_LDAP_SEARCH_FILTER = '(cn=%(user)s)'
    AUTH_LDAP_USER_ATTR_MAP = {
        "username": "cn",
        "name": "sn",
        "email": "mail"
    }
    AUTH_LDAP_START_TLS = False

    #
    # OTP_VALID_WINDOW = 0

    def __init__(self):
        pass

    def __getattr__(self, item):
        # Unknown settings read as None instead of raising AttributeError.
        return None
class DockerConfig(Config):
    """Docker deployment configuration.

    Every value is read from the environment first and falls back to the
    default shown, so containers are configured purely via env vars.
    """
    # Data-encryption key. Change it and keep it safe — encrypted data cannot
    # be recovered if the key is lost.
    # SECRET_KEY = "SOME_KEY_NO_ONE_GUESS"
    SECRET_KEY = os.environ.get("SECRET_KEY") or "MD923lkSDi8213kl),3()&^%aM2q1mz;223lkM0o1"

    # Public site URL, format http[s]://domain[:port]
    # SITE_URL = "http://jumpserver.fit2cloud.com"
    SITE_URL = os.environ.get("SITE_URL") or 'http://localhost'

    # BUG FIX: the previous `bool(os.environ.get("DEBUG"))` treated ANY
    # non-empty value — including "false" and "0" — as True. Accept only the
    # common truthy spellings; unset still means False.
    DEBUG = os.environ.get("DEBUG", "").strip().lower() in ("1", "true", "yes", "on")

    # Log level, default INFO.
    # LOG_LEVEL = WARN
    LOG_LEVEL = os.environ.get("LOG_LEVEL") or "INFO"

    # Database backend; change only when using an external database.
    # DB_ENGINE = "oracle" | "postgre" | "mysql" | "sqlite3"
    DB_ENGINE = os.environ.get("DB_ENGINE") or 'mysql'
    # DB_HOST = "192.168.1.1"
    DB_HOST = os.environ.get("DB_HOST") or 'mysql'
    # NOTE(review): taken from the environment this stays a string while the
    # default is an int — downstream appears to tolerate both; confirm.
    DB_PORT = os.environ.get("DB_PORT") or 3306
    DB_USER = os.environ.get("DB_USER") or 'root'
    DB_PASSWORD = os.environ.get("DB_PASSWORD") or ''
    DB_NAME = os.environ.get("DB_NAME") or 'jumpserver'

    # Redis connection; change only when using an external redis.
    # NOTE(review): BROKER_URL is inherited from Config and was computed with
    # Config's redis defaults, not these overrides — confirm the settings
    # module rebuilds it from these values.
    REDIS_HOST = os.environ.get("REDIS_HOST") or 'redis'
    REDIS_PORT = os.environ.get("REDIS_PORT") or 6379
    REDIS_PASSWORD = os.environ.get("REDIS_PASSWORD") or ''

    # SMTP mail settings; some providers require an authorization code in
    # place of the account password.
    # EMAIL_HOST = 'smtp.qq.com'
    EMAIL_HOST = 'smtp.163.com'
    # EMAIL_PORT = 465
    EMAIL_PORT = 25
    # EMAIL_HOST_USER = "noreply@jumpserver.org"
    EMAIL_HOST_USER = ''
    EMAIL_HOST_PASSWORD = ''
    # Redundant `True if X else False` replaced with the comparison itself.
    EMAIL_USE_SSL = EMAIL_PORT == 465  # SSL is conventional on port 465
    EMAIL_USE_TLS = EMAIL_PORT == 587  # TLS is conventional on port 587
    EMAIL_SUBJECT_PREFIX = '[Jumpserver] '

    # LDAP authentication (disabled by default).
    # AUTH_LDAP = True
    AUTH_LDAP = False
    AUTH_LDAP_SERVER_URI = 'ldap://localhost:389'
    AUTH_LDAP_BIND_DN = 'cn=admin,dc=jumpserver,dc=org'
    AUTH_LDAP_BIND_PASSWORD = ''
    AUTH_LDAP_SEARCH_OU = 'ou=tech,dc=jumpserver,dc=org'
    # Change the leading attribute (cn or uid) to whichever holds the login name.
    # AUTH_LDAP_SEARCH_FILTER = '(uid=%(user)s)'
    AUTH_LDAP_SEARCH_FILTER = '(cn=%(user)s)'
    # Map LDAP attributes onto Jumpserver user fields.
    AUTH_LDAP_USER_ATTR_MAP = {
        "username": "cn",  # LDAP `cn` -> username
        "name": "sn",      # LDAP `sn` -> name
        "email": "mail"    # LDAP `mail` -> email
    }
    AUTH_LDAP_START_TLS = False

    # MFA/OTP clock-drift window.
    OTP_VALID_WINDOW = int(os.environ.get("OTP_VALID_WINDOW")) if os.environ.get("OTP_VALID_WINDOW") else 0


# Default configuration object consumed by the settings module.
config = DockerConfig()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
jumpserver.config
~~~~~~~~~~~~~~~~~
Jumpserver project setting file
:copyright: (c) 2014-2017 by Jumpserver Team
:license: GPL v2, see LICENSE for more details.
"""
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class Config:
    """
    Jumpserver Config File

    Jumpserver uses this config to drive the Django framework.
    You can set a value here, or set an environment variable of the same
    name. Lookup order: file => environment variable => default.
    """
    # SECURITY WARNING: keep the secret key used in production secret!
    # Change this to a random string in production and never share it.
    SECRET_KEY = '2vym+ky!997d5kkcc64mnz06y1mmui3lut#(^wd=%s_qj$1%x'

    # SECURITY WARNING: keep the bootstrap token used in production secret!
    # Pre-shared token used by coco and guacamole to register their service
    # accounts, replacing the previous register-then-accept mechanism.
    BOOTSTRAP_TOKEN = 'PleaseChangeMe'

    # Enable in development for full tracebacks; disable in production.
    # DEBUG = True

    # DEBUG, INFO, WARNING, ERROR, CRITICAL can be set.
    # See https://docs.djangoproject.com/en/1.10/topics/logging/
    # LOG_LEVEL = 'DEBUG'
    # LOG_DIR = os.path.join(BASE_DIR, 'logs')

    # Browser session lifetime, default 24 hours; can also be set to expire
    # on browser close.
    # SESSION_COOKIE_AGE = 3600 * 24
    # SESSION_EXPIRE_AT_BROWSER_CLOSE = False

    # Database settings; sqlite3, mysql, postgres ... are supported.
    # See https://docs.djangoproject.com/en/1.10/ref/settings/#databases
    # Single-file SQLite setting:
    # DB_ENGINE = 'sqlite3'
    # DB_NAME = os.path.join(BASE_DIR, 'data', 'db.sqlite3')

    # MySQL (or postgres) setting:
    DB_ENGINE = 'mysql'
    DB_HOST = '127.0.0.1'
    DB_PORT = 3306
    DB_USER = 'jumpserver'
    DB_PASSWORD = ''
    DB_NAME = 'jumpserver'

    # Host and port Django binds on startup
    # ./manage.py runserver 127.0.0.1:8080
    HTTP_BIND_HOST = '0.0.0.0'
    HTTP_LISTEN_PORT = 8080

    # Redis is the broker for celery and web sockets.
    REDIS_HOST = '127.0.0.1'
    REDIS_PORT = 6379
    # REDIS_PASSWORD = ''
    # REDIS_DB_CELERY = 3
    # REDIS_DB_CACHE = 4

    # OpenID authentication settings.
    # BASE_SITE_URL = 'http://localhost:8080'
    # AUTH_OPENID = False  # True or False
    # AUTH_OPENID_SERVER_URL = 'https://openid-auth-server.com/'
    # AUTH_OPENID_REALM_NAME = 'realm-name'
    # AUTH_OPENID_CLIENT_ID = 'client-id'
    # AUTH_OPENID_CLIENT_SECRET = 'client-secret'

    #
    # OTP_VALID_WINDOW = 0

    def __init__(self):
        pass

    def __getattr__(self, item):
        # Unknown settings read as None instead of raising AttributeError.
        return None
class DevelopmentConfig(Config):
    # Development overrides go here; currently identical to the base Config.
    pass


class TestConfig(Config):
    # Test-run overrides go here.
    pass


class ProductionConfig(Config):
    # Production overrides (e.g. DEBUG off, real SECRET_KEY) go here.
    pass


# The active configuration; switch the subclass (or add env-based if/else)
# per deployment environment.
config = DevelopmentConfig()
# SECURITY WARNING: keep the secret key used in production secret!
# 加密秘钥 生产环境中请修改为随机字符串,请勿外泄, 可使用命令生成
# $ cat /dev/urandom | tr -dc A-Za-z0-9 | head -c 49;echo
SECRET_KEY:
# SECURITY WARNING: keep the bootstrap token used in production secret!
# 预共享Token coco和guacamole用来注册服务账号,不再使用原来的注册接受机制
BOOTSTRAP_TOKEN:
# Development env open this, when error occur display the full process track, Production disable it
# DEBUG 模式 开启DEBUG后遇到错误时可以看到更多日志
# DEBUG: true
# DEBUG, INFO, WARNING, ERROR, CRITICAL can set. See https://docs.djangoproject.com/en/1.10/topics/logging/
# 日志级别
# LOG_LEVEL: DEBUG
# LOG_DIR:
# Session expiration setting. Default 24 hours; can also be set to expire on browser close
# 浏览器Session过期时间,默认24小时, 也可以设置浏览器关闭则过期
# SESSION_COOKIE_AGE: 3600 * 24
# SESSION_EXPIRE_AT_BROWSER_CLOSE: False
# Database setting, Support sqlite3, mysql, postgres ....
# 数据库设置
# See https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# SQLite setting:
# 使用单文件sqlite数据库
# DB_ENGINE: sqlite3
# DB_NAME:
# MySQL or postgres setting like:
# 使用Mysql作为数据库
DB_ENGINE: mysql
DB_HOST: 127.0.0.1
DB_PORT: 3306
DB_USER: jumpserver
DB_PASSWORD:
DB_NAME: jumpserver
# When Django start it will bind this host and port
# ./manage.py runserver 127.0.0.1:8080
# 运行时绑定端口
HTTP_BIND_HOST: 0.0.0.0
HTTP_LISTEN_PORT: 8080
# Use Redis as broker for celery and web socket
# Redis配置
REDIS_HOST: 127.0.0.1
REDIS_PORT: 6379
# REDIS_PASSWORD:
# REDIS_DB_CELERY: 3
# REDIS_DB_CACHE: 4
# Use OpenID authorization
# 使用OpenID 来进行认证设置
# BASE_SITE_URL: http://localhost:8080
# AUTH_OPENID: false # True or False
# AUTH_OPENID_SERVER_URL: https://openid-auth-server.com/
# AUTH_OPENID_REALM_NAME: realm-name
# AUTH_OPENID_CLIENT_ID: client-id
# AUTH_OPENID_CLIENT_SECRET: client-secret
# OTP settings
# OTP/MFA 配置
# OTP_VALID_WINDOW: 0
# OTP_ISSUER_NAME: Jumpserver
...@@ -7,5 +7,10 @@ function cleanup() ...@@ -7,5 +7,10 @@ function cleanup()
fi fi
} }
# Start every service by default; a single positional argument narrows it
# to that one service.
service="${1:-all}"
trap cleanup EXIT trap cleanup EXIT
python jms start all python jms start $service
...@@ -15,9 +15,10 @@ sys.path.append(BASE_DIR) ...@@ -15,9 +15,10 @@ sys.path.append(BASE_DIR)
from apps import __version__ from apps import __version__
try: try:
from config import config as CONFIG from apps.jumpserver.conf import load_user_config
CONFIG = load_user_config()
except ImportError: except ImportError:
print("Could not find config file, `cp config_example.py config.py`") print("Could not find config file, `cp config_example.yml config.yml`")
sys.exit(1) sys.exit(1)
os.environ["PYTHONIOENCODING"] = "UTF-8" os.environ["PYTHONIOENCODING"] = "UTF-8"
...@@ -107,8 +108,7 @@ def is_running(s, unlink=True): ...@@ -107,8 +108,7 @@ def is_running(s, unlink=True):
pid_file = get_pid_file_path(s) pid_file = get_pid_file_path(s)
if os.path.isfile(pid_file): if os.path.isfile(pid_file):
with open(pid_file, 'r') as f: pid = get_pid(s)
pid = get_pid(s)
if check_pid(pid): if check_pid(pid):
return True return True
...@@ -120,12 +120,15 @@ def is_running(s, unlink=True): ...@@ -120,12 +120,15 @@ def is_running(s, unlink=True):
def parse_service(s): def parse_service(s):
if s == 'all': if s == 'all':
return all_services return all_services
elif "," in s:
return [i.strip() for i in s.split(',')]
else: else:
return [s] return [s]
def start_gunicorn(): def start_gunicorn():
print("\n- Start Gunicorn WSGI HTTP Server") print("\n- Start Gunicorn WSGI HTTP Server")
prepare()
service = 'gunicorn' service = 'gunicorn'
bind = '{}:{}'.format(HTTP_HOST, HTTP_PORT) bind = '{}:{}'.format(HTTP_HOST, HTTP_PORT)
log_format = '%(h)s %(t)s "%(r)s" %(s)s %(b)s ' log_format = '%(h)s %(t)s "%(r)s" %(s)s %(b)s '
...@@ -218,7 +221,6 @@ def start_service(s): ...@@ -218,7 +221,6 @@ def start_service(s):
print(time.ctime()) print(time.ctime())
print('Jumpserver version {}, more see https://www.jumpserver.org'.format( print('Jumpserver version {}, more see https://www.jumpserver.org'.format(
__version__)) __version__))
prepare()
services_handler = { services_handler = {
"gunicorn": start_gunicorn, "gunicorn": start_gunicorn,
...@@ -316,7 +318,7 @@ if __name__ == '__main__': ...@@ -316,7 +318,7 @@ if __name__ == '__main__':
) )
parser.add_argument( parser.add_argument(
"service", type=str, default="all", nargs="?", "service", type=str, default="all", nargs="?",
choices=("all", "gunicorn", "celery", "beat"), choices=("all", "gunicorn", "celery", "beat", "celery,beat"),
help="The service to start", help="The service to start",
) )
parser.add_argument('-d', '--daemon', nargs="?", const=1) parser.add_argument('-d', '--daemon', nargs="?", const=1)
......
...@@ -78,3 +78,4 @@ python-keycloak-client==0.1.3 ...@@ -78,3 +78,4 @@ python-keycloak-client==0.1.3
rest_condition==1.0.3 rest_condition==1.0.3
python-ldap==3.1.0 python-ldap==3.1.0
tencentcloud-sdk-python==3.0.40 tencentcloud-sdk-python==3.0.40
django-radius==1.3.3
...@@ -17,13 +17,13 @@ class UserCreation: ...@@ -17,13 +17,13 @@ class UserCreation:
self.domain = domain self.domain = domain
def auth(self): def auth(self):
url = "{}/api/users/v1/token/".format(self.domain) url = "{}/api/users/v1/auth/".format(self.domain)
data = {"username": self.username, "password": self.password} data = {"username": self.username, "password": self.password}
resp = requests.post(url, data=data) resp = requests.post(url, data=data)
if resp.status_code == 200: if resp.status_code == 200:
data = resp.json() data = resp.json()
self.headers.update({ self.headers.update({
'Authorization': '{} {}'.format(data['Keyword'], data['Token']) 'Authorization': '{} {}'.format('Bearer', data['token'])
}) })
else: else:
print("用户名 或 密码 或 地址 不对") print("用户名 或 密码 或 地址 不对")
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment