Commit 70480519 authored by 段英荣

Merge branch 'dev' into 'master'

Dev

See merge request !1
parents d26b85b2 220acc06
<?xml version="1.0" encoding="utf-8"?>
<gm_rpcd_config>
<info config_name="app" version="1.0"/>
<config name="log_dir" value="/data/log/physical/app"/>
<config name="application_name" value="physical"/>
<config name="service_list">
<element value="physical"/>
</config>
<config name="initializer_list">
<element value="physical.django_init"/>
<element value="search.views.topic"/>
<element value="search.views.pick"/>
<element value="search.views.group"/>
<element value="search.views.user"/>
</config>
</gm_rpcd_config>
from django.contrib import admin
# Register your models here.
from django.contrib import admin
# Register your models here.
# -*- coding: UTF-8 -*-
from celery import shared_task
from django.conf import settings
from django.core import serializers
from trans2es.type_info import get_type_info_map
from rpc.all import get_rpc_remote_invoker
from libs.es import ESPerform
@shared_task
def write_to_es(es_type, pk_list, configuration, use_batch_query_set=False):
pk_list = list(frozenset(pk_list))
type_info_map = get_type_info_map()
type_info = type_info_map[es_type]
type_info.insert_table_by_pk_list(
sub_index_name=es_type,
pk_list=pk_list,
use_batch_query_set=use_batch_query_set,
es=ESPerform.get_cli()
)
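# --- Hedged usage sketch (not part of the original commit) ---
# Shows how the write_to_es task could be enqueued from other code. The es_type
# value "topic" and the pk list are illustrative assumptions; valid es_type keys
# come from get_type_info_map().
from injection.data_sync.tasks import write_to_es

def sync_topic_pks(pk_list):
    # hand the primary keys to the Celery worker; CeleryTaskRouter (below) maps
    # this task name onto the "gaia-dbmw" queue
    write_to_es.delay("topic", pk_list)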
from django.test import TestCase
# Create your tests here.
from django.shortcuts import render
# Create your views here.
from django.db import models
# Create your models here.
from django.test import TestCase
# Create your tests here.
from django.shortcuts import render
# Create your views here.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import redis
from django.conf import settings
redis_client = redis.StrictRedis.from_url(settings.REDIS_URL)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import RPCDFaultException
from gm_types.doris.error import ERROR
from raven.contrib.django.raven_compat.models import client as _sentry_client
def raise_error(error_code, message=None):
assert error_code != 0
if message is None:
message = ERROR.getDesc(error_code)
raise RPCDFaultException(code=error_code, message=message)
def logging_exception(send_to_sentry=True):
try:
# send exception info to sentry, fail silently
_sentry_client.captureException()
except:
pass
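# --- Hedged usage sketch (not part of the original commit) ---
# How raise_error / logging_exception might be combined in a handler; the module
# path libs.error and the error code used here are assumptions for illustration.
from libs.error import raise_error, logging_exception

def require_topic_id(topic_id):
    try:
        if topic_id is None:
            raise_error(1, message="topic_id is required")  # code 1 is illustrative
    except Exception:
        logging_exception()  # report to sentry, then re-raise to the RPC layer
        raise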
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
import traceback
import os.path
import re
import json
from elasticsearch import Elasticsearch
import elasticsearch.helpers
class ESPerform(object):
cli_obj = None
cli_info_list = [
{
"host": "10.29.130.141",
"port": 9200
}
]
index_prefix = "gm-dbmw"
@classmethod
def get_cli(cls):
try:
# reuse the cached client instead of opening a new connection on every call
if cls.cli_obj is None:
cls.cli_obj = Elasticsearch(cls.cli_info_list)
return cls.cli_obj
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
@classmethod
def get_official_index_name(cls,sub_index_name,index_flag=None):
"""
:remark:get official es index name
:param sub_index_name:
:param index_flag:
:return:
"""
try:
assert (index_flag in [None,"read","write"])
official_index_name = cls.index_prefix + "-" + sub_index_name
if index_flag:
official_index_name += "-" + index_flag
return official_index_name
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
@classmethod
def __load_mapping(cls,doc_type):
try:
mapping_file_path = os.path.join(
os.path.dirname(__file__),
'..', 'trans2es','mapping', '%s.json' % (doc_type,))
mapping = ''
with open(mapping_file_path, 'r') as f:
for line in f:
# strip // comments so the mapping file can carry inline notes
mapping += re.sub(r'//.*$', '', line)
mapping = json.loads(mapping)
return mapping
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
@classmethod
def create_index(cls,es_cli,sub_index_name):
"""
:remark: create es index,alias index
:param sub_index_name:
:return:
"""
try:
assert (es_cli is not None)
official_index_name = cls.get_official_index_name(sub_index_name)
index_exist = es_cli.indices.exists(official_index_name)
if not index_exist:
es_cli.indices.create(official_index_name)
read_alias_name = cls.get_official_index_name(sub_index_name,"read")
es_cli.indices.put_alias(official_index_name,read_alias_name)
write_alias_name = cls.get_official_index_name(sub_index_name,"write")
es_cli.indices.put_alias(official_index_name,write_alias_name)
return True
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return False
@classmethod
def put_index_mapping(cls,es_cli,sub_index_name,mapping_type="_doc"):
"""
:remark: put index mapping
:param es_cli:
:param sub_index_name:
:param mapping_type:
:return:
"""
try:
assert (es_cli is not None)
write_alias_name = cls.get_official_index_name(sub_index_name,"write")
index_exist = es_cli.indices.exists(write_alias_name)
if not index_exist:
return False
mapping_dict = cls.__load_mapping(sub_index_name)
es_cli.indices.put_mapping(index=write_alias_name,body=mapping_dict,doc_type=mapping_type)
return True
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return False
@classmethod
def es_helpers_bulk(cls,es_cli,data_list,sub_index_name,auto_create_index=False,doc_type="_doc"):
try:
assert (es_cli is not None)
official_index_name = cls.get_official_index_name(sub_index_name)
index_exists = es_cli.indices.exists(official_index_name)
if not index_exists:
if not auto_create_index:
logging.error("index:%s is not existing,bulk data error!" % official_index_name)
return False
else:
cls.create_index(es_cli,sub_index_name)
cls.put_index_mapping(es_cli,sub_index_name)
bulk_actions = []
for data in data_list:
bulk_actions.append({
'_op_type': 'index',
'_index': official_index_name,
'_type': doc_type,
'_id': data['id'],
'_source': data,
})
elasticsearch.helpers.bulk(es_cli,bulk_actions)
return True
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return False
@classmethod
def get_search_results(cls, es_cli,sub_index_name,query_body,offset=0,size=10,
auto_create_index=False,doc_type="_doc",aggregations_query=False):
try:
assert (es_cli is not None)
official_index_name = cls.get_official_index_name(sub_index_name,"read")
index_exists = es_cli.indices.exists(official_index_name)
if not index_exists:
if not auto_create_index:
logging.error("index:%s is not existing,get_search_results error!" % official_index_name)
return None
else:
cls.create_index(es_cli,sub_index_name)
cls.put_index_mapping(es_cli,sub_index_name)
logging.info("duan add,query_body:%s" % str(query_body).encode("utf-8"))
res = es_cli.search(index=official_index_name,doc_type=doc_type,body=query_body,from_=offset,size=size)
result_dict = {
"total_count":res["hits"]["total"],
"hits":res["hits"]["hits"]
}
if aggregations_query:
result_dict["aggregations"] = res["aggregations"]
return result_dict
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"total_count":0,"hits":[]}
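# --- Hedged usage sketch (not part of the original commit) ---
# Typical ESPerform round trip: create the index plus its read/write aliases, bulk
# a couple of documents, then search through the read alias. The sub index name
# "topic" and the sample documents are illustrative only.
from libs.es import ESPerform

def demo_es_roundtrip():
    es_cli = ESPerform.get_cli()
    ESPerform.create_index(es_cli, "topic")
    ESPerform.put_index_mapping(es_cli, "topic")
    docs = [{"id": 1, "is_online": True}, {"id": 2, "is_online": False}]
    ESPerform.es_helpers_bulk(es_cli, docs, "topic")
    query = {"query": {"term": {"is_online": True}}, "_source": {"include": ["id"]}}
    return ESPerform.get_search_results(es_cli, "topic", query, offset=0, size=10)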
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function, absolute_import
import six
import random
from django.db import models
class ITableChunk(object):
def __iter__(self):
raise NotImplementedError
def get_pk_start(self):
raise NotImplementedError
def get_pk_stop(self):
raise NotImplementedError
class TableScannerChunk(ITableChunk):
def __init__(self, data_list, pk_start, pk_stop):
self._data_list = data_list
self._pk_start = pk_start
self._pk_stop = pk_stop
def __iter__(self):
return iter(self._data_list)
def get_pk_start(self):
return self._pk_start
def get_pk_stop(self):
return self._pk_stop
class TableScannerChunkIterator(object):
def __init__(self, scanner, last_pk, chunk_size):
assert isinstance(scanner, TableScanner)
self._scanner = scanner
self._last_pk = last_pk
self._chunk_size = chunk_size
def __iter__(self):
while True:
last_pk = self._last_pk
data_list, next_last_pk = self._scanner.get_next_data_list(last_pk=last_pk, chunk_size=self._chunk_size)
self._last_pk = next_last_pk
yield TableScannerChunk(data_list=data_list, pk_start=last_pk, pk_stop=next_last_pk)
class TableScannerFlattenIterator(object):
def __init__(self, scanner, last_pk):
assert isinstance(scanner, TableScanner)
self._scanner = scanner
self._last_pk = last_pk
def __iter__(self):
while True:
data_list, next_last_pk = self._scanner.get_next_data_list(last_pk=self._last_pk)
self._last_pk = next_last_pk
for data in data_list:
yield data
class TableScanner(object):
def __init__(self, queryset):
assert isinstance(queryset, models.QuerySet)
self._model = queryset.model
self._query = queryset.query
self._db_table = self._model._meta.db_table
@property
def queryset(self):
return models.QuerySet(model=self._model, query=self._query)
@property
def model_queryset(self):
return self._model.objects
def get_random_pk(self):
count = self.model_queryset.count()
if count == 0:
return None
index = random.randrange(count)
try:
return self.model_queryset.values_list('pk', flat=True)[index]
except IndexError:
return None
def get_next_data_list(self, last_pk=None, chunk_size=1):
qs = self.queryset.order_by('pk')
if last_pk is not None:
qs = qs.filter(pk__gt=last_pk)
data_list = list(qs[:chunk_size])
if len(data_list) == 0:
next_last_pk = None
else:
next_last_pk = data_list[-1].pk
return data_list, next_last_pk
def __iter__(self):
pk = self.get_random_pk()
return iter(TableScannerFlattenIterator(scanner=self, last_pk=pk))
def chunks(self, chunk_size):
pk = self.get_random_pk()
return iter(TableScannerChunkIterator(scanner=self, last_pk=pk, chunk_size=chunk_size))
class TableSlicerChunk(ITableChunk):
"""
this object can be pickled and transferred to another process.
"""
def __init__(self, model, query, pk_start, pk_stop):
self._model = model
self._query = query
self._pk_start = pk_start
self._pk_stop = pk_stop
def __iter__(self):
data_list = self.__get_range(self._model, self._query, pk_start=self._pk_start, pk_stop=self._pk_stop)
return iter(data_list)
def get_pk_start(self):
return self._pk_start
def get_pk_stop(self):
return self._pk_stop
@classmethod
def __get_range(cls, model, query, pk_start, pk_stop):
qs = models.QuerySet(model=model, query=query)
if pk_start is not None:
qs = qs.filter(pk__gte=pk_start)
if pk_stop is not None:
qs = qs.filter(pk__lt=pk_stop)
return list(qs)
class TableSlicer(object):
def __init__(self, queryset, chunk_size=None, chunk_count=None, sep_list=None):
assert isinstance(queryset, models.QuerySet)
assert chunk_size is None or isinstance(chunk_size, six.integer_types)
assert chunk_count is None or isinstance(chunk_count, six.integer_types)
assert sep_list is None or isinstance(sep_list, list)
assert (chunk_size is not None) + (chunk_count is not None) + (sep_list is not None) == 1
if sep_list is not None:
sep_list = list(sep_list)
else:
count = queryset.count()
if chunk_size is None:
chunk_size = count // chunk_count
index_list = list(range(0, count, chunk_size))
sep_list = [
queryset.order_by('pk').values_list('pk', flat=True)[index]
for index in index_list
]
self._model = queryset.model
self._query = queryset.query
self._sep_list = [None] + sep_list + [None]
def chunks(self):
reversed_sep_list = list(reversed(self._sep_list))
for i in range(len(self._sep_list) - 1):
pk_start = reversed_sep_list[i+1]
pk_stop = reversed_sep_list[i]
yield TableSlicerChunk(model=self._model, query=self._query, pk_start=pk_start, pk_stop=pk_stop)
class TableStreamingSlicer(object):
def __init__(self, queryset, chunk_size=None):
assert isinstance(queryset, models.QuerySet)
assert chunk_size is None or isinstance(chunk_size, six.integer_types)
self._model = queryset.model
self._query = queryset.query
self._chunk_size = chunk_size
self._descend = False
def chunks(self):
last_pk = None
queryset = models.QuerySet(model=self._model, query=self._query).order_by('pk')
value_list = queryset.values_list('pk', flat=True)
while True:
current_value_list = value_list
if last_pk is not None:
current_value_list = current_value_list.filter(pk__gt=last_pk)
try:
next_last_pk = current_value_list[self._chunk_size-1]
except IndexError:
next_last_pk = None
yield TableSlicerChunk(model=self._model, query=self._query, pk_start=last_pk, pk_stop=next_last_pk)
last_pk = next_last_pk
if last_pk is None:
break
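# --- Hedged usage sketch (not part of the original commit) ---
# Slicing a queryset into pk-range chunks; each TableSlicerChunk can be pickled and
# handed to a worker process. Topic is only used as an example model here.
from libs.table_scan import TableSlicer
from trans2es.models import Topic

def iter_topics_in_chunks(chunk_size=1000):
    slicer = TableSlicer(queryset=Topic.objects.all(), chunk_size=chunk_size)
    for chunk in slicer.chunks():
        for topic in chunk:  # each chunk re-runs the query for its own pk range
            yield topic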
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from pytz import timezone
from datetime import datetime
def tzlc(dt, truncate_to_sec=True):
if dt is None:
return None
if truncate_to_sec:
dt = dt.replace(microsecond=0)
if dt.tzinfo is None:
return timezone(settings.TIME_ZONE).localize(dt)
else:
return timezone(settings.TIME_ZONE).normalize(dt)
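# --- Hedged usage sketch (not part of the original commit) ---
# tzlc localizes a naive datetime to settings.TIME_ZONE (Asia/Shanghai in the
# settings diff below) and drops microseconds; the models use it before turning
# update_time into epoch values.
import time
from libs.tools import tzlc

def to_epoch_seconds(dt):
    local_dt = tzlc(dt)
    return int(time.mktime(local_dt.timetuple())) if local_dt else None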
# coding=utf-8
from __future__ import unicode_literals, print_function, absolute_import
import itertools
from django.conf import settings
class CeleryTaskRouter(object):
queue_task_map = {
"gaia-dbmw":{
'injection.data_sync.tasks.write_to_es',
}
}
# Map[TaskName, QueueName]
task_queue_map = dict(itertools.chain.from_iterable(
[(task, queue) for task in task_list]
for (queue, task_list) in queue_task_map.items()
))
def route_for_task(self, task, args=None, kwargs=None):
"""
if settings.DEBUG:
return None
if task.startswith("statistic") or task.startswith("api.tasks.export_excel_task"):
return "slow"
"""
queue_name_or_none = self.task_queue_map.get(task)
return queue_name_or_none
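# --- Hedged sketch (not part of the original commit) ---
# What the router resolves: Celery asks route_for_task for every task name;
# write_to_es lands on the "gaia-dbmw" queue, anything unmapped returns None and
# therefore falls back to the default queue.
from physical.celery_task_router import CeleryTaskRouter

router = CeleryTaskRouter()
assert router.route_for_task('injection.data_sync.tasks.write_to_es') == 'gaia-dbmw'
assert router.route_for_task('some.unmapped.task') is None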
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import django
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'physical.settings')
django.setup()
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import logging
LOG_DIR = '/data/log/physical/app/'
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s %(levelname)s %(module)s.%(funcName)s Line:%(lineno)d %(message)s',
filename=os.path.join(LOG_DIR, 'filelog.log'),
)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'formatters': {
'verbose': {
'format': '%(asctime)s %(levelname)s %(module)s.%(funcName)s Line:%(lineno)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(message)s'
},
'profile': {
'format': '%(asctime)s %(message)s'
},
'raw': {
'format': '%(message)s'
}
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
# default server log (written to log/filelog.log; rotation is handled by linux logrotate)
'default': {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(LOG_DIR, 'filelog.log'),
'formatter': 'verbose',
},
# default server ERROR log
'default_err': {
'level': 'ERROR',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(LOG_DIR, 'error_logger.log'),
'formatter': 'verbose',
},
'exception_logger': {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(LOG_DIR, 'exception_logger.log'),
'formatter': 'verbose',
},
'tracer_handler': {
'level': 'INFO',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(LOG_DIR, 'tracer.log'),
'formatter': 'raw'
},
},
'loggers': {
'django': {
'handlers': ['default'],
'propagate': True,
'level': 'INFO',
},
'django.request': {
'handlers': ['default_err'],
'level': 'ERROR',
'propagate': False,
},
'exception_logger': {
'handlers': ['exception_logger'],
'level': 'INFO',
'propagate': False,
},
'gm_tracer.subscribe': {
'handlers': ['tracer_handler'],
'propagate': False,
'level': 'INFO'
},
},
}
@@ -11,6 +11,7 @@ https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from .log_settings import *
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@@ -29,6 +30,9 @@ ALLOWED_HOSTS = []
# Application definition
BROKER_URL = "redis://127.0.0.1:6379/0"
CELERY_TIMEZONE = 'Asia/Shanghai'
CELERY_ROUTES = ['physical.celery_task_router.CeleryTaskRouter']
INSTALLED_APPS = [
'django.contrib.admin',
@@ -38,6 +42,8 @@ INSTALLED_APPS = [
'django.contrib.messages',
'django.contrib.staticfiles',
'trans2es',
'search',
'injection.data_sync'
]
MIDDLEWARE = [
@@ -70,17 +76,17 @@ TEMPLATES = [
WSGI_APPLICATION = 'physical.wsgi.application'
REDIS_URL = "redis://127.0.0.1:6379"
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'doris_test',
'ENGINE': 'django.db.backends.mysql',
'NAME': 'alpha',
'USER': 'work',
'PASSWORD': 'workwork',
'HOST': 'rdsmaqevmuzj6jy.mysql.rds.aliyuncs.com',
'PASSWORD': 'Gengmei123',
'HOST': 'rm-2ze5k2we69904787l.mysql.rds.aliyuncs.com',
'PORT': '3306',
'OPTIONS': {
"init_command": "SET foreign_key_checks = 0;",
@@ -114,7 +120,7 @@ AUTH_PASSWORD_VALIDATORS = [
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
from search.views import *
Django==2.1.3
elasticsearch==6.3.1
numpy==1.15.4
protobuf==3.6.1
scapy==2.4.0
scikit-image==0.14.1
scipy==0.19.0
urllib3==1.24.1
Werkzeug==0.14.1
from django.contrib import admin
# Register your models here.
from django.apps import AppConfig
class SearchConfig(AppConfig):
name = 'search'
from django.db import models
# Create your models here.
from django.test import TestCase
# Create your tests here.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import logging
class GroupSortTypes(object):
# sort by hotness
HOT_RECOMMEND=0
# sort by followed groups
ATTENTION_RECOMMEND=1
class PickType(object):
# celebrity pick board
CELEBRITY_PICK=0
# topic pick board
TOPIC_PICK=1
class TopicDocumentField(object):
"""
topic index field names
"""
ID = "id"
IS_ONLINE = "is_online"
TAG_LIST = "tag_list"
def time_consuming_decorator(func):
def time_consuming(*args, **kwargs):
start_time = time.time()
result = func(*args, **kwargs)
end_time = time.time()
logging.info("func consuming time:%fs" % (end_time-start_time))
return result
return time_consuming
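# --- Hedged usage sketch (not part of the original commit) ---
# Applying time_consuming_decorator: the wrapper logs the elapsed wall-clock time
# of each call. rebuild_cache is a made-up function name used only for illustration.
from search.utils.common import time_consuming_decorator

@time_consuming_decorator
def rebuild_cache():
    return "done"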
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import traceback
from libs.es import ESPerform
class GroupUtils(object):
@classmethod
def get_group_query_result(cls,query,offset,size):
try:
q = dict()
multi_fields = {
'description': 2,
'name': 4,
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'cross_fields',
'operator': 'and',
'fields': query_fields,
}
q['query'] = {
'bool': {
"must": {
"term":{
"is_online": True
}
},
"should": [
{'multi_match': multi_match}
],
"minimum_should_match": 1
}
}
q["_source"] = {
"include":["id"]
}
return ESPerform.get_search_results(ESPerform.get_cli(), "group", q, offset, size)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"total_count":0, "hits":[]}
@classmethod
def get_hot_group_recommend_result_list(cls,offset,size):
try:
q = dict()
q["query"] = {
"bool":{
"must":{
"term":{
"is_online": True
}
}
}
}
q["sort"] = [
{"high_quality_topic_num":{"order":"desc"}}
]
q["_source"] = {
"include":["id"]
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),"group",q,offset,size)
group_ids_list = []
if len(result_dict["hits"]) > 0:
group_ids_list = [item["_source"]["id"] for item in result_dict["hits"]]
return group_ids_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_user_attention_group_list(cls,user_id,offset,size):
"""
:remark: 获取用户关注小组列表
:return:
"""
try:
q = dict()
q["query"] = {
"bool":{
"should":[
{"term":{
"is_online": True
}},
{"term":{
"user_id":user_id
}}
],
"minimum_should_match":2
}
}
q["_source"] = {
"include":["attention_group_id_list"]
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),"user",q,offset,size)
if len(result_dict["hits"])>0:
return result_dict["hits"][0]["_source"]["attention_group_id_list"]
else:
return []
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_group_ids_by_aggs(cls,group_id_list):
"""
:remark: use an aggregation query to sort the given group ids by their latest topic update time
:param group_id_list:
:return:
"""
try:
q = dict()
q["size"]=0
q["query"] = {
"terms":{
"group_id":group_id_list
}
}
q["aggs"] = {
"group_ids":{
"terms":{
"field":"group_id"
},
"aggs":{
"max_date":{
"max":{
"field":"update_time_val"
}
}
}
}
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),"topic",q,aggregations_query=True)
buckets_list = result_dict["aggregations"]["group_ids"]["buckets"]
sorted_buckets_list = sorted(buckets_list,key=lambda item:item["max_date"]["value"],reverse=True)
sorted_group_id_list = [item["key"] for item in sorted_buckets_list]
return sorted_group_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import traceback
import datetime
import json
from libs.es import ESPerform
from libs.cache import redis_client
class PickUtils(object):
@classmethod
def refresh_redis_data(cls,have_read_celebrity_list,redis_cli,redis_key):
try:
now = datetime.datetime.now()
str_end_time = "%s-%s-%s 23:59:59" % (now.year, now.month, now.day)
end_time = datetime.datetime.strptime(str_end_time, "%Y-%m-%d %H:%M:%S")
valid_seconds = int((end_time - now).seconds)
redis_cli.setex(redis_key,valid_seconds,json.dumps(have_read_celebrity_list))
return True
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return False
@classmethod
def ___get_filter_term_list(cls,ori_list):
try:
should_term_list = list()
for term_id in ori_list:
term_dict = {
"term":{
"id":{"value":term_id}
}
}
should_term_list.append(term_dict)
return should_term_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_recommend_celebrity_id_list(cls,pick_id,have_read_pick_celebrity_ids,offset,size):
"""
:remark recommend celebrity IDs under the given pick (board) ID
:param pick_id:
:param have_read_pick_celebrity_ids:
:return:
"""
try:
filter_term_list = cls.___get_filter_term_list(have_read_pick_celebrity_ids)
q = dict()
q["query"] = {
"bool":{
"must_not":{
"terms":{
"id":have_read_pick_celebrity_ids
}
},
"must":[
{"term":{"pick_id_list":pick_id}},
{"term":{"is_online":True}}
]
}
}
q["_source"] = {
"include": ["id"]
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="celebrity",query_body=q,offset=offset,size=size)
recommend_pick_celebrity_list = list()
if len(result_dict["hits"]) > 0:
recommend_pick_celebrity_list = [item["_source"]["id"] for item in result_dict["hits"]]
return recommend_pick_celebrity_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_topic_pick_query(cls,query,pick_id,offset,size):
"""
:remark search the topic list under a pick (board)
:param query:
:param pick_id:
:param offset:
:param size:
:return:
"""
try:
q = dict()
multi_fields = {
'description': 2,
'name': 4,
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'cross_fields',
'operator': 'and',
'fields': query_fields,
}
q["query"] = {
"multi_match":multi_match,
"bool":{
"must":[
{"term":{"pick_id_list":pick_id}},
{"term":{"is_online":True}}
]
}
}
q["_source"] = {
"include":["id"]
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="topic",query_body=q,offset=offset,size=size)
recommend_topic_id_list = []
if len(result_dict["hits"]) > 0:
recommend_topic_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
return recommend_topic_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_celebrity_pick_query(cls,query,pick_id,offset,size):
try:
q = dict()
"""
multi_fields = {
'name': 2,
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'cross_fields',
'operator': 'and',
'fields': query_fields,
}
"""
q["query"] = {
#"multi_match":multi_match,
"match":{
"name":query
},
"bool":{
"must":[
{"term":{"pick_id_list":pick_id}},
{"term":{"is_online":True}}
]
}
}
q["_source"] = {
"include":["id"]
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="celebrity",query_body=q,offset=offset,size=size)
recommend_celebrity_id_list = []
if len(result_dict["hits"]) > 0:
recommend_celebrity_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
return recommend_celebrity_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import traceback
import json
import time
from libs.es import ESPerform
class UserUtils(object):
@classmethod
def get_attention_user_list(cls,user_id_list,self_user_id):
"""
:remark get the followed-user lists of the given user ids
:param user_id_list:
:return:
"""
try:
q = dict()
q["query"] = {
"terms":{
"user_id":user_id_list
}
}
q["_source"] = {
"include":["attention_user_id_list","user_id"]
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), "user", q, offset=0, size=len(user_id_list))
self_attention_user_id_list = []
recursion_attention_user_id_list = []
for hit_item in result_dict["hits"]:
if hit_item["_source"]["user_id"] == self_user_id:
self_attention_user_id_list = [item["user_id"] for item in hit_item["_source"]["attention_user_id_list"]]
else:
recursion_attention_user_id_list = [item["user_id"] for item in hit_item["_source"]["attention_user_id_list"]]
return (self_attention_user_id_list,recursion_attention_user_id_list)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return ([],[])
@classmethod
def ___get_should_term_list(cls,ori_list,field_name="tag_list"):
try:
should_term_list = list()
for term_id in ori_list:
term_dict = {
"term":{
field_name:{"value":term_id}
}
}
should_term_list.append(term_dict)
return should_term_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_recommend_user_list(cls,self_attention_user_id_list,recursion_attention_user_id_list,offset,size):
"""
:remark get the recommended user list
:param self_attention_user_id_list:
:param recursion_attention_user_id_list:
:return:
"""
try:
q = dict()
q["query"] = dict()
recursion_attention_user_list = cls.___get_should_term_list(recursion_attention_user_id_list,field_name="user_id")
functions_list = [
{
"filter":{
"bool":{
"should":recursion_attention_user_list
}
},
"weight":10
},
{
"gauss": {
"latest_topic_time_val": {
"origin": int(time.time()),
"scale": "600",
"decay": 0.1
}
}
}
]
query_function_score = {
"query": {
"bool": {
"should": [
{"term": {"is_recommend": True}},
{"term": {"is_online": True}},
],
"minimum_should_match":2,
"must_not":{
"terms":{
"user_id":self_attention_user_id_list
}
}
}
},
"score_mode": "sum",
"boost_mode": "sum",
"functions": functions_list
}
q["query"]["function_score"] = query_function_score
q["_source"] = {
"include":["user_id"]
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="user", query_body=q,
offset=offset, size=size)
recommend_user_list = list()
for item in result_dict["hits"]:
recommend_user_list.append(item["_source"]["user_id"])
return recommend_user_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
from django.shortcuts import render
# Create your views here.
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.group import GroupUtils
from search.utils.common import GroupSortTypes
@bind("physical/search/query_group")
def query_group(query="",offset=0,size=10):
"""
:remark: group search; the ranking strategy is still missing
:param query:
:param offset:
:param size:
:return:
"""
try:
result_dict = GroupUtils.get_group_query_result(query,offset,size)
group_ids_list = []
if len(result_dict["hits"]) > 0:
group_ids_list = [item["_source"]["id"] for item in result_dict["hits"]]
return {"group_ids": group_ids_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"group_ids":[]}
@bind("physical/search/group_sort")
def group_sort(user_id=-1,sort_type=GroupSortTypes.HOT_RECOMMEND,offset=0,size=10):
"""
:remark group ranking; still missing: (number of users who commented in the previous day) * x
:param user_id:
:param sort_type:
:param offset:
:param size:
:return:
"""
try:
if sort_type==GroupSortTypes.HOT_RECOMMEND:
group_ids_list = GroupUtils.get_hot_group_recommend_result_list(offset,size)
return {"group_recommend_ids":group_ids_list}
elif sort_type==GroupSortTypes.ATTENTION_RECOMMEND:
attention_group_list = GroupUtils.get_user_attention_group_list(user_id,offset,size)
if len(attention_group_list)==0:
return {"group_recommend_ids": []}
else:
attention_group_id_list = [item["group_id"] for item in attention_group_list]
sorted_group_ids_list = GroupUtils.get_group_ids_by_aggs(attention_group_id_list)
group_recommend_ids_list = sorted_group_ids_list
if len(group_recommend_ids_list) < size and len(group_recommend_ids_list)<len(attention_group_list):
sorted_attention_group_list = sorted(attention_group_list,key=lambda item:item["update_time_val"],reverse=True)
for item in sorted_attention_group_list:
if item["group_id"] not in group_recommend_ids_list:
group_recommend_ids_list.append(item["group_id"])
return {"group_recommend_ids": group_recommend_ids_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"group_recommend_ids":[]}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.pick import PickUtils
from search.utils.common import PickType
@bind("physical/search/pick_celebrity")
def pick_celebrity(user_id,pick_id,offset=0,size=10):
try:
have_read_pick_celebrity_ids = list()
redis_key = "physical:pick_celebrity:" + "user_id:" + str(user_id) + ":pick_id:" + str(pick_id)
if user_id != -1:
try:
redis_celebrity_info = redis_client.get(redis_key)
if redis_celebrity_info:
have_read_pick_celebrity_ids = json.loads(redis_celebrity_info)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
pass
recommend_pick_celebrity_list = PickUtils.get_recommend_celebrity_id_list(pick_id,have_read_pick_celebrity_ids,0,size)
if len(recommend_pick_celebrity_list)>0 and user_id!=-1:
total_read_celebrity_ids = have_read_pick_celebrity_ids + recommend_pick_celebrity_list
PickUtils.refresh_redis_data(have_read_celebrity_list=total_read_celebrity_ids,
redis_cli=redis_client,redis_key=redis_key)
return {"recommend_pick_celebrity_ids": recommend_pick_celebrity_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_pick_celebrity_ids":[]}
@bind("physical/search/pick_topic")
def pick_topic(user_id,pick_id,offset=0,size=10):
try:
have_read_pick_topic_ids = list()
redis_key = "physical:pick_topic:" + "user_id:" + str(user_id) + ":pick_id:" + str(pick_id)
if user_id != -1:
try:
redis_celebrity_info = redis_client.get(redis_key)
if redis_celebrity_info:
have_read_pick_topic_ids = json.loads(redis_celebrity_info)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
pass
q = dict()
functions_list = [
{
"filter": {"match": {"content_level": 5}},
"weight": 5
},
{
"filter": {"match": {"content_level": 4}},
"weight": 3
},
{
"filter": {"match": {"content_level": 3}},
"weight": 2
}
]
query_function_score = {
"query":{
"bool":{
"must":[
{"term":{"pick_id_list":pick_id}},
{"range": {"content_level": {"gte": 3, "lte": 5}}}
],
"must_not": {
"terms": {
"id": have_read_pick_topic_ids
}
},
}
},
"score_mode": "sum",
"boost_mode": "sum",
"functions":functions_list
}
q["query"] = {
"function_score":query_function_score
}
q["sort"] = [
"_score"
]
q["_source"] = {
"include":["id"]
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="topic",query_body=q,offset=0,size=size)
recommend_pick_topic_list = []
if len(result_dict["hits"]) > 0:
recommend_pick_topic_list = [item["_source"]["id"] for item in result_dict["hits"]]
if len(recommend_pick_topic_list)>0 and user_id!=-1:
total_read_topic_ids = have_read_pick_topic_ids + recommend_pick_topic_list
PickUtils.refresh_redis_data(have_read_celebrity_list=total_read_topic_ids,
redis_cli=redis_client,redis_key=redis_key)
return {"recommend_pick_topic_ids": recommend_pick_topic_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_pick_topic_ids":[]}
@bind("physical/search/pick_query")
def pick_query(query="",pick_id=-1,query_type=PickType.TOPIC_PICK,offset=0,size=10):
"""
:remark pick (board) search
:param query:
:param pick_id:
:param query_type:
:param offset:
:param size:
:return:
"""
try:
pick_query_result_list = []
if query_type == PickType.TOPIC_PICK:
pick_query_result_list = PickUtils.get_topic_pick_query(query,pick_id,offset,size)
elif query_type == PickType.CELEBRITY_PICK:
pick_query_result_list = PickUtils.get_celebrity_pick_query(query,pick_id,offset,size)
return {"pick_query_result_list":pick_query_result_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"pick_query_result_list":[]}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from search.utils.topic import TopicUtils
from libs.es import ESPerform
from libs.cache import redis_client
from search.utils.common import *
@bind("physical/search/home_recommend")
def home_recommend(session_id="",user_id=-1,offset=0,size=10):
"""
:remark: home page recommendation; currently only topics (diaries) are recommended
:param session_id:
:param user_id:
:param offset:
:param size:
:return:
"""
try:
if not user_id:
user_id=-1
if not isinstance(session_id,str):
session_id = ""
redis_key = "physical:home_recommend" + ":user_id:" + str(user_id) + ":session_id:" + session_id
redis_val_dict = redis_client.hgetall(redis_key)
recommend_topic_ids = []
topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size*size)
have_read_group_id_tuple = set()
unread_topic_id_list = list()
have_read_topic_id_tuple = set()
if len(redis_val_dict)>0:
topic_id_list = json.loads(redis_val_dict[b"unread_topic_id"]) + topic_id_list
have_read_topic_id_tuple = set(json.loads(redis_val_dict[b"have_read_topic_id"]))
for item in topic_id_list:
if item["group_id"] in have_read_group_id_tuple:
unread_topic_id_list.append(item)
else:
if item["id"] not in have_read_topic_id_tuple:
recommend_topic_ids.append(item["id"])
if isinstance(item["group_id"],int) and item["group_id"]>0:
have_read_group_id_tuple.add(item["group_id"])
have_read_topic_id_tuple.add(item["id"])
if len(recommend_topic_ids) >= size:
break
if len(recommend_topic_ids) < size and len(unread_topic_id_list)>0:
recommend_len = len(recommend_topic_ids)
offi_unread_topic_id = [item["id"] for item in unread_topic_id_list[:(size-recommend_len)]]
recommend_topic_ids = recommend_topic_ids + offi_unread_topic_id
unread_topic_id_list = unread_topic_id_list[(size-recommend_len):]
if len(unread_topic_id_list)>0:
redis_dict = {
"unread_topic_id":json.dumps(unread_topic_id_list),
"have_read_topic_id":json.dumps(list(have_read_topic_id_tuple))
}
redis_client.hmset(redis_key,redis_dict)
# keep each session key for 15 minutes
redis_client.expire(redis_key,15*60)
return {"recommend_topic_ids":recommend_topic_ids}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_topic_ids": []}
@bind("physical/search/home_query")
def home_query(session_id="",user_id=-1,query="",offset=0,size=10):
"""
:remark: home page search; currently only topics (diaries) are recommended
:param session_id:
:param user_id:
:param query:
:param offset:
:param size:
:return:
"""
try:
if not user_id:
user_id=-1
if not isinstance(session_id,str):
session_id = ""
redis_key = "physical:home_query" + ":user_id:" + str(user_id) + ":session_id:" + session_id
redis_val_dict = redis_client.hgetall(redis_key)
recommend_topic_ids = []
topic_id_list = TopicUtils.get_recommend_topic_ids(user_id, offset, size*size,query)
have_read_group_id_tuple = set()
unread_topic_id_list = list()
have_read_topic_id_tuple = set()
if len(redis_val_dict)>0:
topic_id_list = json.loads(redis_val_dict[b"unread_topic_id"]) + topic_id_list
have_read_topic_id_tuple = set(json.loads(redis_val_dict[b"have_read_topic_id"]))
for item in topic_id_list:
if item["group_id"] in have_read_group_id_tuple:
unread_topic_id_list.append(item)
else:
if item["id"] not in have_read_topic_id_tuple:
recommend_topic_ids.append(item["id"])
if isinstance(item["group_id"],int) and item["group_id"]>0:
have_read_group_id_tuple.add(item["group_id"])
have_read_topic_id_tuple.add(item["id"])
if len(recommend_topic_ids) >= size:
break
if len(recommend_topic_ids) < size and len(unread_topic_id_list)>0:
recommend_len = len(recommend_topic_ids)
offi_unread_topic_id = [item["id"] for item in unread_topic_id_list[:(size-recommend_len)]]
recommend_topic_ids = recommend_topic_ids + offi_unread_topic_id
unread_topic_id_list = unread_topic_id_list[(size-recommend_len):]
if len(unread_topic_id_list)>0:
redis_dict = {
"unread_topic_id":json.dumps(unread_topic_id_list),
"have_read_topic_id":json.dumps(list(have_read_topic_id_tuple))
}
redis_client.hmset(redis_key,redis_dict)
# keep each session key for 15 minutes
redis_client.expire(redis_key,15*60)
return {"recommend_topic_ids":recommend_topic_ids}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_topic_ids": []}
@bind("physical/search/topic_detail_page_recommend")
def topic_detail_page_recommend(user_id=-1,topic_id=-1,topic_group_id=-1,topic_user_id=-1,offset=0,size=10):
"""
:remark: topic detail page recommendation; the first-card strategy is still missing
:param user_id:
:param topic_id:
:param topic_group_id:
:param topic_user_id:
:return:
"""
try:
# get the topic's tag id list
topic_tag_list = TopicUtils.get_topic_tag_id_list(topic_id)
result_list = TopicUtils.get_topic_detail_recommend_list(user_id,topic_id,topic_tag_list,topic_group_id,topic_user_id,offset,size)
recommend_topic_ids_list = list()
if len(result_list)>0:
recommend_topic_ids_list = [item["_source"]["id"] for item in result_list]
return {"recommend_topic_ids": recommend_topic_ids_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_topic_ids": []}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from search.utils.topic import TopicUtils
from libs.es import ESPerform
from libs.cache import redis_client
from search.utils.user import UserUtils
from search.utils.common import *
@bind("physical/search/recommend_user")
def recommend_user(self_user_id,interesting_user_id,offset=0,size=10):
"""
:remark recommend users on the follow action
:param self_user_id:
:param interesting_user_id:
:param offset:
:param size:
:return:
"""
try:
#get the followed-user lists
(self_attention_user_id_list,recursion_attention_user_id_list) = UserUtils.get_attention_user_list([self_user_id,interesting_user_id],self_user_id)
recommend_user_list = UserUtils.get_recommend_user_list(self_attention_user_id_list,recursion_attention_user_id_list,offset,size)
return {"recommend_user_ids": recommend_user_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_user_ids":[]}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import django.db.models
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
import six
import sys
from libs.es import ESPerform
import trans2es.models as md
from trans2es.utils import topic_transfer
from libs.table_scan import TableSlicer,TableSlicerChunk
from trans2es.type_info import get_type_info_map,TypeInfo
class Job(object):
__es = None
def __init__(self, sub_index_name, type_name, chunk):
assert isinstance(sub_index_name, six.string_types)
assert isinstance(type_name, six.string_types)
assert isinstance(chunk, TableSlicerChunk)
self._sub_index_name = sub_index_name
self._type_name = type_name
self._chunk = chunk
@classmethod
def get_es(cls):
if cls.__es is None:
cls.__es = ESPerform().get_cli()
return cls.__es
def __call__(self):
type_info = get_type_info_map()[self._type_name]
assert isinstance(type_info, TypeInfo)
result = type_info.insert_table_chunk(
sub_index_name=self._sub_index_name,
table_chunk=self._chunk,
es=self.get_es(),
)
class Command(BaseCommand):
args = ''
help = 'dump data to elasticsearch, parallel'
# optparse-style option_list was removed in Django 1.10+; with Django 2.1 the
# options are declared through add_arguments instead.
def add_arguments(self, parser):
parser.add_argument('-t', '--type', dest='type', default='', help='type name to dump data to elasticsearch', metavar='TYPE')
parser.add_argument('-i', '--index-prefix', dest='index_prefix', help='index name to dump data to elasticsearch', metavar='INDEX_PREFIX')
parser.add_argument('-p', '--parallel', dest='parallel', help='parallel process count', metavar='PARALLEL')
parser.add_argument('-s', '--pks', dest='pks', default='', help='specify sync pks, comma separated', metavar='PKS')
parser.add_argument('--streaming-slicing', dest='streaming_slicing', action='store_true', default=True)
parser.add_argument('--no-streaming-slicing', dest='streaming_slicing', action='store_false', default=True)
def handle(self, *args, **options):
try:
type_name_list = get_type_info_map().keys()
for type_name in type_name_list:
if len(options["type"]) and type_name!=options["type"]:
logging.warning("type_name:%s can not need to execute!" % type_name)
continue
type_info = get_type_info_map()[type_name]
query_set = type_info.queryset
slicer = TableSlicer(queryset=query_set, chunk_size=type_info.bulk_insert_chunk_size)
for chunk in slicer.chunks():
job = Job(
sub_index_name=type_name,
type_name=type_name,
chunk=chunk,
)
job()
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
class Command(BaseCommand):
def handle(self, *args, **options):
try:
es_obj = ESPerform()
es_cli = es_obj.get_cli()
es_obj.create_index(es_cli,"topic")
es_obj.put_index_mapping(es_cli=es_cli,sub_index_name="topic")
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
{
"dynamic":"strict",
"properties": {
"id":{"type":"long"},
"is_online":{"type":"boolean"},//上线
"is_deleted":{"type":"boolean"},
"portrait":{"type":"text"},
"name":{"type":"text"},
"description":{"type":"text"},
"gender":{"type":"integer"},
"city_id":{"type":"text"},
"pick_id_list":{"type":"long"},
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"}
}
}
{
"dynamic":"strict",
"properties": {
"id":{"type":"long"},
"is_online":{"type":"boolean"},//上线
"is_deleted":{"type":"boolean"},
"is_recommend":{"type":"boolean"},
"name":{"type":"text"},
"description":{"type":"text"},
"topic_num":{"type":"long"},
"creator_id":{"type":"long"},
"icon":{"type":"text"},
"high_quality_topic_num":{"type":"long"},//前一天该小组4&5星帖子数量
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"}
}
}
{
"dynamic": "strict",
"properties":{
"id":{"type":"long"},
"rank":{"type":"long"},
"celebrity_id":{"type":"long"},
"pick_id":{"type":"long"},
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"}
}
}
{
"dynamic":"strict",
"properties": {
"id":{"type":"long"},
"is_online":{"type":"boolean"},//上线
"vote_num":{"type":"long"},
"reply_num":{"type":"long"},
"name":{"type":"text"},
"description":{"type":"text"},
"content":{"type":"text"},
"content_level":{"type":"text"},
"user_id":{"type":"long"},
"group_id":{"type":"long"}, //所在组ID
"tag_list":{"type":"long"},//标签属性
"share_num":{"type":"long"},
"pick_id_list":{"type":"long"},
"offline_score":{"type":"double"},//离线算分
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"},
"create_time_val":{"type":"long"},
"update_time_val":{"type":"long"}
}
}
{
"dynamic":"strict",
"properties": {
"id":{"type":"long"},
"user_id":{"type":"long"},
"nick_name":{"type":"text"}, //昵称
"profile_pic":{"type":"text"}, //头像
"gender":{"type":"integer"},
"is_online":{"type":"boolean"},//是否上线
"tag_list":{"type":"long"},//标签属性
"city_id":{"type":"long"},
"country_id":{"type":"long"},
"is_recommend":{"type":"boolean"},//是否运营推荐用户
"is_shadow":{"type":"boolean"},//是否马甲用户
"latest_topic_time_val":{"type":"long"},//最近发帖时间
"attention_user_id_list":{//关注用户列表
"type":"nested",
"properties":{
"user_id":{"type":"long"},
"country_id":{"type":"long"}
}
},
"pick_user_id_list":{//pick用户列表
"type":"nested",
"properties":{
"user_id":{"type":"long"},
"country_id":{"type":"long"}
}
},
"same_group_user_id_list":{//同组用户列表
"type":"nested",
"properties":{
"user_id":{"type":"long"},
"country_id":{"type":"long"}
}
},
"attention_group_id_list":{//关注小组列表
"type":"nested",
"properties":{
"group_id":{"type":"long"},
"update_time_val":{"type":"long"}
}
},
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"},
"create_time_val":{"type":"long"},
"update_time_val":{"type":"long"}
}
}
from .user_follow import UserFollow
from .topic import Topic
from .user import User
from .pickuserrecord import PickUserRecord
from .group import Group
from .pick_topic import PickTopic
from .tag import TopicTag
from .user_extra import UserExtra
from .group_user_role import GroupUserRole
from .tag import AccountUserTag
from .user import User
from .group import Group
from .topic import Topic
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
from .pick_celebrity import PickCelebrity
class Celebrity(models.Model):
class Meta:
verbose_name = u'明星表'
db_table = 'celebrity'
id = models.IntegerField(verbose_name="主键ID",primary_key=True)
is_deleted = models.BooleanField(verbose_name="是否删除")
name = models.CharField(verbose_name=u'名称', max_length=300, default='')
portrait = models.CharField(verbose_name=u'肖像', max_length=100, default='')
gender = models.SmallIntegerField(verbose_name=u'性别')
city_id = models.IntegerField(verbose_name=u'城市id')
desc = models.IntegerField(verbose_name='')
is_online = models.BooleanField(verbose_name="是否上线")
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
def get_pick_id_list(self):
try:
pick_id_list = list()
query_results = PickCelebrity.objects.filter(celebrity_id=self.id,is_deleted=False)
for data_item in query_results:
pick_id_list.append(data_item.pick_id)
return list(tuple(pick_id_list))
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
class Group(models.Model):
class Meta:
verbose_name = u'小组'
db_table = 'group'
id = models.IntegerField(verbose_name=u"小组ID",primary_key=True)
name = models.CharField(verbose_name=u'名称', max_length=100, default='')
description = models.CharField(verbose_name=u'描述', max_length=200, default='')
creator_id = models.IntegerField(verbose_name=u'创建者ID')
#icon = ImgUrlField('图标', max_length=255, img_type=IMG_TYPE.WATERMARK, default='')
icon = models.CharField(verbose_name=u'图标', max_length=128, default='')
topic_num = models.IntegerField(verbose_name=u'帖子数', default=0)
is_online = models.BooleanField(verbose_name=u"是否有效", default=True)
is_deleted = models.BooleanField(verbose_name="是否删除")
is_recommend = models.BooleanField(verbose_name=u"是否推荐", default=False)
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
#get the number of 4/5-star topics posted in this group on the previous day
def get_high_quality_topic_num(self):
yesterday = datetime.datetime.now()-datetime.timedelta(days=1)
yesterday_begin_time = "%s-%s-%s 00:00:00" % (yesterday.year, yesterday.month, yesterday.day)
yesterday_end_time = "%s-%s-%s 23:59:59" % (yesterday.year, yesterday.month, yesterday.day)
topic_num = self.group_topics.filter(content_level__in=("4","5"),create_time__gte=yesterday_begin_time,create_time__lte=yesterday_end_time).count()
return topic_num
def detail(self):
result = {
'id': self.id,
'name': self.name,
'description': self.description,
'creator_id': self.creator_id,
'icon': self.icon,
'topic_num': self.topic_num,
'create_time': self.create_time.timestamp(),
'update_time': self.update_time.timestamp(),
}
return result
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
from .group import Group
class GroupUserRole(models.Model):
class Meta:
verbose_name = u'group_user_role'
db_table = 'group_user_role'
user_id = models.BigIntegerField(verbose_name=u'用户ID')
#group_id = models.BigIntegerField(verbose_name=u'组ID')
group = models.ForeignKey(
Group, verbose_name=u"关联的小组", null=True, blank=True, default=None, on_delete=models.CASCADE)
is_online = models.BooleanField(verbose_name=u"是否有效", default=True, db_index=True)
role_id = models.SmallIntegerField(verbose_name=u'角色ID')
invite_num = models.IntegerField(verbose_name=u'邀请数量', default=0)
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
class PickCelebrity(models.Model):
class Meta:
verbose_name=u"pick明星"
db_table = u"pickcelebrity"
id = models.IntegerField(primary_key=True,verbose_name=u"主键ID")
is_deleted = models.BooleanField(verbose_name="是否删除")
celebrity_id = models.IntegerField(verbose_name=u"明星ID")
pick_id = models.IntegerField(verbose_name=u"榜ID")
rank = models.IntegerField(verbose_name=u"排名")
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
class PickTopic(models.Model):
class Meta:
verbose_name=u"pick帖子"
db_table = u"picktopic"
id = models.IntegerField(primary_key=True,verbose_name=u"主键ID")
is_deleted = models.BooleanField(verbose_name="是否删除")
topic_id = models.IntegerField(verbose_name=u"帖子ID")
pick_id = models.IntegerField(verbose_name=u"榜ID")
rank = models.IntegerField(verbose_name=u"排名")
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
from .user import User
class PickUserRecord(models.Model):
class Meta:
verbose_name = u'用户pick帖子'
db_table = 'picktopicrecord'
picktopic_id = models.IntegerField(verbose_name="pick topic id")
is_deleted = models.BooleanField(verbose_name="是否删除")
is_pick = models.BooleanField(verbose_name="is_pick")
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
user = models.ForeignKey(User,related_name="user_pick",verbose_name="pick用户信息",on_delete=models.CASCADE)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
class TopicTag(models.Model):
class Meta:
verbose_name=u"帖子标签"
db_table="community_topictag"
id = models.IntegerField(primary_key=True,verbose_name=u"主键ID")
tag_id = models.IntegerField(verbose_name=u"标签ID")
topic_id = models.IntegerField(verbose_name=u"帖子ID")
is_online = models.BooleanField(verbose_name=u"是否在线")
class AccountUserTag(models.Model):
class Meta:
verbose_name=u"用户标签"
db_table="account_user_tag"
id = models.IntegerField(primary_key=True,verbose_name=u"主键ID")
tag_id = models.IntegerField(verbose_name=u"标签ID")
user_id = models.IntegerField(verbose_name=u"用户ID")
is_deleted = models.BooleanField(verbose_name=u"是否删除")
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
class Tag(models.Model):
class Meta:
verbose_name=u"标签"
db_table="community_tag"
id = models.IntegerField(primary_key=True,verbose_name=u"主键ID")
is_deleted = models.BooleanField(verbose_name=u"是否删除")
name = models.CharField(verbose_name=u"标签名称",max_length=128)
description = models.TextField(verbose_name=u"标签描述")
icon_url=models.CharField(verbose_name=u"icon_url",max_length=120)
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
from .pick_topic import PickTopic
from .tag import TopicTag
from .user_extra import UserExtra
from .group import Group
class ActionSumAboutTopic(models.Model):
class Meta:
verbose_name=u"帖子埋点数据汇总"
db_table="action_sum_about_topic"
partiton_date = models.CharField(verbose_name=u"日期",max_length=20)
device_id = models.CharField(verbose_name=u"用户设备号",max_length=50)
topic_id = models.CharField(verbose_name=u"帖子ID",max_length=50)
user_id = models.CharField(verbose_name=u"用户ID",max_length=50)
data_type = models.IntegerField(verbose_name=u"动作类型")
data_value = models.BigIntegerField(verbose_name=u"值")
class Topic(models.Model):
class Meta:
verbose_name = u'日记'
db_table = 'topic'
id = models.IntegerField(verbose_name='日记ID',primary_key=True)
name = models.CharField(verbose_name='日记名称',max_length=100)
#group_id = models.IntegerField(verbose_name='用户所在组ID',default=-1)
group = models.ForeignKey(
Group, verbose_name=u"关联的小组", related_name=u"group_topics",null=True, blank=True, default=None, on_delete=models.CASCADE)
user_id = models.IntegerField(verbose_name='用户ID')
description = models.CharField(verbose_name='日记本描述',max_length=200)
content = models.CharField(verbose_name='日记本内容',max_length=1000)
share_num = models.IntegerField(verbose_name='')
vote_num = models.IntegerField(verbose_name='点赞数')
reply_num = models.IntegerField(verbose_name='回复数')
cover = models.CharField(verbose_name='',max_length=200)
is_online = models.BooleanField(verbose_name='是否上线')
is_deleted = models.BooleanField(verbose_name='是否删除')
content_level = models.CharField(verbose_name='内容等级',max_length=3)
create_time = models.DateTimeField(verbose_name=u'日记创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'日记更新时间', default=datetime.datetime.fromtimestamp(0))
def get_pick_id_info(self):
try:
pick_id_list = list()
query_list = PickTopic.objects.filter(topic_id=self.id,is_deleted=False)
for item in query_list:
pick_id_list.append(item.pick_id)
return tuple(pick_id_list)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return ()
def get_topic_tag_id_list(self):
try:
topic_tag_id_list = list()
query_results = TopicTag.objects.filter(topic_id=self.id)
for item in query_results:
topic_tag_id_list.append(item.tag_id)
return topic_tag_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
def get_topic_offline_score(self):
try:
offline_score = 0.0
user_is_shadow = False
# is the author an officially recommended user?
user_query_results = UserExtra.objects.filter(user_id=self.user_id)
if user_query_results.count() > 0:
if user_query_results[0].is_recommend:
offline_score += 2.0
elif user_query_results[0].is_shadow:
user_is_shadow = True
# is it an officially recommended group?
if self.group and self.group.is_recommend:
offline_score += 4.0
# topic content level
if self.content_level == '5':
offline_score += 5.0
elif self.content_level == '4':
offline_score += 3.0
elif self.content_level == '3':
offline_score += 2.0
exposure_count = ActionSumAboutTopic.objects.filter(topic_id=self.id,data_type=1).count()
click_count = ActionSumAboutTopic.objects.filter(topic_id=self.id, data_type=2).count()
uv_num = ActionSumAboutTopic.objects.filter(topic_id=self.id,data_type=3).count()
if exposure_count>0:
offline_score += float(click_count) / exposure_count  # CTR component; float() guards against integer division under Python 2
if uv_num>0:
offline_score += (float(self.vote_num) / uv_num + float(self.reply_num) / uv_num)  # engagement per unique visitor
"""
1:马甲账号是否对总分降权?
"""
if user_is_shadow:
offline_score = offline_score*0.5
return offline_score
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0.0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
import time
from libs.tools import tzlc
from .group_user_role import GroupUserRole
from .tag import AccountUserTag
from .topic import Topic
from .user_extra import UserExtra
class User(models.Model):
class Meta:
verbose_name = u'用户'
db_table = 'account_user'
id = models.IntegerField(verbose_name="主键ID",primary_key=True)
user_id = models.BigIntegerField(verbose_name=u'用户id', unique=True)
nick_name = models.CharField(verbose_name=u'昵称', max_length=255, default='')
profile_pic = models.CharField(verbose_name=u'头像', max_length=300)
gender = models.SmallIntegerField(verbose_name=u'性别')
city_id = models.IntegerField(verbose_name=u'城市id')
country_id = models.IntegerField(verbose_name='国家id')
is_online = models.BooleanField(verbose_name="是否上线")
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
def get_is_recommend_flag(self):
is_shadow = False
is_recommend = False
query_sql = UserExtra.objects.filter(user_id=self.user_id)
for record in query_sql:
is_recommend = record.is_recommend
is_shadow = record.is_shadow
return (is_recommend,is_shadow)
def get_latest_topic_time_val(self):
# get the timestamp of this user's most recent topic post
latest_topic_time_val = -1
topic_records = Topic.objects.filter(user_id=self.user_id).order_by("-update_time")
check_index = 0
for record in topic_records:
topic_update_time = record.update_time
tzlc_topic_update_time = tzlc(topic_update_time)
latest_topic_time_val = int(time.mktime(tzlc_topic_update_time.timetuple()))
check_index += 1
if check_index >= 1:
break
return latest_topic_time_val
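# users this user follows, as a list of {"user_id": ..., "country_id": ...} dicts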
def get_follow_user_id_list(self):
follow_user_id_list = list()
user_follows = self.userfollow.filter(is_online=True)
for user_follow in user_follows:
follow_user_id_list.append(user_follow.follow_id)
follow_user_detail_list = list()
sql_data_list = User.objects.filter(user_id__in=follow_user_id_list)
for detail_data in sql_data_list:
item = {
"user_id":detail_data.user_id,
"country_id":detail_data.country_id
}
follow_user_detail_list.append(item)
return follow_user_detail_list
def get_attention_group_id_list(self):
try:
attention_group_id_list = list()
query_results = GroupUserRole.objects.filter(is_online=True,user_id=self.user_id)
for item in query_results:
item_dict = {
"group_id": item.group_id,
"update_time_val":time.mktime(tzlc(item.update_time).timetuple())
}
attention_group_id_list.append(item_dict)
return attention_group_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
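# authors of the topics this user has picked, as a list of {"user_id": ..., "country_id": ...} dicts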
def get_pick_user_id_list(self):
pick_topic_id_list = list()
user_picks = self.user_pick.filter(is_deleted=False,is_pick=True)
for user_pick in user_picks:
pick_topic_id_list.append(user_pick.picktopic_id)
pick_user_id_list = []
topic_sql_list = Topic.objects.filter(id__in=pick_topic_id_list)
for topic_data in topic_sql_list:
pick_user_id_list.append(topic_data.user_id)
pick_user_id_list = tuple(pick_user_id_list)
pick_user_detail_list = list()
sql_data_list = User.objects.filter(user_id__in=pick_user_id_list)
for detail_data in sql_data_list:
item = {
"user_id":detail_data.user_id,
"country_id":detail_data.country_id
}
pick_user_detail_list.append(item)
return pick_user_detail_list
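# members of every group this user belongs to, as a list of {"user_id": ..., "country_id": ...} dicts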
def get_same_group_user_id_list(self):
same_group_user_id_list = list()
group_items_list = GroupUserRole.objects.filter(user_id=self.user_id)
for group_item in group_items_list:
group_id = group_item.group_id
user_items_list = GroupUserRole.objects.filter(group_id=group_id)
for user_item in user_items_list:
same_group_user_id_list.append(user_item.user_id)
same_group_detail_list = list()
sql_data_list = User.objects.filter(user_id__in=same_group_user_id_list)
for detail_data in sql_data_list:
item = {
"user_id":detail_data.user_id,
"country_id":detail_data.country_id
}
same_group_detail_list.append(item)
return same_group_detail_list
def get_user_tag_id_list(self):
try:
user_tag_id_list = list()
query_results = AccountUserTag.objects.filter(user_id=self.user_id)
for item in query_results:
user_tag_id_list.append(item.tag_id)
return user_tag_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
class UserExtra(models.Model):
class Meta:
verbose_name=u"推荐用户表"
db_table="user_extra"
id = models.IntegerField(verbose_name="主键ID",primary_key=True)
user_id = models.BigIntegerField(verbose_name=u"用户ID")
is_shadow = models.BooleanField(verbose_name=u"是否是马甲账户")
is_online = models.BooleanField(verbose_name=u"是否上线")
is_recommend = models.BooleanField(verbose_name=u"是否推荐")
has_answered = models.BooleanField(verbose_name=u"")
is_deleted = models.BooleanField(verbose_name=u"")
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
from libs.es import ESPerform
from django.db import models
import datetime
from .user import User
class UserFollow(models.Model):
class Meta:
verbose_name = u'用户关注'
db_table = 'user_follow'
user = models.ForeignKey(User,related_name='userfollow',to_field="user_id",on_delete=models.CASCADE)
follow_id = models.IntegerField(verbose_name="关注的用户ID")
is_online = models.BooleanField(verbose_name="是否上线")
is_deleted = models.BooleanField(verbose_name="是否删除")
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
import traceback
from libs.tools import tzlc
class CelebrityTransfer(object):
@classmethod
def get_celebrity_data(cls,instance):
try:
res = dict()
res["id"] = instance.id
res["is_online"] = instance.is_online
res["is_deleted"] = instance.is_deleted
res["portrait"] = instance.portrait
res["name"] = instance.name
res["description"] = instance.desc
res["gender"] = instance.gender
res["city_id"] = instance.city_id
res["pick_id_list"] = instance.get_pick_id_list()
create_time = instance.create_time
tzlc_create_time = tzlc(create_time)
res["create_time"] = tzlc_create_time
update_time = instance.update_time
tzlc_update_time = tzlc(update_time)
res["update_time"] = tzlc_update_time
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
import traceback
from libs.tools import tzlc
class GroupTransfer(object):
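# builds the document dict for a Group, including the derived high-quality topic count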
def __init__(self):
pass
@classmethod
def get_group_data(cls,instance):
try:
res = dict()
res["id"] = instance.id
res["is_online"] = instance.is_online
res["is_deleted"] = instance.is_deleted
res["is_recommend"] = instance.is_recommend
res["name"] = instance.name
res["description"] = instance.description
res["topic_num"] = instance.topic_num
res["creator_id"] = instance.creator_id
res["icon"] = instance.icon
create_time = instance.create_time
tzlc_create_time = tzlc(create_time)
res["create_time"] = tzlc_create_time
update_time = instance.update_time
tzlc_update_time = tzlc(update_time)
res["update_time"] = tzlc_update_time
res["high_quality_topic_num"] = instance.get_high_quality_topic_num()
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
import traceback
from libs.tools import tzlc
class PickCelebrityTransfer(object):
@classmethod
def get_pick_celebrity_data(cls,instance):
try:
res = dict()
res["id"] = instance.id
res["rank"] = instance.rank
res["celebrity_id"] = instance.celebrity_id
res["pick_id"] = instance.pick_id
res["create_time"] = tzlc(instance.create_time)
res["update_time"] = tzlc(instance.update_time)
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
import traceback
from libs.tools import tzlc
import time
class TopicTransfer(object):
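# builds the document dict for a Topic, including derived pick ids, tag ids and the offline score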
@classmethod
def get_topic_data(cls,instance):
try:
res = dict()
res["id"] = instance.id
res["is_online"] = instance.is_online
res["vote_num"] = instance.vote_num
res["reply_num"] = instance.reply_num
res["name"] = instance.name
res["description"] = instance.description
res["content"] = instance.content
res["content_level"] = instance.content_level
res["user_id"] = instance.user_id
if instance.group:
res["group_id"] = instance.group.id
else:
res["group_id"] = -1
res["share_num"] = instance.share_num
res["pick_id_list"] = instance.get_pick_id_info()
res["tag_list"] = instance.get_topic_tag_id_list()
res["offline_score"] = instance.get_topic_offline_score()
create_time = instance.create_time
tzlc_create_time = tzlc(create_time)
res["create_time"] = tzlc_create_time
res["create_time_val"] = int(time.mktime(tzlc_create_time.timetuple()))
update_time = instance.update_time
tzlc_update_time = tzlc(update_time)
res["update_time"] = tzlc_update_time
res["update_time_val"] = int(time.mktime(tzlc_update_time.timetuple()))
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
import traceback
import time
from libs.tools import tzlc
class UserTransfer(object):
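# builds the document dict for a User, including derived follow/pick/same-group relations and tag list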
@classmethod
def get_user_data(cls,instance):
try:
res = dict()
res["id"] = instance.id
res["user_id"] = instance.user_id
res["nick_name"] = instance.nick_name
res["profile_pic"] = instance.profile_pic
res["gender"] = instance.gender
res["city_id"] = instance.city_id
res["country_id"] = instance.country_id
res["is_online"] = instance.is_online
(is_recommend,is_shadow) = instance.get_is_recommend_flag()
res["is_recommend"] = is_recommend
res["is_shadow"] = is_shadow
latest_topic_time_val = instance.get_latest_topic_time_val()
res["latest_topic_time_val"] = latest_topic_time_val
tzlc_create_time = tzlc(instance.create_time)
res["create_time"] = tzlc_create_time
res["create_time_val"] = int(time.mktime(tzlc_create_time.timetuple()))
tzlc_update_time = tzlc(instance.update_time)
res["update_time"] = tzlc_update_time
res["update_time_val"] = int(time.mktime(tzlc_update_time.timetuple()))
res["tag_list"] = instance.get_user_tag_id_list()
res["attention_user_id_list"] = instance.get_follow_user_id_list()
res["attention_group_id_list"] = instance.get_attention_group_id_list()
res["pick_user_id_list"] = instance.get_pick_user_id_list()
res["same_group_user_id_list"] = instance.get_same_group_user_id_list()
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None