Commit 175a10fa authored by lixiaofang's avatar lixiaofang

Initial commit

parents

Too many changes to show.

To preserve performance only 1000 of 1000+ files are displayed.

b221f63aaa0911e693b200163e00062b 308e33e020b111e6b76600163e001c72 dalianxuyingyiliaomeirong Dxuying
\ No newline at end of file
# Build image for the gaia service: Alpine base with python2 + ffmpeg.
FROM ccr.ccs.tencentyun.com/gm-base/gm-alpine-py2-ffmpeg:v1.0
MAINTAINER wph [wangpenghong@igengmei.com]
COPY ./requirements/common.txt /tmp
RUN apk add --no-cache --virtual .build-deps \
bzip2-dev \
coreutils \
dpkg-dev dpkg \
expat-dev \
findutils \
gcc \
gdbm-dev \
libc-dev \
libffi-dev \
libnsl-dev \
libressl-dev \
libtirpc-dev \
linux-headers \
make \
ncurses-dev \
pax-utils \
readline-dev \
sqlite-dev \
tcl-dev \
tk \
tk-dev \
xz-dev \
zlib-dev \
# Business-specific build dependencies and install tooling
linux-headers \
python2-dev \
librdkafka-dev \
mariadb-client \
mariadb-dev \
git \
openssh \
\
# Skip the interactive host-key confirmation on the first ssh connection
&& echo "StrictHostKeyChecking no" >> /etc/ssh/ssh_config \
&& apk add --no-cache mariadb-connector-c-dev libxml2-dev libxslt-dev librdkafka-dev \
&& pip install -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com --upgrade setuptools \
&& sed -i '/st_mysql_options options;/a unsigned int reconnect;' /usr/include/mysql/mysql.h \
&& pip install --no-cache-dir -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com -r /tmp/common.txt \
&& mkdir -p /tmp/video_convert \
&& mkdir -p /data/log/gaia/app
COPY . /srv/apps/gaia/
WORKDIR /srv/apps/gaia/
# Dependencies pinned to a master branch in requirements are reinstalled on every image build
RUN cat requirements/common.txt | grep master > /tmp/gm-requirements.txt \
&& pip install --no-deps --upgrade -i http://mirrors.aliyun.com/pypi/simple/ --trusted-host mirrors.aliyun.com -r /tmp/gm-requirements.txt \
&& apk del .build-deps
CMD gunicorn gaia.wsgi:application -w 6 -k gevent -b 0.0.0.0:8000 --worker-tmp-dir /dev/shm
// Declarative Jenkins pipeline: build the gaia Docker image and push it to the registry.
@Library('gm-pipeline-library') _
pipeline {
    agent any
    options {
        // Console output add timestamps
        timestamps()
        // Disallow concurrent executions of the Pipeline
        disableConcurrentBuilds()
        // On failure, retry the entire Pipeline the specified number of times.
        retry(1)
    }
    parameters {
        choice(name: 'cache', choices: ['', '--no-cache'], description: 'docker build 是否使用cache,默认使用,不使用为--no-cache')
    }
    environment {
        // Image Tag branch.time.hash
        TAG = dockerTag()
        // Image Full Tag
        IMAGE = "${DOCKER_REGISTRY}/gm-backend/gaia:$TAG"
    }
    stages {
        stage("Begin") {
            steps {
                // DingTalk notification that the build started.
                dingNotify "before"
            }
        }
        stage('Build Image') {
            steps {
                sh "docker build . ${params.cache} -f ./Dockerfile -t $IMAGE"
                sh "docker push $IMAGE"
            }
        }
    }
    post {
        always {
            // DingTalk notification with the final build result.
            dingNotify "after", "${currentBuild.currentResult}"
        }
    }
}
GAIA
====
说明: 远程调用的参数和返回值会通过 json 序列化, 因此 dict 的 key 必须是字符串.
本地运行gaia服务器方法:
安装依赖
pip install -r requirements/dev.txt
增加rpc的相关配置
cp gaia/rpcd.json.demo gaia/rpcd.json
如果需要修改相应的RPC地址,在gaia/rpcd.json中修改。
bingo
HELIOS_ROUTE_TABLE_FOR_DEBUG="$(<gaia/rpcd.json)" python manage.py runserver
# Command
目录:api/management/commands
执行方式
`python manage.py command_name`
|命令|描述|时间|
|---|---|---|
|relation_190507_service_with_tag|导入产品提供的美购service和tag关联的数据,数据来自 /tag.xlsx|2019.05.07|
#用户达人标志开始回退
# !/usr/bin/env python
# encoding=utf-8
"""Celery application bootstrap for the gaia project, with optional Sentry reporting."""
from __future__ import absolute_import

import os

# set the default Django settings module for the 'celery' program.
# Must happen before any Django-dependent import below.
# os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'gaia.settings')

import raven
from raven.contrib.celery import register_signal, register_logger_signal
from celery import Celery
from django.conf import settings


class Celery(Celery):
    """wrap for celery.Celery."""
    # NOTE: intentionally shadows the imported celery.Celery name so the rest
    # of this module instantiates the Sentry-aware subclass.

    def on_configure(self):
        # check if sentry settings provided
        if not settings.SENTRY_CELERY_ENDPOINT:
            return
        client = raven.Client(settings.SENTRY_CELERY_ENDPOINT)
        # register a custom filter to filter out duplicate logs
        register_logger_signal(client)
        # hook into the Celery error handler
        register_signal(client)


app = Celery('gaia_tasks')
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
File added
#!/usr/bin/env python
# -*- coding: utf-8 -*-
\ No newline at end of file
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from django.core.management.base import BaseCommand
from gm_types.gaia import (
AGILE_TAG_HERA_RECOMMEND_TAB_TYPE,
AGILE_TAG_RECOMMEND_TYPE,
)
from agile.models import AgileTagHeraRecommend
# Seed data: (recommend type, Hera tab type) -> ordered list of agile tag ids
# used by the Command below to pre-populate AgileTagHeraRecommend.
tab_type_mapping_tags = {
    (AGILE_TAG_RECOMMEND_TYPE.TRACTATE, AGILE_TAG_HERA_RECOMMEND_TAB_TYPE.ALL):  # all
        [9535, 9510, 9669, 9670, 9509, 9532, 9518, 9508, 9505, 9511, 8, 21, 117, 31, 61],
    (AGILE_TAG_RECOMMEND_TYPE.TRACTATE, AGILE_TAG_HERA_RECOMMEND_TAB_TYPE.PROJECT):  # project
        [117, 31, 1, 50, 35, 8, 21, 4, 28, 64, 60, 7, 61, 37, 4],
    (AGILE_TAG_RECOMMEND_TYPE.TRACTATE, AGILE_TAG_HERA_RECOMMEND_TAB_TYPE.POSITION):  # body part
        [169, 156, 120, 124, 332, 331, 330, 329, 306, 310, 303, 278, 360, 265, 271],
    (AGILE_TAG_RECOMMEND_TYPE.TRACTATE, AGILE_TAG_HERA_RECOMMEND_TAB_TYPE.BRAND):  # brand
        [342, 344, 184, 168, 315, 294, 181, 190, 214, 309, 289],
    (AGILE_TAG_RECOMMEND_TYPE.TRACTATE, AGILE_TAG_HERA_RECOMMEND_TAB_TYPE.INSTRUMENT):  # instrument
        [223, 218, 122, 318, 324, 280, 251, 166],
}
class Command(BaseCommand):
    """Seed AgileTagHeraRecommend from tab_type_mapping_tags (idempotent)."""

    def handle(self, *args, **kwargs):
        print("BEGIN")
        pending = []
        for (_recommend_type, _tab_type), sorted_tag_ids in tab_type_mapping_tags.items():
            base_kw = {
                "agile_recommend_type": _recommend_type,
                "agile_tab_type": _tab_type,
                "is_online": True,
            }
            # Skip combinations that are already configured.
            if AgileTagHeraRecommend.objects.filter(**base_kw).exists():
                print("recommend_type {}、tab_type {} exists!".format(
                    AGILE_TAG_RECOMMEND_TYPE.getDesc(_recommend_type),
                    AGILE_TAG_HERA_RECOMMEND_TAB_TYPE.getDesc(_tab_type)
                ))
                continue
            base_kw["agile_tag_sort_ids"] = json.dumps(sorted_tag_ids)
            pending.append(AgileTagHeraRecommend(**base_kw))
            print("recommend_type {}、tab_type {} will create!".format(
                AGILE_TAG_RECOMMEND_TYPE.getDesc(_recommend_type),
                AGILE_TAG_HERA_RECOMMEND_TAB_TYPE.getDesc(_tab_type)
            ))
        if pending:
            print("bulk create, lens {}".format(len(pending)))
            AgileTagHeraRecommend.objects.bulk_create(pending)
        print("END")
# coding:utf-8
import os
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from gm_types.gaia import AGILE_TAG_CREATE_TYPE, AGILE_TAG_STYLE, AGILE_TAG_TYPE
from utils.execel import ExcelReader, ExcelWriter
from agile.models.agile_tag import AgileTag, AgileTagMapping, AgileTagType
AGILE_TAG_TYPE_MAP = {
u'项目': AGILE_TAG_TYPE.PROJECT,
u'-': AGILE_TAG_TYPE.UNDEFINED,
u'医院': AGILE_TAG_TYPE.HOSPITAL,
u'医生': AGILE_TAG_TYPE.DOCTOR,
u'仪器': AGILE_TAG_TYPE.INSTRUMENT,
u'品牌': AGILE_TAG_TYPE.BRAND,
u'部位': AGILE_TAG_TYPE.POSITION,
u'症状': AGILE_TAG_TYPE.SYMPTOM,
u'材料': AGILE_TAG_TYPE.MATERIAL,
u'城市': AGILE_TAG_TYPE.CITY,
u'省份': AGILE_TAG_TYPE.PROVINCE,
u'国家': AGILE_TAG_TYPE.COUNTRY,
}
CREATE_TAG_TYPE_MAP = {
u'项目': AGILE_TAG_CREATE_TYPE.SYSTEM,
u'-': AGILE_TAG_CREATE_TYPE.OPERATOR,
u'医生': AGILE_TAG_CREATE_TYPE.SYSTEM,
u'医院': AGILE_TAG_CREATE_TYPE.SYSTEM,
}
EXCEL_SHEET = [u'医院', u'医生']
def get_path():
    """Absolute path of the doctor/hospital mapping spreadsheet shipped in files/."""
    commands_dir = os.path.dirname(__file__)
    parent_dir = os.path.dirname(commands_dir)
    return parent_dir + '/files/医院医生新标签映射表格.xlsx'
class Command(BaseCommand):
    """Import doctor/hospital agile tags from the mapping spreadsheet.

    Reads every row of the 医院/医生 sheets, creates an AgileTag plus its
    AgileTagType per row, and writes the (name, id) pairs to agile_tags.xlsx.

    Invocation: python manage.py sync_agile_tag_v2
    """

    def handle(self, *args, **options):
        print('START')
        excel_write = ExcelWriter("agile_tags.xlsx")
        excel_read = ExcelReader(get_path())
        for sheet in EXCEL_SHEET:
            excel_read.sheet_select_by_name(sheet)
            excel_write.create_sheet(sheet)
            excel_write.write_header(['标签名称', '标签id'])
            agile_tag_type = []
            write_rows = []
            for row in range(1, excel_read.row_number):
                data = excel_read.read_row(row)
                # data[0], data[1], data[2]: tag type / old tag id / new tag name
                if not any([data[0], data[1], data[2]]):
                    break  # a fully empty row marks the end of the sheet
                try:
                    agile = AgileTag.objects.create(
                        name=data[2],
                        create_tag_type=CREATE_TAG_TYPE_MAP[data[0]],
                        style=AGILE_TAG_STYLE.UNDEFINED
                    )
                except (IntegrityError, KeyError):
                    # Was a bare `except:` that hid every failure. The cases the
                    # loop actually needs to skip are: duplicate tag name
                    # (name is unique) and an unmapped type label in data[0].
                    continue
                agile_tag_type.append(
                    AgileTagType(
                        agile_tag_id=agile.id,
                        agile_tag_type=AGILE_TAG_TYPE_MAP[data[0]]
                    )
                )
                write_rows.append([data[2], agile.id])
            # Insert the type rows in chunks of 1000 to bound statement size.
            offset = 1000
            while agile_tag_type:
                AgileTagType.objects.bulk_create(agile_tag_type[:offset])
                agile_tag_type = agile_tag_type[offset:]
            excel_write.write_rows(2, write_rows)
        excel_write.save()
        print('DONE')
# coding:utf-8
import os
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from gm_types.gaia import AGILE_TAG_CREATE_TYPE, AGILE_TAG_STYLE, AGILE_TAG_TYPE
from utils.execel import ExcelReader
from agile.models.agile_tag import AgileTag, AgileTagMapping, AgileTagType
AGILE_TAG_TYPE_MAP = {
u'项目': AGILE_TAG_TYPE.PROJECT,
u'-': AGILE_TAG_TYPE.UNDEFINED
}
CREATE_TAG_TYPE_MAP = {
u'项目': AGILE_TAG_CREATE_TYPE.SYSTEM,
u'-': AGILE_TAG_CREATE_TYPE.OPERATOR
}
def get_path():
    """Absolute path of the old->new tag mapping spreadsheet."""
    commands_dir = os.path.dirname(__file__)
    parent_dir = os.path.dirname(commands_dir)
    # Upload location agreed with the product side.
    return parent_dir + '/files/新老标签映射1.4.xlsx'
class Command(BaseCommand):
    """Map old tags to new agile tags (command: sync_tag_agile_tag).

    Reads the "映射关系for后端" sheet, creates (or revives) one AgileTag per
    row, records its AgileTagType, and bulk-creates the old-tag mappings
    listed in the row.
    """

    def handle(self, *args, **options):
        print('START')
        excel = ExcelReader(get_path())
        excel.sheet_select_by_name(u"映射关系for后端")
        for row in range(1, excel.row_number):
            data = excel.read_row(row)
            # data[0], data[1], data[2]: tag type / old tag id(s) / new tag name
            if not any([data[0], data[1], data[2]]):
                break  # a fully empty row marks the end of the sheet
            try:
                agile = AgileTag.objects.create(
                    name=data[2],
                    create_tag_type=CREATE_TAG_TYPE_MAP[data[0]],
                    style=AGILE_TAG_STYLE.UNDEFINED
                )
            except IntegrityError:
                # Name already exists (unique): reuse that row and bring it back online.
                agile = AgileTag.objects.get(name=data[2])
                if not agile.is_online:
                    agile.is_online = True
                    agile.save(update_fields=['is_online'])
            # NOTE(review): this runs on the IntegrityError path too, so
            # re-running the command inserts duplicate AgileTagType rows for
            # tags that already existed -- confirm whether that is intended.
            AgileTagType.objects.create(
                agile_tag_id=agile.id,
                agile_tag_type=AGILE_TAG_TYPE_MAP[data[0]]
            )
            # data[1] is a comma-separated list of old tag ids (may be empty).
            raw_mapping_data = data[1]
            if raw_mapping_data:
                old_tag_mapping = str(data[1]).split(',')
            else:
                old_tag_mapping = []
            agiletagmapping_list = []
            for _id in old_tag_mapping:
                tag_mapping = AgileTagMapping(
                    agile_tag_id=agile.id,
                    old_tag_id=int(_id)
                )
                agiletagmapping_list.append(tag_mapping)
            AgileTagMapping.objects.bulk_create(agiletagmapping_list)
        print('DONE')
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .agile_tag import (
AgileTag,
AgileTagType,
AgileTagRecommendType,
AgileTagMapping,
AgileTagHeraRecommend,
)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, print_function
from django.db import models
from django.utils import timezone
from gm_types.gaia import (
AGILE_TAG_TYPE,
AGILE_TAG_CREATE_TYPE,
AGILE_TAG_STYLE,
AGILE_TAG_RECOMMEND_TYPE,
AGILE_TAG_ATTRIBUTE,
AGILE_TAG_HERA_RECOMMEND_TAB_TYPE,
)
from api.models.tag import Tag as OldTag
class BaseModel(models.Model):
    """Abstract base adding an online flag and create/update timestamps."""

    class Meta:
        abstract = True

    # Soft-delete flag: the service layer filters on is_online=True.
    is_online = models.BooleanField(verbose_name=u"是否有效", default=True)
    create_time = models.DateTimeField(verbose_name=u'创建时间', auto_now_add=True)
    update_time = models.DateTimeField(verbose_name=u'更新时间', auto_now=True)
class AgileTag(BaseModel):
    """A "new" (agile) tag; names are globally unique."""

    class Meta:
        verbose_name = u'新标签'
        db_table = 'api_agile_tag'

    name = models.CharField(verbose_name=u'新标签名字', max_length=128, null=False, unique=True, default='')
    description = models.TextField(verbose_name=u'描述', default='')
    create_tag_type = models.CharField(
        verbose_name=u"标签创建类型", max_length=3, choices=AGILE_TAG_CREATE_TYPE, default=AGILE_TAG_CREATE_TYPE.OPERATOR)
    style = models.CharField(
        verbose_name=u"标签样式", max_length=3, choices=AGILE_TAG_STYLE, default=AGILE_TAG_STYLE.UNDEFINED)
    attribute = models.CharField(verbose_name=u"标签属性", max_length=3, choices=AGILE_TAG_ATTRIBUTE, default=AGILE_TAG_ATTRIBUTE.NULL)
    # Lower values sort first in topic recommendation lists (see AgileTagService).
    topic_recommend_sort = models.IntegerField(verbose_name=u'帖子推荐排序', default=9999)
class AgileTagType(BaseModel):
    """Type(s) attached to an agile tag; one row per (tag, type) pair."""

    class Meta:
        verbose_name = u'新标签类型(可多选)'
        db_table = 'api_agile_tag_type'

    agile_tag_id = models.IntegerField(verbose_name=u'新标签', db_index=True)
    agile_tag_type = models.CharField(
        verbose_name=u"标签类型", max_length=3, choices=AGILE_TAG_TYPE, default=AGILE_TAG_TYPE.UNDEFINED)
class AgileTagRecommendType(BaseModel):
    """Recommend scope(s) attached to an agile tag; one row per (tag, type) pair."""

    class Meta:
        verbose_name = u'新标签推荐类型(可多选)'
        db_table = 'api_agile_tag_recommend_type'

    agile_tag_id = models.IntegerField(verbose_name=u'新标签', db_index=True)
    agile_tag_type = models.CharField(
        verbose_name=u"标签推荐类型", max_length=3, choices=AGILE_TAG_RECOMMEND_TYPE)
class AgileTagMapping(BaseModel):
    """Many-to-many mapping between agile (new) tags and legacy tags."""

    class Meta:
        verbose_name = u'新标签与老标签映射关系(可多选)'
        db_table = 'api_agile_tag_mapping'

    agile_tag_id = models.IntegerField(verbose_name=u'新标签', db_index=True)
    old_tag_id = models.IntegerField(verbose_name=u'老标签', db_index=True)
class AgileTagHeraRecommend(BaseModel):
    """Hera-backoffice recommended tag list; one row per (scope, tab) pair."""

    class Meta:
        verbose_name = u'hera后台推荐新标签'
        db_table = 'api_agile_tag_hera_recommend'
        unique_together = ('agile_recommend_type', 'agile_tab_type')

    agile_recommend_type = models.CharField(verbose_name=u"内容范围(标签推荐类型)", max_length=11, choices=AGILE_TAG_RECOMMEND_TYPE)
    agile_tab_type = models.CharField(verbose_name=u"类型(TAB按钮类型)", max_length=11, choices=AGILE_TAG_HERA_RECOMMEND_TAB_TYPE)
    # JSON-encoded, ordered list of agile tag ids.
    agile_tag_sort_ids = models.TextField(verbose_name=u'配置的新标签有序列表,json字符串', default="")
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .agile_tag import AgileTagService
from .agile_tag_type import AgileTagTypeService
from .agile_tag_mapping import AgileTagMappingTagService
from .agile_tag_polymer import AgileTagRelationPolymerService
from .agile_tag_recommend_type import AgileTagRecommendTypeService
from .agile_tag_hera_recommend import AgileTagHeraRecommendService
# -*- coding: utf-8 -*-
from django.db import IntegrityError
from django.db.models import Q
from django.conf import settings
from gm_types.error import ERROR
from gm_types.gaia import AGILE_TAG_CREATE_TYPE, AGILE_TAG_STYLE, AGILE_TAG_TYPE
from agile.models.agile_tag import AgileTag
from rpc.tool.error_code import gen
class AgileTagService(object):
    """Query/creation helpers for AgileTag rows (online rows only)."""

    model = AgileTag
    _base_query = Q(is_online=True)

    @classmethod
    def create(cls, name=None, description='', order=9999,
               create_tag_type=AGILE_TAG_CREATE_TYPE.CUSTOM, style=AGILE_TAG_STYLE.UNDEFINED):
        """Create a tag; returns {'tag_id', 'name'}, or gen(...) on a
        missing or duplicate name."""
        if not name:
            return gen(ERROR.AGILE_TAG_NAME_ERR)
        data = {
            'name': name,
            'description': description,
            'topic_recommend_sort': order,
            'create_tag_type': create_tag_type,
            'style': style,
        }
        try:
            agile_tag = cls.model.objects.create(**data)
        except IntegrityError:
            # `name` is unique on the model.
            return gen(ERROR.AGILE_TAG_NAME_EXIST)
        return {
            'tag_id': agile_tag.id,
            'name': agile_tag.name
        }

    @classmethod
    def tag_obj_to_dict(cls, obj, simple=True):
        """Serialize one AgileTag; simple=False adds description/order/create_tag_type."""
        _style = obj.style
        _data = {
            "id": obj.id,
            "name": obj.name,
            "style": _style,
            "style_image_url": settings.AGILE_STYLE.get(_style, ''),
            "attribute": obj.attribute,  # tag attribute
        }
        if not simple:
            _data.update({
                'description': obj.description,
                'order': obj.topic_recommend_sort,
                'create_tag_type': obj.create_tag_type,
            })
        return _data

    @classmethod
    def tag_queryset_to_dict(cls, queryset=None, simple=True):
        """Serialize a queryset to {'result': [tag_dict, ...]}."""
        result = []
        for item in queryset.iterator():
            result.append(cls.tag_obj_to_dict(item, simple=simple))
        return {'result': result}

    @classmethod
    def get_tag_by_ids(cls, agile_ids, simple=True):
        """Fetch online tags by id; returns {'result': [...]} (order unspecified)."""
        query = cls._base_query & Q(id__in=agile_ids)
        queryset = cls.model.objects.filter(query)
        return cls.tag_queryset_to_dict(queryset, simple=simple)

    @classmethod
    def get_hot_agile_tags_by_ids(cls, agile_ids, simple=True, start_num=0, offset=15):
        """
        Page through the "hot" tags among agile_ids, ordered by
        topic_recommend_sort ascending, then id descending.
        :param agile_ids:
        :param start_num: page start index
        :param offset: page size
        :param simple:
        :return: {'result': [...]}
        """
        query = cls._base_query & Q(id__in=agile_ids)
        agile_tags = cls.model.objects.filter(query).order_by(
            'topic_recommend_sort', '-id')[start_num: start_num+offset]
        agile_dic = cls.tag_queryset_to_dict(agile_tags, simple=simple)
        return agile_dic
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from django.db.models import Q
from agile.models.agile_tag import AgileTagHeraRecommend
class AgileTagHeraRecommendService(object):
    """Read access to the Hera-configured recommended tag lists."""

    model = AgileTagHeraRecommend
    _base_query = Q(is_online=True)

    @classmethod
    def get_hera_recommend_agile_tag_ids(cls, recommend_type, tab_type, start_num=0, count=15):
        """Return a page of configured tag ids for (recommend_type, tab_type).

        The ids are stored as a JSON list on the newest matching online row;
        an empty list is returned when nothing is configured.
        """
        query = cls._base_query & Q(
            agile_recommend_type=recommend_type,
            agile_tab_type=tab_type
        )
        raw_ids = (
            cls.model.objects
            .filter(query)
            .values_list("agile_tag_sort_ids", flat=True)
            .last()
        )
        if not raw_ids:
            return []
        return json.loads(raw_ids)[start_num: start_num + count]
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import defaultdict
from django.db import IntegrityError
from django.db.models import Q
from django.conf import settings
from gm_types.error import ERROR
from api.models import Tag
from agile.models.agile_tag import AgileTagMapping, AgileTag
class AgileTagMappingTagService(object):
    """Lookups over the agile-tag <-> old-tag mapping table."""

    model = AgileTagMapping
    _base_query = Q(is_online=True)

    @classmethod
    def get_mapping_tags_by_agile_tag_ids(cls, agile_tag_ids, only_need_old_tag_ids=False):
        """
        Fetch all old tags mapped to the given agile tags.
        if only_need_old_tag_ids:
            return {new_tag_id: [old_tag_id1, old_tag_id2]}
        else
            return {
                new_tag_id : [
                    {
                        "id": 1,
                        "name": "tag name",
                        "tag_type": tag type,
                        "recommend_type": recommend type,
                    },
                ]
            }
        Keys of the returned dict are str(agile_tag_id).
        :param agile_tag_ids:
        :param only_need_old_tag_ids: only the old tag ids are needed
        :return:
        """
        result = {}
        filter_query = cls._base_query & Q(agile_tag_id__in=agile_tag_ids)
        mapping_tags = cls.model.objects.filter(filter_query).values("agile_tag_id", "old_tag_id")
        if mapping_tags:
            result = defaultdict(list)
            if only_need_old_tag_ids:
                old_tags_info_dic = {}
            else:
                # Load the full info for every referenced old tag that is online.
                _old_tag_ids = set(map(lambda item: item["old_tag_id"], mapping_tags))
                old_tags_info = Tag.objects.filter(
                    pk__in=_old_tag_ids,
                    is_online=True
                ).values("id", "name", "tag_type", "recommend_type")
                old_tags_info_dic = {
                    tag_info["id"]: dict(tag_info) for tag_info in old_tags_info
                }
            for mapping_tag in mapping_tags:
                if only_need_old_tag_ids:
                    _info = mapping_tag["old_tag_id"]
                else:
                    # Offline old tags resolve to {} and are dropped below.
                    _info = old_tags_info_dic.get(mapping_tag["old_tag_id"], {})
                if _info:
                    result[str(mapping_tag["agile_tag_id"])].append(_info)
        return dict(result)

    @classmethod
    def get_mapping_tags_by_old_tag_ids(cls, old_tag_ids):
        """
        Fetch the agile tags mapped to the given old tags.
        :param old_tag_ids:
        :return: {str(old_tag_id): [new_tag_id1, new_tag_id2]}
        """
        result = {}
        filter_query = cls._base_query & Q(old_tag_id__in=old_tag_ids)
        mapping_tags = cls.model.objects.filter(filter_query).values("agile_tag_id", "old_tag_id")
        if mapping_tags:
            result = defaultdict(list)
            for mapping_tag in mapping_tags:
                result[str(mapping_tag["old_tag_id"])].append(mapping_tag["agile_tag_id"])
        return dict(result)

    @classmethod
    def get_mapping_tags_tuple_by_old_tag_ids(cls, old_tag_ids):
        """
        Fetch (id, name) pairs of the agile tags mapped to the given old tags.
        :param old_tag_ids:
        :return: [(new_tag_id1, new_tag_id1_name), (new_tag_id2, new_tag_id2_name)]
        """
        filter_query = cls._base_query & Q(old_tag_id__in=old_tag_ids)
        agile_tag_ids = cls.model.objects.filter(filter_query).values_list('agile_tag_id', flat=True)
        if not agile_tag_ids:
            return list()
        # NOTE(review): unlike the mapping query above, this AgileTag lookup
        # does not filter on is_online -- confirm offline tags should be returned.
        res = AgileTag.objects.filter(id__in=agile_tag_ids).values_list('id', 'name')
        return res
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import defaultdict
from django.db.models import Q
from polymer.models import AgileTagRelationPolymer
class AgileTagRelationPolymerService(object):
    """Links agile tags to polymer (aggregation) pages."""

    model = AgileTagRelationPolymer
    _base_query = Q(is_online=True)

    @classmethod
    def create(cls, agile_tag_id, polymer_id):
        """Create or re-enable the tag<->polymer link; no-op when an id is missing."""
        if not (agile_tag_id and polymer_id):
            return
        cls.model.objects.update_or_create(
            agile_tag_id=agile_tag_id,
            polymer_id=polymer_id,
            defaults={"is_online": True},
        )

    @classmethod
    def get_polymer_id_by_agile_tag_ids(cls, agile_tag_ids):
        """
        Fetch the polymer page linked to each agile tag.
        :param agile_tag_ids:
        :return: {agile_tag_id: polymer_id, ...}
        """
        rows = cls.model.objects.filter(
            cls._base_query & Q(agile_tag_id__in=agile_tag_ids)
        ).values("agile_tag_id", "polymer_id")
        if not rows:
            return {}
        mapping = {}
        for row in rows:
            mapping[row["agile_tag_id"]] = row["polymer_id"]
        return mapping
# -*- coding: utf-8 -*-
from collections import defaultdict
from django.db.models import Q
from gm_types.error import ERROR
from gm_types.gaia import AGILE_TAG_RECOMMEND_TYPE
from agile.models.agile_tag import AgileTagRecommendType
from rpc.tool.error_code import gen
class AgileTagRecommendTypeService(object):
    """Manages the recommend-type rows attached to agile tags."""

    model = AgileTagRecommendType
    _base_query = Q(is_online=True)

    @classmethod
    def create(cls, agile_tag_id, agile_recommend_types):
        """
        Attach recommend types to a tag (bulk insert).
        :param agile_tag_id:
        :param agile_recommend_types: list of AGILE_TAG_RECOMMEND_TYPE values
        :return: {'id': agile_tag_id} on success, None when input is incomplete
        """
        if not all([agile_tag_id, agile_recommend_types]):
            return
        recommend_type_list = [
            cls.model(agile_tag_id=agile_tag_id, agile_tag_type=_type)
            for _type in agile_recommend_types
        ]
        cls.model.objects.bulk_create(recommend_type_list)
        return {'id': agile_tag_id}

    @classmethod
    def get_recommend_type_info(cls, agile_ids):
        """
        Fetch the recommend types of the given tags.
        :param agile_ids:
        :return: {agile_tag_id: [recommend_type, ...]}
        """
        result = {}
        if not agile_ids:
            return result
        query = cls._base_query & Q(agile_tag_id__in=set(agile_ids))
        recommend_types = cls.model.objects.filter(query).values('agile_tag_id', 'agile_tag_type')
        if recommend_types:
            result = defaultdict(list)
            for recommend_type in recommend_types.iterator():
                result[recommend_type['agile_tag_id']].append(recommend_type["agile_tag_type"])
            # Previously leaked the defaultdict; return a plain dict for
            # consistency with AgileTagTypeService.get_agile_types.
            result = dict(result)
        return result

    @classmethod
    def get_agile_tag_ids_by_recommend_type(cls,
                                            agile_tag_ids,
                                            recommend_type=AGILE_TAG_RECOMMEND_TYPE.TRACTATE):
        """
        Filter agile_tag_ids down to those having the given recommend type.
        NOTE(review): unlike the other queries in this class, this one does not
        apply the is_online filter -- confirm whether offline rows should count.
        :param agile_tag_ids:
        :param recommend_type: recommend type to match
        :return: list of matching agile tag ids
        """
        if not agile_tag_ids:
            return []
        return list(cls.model.objects.filter(
            agile_tag_type=recommend_type, agile_tag_id__in=agile_tag_ids
        ).values_list('agile_tag_id', flat=True))
# -*- coding: utf-8 -*-
from collections import defaultdict
from django.db.models import Q
from gm_types.error import ERROR
from gm_types.gaia import AGILE_TAG_TYPE
from agile.models.agile_tag import AgileTagType
from rpc.tool.error_code import gen
class AgileTagTypeService(object):
    """Manages the type rows attached to agile tags (many-to-many)."""

    _base_query = Q(is_online=True)
    model = AgileTagType

    @classmethod
    def create(cls, agile_tag_id, agile_tag_types=(AGILE_TAG_TYPE.UNDEFINED,)):
        """
        Attach types to a tag (bulk insert).
        The default used to be a mutable list; an immutable tuple avoids the
        shared-mutable-default pitfall without changing behaviour.
        :param agile_tag_id:
        :param agile_tag_types: iterable of AGILE_TAG_TYPE values
        :return: None
        """
        if not all([agile_tag_id, agile_tag_types]):
            return
        agile_type_list = [
            cls.model(agile_tag_id=agile_tag_id, agile_tag_type=_type)
            for _type in agile_tag_types
        ]
        cls.model.objects.bulk_create(agile_type_list)

    @classmethod
    def get_agile_types(cls, agile_ids):
        """
        Fetch the types of the given tags.
        :param agile_ids: list of agile tag ids
        :return: {agile_tag_id: [type, ...]}
        """
        result = {}
        if not agile_ids:
            return result
        query = cls._base_query & Q(agile_tag_id__in=set(agile_ids))
        agile_tags_type = cls.model.objects.filter(query).values('agile_tag_id', 'agile_tag_type')
        if agile_tags_type:
            result = defaultdict(list)
            for _agile_type in agile_tags_type.iterator():
                result[_agile_type["agile_tag_id"]].append(_agile_type["agile_tag_type"])
        return dict(result)

    @classmethod
    def get_agile_ids_by_agile_type(cls, agile_type='', start_num=0, offset=15):
        """
        Fetch tag ids by type; an empty agile_type returns every online id.
        NOTE(review): start_num/offset are accepted but never applied -- the
        full id list is always returned; confirm whether paging was intended
        before changing callers' expectations.
        :param agile_type:
        :param start_num: unused
        :param offset: unused
        :return: {'ids': [...]}
        """
        query = cls._base_query
        if agile_type:
            query = query & Q(agile_tag_type=agile_type)
        agile_ids = list(cls.model.objects.filter(query).values_list('agile_tag_id', flat=True))
        return {'ids': agile_ids}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .agile_tag import *
# -*- coding: utf-8 -*-
import json
from django.conf import settings
from gm_types.error import ERROR
from gm_types.gaia import (
AGILE_TAG_RECOMMEND_TYPE,
AGILE_TAG_HERA_RECOMMEND_TAB_TYPE,
)
from rpc.decorators import bind, bind_context
from agile.services import (
AgileTagService,
AgileTagTypeService,
AgileTagMappingTagService,
AgileTagRecommendTypeService,
AgileTagRelationPolymerService,
AgileTagHeraRecommendService,
)
from rpc.tool.error_code import gen
from .common import AgileTagFormat
from rpc.cache import old_new_tag_mapping_cache
@bind('api/agile/create')
def agile_tag_create(name, description='', order=None, create_tag_type=None, style=None,
                     agile_tag_types=None, agile_recommend_types=None, relation_polymer_id=None):
    """
    Create an agile tag.
    Previously every optional argument except relation_polymer_id was accepted
    but silently ignored; they are now forwarded when provided (None keeps the
    service-layer defaults, so existing callers are unaffected).
    :param name: tag name (required)
    :param description:
    :param order: topic recommend sort value
    :param create_tag_type:
    :param style:
    :param agile_tag_types: list of AGILE_TAG_TYPE values
    :param agile_recommend_types: list of AGILE_TAG_RECOMMEND_TYPE values
    :param relation_polymer_id: polymer page to link to the new tag
    :return: {'tag_id': ..., 'name': ...}
    """
    if not name:
        return gen(ERROR.AGILE_TAG_NAME_ERR)
    create_kwargs = {'name': name, 'description': description}
    if order is not None:
        create_kwargs['order'] = order
    if create_tag_type is not None:
        create_kwargs['create_tag_type'] = create_tag_type
    if style is not None:
        create_kwargs['style'] = style
    agile = AgileTagService.create(**create_kwargs)
    agile_id = agile.get('tag_id')
    if agile_tag_types:
        AgileTagTypeService.create(agile_id, agile_tag_types)
    else:
        AgileTagTypeService.create(agile_id)
    if agile_recommend_types:
        AgileTagRecommendTypeService.create(agile_id, agile_recommend_types)
    # Link to an aggregation (polymer) page when requested.
    if relation_polymer_id:
        AgileTagRelationPolymerService.create(
            agile_tag_id=agile_id,
            polymer_id=relation_polymer_id
        )
    return agile
@bind('api/agile_tags/by_type')
def get_agile_by_type(agile_type, recommend_type=AGILE_TAG_RECOMMEND_TYPE.TRACTATE, show_style=False, start_num=0, offset=15):
    """
    Fetch the hot tags configured for one tab type.
    :param agile_type: tab type; falsy means the "ALL" tab
    :param recommend_type: content scope of the recommendation
    :param show_style:
    :param start_num:
    :param offset:
    :return: {'result': {'agile_tags': [...]}} in the configured order
    """
    tab_type = agile_type or AGILE_TAG_HERA_RECOMMEND_TAB_TYPE.ALL
    # Ordered tag ids configured in the Hera backoffice.
    ordered_ids = AgileTagHeraRecommendService.get_hera_recommend_agile_tag_ids(
        recommend_type=recommend_type,
        tab_type=tab_type,
        start_num=start_num,
        count=offset
    )
    tag_info = AgileTagService.get_tag_by_ids(agile_ids=ordered_ids) or {}
    # Restore the configured ordering, which the id-set query does not keep.
    tags = sorted(
        tag_info.get("result", []),
        key=lambda item: ordered_ids.index(item.get("id", 0))
    )
    return {'result': {'agile_tags': tags}}
@bind("api/agile_tag/info_by_ids")
def get_agile_tag_by_ids(agile_tag_ids, simple=True, only_need_old_tag_ids=True, return_dic=False):
    """
    Fetch tag data by ids.
    :param agile_tag_ids:
    :param simple: basic fields only
    :param only_need_old_tag_ids: mapped old tags as bare ids
    :param return_dic: True -> dict keyed by str(tag id), False -> list
    :return:
    """
    if not agile_tag_ids:
        return {} if return_dic else []
    assert len(agile_tag_ids) <= settings.COUNT_LIMIT, 'too many agile_tag_ids'
    info_by_id = AgileTagFormat.agile_tags_info_format(
        agile_tag_ids,
        simple=simple,
        only_need_old_tag_ids=only_need_old_tag_ids
    )
    # Shape of the response is caller-controlled.
    return info_by_id if return_dic else list(info_by_id.values())
@bind("api/agile_tag/get_mapping_old_tag_ids_by_ids")
def get_mapping_old_tag_ids_by_agile_tag_ids(agile_tag_ids, only_need_old_tag_ids=True):
    """
    Fetch only the old tags mapped to the given agile tags.
    :param agile_tag_ids:
    :param only_need_old_tag_ids:
    :return: {'tag_mapping': {...}}
    """
    mapping = {}
    if agile_tag_ids:
        assert len(agile_tag_ids) <= settings.COUNT_LIMIT, 'too many agile_tag_ids'
        mapping = AgileTagMappingTagService.get_mapping_tags_by_agile_tag_ids(
            agile_tag_ids,
            only_need_old_tag_ids=only_need_old_tag_ids
        )
    return {"tag_mapping": mapping}
@bind("api/agile_tag/get_agile_tags_by_old_tag_ids")
def get_agile_tags_by_old_tag_ids(old_tag_ids):
    """
    Fetch the agile tags mapped to the given old tags.
    :param old_tag_ids:
    :return: {'tag_mapping': {old_tag_id: [agile_tag_id, ...]}}
    """
    mapping = {}
    if old_tag_ids:
        assert len(old_tag_ids) <= settings.COUNT_LIMIT, 'too many old_tag_ids'
        mapping = AgileTagMappingTagService.get_mapping_tags_by_old_tag_ids(old_tag_ids)
    return {"tag_mapping": mapping}
@bind("api/agile_tag/tuple_new_tags")
def get_agile_tags_tuple_by_old_tag_ids(old_tag_ids):
    """Return cached (id, name) tuples for the new tags mapped to old_tag_ids."""
    return get_cache_data(old_tag_ids, True)
@bind("api/agile_tag/cache")
def get_cache_agile_data(old_tag_ids):
    """
    Fetch the cached mapping entries (full dicts) for the given old tag ids.
    :param old_tag_ids:
    :return: list of cached mapping dicts
    """
    return get_cache_data(old_tag_ids)
def get_cache_data(old_tag_ids, use_tuple=False):
    """Read the old->new tag mapping hash from the cache, one hget per id.

    :param old_tag_ids: list of old tag ids (anything else yields [])
    :param use_tuple: True -> [(id, name), ...], False -> full cached dicts
    :return: one entry per cache hit; misses are skipped
    """
    if not isinstance(old_tag_ids, list):
        # Also covers old_tag_ids is None.
        return []
    hash_name = settings.TAT_MAPPING_HASH_NAME
    pipe = old_new_tag_mapping_cache.pipeline()
    for key in old_tag_ids:
        pipe.hget(hash_name, key)
    entries = []
    for raw in pipe.execute():
        if not raw:
            continue
        info = json.loads(raw)
        if use_tuple:
            entries.append((info.get('id'), info.get('name')))
        else:
            entries.append(info)
    return entries
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from agile.services import (
AgileTagService,
AgileTagTypeService,
AgileTagMappingTagService,
AgileTagRecommendTypeService,
AgileTagRelationPolymerService,
)
class AgileTagFormat(object):
    """Assembles the full serialized view of agile tags from the services."""

    @classmethod
    def agile_tags_info_format(cls, agile_tag_ids, simple=True, only_need_old_tag_ids=True):
        """
        Join tag data from all the agile-tag services into one dict.
        :param agile_tag_ids:
        :param simple: skip the old-tag mapping and polymer lookups
        :param only_need_old_tag_ids: mapped old tags as bare ids vs full dicts
        :return: {str(tag_id): tag_dict} -- note the string keys
        """
        # Basic tag data.
        _agile_tags_dic = AgileTagService.get_tag_by_ids(agile_tag_ids, simple=simple)
        # Tag types.
        _agile_tags_type_dic = AgileTagTypeService.get_agile_types(agile_tag_ids)
        # Tag recommend types.
        _agile_tags_recommend_type_dic = AgileTagRecommendTypeService.get_recommend_type_info(agile_tag_ids)
        if simple:  # only the basic data is needed
            _agile_tags_mapping_dic = {}
            _agile_tags_polymer = {}
        else:
            # Old tags mapped to each agile tag.
            _agile_tags_mapping_dic = AgileTagMappingTagService.get_mapping_tags_by_agile_tag_ids(
                agile_tag_ids,
                only_need_old_tag_ids=only_need_old_tag_ids
            )
            # Linked aggregation (polymer) pages.
            _agile_tags_polymer = AgileTagRelationPolymerService.get_polymer_id_by_agile_tag_ids(
                agile_tag_ids
            )
        # Assemble the final structure.
        _agile_tags = _agile_tags_dic.get("result", [])
        # NOTE: the name is reused; from here on it holds the output dict.
        _agile_tags_dic = {}
        for agile_tag in _agile_tags:
            _agile_tag_id = agile_tag.get("id", 0)
            if _agile_tag_id:
                agile_tag.update({
                    "tags_type": _agile_tags_type_dic.get(_agile_tag_id, []),
                    "recommends_type": _agile_tags_recommend_type_dic.get(_agile_tag_id, []),
                    "mapping_old_tag_ids": [],  # ids only
                    "mapping_old_tags_info": [],  # full data
                    "polymer_id": _agile_tags_polymer.get(_agile_tag_id, 0)  # linked polymer page id
                })
                if only_need_old_tag_ids:
                    agile_tag.update({
                        "mapping_old_tag_ids": _agile_tags_mapping_dic.get(str(_agile_tag_id), []),
                    })
                else:
                    agile_tag.update({
                        "mapping_old_tags_info": _agile_tags_mapping_dic.get(str(_agile_tag_id), [])
                    })
                _agile_tags_dic[str(_agile_tag_id)] = agile_tag
        return _agile_tags_dic
# coding=utf8
from __future__ import unicode_literals, absolute_import, print_function
from .gaia_api import *
# coding=utf-8
from gm_types.gaia import QUESTION_ORDER_TYPE, USER_TYPE
from api.models import Province, Zone
from api.tasks.user_related_tasks import get_sleep_user
from api.tool.user_tool import get_auth_type_by_userid, get_user_type_ids
from rpc.decorators import bind
from rpc.decorators import list_interface
from search.utils.question import filter_question
from social import SocialInfo
from statistic import const_strings
@bind('api/question/filter')
@list_interface(offset_name='offset', limit_name='size')
def api_filter_question(offset=0, size=5, province_id=None, sort_type=QUESTION_ORDER_TYPE.DEFAULT, filters=None, sort_params=None):
    """
    question filter.
    :param offset:
    :param size:
    :param province_id: when set and the province has a tag, adds it as area_tag_id
    :param sort_type:
    :param filters: extra filter dict (copied; the caller's dict is not mutated)
    :param sort_params:
    :return:
    """
    # The old signature used `filters={}` -- a shared mutable default that this
    # function then mutated (adding 'area_tag_id'), leaking state across calls.
    filters = dict(filters) if filters else {}
    if province_id:
        try:
            p = Province.objects.get(id=province_id)
            if p.tag:
                filters['area_tag_id'] = p.tag.id
        except Province.DoesNotExist:
            pass
    return filter_question(offset=offset, size=size, sort_type=sort_type, filters=filters, sort_params=sort_params)
@bind('api/user/is_following')
def is_following(cuid, uid):
    """Whether user `cuid` follows user `uid`."""
    social_info = SocialInfo(cuid)
    return social_info.is_following_user(uid)
@bind('api/user/auth_type')
def get_user_auth_type(uid):
    """
    Fetch the auth type of one user.
    :param uid:
    :return:
    """
    auth_type = get_auth_type_by_userid(uid)
    return auth_type
@bind('api/users/user_type')
def get_user_auth_type(uids):
    """
    Fetch the user type for each of the given users.
    NOTE(review): this redefines the module-level `get_user_auth_type` bound
    above at 'api/user/auth_type'. Presumably @bind registered the first
    function before it was shadowed, so both routes still work, but the module
    attribute now points here -- consider renaming; verify before relying on it.
    :param uids: user ids
    :return: {uid: {..., 'user_type': USER_TYPE value}}
    """
    users_type = get_user_type_ids(uids)
    result = {}
    for uid, user_type in users_type.items():
        # hospital account -> OFFICER; doctor account -> EXPERT; else plain USER
        if user_type['hospital_id']:
            user_type["user_type"] = USER_TYPE.OFFICER
        elif user_type['doctor_id']:
            user_type["user_type"] = USER_TYPE.EXPERT
        else:
            user_type["user_type"] = USER_TYPE.USER
        result[uid] = user_type
    return result
@bind('api/user/get_sleep_user')
def _get_sleep_user(number):
    """Return the user ids of `number` dormant users."""
    return [extra.user.id for extra in get_sleep_user(number)]
@bind('api/province/list')
def province_list(**kwargs):
    """
    List provinces, prefixed with a nationwide pseudo-entry.
    :param kwargs: optional display_in_filter to narrow the queryset
    :return: [{'id': ..., 'name': ...}, ...]
    """
    qs = Province.objects.all()
    display_in_filter = kwargs.get('display_in_filter')
    if display_in_filter is not None:
        qs = qs.filter(display_in_filter=display_in_filter)
    result = [{'id': const_strings.NATIONWIDE, 'name': u'全国'}]
    result.extend({'id': p.id, 'name': p.name} for p in qs)
    return result
@bind('api/zone/list')
def zone_list(pks):
    """
    zone list.
    :param pks: zone primary keys
    :return: [{'id': ...}, ...]
    """
    zones = Zone.objects.filter(pk__in=pks)
    return [{'id': zone.id} for zone in zones]
from .answer import *
\ No newline at end of file
This diff is collapsed.
# coding=utf8
from __future__ import unicode_literals, absolute_import, print_function
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from _celery import app as celery_app
from api.signals import *
from django.contrib import admin
# Register your models here.
# -*- coding: utf-8 -*-
from django.db import models
from gm_types.gaia import REQUEST_SOURCE
from rpc.api_control import request_source
class RequestSourceManager(models.Manager):
    """Manager that scopes querysets to the current request source (vest-app config)."""

    def get_queryset(self):
        # req_source is stashed on the request-local api_control object.
        req_source = request_source.__dict__.get('req_source')
        queryset = super(RequestSourceManager, self).get_queryset()
        if req_source:
            return queryset.filter(req_source=req_source)
        # No source recorded: return the unrestricted queryset.  The previous
        # no-arg .filter() call only produced a redundant queryset clone.
        return queryset
class RequestSourceModel(models.Model):
    # Abstract base that adds request-source scoping via RequestSourceManager.
    objects = RequestSourceManager()

    class Meta:
        abstract = True

    # Which client/app variant created the row; defaults to the main app.
    req_source = models.CharField(u'请求来源', max_length=32, choices=REQUEST_SOURCE.choices, default=REQUEST_SOURCE.GEMGMEI)
# coding=utf8
from __future__ import unicode_literals, absolute_import, print_function
# coding=utf-8
import datetime
from gm_types.gaia import GROUPBUY_STATUS
from api.models import ORDER_STATUS
from api.models import ORDER_OPERATION_TYPE
from api.models import CASH_BACK_STATUS
from api.models import REFUND_STATUS
from api.models import NEW_CASH_BACK_STATUS
from rpc.tool.error_code import CODES, gen
ORDER_ID_LEN = 10  # presumably the fixed digit-length of order ids — confirm with the id generator
def order_pay(order):
    """Mark an order PAID; only legal from NOT_PAID/PAYING/CANCEL states."""
    payable_states = (ORDER_STATUS.NOT_PAID, ORDER_STATUS.PAYING, ORDER_STATUS.CANCEL)
    if order.status not in payable_states:
        raise gen(CODES.ORDER_PAY_STATUS_ERROR)
    order.status = ORDER_STATUS.PAID
def order_validate(order):
    """Consume a paid order (mark it USED); raises unless the order is PAID."""
    if order.status == ORDER_STATUS.PAID:
        order.status = ORDER_STATUS.USED
        return
    raise gen(CODES.ORDER_PAY_STATUS_ERROR)
def order_cancel(order):
    """Cancel an order; only unpaid orders may be cancelled directly."""
    if order.status in (ORDER_STATUS.NOT_PAID, ORDER_STATUS.PAYING):
        order.status = ORDER_STATUS.CANCEL
    else:
        raise gen(CODES.ORDER_CAN_NOT_CANCEL)
def order_apply_refund(order):
    """Move an order into WAIT_REFUNDED and open/refresh its refund record."""
    from api.models import RefundOrder
    if order.payment == 0:
        # Nothing was actually paid, so there is nothing to refund.
        raise gen(CODES.ORDER_REFUDN_PAYMENT_ERROR)
    order.status = ORDER_STATUS.WAIT_REFUNDED
    if not RefundOrder.create_or_update(order=order, user_reason=order.refund_comment):
        raise gen(CODES.ORDER_REFUDN_STATUS_ERROR)
def order_cancel_refund(order):
    """Withdraw a pending refund request and restore the order to PAID."""
    paid_amount = order.payment or order.real_payment
    if paid_amount == 0 or order.status != ORDER_STATUS.WAIT_REFUNDED:
        raise gen(CODES.ORDER_REFUDN_CANCLE_STATUS_ERROR)
    order.status = ORDER_STATUS.PAID
    refund = order.refund
    refund.status = REFUND_STATUS.CANCLED
    refund.save()
# def order_refund(order):
# order.status = ORDER_STATUS.REFUNDED
# order.refund.status = REFUND_STATUS.REFUNDED
def order_not_paid(order):
    # Reset the order back to the NOT_PAID state.
    order.status = ORDER_STATUS.NOT_PAID
def order_paying(order):
    # Move the order into the in-flight PAYING state.
    order.status = ORDER_STATUS.PAYING
def order_cashback(order):
    """Mark the cashback successful on both the order and its cashback record."""
    order.cash_back_status = CASH_BACK_STATUS.SUCCESS
    order.cashback.status = CASH_BACK_STATUS.SUCCESS
def order_doctor_approve(order):
    # Doctor approves a refund: move the refund record to DOCTOR_APPROVE.
    # NOTE(review): the guard uses ``and`` — it raises only when the refund is
    # not PROCESSING *and* the order is not USED.  Sibling handlers
    # (order_groupbuy_fail, order_doctor_reject) check refund status alone;
    # confirm ``and`` (rather than ``or``) is intended here.
    refund = order.refund
    if refund.status != REFUND_STATUS.PROCESSING and order.status != ORDER_STATUS.USED:
        raise gen(CODES.ORDER_REFUDN_STATUS_ERROR)
    refund.status = REFUND_STATUS.DOCTOR_APPROVE
    refund.save(update_fields=["status"])
def order_groupbuy_fail(order):
    """Auto-approve the refund of an order whose group-buy failed."""
    refund = order.refund
    refund_not_processing = refund.status != REFUND_STATUS.PROCESSING
    groupbuy_not_failed = order.groupbuy_status != GROUPBUY_STATUS.GROUPBUY_FAIL
    if refund_not_processing or groupbuy_not_failed:
        raise gen(CODES.ORDER_REFUDN_STATUS_ERROR)
    refund.status = REFUND_STATUS.DOCTOR_APPROVE
    refund.save(update_fields=["status"])
def order_doctor_reject(order):
    """Doctor rejects the refund request; the order goes back to USED."""
    refund = order.refund
    if refund.status == REFUND_STATUS.PROCESSING:
        refund.status = REFUND_STATUS.DOCTOR_REJECT
        refund.save()
        order.status = ORDER_STATUS.USED
    else:
        raise gen(CODES.ORDER_REFUDN_STATUS_ERROR)
# def order_appeal_arbit(order):
# # 订单已使用不能申诉
# # 医生没拒绝不能申诉
# refund = order.refund
# if refund.status != REFUND_STATUS.DOCTOR_REJECT and order.status != ORDER_STATUS.USED:
# raise gen(CODES.ORDER_REFUDN_STATUS_ERROR)
# refund.status = REFUND_STATUS.ARBITING
# refund.save()
# order.status = ORDER_STATUS.WAIT_REFUNDED
# def order_arbit_approve(order):
# refund = order.refund
# if refund.status != REFUND_STATUS.ARBITING and order.status != ORDER_STATUS.USED:
# raise gen(CODES.ORDER_REFUDN_STATUS_ERROR)
# refund.status = REFUND_STATUS.ARBIT_APPROVE
# refund.save()
# order.status = ORDER_STATUS.WAIT_REFUNDED
# def order_arbit_reject(order):
# refund = order.refund
# if refund.status != REFUND_STATUS.ARBITING:
# raise gen(CODES.ORDER_REFUDN_STATUS_ERROR)
# refund.status = REFUND_STATUS.ARBIT_REJECT
# refund.save()
# order.status = ORDER_STATUS.PAID
def order_refund_timeout(order):
    # Seller did not confirm the refund request in time; move the refund into
    # the arbitration queue so operations staff can refund manually.
    # NOTE(review): like order_doctor_approve, this guard uses ``and`` — it
    # raises only when the refund is not PROCESSING *and* the order is not
    # USED; confirm this combined condition is intended.
    refund = order.refund
    if refund.status != REFUND_STATUS.PROCESSING and order.status != ORDER_STATUS.USED:
        raise gen(CODES.ORDER_REFUDN_STATUS_ERROR)
    refund.status = REFUND_STATUS.REFUND_APPLY_SELLER_TIMEOUT
    refund.save()
def stale_refund(order):
    """Complete a stale refund: mark both the order and refund record REFUNDED."""
    refund_order = order.refund
    already_done = (order.status == ORDER_STATUS.REFUNDED
                    or refund_order.status == REFUND_STATUS.REFUNDED)
    if already_done:
        raise gen(CODES.ORDER_REFUDN_HAS_BEEN_OPERATED)
    if not refund_order.stale:
        raise gen(CODES.ORDER_REFUDN_NOT_STALE)
    order.status = ORDER_STATUS.REFUNDED
    order.refund_time = datetime.datetime.now()
    refund_order.status = REFUND_STATUS.REFUNDED
    refund_order.refunded_at = datetime.datetime.now()
    refund_order.save()
def stale_cashback(order):
    """Complete a stale cashback on both the order and its cashback record."""
    cashback_order = order.cashback
    already_done = (order.cash_back_status == CASH_BACK_STATUS.SUCCESS
                    or cashback_order.status == NEW_CASH_BACK_STATUS.SUCCESS)
    if already_done:
        raise gen(CODES.ORDER_CASHBACK_HAS_BEEN_OPERATED)
    if not cashback_order.stale:
        raise gen(CODES.ORDER_CASHBACK_NOT_STALE)
    order.cash_back_status = CASH_BACK_STATUS.SUCCESS
    order.cash_back_time = datetime.datetime.now()
    cashback_order.status = NEW_CASH_BACK_STATUS.SUCCESS
    cashback_order.cashbacked_at = datetime.datetime.now()
    cashback_order.save()
def tag_stale_refund(order):
    """Flag the order's refund record as stale."""
    refund = order.refund
    refund.stale = True
    refund.save()
def tag_stale_cashback(order):
    """Flag the order's cashback record as stale."""
    cashback = order.cashback
    cashback.stale = True
    cashback.save()
def order_has_refunded(order):
    """Record a completed refund on both the refund record and the order."""
    moment = datetime.datetime.now()
    order.status = ORDER_STATUS.REFUNDED
    order.refund_time = moment
    refund = order.refund
    refund.status = REFUND_STATUS.REFUNDED
    refund.refunded_at = moment
    refund.save()
def order_has_cashback(order):
    """Record a successful cashback on both the cashback record and the order."""
    moment = datetime.datetime.now()
    cashback = order.cashback
    cashback.status = NEW_CASH_BACK_STATUS.SUCCESS
    cashback.cashbacked_at = moment
    cashback.save()
    order.cash_back_time = moment
    order.cash_back_status = CASH_BACK_STATUS.SUCCESS
def refund_error(order):
    """Flag the order's refund record as errored."""
    failed_refund = order.refund
    failed_refund.error = True
    failed_refund.save()
def cashback_error(order):
    """Flag the order's cashback record as errored."""
    failed_cashback = order.cashback
    failed_cashback.error = True
    failed_cashback.save()
# transaction.atomic() is already applied one level up (by the caller) —
# do not add it again inside these handler functions.
# Dispatch table: operation type -> state-transition handler.
order_operate_map = {
    ORDER_OPERATION_TYPE.PAY: order_pay,
    ORDER_OPERATION_TYPE.VALIDATE: order_validate,
    ORDER_OPERATION_TYPE.CANCEL: order_cancel,
    ORDER_OPERATION_TYPE.APPLY_REFUND: order_apply_refund,
    ORDER_OPERATION_TYPE.CANCEL_REFUND: order_cancel_refund,
    ORDER_OPERATION_TYPE.STALE_REFUND: stale_refund,
    ORDER_OPERATION_TYPE.STALE_CASHBACK: stale_cashback,
    # ORDER_OPERATION_TYPE.APPEAL_ARBIT: order_appeal_arbit,
    ORDER_OPERATION_TYPE.NOT_PAID: order_not_paid,
    ORDER_OPERATION_TYPE.PAYING: order_paying,
    ORDER_OPERATION_TYPE.DOCTOR_APPROVE: order_doctor_approve,
    ORDER_OPERATION_TYPE.DOCTOR_REJECT: order_doctor_reject,
    # ORDER_OPERATION_TYPE.ARBIT_APPROVE: order_arbit_approve,
    # ORDER_OPERATION_TYPE.ARBIT_REJECT: order_arbit_reject,
    ORDER_OPERATION_TYPE.REFUND_TIMEOUT: order_refund_timeout,
    ORDER_OPERATION_TYPE.TAG_STALE_REFUND: tag_stale_refund,
    ORDER_OPERATION_TYPE.TAG_STALE_CASHBACK: tag_stale_cashback,
    ORDER_OPERATION_TYPE.REFUNDED: order_has_refunded,
    ORDER_OPERATION_TYPE.CASHBACKED: order_has_cashback,
    ORDER_OPERATION_TYPE.REFUND_ERROR: refund_error,
    ORDER_OPERATION_TYPE.CASHBACK_ERROR: cashback_error,
    ORDER_OPERATION_TYPE.GROUPBUY_FAIL: order_groupbuy_fail,
}
# coding=utf8
from __future__ import unicode_literals, absolute_import, print_function
from ..models import TAG_TYPE
from ..models import Tag, TagRelation, dfs_child_closure, dfs_parent_closure
def is_valid_tag_type(tag_type):
    """Return True when ``tag_type`` is a known TAG_TYPE value."""
    sentinel = object()
    return TAG_TYPE.getDesc(tag_type, sentinel) is not sentinel
class TagControl(object):
    """Thin facade over Tag/TagRelation CRUD and tag-closure queries."""

    @classmethod
    def get_tag(cls, pk):
        """Fetch a tag by primary key (raises Tag.DoesNotExist)."""
        return Tag.objects.get(pk=pk)

    @classmethod
    def add_tag(cls, name, tag_type):
        """Get-or-create a tag by name; the type is applied only on creation."""
        # TODO: semantics are murky — an existing tag keeps its old tag_type.
        tag, was_created = Tag.objects.get_or_create(name=name)
        if was_created:
            assert is_valid_tag_type(tag_type)
            tag.tag_type = tag_type
            tag.save()
        return tag

    @classmethod
    def add_relation(cls, parent, child):
        """Create a parent -> child tag relation."""
        TagRelation.add_relation(parent=parent, child=child)

    @classmethod
    def del_relation(cls, parent, child):
        """Delete a parent -> child tag relation."""
        TagRelation.del_relation(parent=parent, child=child)

    @classmethod
    def _restrict(cls, tags, tag_type):
        """Keep only tags of ``tag_type`` (None means no restriction)."""
        if tag_type is None:
            return tags
        assert is_valid_tag_type(tag_type)
        return [tag for tag in tags if tag.tag_type == tag_type]

    @classmethod
    def get_ancestors(cls, initial_set, exclude_init, tag_type=None, is_online_only=None):
        """Return ancestor tags of ``initial_set``, optionally filtered by type."""
        closure = dfs_parent_closure(initial_set=initial_set, exclude_init=exclude_init, is_online_only=is_online_only)
        return cls._restrict(closure.values(), tag_type)

    @classmethod
    def get_descendants(cls, initial_set, exclude_init, tag_type=None, is_online_only=None):
        """Return descendant tags of ``initial_set``, optionally filtered by type."""
        closure = dfs_child_closure(initial_set=initial_set, exclude_init=exclude_init, is_online_only=is_online_only)
        return cls._restrict(closure.values(), tag_type)
# coding=utf-8
__author__ = 'cheng'
# coding=utf-8
__author__ = 'cheng'
# coding=utf-8
from django.core.management import BaseCommand
from api.models import Area
class Command(BaseCommand):
    """One-off seed: insert phone-prefix rows for the initial set of areas."""

    @classmethod
    def _save_data(cls, name, num):
        """Persist a single Area row (display name + dialing prefix)."""
        Area(area_name=name, phone_prefix=num).save()

    def handle(self, *args, **kwargs):
        seeds = (
            (u'中国大陆', u'+86'),
            (u'香港', u'+852'),
            (u'台湾', u'+886'),
            (u'韩国', u'+82'),
            (u'日本', u'+81'),
        )
        for area_name, prefix in seeds:
            self._save_data(area_name, prefix)
\ No newline at end of file
# coding=utf-8
from django.core.management import BaseCommand
from openpyxl import Workbook, load_workbook
from api.models import BuDanLuRu
class Command(BaseCommand):
    """
    Backfill the ``type`` field of BuDanLuRu rows from budan_luru.xlsx.

    python manage.py add_budanluru_type
    """

    def handle(self, *args, **kwargs):
        workbook = load_workbook('./budan_luru.xlsx')
        sheet = workbook.get_active_sheet()
        # Map: record id (column A) -> type value (column F).
        datas = {}
        for row in sheet:
            datas[row[0].value] = row[5].value
        for obj in BuDanLuRu.objects.all():
            new_type = datas.get(obj.id)
            # `is not None` replaces the non-idiomatic `!= None` comparison;
            # rows whose type cell is empty are left untouched.
            if new_type is not None:
                obj.type = new_type
                obj.save()
#!/usr/bin/env python
# -*- coding:utf-8 -*-
#
# Author : RobertDing
# E-mail : robertdingx@gmail.com
# Date : 16/03/18 10:19:01
# Desc : 大礼包渠道之前数据插入数据库
#
from __future__ import absolute_import, division, with_statement, print_function
from django.core.management import BaseCommand
from api.models import BusinessChannel
# Gift-package channel seed data: (primary key, channel description).
items = [
    (0, '360生活助手'),
    (1, '付款成功后微信分享'),
    (2, '内部使用'),
    (3, '易拉宝'),
    (4, 'CEO发红包'),
    (5, '市场合作'),
    (6, '8.5返场券'),
    (7, '游戏-愿望树'),
    (8, '99更美周年大礼包'),
    (9, '微信摇'),
    (10, '品牌组活动'),
    (11, '市场组活动'),
    (12, '邀请好友'),
    (13, '接受邀请'),
    (14, '百度优惠'),
    (15, '2016元旦'),
    (16, '经纬中国'),
    (17, '分众专享'),
    (18, '嗒嗒巴士'),
]
class Command(BaseCommand):
    """One-off seed of the BusinessChannel table from the ``items`` list above."""

    def handle(self, *args, **kwargs):
        # .exists() avoids fetching every row just to test for emptiness
        # (the old `if data:` evaluated the whole queryset).
        if BusinessChannel.objects.exists():
            print('错误: api_business_channel 数据库不为空')
            return
        BusinessChannel.objects.bulk_create([
            BusinessChannel(id=id, desc=desc) for (id, desc) in items])
# -*- coding:utf-8 -*-
from __future__ import print_function
from django.core.management import BaseCommand
from channel.models import Channel
channels = [
['360tf', u'360'],
['360tf2', u'360'],
['baiduss', u'百度搜索'],
['baiduss1', u'百度搜索'],
['baiduss2', u'百度搜索'],
['pretty', u'百度搜索'],
['baiduss3', u'百度搜索'],
['baiduss4', u'百度搜索'],
['baiduss5', u'百度搜索'],
['baiduss6', u'百度搜索'],
['baiduss7', u'百度搜索'],
['baiduss8', u'百度搜索'],
['baiduss10', u'百度搜索'],
['baiduss9', u'百度搜索'],
['baiduss11', u'百度搜索'],
['baiduss12', u'百度搜索'],
['shenmass', u'神马搜索'],
['shenmass1', u'神马搜索'],
['shenmass2', u'神马搜索'],
['sougou', u'搜狗市场'],
['sougoutg', u'搜狗市场推广'],
['sougou1', u'搜狗搜索'],
['sougou2', u'搜狗搜索'],
['sougou3', u'搜狗搜索'],
['sougou4', u'搜狗搜索'],
['fensitong', u'粉丝通'],
['fst', u'粉丝通-细化'],
['fst1', u'粉丝通-细化'],
['fst4', u'粉丝通投放'],
['xfst', u'粉丝通新户'],
['xfst1', u'粉丝通新户'],
['baidu', u'百度市场'],
['baidu_family', u'百度市场'],
['chunhua', u'百度市场'],
['m360', u'360市场'],
['lianxiang', u'联想市场'],
['huawei', u'华为市场'],
['chuizi', u'锤子市场'],
['jifeng', u'机锋市场'],
['meizu', u'魅族市场'],
['163', u'163网易'],
['anzhi', u'安智市场'],
['91ZS', u'91助手'],
['baidubaitong', u'百度百通'],
['baiducpd', u'百度市场-CPD付费'],
['baidumotu', u' 百度魔图'],
['benzhan', u'主站'],
['bbs', u'宝宝树-开屏'],
['bdbt', u'百度春华'],
['bdvip', u'百度VIP'],
['360yaoyiyao', u'360手助活动'],
['abizhi', u'爱壁纸'],
['chaping', u'广点通插屏'],
['gdt', u'广点通-分包1'],
['gdt1', u'广点通-分包2'],
['guangdiantong', u'广点通-分包3'],
['guangdiantong1', u'广点通新'],
['yuansheng', u'广点通原生广告'],
['yuansheng1', u'广点通原生广告'],
['googleplay', u'谷歌市场'],
['googlead', u'谷歌广告'],
['googlead1', u'谷歌广告'],
['googlead2', u'谷歌广告'],
['dayima', u'大姨吗-开屏'],
['guimiquan', u'辣妈帮-闺蜜圈'],
['huatian', u'花田'],
['iqiyi', u'爱奇艺-前贴'],
['ysbyanzhuo', u'爱奇艺一搜百应'],
['iqiyi-sspd', u'爱奇艺-时尚频道'],
['jingzhun', u'精准投放'],
['jinshan', u'金山'],
['jx', u'聚效'],
['KuaiYong', u'快用苹果助手'],
['kuchuan', u'酷传'],
['lamabang', u'辣妈帮-开屏'],
['m91', u'91助手'],
['meika', u'美咔相机-开屏'],
['mxyc1', u'明星衣橱-换量1'],
['mxyc2', u'明星衣橱-换量2'],
['mxyc3', u'明星衣橱-换量3'],
['mingxingyichu', u'明星衣橱'],
['mizhe', u'米折-换量'],
['MM', u'移动MM'],
['mocha', u'抹茶-开屏'],
['momo', u'陌陌'],
['shouji_qiantie', u'芒果TV前贴'],
['shouji_zanting', u'芒果TV暂停'],
['pad_qiantie', u'芒果ipad '],
['pad_zanting', u'芒果ipad '],
['oppo', u'OPPO市场'],
['shipin1', u'其他'],
['shipin2', u'其他'],
['wandoujia', u'豌豆荚'],
['pyhd', u'品友互动'],
['qita', u'其他'],
['qq', u'应用宝'],
['qq-cpd', u'qq-cpd'],
['sanxing', u'三星市场'],
['suopingjingling', u'锁屏精灵'],
['tengxunweibo', u'腾讯微博'],
['toutiao', u'今日头条'],
['toutiao1', u'今日头条'],
['uc', u'UC'],
['uctg', u'UC手助推广'],
['wangye', u'网页'],
['wanpu', u'万普'],
['weibosixin', u'微博私信'],
['weixingg', u'微信公众账号'],
['xiaomi', u'小米市场'],
['yhzh', u'不详'],
['ykzt', u'优酷暂停'],
['youku', u'优酷前贴片'],
['yybhlcpd', u'应用宝换量CPD'],
['zhidian', u'其他'],
['zht', u'智慧推'],
['xmttf', u'新媒体投放'],
['meiyouCPT', u'美柚CPT'],
['wyyd1', u'网易有道'],
['wyyd2', u'网易有道'],
['wyyd3', u'网易有道'],
['wyyd4', u'网易有道'],
['wyyd5', u'网易有道'],
['dbmq', u'兑吧美券'],
['tsq', u'她社区'],
['tsq1', u'她社区'],
['tsq2', u'她社区'],
['tsq3', u'她社区'],
['xxtg', u'校园推广'],
['xxtg-myxs', u'校园推广'],
['blued', u'blued'],
['fx1', u'分销推广'],
['fx2', u'分销推广'],
['fx3', u'分销推广'],
['fx4', u'分销推广'],
['mymd1', u'美业门店推广'],
['mymd2', u'美业门店推广'],
['mymd3', u'美业门店推广'],
['mymd4', u'美业门店推广'],
['mymd5', u'美业门店推广'],
['mymd6', u'美业门店推广'],
['mymd7', u'美业门店推广'],
['mymd8', u'美业门店推广'],
['mymd9', u'美业门店推广'],
['mymd10', u'美业门店推广'],
['mymd11', u'美业门店推广'],
['mymd12', u'美业门店推广'],
['mymd13', u'美业门店推广'],
['mymd14', u'美业门店推广'],
['mymd15', u'美业门店推广'],
['mymd16', u'美业门店推广'],
['mymd17', u'美业门店推广'],
['mymd18', u'美业门店推广'],
['mymd19', u'美业门店推广'],
['mymd20', u'美业门店推广'],
['yqxiu1', u'易企秀'],
['yqxiu2', u'易企秀'],
['yqxiu3', u'易企秀'],
['yqxiu4', u'易企秀'],
['yqxiu5', u'易企秀'],
['whfx1', u'美业分享推广'],
['whfx2', u'美业分享推广'],
['whfx3', u'美业分享推广'],
['whfx4', u'美业分享推广'],
['whfx5', u'美业分享推广'],
['whfx6', u'美业分享推广'],
['whfx7', u'美业分享推广'],
['whfx8', u'美业分享推广'],
['whfx9', u'美业分享推广'],
['whfx10', u'美业分享推广'],
['whfx11', u'美业分享推广'],
['whfx12', u'美业分享推广'],
['whfx13', u'美业分享推广'],
['whfx14', u'美业分享推广'],
['whfx15', u'美业分享推广'],
['whfx16', u'美业分享推广'],
['whfx17', u'美业分享推广'],
['whfx18', u'美业分享推广'],
['whfx19', u'美业分享推广'],
['whfx20', u'美业分享推广'],
['whfx21', u'美业分享推广'],
['whfx22', u'美业分享推广'],
['whfx23', u'美业分享推广'],
['whfx24', u'美业分享推广'],
['whfx25', u'美业分享推广'],
['whfx26', u'美业分享推广'],
['whfx27', u'美业分享推广'],
['whfx28', u'美业分享推广'],
['whfx29', u'美业分享推广'],
['whfx30', u'美业分享推广'],
['meipai', u'美拍'],
['leshi', u'乐视'],
['maimai', u'脉脉'],
['anzhuoshichang', u'安卓市场'],
['vivo', u'vivo'],
['qqllq', u'qq浏览器'],
['wangyi', u'网易'],
['bihe1', u'碧合'],
['bihe2', u'碧合'],
['bihe4', u'碧合'],
['bihe3', u'碧合'],
['bihe5', u'碧合'],
['bihe6', u'碧合'],
['mumayi', u'木蚂蚁'],
['zhuoyi', u'卓易'],
['txsjgj', u'腾讯手机管家'],
['baidumotu', u'百度魔图'],
]
class Command(BaseCommand):
    """Sync the ``channels`` list into the Channel table (create or rename).

    BUG FIX: the loop used to live directly in the class body, so it executed
    at import time and the command itself did nothing.  It now runs in
    handle() like every other management command.
    """

    def handle(self, *args, **kwargs):
        for code, name in channels:
            print(code, name)
            if not Channel.objects.filter(url_name=code).exists():
                Channel.objects.create(name=name, url_name=code)
            else:
                ch = Channel.objects.get(url_name=code)
                ch.name = name
                ch.save()
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import xlrd
from django.core.management import BaseCommand
from api.models import College
def assemble_data():
    """Load college rows from college.xlsx and bulk-insert them."""
    sheet = xlrd.open_workbook("college.xlsx").sheets()[0]
    colleges = []
    for row_idx in range(0, sheet.nrows):
        row = sheet.row_values(row_idx)
        colleges.append(College(name=row[1], level=row[2], city_id=row[3]))
    College.objects.bulk_create(colleges)
class Command(BaseCommand):
    """Import the college spreadsheet into the College table."""

    def handle(self, *args, **options):
        print('----------开始数据导入-----------------')
        assemble_data()
        print('----------数据导入完成-----------------')
#!/usr/bin/env python
# encoding=utf-8
from django.core.management import BaseCommand
from api.models.consult import ConsultScience
from api.models.consult import ConsultScienceWiki
from api.models.consult import ConsultTheme
from api.models.consult import ConsultThemeWiki
from api.models import ItemWiki
class Command(BaseCommand):
    """Rebuild consult theme/science data.  Must only be executed once."""

    def handle(self, *args, **options):
        # Wipe the previous data set, then re-insert from the literals below.
        delete_data()
        add_data()
def delete_data():
    """Remove all consult theme/science rows and their wiki links."""
    for model in (ConsultScience, ConsultScienceWiki, ConsultTheme, ConsultThemeWiki):
        model.objects.all().delete()
data1 = \
[{
"title": u"闪亮明眸",
"desc": u"撩汉宝典 必备大眼",
"image": u"http://hera.s.gengmei.cc/2016/05/12/de3d257600",
"sub_datas":[{
"wiki_id":484,
"slogan":u'毫厘之间 大放睛彩',
"image":u'http://hera.s.gengmei.cc/2016/05/12/62b37859a6',
},
{
"wiki_id":192,
"slogan":u'嘘!大眼睛的小秘密',
"image":u'http://hera.s.gengmei.cc/2016/05/12/f7714d5dbd',
},
{
"wiki_id":194,
"slogan":u'抚平眼袋 定格年轻双眸',
"image":u'http://hera.s.gengmei.cc/2016/05/12/43d8e50210',
},
{
"wiki_id":205,
"slogan":u'告别疲惫 再见熊猫眼',
"image":u'http://hera.s.gengmei.cc/2016/05/12/d3965a1d96',
},
{
"wiki_id":201,
"slogan":u'为你点亮媚眼的小心机',
"image":u'http://hera.s.gengmei.cc/2016/05/12/36bf70877f',
}]
},
{
"title": u"童颜利器",
"desc": u"女明星逆龄的秘密",
"image": u"http://hera.s.gengmei.cc/2016/05/12/ab63b39036",
"sub_datas":[{
"wiki_id":527,
"slogan":u'秒变少女萌萌哒',
"image":u'http://hera.s.gengmei.cc/2016/05/12/209463c376',
},
{
"wiki_id":290,
"slogan":u'带你年轻 带你飞 ',
"image":u'http://hera.s.gengmei.cc/2016/05/12/aed4e175be',
},
{
"wiki_id":369,
"slogan":u'黄脸婆到学生妹的逆袭',
"image":u'http://hera.s.gengmei.cc/2016/05/12/aaf8e25546',
},
{
"wiki_id":481,
"slogan":u'逆时光 逆引力 真童颜',
"image":u'http://hera.s.gengmei.cc/2016/05/12/073fb80f70',
},
{
"wiki_id":493,
"slogan":u'抗衰除皱一步到位',
"image":u'http://hera.s.gengmei.cc/2016/05/12/6ff78d9041',
}]
},
{
"title": u"我要笑更美",
"desc": u"爱笑的人运气都不差",
"image": u"http://hera.s.gengmei.cc/2016/05/12/d72fbf10aa",
"sub_datas":[{
"wiki_id":565,
"slogan":u'给口腔的大扫除',
"image":u'http://hera.s.gengmei.cc/2016/05/12/147f860231',
},
{
"wiki_id":265,
"slogan":u'貌美牙为先 齿白七分俏',
"image":u'http://hera.s.gengmei.cc/2016/05/12/adcb6a10d0',
},
{
"wiki_id":244,
"slogan":u'你就是蒙娜丽莎',
"image":u'http://hera.s.gengmei.cc/2016/05/12/475a8451c7',
},
{
"wiki_id":459,
"slogan":u'跟钢牙 say byebye~',
"image":u'http://hera.s.gengmei.cc/2016/05/12/4e998c6d5c',
},
{
"wiki_id":563,
"slogan":u'牙齿好 颜值又加分',
"image":u'http://hera.s.gengmei.cc/2016/05/12/68d912a744',
}]
},
{
"title": u"瓷感美肌",
"desc": u"皮肤俏 不显老",
"image": u"http://hera.s.gengmei.cc/2016/05/12/8ae3ee1b00",
"sub_datas":[{
"wiki_id":429,
"slogan":u'乔妹同款灯泡肌',
"image":u'http://hera.s.gengmei.cc/2016/05/12/1ff7b88d76',
},
{
"wiki_id":433,
"slogan":u'肌肤焕发光彩 绽放青春美白',
"image":u'http://hera.s.gengmei.cc/2016/05/12/45756af691',
},
{
"wiki_id":533,
"slogan":u'厚角质bye 好肤质来!',
"image":u'http://hera.s.gengmei.cc/2016/05/12/5664a7b7ad',
},
{
"wiki_id":295,
"slogan":u'反转肌龄 脸蛋净白嫩',
"image":u'http://hera.s.gengmei.cc/2016/05/12/9b1bf5515a',
},
{
"wiki_id":501,
"slogan":u'一次为肌肤喝饱水',
"image":u'http://hera.s.gengmei.cc/2016/05/12/9b63a9a3e6',
}]
}]
data2=\
[{
"desc": u"pk掉同龄人的必杀器",
"image": u"http://hera.s.gengmei.cc/2016/05/12/1d6d9c9a18",
"wiki_id": 480,
"wiki_ids":[430,481,550]
},
{
"desc": u"轻松get小V脸",
"image": u"http://hera.s.gengmei.cc/2016/05/12/d2183e3a70",
"wiki_id": 498,
"wiki_ids":[419,227,420]
},
{
"desc": u"跟黄脸婆、村姑脸说拜拜",
"image": u"http://hera.s.gengmei.cc/2016/05/12/da6b715939",
"wiki_id": 302,
"wiki_ids":[429,295,501]
},
{
"desc": u"一次治疗 即刻重获青春",
"image": u"http://hera.s.gengmei.cc/2016/05/12/ddca369359",
"wiki_id": 503,
"wiki_ids":[481,551,312]
},
{
"desc": u"永远不脱妆 专治懒癌",
"image": u"http://hera.s.gengmei.cc/2016/05/12/61f300742d",
"wiki_id": 486,
"wiki_ids":[591,594]
},
{
"desc": u"让你的肌肤自带磨皮感",
"image": u"http://hera.s.gengmei.cc/2016/05/12/1dc3548a44",
"wiki_id": 432,
"wiki_ids":[501,433,383]
}]
def add_data():
    # Insert the two data sets defined above: themes first, then science rows.
    script1(data1)
    script2(data2)
def script1(datas):
    """Create ConsultTheme rows plus their ConsultThemeWiki children.

    Wiki links that fail to resolve/save are skipped (best effort).
    """
    for data in datas:
        theme = ConsultTheme()
        theme.title = data['title']
        theme.desc = data['desc']
        theme.image = data['image']
        theme.save()
        for sub_data in data['sub_datas']:
            theme_wiki = ConsultThemeWiki()
            theme_wiki.consult_theme = theme
            theme_wiki.image = sub_data['image']
            theme_wiki.slogan = sub_data['slogan']
            try:
                theme_wiki.wiki = ItemWiki.objects.get(id=sub_data['wiki_id'])
                theme_wiki.save()
            except Exception:
                # `except Exception` replaces the bare `except:` which also
                # swallowed KeyboardInterrupt/SystemExit; the best-effort
                # skip-on-failure behaviour is preserved.
                pass
def script2(datas):
    """Create ConsultScience rows and their ConsultScienceWiki children.

    Rows whose main wiki cannot be resolved are skipped entirely; unresolved
    child wiki links are skipped individually (best effort).
    """
    for data in datas:
        science = ConsultScience()
        science.desc = data['desc']
        science.image = data['image']
        try:
            science.wiki = ItemWiki.objects.get(id=data['wiki_id'])
            science.save()
        except Exception:
            # `except Exception` replaces the bare `except:`; a missing main
            # wiki still skips this science row, as before.
            continue
        for wiki_id in data['wiki_ids']:
            science_wiki = ConsultScienceWiki()
            science_wiki.consult_science = science
            try:
                science_wiki.wiki = ItemWiki.objects.get(id=wiki_id)
                science_wiki.save()
            except Exception:
                # Skip unresolved child wiki links.
                pass
# coding=utf-8
from django.core.management import BaseCommand
from api.models import Area
class Command(BaseCommand):
    """One-off seed: add Thailand and Singapore phone-prefix areas."""

    @classmethod
    def _save_data(cls, name, num):
        """Persist one Area row."""
        Area(area_name=name, phone_prefix=num).save()

    def handle(self, *args, **kwargs):
        for area_name, prefix in ((u'泰国', u'+66'), (u'新加坡', u'+65')):
            self._save_data(area_name, prefix)
# coding=utf-8
import datetime
from django.contrib.auth.models import User
from openpyxl import load_workbook
from django.core.management import BaseCommand
from social.models import UserFollow
class Command(BaseCommand):
    """Backfill virtual fans for users listed in /tmp/add_fans.xlsx."""

    def handle(self, *args, **options):
        data = []
        wb = load_workbook(filename=u'/tmp/add_fans.xlsx')
        sheetnames = wb.get_sheet_names()
        ws = wb.get_sheet_by_name(sheetnames[0])
        # Read the excel: column 1 = target user id, column 3 = fan count.
        for rx in range(2, ws.get_highest_row() + 1):
            item = {}
            item['id'] = ws.cell(row=rx, column=1).value
            item['fans'] = ws.cell(row=rx, column=3).value
            data.append(item)
        # Add fans: pick users idle for at least a year who do not already
        # follow the target, and create virtual-fan rows in batches of 1000.
        for item in data:
            today = datetime.date.today()
            last_login = datetime.datetime(today.year - 1, today.month, today.day, 23, 59, 59)
            users_fans = UserFollow.objects.filter(follow_id=item['id']).values_list('user_id', flat=True)
            users = User.objects.filter(last_login__lte=last_login).exclude(id__in=list(users_fans))[0:int(item['fans'])]
            count = 1000
            flag = True
            start_num = 0
            # NOTE(review): `users` is a sliced queryset that gets re-sliced
            # (re-evaluated) on every pass, after bulk_create has inserted new
            # follow rows — confirm the batching still yields the intended set.
            while flag:
                fans_list = []
                for user in users[start_num: start_num+count]:
                    fans_list.append(UserFollow(follow_id=item['id'], user=user, is_virtual_fan=True))
                if fans_list:
                    print 'UserID={} start={}'.format(item['id'], start_num)
                    UserFollow.objects.bulk_create(fans_list)
                    start_num += count
                else:
                    flag = False
        print 'Done'
# coding=utf-8
import datetime
from django.core.management import BaseCommand
from api.models import Diary, Service
from api.models.types import TAG_TYPE
class Command(BaseCommand):
    """Copy item-level tags from a diary's service onto diaries missing them."""

    def handle(self, *args, **kwargs):
        start_time = datetime.datetime(2015, 11, 24)
        diaries = Diary.objects.filter(created_time__gt=start_time)
        for diary in diaries:
            tags = diary.tags.all()
            # Only diaries that have a service but no item/sub-item tag yet.
            if (diary.service and
                    not tags.filter(tag_type__in=[TAG_TYPE.ITEM_WIKI,
                                                  TAG_TYPE.BODY_PART_SUB_ITEM])):
                service = diary.service
                service_tags = service.tags.all()
                # NOTE(review): the break exits after adding the first
                # matching tag — confirm copying just one tag is intended.
                for tag in service_tags.filter(
                        tag_type__in=[TAG_TYPE.ITEM_WIKI,
                                      TAG_TYPE.BODY_PART_SUB_ITEM]):
                    diary.tags.add(tag)
                    break
# coding=utf-8
from django.core.management import BaseCommand
from api.models.slide import DisplaySite, Slide, SlideToDisplaySite
# DisplaySite seed rows: (display_type value, human-readable label).
items = [
    ('0', 'app轮播图'),
    ('1', '小程序轮播图'),
    ('2', 'M站轮播图'),
    ('3', 'PC站轮播图'),
]
class Command(BaseCommand):
    """
    There is no admin page for slide display sites, so this command seeds the
    base DisplaySite rows and, for backward compatibility, binds every
    existing Slide to every display site.
    """

    def handle(self, *args, **kwargs):
        if DisplaySite.objects.first():
            print('基础数据已经存在,不需要创建')
            return
        print('开始处理数据')
        DisplaySite.objects.bulk_create(
            [DisplaySite(display_type=item[0]) for item in items])
        # Backward-compat backfill: walk the Slide table in id order, 100 at a time.
        display_site_ids = DisplaySite.objects.all().values_list('id', flat=True)
        start_id = 1
        while True:
            slide_ids = list(Slide.objects.filter(id__gte=start_id).order_by('id')
                             .values_list('id', flat=True)[:100])
            if not slide_ids:
                break
            for site_id in display_site_ids:
                SlideToDisplaySite.objects.bulk_create(
                    [SlideToDisplaySite(slide_id=slide_id, display_site_id=site_id)
                     for slide_id in slide_ids]
                )
            # BUG FIX: advance past the last id actually processed instead of a
            # blind `start_id += 100`, which re-processed (duplicated) or
            # mis-paged rows whenever Slide ids are sparse.
            start_id = slide_ids[-1] + 1
        print('数据兼容处理完成')
# coding=utf-8
import time
import xlrd
from django.core.management import BaseCommand
from api.models.face.facial import FacialCombi
from gm_types.gaia import CONTOUR_TYPE, EYE_TYPE
class Command(BaseCommand):
    """Offline existing FacialCombi rows and re-import them from the excel file."""

    @classmethod
    def update_old_to_offline(cls):
        """Mark every existing facial combination as offline."""
        FacialCombi.objects.filter().update(is_online=False)

    @classmethod
    def read_and_insert(cls, file_path):
        """Parse the spreadsheet at ``file_path`` and bulk-insert FacialCombi rows."""
        xls_sheet = xlrd.open_workbook(file_path).sheets()[0]
        # Chinese label -> internal code, seeded from the enum definitions.
        # A plain loop replaces the old side-effect list comprehension
        # ([key_map.update(...) for i in item]), which built a throwaway list.
        key_map = {}
        for enum_type in (CONTOUR_TYPE, EYE_TYPE):
            for code, label in enum_type:
                key_map[label] = code
        # Extra spellings that do not appear in the enums.
        key_map.update({
            u"椭圆": 'tuoyuanlian',
            u"长脸": 'changlian',
            u"圆脸": 'yuanlian',
            u"倒三角脸": 'daosanjiaolian',
            u"菱形脸": 'lingxinglian',
            u"凤眼": 'fengyan',
            u"下垂眼": 'xiachuiyan',
            u"三角眼": 'sanjiaoyan',
            u"细长眼": 'xichangyan',
        })
        data_list = []
        for line in range(1, xls_sheet.nrows):
            row_value = xls_sheet.row_values(line)
            data_list.append(FacialCombi(
                contour=key_map[row_value[0]],
                eye=key_map[row_value[1]],
                gender=1 if row_value[2] == u"男" else 2,
                title=row_value[3],
                desc=row_value[4],
                celebrity=row_value[5],
                # The spreadsheet carries no data for these four features.
                chin="unknown",
                eyebrow="unknown",
                nose="unknown",
                lip="unknown",
            ))
        FacialCombi.objects.bulk_create(data_list)

    def handle(self, *args, **kwargs):
        time_start = int(time.time() * 1000)
        print('----------开始执行命令-----------------')
        print('----------开始更新老数据-----------------')
        self.update_old_to_offline()
        print('----------更新老数据完成-----------------')
        print('----------开始数据导入-----------------')
        file_path = "./api/management/files/facial_info.xlsx"
        self.read_and_insert(file_path)
        print('----------数据导入完成-----------------')
        time_end = int(time.time() * 1000)
        print("----------耗时 {} 毫秒----------------".format(time_end - time_start))
# coding=utf-8
import xlrd
from django.core.management import BaseCommand
from lasker.models import AwardResult
from gm_types.gaia import SERVICE_FLAG
def open_excel(file_name="famous_doctor_list.xlsx"):
    """Open an excel workbook, returning None (after logging) on failure.

    NOTE: callers must handle the None return; a failed open is only printed.
    """
    try:
        return xlrd.open_workbook(file_name)
    except Exception as e:
        # `except Exception as e` replaces the Python-2-only `except Exception, e`.
        print(str(e))
class Command(BaseCommand):
    """Import the famous-doctor award list from excel into AwardResult."""

    def handle(self, *args, **options):
        data = open_excel("famous_doctor_list.xlsx")
        doctor_table, hospital_table = data.sheets()[0], data.sheets()[1]
        # Soft-delete the previous award list before importing the new one.
        AwardResult.objects.filter(is_deleted=False).update(is_deleted=True)
        self._import_doctors(doctor_table)
        self._import_hospitals(hospital_table)

    def _import_doctors(self, table):
        """Insert/update DOCTOR award rows from the first sheet."""
        for row_num in range(1, table.nrows):
            row = table.row_values(row_num)
            if not row:
                continue
            laureate_id = row[5]
            laureate_name = row[0]
            # Award priority is 2 > 3 > 1; drop empty cells, keep at most one.
            award_name = ','.join([item for item in (row[3], row[4], row[2]) if item][:1])
            AwardResult.objects.update_or_create(laureate_id=laureate_id, defaults={
                'laureate_name': laureate_name,
                'laureate_type': SERVICE_FLAG.DOCTOR,
                'award_name': award_name,
                'is_deleted': False
            })
            print(u'*** handled doctor info: {} {} {}'.format(laureate_id, laureate_name, award_name))

    def _import_hospitals(self, table):
        """Insert/update HOSPITAL award rows from the second sheet."""
        for row_num in range(1, table.nrows):
            row = table.row_values(row_num)
            if not row:
                continue
            laureate_id = row[2]
            laureate_name = row[0]
            award_name = row[1]
            AwardResult.objects.update_or_create(laureate_id=laureate_id, defaults={
                'laureate_name': laureate_name,
                'laureate_type': SERVICE_FLAG.HOSPITAL,
                'award_name': award_name,
                'is_deleted': False
            })
            print(u'*** handled hospital info: {} {} {}'.format(laureate_id, laureate_name, award_name))
# coding=utf-8
from django.core.management import BaseCommand
from api.models import Service
class Command(BaseCommand):
    """Append the coupon-usage notice to every service's special_remind text."""

    def handle(self, *args, **options):
        coupon_state = (
            u'\n'
            u'【如何使用优惠卷】\n'
            u'所有带"底价"标签的美购,不可使用优惠卷。\n'
            u'在购物车结算中使用满减劵时,此美购将不计入满减金额,退款时可全额退款。\n'
        )
        for service in Service.objects.all():
            print(service.id)
            service.special_remind = (service.special_remind or u'') + coupon_state
            service.save()
# coding=utf-8
import hashlib
from django.conf import settings
from django.core.management import BaseCommand
from api.models import Hospital, PeriodHospital
class Command(BaseCommand):
    """Bind hospitals listed in hospital_list.txt to period entries keyed by md5."""

    def handle(self, *args, **options):
        # `with` guarantees the handle is closed (the old code leaked it).
        with open('hospital_list.txt') as f:
            lines = f.readlines()
        for line in lines:
            try:
                line = line.strip()
                hash_id = hashlib.md5(line).hexdigest()
                hospital = Hospital.objects.get(id=line)
                PeriodHospital.objects.get_or_create(hospital=hospital, period_id=hash_id)
            except Exception:
                # Was a bare `except:` — Exception keeps the best-effort
                # semantics without swallowing KeyboardInterrupt/SystemExit.
                print(u'bug:' + line)
# coding=utf-8
from django.core.management import BaseCommand
from hippo.models.merchant import Merchant, MerchantRelevance
from hippo.models.doctor import Doctor
class Command(BaseCommand):
    """
    Initialise merchant rows for every doctor flagged as a merchant.

    python manage.py add_merchant_data
    """

    def handle(self, *args, **options):
        merchant_doctors = Doctor.objects.filter(is_merchant=True)
        for doctor in merchant_doctors.iterator():
            merchant, _ = Merchant.objects.get_or_create(doctor_id=doctor.id, doctor_name=doctor.name)
            for item in doctor.get_merchant_doctors():
                MerchantRelevance.objects.get_or_create(merchant_id=merchant.id, doctor_id=item['doctor_id'])
                print(item['doctor_id'])
        print('Done!')
\ No newline at end of file
# coding=utf-8
import csv
from django.core.management import BaseCommand
from answer.models import Answer, Question, User
class Question_Data(object):
    """Row wrapper for Sheet1.csv: (id, title, content, images, user_id)."""

    def __init__(self, line):
        (self.id, self.title, self.content,
         self.images, self.user_id) = line[:5]
class Answer_Data(object):
    """Row wrapper for Sheet2.csv: (id, question_id, content, images, vote_num, user_id)."""

    def __init__(self, line):
        (self.id, self.question_id, self.content,
         self.images, self.vote_num, self.user_id) = line[:6]
class Command(BaseCommand):
    """Import questions (Sheet1.csv) and answers (Sheet2.csv), best effort per row."""

    def handle(self, *args, **kwargs):
        # open() + with replaces the Py2-only `file()` builtin and guarantees
        # the handles are closed; the unused `questions`/`answers` lists were
        # dropped.
        with open('Sheet1.csv', 'rb') as fp:
            for line in csv.reader(fp):
                q = Question_Data(line)
                try:
                    User.objects.get(id=q.user_id)  # ensure the author exists
                    Question.objects.create(user_id=q.user_id, title=q.title,
                                            id=q.id, content=q.content)
                except Exception:
                    # Bad/missing user or duplicate row: log and keep going.
                    print('error: question_id: ' + str(q.id))
        with open('Sheet2.csv', 'rb') as fp:
            for line in csv.reader(fp):
                a = Answer_Data(line)
                try:
                    User.objects.get(id=a.user_id)  # ensure the author exists
                    Answer.objects.create(id=a.id, user_id=a.user_id,
                                          content=a.content, like_num=a.vote_num,
                                          question_id=a.question_id)
                except Exception:
                    print('error: answer_id: ' + str(a.id))
# coding=utf-8
from django.core.management import BaseCommand
from api.models import Service
class Command(BaseCommand):
    """
    Bulk-adjust flash-sale service prices (gengmei price + seckill price):
    python manage.py add_service_price_discount update  (大促专用加)
    """

    def add_arguments(self, parser):
        parser.add_argument('param', nargs='+', type=str)

    def handle(self, *args, **options):
        print(options)
        for param in options['param']:
            if param == 'update':
                Process.save_data()
            elif param == 'test':
                Process.test_data()
class Process():
    """One-off helpers that bump the prices of the seckill services listed
    in dachuservice/4.txt by 11 (Gengmei price, discount, prepayment).

    ``test_data`` is a dry run (no save); ``save_data`` persists the change
    and appends one audit line per service to 0816_add.txt.
    """
    @staticmethod
    def save_data():
        """Apply the +11 price bump and record each change in 0816_add.txt."""
        result_log = open('0816_add.txt', 'a')
        try:
            with open('dachuservice/4.txt', 'r') as file_log:
                # Iterate the whole file. The previous readlines(100000)
                # call returned only the first ~100KB worth of lines and
                # silently dropped the rest of the ids.
                for line in file_log:
                    try:
                        service_id = int(line.strip())
                        print(service_id)
                        service = Service.objects.get(id=service_id)
                        service.gengmei_price += 11
                        service.discount += 11
                        service.pre_payment_price += 11
                        service.save()
                        message = u','.join([str(service.id), str(service.gengmei_price),
                                             str(service.discount), str(service.pre_payment_price)])
                        print(message)
                        # Append a newline so audit rows do not run together.
                        result_log.write(message + u'\n')
                    except Exception as e:
                        # Skip malformed ids / missing services; keep going.
                        print(e)
                        continue
            print(u"更新成功!")
        finally:
            result_log.close()
    @staticmethod
    def test_data():
        """Dry run: compute and print the bumped prices without saving."""
        with open('dachuservice/4.txt', 'r') as file_log:
            for line in file_log:
                try:
                    service_id = int(line.strip())
                    print(service_id)
                    service = Service.objects.get(id=service_id)
                    service.gengmei_price += 11
                    service.discount += 11
                    service.pre_payment_price += 11
                    message = u','.join([str(service.id), str(service.gengmei_price),
                                         str(service.discount), str(service.pre_payment_price)])
                    print(message)
                except Exception as e:
                    print(e)
                    continue
        print(u"测试一下没问题!请继续执行python manage.py add_service_price_discount update")
# coding=utf-8
import datetime
from django.core.management import BaseCommand
from gm_types.gaia import SERVICE_REVIEW_STATUS
from api.models import ServiceRegister, ServiceReviewRecord
class Command(BaseCommand):
"""
处理一批美购审核的错误数据
python manage.py add_serviceregister_review_record
"""
def handle(self, *args, **options):
list_service = [5250207,
5256440,
5256442,
4984359,
5241599,
5057094,
5256441,
5257054,
4917862,
5259828,
5259925,
5298944,
5210473,
5299088,
5244878,
5244978,
5177256,
5284359,
5244879,
5306079,
5176450,
5306217,
5140015,
5273267,
5269360,
5261428,
5098352,
5306216,
4804524,
5166222,
5299667,
5230620,
5299773,
4589947]
for item in list_service:
serviceregister = ServiceRegister.objects.get(service_id=item)
if ServiceReviewRecord.objects.filter(serviceregister_id=serviceregister.id).count() == 0:
update_data = u'处理一下老数据的错误.2016-09-08'
person_id = '5b1b9ab0e9ae11e5aa5700163e000a4a'
ServiceReviewRecord.objects.create(
person_id=person_id,
update_data=update_data,
serviceregister_id=serviceregister.id,
created_time=datetime.datetime.now(),
now_review_status=serviceregister.review_status,
review_type=SERVICE_REVIEW_STATUS.UNDER_REVIEW
)
print serviceregister.id
# coding=utf-8
from django.core.management import BaseCommand
from api.models.sign_activity import SignConfig
class Command(BaseCommand):
    """Create the initial SignConfig row with both switches disabled."""
    def handle(self, *args, **kwargs):
        print("BEGIN")
        # Both the activity entrance and the floating window start closed.
        SignConfig.objects.create(enter_switch=False, float_window_switch=False)
        print("END")
# -*- coding:utf-8 -*-
import math
import time
import datetime
from operator import itemgetter
from multiprocessing import Pool, Manager
from django.core.management.base import BaseCommand
from django import db
from api.models.doctor import Doctor
from api.models.sign_activity import SignActivity, UserLastSignRecord, UserLastSign, ActivityPrize
def get_activity_info():
    """Return {"activity_id": id} for the sign activity running right now,
    or {} when no online activity covers the current moment."""
    now = datetime.datetime.now()
    current = SignActivity.objects.filter(
        is_online=True, start_time__lte=now, end_time__gte=now).first()
    return {"activity_id": current.id} if current else {}
def transfer_data(result):
    """Pool worker: migrate one chunk of users' legacy sign-in days.

    result: {"user_ids": [...], "activity_id": id of the running activity}.
    For each non-doctor user with a qualifying legacy record, the available
    days are seeded into (or added onto) their UserLastSign row.
    """
    user_ids = result.get('user_ids')
    activity_id = result.get('activity_id')
    # Dedupe the chunk so a user is migrated at most once per call.
    user_ids = list(set(user_ids))
    for user_id in user_ids:
        # Doctor accounts are excluded from the sign-in migration.
        doctor = Doctor.objects.filter(user_id=user_id)
        if doctor:
            continue
        try:
            # Legacy record must be "new", under 10 days, and end on 2018-12-27.
            user_sign = UserLastSignRecord.objects.get(user_id=user_id, available_days__lt=10, is_new=True,
                                                       end_time__gte=datetime.datetime(year=2018, month=12, day=27),
                                                       end_time__lt=datetime.datetime(year=2018, month=12, day=28))
        except UserLastSignRecord.DoesNotExist:
            continue
        last_record, created = UserLastSign.objects.get_or_create(user_id=user_id, activity_id=activity_id)
        if created:
            # Fresh row: seed with the legacy day count.
            last_record.available_days = user_sign.available_days
        else:
            # Existing row: accumulate on top of what is already there.
            last_record.available_days += user_sign.available_days
        last_record.save()
class Command(BaseCommand):
    """Migrate legacy UserLastSignRecord sign-in days into UserLastSign for
    the currently running activity, fanning the work out over 4 processes."""
    def handle(self, *args, **options):
        print('------ starting -----')
        start_time = time.time()
        print("start at: ", start_time)
        result = get_activity_info()
        if not result:
            # No sign activity currently online: nothing to migrate.
            return
        activity_id = result.get('activity_id')
        per_num = 200
        # Materialize the distinct user ids ONCE. The previous code called
        # list(all_record) inside the chunk loop, re-running the full query
        # and rebuilding the entire list for every chunk (O(n^2) work).
        # The unused Manager().Queue() the old code created has been removed.
        user_ids = list(UserLastSignRecord.objects.all()
                        .values_list('user_id', flat=True).distinct())
        args_list = [{'user_ids': user_ids[i:i + per_num], 'activity_id': activity_id}
                     for i in range(0, len(user_ids), per_num)]
        # Close inherited DB connections so each worker opens its own.
        db.connections.close_all()
        pool = Pool(processes=4)
        pool.map(transfer_data, args_list)
        pool.close()
        pool.join()
        end_time = time.time()
        print("end at: ", end_time)
        print('total use {} s.'.format(end_time - start_time))
        print('Done!')
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = "chenwei"
# Date: 2019/8/20
from utils.execel import ExcelReader
from django.core.management import BaseCommand
from api.models import Doctor
from lasker.models import AwardResult
from gm_types.gaia import SERVICE_FLAG, DOCTOR_TYPE
from django.db.models import F
filename = "12.xlsx"
def update_or_create_hospital(row):
    """Attach *award_name* to the hospital's existing award, or create one.

    row: (hospital_id, hospital_name, award_name, laureate_type) from the sheet.
    """
    hospital_id, hospital_name, award_name, _ = row
    # Hospitals are addressed through their officer doctor account.
    doctor = Doctor.objects.filter(
        hospital_id=hospital_id, doctor_type=DOCTOR_TYPE.OFFICER).first()
    if not doctor:
        print("not exist....", row)
        return
    # NOTE(review): the lookup below uses laureate_id=doctor.id while the
    # create branch uses laureate_id=hospital_id -- these look inconsistent;
    # confirm which id AwardResult stores for hospitals.
    award = AwardResult.objects.filter(
        laureate_type=SERVICE_FLAG.HOSPITAL,
        is_deleted=False, laureate_id=doctor.id
    ).first()
    if award:
        # NOTE(review): "+" on an F() expression compiles to SQL "+", which is
        # numeric (not string concat) on MySQL -- verify this actually appends
        # text; django.db.models.functions.Concat may be required.
        award.award_name = F("award_name") + ',' + award_name
        award.save()
    else:
        AwardResult.objects.create(
            laureate_id=hospital_id,
            laureate_name=hospital_name,
            laureate_type=SERVICE_FLAG.HOSPITAL,
            award_name=award_name)
def update_or_create_doctor(row):
    """Attach *award_name* to the doctor's existing award, or create one.

    row: (doctor_id, doctor_name, award_name, laureate_type) from the sheet.
    """
    doctor_id, doctor_name, award_name, _ = row
    doctor = Doctor.objects.filter(id=doctor_id).first()
    if not doctor:
        print("not exist....", row)
        return
    # Only the newest non-deleted award for this doctor is amended.
    award = AwardResult.objects.filter(
        laureate_type=SERVICE_FLAG.DOCTOR,
        is_deleted=False, laureate_id=doctor_id
    ).first()
    if award:
        # NOTE(review): "+" on an F() expression compiles to SQL "+", which is
        # numeric (not string concat) on MySQL -- verify this actually appends
        # text; django.db.models.functions.Concat may be required.
        award.award_name = F("award_name") + ',' + award_name
        award.save()
    else:
        AwardResult.objects.create(
            laureate_id=doctor_id,
            laureate_name=doctor_name,
            laureate_type=SERVICE_FLAG.DOCTOR,
            award_name=award_name)
class Command(BaseCommand):
    """Load award rows from 12.xlsx and upsert doctor/hospital awards."""
    def handle(self, *args, **options):
        sheet = ExcelReader(filename)
        # Row 0 is the header; each data row is (id, name, award_name, type).
        for index in range(1, sheet.row_number):
            row = sheet.read_row(index)
            assert len(row) == 4
            # Dispatch on the laureate type in the last column.
            handler = (update_or_create_doctor
                       if row[3] == SERVICE_FLAG.DOCTOR
                       else update_or_create_hospital)
            handler(row)
# coding=utf-8
from django.core.management import BaseCommand
from api.models import VideoWhiteList
class Command(BaseCommand):
    """
    Import user ids from video_white_list.txt into the video whitelist.

    One user id per line; failing lines are reported and skipped.
    """
    def handle(self, *args, **options):
        # `with open` closes the handle (the original leaked it) and avoids
        # shadowing the `file` builtin.
        with open('video_white_list.txt') as fp:
            for raw in fp.readlines():
                # readlines() keeps the trailing newline; strip it so the
                # whitelist stores "123" rather than "123\n".
                line = raw.strip()
                if not line:
                    continue
                print(line)
                try:
                    VideoWhiteList.objects.get_or_create(user_id=line)
                except Exception:
                    print('error:' + line)
                    continue
                print('success:' + line)
# coding=utf-8
from __future__ import print_function, unicode_literals, absolute_import
from django.core.management import BaseCommand
from rpc.tool.random_tool import random_str
from api.models import UserExtra, Person
from api.models.types import PLATFORM_CHANNEL
class Command(BaseCommand):
    """
    Create the shared anonymous ("guest") user.

    The anonymous user should be created only ONCE.
    """
    def handle(self, *args, **options):
        # Reserved 11-digit placeholder number for the guest account.
        phone = '88888888881'
        # Random 32-char string as an unusable password.
        anonymous_user = UserExtra.create_user(phone, '', random_str(32))
        anonymous_user.last_name = '游客'
        anonymous_user.save()
        Person.objects.create(phone=phone, user=anonymous_user, platform=PLATFORM_CHANNEL.UNKNOWN)
        print ('Anonymous user id is %d' % (anonymous_user.id,))
# coding=utf-8
from django.core.management import BaseCommand
from api.models import Province
class Command(BaseCommand):
    """Flag municipality-level provinces (plus special foreign destinations
    sold as city-level entries) as municipalities."""
    # The four Chinese municipalities, special regions, and foreign cities.
    SPECIAL_CITIES = (u'北京', u'天津', u'上海', u'重庆', u'首尔',
                      u'香港', u'台湾', u'澳门', u'济州岛', u'曼谷')
    def handle(self, *args, **options):
        Province.objects.filter(name__in=self.SPECIAL_CITIES).update(is_municipality=True)
# coding=utf-8
from django.core.management import BaseCommand
from api.models.coupon import Coupon
class Command(BaseCommand):
    """
    Backfill Coupon.benefit_type from the legacy has_threshold flag.
    (The previous docstring was copy-pasted from the merchant-data command.)
    """
    def handle(self, *args, **options):
        print('开始同步....')
        # Copy the legacy flag row by row so model save() logic still runs.
        for c in Coupon.objects.all():
            c.benefit_type = c.has_threshold
            c.save()
        print('同步结束....')
\ No newline at end of file
# coding=utf-8
import datetime
from django.core.management import BaseCommand
from api.models import SimpleActionLog
def _reconcile_log_file(source_name, success_name, error_name, window_seconds, new_action):
    """Match each "time,service_id,user_id" line of *source_name* against
    SimpleActionLog rows within +/-*window_seconds* of the recorded time.

    Exactly one match: stamp it with *new_action* and log to *success_name*.
    Multiple matches: log the ambiguity to *error_name* for manual review.
    No match: the line is dropped silently (same as the original script).
    """
    source = open(source_name, 'r')
    success_log = open(success_name, 'a')
    error_log = open(error_name, 'a')
    try:
        while True:
            # Read in ~100KB batches to bound memory on large dumps.
            lines = source.readlines(100000)
            if not lines:
                break
            for line in lines:
                stamp, raw_service, raw_user = line.strip('\n').split(',')
                action_time = datetime.datetime.strptime(stamp, '%Y-%m-%d %H:%M:%S')
                matches = SimpleActionLog.objects.filter(
                    action_time__gte=action_time - datetime.timedelta(seconds=window_seconds),
                    action_time__lte=action_time + datetime.timedelta(seconds=window_seconds),
                    object_type=u'serviceregister',
                    action_new=2,
                    action='',
                    message=str(int(raw_service)),
                    user_id=int(raw_user)
                )
                if len(matches) > 1:
                    # Ambiguous: record every candidate id for manual review.
                    result_line = line.strip('\n') + u',result:'
                    for item in matches:
                        result_line += str(item.id) + u','
                    error_log.writelines(result_line + u'\n')
                elif len(matches) == 1:
                    matches[0].action_new = new_action
                    matches[0].save()
                    success_log.writelines(line.strip('\n') + u',result:' + str(matches[0].id) + u'\n')
    finally:
        # The original leaked all three handles if a line failed to parse.
        source.close()
        error_log.close()
        success_log.close()
class Command(BaseCommand):
    """
    Reconcile audit log data: python manage.py async_log_data

    Runs the "pass" dump with a +/-60s window (action_new=11), then the
    "audit reject" dump with a +/-1s window (action_new=12). The two loops
    in the original were byte-for-byte duplicates except for file names,
    window size and the new action code, so they now share one helper.
    """
    def handle(self, *args, **options):
        _reconcile_log_file('pass_due.txt', 'pass_result_success.txt',
                            'pass_result_erro.txt', 60, 11)
        _reconcile_log_file('audit_reject_due.txt', 'audit_reject_result_success.txt',
                            'audit_reject_result_erro.txt', 1, 12)
# coding=utf-8
from django.core.management import BaseCommand
from answer.models import Answer, Question, UserAnswerQuestion
class Command(BaseCommand):
    """
    Sync online rows from the question/answer tables into user_question_answer.
    """
    def handle(self, *args, **options):
        # NOTE(review): update_or_create() is called with every field in the
        # lookup part and no defaults=, so rows only dedupe when user, object
        # AND create_time all match exactly -- confirm that is intended.
        for answer in Answer.objects.filter(user_id__isnull=False, is_online=True):
            UserAnswerQuestion.objects.update_or_create(user=answer.user, answer=answer, create_time=answer.create_time)
        for question in Question.objects.filter(user_id__isnull=False, is_online=True):
            UserAnswerQuestion.objects.update_or_create(user=question.user, question=question, create_time=question.create_time)
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = "chenwei"
# Date: 2019/6/3
import json
from django.core.management import BaseCommand
from api.models.servicehome import ServiceHomeOperation, \
ServiceRelatedOperation
class Command(BaseCommand):
    """Expand each ServiceHomeOperation's JSON `details` list into
    ServiceRelatedOperation rows keyed by (servicehome, position)."""
    def handle(self, *args, **options):
        for operation in ServiceHomeOperation.objects.all():
            raw = operation.details
            elements = json.loads(raw) if raw else []
            # position is the 1-based index in the JSON list; entries missing
            # an id or image keep their slot number but are skipped.
            for position, element in enumerate(elements, start=1):
                if element['id'] and element['img']:
                    ServiceRelatedOperation.objects.update_or_create(
                        servicehome_id=operation.id,
                        position=position,
                        defaults={'image': element['img'], 'jump_id': element['id']}
                    )
\ No newline at end of file
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
__title__ = '同步用户已绑定的手机号到实名制表中'
__author__ = 'xierong@gmei.com'
__mtime__ = '17/9/18'
des:
手机号注册 -> Person
更新手机号 -> UserExtra
绑定手机号 -> Person,UserExtra
'''
import multiprocessing
from django.core.management import BaseCommand
from django.db import connection
from api.models.user import UserAuthentication
from api.models.person import Person
def AsyncUserPhone(start_index, end_index):
    """Worker: copy bound phone numbers for Person rows [start_index, end_index)
    into UserAuthentication, in batches of 1000.

    Runs inside a multiprocessing pool; DB connections are closed after each
    query so a forked worker never reuses the parent's MySQL socket.
    """
    print('%s %s' % (start_index, end_index))
    print('start work')
    start_pk = start_index
    while start_pk < end_index:
        end_pk = min(start_pk + 1000, end_index)
        try:
            phone_info = Person.objects.filter(phone__isnull=False).values('user_id', 'phone')[start_pk:end_pk]
            connection.close()  # fix problem: mysql closed connect unilateral
            insert_lst = []
            for item in phone_info:
                # Membership test fix: the original called
                # .get(user_id=...).exist(), which raised on every row
                # (DoesNotExist when absent, AttributeError when present --
                # exists() lives on querysets, not instances).
                if UserAuthentication.objects.filter(user_id=item['user_id']).exists():
                    continue
                insert_lst.append(UserAuthentication(phone=item['phone'], user_id=item['user_id']))
            if insert_lst:
                print('start insert')
                UserAuthentication.objects.bulk_create(insert_lst)
            connection.close()
        except Exception as e:
            print(e)
        # Always advance to the next batch; the original stayed on the same
        # slice after an exception and looped forever.
        start_pk = end_pk
class Command(BaseCommand):
    """Fan the Person -> UserAuthentication phone backfill out over 4 processes."""
    def handle(self, *args, **options):
        total_pk = Person.objects.count()
        connection.close()
        print('total person count: %s' % total_pk)
        process_num = 4
        final_pk = total_pk + 1
        # Round UP when splitting the range: the original floor division
        # left up to process_num-1 trailing records assigned to no worker.
        index_num = (final_pk + process_num - 1) // process_num
        pool = multiprocessing.Pool(processes=process_num)
        for i in range(process_num):
            start_pk = i * index_num
            # Clamp the last slice to the actual end of the range.
            end_pk = min(start_pk + index_num, final_pk)
            pool.apply_async(AsyncUserPhone, (start_pk, end_pk))
        pool.close()
        pool.join()
\ No newline at end of file
# coding=utf-8
from django.core.management import BaseCommand
from answer.models import Question
from api.models.user import UserExtra
class Command(BaseCommand):
"""
同步api_userextra表中city_id到api_question的city_id
"""
def handle(self, *args, **options):
user_ids = Question.objects.values_list('user_id', flat=True).distinct()
print 'get user id : done'
city_info = UserExtra.objects.filter(user_id__in=user_ids, city_id__isnull = False).values('user_id', 'city_id')
print 'get city info : done'
print 'start update'
for item in city_info:
Question.objects.filter(user_id=item['user_id']).update(city_id=item['city_id'])
print 'end update'
\ No newline at end of file
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__: vv
# Date: 2019/9/6
from django.core.management import BaseCommand
from api.tasks.sync_bdtransfer_status import check_clues_first_stage_status, check_second_and_third_stage
class Command(BaseCommand):
    """Trigger BD-transfer clue status checks.

    -t f  -> first-stage clue status check
    -t s  -> second/third-stage check
    (any other value runs nothing but still prints the banners)
    """
    def add_arguments(self, parser):
        parser.add_argument('-t', '--type', type=str, help="")
    def handle(self, *args, **options):
        # Renamed from `type`, which shadowed the builtin.
        check_type = options['type']
        print("-----------start to handle-------")
        if check_type == "f":
            check_clues_first_stage_status()
        elif check_type == "s":
            check_second_and_third_stage()
        print('-----------end to handle---------')
# coding=utf-8
from django.core.management import BaseCommand
from api.tasks.order_task import calc_doctor_discount_all
class Command(BaseCommand):
    """Recompute discount figures for all doctors (delegates to the task)."""
    def handle(self, *args, **options):
        calc_doctor_discount_all()
# coding=utf-8
from django.core.management import BaseCommand
from api.tasks.hospital_task import calc_doctor_tags_for_search, calc_hospital_tags_for_search
class Command(BaseCommand):
    """Recompute search tags for doctors and hospitals (delegates to tasks)."""
    def handle(self, *args, **options):
        calc_doctor_tags_for_search()
        calc_hospital_tags_for_search()
# coding=utf-8
from django.core.management import BaseCommand
from api.models import Service, const_strings
from rpc.cache import ViewRecord
class Command(BaseCommand):
    """Seed the service view-count cache with each service's sell_amount."""
    def handle(self, *args, **options):
        services = Service.objects.all()
        for s in services:
            # ViewRecord is a cache wrapper keyed by object type, then id.
            ViewRecord(const_strings.SERVICE)[s.id] = s.sell_amount
\ No newline at end of file
# coding=utf-8
import datetime
import json
from multiprocessing import Manager, Pool
from django.core.management import BaseCommand
from api.models import UserExtra
from rpc.cache import sleep_action_fans_cache
from rpc.cache import sleep_noaction_fans_cache
# Process-safe lists shared with the pool workers defined below.
manager = Manager()
action_user = manager.list()  # users whose topic_count > 0
no_action_user = manager.list()  # users with no topics
def calc_user_extra(user_extra):
    """Pool worker: bucket one dormant user by whether they ever posted a topic."""
    try:
        user = user_extra.user
        topic_count = int(user.person.topic_count)
        if topic_count > 0:
            action_user.append(user.id)
        else:
            no_action_user.append(user.id)
        print user.id
    except:
        # Best effort: skip users with missing person rows / bad counts.
        print 'error'
        pass
class Command(BaseCommand):
    """Classify users dormant since 2015 into poster/non-poster lists and
    cache both id lists as JSON."""
    def handle(self, *args, **options):
        # Users who have not logged in since 2015-01-01.
        user_extras = UserExtra.objects.filter(last_login__lt=datetime.datetime(2015, 1, 1))
        pool = Pool(processes=4)
        pool.map(calc_user_extra, user_extras)
        pool.close()
        pool.join()
        # Persist the shared manager lists; both caches use the same key.
        sleep_action_fans_cache.set('sleep_user', json.dumps(list(action_user)))
        sleep_noaction_fans_cache.set('sleep_user', json.dumps(list(no_action_user)))
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment