Commit f2142cd4 authored by lixiaofang's avatar lixiaofang

Merge branch 'vest_edit_judge' into dev

parents b357ea40 15aef535
......@@ -83,3 +83,8 @@ gaia/rpcd.json
*.swp
dbmw_deploy/config.dir/
.idea/codeStyles/Project.xml
.idea/misc.xml
.idea/physical.iml
.idea/
\ No newline at end of file
<component name="ProjectCodeStyleConfiguration">
<code_scheme name="Project" version="173">
<DBN-PSQL>
<case-options enabled="false">
<option name="KEYWORD_CASE" value="lower" />
<option name="FUNCTION_CASE" value="lower" />
<option name="PARAMETER_CASE" value="lower" />
<option name="DATATYPE_CASE" value="lower" />
<option name="OBJECT_CASE" value="preserve" />
</case-options>
<formatting-settings enabled="false" />
</DBN-PSQL>
<DBN-SQL>
<case-options enabled="false">
<option name="KEYWORD_CASE" value="lower" />
<option name="FUNCTION_CASE" value="lower" />
<option name="PARAMETER_CASE" value="lower" />
<option name="DATATYPE_CASE" value="lower" />
<option name="OBJECT_CASE" value="preserve" />
</case-options>
<formatting-settings enabled="false">
<option name="STATEMENT_SPACING" value="one_line" />
<option name="CLAUSE_CHOP_DOWN" value="chop_down_if_statement_long" />
<option name="ITERATION_ELEMENTS_WRAPPING" value="chop_down_if_not_single" />
</formatting-settings>
</DBN-SQL>
</code_scheme>
</component>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ChangeListManager">
<list default="true" id="d7dd36ca-85ef-4a59-9db5-8b1ee4993a4e" name="Default Changelist" comment="">
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
</list>
<option name="EXCLUDED_CONVERTED_TO_IGNORED" value="true" />
<option name="SHOW_DIALOG" value="false" />
<option name="HIGHLIGHT_CONFLICTS" value="true" />
<option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
<option name="LAST_RESOLUTION" value="IGNORE" />
</component>
<component name="FUSProjectUsageTrigger">
<session id="-127591351">
<usages-collector id="statistics.lifecycle.project">
<counts>
<entry key="project.open.time.1" value="1" />
<entry key="project.opened" value="1" />
</counts>
</usages-collector>
<usages-collector id="statistics.file.extensions.open">
<counts>
<entry key="py" value="5" />
</counts>
</usages-collector>
<usages-collector id="statistics.file.types.open">
<counts>
<entry key="Python" value="5" />
</counts>
</usages-collector>
<usages-collector id="statistics.file.extensions.edit">
<counts>
<entry key="py" value="28" />
</counts>
</usages-collector>
<usages-collector id="statistics.file.types.edit">
<counts>
<entry key="Python" value="28" />
</counts>
</usages-collector>
</session>
<session id="1569122105">
<usages-collector id="statistics.file.extensions.open">
<counts>
<entry key="py" value="2" />
<entry key="template" value="1" />
<entry key="xml" value="1" />
</counts>
</usages-collector>
<usages-collector id="statistics.file.types.open">
<counts>
<entry key="PLAIN_TEXT" value="1" />
<entry key="Python" value="2" />
<entry key="XML" value="1" />
</counts>
</usages-collector>
</session>
</component>
<component name="FileEditorManager">
<leaf SIDE_TABS_SIZE_LIMIT_KEY="300">
<file pinned="false" current-in-tab="false">
<entry file="file://$PROJECT_DIR$/search/views/topic.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="209">
<caret line="85" column="46" selection-start-line="85" selection-start-column="46" selection-end-line="85" selection-end-column="46" />
<folding>
<element signature="e#463#1573#0" />
<element signature="e#6049#6240#0" />
<element signature="e#7051#7614#0" />
<element signature="e#8492#10033#0" />
<element signature="e#10162#10820#0" />
<element signature="e#10934#11363#0" />
</folding>
</state>
</provider>
</entry>
</file>
<file pinned="false" current-in-tab="true">
<entry file="file://$PROJECT_DIR$/search/utils/topic.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="1760">
<caret line="293" column="66" selection-start-line="293" selection-start-column="66" selection-end-line="293" selection-end-column="66" />
<folding>
<element signature="e#47#61#0" expanded="true" />
<element signature="e#363#1043#0" />
</folding>
</state>
</provider>
</entry>
</file>
</leaf>
</component>
<component name="Git.Settings">
<option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$" />
</component>
<component name="IdeDocumentHistory">
<option name="CHANGED_PATHS">
<list>
<option value="$PROJECT_DIR$/trans2es/management/commands/trans2es_data2es_parallel.py" />
<option value="$PROJECT_DIR$/trans2es/models/face_user_contrast_similar.py" />
<option value="$PROJECT_DIR$/search/views/topic.py" />
<option value="$PROJECT_DIR$/search/utils/topic.py" />
<option value="$PROJECT_DIR$/physical/settings.py" />
</list>
</option>
</component>
<component name="ProjectFrameBounds">
<option name="x" value="279" />
<option name="y" value="23" />
<option name="width" value="1280" />
<option name="height" value="738" />
</component>
<component name="ProjectInspectionProfilesVisibleTreeState">
<entry key="Project Default">
<profile-state>
<expanded-state>
<State />
<State>
<id>General</id>
</State>
</expanded-state>
<selected-state>
<State>
<id>Buildout</id>
</State>
</selected-state>
</profile-state>
</entry>
</component>
<component name="ProjectView">
<navigator proportions="" version="1">
<foldersAlwaysOnTop value="true" />
</navigator>
<panes>
<pane id="Scope" />
<pane id="ProjectPane">
<subPane>
<expand>
<path>
<item name="physical" type="b2602c69:ProjectViewProjectNode" />
<item name="physical" type="462c0819:PsiDirectoryNode" />
</path>
<path>
<item name="physical" type="b2602c69:ProjectViewProjectNode" />
<item name="physical" type="462c0819:PsiDirectoryNode" />
<item name="linucb" type="462c0819:PsiDirectoryNode" />
</path>
<path>
<item name="physical" type="b2602c69:ProjectViewProjectNode" />
<item name="physical" type="462c0819:PsiDirectoryNode" />
<item name="physical" type="462c0819:PsiDirectoryNode" />
</path>
<path>
<item name="physical" type="b2602c69:ProjectViewProjectNode" />
<item name="physical" type="462c0819:PsiDirectoryNode" />
<item name="search" type="462c0819:PsiDirectoryNode" />
</path>
<path>
<item name="physical" type="b2602c69:ProjectViewProjectNode" />
<item name="physical" type="462c0819:PsiDirectoryNode" />
<item name="search" type="462c0819:PsiDirectoryNode" />
<item name="utils" type="462c0819:PsiDirectoryNode" />
</path>
<path>
<item name="physical" type="b2602c69:ProjectViewProjectNode" />
<item name="physical" type="462c0819:PsiDirectoryNode" />
<item name="search" type="462c0819:PsiDirectoryNode" />
<item name="views" type="462c0819:PsiDirectoryNode" />
</path>
</expand>
<select />
</subPane>
</pane>
</panes>
</component>
<component name="PropertiesComponent">
<property name="last_opened_file_path" value="$PROJECT_DIR$" />
<property name="settings.editor.selected.configurable" value="com.jetbrains.python.configuration.PyActiveSdkModuleConfigurable" />
</component>
<component name="RecentsManager">
<key name="MoveFile.RECENT_KEYS">
<recent name="$PROJECT_DIR$/search/views" />
</key>
</component>
<component name="RunDashboard">
<option name="ruleStates">
<list>
<RuleState>
<option name="name" value="ConfigurationTypeDashboardGroupingRule" />
</RuleState>
<RuleState>
<option name="name" value="StatusDashboardGroupingRule" />
</RuleState>
</list>
</option>
</component>
<component name="SvnConfiguration">
<configuration />
</component>
<component name="TaskManager">
<task active="true" id="Default" summary="Default task">
<changelist id="d7dd36ca-85ef-4a59-9db5-8b1ee4993a4e" name="Default Changelist" comment="" />
<created>1548319196437</created>
<option name="number" value="Default" />
<option name="presentableId" value="Default" />
<updated>1548319196437</updated>
</task>
<servers />
</component>
<component name="ToolWindowManager">
<frame x="0" y="23" width="1280" height="724" extended-state="6" />
<layout>
<window_info content_ui="combo" id="Project" order="0" visible="true" weight="0.22160445" />
<window_info id="Structure" order="1" side_tool="true" weight="0.25" />
<window_info id="DB Browser" order="2" />
<window_info id="Favorites" order="3" side_tool="true" />
<window_info anchor="bottom" id="Message" order="0" />
<window_info anchor="bottom" id="Find" order="1" />
<window_info anchor="bottom" id="Run" order="2" />
<window_info anchor="bottom" id="Debug" order="3" weight="0.4" />
<window_info anchor="bottom" id="Cvs" order="4" weight="0.25" />
<window_info anchor="bottom" id="Inspection" order="5" weight="0.4" />
<window_info anchor="bottom" id="TODO" order="6" />
<window_info anchor="bottom" id="Version Control" order="7" show_stripe_button="false" />
<window_info anchor="bottom" id="DB Execution Console" order="8" />
<window_info active="true" anchor="bottom" id="Terminal" order="9" visible="true" weight="0.44663382" />
<window_info anchor="bottom" id="Python Console" order="10" />
<window_info anchor="bottom" id="Event Log" order="11" side_tool="true" />
<window_info anchor="right" id="Commander" internal_type="SLIDING" order="0" type="SLIDING" weight="0.4" />
<window_info anchor="right" id="Ant Build" order="1" weight="0.25" />
<window_info anchor="right" content_ui="combo" id="Hierarchy" order="2" weight="0.25" />
</layout>
</component>
<component name="VcsContentAnnotationSettings">
<option name="myLimit" value="2678400000" />
</component>
<component name="editorHistoryManager">
<entry file="file://$PROJECT_DIR$/trans2es/models/face_user_contrast_similar.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="240">
<caret line="21" column="26" selection-start-line="21" selection-start-column="26" selection-end-line="21" selection-end-column="26" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/trans2es/management/commands/trans2es_data2es_parallel.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="364">
<caret line="129" column="63" selection-start-line="129" selection-start-column="54" selection-end-line="129" selection-end-column="63" />
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/search/utils/topic.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="1760">
<caret line="293" column="66" selection-start-line="293" selection-start-column="66" selection-end-line="293" selection-end-column="66" />
<folding>
<element signature="e#47#61#0" expanded="true" />
<element signature="e#363#1043#0" />
</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/app_conf.xml">
<provider selected="true" editor-type-id="text-editor" />
</entry>
<entry file="file://$PROJECT_DIR$/physical/settings_local.py.template">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="-1987" />
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/search/views/contrast_similar.py">
<provider selected="true" editor-type-id="text-editor" />
</entry>
<entry file="file://$PROJECT_DIR$/search/views/topic.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="209">
<caret line="85" column="46" selection-start-line="85" selection-start-column="46" selection-end-line="85" selection-end-column="46" />
<folding>
<element signature="e#463#1573#0" />
<element signature="e#6049#6240#0" />
<element signature="e#7051#7614#0" />
<element signature="e#8492#10033#0" />
<element signature="e#10162#10820#0" />
<element signature="e#10934#11363#0" />
</folding>
</state>
</provider>
</entry>
<entry file="file://$PROJECT_DIR$/physical/settings.py">
<provider selected="true" editor-type-id="text-editor">
<state relative-caret-position="-2448">
<caret line="12" column="9" lean-forward="true" selection-start-line="12" selection-start-column="9" selection-end-line="12" selection-end-column="9" />
</state>
</provider>
</entry>
</component>
<component name="masterDetails">
<states>
<state key="ScopeChooserConfigurable.UI">
<settings>
<splitter-proportions>
<option name="proportions">
<list>
<option value="0.2" />
</list>
</option>
</splitter-proportions>
</settings>
</state>
</states>
</component>
</project>
\ No newline at end of file
FROM python:3.6
ENV PATH="/usr/local/bin:$PATH"
COPY . /srv/apps/physical/
WORKDIR /root/.ssh/
ADD ssh/id_rsa .
ADD ./sources.list /etc/apt/sources.list
WORKDIR /srv/apps/physical/
RUN chmod -R 600 /root/.ssh/id_rsa \
&& echo "StrictHostKeyChecking no" >> /etc/ssh/ssh_config \
&& mkdir -p /data/log/physical/app
RUN apt-key adv --recv-keys --keyserver keyserver.ubuntu.com 3B4FE6ACC0B21F32 \
&& apt-get -y update && apt-get -y install mysql-client libmysqlclient-dev \
&& apt-get clean \
&& apt-get autoclean \
&& rm -rf /var/lib/apt/lists/*
RUN pip3 install -i https://pypi.tuna.tsinghua.edu.cn/simple -r requirements.txt
CMD bash -c 'source ~/.bashrc && celery -A physical worker -Q tapir-alpha --loglevel=DEBUG --maxtasksperchild 500 -c 62'
Strategy service code, e.g. the ES query layer (ESPerform).
Container run command:
docker run -it -p 9999:9999 -d \
--volume=/etc/gm-config:/etc/gm-config \
--volume=/etc/resolv.conf:/etc/resolv.conf \
--volume=/srv/apps/physical/physical/settings_local.py:/srv/apps/physical/physical/settings_local.py \
--entrypoint="/bin/bash" physical-test:1.0 "-c" "source ~/.bashrc && celery -A physical worker -Q tapir-alpha --loglevel=DEBUG --maxtasksperchild 500 -c 62"
......@@ -17,5 +17,7 @@
<element value="search.views.contrast_similar"/>
<element value="injection.data_sync.tasks"/>
<element value="search.views.contrast_similar"/>
<element value="search.views.search_hotword"/>
<element value="search.views.product"/>
</config>
</gm_rpcd_config>
import sys
from gm_rpcd.commands.utils import add_cwd_to_path
from gm_rpcd.internals.utils import serve
def main(args):
add_cwd_to_path()
from gm_rpcd.internals.configuration import config
config.is_develop_mode = True
config.freeze()
host = '127.0.0.1'
port = 9000
try:
first_arg = args[0]
except IndexError:
pass
else:
if ':' in first_arg:
host, port = first_arg.split(':')
port = int(port)
else:
port = int(first_arg)
print('Serving on {}:{}'.format(host, port))
serve(host=host, port=port)
if __name__ == '__main__':
main(sys.argv[1:])
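# Usage sketch (added for illustration; this script's filename is not shown in
# the diff, so "rpcd_serve.py" below is a placeholder):
#   python rpcd_serve.py              # serves on 127.0.0.1:9000
#   python rpcd_serve.py 9999         # serves on 127.0.0.1:9999
#   python rpcd_serve.py 0.0.0.0:9999 # host:port form, all interfaces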
# -*- coding: UTF-8 -*-
import logging
import traceback
import json
import pymysql
import threading
import random
import datetime
from celery import shared_task
from django.conf import settings
from django.core import serializers
from trans2es.type_info import get_type_info_map
# from rpc.all import get_rpc_remote_invoker
from libs.es import ESPerform
from libs.cache import redis_client
from trans2es.models.face_user_contrast_similar import FaceUserContrastSimilar, UserSimilarScore
from linucb.utils.register_user_tag import RegisterUserTag
from trans2es.models.tag import SettingsConfig, Tag
@shared_task
def write_to_es(es_type, pk_list, use_batch_query_set=False):
try:
pk_list = list(frozenset(pk_list))
if es_type == "register_user_tag":
RegisterUserTag.get_register_user_tag(pk_list)
elif es_type == "attention_user_tag":
RegisterUserTag.get_user_attention_tag(pk_list)
else:
type_info_map = get_type_info_map()
type_info = type_info_map[es_type]
logging.info("consume es_type:%s" % str(es_type))
type_info.insert_table_by_pk_list(
sub_index_name=es_type,
pk_list=pk_list,
use_batch_query_set=use_batch_query_set,
es=ESPerform.get_cli()
)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......@@ -37,7 +48,7 @@ def sync_face_similar_data_to_redis():
result_items = FaceUserContrastSimilar.objects.filter(is_online=True, is_deleted=False).distinct().values(
"participant_user_id").values_list("participant_user_id", flat=True)
logging.info("duan add,begin sync_face_similar_data_to_redis!")
logging.info("begin sync_face_similar_data_to_redis!")
redis_key_prefix = "physical:user_similar:participant_user_id:"
for participant_user_id in result_items:
......@@ -50,38 +61,74 @@ def sync_face_similar_data_to_redis():
item_list = list()
for item in similar_result_items:
weight_score = int(item.similarity * 100)
item_list.append(
{
"contrast_user_id": item.contrast_user_id,
"similarity": item.similarity
"filter": {
"constant_score": {
"filter": {
"term": {"user_id": item.contrast_user_id}
}
}
},
"weight": weight_score * 2
}
)
if len(item_list) >= 100:
break
redis_client.set(redis_key, json.dumps(item_list))
logging.info("duan add,participant_user_id:%d set data done!" % participant_user_id)
logging.info("participant_user_id:%d set data done!" % participant_user_id)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
@shared_task
def sync_user_similar_score():
try:
results_items = UserSimilarScore.objects.filter(is_deleted=False).distinct().values("user_id").values_list("user_id",flat=True)
results_items = UserSimilarScore.objects.filter(is_deleted=False).distinct().values("user_id").values_list(
"user_id", flat=True)
redis_key_prefix = "physical:user_similar_score:user_id:"
logging.info("duan add,begin sync user_similar_score!")
logging.info("begin sync user_similar_score!")
for user_id in results_items:
redis_key = redis_key_prefix + str(user_id)
similar_results_items = UserSimilarScore.objects.filter(is_deleted=False, user_id=user_id).order_by(
"-score")
item_list = list()
for item in similar_results_items:
contrast_user_id = item.contrast_user_id
score = item.score
item_list.append(
[contrast_user_id, score]
)
redis_client.set(redis_key, json.dumps(item_list))
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
@shared_task
def get_tag_count():
try:
# Fetch search recommendation hot words
# results_registr_tag = list(set(SettingsConfig.objects.filter(is_deleted=False).values_list("val", flat=True)))
# tag_val_list =set()
# for item in results_registr_tag:
# for word in item.split():
# tag_val_list.add(word)
# Fetch core tags that meet the criteria
results_tag = list(
set(Tag.objects.filter(is_online=True, is_deleted=False, collection=1).values_list("id", flat=True)))
redis_registr_tag = "physical:search_hotword:results_registr_tag"
redis_tag = "physical:search_hotword:results_tag"
# redis_client.set(redis_registr_tag, list(results_registr_tag))
redis_client.set(redis_tag, list(results_tag))
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......@@ -17,22 +17,22 @@ class ESPerform(object):
cli_info_list = settings.ES_INFO_LIST
index_prefix = settings.ES_INDEX_PREFIX
@classmethod
def get_cli(cls, cli_info=None):
try:
init_args = {
'sniff_on_start': False,
'sniff_on_connection_fail': False,
}
es_cli_info = cli_info if cli_info else cls.cli_info_list
cls.cli_obj = Elasticsearch(hosts=es_cli_info, **init_args)
return cls.cli_obj
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
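# Usage sketch (not in the original diff; the host below is made up): the new
# cli_info parameter lets a caller target a secondary cluster instead of
# settings.ES_INFO_LIST:
#   es_cli = ESPerform.get_cli(cli_info=[{"host": "10.0.0.1", "port": 9200}])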
@classmethod
def get_official_index_name(cls, sub_index_name, index_flag=None):
"""
:remark:get official es index name
:param sub_index_name:
......@@ -40,7 +40,7 @@ class ESPerform(object):
:return:
"""
try:
assert (index_flag in [None,"read","write"])
assert (index_flag in [None, "read", "write"])
official_index_name = cls.index_prefix + "-" + sub_index_name
if index_flag:
......@@ -52,11 +52,11 @@ class ESPerform(object):
return None
@classmethod
def __load_mapping(cls, doc_type):
try:
mapping_file_path = os.path.join(
os.path.dirname(__file__),
'..', 'trans2es', 'mapping', '%s.json' % (doc_type,))
mapping = ''
with open(mapping_file_path, 'r') as f:
for line in f:
......@@ -69,7 +69,7 @@ class ESPerform(object):
return None
@classmethod
def create_index(cls, es_cli, sub_index_name):
"""
:remark: create es index,alias index
:param sub_index_name:
......@@ -82,11 +82,11 @@ class ESPerform(object):
index_exist = es_cli.indices.exists(official_index_name)
if not index_exist:
es_cli.indices.create(official_index_name)
read_alias_name = cls.get_official_index_name(sub_index_name,"read")
es_cli.indices.put_alias(official_index_name,read_alias_name)
read_alias_name = cls.get_official_index_name(sub_index_name, "read")
es_cli.indices.put_alias(official_index_name, read_alias_name)
write_alias_name = cls.get_official_index_name(sub_index_name,"write")
es_cli.indices.put_alias(official_index_name,write_alias_name)
write_alias_name = cls.get_official_index_name(sub_index_name, "write")
es_cli.indices.put_alias(official_index_name, write_alias_name)
return True
except:
......@@ -94,7 +94,7 @@ class ESPerform(object):
return False
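# Clarifying note (added): create_index builds one concrete index plus two
# aliases via get_official_index_name, e.g. for sub_index_name "topic" and an
# assumed index_prefix "gm-dbmw":
#   gm-dbmw-topic        concrete index
#   gm-dbmw-topic-read   read alias
#   gm-dbmw-topic-write  write alias
# The prefix value and alias suffixes are assumptions; settings.ES_INDEX_PREFIX
# and the index_flag branch of get_official_index_name are not shown here.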
@classmethod
def put_index_mapping(cls,es_cli,sub_index_name,mapping_type="_doc",force_sync=False):
def put_index_mapping(cls, es_cli, sub_index_name, mapping_type="_doc", force_sync=False):
"""
:remark: put index mapping
:param es_cli:
......@@ -105,13 +105,13 @@ class ESPerform(object):
try:
assert (es_cli is not None)
write_alias_name = cls.get_official_index_name(sub_index_name,"write")
write_alias_name = cls.get_official_index_name(sub_index_name, "write")
index_exist = es_cli.indices.exists(write_alias_name)
if not index_exist and not force_sync:
return False
mapping_dict = cls.__load_mapping(sub_index_name)
es_cli.indices.put_mapping(index=write_alias_name, body=mapping_dict, doc_type=mapping_type)
return True
except:
......@@ -119,7 +119,7 @@ class ESPerform(object):
return False
@classmethod
def put_indices_template(cls, es_cli, template_file_name, template_name):
"""
:remark put index template
:param es_cli:
......@@ -131,7 +131,7 @@ class ESPerform(object):
assert (es_cli is not None)
mapping_dict = cls.__load_mapping(template_file_name)
es_cli.indices.put_template(name=template_name, body=mapping_dict)
return True
except:
......@@ -139,7 +139,7 @@ class ESPerform(object):
return False
@classmethod
def es_helpers_bulk(cls,es_cli,data_list,sub_index_name,auto_create_index=False,doc_type="_doc"):
def es_helpers_bulk(cls, es_cli, data_list, sub_index_name, auto_create_index=False, doc_type="_doc"):
try:
assert (es_cli is not None)
......@@ -150,31 +150,35 @@ class ESPerform(object):
logging.error("index:%s is not existing,bulk data error!" % official_index_name)
return False
else:
cls.create_index(es_cli, sub_index_name)
cls.put_index_mapping(es_cli, sub_index_name)
bulk_actions = []
if sub_index_name=="topic":
if sub_index_name == "topic" or \
sub_index_name == "topic-star-routing" or \
sub_index_name == "topic-high-star":
for data in data_list:
if data:
bulk_actions.append({
'_op_type': 'index',
'_index': official_index_name,
'_type': doc_type,
'_id': data['id'],
'_source': data,
'routing': data["content_level"]
})
else:
for data in data_list:
if data:
bulk_actions.append({
'_op_type': 'index',
'_index': official_index_name,
'_type': doc_type,
'_id': data['id'],
'_source': data,
})
elasticsearch.helpers.bulk(es_cli, bulk_actions)
return True
except:
......@@ -182,45 +186,67 @@ class ESPerform(object):
return False
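# Clarifying note (added): for the topic-family indices each bulk action now
# carries 'routing': data["content_level"], so documents of the same content
# level co-locate on a shard; get_search_results below can then pass e.g.
# routing="4,5,6" to query only those shards. The new "if data:" guard skips
# falsy entries in data_list (presumably failed row conversions upstream; an
# assumption, the producer is not shown in this diff).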
@classmethod
def get_search_results(cls, es_cli, sub_index_name, query_body, offset=0, size=10,
auto_create_index=False, doc_type="_doc", aggregations_query=False, is_suggest_request=False,
batch_search=False, routing=None):
try:
assert (es_cli is not None)
official_index_name = cls.get_official_index_name(sub_index_name,"read")
official_index_name = cls.get_official_index_name(sub_index_name, "read")
index_exists = es_cli.indices.exists(official_index_name)
if not index_exists:
if not auto_create_index:
logging.error("index:%s is not existing,get_search_results error!" % official_index_name)
return None
else:
cls.create_index(es_cli, sub_index_name)
cls.put_index_mapping(es_cli, sub_index_name)
logging.info("duan add,query_body:%s" % str(query_body).encode("utf-8"))
if not batch_search:
if not routing:
res = es_cli.search(index=official_index_name, doc_type=doc_type, body=query_body, from_=offset,
size=size)
else:
res = es_cli.search(index=official_index_name, doc_type=doc_type, body=query_body, from_=offset,
size=size, routing=routing)
if is_suggest_request:
return res
else:
result_dict = {
"total_count":res["hits"]["total"],
"hits":res["hits"]["hits"]
"total_count": res["hits"]["total"],
"hits": res["hits"]["hits"]
}
if aggregations_query:
result_dict["aggregations"] = res["aggregations"]
return result_dict
else:
res = es_cli.msearch(body=query_body, index=official_index_name, doc_type=doc_type)
logging.info("duan add,msearch res:%s" % str(res))
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"total_count":0,"hits":[]}
return {"total_count": 0, "hits": []}
@classmethod
def get_analyze_results(cls, es_cli, sub_index_name, query_body):
try:
assert (es_cli is not None)
official_index_name = cls.get_official_index_name(sub_index_name, "read")
index_exists = es_cli.indices.exists(official_index_name)
if not index_exists:
logging.error("index:%s is not existing,get_search_results error!" % official_index_name)
return None
res = es_cli.indices.analyze(index=official_index_name, body=query_body)
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
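# Usage sketch (illustrative; the analyzer name is an assumption, not taken
# from this diff):
#   body = {"analyzer": "ik_max_word", "text": "some text to tokenize"}
#   tokens = ESPerform.get_analyze_results(ESPerform.get_cli(), "topic", body)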
@classmethod
def if_es_node_load_high(cls, es_cli):
......@@ -234,9 +260,9 @@ class ESPerform(object):
for item in es_nodes_info_list:
try:
item_list = item.split(" ")
if len(item_list) == 11:
cpu_load = item_list[4]
elif len(item_list) == 10:
cpu_load = item_list[3]
else:
continue
......@@ -245,14 +271,188 @@ class ESPerform(object):
high_num += 1
es_nodes_list.append(int_cpu_load)
except:
logging.error("catch exception,item:%s,err_msg:%s" % (str(item),traceback.format_exc()))
logging.error("catch exception,item:%s,err_msg:%s" % (str(item), traceback.format_exc()))
return True
if high_num > 3:
logging.info("check es_nodes_load high,cpu load:%s,ori_cpu_info:%s" % (str(es_nodes_list), str(es_nodes_info_list)))
logging.info("check es_nodes_load high,cpu load:%s,ori_cpu_info:%s" % (
str(es_nodes_list), str(es_nodes_info_list)))
return True
else:
return False
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return True
@classmethod
def get_tag_topic_list(cls, tag_id, have_read_topic_id_list, size=100):
try:
functions_list = list()
# for id in tag_id:
# functions_list.append(
# {
# "filter": {"term": {"tag_list": id}},
# "weight": 1
# }
# )
functions_list += [
{
"filter": {
"constant_score": {
"filter": {
"term": {"content_level": 6}}
}
},
"weight": 60
},
{
"filter": {
"constant_score": {
"filter": {
"term": {"content_level": 5}}
}
},
"weight": 50
},
{
"filter": {
"constant_score": {
"filter": {
"term": {"content_level": 4}}
}
},
"weight": 40
}
]
q = {
"query": {
"function_score": {
"query": {
"bool": {
"must": [
{"range": {"content_level": {"gte": 4, "lte": 6}}},
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"terms": {"tag_list": tag_id}}
]
}
},
"boost_mode": "sum",
"score_mode": "sum",
"functions": functions_list
}
},
"_source": {
"include": ["id"]
},
"sort": [
{"_score": {"order": "desc"}},
{"create_time_val": {"order": "desc"}},
# {"language_type": {"order": "asc"}},
]
}
if len(have_read_topic_id_list) > 0:
q["query"]["function_score"]["query"]["bool"]["must_not"] = {
"terms": {
"id": have_read_topic_id_list
}
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="topic", query_body=q,
offset=0, size=size, routing="4,5,6")
topic_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
logging.info("topic_id_list:%s" % str(topic_id_list))
return topic_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list()
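# Worked example (added for clarity): with boost_mode and score_mode both
# "sum", a matching level-6 topic scores query_score + 60, level 5 scores
# query_score + 50, level 4 scores query_score + 40, so higher content levels
# rank first before the create_time_val tiebreak.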
@classmethod
def get_tag_topic_list_dict(cls, tag_id, have_read_topic_id_list, size=100):
try:
functions_list = list()
for id in tag_id:
functions_list.append(
{
"filter": {"term": {"tag_list": id}},
"weight": 1
}
)
# functions_list += [
# {
# "filter": {"term": {"content_level": 6}},
# "weight": 6000
# },
# {
# "filter": {"term": {"content_level": 5}},
# "weight": 5000
# },
# {
# "filter": {"term": {"content_level": 4}},
# "weight": 4000
# }
# ]
q = {
"query": {
"function_score": {
"query": {
"bool": {
"must": [
{"term": {"content_level": 6}},
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"terms": {"tag_list": tag_id}}
]
}
},
"boost_mode": "sum",
"score_mode": "sum",
"functions": functions_list
}
},
"_source": {
"include": ["id", "user_id"]
},
"sort": [
{"latest_reply_time": {"order": "desc"}},
{"create_time_val": {"order": "desc"}},
{"language_type": {"order": "asc"}},
],
"collapse": {
"field": "user_id"
}
}
if len(have_read_topic_id_list) > 0:
q["query"]["function_score"]["query"]["bool"]["must_not"] = {
"terms": {
"id": have_read_topic_id_list
}
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="topic-high-star",
query_body=q,
offset=0, size=size, routing="6")
topic_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
# logging.info("topic_id_list:%s" % str(topic_id_list))
# topic_id_dict = [{str(item["_source"]["id"]):item["_source"]["user_id"]} for item in result_dict["hits"]]
topic_id_dict = dict()
for item in result_dict["hits"]:
topic_id_dict[str(item["_source"]["id"])] = item["_source"]["user_id"]
logging.info("topic_id_list:%s" % str(topic_id_dict))
return topic_id_list, topic_id_dict
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list()
@classmethod
def get_highlight(cls, fields=[]):
field_highlight = {
'fields': {k: {} for k in fields},
'pre_tags': ['<%s>' % 'ems'],
'post_tags': ['</%s>' % 'ems']
}
return field_highlight
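# Usage sketch (field names illustrative):
#   q["highlight"] = ESPerform.get_highlight(["content", "name"])
# Matches are wrapped as <ems>...</ems>; if standard HTML emphasis was
# intended, the tag was presumably meant to be "em" (left unchanged above).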
......@@ -4,6 +4,10 @@
from django.conf import settings
from pytz import timezone
from datetime import datetime
import traceback
from libs.cache import redis_client
import json
import logging
def tzlc(dt, truncate_to_sec=True):
......@@ -15,4 +19,23 @@ def tzlc(dt, truncate_to_sec=True):
if dt.tzinfo is None:
return timezone(settings.TIME_ZONE).localize(dt)
else:
return timezone(settings.TIME_ZONE).normalize(dt)
def get_have_read_topic_id_list(device_id, user_id, query_type):
try:
if user_id and int(user_id) > 0:
redis_key = "physical:home_recommend" + ":user_id:" + str(user_id) + ":query_type:" + str(query_type)
else:
redis_key = "physical:home_recommend" + ":device_id:" + str(device_id) + ":query_type:" + str(query_type)
have_read_topic_id_list = list()
redis_field_list = [b'have_read_topic_list']
redis_field_val_list = redis_client.hmget(redis_key, redis_field_list)
if redis_field_val_list[0]:
have_read_topic_id_list = list(json.loads(redis_field_val_list[0]))
return have_read_topic_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list()
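# Companion sketch (added; the writer side is not in this diff, so this is an
# assumption that mirrors the reader above):
#   redis_key = "physical:home_recommend:device_id:%s:query_type:%s" % (device_id, query_type)
#   redis_client.hset(redis_key, "have_read_topic_list", json.dumps(topic_id_list))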
# -*- coding: UTF-8 -*-
#!/usr/bin/env python
import numpy as np
import redis
from libs.cache import redis_client
import logging
import traceback
import json
import pickle
from django.conf import settings
from trans2es.models.tag import AccountUserTag,CommunityTagFollow
from libs.es import ESPerform
import libs.tools as Tools
from search.utils.common import *
class RegisterUserTag(object):
linucb_device_id_matrix_redis_prefix = "physical:linucb:device_id:"
linucb_device_id_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
linucb_device_id_recommend_topic_id_prefix = "physical:linucb:topic_recommend:device_id:"
tag_topic_id_redis_prefix = "physical:tag_id:topic_id_list:"
linucb_user_id_matrix_redis_prefix = "physical:linucb:user_id:"
linucb_user_id_recommend_redis_prefix = "physical:linucb:tag_recommend:user_id:"
linucb_user_id_recommend_topic_id_prefix = "physical:linucb:topic_recommend:user_id:"
linucb_device_id_register_tag_topic_id_prefix = "physical:linucb:register_tag_topic_recommend:device_id:"
linucb_user_id_register_tag_topic_id_prefix = "physical:linucb:register_tag_topic_recommend:user_id:"
# Tags the user follows
linucb_register_user_tag_key = "physical:linucb:register_user_tag_info"
# # Tags the user follows
# linucb_user_attention_tag_key = "physical:linucb:user_attention_tag_info"
# # Tags the user likes (registration picks + follows)
# linucb_user_like_tag_key = "physical:linucb:user_like_tag_info"
@classmethod
def get_user_attention_tag(cls, pk_list):
"""
:remark fetch the tags a user follows
:param pk_list:
:return:
"""
try:
user_id_dict = dict()
query_results = CommunityTagFollow.objects.filter(pk__in=pk_list,is_deleted=False,is_online=True)
for item in query_results:
tag_id = item.tag_id
user_id = item.user_id
user_tag_list = list(CommunityTagFollow.objects.filter(user_id=user_id,is_deleted=False,is_online=True).values_list("tag_id", flat=True))
user_id_dict[user_id] = user_tag_list
user_register_tag_list = list(AccountUserTag.objects.filter(user=user_id,is_deleted=False).values_list("tag_id", flat=True))
user_id_dict[user_id].extend(user_register_tag_list)
for user_id in user_id_dict:
# redis_user_tag_id_data = redis_client.hget(cls.linucb_register_user_tag_key, user_id)
# redis_user_tag_id_list = json.loads(redis_user_tag_id_data) if redis_user_tag_id_data else []
# redis_user_tag_id_list.extend(user_id_dict[user_id])
redis_client.hset(cls.linucb_register_user_tag_key, user_id, json.dumps(list(set(user_id_dict[user_id]))))
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
@classmethod
def get_register_user_tag(cls,pk_list):
"""
:remark tags the user picked at registration
:param pk_list:
:return:
"""
try:
user_id_dict = dict()
query_results = AccountUserTag.objects.filter(pk__in=pk_list)
for item in query_results:
tag_id = item.tag_id
user_id = item.user
user_tag_list = list(AccountUserTag.objects.filter(user=user_id).values_list("tag_id", flat=True))
user_id_dict[user_id] = user_tag_list
user_follow_tag_list = list(CommunityTagFollow.objects.filter(user_id=user_id,is_deleted=False,is_online=True).values_list("tag_id", flat=True))
user_id_dict[user_id].extend(user_follow_tag_list)
for user_id in user_id_dict:
# redis_user_tag_id_data = redis_client.hget(cls.linucb_register_user_tag_key, user_id)
# redis_user_tag_id_list = json.loads(redis_user_tag_id_data) if redis_user_tag_id_data else []
# redis_user_tag_id_list.extend(user_id_dict[user_id])
redis_client.hset(cls.linucb_register_user_tag_key, user_id, json.dumps(list(set(user_id_dict[user_id]))))
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
# -*- coding: UTF-8 -*-
#!/usr/bin/env python
import numpy as np
import redis
from libs.cache import redis_client
import logging
import traceback
import json
import pickle
from django.conf import settings
from trans2es.models.tag import CommunityTagFollow
from libs.es import ESPerform
import libs.tools as Tools
from search.utils.common import *
class UserAttentionTag(object):
linucb_device_id_matrix_redis_prefix = "physical:linucb:device_id:"
linucb_device_id_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
linucb_device_id_recommend_topic_id_prefix = "physical:linucb:topic_recommend:device_id:"
tag_topic_id_redis_prefix = "physical:tag_id:topic_id_list:"
linucb_user_id_matrix_redis_prefix = "physical:linucb:user_id:"
linucb_user_id_recommend_redis_prefix = "physical:linucb:tag_recommend:user_id:"
linucb_user_id_recommend_topic_id_prefix = "physical:linucb:topic_recommend:user_id:"
linucb_user_id_attention_tag_topic_id_prefix = "physical:linucb:attention_tag_topic_recommend:user_id:"
@classmethod
def get_register_user_tag(cls,pk_list):
try:
user_id_set = set()
query_results = CommunityTagFollow.objects.filter(pk__in=pk_list)
for item in query_results:
tag_id = item.tag_id
user_id = item.user_id
if user_id not in user_id_set:
user_id_set.add(user_id)
user_tag_list = CommunityTagFollow.objects.filter(user_id=user_id).order_by("-create_time").values_list("tag_id",flat=True)
have_read_topic_id_list = Tools.get_have_read_topic_id_list(-1, user_id,
TopicPageType.HOME_RECOMMEND)
recommend_topic_id_list = list()
cycle_num = int(10000/len(user_tag_list))
for index in range(0,cycle_num):
for tag_id in user_tag_list:
redis_tag_id_key = cls.tag_topic_id_redis_prefix + str(tag_id)
redis_tag_id_data = redis_client.get(redis_tag_id_key)
tag_topic_id_list = json.loads(redis_tag_id_data) if redis_tag_id_data else []
if not redis_tag_id_data:
tag_topic_id_list = ESPerform.get_tag_topic_list(tag_id)
redis_client.set(redis_tag_id_key,json.dumps(tag_topic_id_list))
redis_client.expire(redis_tag_id_key,1*24*60*60)
if len(tag_topic_id_list)>index:
for topic_id in tag_topic_id_list[index:]:
if topic_id not in have_read_topic_id_list and topic_id not in recommend_topic_id_list:
recommend_topic_id_list.append(topic_id)
break
redis_register_tag_topic_data = {
"data": json.dumps(recommend_topic_id_list),
"cursor": 0
}
redis_client.hmset(cls.linucb_user_id_attention_tag_topic_id_prefix,redis_register_tag_topic_data)
redis_client.expire(cls.linucb_user_id_attention_tag_topic_id_prefix,30*24*60*60)
topic_recommend_redis_key = cls.linucb_user_id_recommend_topic_id_prefix + str(user_id)
redis_recommend_topic_dict = redis_client.hgetall(topic_recommend_redis_key)
if len(redis_recommend_topic_dict)==0:
redis_data_dict = {
"data": json.dumps(recommend_topic_id_list),
"cursor":0
}
redis_client.hmset(topic_recommend_redis_key,redis_data_dict)
redis_client.expire(topic_recommend_redis_key,30*24*60*60)
else:
ori_recommend_topic_id_list = json.loads(redis_recommend_topic_dict["data"])
ori_recommend_cursor = redis_recommend_topic_dict["cursor"]
ori_index = 0
for new_recommend_index in range(0,len(recommend_topic_id_list),2):
pass
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......@@ -2,16 +2,18 @@
#!/usr/bin/env python
from kafka import KafkaConsumer
import random
from libs.cache import redis_client
import logging
from linucb.views.linucb import LinUCB
import json
from trans2es.models.tag import TopicTag
from trans2es.models.tag import TopicTag,Tag
from trans2es.models.topic import TopicHomeRecommend
import traceback
from django.conf import settings
from libs.es import ESPerform
from search.utils.common import *
import libs.tools as Tools
class KafkaManager(object):
consumser_obj = None
......@@ -30,9 +32,12 @@ class CollectData(object):
def __init__(self):
self.linucb_matrix_redis_prefix = "physical:linucb:device_id:"
# deprecated
self.linucb_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
# recommended topics
self.linucb_recommend_topic_id_prefix = "physical:linucb:topic_recommend:device_id:"
self.tag_topic_id_redis_prefix = "physical:tag_id:topic_id_list:"
self.click_recommend_redis_key_prefix = "physical:click_recommend:device_id:"
# default
self.user_feature = [0,1]
......@@ -49,88 +54,77 @@ class CollectData(object):
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return dict()
def get_tag_topic_list(self,tag_id):
try:
q = {
"query":{
"bool":{
"must":[
{"term":{"is_online": True}},
{"term":{"is_deleted": False}},
{"term":{"tag_list":tag_id}}
]
}
},
"_source":{
"include":["id"]
},
"sort":[
{"create_time_val":{"order":"desc"}},
{"language_type":{"order":"asc"}},
]
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="topic-high-star", query_body=q,
offset=0, size=5000)
topic_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
return topic_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list()
def update_recommend_tag_list(self, device_id, user_feature=None, user_id=None, click_topic_tag_list=None, new_user_click_tag_list=[]):
try:
recommend_tag_set = set()
recommend_tag_list = list()
recommend_tag_dict = dict()
redis_linucb_tag_data_dict = self._get_user_linucb_info(device_id)
if len(redis_linucb_tag_data_dict) == 0:
recommend_tag_list = LinUCB.get_default_tag_list(user_id)
LinUCB.init_device_id_linucb_info(redis_client, self.linucb_matrix_redis_prefix,device_id,recommend_tag_list)
else:
user_feature = user_feature if user_feature else self.user_feature
(recommend_tag_dict,recommend_tag_set) = LinUCB.linucb_recommend_tag(device_id,redis_linucb_tag_data_dict,user_feature,list(redis_linucb_tag_data_dict.keys()))
if len(recommend_tag_dict) > 0:
recommend_tag_list = list(recommend_tag_set)
if len(recommend_tag_list) > 0:
tag_recommend_redis_key = self.linucb_recommend_redis_prefix + str(device_id)
redis_client.set(tag_recommend_redis_key, json.dumps(recommend_tag_list))
# TODO: set an expiry here; check whether set supports it
redis_client.expire(tag_recommend_redis_key, 7*24*60*60)
redis_key = "physical:home_recommend" + ":device_id:" + device_id + ":query_type:" + str(TopicPageType.HOME_RECOMMEND)
have_read_topic_id_list = list()
redis_field_list = [b'have_read_topic_list']
redis_field_val_list = redis_client.hmget(redis_key, redis_field_list)
if redis_field_val_list[0]:
have_read_topic_id_list = list(json.loads(redis_field_val_list[0]))
have_read_topic_id_list = Tools.get_have_read_topic_id_list(device_id,user_id,TopicPageType.HOME_RECOMMEND)
promote_recommend_topic_id_list = TopicHomeRecommend.objects.using(settings.SLAVE_DB_NAME).filter(is_online=1).values_list("topic_id",flat=True)
have_read_topic_id_list.extend(promote_recommend_topic_id_list)
recommend_topic_id_list = list()
for index in range(0,1000):
for tag_id in recommend_tag_list[0:5]:
redis_tag_id_key = self.tag_topic_id_redis_prefix + str(tag_id)
redis_tag_id_data = redis_client.get(redis_tag_id_key)
tag_topic_id_list = json.loads(redis_tag_id_data) if redis_tag_id_data else []
if not redis_tag_id_data:
tag_topic_id_list = self.get_tag_topic_list(tag_id)
redis_client.set(redis_tag_id_key,json.dumps(tag_topic_id_list))
redis_client.expire(redis_tag_id_key,1*24*60*60)
if len(tag_topic_id_list)>index:
for topic_id in tag_topic_id_list[index:]:
if topic_id not in have_read_topic_id_list and topic_id not in recommend_topic_id_list:
recommend_topic_id_list.append(topic_id)
break
recommend_topic_id_list_dict = dict()
recommend_topic_id_list_click = list()
recommend_topic_id_list_click_dict = dict()
if click_topic_tag_list and len(click_topic_tag_list)>0:
recommend_topic_id_list_click,recommend_topic_id_list_click_dict = ESPerform.get_tag_topic_list_dict(click_topic_tag_list,
have_read_topic_id_list,size=2)
if len(recommend_topic_id_list_click) > 0:
recommend_topic_id_list.extend(recommend_topic_id_list_click)
recommend_topic_id_list_dict.update(recommend_topic_id_list_click_dict)
# have_read_topic_id_list.extend(recommend_topic_id_list_click)
# click_recommend_redis_key = self.click_recommend_redis_key_prefix + str(device_id)
# click_redis_data_dict = {
# "data": json.dumps(recommend_topic_id_list),
# "datadict":json.dumps(recommend_topic_id_list_dict),
# "cursor": 0
# }
# redis_client.hmset(click_recommend_redis_key, click_redis_data_dict)
tag_id_list = recommend_tag_list[0:20]
topic_recommend_redis_key = self.linucb_recommend_topic_id_prefix + str(device_id)
# redis_topic_data_dict = redis_client.hgetall(topic_recommend_redis_key)
# redis_topic_list = list()
# cursor = -1
# if b"data" in redis_topic_data_dict:
# redis_topic_list = json.loads(redis_topic_data_dict[b"data"]) if redis_topic_data_dict[
# b"data"] else []
# cursor = int(str(redis_topic_data_dict[b"cursor"], encoding="utf-8"))
# if len(recommend_topic_id_list)==0 and cursor==0 and len(redis_topic_list)>0:
# have_read_topic_id_list.extend(redis_topic_list[:2])
if len(new_user_click_tag_list)>0:
tag_topic_id_list,tag_topic_dict = ESPerform.get_tag_topic_list_dict(new_user_click_tag_list, have_read_topic_id_list)
else:
tag_topic_id_list,tag_topic_dict = ESPerform.get_tag_topic_list_dict(tag_id_list,have_read_topic_id_list)
if len(recommend_topic_id_list)>0 or len(tag_topic_id_list)>0 or len(new_user_click_tag_list) > 0:
tag_topic_id_list = recommend_topic_id_list + tag_topic_id_list
tag_topic_dict.update(recommend_topic_id_list_dict)
redis_data_dict = {
"data": json.dumps(tag_topic_id_list),
"datadict":json.dumps(tag_topic_dict),
"cursor":0
}
redis_client.hmset(topic_recommend_redis_key,redis_data_dict)
return True
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......@@ -160,72 +154,137 @@ class CollectData(object):
raw_val_dict = json.loads(ori_msg.value)
if "type" in raw_val_dict and "on_click_feed_topic_card" == raw_val_dict["type"]:
topic_id = raw_val_dict["params"]["business_id"] or raw_val_dict["params"]["topic_id"]
device_id = raw_val_dict["device"]["device_id"]
if "type" in raw_val_dict and \
(raw_val_dict["type"] in ("on_click_feed_topic_card","tag_zone_click_focus")):
click_topic_tag_list = list()
if "on_click_feed_topic_card" == raw_val_dict["type"]:
topic_id = raw_val_dict["params"]["topic_id"]
device_id = raw_val_dict["device"]["device_id"]
user_id = raw_val_dict["user_id"] if "user_id" in raw_val_dict else None
logging.info("consume topic_id:%s,device_id:%s" % (str(topic_id), str(device_id)))
# topic_tag_list = list(TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=topic_id,is_online=True).values_list("tag_id",flat=True))
# tag_query_results = Tag.objects.using(settings.SLAVE_DB_NAME).filter(id__in=topic_tag_list,is_online=True,is_deleted=False).values_list("id","collection","is_ai")
# for id,collection,is_ai in tag_query_results:
# if collection and is_ai:
# click_topic_tag_list.append(id)
topic_tag_list = list()
click_results = TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(
topic_id=topic_id, is_online=True).values_list("tag_id", "is_collection")
for tag_id, is_collection in click_results:
topic_tag_list.append(tag_id)
if is_collection:
click_topic_tag_list.append(tag_id)
tag_query_results = Tag.objects.using(settings.SLAVE_DB_NAME).filter(
id__in=topic_tag_list, is_online=True, is_deleted=False).values_list("id",
"is_ai")
for id, is_ai in tag_query_results:
if is_ai:
click_topic_tag_list.append(id)
logging.info("positive tag_list,device_id:%s,topic_id:%s,tag_list:%s" % (
str(device_id), str(topic_id), str(click_topic_tag_list)))
else:
tag_name = raw_val_dict["params"]["query"]
query_type = raw_val_dict["params"]["type"]
device_id = raw_val_dict["device"]["device_id"]
user_id = raw_val_dict["user_id"] if "user_id" in raw_val_dict else None
if query_type=="do":
tag_list = list(Tag.objects.using(settings.SLAVE_DB_NAME).filter(name=tag_name,is_online=True,is_deleted=False).values_list("id",flat=True))
click_topic_tag_list.extend(tag_list)
logging.info("query tag attention,positive tag_list,device_id:%s,query_name:%s,tag_list:%s" % (
str(device_id), tag_name, str(click_topic_tag_list)))
logging.info("consume topic_id:%s,device_id:%s" % (str(topic_id), str(device_id)))
tag_list = list()
click_sql_query_results = TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=topic_id).values_list("tag_id","is_online")
for tag_id,is_online in click_sql_query_results:
if is_online:
tag_list.append(tag_id)
logging.info("click_topic_tag_list:%s"%(str(click_topic_tag_list)))
is_click = 1
is_vote = 0
reward = 1 if is_click or is_vote else 0
logging.info("positive tag_list,device_id:%s,topic_id:%s,tag_list:%s" % (
str(device_id), str(topic_id), str(tag_list)))
for tag_id in tag_list:
for tag_id in click_topic_tag_list:
self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
# Update this user's recommended tag data after the user tag behavior info has been updated
if len(click_topic_tag_list)>0:
self.update_recommend_tag_list(device_id, user_feature, user_id,click_topic_tag_list=click_topic_tag_list)
# elif "type" in raw_val_dict and "page_precise_exposure" == raw_val_dict["type"]:
# if isinstance(raw_val_dict["params"]["exposure_cards"],str):
# exposure_cards_list = json.loads(raw_val_dict["params"]["exposure_cards"])
# elif isinstance(raw_val_dict["params"]["exposure_cards"],list):
# exposure_cards_list = raw_val_dict["params"]["exposure_cards"]
# else:
# exposure_cards_list = list()
# device_id = raw_val_dict["device"]["device_id"]
# user_id = raw_val_dict["user_id"] if "user_id" in raw_val_dict else None
# logging.warning("type msg:%s" % raw_val_dict.get("type"))
# exposure_topic_id_list = list()
# for item in exposure_cards_list:
# if "card_id" not in item:
# continue
# exposure_topic_id = item["card_id"]
# logging.info(
# "consume exposure topic_id:%s,device_id:%s" % (str(exposure_topic_id), str(device_id)))
# if exposure_topic_id:
# exposure_topic_id_list.append(exposure_topic_id)
#
# topic_tag_id_dict = dict()
# tag_list = list()
# exposure_sql_query_results = TopicTag.objects.using(settings.SLAVE_DB_NAME).\
# filter(topic_id__in=exposure_topic_id_list).\
# values_list("topic_id","tag_id","is_online","is_collection")
# # if len(exposure_sql_query_results)>0:
# for topic_id,tag_id,is_online,is_collection in exposure_sql_query_results:
# if is_online and is_collection == 1:
# tag_list.append(tag_id)
# if is_online:
# tag_sql_query_results = Tag.objects.using(settings.SLAVE_DB_NAME).filter(
# id=tag_id).values_list("id", "collection", "is_ai")
# for id, collection, is_ai in tag_sql_query_results:
# if (is_ai == 1) and id not in tag_list:
# tag_list.append(id)
#
# if topic_id not in topic_tag_id_dict:
# topic_tag_id_dict[topic_id] = list()
# topic_tag_id_dict[topic_id].append(tag_id)
#
# is_click = 0
# is_vote = 0
#
# reward = 1 if is_click or is_vote else 0
#
# logging.info("negative tag_list,device_id:%s,topic_tag_id_dict:%s" % (
# str(device_id), str(topic_tag_id_dict)))
# for tag_id in tag_list:
# self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
#
# # Update this user's recommended tag data after the user tag behavior info has been updated
# self.update_recommend_tag_list(device_id, user_feature, user_id)
elif "type" in raw_val_dict and "interest_choice_click_next" == raw_val_dict["type"]:
if isinstance(raw_val_dict["params"]["tagid_list"],str):
tagid_list = json.loads(raw_val_dict["params"]["tagid_list"])
elif isinstance(raw_val_dict["params"]["tagid_list"],list):
tagid_list = raw_val_dict["params"]["tagid_list"]
else:
tagid_list = list()
logging.warning("unknown type msg:%s" % raw_val_dict.get("type", "missing type"))
device_id = raw_val_dict["device"]["device_id"]
user_id = raw_val_dict["user_id"] if "user_id" in raw_val_dict else None
# if len(exposure_sql_query_results)>0:
if len(tagid_list) > 0:
is_click = 1
is_vote = 0
logging.info("negative tag_list,device_id:%s,topic_tag_id_dict:%s" % (
str(device_id), str(topic_tag_id_dict)))
for tag_id in tag_list:
self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
reward = 1 if is_click or is_vote else 0
for tag_id in tagid_list:
self.update_user_linucb_tag_info(reward, device_id, tag_id, user_feature)
# Update this user's recommended tag data after the user tag behavior info has been updated
self.update_recommend_tag_list(device_id, user_feature, user_id,new_user_click_tag_list=tagid_list)
else:
logging.warning("unknown type msg:%s" % raw_val_dict.get("type", "missing type"))
except:
......
......@@ -20,11 +20,16 @@ class LinUCB:
default_tag_list = list()
@classmethod
def get_default_tag_list(cls,user_id):
try:
if user_id:
redis_tag_data = redis_client.hget("physical:linucb:register_user_tag_info", user_id)
cls.default_tag_list = json.loads(redis_tag_data) if redis_tag_data else []
if len(cls.default_tag_list) == 0:
cls.default_tag_list = Tag.objects.using(settings.SLAVE_DB_NAME).filter(is_online=True,collection=1).values_list("id",flat=True)[0:100]
return cls.default_tag_list
except:
......
#!/usr/bin/env python
# -*- coding: utf-8 -*-
DATABASE_APPS_MAPPING = {'face': 'face', 'commodity': 'commodity'}
class DBRouter:
......@@ -12,32 +13,46 @@ class DBRouter:
"""
Attempts to read user models go to users_db.
"""
# if model._meta.app_label == 'face':
# return 'face'
if model._meta.app_label in DATABASE_APPS_MAPPING:
return DATABASE_APPS_MAPPING[model._meta.app_label]
return None
def db_for_write(self, model, **hints):
"""
Attempts to write user models go to users_db.
"""
# if model._meta.app_label == 'face':
# return 'face'
if model._meta.app_label in DATABASE_APPS_MAPPING:
return DATABASE_APPS_MAPPING[model._meta.app_label]
return None
def allow_relation(self, obj1, obj2, **hints):
"""
Allow relations if a model in the user app is involved.
"""
db_obj1 = DATABASE_APPS_MAPPING.get(obj1._meta.app_label)
db_obj2 = DATABASE_APPS_MAPPING.get(obj2._meta.app_label)
if db_obj1 and db_obj2:
if db_obj1 == db_obj2:
return True
else:
return False
else:
return None
def allow_migrate(self, db, app_label, model_name=None, **hints):
"""
Make sure the auth app only appears in the 'users_db'
database.
"""
if db in DATABASE_APPS_MAPPING.values():
return DATABASE_APPS_MAPPING.get(app_label) == db
elif app_label in DATABASE_APPS_MAPPING:
return False
return None
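# A quick check of the routing rules above, using a minimal fake model (hypothetical;
# real models carry Django's _meta). The router only takes effect once registered in
# settings via DATABASE_ROUTERS.
class _FakeMeta:
    def __init__(self, app_label):
        self.app_label = app_label

class _FakeModel:
    def __init__(self, app_label):
        self._meta = _FakeMeta(app_label)

_router = DBRouter()
assert _router.db_for_read(_FakeModel('face')) == 'face'
assert _router.db_for_read(_FakeModel('auth')) is None  # unmapped apps fall through to the default db
assert _router.allow_relation(_FakeModel('face'), _FakeModel('commodity')) is False
assert _router.allow_migrate('face', 'commodity') is False  # commodity models stay out of 'face'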
# coding=utf-8
from __future__ import unicode_literals, print_function, absolute_import
import itertools
from django.conf import settings
import logging
......
......@@ -10,8 +10,5 @@ For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
from .log_settings import *
from datetime import timedelta
from celery.schedules import crontab
from .settings_local import *
......@@ -57,6 +57,7 @@ CELERYBEAT_SCHEDULE = {
'args': ()
},
}
"""
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
......
......@@ -9,7 +9,7 @@ from libs.es import ESPerform
class GroupUtils(object):
@classmethod
def get_group_query_result(cls, query, offset, size):
try:
q = dict()
......@@ -38,37 +38,92 @@ class GroupUtils(object):
}
}
q["_source"] = {
"includes":["id"]
"includes": ["id"]
}
return ESPerform.get_search_results(ESPerform.get_cli(), "group", q, offset, size)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"total_count":0, "hits":[]}
return {"total_count": 0, "hits": []}
@classmethod
def get_hot_pictorial_recommend_result_list(cls, offset, size, es_cli_obj=None, attention_tag_list=[]):
try:
if not es_cli_obj:
es_cli_obj = ESPerform.get_cli()
functions_list = list()
for tag_id in attention_tag_list:
functions_list.append({
"filter": {
"constant_score": {
"filter": {
"term": {
"tag_id": tag_id
}
}
}
},
"weight": 20
})
if len(functions_list) >= 20:
break
functions_list.append(
{
"gauss": {
"create_time": {
"scale": "1d",
"decay": 0.99
}
},
"weight": 60
}
)
q = {
"query": {
"function_score": {
"query": {
"bool": {
"filter": [
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"term": {"effective": True}}
],
"must_not": [
{"term": {"is_default": 1}}
]
}
},
"score_mode": "sum",
"boost_mode": "sum",
"functions": functions_list
}
}
}
q["sort"] = [
{"high_quality_topic_num":{"order":"desc"}}
{
"_script": {
"type": "number",
"script": {
"lang": "expression",
"source": "_score+doc['offline_score']"
},
"order": "desc"
}
},
{
"_score": {
"order": "desc"
}
}
]
q["_source"] = {
"includes":["id"]
"includes": ["id"]
}
result_dict = ESPerform.get_search_results(es_cli_obj,"pictorial",q,offset,size)
result_dict = ESPerform.get_search_results(es_cli_obj, "pictorial", q, offset, size)
pictorial_ids_list = []
if len(result_dict["hits"]) > 0:
......@@ -80,7 +135,7 @@ class GroupUtils(object):
return []
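# With score_mode="sum" and boost_mode="sum" above, a pictorial's score accumulates
# weight 20 per matched followed tag (capped at 20 clauses) plus the recency
# function (weight 60, decay 0.99 per day). A rough pure-python illustration with
# made-up numbers (the gauss decay is approximated here as a plain exponential):
def pictorial_score(matched_followed_tags, days_old, base_score=0.0):
    tag_part = 20 * min(matched_followed_tags, 20)
    recency_part = 60 * (0.99 ** days_old)
    return base_score + tag_part + recency_part

assert pictorial_score(2, 3) > pictorial_score(0, 0)  # followed tags beat mere freshness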
@classmethod
def get_user_attention_pictorial_list(cls, user_id, offset=0, size=10, es_cli_obj=None):
"""
:remark: fetch the list of pictorials (groups) the user follows
:return:
......@@ -91,29 +146,31 @@ class GroupUtils(object):
q = dict()
q["query"] = {
"bool":{
"must":[
{"term":{"is_online": True}},
{"term":{"user_id":user_id}},
{"term":{"is_deleted":False}}
"bool": {
"must": [
{"term": {"is_online": True}},
{"term": {"user_id": user_id}},
{"term": {"is_deleted": False}},
{"term": {"effective": True}}
]
}
}
q["_source"] = {
"includes":["attention_pictorial_id_list"]
"includes": ["attention_pictorial_id_list"]
}
result_dict = ESPerform.get_search_results(es_cli_obj,"user",q,offset,size)
if len(result_dict["hits"])>0:
result_dict = ESPerform.get_search_results(es_cli_obj, "user", q, offset, size)
if len(result_dict["hits"]) > 0:
return result_dict["hits"][0]["_source"]["attention_pictorial_id_list"]
else:
return []
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_pictorial_ids_by_aggs(cls, pictorial_ids_list, es_cli_obj=None):
"""
:remark: aggregation query to fetch the pictorial (group) list
:param group_id_list:
......@@ -124,34 +181,96 @@ class GroupUtils(object):
es_cli_obj = ESPerform.get_cli()
q = dict()
q["size"]=0
q["size"] = 0
q["query"] = {
"terms":{
"pictorial_id":pictorial_ids_list
"terms": {
"pictorial_id": pictorial_ids_list
}
}
q["aggs"] = {
"pictorial_ids":{
"terms":{
"field":"pictorial_id"
"pictorial_ids": {
"terms": {
"field": "pictorial_id"
},
"aggs":{
"max_date":{
"max":{
"field":"update_time_val"
"aggs": {
"max_date": {
"max": {
"field": "update_time_val"
}
}
}
}
}
result_dict = ESPerform.get_search_results(es_cli_obj,"topic",q,aggregations_query=True)
result_dict = ESPerform.get_search_results(es_cli_obj, "topic", q, aggregations_query=True)
buckets_list = result_dict["aggregations"]["pictorial_ids"]["buckets"]
sorted_buckets_list = sorted(buckets_list, key=lambda item: item["max_date"]["value"], reverse=True)
sorted_pictorial_id_list = [item["key"] for item in sorted_buckets_list]
return sorted_pictorial_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
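# The aggregation above buckets topics by pictorial_id, takes max(update_time_val)
# per bucket, and the python side reorders pictorials most-recently-updated first.
# The same reordering on plain data (timestamps are made up):
_buckets = [
    {"key": 101, "max_date": {"value": 1546300800}},
    {"key": 102, "max_date": {"value": 1546387200}},
]
_ordered = [b["key"] for b in sorted(_buckets, key=lambda b: b["max_date"]["value"], reverse=True)]
assert _ordered == [102, 101]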
@classmethod
def get_search_pictorial_topic(cls, query, offset, size):
try:
q = dict()
multi_fields = {
'name': 4,
'description': 4,
'edit_tag_name': 4
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'cross_fields',
'operator': 'and',
'fields': query_fields,
"analyzer": "gm_default_index"
}
q['query'] = {
'bool': {
"must": [
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"term": {"is_default": 0}},
{"range": {"topic_id_list": {"gte": 0}}},
{"term": {"is_cover": True}}
],
"should": [
{'multi_match': multi_match}
],
"minimum_should_match": 1
}
}
q["_source"] = {
"includes": ["id", "is_online", "is_deleted", "is_default", "name", "tag_name", "description",
"is_cover",
"offline_score",
"is_default"]
}
q["sort"] = [
{
"real_user_activate_time": {
"order": "desc"
}
},
{
"_score": {
"order": "desc"
}
}
]
logging.info("get get_search_pictorial_topic:%s" % q)
es_cli_obj = ESPerform.get_cli()
result_dict = ESPerform.get_search_results(es_cli_obj, "pictorial", q, offset, size)
return result_dict
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
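# The '^'.join trick used above turns {"name": 4} into elasticsearch's field-boost
# syntax "name^4". A quick check, no ES required:
_multi_fields = {'name': 4, 'description': 4, 'edit_tag_name': 4}
_query_fields = ['^'.join((k, str(v))) for (k, v) in _multi_fields.items()]
assert set(_query_fields) == {"name^4", "description^4", "edit_tag_name^4"}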
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.group import GroupUtils
from search.utils.common import GroupSortTypes
from trans2es.models.pictorial import PictorialTopics
class ProductUtils(object):
@classmethod
def get_product_sku(cls, query='', offset=0, size=10, filters={}):
try:
multi_fields = {
'cn_name': 2,
'en_name': 2,
'alias': 2,
'brand_cn_name': 2,
'brand_en_name': 2,
'brand_alias': 2,
'category_cn_name': 2,
"effect_cn_name": 2
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'cross_fields',
'operator': 'and',
'fields': query_fields,
}
sku_must_filter = cls.sku_filter(filters)
logging.info("get sku_must_filter:%s" % sku_must_filter)
q = {
"query": {
"bool": {
"must": sku_must_flter
}
}
}
if query != '':
q = {
"query": {
"bool": {
"must": sku_must_flter,
"should": {
"multi_match": multi_match
},
"minimum_should_match": 1
}
}
}
q["sort"] = [{"comment_nums": {"order": "desc"}}, {"cn_name_sort": {"order": "asc"}}]
logging.info("get product query:%s" % q)
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name="product", query_body=q,
offset=offset, size=size)
return result_dict
except:
logging.error("catch exception, query_sku:%s" % traceback.format_exc())
return []
@classmethod
def sku_filter(cls, filters):
"""处理过滤器部分。"""
logging.info("get filters:%s" % filters)
f = [
{'term': {"have_image": True}},
{'term': {"is_online": True}},
{"term": {"is_deleted": False}},
]
if not filters:
return f
for k, v in filters.items():
if v in (None, '', []):
continue
if k == "brand":
f.append({"term": {"brand_cn_name_pre": v}})
if k == "effect":
f.append({"term": {"effect_cn_name_pre": v}})
if k == "category":
f.append({"term": {"category_cn_name_pre": v}})
return f
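# Usage sketch for sku_filter (values are made up): the three base filters are always
# pinned, brand/effect/category map onto *_cn_name_pre term filters, and empty values
# are skipped.
_f = ProductUtils.sku_filter({"brand": "acme", "effect": "", "category": None})
assert {"term": {"brand_cn_name_pre": "acme"}} in _f
assert len(_f) == 4  # 3 base filters + brand; empty effect/category are skipped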
......@@ -10,6 +10,8 @@ from alpha_types.venus import TOPIC_SEARCH_SORT
from libs.es import ESPerform
from .common import TopicDocumentField
from search.utils.common import *
from trans2es.models.pictorial import PictorialTopics
from libs.cache import redis_client
class TopicUtils(object):
......@@ -120,11 +122,12 @@ class TopicUtils(object):
return {}
@classmethod
def get_recommend_topic_ids(cls, user_id, tag_id, offset, size, single_size, query=None,
query_type=TopicPageType.HOME_RECOMMEND,
filter_topic_id_list=[], test_score=False, must_topic_id_list=[], recommend_tag_list=[],
user_similar_score_list=[], index_type="topic", routing=None, attention_tag_list=[],
linucb_user_id_list=[], disable_collpase=False):
"""
:remark: fetch the home-page recommended topic list
:TODO: result-scattering (diversity) logic still needs to be added
:param user_id:
:param offset:
......@@ -137,14 +140,14 @@ class TopicUtils(object):
# pick_user_id_list = list()
# same_group_id_list = list()
user_tag_list = list()
if filter_topic_id_list is None:
filter_topic_id_list = []
result_dict = TopicUtils.get_related_user_info(user_id, 0, 1)
if len(result_dict["hits"]) == 0:
logging.warning("not find user_id:%d in es!" % int(user_id))
else:
attention_user_info_list = result_dict["hits"][0]["_source"]["attention_user_id_list"]
attention_user_id_list = [item["user_id"] for item in attention_user_info_list]
# pick_user_info_list = result_dict["hits"][0]["_source"]["pick_user_id_list"]
# pick_user_id_list = [item["user_id"] for item in pick_user_info_list]
......@@ -156,17 +159,13 @@ class TopicUtils(object):
user_tag_list = result_dict["hits"][0]["_source"]["tag_list"]
q = dict()
ret_data_list = list()
topic_id_list = list()
q["query"] = dict()
functions_list = [
{
"filter": {
"term": {
"language_type": 1
}
},
"weight": 6
},
{
"gauss": {
"create_time": {
......@@ -174,91 +173,14 @@ class TopicUtils(object):
"decay": 0.99
}
},
"weight": 5
"weight": 60
}
]
if len(user_similar_score_list) > 0:
for item in user_similar_score_list[:100]:
score_item = 2 + item[1]
functions_list.append(
{
"filter": {"bool": {
"should": {"term": {"user_id": item[0]}}}},
"weight": score_item,
}
)
if len(attention_user_id_list) > 0:
functions_list.append(
{
"filter": {"bool": {
"should": {"terms": {"user_id": attention_user_id_list}}}},
"weight": 3,
}
)
# if len(pick_user_id_list) > 0:
# functions_list.append(
# {
# "filter": {"bool": {
# "should": {"terms": {"user_id": pick_user_id_list}}}},
# "weight": 2
# }
# )
# if len(same_pictorial_id_list) > 0:
# functions_list.append(
# {
# "filter": {"bool": {
# "should": {"terms": {"user_id": same_pictorial_id_list}}}},
# "weight": 1
# }
# )
# query_tag_term_list = cls.___get_should_term_list(user_tag_list)
if len(user_tag_list) > 0:
functions_list.append(
{
"filter": {"bool": {
"should": {"terms": {"tag_list": user_tag_list}}}},
"weight": 1
}
)
# if len(recommend_tag_list)>0:
# if len(recommend_tag_list)>1:
# functions_list += [
# {
# "filter": {"term": {"tag_list": recommend_tag_list[0]}},
# "weight": 4
# },
# {
# "filter": {"terms": {"tag_list": recommend_tag_list[1:]}},
# "weight": 3
# }
# ]
# else:
# functions_list.append(
# {
# "filter": {"terms": {"tag_list": recommend_tag_list}},
# "weight": 3
# }
# )
# for tag_id in recommend_tag_dict:
# functions_list.append(
# {
# "filter": {"term": {"tag_list": tag_id}},
# "weight": recommend_tag_dict[tag_id]
# }
# )
# low_content_level = 4 if query_type == TopicPageType.FIND_PAGE else 3
query_function_score = {
"query": {
"bool": {
"filter": [
# {"range": {"content_level": {"gte": low_content_level, "lte": 5}}},
# {"term": {"has_image":True}},
{"term": {"is_online": True}},
{"term": {"is_deleted": False}}
],
......@@ -278,10 +200,13 @@ class TopicUtils(object):
}
}
],
"minimum_should_match": 1
"minimum_should_match": 1,
"must_not": [
{"term": {"is_history": True}}
]
}
},
"score_mode": "sum",
"score_mode": "max",
"boost_mode": "sum",
"functions": functions_list
}
......@@ -292,103 +217,338 @@ class TopicUtils(object):
}
}
if len(filter_topic_id_list) > 0:
query_function_score["query"]["bool"]["must_not"] = {
"terms": {
"id": filter_topic_id_list
}
}
query_function_score["query"]["bool"]["must_not"] = [
{"terms": {"id": filter_topic_id_list}}
]
if len(linucb_user_id_list) > 0:
if "must_not" in query_function_score["query"]["bool"]:
query_function_score["query"]["bool"]["must_not"] += [
{"terms": {"user_id": linucb_user_id_list}}
]
else:
query_function_score["query"]["bool"]["must_not"] = [
{"terms": {"user_id": linucb_user_id_list}}
]
if query is not None:  # topic search
multi_fields = {
'description': 200,
'content': 300,
'name': 400,
'tag_name_list': 300,
}
query_fields = ['^'.join((k, str(v))) for (k, v) in multi_fields.items()]
multi_match = {
'query': query,
'type': 'best_fields',
'operator': 'or',
'fields': ["content", "tag_name_list"],
}
query_function_score["boost_mode"] = "replace"
query_function_score["query"]["bool"]["should"] = [
{'multi_match': multi_match},
{"term": {"tag_list": tag_id}}
{"term": {"tag_list": tag_id}},
{"term": {"user_nick_name_pre": query.lower()}}
]
query_function_score["query"]["bool"]["minimum_should_match"] = 1
q["query"]["function_score"] = query_function_score
q["collapse"] = {
"field": "user_id"
}
query_function_score["query"]["bool"]["filter"].append(
{"range": {"content_level": {"gte": 3, "lte": 6}}}
)
else:
if "must_not" in query_function_score["query"]["bool"]:
query_function_score["query"]["bool"]["must_not"] += [
{"term": {"is_operation_home_recommend": True}}
]
else:
query_function_score["query"]["bool"]["must_not"] = [
{"term": {"is_operation_home_recommend": True}}
]
if query_type == TopicPageType.FIND_PAGE:
query_function_score["query"]["bool"]["filter"].append(
{"range": {"content_level": {"gte": 4, "lte": 6}}}
)
else:
query_function_score["query"]["bool"]["filter"].append(
{"term": {"content_level": 6}}
)
q["_source"] = {
"includes": ["id", "pictorial_id", "offline_score", "user_id", "edit_tag_list"]
"includes": ["id", "highlight", "description"]
}
q["sort"] = [
{
"_script": {
"type": "number",
"script": {
"lang": "expression",
"source": "_score+doc['offline_score']"
# "lang":"painless",
# "source":"_score+params._source.offline_score"
},
"order": "desc"
if query is None:
if user_id and user_id > 0:
redis_key_prefix = "physical:user_similar:participant_user_id:"
similar_redis_key = redis_key_prefix + str(user_id)
redis_user_similar_data = redis_client.get(similar_redis_key)
user_similar_list = json.loads(redis_user_similar_data) if redis_user_similar_data else []
if len(user_similar_list) > 0:
functions_list.extend(user_similar_list)
if len(attention_user_id_list) > 0:
functions_list.append(
{
"filter": {"constant_score": {"filter": {"terms": {"user_id": attention_user_id_list}}}},
"weight": 100,
}
)
if len(attention_tag_list) > 0:
functions_list.append(
{
"filter": {"bool": {
"should": {"terms": {"tag_list": attention_tag_list}}}},
"weight": 100
}
)
query_function_score["functions"] = functions_list
q["query"]["function_score"] = query_function_score
if not disable_collpase:
q["collapse"] = {
"field": "user_id"
}
q["sort"] = [
# {
# "_script": {
# "type": "number",
# "script": {
# "lang": "expression",
# "source": "_score+doc['offline_score']"
# # "lang":"painless",
# # "source":"_score+params._source.offline_score"
# },
# "order": "desc"
# }
# },
{
"offline_score": {
"order": "desc"
}
},
{
"_score": {
"order": "desc"
}
}
]
topic_id_list = list()
same_group_id_set = set()
same_user_id_set = set()
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name=index_type, query_body=q,
offset=offset, size=size, routing=routing)
for item in result_dict["hits"]:
topic_id_list.append(item["_source"]["id"])
# for item in result_dict["hits"]:
# if item["_source"]["group_id"]>0 and item["_source"]["group_id"] not in same_group_id_set:
# same_group_id_set.add(item["_source"]["id"])
# topic_id_list.append(item["_source"]["id"])
# else:
# same_group_id_set.add(item["_source"]["id"])
#
# if item["_source"]["user_id"] not in same_user_id_set:
# same_user_id_set.add(item["_source"]["id"])
# topic_id_list.append(item["_source"]["id"])
# else:
# same_user_id_set.add(item["_source"]["id"])
#
# if len(topic_id_list) >= single_size:
# break
for item in result_dict["hits"]:
topic_id_list.append(item["_source"]["id"])
else:
multi_match = {
'query': query,
'type': 'best_fields',
'operator': 'and',
'fields': ["content", "tag_name_list"],
}
functions_list += [
{
"weight": 400,
"filter": {
"constant_score": {
"filter": {
"term": {"user_nick_name_pre": query.lower()}
}
}
}
},
{
"weight": 400,
"filter": {
"constant_score": {
"filter": {
"bool": {
"must": {
"term": {"content_level": 6},
},
"minimum_should_match": 1,
"should": [
{'match_phrase': {"content": query}},
{'match_phrase': {"tag_name_list": query}},
# {'multi_match': multi_match},
{"term": {"tag_list": tag_id}},
{"term": {"user_nick_name_pre": query.lower()}}
]
}
}
}
}
},
{
"weight": 400,
"filter": {
"constant_score": {
"filter": {
"bool": {
"must": {
"term": {"content_level": 5},
},
"minimum_should_match": 1,
"should": [
{'match_phrase': {"content": query}},
{'match_phrase': {"tag_name_list": query}},
# {'multi_match': multi_match},
{"term": {"tag_list": tag_id}},
{"term": {"user_nick_name_pre": query.lower()}}
]
}
}
}
}
},
{
"weight": 400,
"filter": {
"constant_score": {
"filter": {
"bool": {
"must": {
"term": {"content_level": 4},
},
"minimum_should_match": 1,
"should": [
{'match_phrase': {"content": query}},
{'match_phrase': {"tag_name_list": query}},
# {'multi_match': multi_match},
{"term": {"tag_list": tag_id}},
{"term": {"user_nick_name_pre": query.lower()}}
]
}
}
}
}
}
]
query_function_score["functions"] = functions_list
q["query"]["function_score"] = query_function_score
q["sort"] = [
# {
# "_script": {
# "type": "number",
# "script": {
# "lang": "expression",
# "source": "_score+doc['offline_score']"
# # "lang":"painless",
# # "source":"_score+params._source.offline_score"
# },
# "order": "desc"
# }
# },
{
"_score": {
"order": "desc"
}
},
{
"latest_reply_time": {
"order": "desc"
}
},
{
"offline_score": {
"order": "desc"
}
}
]
q["highlight"] = ESPerform.get_highlight(["content"])
result_dict = ESPerform.get_search_results(ESPerform.get_cli(), sub_index_name=index_type, query_body=q,
offset=offset, size=size, routing=routing)
for item in result_dict["hits"]:
ret_data_list.append({"id": item["_source"]["id"], "highlight": item.get("highlight", {})})
topic_id_list.append(item["_source"]["id"])
return topic_id_list, ret_data_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return list(), list()
@classmethod
def userful_tag_topic_list(cls, user_id, have_read_topic_list, size,
index_type="topic-high-star", routing=None, useful_tag_list=[]):
"""
:remark topic detail page recommendation list (time-decay ranking still missing)
:param user_id:
:param topic_tag_list:
:param topic_group_id:
:param topic_user_id:
:param offset:
:param size:
:return:
"""
try:
es_cli_obj = ESPerform.get_cli()
# useful_tag_list = list()
# q = dict()
# q["query"] = {
# "term": {
# "user_id": user_id
# }
# }
#
# if len(topic_id_list) < single_size:
# for topic_id in same_group_id_set:
# topic_id_list.append(topic_id)
# if len(topic_id_list)>=single_size:
# break
# for topic_id in same_user_id_set:
# topic_id_list.append(topic_id)
# if len(topic_id_list)>=single_size:
# break
# q["_source"] = {
# "include": ["useful_tag_list"]
# }
# result_dict = ESPerform.get_search_results(es_cli_obj, "user", q, 0, 1)
# if len(result_dict["hits"]) == 0:
# logging.warning("not find user_id:%d in es!" % int(user_id))
# else:
# useful_tag_list = result_dict["hits"][0]["_source"]["useful_tag_list"]
if len(useful_tag_list) == 0:
return []
else:
q = dict()
q["query"] = {
"bool": {
"must": [
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"terms": {"useful_tag_list": useful_tag_list}},
{"term": {"content_level": 6}}
],
"must_not": {
"terms": {
"id": have_read_topic_list
}
}
}
}
# logging.warning("topic_tag_list:%s"%str(topic_tag_list))
# query_function_score = {
# "query": {
# "bool": {
# "must": [
# {"term": {"is_online": True}},
# {"term": {"is_deleted": False}},
# {"terms": {"tag_list": useful_tag_list}}
# ],
# "must_not": {
# "terms": {
# "id": have_read_topic_list
# }
# }
# }
# }
# }
q["_source"] = {
"includes": ["id"]
}
result_dict = ESPerform.get_search_results(es_cli_obj, sub_index_name=index_type, query_body=q,
size=size,
routing=routing)
topic_id_list = list()
for item in result_dict["hits"]:
topic_id_list.append(item["_source"]["id"])
return topic_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_topic_detail_recommend_list(cls, user_id, topic_id, topic_tag_list, topic_pictorial_id, topic_user_id,
filter_topic_user_id, have_read_topic_list, offset, size, es_cli_obj=None,index_type="topic"):
filter_topic_user_id, have_read_topic_list, offset, size, es_cli_obj=None,
index_type="topic", routing=None):
"""
:remark topic detail page recommendation list (time-decay ranking still missing)
:param user_id:
......@@ -435,7 +595,7 @@ class TopicUtils(object):
"query": {
"bool": {
"must": [
# {"range": {"content_level": {"gte": 3, "lte": 5}}},
{"range": {"content_level": {"gte": 4, "lte": 6}}},
{"term": {"is_online": True}},
{"term": {"is_deleted": False}}
],
......@@ -465,13 +625,140 @@ class TopicUtils(object):
}
result_dict = ESPerform.get_search_results(es_cli_obj, sub_index_name=index_type, query_body=q,
offset=offset, size=size, routing=routing)
return result_dict["hits"]
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def top_get_topic_detail_recommend_list(cls, user_id, topic_id, have_read_topic_list, size, es_cli_obj=None,
index_type="topic", routing=None, collection_topic_tag_list=[],
topic_tag_list=[],
topic_user_id=-1):
"""
:remark topic detail page recommendation list (time-decay ranking still missing)
:param user_id:
:param topic_tag_list:
:param topic_group_id:
:param topic_user_id:
:param offset:
:param size:
:return:
"""
try:
if not es_cli_obj:
es_cli_obj = ESPerform.get_cli()
q = dict()
q["query"] = dict()
# logging.warning("topic_tag_list:%s"%str(topic_tag_list))
functions_list = [
{
"linear": {
"create_time": {
"scale": "1d",
"decay": 0.5
}
}
}
]
# if len(topic_tag_list) > 0:
# functions_list.append(
# {
# "filter": {"bool": {
# "should": {"terms": {"tag_list": topic_tag_list}}}},
# "weight": 5000
# }
# )
# if topic_user_id != -1:
# functions_list.append(
# {
# "filter": {"bool": {
# "should": {"term": {"user_id": topic_user_id}}}},
# "weight": 5000
# }
# )
if len(topic_tag_list) != 0 or topic_user_id != -1:
query_function_score = {
"query": {
"bool": {
"must": [
{"range": {"content_level": {"gte": 3, "lte": 6}}},
{"term": {"is_online": True}},
{"term": {"is_deleted": False}}
],
"must_not": {
"terms": {
"id": have_read_topic_list
}
}
}
},
"score_mode": "sum",
"boost_mode": "sum",
"functions": functions_list
}
else:
query_function_score = {
"query": {
"bool": {
"must": [
{"range": {"content_level": {"gte": 4, "lte": 6}}},
{"term": {"is_online": True}},
{"term": {"is_deleted": False}}
],
"must_not": {
"terms": {
"id": have_read_topic_list
}
}
}
},
"score_mode": "sum",
"boost_mode": "sum",
"functions": functions_list
}
if len(topic_tag_list) > 0:
query_function_score["query"]["bool"]["filter"] = {
"terms": {
"edit_tag_list": topic_tag_list
}
}
if topic_user_id != -1:
query_function_score["query"]["bool"]["filter"] = {
"term": {
"user_id": topic_user_id
}
}
q["query"]["function_score"] = query_function_score
if topic_user_id == -1:
q["collapse"] = {
"field": "user_id"
}
q["_source"] = {
"includes": ["id", "pictorial_id", "user_id", "_score"]
}
# "includes": ["id", "pictorial_id", "user_id", "_score", "create_time", "content_level"]
q['sort'] = [
{"latest_reply_time": {"order": "desc"}},
# {"create_time": {"order": "desc"}}
]
result_dict = ESPerform.get_search_results(es_cli_obj, sub_index_name=index_type, query_body=q, size=size,
routing=routing)
topic_id_list = list()
for item in result_dict["hits"]:
topic_id_list.append(item["_source"]["id"])
return topic_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@classmethod
def get_topic_tag_id_list(cls, topic_id, es_cli_obj=None):
"""
......@@ -605,7 +892,7 @@ class TopicUtils(object):
{"term": {"is_deleted": False}},
]
logging.info("get filters:%s"%filters)
logging.info("get filters:%s" % filters)
if not filters:
return f
......@@ -615,14 +902,7 @@ class TopicUtils(object):
"term": {k: v},
})
if k == "is_complaint":
f.append({
"term": {k: v},
})
if v in (None, '', []):
continue
if k in ["create_time_gte", "create_time_lte"]:
......@@ -654,20 +934,37 @@ class TopicUtils(object):
}
}
})
elif k.endswith("__exclude"):
filed = k[:-5]
op = "lt"
f.append({
"range": {
filed: {
op: v,
}
}
})
op = "gt"
f.append({
"range": {
filed: {
op: v,
}
}
})
elif k.startswith("__gte") or k.startswith("__lte") or \
k.startswith("__gt") or k.startswith("__lt"):
if k.startswith("__gte"):
elif k.endswith("__gte") or k.endswith("__lte") or k.endswith("__gt") or k.endswith("__lt"):
if k.endswith("__gte"):
op = "gte"
filed = k[:-5]
elif k == "__lte":
elif k.endswith("__lte"):
op = "lte"
filed = k[:-5]
elif k == "__gt":
op = "lte"
elif k.endswith("__gt"):
op = "gt"
filed = k[:-4]
elif k == "__lt":
op = "lte"
elif k.endswith("__lt"):
op = "lt"
filed = k[:-4]
f.append({
......@@ -706,7 +1003,7 @@ class TopicUtils(object):
return nf
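# A sketch of the range-suffix convention handled above: a key such as
# "reply_num__gte" splits into field "reply_num" and op "gte". Deriving the slice
# from the suffix length avoids the hand-counted [:-5]/[:-4] offsets.
def split_range_key(k):
    for suffix in ("__gte", "__lte", "__gt", "__lt"):
        if k.endswith(suffix):
            return k[:-len(suffix)], suffix[2:]
    return k, None

assert split_range_key("reply_num__gte") == ("reply_num", "gte")
assert split_range_key("reply_num__lt") == ("reply_num", "lt")
assert split_range_key("reply_num") == ("reply_num", None)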
@classmethod
def process_sort(cls, sorts_by, pictorial_id):
"""处理排序部分。"""
sort_rule = []
......@@ -720,7 +1017,18 @@ class TopicUtils(object):
"order": "desc"
},
})
if sorts_by == TOPIC_SEARCH_SORT.TOPIC_ADD_TIME:
sort_rule.append({
"related_billboard.topic_add_createtime": {
"order": "desc",
"nested_path": "related_billboard",
"nested_filter": {
"term": {
"related_billboard.pictorial_id": pictorial_id
}
}
},
})
elif isinstance(sorts_by, list):
for sort_by in sorts_by:
if sort_by == TOPIC_SEARCH_SORT.ID_AEC:
......@@ -759,13 +1067,117 @@ class TopicUtils(object):
"order": "desc"
},
})
elif sort_by == TOPIC_SEARCH_SORT.REAL_VOTE_AEC:
sort_rule.append({
"related_billboard.real_vote_cnt": {
"order": "asc",
"nested_path": "related_billboard",
"missing": "_last",
"nested_filter": {
"term": {
"related_billboard.pictorial_id": pictorial_id
}
}
},
})
elif sort_by == TOPIC_SEARCH_SORT.REAL_VOTE_DESC:
sort_rule.append({
"related_billboard.real_vote_cnt": {
"order": "desc",
"nested_path": "related_billboard",
# "missing": "_last",
"nested_filter": {
"term": {
"related_billboard.pictorial_id": pictorial_id
}
}
},
})
elif sort_by == TOPIC_SEARCH_SORT.VIRT_VOTE_AEC:
sort_rule.append({
"related_billboard.virt_vote_cnt": {
"order": "asc",
"nested_path": "related_billboard",
# "missing": "_last",
"nested_filter": {
"term": {
"related_billboard.pictorial_id": pictorial_id
}
}
},
})
elif sort_by == TOPIC_SEARCH_SORT.VIRT_VOTE_DESC:
sort_rule.append({
"related_billboard.virt_vote_cnt": {
"order": "desc",
"nested_path": "related_billboard",
# "missing": "_last",
"nested_filter": {
"term": {
"related_billboard.pictorial_id": pictorial_id
}
}
},
})
logging.info("get picotirial:%s" % sort_rule)
return sort_rule
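# The nested sorts above order topics by a field that lives inside the
# related_billboard nested documents, with nested_filter restricting the sort value
# to the current pictorial, so one topic can rank differently in different
# pictorials. Minimal shape of such a clause (pictorial id 42 is hypothetical):
_nested_vote_sort = {
    "related_billboard.total_vote_cnt": {
        "order": "desc",
        "nested_path": "related_billboard",
        "nested_filter": {"term": {"related_billboard.pictorial_id": 42}},
    }
}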
@classmethod
def list_topic_ids(cls, filters, nfilters, sorts_by, offset=0, size=10, index_name="topic", filter_online=True):
try:
must = cls.process_filters(filters, filter_online=filter_online)
q = {
"query": {
"bool": {
"must": must,
"must_not": cls.process_nfilters(nfilters),
}
}
}
if 'pictorial_id' in filters.keys():
if sorts_by:
sorts = cls.process_sort(sorts_by, filters["pictorial_id"])
if sorts:
q["sort"] = sorts
else:
if sorts_by:
sorts = cls.process_sort(sorts_by, pictorial_id=None)
if sorts:
q["sort"] = sorts
result_dict = ESPerform.get_search_results(
ESPerform.get_cli(), sub_index_name=index_name,
query_body=q, offset=offset, size=size
)
if len(result_dict["hits"]) > 0:
topic_id_list = [item["_source"]["id"] for item in result_dict["hits"]]
return (topic_id_list, result_dict["total_count"])
elif offset == 0 and "pictorial_id" in filters: # 防止帖子同步延迟,画报详情页为空
pictorial_id = int(filters["pictorial_id"])
topic_id_list = list(PictorialTopics.objects.filter(pictorial_id=pictorial_id, is_online=True,
is_deleted=False).values_list("topic_id",
flat=True)[
offset:size])
return (topic_id_list, len(topic_id_list))
else:
return ([], 0)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return ([], 0)
@classmethod
def business_topic_ids(cls, filters, nfilters, sorts_by, offset=0, size=10, index_name="topic", filter_online=True):
must = cls.business_filters(filters, filter_online=filter_online)
q = {
"query": {
"bool": {
......@@ -773,12 +1185,18 @@ class TopicUtils(object):
"must_not": cls.process_nfilters(nfilters),
}
}
}
if 'pictorial_id' in filters.keys():
if sorts_by:
sorts = cls.process_sort(sorts_by, filters["pictorial_id"])
if sorts:
q["sort"] = sorts
else:
if sorts_by:
sorts = cls.process_sort(sorts_by, pictorial_id=None)
if sorts:
q["sort"] = sorts
try:
result_dict = ESPerform.get_search_results(
......@@ -798,64 +1216,96 @@ class TopicUtils(object):
}
@classmethod
def business_topic_ids(cls, filters, nfilters, sorts_by, offset=0, size=10, index_name="topic", filter_online=True):
def business_filters(cls, filters, filter_online=True):
"""处理过滤器部分。"""
logging.info("get filters:%s" % filters)
f = [
{"term": {"is_deleted": False}},
]
if not filters:
return f
if filter_online:
f.append({"term": {"is_online": True}})
must = cls.process_filters(filters, filter_online=filter_online)
query = ''
for k, v in filters.items():
if v in (None, '', []):
continue
if k == "content":
query = filters[k]
q = {}
q["query"] = {
"function_score": {
"functions": [{
"filter": {
"bool": {
"must": must,
"must_not": cls.process_nfilters(nfilters),
}
},
"weight": 1
}],
"query": {
f.append({
"multi_match": {
"fields":["content"],
"fields": ["content"],
"type": "cross_fields",
"operator": "and",
"query": query
"query": v
}
}
}
})
}
if query == '':
q["query"] = {
"bool": {
"must": must,
"must_not": cls.process_nfilters(nfilters),
elif k == "virtual_content_level":
f.append({
"match": {k: v}
})
elif k in ["create_time_gte", "create_time_lte"]:
if k == "create_time_gte":
op = "gte"
elif k == "create_time_lte":
op = "lte"
f.append({
"range": {
"create_time_val": {
op: v,
}
}
})
elif k == "drop_score":
if v == "0":
f.append({
"term": {k: v}
})
}
else:
f.append({
"range": {
"drop_score": {
"gte": v,
}
}
})
# elif k == "pictorial_id":
# f.append({
# "nested": {
# "path": "related_billboard",
# "query": {
# "bool": {
# "must": [
# {
# "term": {
# "related_billboard.pictorial_id": v
# }
# }
# ]
# }
# }
# }
# })
if sorts_by:
sorts = cls.process_sort(sorts_by)
if sorts:
q["sort"] = sorts
else:
try:
result_dict = ESPerform.get_search_results(
ESPerform.get_cli(), sub_index_name=index_name,
query_body=q, offset=offset, size=size
)
if isinstance(v, list):
f.append({
"terms": {k: v},
})
else:
f.append({
"term": {k: v},
})
return {
"hits": result_dict["hits"],
"total_count": result_dict["total_count"]
}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {
"hits": [],
"total_count": 0
}
return f
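# Usage sketch for business_filters (run after the class definition; values are made
# up): content becomes a multi_match clause, drop_score "0" stays an exact term, and
# list values become terms filters.
_f = TopicUtils.business_filters({"content": "lipstick", "drop_score": "0", "user_id": [1, 2]})
assert {"term": {"drop_score": "0"}} in _f
assert {"terms": {"user_id": [1, 2]}} in _f
assert any("multi_match" in clause for clause in _f)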
......@@ -25,7 +25,6 @@ def business_topic_search(filters, nfilters=None, sorts_by=None, offset=0, size=
index_name="topic"
)
logging.info("get result_lsit:%s"%result_list)
topic_ids = [item["_source"]["id"] for item in result_list["hits"]]
return {"topic_ids": topic_ids, "total_count": result_list["total_count"]}
except:
......
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
import time
import datetime
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.group import GroupUtils
from search.utils.common import GroupSortTypes
from trans2es.models.pictorial import PictorialTopics
from trans2es.models.pictorial import CommunityPictorialActivity
from alpha_types.venus import PICTORIAL_ACTIVITY_SORT
@bind("physical/search/query_pictorial")
......@@ -35,7 +41,7 @@ def query_pictorial(query="", offset=0, size=10):
@bind("physical/search/pictorial_sort")
def pictorial_sort(user_id=-1, sort_type=GroupSortTypes.HOT_RECOMMEND, offset=0, size=10, device_id=""):
"""
:remark pictorial sorting; still missing: previous-day commenter count * x
:param user_id:
......@@ -52,15 +58,22 @@ def pictorial_sort(user_id=-1, sort_type=GroupSortTypes.HOT_RECOMMEND, offset=0,
es_cli_obj = ESPerform.get_cli()
if sort_type == GroupSortTypes.HOT_RECOMMEND:
# tags the user follows
attention_tag_list = list()
if user_id > 0:
redis_tag_data = redis_client.hget("physical:linucb:register_user_tag_info", user_id)
attention_tag_list = json.loads(redis_tag_data) if redis_tag_data else []
return {"pictorial_recommend_ids": pictorial_ids_list}
pictorial_ids_list = GroupUtils.get_hot_pictorial_recommend_result_list(offset, size, es_cli_obj,
attention_tag_list)
return {"pictorial_recommend_ids": pictorial_ids_list}
elif sort_type == GroupSortTypes.ATTENTION_RECOMMEND:
attention_pictorial_list = GroupUtils.get_user_attention_pictorial_list(user_id, offset=0, size=1,
es_cli_obj=es_cli_obj)
if len(attention_pictorial_list) == 0:
return {"pictorial_recommend_ids": []}
else:
attention_pictorial_id_list = [item["pictorial_id"] for item in attention_pictorial_list]
sorted_pictorial_ids_list = GroupUtils.get_pictorial_ids_by_aggs(attention_pictorial_id_list,
......@@ -73,7 +86,6 @@ def pictorial_sort(user_id=-1, sort_type=GroupSortTypes.HOT_RECOMMEND, offset=0,
for item in sorted_attention_pictorial_list:
if item["pictorial_id"] not in pictorial_recommend_ids_list:
pictorial_recommend_ids_list.append(item["pictorial_id"])
return {"pictorial_recommend_ids": pictorial_recommend_ids_list[offset:(offset + size)]}
except:
......@@ -92,23 +104,38 @@ def pictorial_topic(topic_id=-1, offset=0, size=10):
:return:
"""
try:
if not isinstance(topic_id, int):
user_id = -1
# get the ES client
es_cli_obj = ESPerform.get_cli()
q = {}
# fetch the pictorials this topic belongs to
q["query"] = {
"term": {
"id": topic_id
"bool": {
"must": [
{
"term": {
"id": topic_id
}
},
{
"term": {
"is_online": True
}
}
],
"must_not": [
{
"term": {
"is_history": True
}
}
]
}
}
q["_source"] = {
"includes": ["id", "pictorial_id", "tag_list"]
}
result_dict = ESPerform.get_search_results(es_cli_obj, "topic", q, offset, size)
logging.info("get result_dict:%s" % result_dict)
pict_pictorial_ids_list = []
topic_tag_list = []
pictorial_id_list = []
......@@ -167,10 +194,17 @@ def pictorial_topic(topic_id=-1, offset=0, size=10):
"term": {
"is_online": True
}
}, {
"term": {
"is_deleted": False
}
}, {
"range": {
"topic_id_list": {
"gte": 0
}
}
}]
}
}
......@@ -202,3 +236,292 @@ def pictorial_topic(topic_id=-1, offset=0, size=10):
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"pictorial_ids_list": []}
@bind("physical/search/pictorial_topic_sort")
def pictorial_topic_sort(pictorial_id=-1, offset=0, size=10):
"""
:remark pictorial "popularity" ordering:
sort by vote count descending; ties break by the image-vote update time, oldest first
:param user_id:
:param sort_type:
:param offset:
:param size:
:return:
"""
try:
q = {
"query": {
"bool": {
"must": [
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"term": {"pictorial_id": pictorial_id}}
# {
# "nested": {
# "path": "related_billboard",
# "query": {
# "bool": {
# "must": [
# {
# "term": {
# "related_billboard.pictorial_id": pictorial_id
# }
# }
# ]
# }
# }
# }
# }
]
}
},
"sort": [
{"related_billboard.total_vote_cnt": {
"order": "desc",
"nested_path": "related_billboard",
"missing": "_last",
"nested_filter": {
"term": {
"related_billboard.pictorial_id": pictorial_id
}
}
}},
{"create_time": {"order": "desc"}}
]
}
logging.info("get qqqqqq:%s" % q)
pict_pictorial_ids_list = []
# get the ES client
es_cli_obj = ESPerform.get_cli()
result_dict = ESPerform.get_search_results(es_cli_obj, "topic", q, offset, size)
# logging.info("get pictorial_topic_sort res:%s" % result_dict)
for item in result_dict["hits"]:
topic_id = item["_source"]["id"]
pict_pictorial_ids_list.append(topic_id)
return {"pict_pictorial_ids_list": pict_pictorial_ids_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"pict_pictorial_ids_list": []}
@bind("physical/search/search_physical")
def search_physical(query="", offset=0, size=10):
"""
Search pictorials from the search page.
:param query:
:param offset:
:param size:
:return:
1. query parsing: fuzzy matching is allowed
2. recall: drop pictorials that have no cover, no topics, are offline, or are defaults
3. ranking: unchanged (copied over from the previous version)
"""
try:
result_dicts = GroupUtils.get_search_pictorial_topic(query, offset, size)
group_ids_list = []
if len(result_dicts["hits"]) > 0:
group_ids_list = [item["_source"]["id"] for item in result_dicts["hits"]]
return {"search_pictorial_ids": group_ids_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"search_pictorial_ids": []}
@bind("physical/search/pictorial_activity")
def pictorial_activity_sort(activity_id=0, offset=0, size=10, sort_type=1):
"""
Ranking and "latest"-tab ordering for billboard activities:
the ranking orders by total votes on the activity's topics, descending;
"latest" orders by the billboard's creation time;
offline topics and topics without a cover are filtered out
:param activity_id:
:param offset:
:param size:
:param sort_type:
:return:
"""
try:
now = datetime.datetime.now()
activity_status = CommunityPictorialActivity.objects.filter(id=activity_id).values_list("end_time",
flat=True).first()
now = int(time.mktime(now.timetuple()))
activity_status = int(time.mktime(activity_status.timetuple()))
logging.info("get now:%s" % now)
logging.info("get activity_status:%s" % activity_status)
pictorial_ids_list = []
if sort_type == 1:
redis_key = "pictorial:activity:vote:id:" + str(activity_id)
else:
redis_key = "pictorial:activity:create:id:" + str(activity_id)
if now < activity_status:
q = {
"query": {
"bool": {
"must": [
{
"term": {
"activity_join": activity_id
}
},
{
"term": {
"is_cover": True
}
},
{
"term": {
"is_online": True
}
}
]
}
}
}
q["sort"] = process_sort(sort_type)
es_cli_obj = ESPerform.get_cli()
result_dict = ESPerform.get_search_results(es_cli_obj, "pictorial", q, offset, size)
if len(result_dict["hits"]) > 0:
for item in result_dict["hits"]:
pictorial_id = item["_source"]["id"]
vote_num = item["_source"]["topic_vote_number"]
pictorial_ids_list.append({"pictorial_id": pictorial_id, "vote_num": vote_num})
redis_client.set(redis_key, json.dumps(pictorial_ids_list))
else:
redis_field_val_list = redis_client.get(redis_key)
pictorial_ids_list_all = json.loads(str(redis_field_val_list, encoding="utf-8"))
pictorial_ids_list = pictorial_ids_list_all[offset:size + offset]
return {"pictorial_activity_sort": pictorial_ids_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"pictorial_activity_sort": []}
def process_sort(sorts_by):
"""处理排序部分。"""
sort_rule = []
if isinstance(sorts_by, int):
if sorts_by == PICTORIAL_ACTIVITY_SORT.VOTE_NUM:
sort_rule.append({
"topic_vote_number": {
"order": "desc"
}
})
if sorts_by == PICTORIAL_ACTIVITY_SORT.CREATE_TIME:
sort_rule.append({
"create_time": {
"order": "desc"
}
})
logging.info("get picotirial:%s" % sort_rule)
return sort_rule
@bind("physical/search/pictorialid_topic")
def pictorial_topic_sort_peoplehot(pictorial_id=-1, user_id=-1, offset=0, size=10):
try:
if not isinstance(pictorial_id, int):
pictorial_id = -1
results = pictorial_topic_sort(pictorial_id=pictorial_id, size=100)
pict_pictorial_ids_list = results["pict_pictorial_ids_list"]
logging.info("get pict_pictorial_ids_list res:%s" % pict_pictorial_ids_list)
q = {
"query": {
"bool": {
"must": [
{"term": {"is_online": True}},
{"term": {"is_deleted": False}},
{"term": {"pictorial_id": pictorial_id}},
{"term": {"user_id": user_id}}
]
}
},
"sort": [
{"related_billboard.total_vote_cnt": {
"order": "desc",
"nested_path": "related_billboard",
"missing": "_last",
"nested_filter": {
"term": {
"related_billboard.pictorial_id": pictorial_id
}
}
}},
{"create_time": {"order": "desc"}}
]
}
# get the ES client
es_cli_obj = ESPerform.get_cli()
result_dict = ESPerform.get_search_results(es_cli_obj, "topic", q, offset, size)
user_pict_pictorial_ids_list = []
for item in result_dict["hits"]:
topic_id = item["_source"]["id"]
user_pict_pictorial_ids_list.append(topic_id)
logging.info("get user_pict_pictorial_ids_list res:%s" % user_pict_pictorial_ids_list)
pictorial_user_topicid_list = list()
for user_id_topic in user_pict_pictorial_ids_list:
if user_id_topic in pict_pictorial_ids_list:
topic_sort = pict_pictorial_ids_list.index(user_id_topic)
pictorial_user_topicid_list.append({"topic_id": user_id_topic, "topic_sort": topic_sort + 1})
else:
index = user_pict_pictorial_ids_list.index(user_id_topic)
pictorial_user_topicid_list.append({"topic_id": user_id_topic, "topic_sort": 100 + index})
return {"pictorial_user_topicid_list": pictorial_user_topicid_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"pict_pictorial_ids_list": []}
@bind("physical/search/pictorial_activity_sort")
def get_pictorial_activeity_sort(activity_id=0, pictorial_id=0):
try:
pictorial_activity_sort_only = []
pictorial_ids_list = pictorial_activity_sort(activity_id=activity_id, size=999, offset=0, sort_type=1)
logging.info("get pictorial_ids_list:%s" % pictorial_ids_list)
data = pictorial_ids_list.get("pictorial_activity_sort", [])
logging.info("get data:%s" % data)
if data:
pictorial_ids = []
for item in data:
pictorial_ids.append(item["pictorial_id"])
if pictorial_id in pictorial_ids:
pictorial_sort = pictorial_ids.index(pictorial_id)
pictorial_activity_sort_only.append(
{"pictorial_id": pictorial_id, "pictorial_sort": pictorial_sort + 1})
else:
pictorial_activity_sort_only.append({"pictorial_id": pictorial_id, "pictorial_sort": 1000})
return {"pictorial_activity_sort": pictorial_activity_sort_only}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"pictorial_activity_sort": []}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.product import ProductUtils
from search.utils.common import GroupSortTypes
from libs.es import ESPerform
@bind("physical/search/query_product_sku")
def product_hot_sort(query='', offset=0, size=10, filters={}):
'''
Product SKU ranking
:param query:
:param offset:
:param size:
:param sort_type:
:param filters:
:return:
'''
try:
res = ProductUtils.get_product_sku(query=query, offset=offset, size=size, filters=filters)
product_list = []
res_hit = res["hits"]
for item in res_hit:
product_id = item["_source"]["id"]
product_list.append(product_id)
return {"product_hot_ids": product_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"product_hot_ids": []}
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gm_rpcd.all import bind
import logging
import traceback
import random
import json
from libs.cache import redis_client
from libs.es import ESPerform
from search.utils.group import GroupUtils
from search.utils.common import GroupSortTypes
from libs.es import ESPerform
from trans2es.models.pictorial import PictorialTopics
from trans2es.models.tag import SettingsConfig, Tag
from libs.cache import redis_client
@bind("physical/search/search_hotword")
def search_hotword(device_id=-1):
"""
:remark: "hot inspiration" words for the search page
Content sources: operator-configured hot search words (register_show_tag),
personalized tags (physical:linucb:tag_recommend:device_id:),
and core words (community_tag, collection=1), which must be deduplicated.
1) Randomly pick up to 6 operator-configured hot words; if fewer exist, take what is there.
2) Then pick the same number of linUCB tags; when short, fill with core tags —
   the deduplicated linUCB + core set must match the hot-word count.
3) Finally shuffle everything into the hot-inspiration list; tags beyond 3 rows are
   dropped, fewer than 3 rows is fine.
:param query:
:param offset:
:param size:
:return:
"""
try:
all_tag_name_list = set()
# results_registr_tag = json.loads(redis_client.get("physical:search_hotword:results_registr_tag"))
results_tag = json.loads(redis_client.get("physical:search_hotword:results_tag"))
# fetch operator-configured hot search words first
results_registr_tag = list(set(SettingsConfig.objects.filter(is_deleted=False,key=1).values_list("val", flat=True)))
tag_val_list = set()
for item in results_registr_tag:
for word in item.split():
tag_val_list.add(word)
# take at most 6; random.sample raises if asked for more than exist
tag_id_list = random.sample(range(0, len(tag_val_list)), min(6, len(tag_val_list)))
for tag_id in tag_id_list:
tag_val = list(tag_val_list)[tag_id]
all_tag_name_list.add(tag_val)
logging.info("get all_tag_name_list:%s" % all_tag_name_list)
# fetch personalized tags
linucb_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
tag_recommend_redis_key = linucb_recommend_redis_prefix + str(device_id)
linucb_recommend_tag_data = redis_client.get(tag_recommend_redis_key)
linucb_recommend_tag_list = json.loads(linucb_recommend_tag_data) if linucb_recommend_tag_data else []
for item in linucb_recommend_tag_list:
results_tag_recommend = list(
set(Tag.objects.filter(id=item, is_online=True).values_list("name", flat=True)))
if results_tag_recommend:
all_tag_name_list.add(results_tag_recommend[0])
logging.info("get all_tag_name_list:%s" % all_tag_name_list)
if len(all_tag_name_list) == 12:
return {"recommend_tag_name": list(all_tag_name_list)}
# fill with core tags when still short
if len(all_tag_name_list) < 12:
for i in range(0, 12):
tag_id = random.randint(0, len(results_tag) - 1)
results_tag_hexin = Tag.objects.filter(id=results_tag[tag_id], is_online=True,
collection=1).values_list("name",
flat=True)
if results_tag_hexin:
if results_tag_hexin[0] not in all_tag_name_list:
all_tag_name_list.add(results_tag_hexin[0])
logging.info("get all_tag_name_list:%s" % all_tag_name_list)
if len(all_tag_name_list) >= 12:
return {"recommend_tag_name": list(all_tag_name_list)}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_tag_name": []}
......@@ -10,6 +10,9 @@ from search.utils.topic import TopicUtils
from libs.es import ESPerform
from libs.cache import redis_client
from search.utils.common import *
from trans2es.models.tag import TopicTag,AccountUserTag,CommunityTagFollow,Tag
import time
from django.conf import settings
def get_highlight(fields=[]):
......@@ -24,33 +27,31 @@ def get_highlight(fields=[]):
@bind("physical/search/query_tag")
def query_tag(query,offset,size):
try:
"""
q = {
if query:
query = query.lower()
pre_q = {
"query":{
"bool":{
"bool": {
"must":[
{"term":{"is_online":True}},
{"term": {"is_deleted": False}}
],
"should":[
{"multi_match":{
"query": query,
"fields":["name"],
"operator":"and"}}
],
"minimum_should_match":1
{"term":{"name_pre": query}},
{"term":{"is_online": True}}
]
}
},
"sort":[
{"near_new_topic_num":{"order":"desc"}},
{'_score': {"order": "desc"}}
],
"_source": {
"includes": ["id", "name"]
"include": ["id", "name", "is_deleted", "is_online"]
}
}
q["highlight"] = get_highlight(["name"])
"""
ret_list = list()
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="tag",query_body=pre_q,offset=0,size=1)
if len(result_dict["hits"])>0:
hitLight = u'<ems>%s</ems>' % query
result_dict["hits"][0]["_source"]["highlight"] = result_dict["hits"][0]["_source"]["name"].replace(query, hitLight)
ret_list.append(result_dict["hits"][0]["_source"])
size -= 1
q = {
"suggest":{
......@@ -58,6 +59,7 @@ def query_tag(query,offset,size):
"prefix":query,
"completion":{
"field":"suggest",
"size":size,
"contexts":{
"is_online": [True],
"is_deleted": [False]
......@@ -73,7 +75,6 @@ def query_tag(query,offset,size):
}
}
result_dict = ESPerform.get_search_results(ESPerform.get_cli(),sub_index_name="tag",query_body=q,offset=offset,size=size,is_suggest_request=True)
for tips_item in result_dict["suggest"]["tips-suggest"]:
......@@ -121,3 +122,85 @@ def query_by_tag_type(tag_type_id,offset,size):
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"tag_list": []}
@bind("physical/search/choice_push_tag")
def choice_push_tag(device_id, user_id):
"""
:remark pick tags for push
:param device_id:
:param user_id:
:return:
"""
try:
redis_key_prefix = "physical:push_tag:user_id:"
redis_push_tag_key = redis_key_prefix + str(user_id)
redis_push_tag_data = redis_client.get(redis_push_tag_key)
redis_push_tag_dict = json.loads(redis_push_tag_data) if redis_push_tag_data else {}
now_sec = int(time.time())
valid_time = 8*7*24*60*60
ori_key_list = list(redis_push_tag_dict.keys())
for tag_id in ori_key_list:
if now_sec - redis_push_tag_dict[tag_id] >= valid_time:
redis_push_tag_dict.pop(tag_id)
redis_push_tag_list = list(redis_push_tag_dict.keys())
redis_push_tag_list = [int(item) for item in redis_push_tag_list]
account_user_tag_list = list(AccountUserTag.objects.filter(user=user_id,is_deleted=False).values_list("tag_id",flat=True))
community_tag_follow_list = list(CommunityTagFollow.objects.filter(user_id=user_id,is_online=True,is_deleted=False).values_list("tag_id",flat=True))
linucb_recommend_redis_prefix = "physical:linucb:tag_recommend:device_id:"
tag_recommend_redis_key = linucb_recommend_redis_prefix + str(device_id)
linucb_recommend_tag_data = redis_client.get(tag_recommend_redis_key)
linucb_recommend_tag_list = json.loads(linucb_recommend_tag_data) if linucb_recommend_tag_data else []
account_user_tag_list.extend(community_tag_follow_list)
account_user_tag_list.extend(linucb_recommend_tag_list)
unread_tag_list = list(set(account_user_tag_list) - set(redis_push_tag_list))
unread_tag_list = list(Tag.objects.filter(id__in=unread_tag_list, is_online=True, is_deleted=False).values_list("id",flat=True))
ret_tag_set = set()
if len(unread_tag_list)>0:
for tag_id in unread_tag_list:
valid_tag_topic_num = TopicTag.objects.filter(tag_id=tag_id,is_online=True).count()
if valid_tag_topic_num>100:
ret_tag_set.add(tag_id)
redis_push_tag_dict[tag_id] = now_sec
if len(ret_tag_set)>=1:
break
redis_client.set(redis_push_tag_key, json.dumps(redis_push_tag_dict))
return {"tag_list": list(ret_tag_set)}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"tag_list": []}
@bind("physical/search/identity_tag_name")
def identity_tag_name(topic_content):
try:
ret_tag_set = set()
redis_key_name = "physical:tag_name_set"
body = {
'text': topic_content,
'analyzer': "gm_default_index"
}
cli_info = settings.TAG_ES_INFO_LIST
res = ESPerform.get_analyze_results(es_cli=ESPerform.get_cli(cli_info=cli_info), sub_index_name="tag", query_body=body)
logging.info("duan add,res:%s" % str(res).encode("utf-8"))
for item in res["tokens"]:
token_word = item["token"]
is_member = redis_client.sismember(redis_key_name, token_word)
if is_member:
ret_tag_set.add(token_word)
return {"tag_name_list": list(ret_tag_set)}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"tag_name_list": []}
......@@ -7,10 +7,13 @@ import logging
import traceback
import json
from search.utils.topic import TopicUtils
from trans2es.models.topic import TopicHomeRecommend
from libs.es import ESPerform
from libs.cache import redis_client
from search.utils.common import *
from django.conf import settings
import datetime
def get_discover_page_topic_ids(user_id, device_id, size, query_type=TopicPageType.FIND_PAGE):
......@@ -24,11 +27,14 @@ def get_discover_page_topic_ids(user_id, device_id, size, query_type=TopicPageTy
redis_field_val_list = redis_client.hmget(redis_key, redis_field_list)
have_read_topic_id_list = json.loads(redis_field_val_list[0]) if redis_field_val_list[0] else []
if have_read_topic_id_list is None:
have_read_topic_id_list = list()
recommend_topic_ids,ret_data_list = TopicUtils.get_recommend_topic_ids(user_id=user_id, tag_id=0, offset=0, size=size,
single_size=size,
query_type=query_type,
filter_topic_id_list=have_read_topic_id_list,
index_type="topic", routing="4,5,6")
have_read_topic_id_list.extend(recommend_topic_ids)
redis_dict = {
......@@ -43,115 +49,171 @@ def get_discover_page_topic_ids(user_id, device_id, size, query_type=TopicPageTy
def get_home_recommend_topic_ids(user_id, device_id, tag_id, offset, size, query=None,
query_type=TopicPageType.HOME_RECOMMEND, promote_topic_list=[], disable_collpase=False,
usefulrecall=-1, useful_tag_list=[]):
try:
topic_star_routing = "6"
index_type = "topic-high-star"
device_redis_key = ""
if query is None:
# redis_key = "physical:home_recommend" + ":user_id:" + str(
# user_id) + ":device_id:" + device_id + ":query_type:" + str(query_type)
redis_key = "physical:home_recommend" + ":device_id:" + device_id + ":query_type:" + str(query_type)
if user_id > 0:
redis_key = "physical:home_recommend" + ":user_id:" + str(user_id) + ":query_type:" + str(query_type)
device_redis_key = "physical:home_recommend" + ":device_id:" + device_id + ":query_type:" + str(
query_type)
else:
redis_key = "physical:home_recommend" + ":device_id:" + device_id + ":query_type:" + str(query_type)
else:
# redis_key = "physical:home_query" + ":user_id:" + str(
# user_id) + ":device_id:" + device_id + ":query:" + str(query) + ":query_type:" + str(query_type)
redis_key = "physical:home_query" + ":device_id:" + device_id + ":query:" + str(query) + ":query_type:" + str(query_type)
topic_star_routing = "3,4,5,6"
index_type = "topic"
if user_id > 0:
redis_key = "physical:home_query" + ":user_id:" + str(user_id) + ":query:" + str(
query) + ":query_type:" + str(query_type)
device_redis_key = "physical:home_query" + ":device_id:" + device_id + ":query:" + str(
query) + ":query_type:" + str(query_type)
else:
redis_key = "physical:home_query" + ":device_id:" + device_id + ":query:" + str(
query) + ":query_type:" + str(query_type)
redis_field_list = [b'have_read_topic_list']
redis_field_val_list = redis_client.hmget(redis_key, redis_field_list)
topic_recommend_redis_key = "physical:linucb:topic_recommend:device_id:" + str(device_id)
# recommend_tag_dict = dict()
# tag_recommend_val = redis_client.get(tag_recommend_redis_key)
# if tag_recommend_val:
# recommend_tag_dict = json.loads(str(tag_recommend_val, encoding="utf-8"))
recommend_topic_list=list()
recommend_topic_dict = redis_client.hgetall(topic_recommend_redis_key)
if b"data" in recommend_topic_dict:
recommend_topic_id_list = json.loads(recommend_topic_dict[b"data"])
cursor = int(str(recommend_topic_dict[b"cursor"], encoding="utf-8"))
newcursor = cursor + 5
if len(recommend_topic_id_list) > newcursor:
recommend_topic_list = recommend_topic_id_list[cursor:newcursor]
redis_client.hset(topic_recommend_redis_key,"cursor",newcursor)
# fetch already-read topic ids
have_read_topic_id_list = list()
if redis_field_val_list[0]:
if query is None:
have_read_topic_id_list = list(json.loads(redis_field_val_list[0]))
else:
if offset>0:
if offset > 0: # 首次搜索时不需要过滤已读
have_read_topic_id_list = list(json.loads(redis_field_val_list[0]))
user_similar_score_redis_key = "physical:user_similar_score:user_id:" + str(user_id)
redis_user_similar_score_redis_val = redis_client.get(user_similar_score_redis_key)
user_similar_score_redis_list = json.loads(
redis_user_similar_score_redis_val) if redis_user_similar_score_redis_val else []
size = size-len(recommend_topic_list)
topic_id_list = TopicUtils.get_recommend_topic_ids(user_id=user_id, tag_id=tag_id, offset=offset, size=size,
single_size=size,query=query, query_type=query_type,
filter_topic_id_list=have_read_topic_id_list,
recommend_tag_list=recommend_topic_list,
user_similar_score_list=user_similar_score_redis_list,index_type="topic-high-star")
have_read_group_id_set = set()
have_read_user_id_set = set()
unread_topic_id_dict = dict()
# # 当前页小组数量
# cur_page_group_num = 0
# # 当前页用户数量
# cur_page_user_num = 0
#
# for topic_id in topic_id_dict:
# if topic_id_dict[topic_id][0] in have_read_group_id_set or topic_id_dict[topic_id][
# 1] in have_read_user_id_set:
# unread_topic_id_dict[topic_id] = topic_id_dict[topic_id]
elif len(device_redis_key) > 0:
redis_field_val_list = redis_client.hmget(device_redis_key, redis_field_list)
if redis_field_val_list[0]:
if query is None:
have_read_topic_id_list = list(json.loads(redis_field_val_list[0]))
else:
                    if offset > 0:  # the first search page does not need read filtering
have_read_topic_id_list = list(json.loads(redis_field_val_list[0]))
        if have_read_topic_id_list is None:
have_read_topic_id_list = list()
have_read_topic_id_list.extend(promote_topic_list)
useful_topic_id_list = list()
recommend_topic_user_list = list()
attention_tag_list = list()
recommend_topic_list = list()
if query is None:
if user_id != -1:
                # recall by "useful" tags
if usefulrecall != -1:
useful_topic_id_list = TopicUtils.userful_tag_topic_list(user_id, have_read_topic_id_list, 4,
"topic-high-star", "6",
useful_tag_list=useful_tag_list)
# user_similar_score_redis_key = "physical:user_similar_score:user_id:" + str(user_id)
# redis_user_similar_score_redis_val = redis_client.get(user_similar_score_redis_key)
# user_similar_score_redis_list = json.loads(
# redis_user_similar_score_redis_val) if redis_user_similar_score_redis_val else []
size = size - len(useful_topic_id_list)
have_read_topic_id_list.extend(useful_topic_id_list)
            # LinUCB recommended topics
topic_recommend_redis_key = "physical:linucb:topic_recommend:device_id:" + str(device_id)
recommend_topic_dict = redis_client.hgetall(topic_recommend_redis_key)
linucb_recommend_topic_id_list = list()
recommend_topic_list = list()
if b"data" in recommend_topic_dict:
linucb_recommend_topic_id_list = json.loads(recommend_topic_dict[b"data"])
            if linucb_recommend_topic_id_list is None:
linucb_recommend_topic_id_list = list()
            # recommended topics are force-inserted, so make sure they are not in the read list
logging.warning(
"type1:%s,type2:%s" % (type(linucb_recommend_topic_id_list), type(have_read_topic_id_list)))
recommend_topic_id_list = list(set(linucb_recommend_topic_id_list) - set(have_read_topic_id_list))
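            # set() difference loses ordering, so re-sort by position in the original
            # list to preserve the LinUCB ranking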
recommend_topic_id_list.sort(key=linucb_recommend_topic_id_list.index)
# cursor = int(str(recommend_topic_dict[b"cursor"], encoding="utf-8"))
# newcursor = cursor + 6
if len(recommend_topic_id_list) > 0:
recommend_topic_list = recommend_topic_id_list[0:size]
# redis_client.hset(topic_recommend_redis_key, "cursor", newcursor)
if b"datadict" in recommend_topic_dict:
linucb_recommend_topic_id_dict = json.loads(recommend_topic_dict[b"datadict"])
if linucb_recommend_topic_id_dict is not None and len(recommend_topic_list) > 0:
for i in recommend_topic_list:
recommend_topic_user_list.append(linucb_recommend_topic_id_dict[str(i)])
# if have_read_topic_id_list == None:
# have_read_topic_id_list = list()
            # tags the user follows
redis_tag_data = redis_client.hget("physical:linucb:register_user_tag_info", user_id)
attention_tag_list = json.loads(redis_tag_data) if redis_tag_data else []
if len(recommend_topic_list) > 0:
size = size - len(recommend_topic_list)
have_read_topic_id_list.extend(recommend_topic_list)
# have_read_topic_id_list_add_promote = list()
# have_read_topic_id_list_add_promote.extend(have_read_topic_id_list)
# promote_recommend_topic_id_list = TopicHomeRecommend.objects.using(settings.SLAVE_DB_NAME).filter(
# is_online=1).values_list("topic_id",flat=True)
#
# for topic_id in promote_recommend_topic_id_list:
# have_read_topic_id_list_add_promote.append(topic_id)
rank_topic_id_list = list()
ret_data_list = list()
if size > 0:
            rank_topic_id_list, ret_data_list = TopicUtils.get_recommend_topic_ids(user_id=user_id, tag_id=tag_id, offset=0, size=size,
single_size=size, query=query,
query_type=query_type,
filter_topic_id_list=have_read_topic_id_list,
index_type=index_type, routing=topic_star_routing,
attention_tag_list=attention_tag_list,
linucb_user_id_list=recommend_topic_user_list,
disable_collpase=disable_collpase)
# if len(recommend_topic_list) == 6 and query is None:
# if (size < 11):
# topic_id_list.extend(rank_topic_id_list[0:3])
# topic_id_list.extend(recommend_topic_list[0:3])
# topic_id_list.extend(rank_topic_id_list[3:size])
# topic_id_list.extend(recommend_topic_list[3:6])
# else:
# if isinstance(topic_id_dict[topic_id][0], int) and topic_id_dict[topic_id][
# 0] > 0 and cur_page_group_num < (size * 0.9):
# have_read_group_id_set.add(topic_id_dict[topic_id][0])
# have_read_user_id_set.add(topic_id_dict[topic_id][1])
# have_read_topic_id_list.append(topic_id)
# cur_page_group_num += 1
# recommend_topic_ids.append(topic_id)
# elif topic_id_dict[topic_id] and cur_page_user_num < (size * 0.1):
# have_read_user_id_set.add(topic_id_dict[topic_id][1])
# cur_page_user_num += 1
# recommend_topic_ids.append(topic_id)
# have_read_topic_id_list.append(topic_id)
# else:
# unread_topic_id_dict[topic_id] = topic_id_dict[topic_id]
#
# if len(recommend_topic_ids) >= size:
# break
# if len(recommend_topic_ids) < size and len(unread_topic_id_dict) > 0:
# for unread_topic_id in unread_topic_id_dict:
# if len(recommend_topic_ids) < size:
# recommend_topic_ids.append(unread_topic_id)
# have_read_topic_id_list.append(unread_topic_id)
# else:
# break
# topic_id_list.extend(rank_topic_id_list[0:size - 7])
# topic_id_list.extend(recommend_topic_list[0:3])
# topic_id_list.extend(rank_topic_id_list[size - 7:size])
# topic_id_list.extend(recommend_topic_list[3:6])
# else:
# topic_id_list.extend(rank_topic_id_list)
have_read_topic_id_list.extend(rank_topic_id_list)
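        # cap the stored read history so the redis value cannot grow unbounded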
if len(have_read_topic_id_list) > 30000:
            cut_len = len(have_read_topic_id_list) - 30000
have_read_topic_id_list = have_read_topic_id_list[cut_len:]
redis_dict = {
"have_read_topic_list": json.dumps(have_read_topic_id_list),
}
redis_client.hmset(redis_key, redis_dict)
        # expire at 3 a.m. the next day
# if redis_client.ttl(redis_key)<0:
today = datetime.datetime.strptime(str(datetime.date.today()), "%Y-%m-%d")
tomorrow = today + datetime.timedelta(days=1)
nowTime = datetime.datetime.now()
        expire_time = (tomorrow - nowTime).seconds + 3 * 60 * 60
redis_client.expire(redis_key, expire_time)
ret_list = rank_topic_id_list if query is None else ret_data_list
if usefulrecall != -1:
return recommend_topic_list, ret_list, useful_topic_id_list
else:
return recommend_topic_list, ret_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
if usefulrecall != -1:
return [], [], []
else:
return [], []
@bind("physical/search/query_tag_id_by_topic")
......@@ -164,7 +226,8 @@ def query_tag_id_by_topic(offset=0, size=10, topic_id_list=[], user_id=-1):
@bind("physical/search/home_recommend")
def home_recommend(device_id="", user_id=-1, offset=0, size=10, query_type=TopicPageType.HOME_RECOMMEND,
                   promote_topic_list=[], usefulrecall=-1, useful_tag_list=[]):
"""
    :remark: home-page recommendation; currently only diary topics are recommended
:param session_id:
......@@ -180,7 +243,8 @@ def home_recommend(device_id="", user_id=-1, offset=0, size=10, query_type=Topic
device_id = ""
recommend_topic_ids = list()
rank_topic_ids = list()
useful_topic_ids = list()
es_node_load_high_flag = False
# try:
# es_node_load_high_flag = ESPerform.if_es_node_load_high(ESPerform.get_cli())
......@@ -196,18 +260,34 @@ def home_recommend(device_id="", user_id=-1, offset=0, size=10, query_type=Topic
have_read_topic_id_list = list(json.loads(redis_field_val_list[0]))
if len(have_read_topic_id_list) > offset:
                recommend_topic_ids = have_read_topic_id_list[offset:offset + size]
else:
recommend_topic_ids = have_read_topic_id_list[0:size]
else:
if usefulrecall != -1 and len(useful_tag_list) > 0:
recommend_topic_ids, rank_topic_ids, useful_topic_ids = get_home_recommend_topic_ids(user_id, device_id,
tag_id=0, offset=0,
size=size,
query_type=query_type,
promote_topic_list=promote_topic_list,
usefulrecall=usefulrecall,
useful_tag_list=useful_tag_list)
return {"linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids,
"useful_topic_ids": useful_topic_ids}
else:
recommend_topic_ids, rank_topic_ids = get_home_recommend_topic_ids(user_id, device_id, tag_id=0,
offset=0, size=size,
query_type=query_type,
promote_topic_list=promote_topic_list)
return {"recommend_topic_ids": recommend_topic_ids}
return {"linucb_topic_ids": recommend_topic_ids, "rank_topic_ids": rank_topic_ids}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_topic_ids": []}
if usefulrecall != -1:
return {"linucb_topic_ids": [], "rank_topic_ids": [], "useful_topic_ids": []}
else:
return {"linucb_topic_ids": [], "rank_topic_ids": []}
@bind("physical/search/discover_page")
......@@ -235,7 +315,7 @@ def discover_page(device_id="", user_id=-1, size=10):
@bind("physical/search/home_query")
def home_query(device_id="", tag_id=-1, user_id=-1, query="", offset=0, size=10):
def home_query(device_id="", tag_id=-1, user_id=-1, query="", offset=0, size=10, query_type=-1):
"""
    :remark: home-page search; currently only diary topics are recommended
:param session_id:
......@@ -246,13 +326,29 @@ def home_query(device_id="", tag_id=-1, user_id=-1, query="", offset=0, size=10)
:return:
"""
try:
result_topic_data = list()
if not user_id:
user_id = -1
if not isinstance(device_id, str):
device_id = ""
recommend_topic_list, rank_topic_id_list = get_home_recommend_topic_ids(user_id, device_id, tag_id,
offset=offset, size=size, query=query)
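        # if collapsing left less than a full page of results, retry once without ES
        # field collapsing (the "disable_collpase" spelling follows the existing parameter)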
if len(rank_topic_id_list) > 0 and len(rank_topic_id_list) < size:
recommend_topic_list, rank_topic_id_list = get_home_recommend_topic_ids(user_id, device_id, tag_id,
offset=offset, size=size,
query=query, disable_collpase=True)
if query_type != 3 and rank_topic_id_list:
for item in rank_topic_id_list:
result_topic_data.append(item["id"])
logging.info("get result_topic_data:%s" % result_topic_data)
return {"recommend_topic_ids": result_topic_data}
return {"recommend_topic_ids": rank_topic_id_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"recommend_topic_ids": []}
......@@ -260,7 +356,7 @@ def home_query(device_id="", tag_id=-1, user_id=-1, query="", offset=0, size=10)
@bind("physical/search/topic_detail_page_recommend")
def topic_detail_page_recommend(device_id="", user_id=-1, topic_id=-1, topic_pictorial_id=-1, topic_user_id=-1,
                                filter_topic_user_id=False, offset=0, size=10, topic_tag_list=[]):
"""
    :remark: topic-detail-page recommendation strategy; the first-card strategy is still missing
:param user_id:
......@@ -272,31 +368,95 @@ def topic_detail_page_recommend(device_id="", user_id=-1, topic_id=-1, topic_pic
try:
if not isinstance(user_id, int):
user_id = -1
        redis_key = "physical:topic_detail_page_recommend" + ":user_id:" + str(user_id) + ":device_id:" + str(device_id)
        have_read_topic_redis_data = redis_client.get(redis_key)
        have_read_topic_list = json.loads(have_read_topic_redis_data) if have_read_topic_redis_data else []
        # get ES client object
es_cli_obj = ESPerform.get_cli()
have_read_topic_list.append(topic_id)
topic_user_result = list()
topic_tag_result = list()
result = list()
if len(topic_tag_list) != 0:
topic_tag_result = TopicUtils.top_get_topic_detail_recommend_list(user_id, topic_id, have_read_topic_list,
size, es_cli_obj,
index_type="topic", routing="3,4,5,6",
topic_tag_list=topic_tag_list)
topic_tag_size = len(topic_tag_result)
have_read_topic_list.extend(topic_tag_result)
else:
topic_tag_size = 0
if topic_tag_size < size:
size = size - topic_tag_size
if topic_user_id != -1:
topic_user_result = TopicUtils.top_get_topic_detail_recommend_list(user_id, topic_id,
have_read_topic_list,
size, es_cli_obj,
index_type="topic",
routing="3,4,5,6",
topic_user_id=topic_user_id
)
topic_user_size = len(topic_user_result)
have_read_topic_list.extend(topic_user_result)
if topic_user_size < size:
size = size - topic_user_size
result = TopicUtils.top_get_topic_detail_recommend_list(user_id, topic_id,
have_read_topic_list,
size, es_cli_obj,
index_type="topic", routing="4,5,6")
have_read_topic_list.extend(result)
# have_read_topic_redis_data = redis_client.get(redis_key)
# have_read_topic_list = json.loads(have_read_topic_redis_data) if have_read_topic_redis_data else []
#
# redis_key = "physical:topic_detail_page_recommend" + ":user_id:" + str(user_id) + ":device_id:" + str(device_id)
# have_read_topic_redis_data = redis_client.get(redis_key)
# have_read_topic_list = json.loads(have_read_topic_redis_data) if have_read_topic_redis_data else []
#
# # 获取es链接对象
# es_cli_obj = ESPerform.get_cli()
#
# # 获取帖子标签列表
# topic_tag_list = TopicUtils.get_topic_tag_id_list(topic_id, es_cli_obj)
# result_list = TopicUtils.get_topic_detail_recommend_list(user_id, topic_id, topic_tag_list, topic_pictorial_id,
# topic_user_id, filter_topic_user_id,
# have_read_topic_list, offset, size, es_cli_obj,index_type="topic",routing="4,5,6")
# result_list = TopicUtils.get_recommend_topic_ids(user_id, tag_id=-1, single_size=size, offset=offset, size=size,
# topic_user_id=topic_user_id, current_topic_id=topic_id,
# topic_tag_list=topic_tag_list)
#
# user_recommend_list = TopicUtils.get_recommend_topic_ids(user_id, tag_id=-1, single_size=size, offset=offset, size=size,
# topic_user_id=topic_user_id, current_topic_id=topic_id,
# topic_tag_list=topic_tag_list)
result_list = []
logging.warning("topic_tag_result:%s" % str(topic_tag_result))
logging.warning("topic_user_result:%s" % str(topic_user_result))
logging.warning("result:%s" % str(result))
recommend_topic_ids_list = list()
if len(result_list) > 0:
recommend_topic_ids_list = [item["_source"]["id"] for item in result_list]
have_read_topic_list.extend(recommend_topic_ids_list)
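        # keep only the most recent 5000 read ids before persisting back to redis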
have_read_topic_len = len(have_read_topic_list)
if have_read_topic_len > 5000:
have_read_topic_list = have_read_topic_list[(have_read_topic_len - 5000):]
redis_client.set(redis_key, json.dumps(have_read_topic_list))
recommend_topic_ids_list.extend(topic_tag_result)
recommend_topic_ids_list.extend(topic_user_result)
recommend_topic_ids_list.extend(result)
return {"recommend_topic_ids": recommend_topic_ids_list}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......@@ -334,13 +494,12 @@ def topic_search(filters, nfilters=None, sorts_by=None, offset=0, size=10):
"""帖子搜索。"""
try:
(topic_id_list, total_count) = TopicUtils.list_topic_ids(filters=filters, nfilters=nfilters,
sorts_by=sorts_by, offset=offset, size=size)
return {
"topic_ids": topic_ids,
"total_count": result_list["total_count"]
"topic_ids": topic_id_list,
"total_count": total_count
}
except:
logging.error("catch exception, err_msg:%s" % traceback.format_exc())
......@@ -358,8 +517,10 @@ def query_topic_by_user_similarity(topic_similarity_score_dict, offset=0, size=1
"""
try:
must_topic_id_list = list(topic_similarity_score_dict.keys())
        topic_id_list, ret_data_list = TopicUtils.get_recommend_topic_ids(tag_id=0, user_id=-1, offset=offset, size=size,
single_size=size,
must_topic_id_list=must_topic_id_list, index_type="topic",
routing="4,5,6")
return {"recommend_topic_ids": topic_id_list}
except:
......
......@@ -14,7 +14,7 @@ from libs.es import ESPerform
@bind("physical/search/recommend_user")
def recommend_user(self_user_id, interesting_user_id, offset=0, size=10):
"""
    :remark recommend users when following someone
:param self_user_id:
......@@ -29,25 +29,29 @@ def recommend_user(self_user_id,interesting_user_id,offset=0,size=10):
if not isinstance(interesting_user_id, int):
interesting_user_id = -1
        # get ES client object
es_cli_obj = ESPerform.get_cli()
        # get the followed-user lists
(self_attention_user_id_list, recursion_attention_user_id_list) = UserUtils.get_attention_user_list(
[self_user_id, interesting_user_id], self_user_id, es_cli_obj)
        # remove self and the interesting user's id from candidates
self_attention_user_id_list.append(self_user_id)
self_attention_user_id_list.append(interesting_user_id)
recommend_user_list = UserUtils.get_recommend_user_list(self_attention_user_id_list,
recursion_attention_user_id_list, offset, size,
es_cli_obj)
return recommend_user_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
@bind("physical/search/batch_recommend_user")
def batch_recommend_user(self_user_id, interesting_user_id_list, offset=0, size=10):
"""
    :remark recommend users when following someone - batch interface
:param self_user_id:
......@@ -57,23 +61,128 @@ def batch_recommend_user(self_user_id,interesting_user_id_list,offset=0,size=10)
:return:
"""
try:
        if not isinstance(self_user_id, int):
self_user_id = -1
        # get ES client object
es_cli_obj = ESPerform.get_cli()
        # get the followed-user lists
(need_filter_attention_user_id_list, attention_user_dict_list,
attention_user_id_list) = UserUtils.get_batch_attention_user_list(interesting_user_id_list, self_user_id,
es_cli_obj)
        # remove self and the interesting user ids from candidates
need_filter_attention_user_id_list.append(self_user_id)
recommend_user_dict = UserUtils.get_batch_recommend_user_dict(
need_filter_attention_user_id_list=need_filter_attention_user_id_list,
attention_user_id_list=attention_user_id_list, attention_user_dict_list=attention_user_dict_list,
self_user_id=self_user_id, offset=offset, size=size, es_cli_obj=es_cli_obj)
logging.info("duan add,recommend_user_dict:%s" % str(recommend_user_dict))
return recommend_user_dict
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return {}
@bind("physical/search/search_user")
def search_user(query="", offset=0, size=10):
"""
    :remark search users
    :param query:
    :param offset:
    :param size:
    :return:
    search field: user nick name
    1. query handling: fuzzy matching is allowed
    2. recall: all users whose status is online
    3. ranking: primary key - is_recommend flag; secondary key - number of topics posted
"""
try:
es_cli_obj = ESPerform.get_cli()
q = {}
        # first fetch the user ids whose nick name matches exactly
sheer_user_id = []
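        # "nick_pre" is indexed with the keyword analyzer (see the user mapping later in
        # this diff), so this term query matches the full nick name exactly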
q["query"] = {
"bool": {
"must": [
{"term": {
"nick_pre": query
}
}, {
"term": {
"is_recommend": True
}
}
]
}
}
q["sort"] = {
"count_topic": {
"order": "desc"
}
}
# que = {"query": {"term": {"nick_name_pre": query}}}
result_dict = ESPerform.get_search_results(es_cli_obj, "user", q, 0, 10)
res = result_dict["hits"]
if len(res) > 0:
sheer_user_id = [item["_source"]["user_id"] for item in res]
logging.info("get res:%s" % res)
        # then fetch the fuzzy-matched user ids
multi_match = {
"fields": ["nick_name"],
"type": "cross_fields",
"operator": "and",
"query": query
}
q = {}
q["query"] = {
"bool": {
"must": [{
"multi_match": multi_match
}, {
"term": {
"is_online": True
}
}
]
}
}
q["sort"] = {
"is_recommend": {
"order": "desc"
},
"count_topic": {
"order": "desc"
},
}
logging.info("get q:%s" % q)
result_dict = ESPerform.get_search_results(es_cli_obj, "user", q, offset, size)
logging.info("get result_dict:%s" % result_dict)
search_user_id = []
res = result_dict["hits"]
if len(res) > 0:
search_user_id = [item["_source"]["user_id"] for item in res]
return {"sheer_user_id": sheer_user_id, "search_user_id": search_user_id}
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return {"sheer_user_id": [], "search_user_id": []}
deb http://mirrors.aliyun.com/ubuntu/ bionic main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ bionic-security main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic-security main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ bionic-updates main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic-updates main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ bionic-backports main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic-backports main restricted universe multiverse
deb http://mirrors.aliyun.com/ubuntu/ bionic-proposed main restricted universe multiverse
deb-src http://mirrors.aliyun.com/ubuntu/ bionic-proposed main restricted universe multiverse
-- Deploy flipr:sl_user_login_status to mysql
BEGIN;
CREATE TABLE `sl_user_login_status` (
`user_id` varchar(100) NOT NULL COMMENT '用户ID',
`is_shadow` tinyint(1) NOT NULL COMMENT '是否是马甲用户',
`first_visit_day` date COMMENT '首次日期',
`last_visit_day` date COMMENT '最后一次登陆日期',
`day_id` varchar(10) NOT NULL COMMENT '数据账期',
PRIMARY KEY (`user_id`),
INDEX `lv_day` (`last_visit_day`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COMMENT='数据仓库推送表用户登录状态表';
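-- Assumption: the lv_day index exists to speed up pulls filtered by last_visit_day
-- (e.g. daily warehouse snapshots); this rationale is inferred, not documented.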
-- XXX Add DDLs here.
COMMIT;
-- Revert flipr:sl_user_login_status from mysql
BEGIN;
DROP TABLE sl_user_login_status;
-- XXX Add DDLs here.
COMMIT;
[core]
engine = mysql
# plan_file = sqitch.plan
# top_dir = .
# [engine "mysql"]
# target = db:mysql:
# registry = sqitch
# client = /usr/local/mysql/bin/mysql
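# Example deploy against a local database (hypothetical target URI):
#   sqitch deploy db:mysql://flipr@localhost/flipr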
%syntax-version=1.0.0
%project=flipr
%uri=https://github.com/sqitchers/sqitch-mysql-intro/
sl_user_login_status 2019-06-25T11:06:15Z Lxrent <lxrent@lxrentdeMacBook-Pro.local> # 数据仓库推送表用户登录状态表
-- Verify flipr:sl_user_login_status on mysql
BEGIN;
SELECT user_id FROM sl_user_login_status;
-- XXX Add verifications here.
ROLLBACK;
-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAvnSXGsqnlSxWVh9e4U40lWeV1f8DOD4B/atSjfxU9CJaSisc
xtmcLcyRY91lwx2OJJ0GvTx0DBifYBRsvpu8zrG33QQgr+StuJlIKUMlCXzaqsVt
u4wWgSLRks1nKFXVL4yIsKDxUAc60abHB3x9ygM+pS182iZAaW7yowe/E05IvnkU
jLifQRgQ2jLpgOmlxI+X2BDw988exIlwqfdErmEe06DMjuCbLnhbOUhD+Q88Xtn6
7UfxN4IiQTkRai405ZEhr5QKnqmx4RQF5Am+00p8caDksOQQYYQ0sKt+52rUnwfS
p7TQw6A+1QCZtaz3Sdbvhmace2mlC/Ekl2ZVCQIDAQABAoIBACEFkAl6J7JKnLGU
ANxptd5NtoIDFCjVP30fDlJF9OjbZ/JCJVwo8NZUCMVa5sD8x997RmbbK3fJcSC0
ieJGmGbyE2IDzDMTIDfIg+V7mdlaR4OidZM2B9TeF54vdPpeX3c+E5kkXpK0njMp
ioq2wAydoWf8ShB832Aod3Ni7XNIK6QtAQEWwJTPSbXLXx6+X8JIRoVFpZmNLkOA
MG1ElGPRH5fm92D/ZYfBMkDqPUFQF28f96MazD8EFDGRyOU/rpi4pIa+fZJYlIm1
ICzpk8UvSgg3xEWRhSvzoCixdxdVToX4JCZ8jSO6IlqDwa22WmB3rhjCVP3Ctbog
kStqizECgYEA8KRXxt78XVDKcW6Ydv2wmeN2JSQxxKgypU6Ux6amF8WY+0OThCdL
JDfva+ada04UVNdJ/dz3NDBr238e03pF8Z/gZp1NV3/m5rCARXrLIfxxCAVOae0P
nQVnBF270knUWM0vO6E+EhAzKlOcrTZXNQYdtGFic9IydNd1wXx98m0CgYEAypxK
JRny68YtKzwtaFlrgCkaJqGqExLglabskubFuh0g0878bLM4Ogpd514Z46ZUzUcv
859SQzGR48XGd7lUEZvQeAnCfalyl5dc/FDIiz/P2jiwjPvMGR6XVwWllJTWZwTc
H9TQ6ls3xigU9FO3ts0bEBqTVoGGl8xYWPHdq40CgYBbWDbNkaVAEsPVzQJo4KGh
uJsHPDb8DFC1OR/2rUaM7X/FmQAtAPFf/S+PyMlmiwvirJo0mCTqFZtLhXSBF//m
2SZILVvHZBCU7tiyBwuGihmpzsHWKZtsojlZie8awtWtI63TN8ClAKs7XOOzSFZQ
FVM/Plwt1NM8UPEtEgGI/QKBgQCT5w1VEiE7rfpRIme/Ue9gqiB17PO7Yr9hX245
Oo9CcnsBQ4sPSrET5XdxK0dY4JXtGDdP75DcjjYCgOCFi1tsU0MWLYG1u9l0AcGW
St5qkFWJ5nIzKKhv+d3eX7fkw9XTdD/AWNl9CsOnOqE5TlfA8O79jXja4EjBTSF9
JGp+DQKBgC04JCqYJ4jHTQLNCEh42tajL34K8VCltNWdHrAabnd9a16YqzsdVIZ/
xxOBghO9Xwhz666v8yh5TDGAR8XA9kCNbVxeDlqWP1oqWMpHXSwUN5Q7cH/l8M8F
YlQLOkFz4B9mSobZoiupYXS9mpe2kMase2FroYkTy6NFX8mKa93q
-----END RSA PRIVATE KEY-----
......@@ -7,6 +7,16 @@ import traceback
import logging
from libs.es import ESPerform
from trans2es.type_info import get_type_info_map, TypeInfo
from vest.reply import true_comment_one, true_comment_two, true_comment_three, one_seven_topic_comment
from vest.click import true_click_five, true_click_two, true_click_four, true_click_one, true_click_three, \
one_seven_star_topic
from vest.follow import auto_follow, auto_follow_new
from vest.urge import auto_star_urge, auto_lunch_app, auto_lunch_app2, auto_urge1, auto_urge2
from vest.fix import fix_no_comment_click
from vest.reply_answer import reply_comment2, reply_comment3, answer_reply2, answer_reply3, answer_reply1
from vest.request import get_session, auto_user_id
from vest.vest_majiauser import vest_click_reply
from vest.pictorial import principal_offline_comment1, principal_online_comment1,no_reply_principal
class Command(BaseCommand):
......@@ -27,10 +37,14 @@ class Command(BaseCommand):
make_option('-s', '--pks', dest='pks', help='specify sync pks, comma separated', metavar='PKS', default=''),
make_option('--streaming-slicing', dest='streaming_slicing', action='store_true', default=True),
make_option('--no-streaming-slicing', dest='streaming_slicing', action='store_false', default=True),
make_option('-m', '--mvest', dest='mvest', help='mvest reply comment', metavar='MVEST'),
)
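    # Example invocation (the command's registered name is not shown in this diff):
    #   python manage.py <command_name> -m true_click_one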
def handle(self, *args, **options):
try:
logging.info("get-------------")
es_cli = ESPerform.get_cli()
type_name_list = get_type_info_map().keys()
......@@ -47,7 +61,6 @@ class Command(BaseCommand):
logging.info("begin create [%s] mapping!" % type_name)
ESPerform.put_index_mapping(es_cli, type_name, force_sync=True)
if len(options["indices_template"]):
template_file_name = options["indices_template"]
if ESPerform.put_indices_template(es_cli=es_cli, template_file_name=template_file_name,
......@@ -55,5 +68,84 @@ class Command(BaseCommand):
logging.info("put indices template suc!")
else:
logging.error("put indices template err!")
            # likes
if options["mvest"] == "true_click_one":
true_click_one.true_click_one()
if options["mvest"] == "true_click_two":
true_click_two.true_click_two()
if options["mvest"] == "true_click_three":
true_click_three.true_click_three()
if options["mvest"] == "true_click_four":
true_click_four.true_click_four()
if options["mvest"] == "true_click_five":
true_click_five.true_click_five()
if options["mvest"] == "one_seven_star_topic":
one_seven_star_topic.one_seven_star_topic()
            # comments
if options["mvest"] == "true_comment_one":
true_comment_one.true_comment_one()
if options["mvest"] == "true_comment_two":
true_comment_two.true_comment_two()
if options["mvest"] == "true_comment_three":
true_comment_three.true_comment_three()
if options["mvest"] == "one_seven_topic_comment":
one_seven_topic_comment.one_seven_topic_comment()
            # update-urging
if options["mvest"] == "auto_urge1":
auto_urge1.auto_urge1()
if options["mvest"] == "auto_urge2":
auto_urge2.auto_urge2()
if options["mvest"] == "auto_lunch_app":
auto_lunch_app.auto_lunch_app()
if options["mvest"] == "auto_lunch_app2":
auto_lunch_app2.auto_lunch_app2()
if options["mvest"] == "auto_star_urge":
auto_star_urge.auto_star_urge()
            # follows
if options["mvest"] == "auto_follow":
auto_follow.auto_follow()
if options["mvest"] == "auto_follow_new":
auto_follow_new.auto_follow_new()
            # backfill missing comments/likes
if options["mvest"] == "fix_no_comment_click":
fix_no_comment_click.fix_no_comment_click()
            # second-level replies
if options["mvest"] == "answer_reply1":
answer_reply1.answer_reply1()
if options["mvest"] == "answer_reply2":
answer_reply2.answer_reply2()
if options["mvest"] == "answer_reply3":
answer_reply3.answer_reply3()
if options["mvest"] == "reply_comment2":
reply_comment2.reply_comment2()
if options["mvest"] == "reply_comment3":
reply_comment3.reply_comment3()
            # get session and user_id
if options["mvest"] == "get_login_session":
get_session.get_session()
if options["mvest"] == "get_user_id":
auto_user_id.auto_user_id()
            # vest (sockpuppet) accounts 3456
if options["mvest"] == "vest_click_reply":
vest_click_reply.vest_click_reply()
            # billboard comments
if options["mvest"] == "principal_offline_comment1":
principal_offline_comment1.principal_offline_comment1()
if options["mvest"] == "principal_online_comment1":
principal_online_comment1.principal_online_comment1()
if options["mvest"] == "no_reply_principal":
no_reply_principal.no_reply_principal()
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......@@ -5,8 +5,8 @@
"is_online":{"type":"boolean"},//上线
"is_deleted":{"type":"boolean"},
"is_recommend":{"type":"boolean"},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"description":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
"description":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
"topic_num":{"type":"long"},
"creator_id":{"type":"long"},
"icon":{"type":"text"},
......@@ -14,8 +14,19 @@
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"},
"tag_id":{"type":"long"},
"tag_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"topic_id_list":{"type":"long"}
"tag_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"},
"topic_id_list":{"type":"long"},
"effective":{"type":"boolean"},
"offline_score":{"type":"long"},
"is_default":{"type":"long"},
"is_cover":{"type":"boolean"},
"topic_vote_number":{"type":"long"},
"activity_join":{"type":"long"},
"latest_real_reply_time":{"type":"date", "format":"date_time_no_millis"},
"latest_real_topic_time":{"type":"date", "format":"date_time_no_millis"},
"real_user_activate_time":{"type":"date", "format":"date_time_no_millis"},
"edit_tag_id":{"type":"long"},
"edit_tag_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_search"}
}
}
{
"dynamic":"strict",
"_routing": {"required": false},
"properties": {
"id":{"type":"long"}, //id
"is_online":{"type":"boolean"},//上线
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"},
"create_time_val":{"type":"long"},//创建时间
"update_time_val":{"type":"long"},//更新时间
"is_deleted":{"type":"boolean"}, //是否被删除
"price":{"type":"double"}, //价格
"cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"}, //商品名称
"en_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"alias":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"brand_cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"brand_en_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"brand_alias":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"category_cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"effect_cn_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"cn_name_pre":{"type": "text","analyzer":"keyword"}, //商品名称
"cn_name_sort":{"type": "text", "fielddata":"true"}, //商品名称
"en_name_pre":{"type": "text", "analyzer":"keyword"}, //商品原名
"alias_pre":{"type": "text", "analyzer":"keyword"},
"description":{"type":"text","analyzer":"keyword","search_analyzer":"keyword"},
"have_image":{"type":"boolean"},
"comment_nums":{"type":"long"},
"brand_cn_name_pre":{"type": "text", "analyzer":"keyword"}, //所属品牌名称
"brand_en_name_pre":{"type": "text", "analyzer":"keyword"}, //所属品牌原名
"brand_alias_pre":{"type": "text", "analyzer":"keyword"}, //所属品牌别名
"category_cn_name_pre":{"type": "text", "analyzer":"keyword"}, //所属类目的名称
"effect_cn_name_pre":{"type": "text", "analyzer":"keyword"}//所属功效的名称
}
}
{
"dynamic":"strict",
"properties": {
"name":{"type":"keyword"}
}
}
......@@ -3,6 +3,8 @@
"properties": {
"id":{"type":"long"},
"suggest":{
"analyzer":"keyword",
"search_analyzer":"keyword",
"type":"completion",
"contexts":[
{
......@@ -18,6 +20,7 @@
]
},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"name_pre":{"type":"text","analyzer":"keyword","search_analyzer":"keyword"},
"tag_type":{"type":"long"},
"collection":{"type":"long"},
"is_ai":{"type":"long"},
......
{
"dynamic":"strict",
"properties": {
"id":{"type":"long"},
"suggest":{
"analyzer":"keyword",
"search_analyzer":"keyword",
"type":"completion",
"contexts":[
{
"name":"is_online",
"type": "category",
"path": "is_online"
},
{
"name":"is_deleted",
"type": "category",
"path": "is_deleted"
}
]
},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"name_pre":{"type":"text","analyzer":"keyword","search_analyzer":"keyword"},
"tag_type":{"type":"long"},
"collection":{"type":"long"},
"is_ai":{"type":"long"},
"is_own":{"type":"long"},
"is_online":{"type":"keyword"},//上线
"is_deleted":{"type":"keyword"},
"near_new_topic_num":{"type":"long","store": true}
}
}
{
"dynamic":"strict",
"_routing": {"required": true},
"properties": {
"id":{"type":"long"},
"is_online":{"type":"boolean"},//上线
"is_deleted":{"type":"boolean"},
"vote_num":{"type":"long"},
"total_vote_num":{"type":"long"},
"reply_num":{"type":"long"},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"description":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
......@@ -12,8 +14,11 @@
"content_level":{"type":"text"},
"user_id":{"type":"long"},
"user_nick_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},//帖子用户名
"user_nick_name_pre": {"type":"text","analyzer":"keyword"}, //不切词的用户名
"group_id":{"type":"long"}, //所在组ID
"tag_list":{"type":"long"},//标签属性
"latest_reply_time":{"type":"date", "format":"date_time_no_millis"},
"useful_tag_list":{"type":"long"},//有用标签属性
"edit_tag_list":{"type":"long"},//编辑标签
"tag_name_list":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"share_num":{"type":"long"},
......@@ -48,7 +53,22 @@
"type": "text",
"analyzer": "gm_default_index",
"search_analyzer": "gm_default_index"
},
"is_excellent":{"type": "long"},
"is_operation_home_recommend": {"type": "boolean"}, //是否首页运营推荐
"is_history": {"type": "boolean"}, //是否历史数据
"related_billboard":{
"type":"nested",
"properties":{
"pictorial_id":{"type":"long"},
"topic_add_createtime":{"type":"long"},
"real_vote_cnt":{"type":"long"},
"virt_vote_cnt":{"type":"long"},
"total_vote_cnt":{"type":"long"}
}
}
}
}
{
"dynamic":"strict",
"_routing": {"required": true},
"properties": {
"id":{"type":"long"},
"is_online":{"type":"boolean"},//上线
"is_deleted":{"type":"boolean"},
"vote_num":{"type":"long"},
"total_vote_num":{"type":"long"},
"reply_num":{"type":"long"},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"description":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
......@@ -12,8 +14,11 @@
"content_level":{"type":"text"},
"user_id":{"type":"long"},
"user_nick_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},//帖子用户名
"user_nick_name_pre": {"type":"text","analyzer":"keyword"}, //不切词的用户名
"group_id":{"type":"long"}, //所在组ID
"tag_list":{"type":"long"},//标签属性
"useful_tag_list":{"type":"long"},//有用标签属性
"latest_reply_time":{"type":"date", "format":"date_time_no_millis"},
"edit_tag_list":{"type":"long"},//编辑标签
"tag_name_list":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"share_num":{"type":"long"},
......@@ -48,7 +53,22 @@
"type": "text",
"analyzer": "gm_default_index",
"search_analyzer": "gm_default_index"
},
"is_excellent":{"type": "long"},
"is_operation_home_recommend": {"type": "boolean"}, //是否首页运营推荐
"is_history": {"type": "boolean"}, //是否历史数据
"related_billboard":{
"type":"nested",
"properties":{
"pictorial_id":{"type":"long"},
"topic_add_createtime":{"type":"long"},
"real_vote_cnt":{"type":"long"},
"virt_vote_cnt":{"type":"long"},
"total_vote_cnt":{"type":"long"}
}
}
}
}
{
"dynamic":"strict",
"_routing": {"required": true},
"properties": {
"id":{"type":"long"},
"is_online":{"type":"boolean"},//上线
"is_deleted":{"type":"boolean"},
"vote_num":{"type":"long"},
"total_vote_num":{"type":"long"},
"reply_num":{"type":"long"},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"description":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"content":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"content_level":{"type":"text"},
"user_id":{"type":"long"},
"user_nick_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},//帖子用户名
"user_nick_name_pre": {"type":"text","analyzer":"keyword"}, //不切词的用户名
"group_id":{"type":"long"}, //所在组ID
"tag_list":{"type":"long"},//标签属性
"useful_tag_list":{"type":"long"},//有用标签属性
"latest_reply_time":{"type":"date", "format":"date_time_no_millis"},
"edit_tag_list":{"type":"long"},//编辑标签
"tag_name_list":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"share_num":{"type":"long"},
"pick_id_list":{"type":"long"},
"offline_score":{"type":"double"},//离线算分
"manual_score":{"type":"double"},//人工赋分
"has_image":{"type":"boolean"},//是否有图
"has_video":{"type":"boolean"},//是否是视频
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"},
"create_time_val":{"type":"long"},
"update_time_val":{"type":"long"},
"language_type":{"type":"long"},
"is_shadow": {"type": "boolean"},
"is_recommend": {"type": "boolean"},
"is_complaint": {"type": "boolean"}, // 是否被举报
"virtual_content_level":{"type": "text"},
"like_num_crawl": {"type": "long"}, // 爬取点赞数
"comment_num_crawl": {"type": "long"}, // 爬取评论数
"is_crawl": {"type": "boolean"},
"platform": {"type": "long"},
"platform_id": {"type": "long"},
"drop_score":{"type": "double"}, // 人工降分
"sort_score":{"type": "double"}, // 排序分
"pictorial_id":{"type": "long"}, //所在组ID
"pictorial_name":{ // 所在组名称
"type": "text",
"analyzer": "gm_default_index",
"search_analyzer": "gm_default_index"
},
"is_excellent":{"type": "long"},
"is_operation_home_recommend": {"type": "boolean"}, //是否首页运营推荐
"is_history": {"type": "boolean"}, //是否历史数据
"related_billboard":{
"type":"nested",
"properties":{
"pictorial_id":{"type":"long"},
"topic_add_createtime":{"type":"long"},
"real_vote_cnt":{"type":"long"},
"virt_vote_cnt":{"type":"long"},
"total_vote_cnt":{"type":"long"}
}
}
}
}
{
"dynamic":"strict",
"_routing": {"required": true},
"properties": {
"id":{"type":"long"},
"is_online":{"type":"boolean"},//上线
"is_deleted":{"type":"boolean"},
"vote_num":{"type":"long"},
"total_vote_num":{"type":"long"},
"reply_num":{"type":"long"},
"name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"description":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"content":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"content_level":{"type":"text"},
"user_id":{"type":"long"},
"user_nick_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},//帖子用户名
"user_nick_name_pre": {"type":"text","analyzer":"keyword"}, //不切词的用户名
"group_id":{"type":"long"}, //所在组ID
"tag_list":{"type":"long"},//标签属性
"useful_tag_list":{"type":"long"},//有用标签属性
"edit_tag_list":{"type":"long"},//编辑标签
"tag_name_list":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"share_num":{"type":"long"},
......@@ -47,7 +52,22 @@
"type": "text",
"analyzer": "gm_default_index",
"search_analyzer": "gm_default_index"
},
"is_excellent":{"type": "long"},
"is_operation_home_recommend": {"type": "boolean"}, //是否首页运营推荐
"is_history": {"type": "boolean"}, //是否历史数据
"related_billboard":{
"type":"nested",
"properties":{
"pictorial_id":{"type":"long"},
"topic_add_createtime":{"type":"long"},
"real_vote_cnt":{"type":"long"},
"virt_vote_cnt":{"type":"long"},
"total_vote_cnt":{"type":"long"}
}
}
}
}
{
"dynamic":"strict",
"_routing": {"required": true},
"properties": {
"id":{"type":"long"},
"is_online":{"type":"boolean"},//上线
......@@ -13,8 +14,11 @@
"content_level":{"type":"text"},
"user_id":{"type":"long"},
"user_nick_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},//帖子用户名
"user_nick_name_pre": {"type":"text","analyzer":"keyword"}, //不切词的用户名
"group_id":{"type":"long"}, //所在组ID
"tag_list":{"type":"long"},//标签属性
"latest_reply_time":{"type":"date", "format":"date_time_no_millis"},
"useful_tag_list":{"type":"long"},//有用标签属性
"edit_tag_list":{"type":"long"},//编辑标签
"tag_name_list":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"},
"share_num":{"type":"long"},
......@@ -49,7 +53,22 @@
"type": "text",
"analyzer": "gm_default_index",
"search_analyzer": "gm_default_index"
},
"is_excellent":{"type": "long"},
"is_operation_home_recommend": {"type": "boolean"}, //是否首页运营推荐
"is_history": {"type": "boolean"}, //是否历史数据
"related_billboard":{
"type":"nested",
"properties":{
"pictorial_id":{"type":"long"},
"topic_add_createtime":{"type":"long"},
"real_vote_cnt":{"type":"long"},
"virt_vote_cnt":{"type":"long"},
"total_vote_cnt":{"type":"long"}
}
}
}
}
......@@ -4,11 +4,14 @@
"id":{"type":"long"},
"user_id":{"type":"long"},
"nick_name":{"type":"text","analyzer":"gm_default_index","search_analyzer":"gm_default_index"}, //昵称
"nick_pre":{"type":"text","analyzer":"keyword"}, //昵称
"nick_name_pre":{"type":"text"}, //昵称
"profile_pic":{"type":"text"}, //头像
"gender":{"type":"integer"},
"is_online":{"type":"boolean"},//是否上线
"is_deleted":{"type":"boolean"},
"tag_list":{"type":"long"},//标签属性
"useful_tag_list":{"type":"long"},//有用标签属性
"city_id":{"type":"text"},
"country_id":{"type":"text"},
"is_recommend":{"type":"boolean"},//是否运营推荐用户
......@@ -59,6 +62,7 @@
"create_time":{"type":"date", "format":"date_time_no_millis"},
"update_time":{"type":"date", "format":"date_time_no_millis"},
"create_time_val":{"type":"long"},
"update_time_val":{"type":"long"}
"update_time_val":{"type":"long"},
"count_topic":{"type":"long"}
}
}
......@@ -2,9 +2,11 @@ import datetime
from django.db import models
import logging
import traceback
from django.conf import settings
from trans2es.models.topic import Reply
from .tag import Tag
from .topic import Topic
from .topic import Topic, TopicBillBoard
from .user_extra import UserExtra
class PictorialFollow(models.Model):
......@@ -36,7 +38,6 @@ class PictorialTopics(models.Model):
pictorial_id = models.BigIntegerField(verbose_name=u'画报ID')
topic_id = models.BigIntegerField(verbose_name=u'帖子ID')
is_online = models.BooleanField(verbose_name=u"是否有效", default=True)
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
......@@ -59,15 +60,34 @@ class Pictorial(models.Model):
creator_id = models.BigIntegerField(verbose_name=u'画报用户ID')
icon = models.CharField(verbose_name=u'画报名称', max_length=255)
topic_num = models.IntegerField(verbose_name=u'次数')
add_score = models.IntegerField(verbose_name=u'人工权重')
is_default = models.IntegerField(verbose_name=u"是否是默认画报")
def get_topic_id(self):
try:
topic_id_list = list(
PictorialTopics.objects.filter(pictorial_id=self.id, is_online=True, is_deleted=False).values_list(
"topic_id", flat=True))
return topic_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_effective(self, topic_id_list):
try:
effective_num = 0
ret = False
for topic_id in topic_id_list:
topic_id_object = Topic.objects.filter(id=int(topic_id)).first()
if topic_id_object and topic_id_object.is_online and int(topic_id_object.content_level) in [0, 3, 4, 5,
6]:
effective_num += 1
if effective_num >= 5:
ret = True
break
return ret
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return False
......@@ -92,30 +112,191 @@ class Pictorial(models.Model):
def get_tag_by_id(self):
try:
tag_id_list = list(
PictorialTag.objects.filter(pictorial_id=self.id, is_online=True, is_collection=1).values_list("tag_id",
flat=True))
return tag_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_tag_by_name(self, tag_id):
try:
tag_name_list = list(Tag.objects.filter(id__in=tag_id, is_online=True).values_list("name", flat=True))
return tag_name_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_edit_tag_id(self):
try:
tag_id_list = list(
PictorialTag.objects.filter(pictorial_id=self.id, is_online=True, is_collection=1).values_list("tag_id",
flat=True))
return tag_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_edit_tag_name(self, tag_id):
try:
tag_name_list = list(
Tag.objects.filter(id__in=tag_id, is_online=True).values_list("name", flat=True))
return tag_name_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_is_cover(self, topic_id_list):
try:
effective_num = 0
ret = False
for topic_id in topic_id_list:
topic_id_object = Topic.objects.filter(id=int(topic_id)).first()
if topic_id_object and topic_id_object.is_online and int(topic_id_object.content_level) in [3, 4, 5, 6]:
effective_num += 1
if effective_num >= 5:
ret = True
break
return ret
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return False
def get_topic_vote_number(self):
try:
topic_vote = TopicBillBoard.objects.filter(pictorial_id=self.id).values("real_vote_cnt", "virt_vote_cnt")
total_vote_cnt = 0
if topic_vote:
for item in topic_vote:
total_vote_cnt += int(item["virt_vote_cnt"]) + int(item["real_vote_cnt"])
return total_vote_cnt
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0
def get_activity_join(self):
try:
            activity_list = list(CommunityPictorialActivityRelation.objects.filter(pictorial_id=self.id, is_online=True,
                                                                                   is_deleted=False).values_list(
                "pictorial_activity_id", flat=True))
            return activity_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_latest_real_reply_time(self):
try:
reply_query_results = Reply.objects.using(settings.SLAVE_DB_NAME).filter(pictorial_id=self.id, is_deleted=0,
is_online=1).values("user_id",
"create_time")
datetime_list = list()
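            # only replies from real users count: shadow (sockpuppet) accounts are
            # skipped, and accounts with no UserExtra row are treated as real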
if reply_query_results.count() > 0:
for reply in reply_query_results:
UserExtra_results = UserExtra.objects.using(settings.SLAVE_DB_NAME).filter(
user_id=reply['user_id']).values('is_shadow')
if UserExtra_results.count() > 0:
if not UserExtra_results[0]['is_shadow']:
datetime_list.append(reply['create_time'])
else:
datetime_list.append(reply['create_time'])
if len(datetime_list) > 0:
dt = max(datetime_list)
return datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
else:
return datetime.datetime(1980, 1, 1, 0, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
        except:
            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
            return datetime.datetime(1980, 1, 1, 0, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
def get_latest_real_topic_time(self):
try:
topic_id_list = list(
PictorialTopics.objects.filter(pictorial_id=self.id, is_online=True, is_deleted=False).values_list(
"topic_id", flat=True))
logging.info("get topic_id_list:%s" % topic_id_list)
topic_info = Topic.objects.filter(id__in=topic_id_list).values("user_id", "create_time")
logging.info("get topic_info:%s" % topic_info)
datetime_list = list()
if topic_info.count() > 0:
for topic in topic_info:
UserExtra_results = UserExtra.objects.using(settings.SLAVE_DB_NAME).filter(
user_id=topic['user_id']).values('is_shadow')
if UserExtra_results.count() > 0:
if not UserExtra_results[0]['is_shadow']:
datetime_list.append(topic['create_time'])
else:
datetime_list.append(topic['create_time'])
logging.info("get datetime_list:%s" % datetime_list)
if len(datetime_list) > 0:
dt = max(datetime_list)
logging.info("get max datetime_list:%s" % dt)
return datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
else:
return datetime.datetime(1980, 1, 1, 0, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
        except:
            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
            return datetime.datetime(1980, 1, 1, 0, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
def get_real_user_activate_time(self):
try:
reply_query_results = Reply.objects.using(settings.SLAVE_DB_NAME).filter(pictorial_id=self.id, is_deleted=0,
is_online=1).values("user_id",
"create_time")
datetime_list = list()
if reply_query_results.count() > 0:
for reply in reply_query_results:
UserExtra_results = UserExtra.objects.using(settings.SLAVE_DB_NAME).filter(
user_id=reply['user_id']).values('is_shadow')
if UserExtra_results.count() > 0:
if not UserExtra_results[0]['is_shadow']:
datetime_list.append(reply['create_time'])
else:
datetime_list.append(reply['create_time'])
topic_id_list = list(
PictorialTopics.objects.filter(pictorial_id=self.id, is_online=True, is_deleted=False).values_list(
"topic_id", flat=True))
topic_info = Topic.objects.filter(id__in=topic_id_list, is_online=1).values("user_id", "create_time")
if topic_info.count() > 0:
for topic in topic_info:
UserExtra_results = UserExtra.objects.using(settings.SLAVE_DB_NAME).filter(
user_id=topic['user_id']).values('is_shadow')
if UserExtra_results.count() > 0:
if not UserExtra_results[0]['is_shadow']:
datetime_list.append(topic['create_time'])
else:
datetime_list.append(topic['create_time'])
logging.info("get datetime_list:%s" % datetime_list)
if len(datetime_list) > 0:
dt = max(datetime_list)
logging.info("get max datetime_list:%s" % dt)
return datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
else:
return datetime.datetime(1980, 1, 1, 0, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
        except:
            logging.error("catch exception,err_msg:%s" % traceback.format_exc())
            return datetime.datetime(1980, 1, 1, 0, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
class PictorialTag(models.Model):
"""画报关注标签"""
......@@ -131,3 +312,34 @@ class PictorialTag(models.Model):
pictorial_id = models.BigIntegerField(verbose_name=u'画报ID', max_length=20)
tag_id = models.BigIntegerField(verbose_name=u'标签ID', max_length=20)
is_online = models.BooleanField(verbose_name=u'是否上线', max_length=1)
is_collection = models.IntegerField(verbose_name=u"是否编辑标签")
class CommunityPictorialActivity(models.Model):
"""榜单活动"""
class Meta:
verbose_name = u"画报标签"
app_label = "community"
db_table = "community_pictorial_activity"
start_time = models.DateTimeField(verbose_name=u'开始时间', default=datetime.datetime.fromtimestamp(0))
end_time = models.DateTimeField(verbose_name=u'结束时间', default=datetime.datetime.fromtimestamp(0))
is_online = models.BooleanField(verbose_name=u'是否上线', max_length=1)
is_deleted = models.BooleanField(verbose_name=u'是否被删除', max_length=1)
class CommunityPictorialActivityRelation(models.Model):
"""榜单活动关系"""
class Meta:
verbose_name = u"画报标签"
app_label = "community"
db_table = "community_pictorial_activity_relation"
is_online = models.BooleanField(verbose_name=u'是否上线', max_length=1)
is_deleted = models.BooleanField(verbose_name=u'是否被删除', max_length=1)
pictorial_activity_id = models.BigIntegerField(verbose_name=u'活动ID', max_length=20)
pictorial_id = models.BigIntegerField(verbose_name=u'榜单ID', max_length=20)
import datetime
from django.db import models
import logging
import traceback
from .tag import Tag
from .topic import Topic
class CommodityProduct(models.Model):
"""画报关注"""
class Meta:
verbose_name = u"商品"
app_label = "commodity"
db_table = "commodity_product"
id = models.IntegerField(verbose_name=u'商品ID', primary_key=True)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
cn_name = models.CharField(verbose_name=u'商品名称', max_length=64)
en_name = models.CharField(verbose_name=u'商品原名', max_length=64, default="")
alias = models.CharField(verbose_name=u'别名', max_length=64)
image = models.CharField(verbose_name=u'图片', max_length=120)
description = models.CharField(verbose_name=u'商品描述', max_length=200)
comment_nums = models.IntegerField(verbose_name=u'评论数', max_length=11)
price = models.IntegerField(verbose_name="价格", max_length=11)
def get_brand_name(self):
try:
brand_id = CommodityProductBrand.objects.filter(product_id=self.id, is_deleted=False).values_list(
"brand_id", flat=True)
            result_name = CommodityBrand.objects.filter(id__in=brand_id, is_online=True, is_deleted=False).values("cn_name",
"en_name",
"alias").first()
return result_name
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_category_en_name(self):
try:
category_id = CommodityProductCategory.objects.filter(product_id=self.id, is_deleted=False).values_list(
"category_id", flat=True)
            result_name = CommodityCategory.objects.filter(id__in=category_id, is_online=True,
is_deleted=False).values_list("cn_name", flat=True)
return list(result_name)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return ""
def get_effect_cn_name(self):
try:
effect_id = CommodityProductEffect.objects.filter(product_id=self.id, is_deleted=False).values_list(
"effect_id", flat=True)
            result_name = CommodityEffect.objects.filter(id__in=effect_id, is_deleted=False).values_list("cn_name",
flat=True)
return list(result_name)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return ""
class CommodityBrand(models.Model):
"""品牌"""
class Meta:
verbose_name = u'品牌'
app_label = 'commodity'
db_table = 'commodity_brand'
id = models.IntegerField(verbose_name=u'品牌ID', primary_key=True)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
cn_name = models.CharField(verbose_name=u'品牌名称', max_length=64)
en_name = models.CharField(verbose_name=u'品牌原名', max_length=64)
alias = models.CharField(verbose_name=u'别名', max_length=64)
description = models.CharField(verbose_name=u'品牌描述', max_length=200)
class CommodityCategory(models.Model):
"""类目"""
class Meta:
verbose_name = u'类目'
app_label = 'commodity'
db_table = 'commodity_category'
id = models.IntegerField(verbose_name=u'类目ID', primary_key=True)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
cn_name = models.CharField(verbose_name=u'类目名称', max_length=64)
class CommodityEffect(models.Model):
"""功效"""
class Meta:
verbose_name = u'功效'
app_label = 'commodity'
db_table = 'commodity_effect'
id = models.IntegerField(verbose_name=u'功效ID', primary_key=True)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
is_deleted = models.BooleanField(verbose_name=u'是否删除')
cn_name = models.CharField(verbose_name=u'功效名称', max_length=64)
class CommodityProductBrand(models.Model):
"""商品品牌关系"""
class Meta:
verbose_name = u'商品品牌关系'
app_label = 'commodity'
db_table = 'commodity_productbrand'
id = models.IntegerField(verbose_name=u'ID', primary_key=True)
product_id = models.BigIntegerField(verbose_name=u'商品ID')
brand_id = models.BigIntegerField(verbose_name=u'品牌ID')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
class CommodityProductCategory(models.Model):
"""画报关注"""
class Meta:
verbose_name = u"画报"
app_label = "commodity"
db_table = "commodity_productcategory"
id = models.IntegerField(verbose_name=u'ID', primary_key=True)
is_deleted = models.BooleanField(verbose_name=u'是否删除')
product_id = models.BigIntegerField(verbose_name=u'商品ID')
category_id = models.BigIntegerField(verbose_name=u'分类ID')
class CommodityProductEffect(models.Model):
"""画报关注标签"""
class Meta:
verbose_name = u"画报标签"
app_label = "commodity"
db_table = "commodity_producteffect"
id = models.IntegerField(verbose_name=u'ID', primary_key=True)
is_deleted = models.BooleanField(verbose_name=u'是否删除')
product_id = models.BigIntegerField(verbose_name=u'商品ID')
effect_id = models.BigIntegerField(verbose_name=u'功效ID')
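# A minimal sketch (not from the source) of the values_list -> id__in join
# pattern the CommodityProduct helpers above rely on: resolve relation rows to
# ids, then filter the target table with id__in so Django issues one subquery
# instead of one query per id. Assumes the commodity models defined above.
def get_brand_names(product_id):
    brand_ids = CommodityProductBrand.objects.filter(
        product_id=product_id, is_deleted=False).values_list("brand_id", flat=True)
    # brand_ids is a lazy QuerySet, so id__in becomes a single SQL subquery.
    return list(CommodityBrand.objects.filter(
        id__in=brand_ids, is_online=True, is_deleted=False).values(
        "cn_name", "en_name", "alias"))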
......@@ -12,70 +12,98 @@ import datetime
class TopicTag(models.Model):
class Meta:
verbose_name=u"帖子标签"
db_table="community_topictag"
verbose_name = u"帖子标签"
db_table = "community_topictag"
id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
tag_id = models.IntegerField(verbose_name=u"标签ID")
topic_id = models.IntegerField(verbose_name=u"帖子ID")
is_online = models.BooleanField(verbose_name=u"是否在线")
is_collection = models.IntegerField(verbose_name=u"是否编辑标签")
is_body_esthetics = models.IntegerField(verbose_name=u"是否有用标签")
class AccountUserTag(models.Model):
class Meta:
verbose_name=u"用户标签"
db_table="account_user_tag"
verbose_name = u"用户标签"
db_table = "account_user_tag"
id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
tag_id = models.IntegerField(verbose_name=u"标签ID")
user = models.IntegerField(verbose_name=u"用户ID")
is_deleted = models.BooleanField(verbose_name=u"是否删除")
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
is_body_esthetics = models.IntegerField(verbose_name=u"是否有用标签")
class Tag(models.Model):
class Meta:
verbose_name=u"标签"
db_table="community_tag"
verbose_name = u"标签"
db_table = "community_tag"
id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
is_deleted = models.BooleanField(verbose_name=u"是否删除")
is_online = models.BooleanField(verbose_name=u"是否上线")
name = models.CharField(verbose_name=u"标签名称",max_length=128)
name = models.CharField(verbose_name=u"标签名称", max_length=128)
description = models.TextField(verbose_name=u"标签描述")
icon_url = models.CharField(verbose_name=u"icon_url", max_length=120)
collection = models.IntegerField(verbose_name=u"是否编辑")
is_ai = models.IntegerField(verbose_name=u"是否ai")
is_own = models.IntegerField(verbose_name=u"是否ins上自带")
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
class CommunityTagFollow(models.Model):
class Meta:
verbose_name = u"用户关注标签"
db_table = "community_tag_follow"
id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
is_deleted = models.BooleanField(verbose_name=u"是否删除")
is_online = models.BooleanField(verbose_name=u"是否上线")
user_id = models.IntegerField(verbose_name=u"用户ID")
tag_id = models.IntegerField(verbose_name=u"帖子ID")
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
class CommunityTagTypeRelation(models.Model):
class Meta:
verbose_name=u"标签类型对应关系"
db_table="community_tag_type_relation"
verbose_name = u"标签类型对应关系"
db_table = "community_tag_type_relation"
id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
is_deleted = models.BooleanField(verbose_name=u"是否删除")
is_online = models.BooleanField(verbose_name=u"是否上线")
tag_id = models.IntegerField(verbose_name=u"标签ID")
tag_type_id = models.IntegerField(verbose_name=u"标签类型ID")
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
class CommunityTagType(models.Model):
class Meta:
verbose_name=u"标签类型"
db_table="community_tag_type"
verbose_name = u"标签类型"
db_table = "community_tag_type"
id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
is_deleted = models.BooleanField(verbose_name=u"是否删除")
is_online = models.BooleanField(verbose_name=u"是否上线")
name = models.CharField(verbose_name=u"名称",max_length=64)
create_time = models.DateTimeField(verbose_name=u'创建时间',default=datetime.datetime.fromtimestamp(0))
name = models.CharField(verbose_name=u"名称", max_length=64)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
class SettingsConfig(models.Model):
class Meta:
verbose_name = "搜索热词"
db_table = "settingsconfig"
id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
is_deleted = models.IntegerField(verbose_name=u"是否删除")
key = models.IntegerField(verbose_name=u"值")
val = models.IntegerField(verbose_name="标签内容")
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import, print_function
import time
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
import traceback
import logging
import datetime
from libs.es import ESPerform
from django.db import models
from alpha_types.venus import GRAP_PLATFORM
from .pick_topic import PickTopic
from .tag import TopicTag, Tag
from .user_extra import UserExtra
from .group import Group
class ActionSumAboutTopic(models.Model):
class Meta:
verbose_name = u"帖子埋点数据汇总"
......@@ -44,6 +43,22 @@ class TopicImage(models.Model):
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
class ExcellentTopic(models.Model):
"""优质帖子"""
class Meta:
verbose_name = "优质帖子"
db_table = "excellent_topic"
id = models.IntegerField(verbose_name=u'ID', primary_key=True)
topic_id = models.IntegerField(verbose_name=u"帖子ID", db_index=True)
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
excellent_type = models.IntegerField(verbose_name=u"优质类型", db_index=True)
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
class Topic(models.Model):
class Meta:
verbose_name = u'日记'
......@@ -85,15 +100,18 @@ class Topic(models.Model):
def get_virtual_vote_num(self):
try:
topic_extra = TopicExtra.objects.filter(topic_id=self.id).first()
virtual_vote_num = topic_extra.virtual_vote_num if topic_extra else 0
return virtual_vote_num
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0
def get_pictorial_id(self):
try:
pictorial_id_list = []
pictorial_id = PictorialTopic.objects.filter(topic_id=self.id).values_list("pictorial_id", flat=True)
for i in pictorial_id:
pictorial_id_list.append(i)
return pictorial_id_list
......@@ -104,17 +122,37 @@ class Topic(models.Model):
@property
def is_complaint(self):
"""是否被举报"""
try:
if TopicComplaint.objects.filter(topic_id=self.id, is_online=True).exists():
return True
return False
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return False
@classmethod
def get_topic_image_num(cls, topic_id):
"""
:remark 获取指定帖子的图片数量
:param topic_id:
:return:
"""
try:
query_list = list(
TopicImage.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=topic_id, is_deleted=False,
is_online=True).values_list("url", flat=True))
return len(query_list)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0
def topic_has_image(self):
try:
has_image = False
query_list = TopicImage.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=self.id, is_deleted=False,
is_online=True)
if len(query_list) > 0:
has_image = True
......@@ -140,12 +178,13 @@ class Topic(models.Model):
topic_tag_id_list = list()
edit_tag_id_list = list()
tag_id_list = TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=self.id).values_list("tag_id",
flat=True)
tag_query_results = Tag.objects.using(settings.SLAVE_DB_NAME).filter(id__in=tag_id_list)
for tag_item in tag_query_results:
is_online = tag_item.is_online
is_deleted = tag_item.is_deleted
collection = tag_item.collection
if is_online and not is_deleted:
topic_tag_id_list.append(tag_item.id)
......@@ -155,20 +194,54 @@ class Topic(models.Model):
return (topic_tag_id_list, edit_tag_id_list)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return ([], [])
def get_edit_tag_id_list(self):
try:
tag_id_list = TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=self.id,
is_collection=1).values_list("tag_id",
flat=True)
return list(tag_id_list)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_topic_useful_tag_id_list(self):
try:
topic_useful_tag_id_list = list()
tag_id_list = TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=self.id, is_online=True,
is_body_esthetics=1)
for tag_item in tag_id_list:
topic_useful_tag_id_list.append(tag_item.tag_id)
return topic_useful_tag_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_tag_name_list(self, tag_id_list):
try:
tag_name_list = list()
for i in range(0, len(tag_id_list), 100):
query_terms_list = list(
Tag.objects.using(settings.SLAVE_DB_NAME).filter(id__in=tag_id_list[i:i + 100], is_online=True,
is_deleted=False).values_list("name", flat=True))
tag_name_list.extend(query_terms_list)
return tag_name_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def judge_if_excellent_topic(self, topic_id):
try:
excellent_topic = ExcellentTopic.objects.filter(topic_id=topic_id).first()
if excellent_topic and excellent_topic.is_online and not excellent_topic.is_deleted:
return True
else:
return False
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return False
def get_topic_offline_score(self):
try:
offline_score = 0.0
......@@ -178,30 +251,20 @@ class Topic(models.Model):
user_query_results = UserExtra.objects.using(settings.SLAVE_DB_NAME).filter(user_id=self.user_id)
if user_query_results.count() > 0:
if user_query_results[0].is_recommend:
offline_score += 2.0 * 10
elif user_query_results[0].is_shadow:
user_is_shadow = True
# 是否官方推荐小组
# if self.group and self.group.is_recommend:
# offline_score += 4.0
# 帖子等级
if self.content_level == '5':
offline_score += 100.0 * 3
elif self.content_level == '4':
offline_score += 60.0 * 3
elif self.content_level == '6':
offline_score += 200.0 * 3
if self.language_type == 1:
offline_score += 60.0
"""
1:马甲账号是否对总分降权?
......@@ -215,6 +278,67 @@ class Topic(models.Model):
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0.0
def get_related_billboard(self):
try:
related_billboard_list = list()
pictorials = PictorialTopic.objects.filter(topic_id=self.id).values("pictorial_id", "create_time")
for pictorial_id in pictorials:
query_result = TopicBillBoard.objects.filter(pictorial_id=pictorial_id.get('pictorial_id', None),
topic_id=self.id).values().first()
if query_result is None:
related_billboard_list.append(
{"pictorial_id": pictorial_id.get('pictorial_id', None), "real_vote_cnt": 0,
"virt_vote_cnt": 0, "total_vote_cnt": 0,
"topic_add_createtime": int(
time.mktime(pictorial_id.get("create_time", None).timetuple()))})
else:
total_vote_cnt = int(query_result["virt_vote_cnt"]) + int(query_result["real_vote_cnt"])
related_billboard_list.append(
{"pictorial_id": query_result["pictorial_id"], "real_vote_cnt": query_result["real_vote_cnt"],
"virt_vote_cnt": query_result["virt_vote_cnt"], "total_vote_cnt": total_vote_cnt,
"topic_add_createtime": int(time.mktime(pictorial_id.get("create_time", None).timetuple()))})
logging.info("product_brand_info" % related_billboard_list)
return related_billboard_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_latest_reply_date(self):
try:
reply_query_results = Reply.objects.using(settings.SLAVE_DB_NAME).filter(topic_id=self.id, is_deleted=0,
is_online=1).values("user_id",
"create_time")
datetime_list = list()
if reply_query_results.count() > 0:
for reply in reply_query_results:
UserExtra_results = UserExtra.objects.using(settings.SLAVE_DB_NAME).filter(
user_id=reply['user_id']).values('is_shadow')
if UserExtra_results.count() > 0:
if not UserExtra_results[0]['is_shadow']:
datetime_list.append(reply['create_time'])
if len(datetime_list) > 0:
dt = max(datetime_list)
return datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
else:
dt = self.create_time
return datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
else:
dt = self.create_time
return datetime.datetime(dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second,
tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
except:
return datetime.datetime(1980, 1, 1, 0, 0, 0, tzinfo=datetime.timezone(datetime.timedelta(hours=8)))
class TopicComplaint(models.Model):
"""帖子举报"""
......@@ -227,7 +351,8 @@ class TopicComplaint(models.Model):
id = models.IntegerField(verbose_name=u'ID', primary_key=True)
user_id = models.BigIntegerField(verbose_name=u'用户ID', db_index=True)
topic = models.ForeignKey(
Topic, verbose_name=u"关联的帖子", null=True, blank=True, default=None, on_delete=models.CASCADE, related_name='complaints')
Topic, verbose_name=u"关联的帖子", null=True, blank=True, default=None, on_delete=models.CASCADE,
related_name='complaints')
is_online = models.BooleanField(verbose_name=u"是否有效", default=True)
......@@ -244,8 +369,8 @@ class PictorialTopic(models.Model):
pictorial_id = models.BigIntegerField(verbose_name=u'画报ID')
topic_id = models.BigIntegerField(verbose_name=u'帖子ID')
is_online = models.BooleanField(verbose_name=u"是否有效", default=True)
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
class TopicExtra(models.Model):
......@@ -257,5 +382,73 @@ class TopicExtra(models.Model):
db_table = 'topic_extra'
id = models.IntegerField(verbose_name=u'ID', primary_key=True)
topic_id = models.IntegerField(verbose_name=u"帖子ID",db_index=True)
topic_id = models.IntegerField(verbose_name=u"帖子ID", db_index=True)
virtual_vote_num = models.IntegerField(verbose_name="帖子虚拟点赞")
class TopicHomeRecommend(models.Model):
"""运营位帖子"""
class Meta:
verbose_name = "运营位帖子"
db_table = "topic_home_recommend"
id = models.IntegerField(verbose_name=u"id", primary_key=True)
topic_id = models.IntegerField(verbose_name=u"帖子ID")
is_online = models.BooleanField(verbose_name=u'是否上线')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
class TopicBillBoard(models.Model):
"""榜单投票"""
class Meta:
verbose_name = "榜单投票"
db_table = "topic_vote_cnt"
topic_id = models.IntegerField(verbose_name=u"帖子ID")
pictorial_id = models.IntegerField(verbose_name=u"榜单ID")
real_vote_cnt = models.IntegerField(verbose_name=u"真实赞")
virt_vote_cnt = models.IntegerField(verbose_name=u"虚拟赞")
class Reply(models.Model):
"""帖子回复"""
class Meta:
verbose_name = "帖子回复"
db_table = "reply"
topic_id = models.IntegerField(verbose_name=u"帖子ID")
user_id = models.IntegerField(verbose_name=u'用户ID')
create_time = models.DateTimeField(verbose_name=u'创建时间')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
is_online = models.BooleanField(verbose_name=u'是否上线')
pictorial_id = models.IntegerField(verbose_name=u"榜单ID")
class CommunityTopicProduct(models.Model):
"""帖子商品信息"""
class Meta:
verbose_name = "帖子商品信息"
db_table = "community_topic_product"
topic_id = models.IntegerField(verbose_name=u"帖子ID")
product_id = models.IntegerField(verbose_name=u'商品ID')
create_time = models.DateTimeField(verbose_name=u'创建时间')
is_deleted = models.BooleanField(verbose_name=u'是否删除')
class CommunityCategoryTagRelation(models.Model):
class Meta:
verbose_name = u"标签分类对应关系"
db_table = "community_category_tag_relation"
id = models.IntegerField(primary_key=True, verbose_name=u"主键ID")
is_deleted = models.BooleanField(verbose_name=u"是否删除")
is_online = models.BooleanField(verbose_name=u"是否上线")
tag_id = models.IntegerField(verbose_name=u"标签ID")
category_tag_id = models.IntegerField(verbose_name=u"标签类型ID")
create_time = models.DateTimeField(verbose_name=u'创建时间', default=datetime.datetime.fromtimestamp(0))
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
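# A minimal sketch of the UTC+8 normalization that Topic.get_latest_reply_date
# above repeats three times: rebuild a naive datetime as an aware one with a
# fixed +08:00 offset. The helper name is illustrative, not from the source.
CST = datetime.timezone(datetime.timedelta(hours=8))

def to_cst(dt):
    # Drop microseconds and attach the fixed offset, mirroring the inline code.
    return datetime.datetime(dt.year, dt.month, dt.day, dt.hour,
                             dt.minute, dt.second, tzinfo=CST)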
......@@ -16,6 +16,8 @@ from .tag import AccountUserTag
from .topic import Topic
from .user_extra import UserExtra
from .pictorial import PictorialFollow
class User(models.Model):
class Meta:
......@@ -35,9 +37,10 @@ class User(models.Model):
update_time = models.DateTimeField(verbose_name=u'更新时间', default=datetime.datetime.fromtimestamp(0))
@classmethod
def get_user_nick_name(cls, user_id):
try:
nick_name = User.objects.using(settings.SLAVE_DB_NAME).filter(user_id=user_id).values_list("nick_name").first()
nick_name = User.objects.using(settings.SLAVE_DB_NAME).filter(user_id=user_id).values_list(
"nick_name").first()
return nick_name[0]
except:
......@@ -47,7 +50,8 @@ class User(models.Model):
def get_is_recommend_flag(self):
is_shadow = False
is_recommend = False
query_sql = UserExtra.objects.using(settings.SLAVE_DB_NAME).filter(user_id=self.user_id, is_deleted=False,
is_online=True)
for record in query_sql:
is_recommend = record.is_recommend
is_shadow = record.is_shadow
......@@ -58,8 +62,9 @@ class User(models.Model):
latest_topic_time_val = -1
# 获取该用户最新发帖时间
topic_records = Topic.objects.using(settings.SLAVE_DB_NAME).filter(user_id=self.user_id).order_by("-update_time").values_list("update_time",
flat=True).first()
topic_records = Topic.objects.using(settings.SLAVE_DB_NAME).filter(user_id=self.user_id).order_by(
"-update_time").values_list("update_time",
flat=True).first()
if topic_records:
tzlc_topic_update_time = tzlc(topic_records)
latest_topic_time_val = int(time.mktime(tzlc_topic_update_time.timetuple()))
......@@ -75,7 +80,8 @@ class User(models.Model):
logging.info("get follow_user_id_list :%s" % follow_user_id_list)
for i in range(0, len(follow_user_id_list), 1000):
logging.info("get follow_user_id_list :%s" % follow_user_id_list[i:i + 1000])
sql_data_list = User.objects.using(settings.SLAVE_DB_NAME).filter(
user_id__in=follow_user_id_list[i:i + 1000])
for detail_data in sql_data_list:
item = {
"user_id": detail_data.user_id,
......@@ -88,7 +94,8 @@ class User(models.Model):
def get_attention_group_id_list(self):
try:
attention_group_id_list = list()
query_results = GroupUserRole.objects.using(settings.SLAVE_DB_NAME).filter(is_online=True,
user_id=self.user_id)
for item in query_results:
item_dict = {
"group_id": item.group_id,
......@@ -104,15 +111,16 @@ class User(models.Model):
def get_attention_pictorial_id_list(self):
try:
attention_pictorial_id_list = list()
query_results = PictorialFollow.objects.using(settings.SLAVE_DB_NAME).filter(is_online=True,
user_id=self.user_id)
logging.info("get PictorialFollow:%s" % query_results)
for item in query_results:
item_dict = {
"pictorial_id": item.pictorial_id,
"update_time_val": time.mktime(tzlc(item.update_time).timetuple())
}
attention_pictorial_id_list.append(item_dict)
logging.info("get user_id:%s" %self.user_id)
logging.info("get user_id:%s" % self.user_id)
logging.info("get same_pictorial_user_id_list:%s" % attention_pictorial_id_list)
......@@ -149,25 +157,29 @@ class User(models.Model):
def get_same_pictorial_user_id_list(self):
# todo 有的同组数据过大,导致celery cpu过高,暂时限制同组的数据大小,后续可能会去掉同组的数据
same_pictorial_user_id_list = list()
pictorial_items_list = list(
PictorialFollow.objects.using(settings.SLAVE_DB_NAME).filter(user_id=self.user_id).values_list(
"pictorial_id", flat=True))
for pictorial_id in pictorial_items_list:
user_items_list = list(
PictorialFollow.objects.using(settings.SLAVE_DB_NAME).filter(pictorial_id=pictorial_id).values_list(
"user_id", flat=True))
for user_id in user_items_list:
same_pictorial_user_id_list.append(user_id)
if len(same_pictorial_user_id_list) >= 100:
break
if len(same_pictorial_user_id_list) >= 100:
break
logging.info("get same user_id:%s"%self.user_id)
logging.info("get same_pictorial_user_id_list:%s"%same_pictorial_user_id_list)
logging.info("get same user_id:%s" % self.user_id)
logging.info("get same_pictorial_user_id_list:%s" % same_pictorial_user_id_list)
same_pictorial_detail_list = list()
for i in range(0, len(same_pictorial_user_id_list), 200):
sql_data_list = User.objects.using(settings.SLAVE_DB_NAME).filter(
user_id__in=same_pictorial_user_id_list[i:i + 200])
for detail_data in sql_data_list:
item = {
"user_id": detail_data.user_id,
......@@ -181,7 +193,8 @@ class User(models.Model):
try:
user_tag_id_list = list()
query_results = AccountUserTag.objects.using(settings.SLAVE_DB_NAME).filter(user=self.user_id,
is_deleted=False)
for item in query_results:
user_tag_id_list.append(item.tag_id)
......@@ -189,3 +202,26 @@ class User(models.Model):
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_user_useful_tag_id_list(self):
try:
user_useful_tag_id_list = list()
query_results = AccountUserTag.objects.using(settings.SLAVE_DB_NAME).filter(user=self.user_id,
is_body_esthetics=1,
is_deleted=False)
for item in query_results:
user_useful_tag_id_list.append(item.tag_id)
return user_useful_tag_id_list
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return []
def get_topic_number(self):
try:
number = Topic.objects.filter(user_id=self.user_id).count()
return number
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0
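# A minimal sketch of the chunked user_id__in pattern used throughout the User
# methods above; the slice bound must equal the range() step so batches neither
# overlap nor skip (the fix applied in get_same_pictorial_user_id_list).
# Assumes User and settings as imported in this file; the name is illustrative.
def iter_users_by_ids(user_ids, chunk_size=200):
    for i in range(0, len(user_ids), chunk_size):
        # One bounded query per chunk keeps the IN clause small.
        for detail_data in User.objects.using(settings.SLAVE_DB_NAME).filter(
                user_id__in=user_ids[i:i + chunk_size]):
            yield detail_data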
......@@ -11,18 +11,20 @@ from libs.es import ESPerform
import elasticsearch
import elasticsearch.helpers
import sys
from libs.cache import redis_client
import copy
from trans2es.models import topic, user, pick_celebrity, group, celebrity, tag, contrast_similar, pictorial, product
from trans2es.utils.user_transfer import UserTransfer
from trans2es.utils.pick_celebrity_transfer import PickCelebrityTransfer
from trans2es.utils.group_transfer import GroupTransfer
from trans2es.utils.topic_transfer import TopicTransfer
from trans2es.utils.excellect_topic_transfer import ExcellectTopicTransfer
from trans2es.utils.pictorial_transfer import PictorialTransfer
from trans2es.utils.celebrity_transfer import CelebrityTransfer
from trans2es.utils.tag_transfer import TagTransfer
from trans2es.utils.contrast_similar_transfer import Contrast_Similar_Transfer
from trans2es.utils.product_transfer import ProductTransfer
__es = None
......@@ -62,6 +64,7 @@ class TypeInfo(object):
self.round_insert_chunk_size = round_insert_chunk_size
self.round_insert_period = round_insert_period
self.logic_database_id = logic_database_id
self.physical_topic_star = "physical:topic_star"
@property
def query(self):
......@@ -81,7 +84,7 @@ class TypeInfo(object):
def bulk_get_data(self, instance_iterable):
data_list = []
# 4星以上帖子单独索引
topic_data_high_star_list = list()
if self.batch_get_data_func:
......@@ -139,11 +142,38 @@ class TypeInfo(object):
pk,
))
else:
if data:
if self.type == "topic":
ori_topic_star = redis_client.hget(self.physical_topic_star, data["id"])
if ori_topic_star:
ori_topic_star = str(ori_topic_star, encoding="utf-8")
if not ori_topic_star:
redis_client.hset(self.physical_topic_star, data["id"], data["content_level"])
else:
int_ori_topic_star = int(ori_topic_star)
if int_ori_topic_star != data["content_level"]:
old_data = copy.deepcopy(data)
old_data["is_online"] = False
old_data["is_deleted"] = True
old_data["content_level"] = int_ori_topic_star
old_data["is_history"] = True
data_list.append(old_data)
if int_ori_topic_star >= 4:
topic_data_high_star_list.append(old_data)
redis_client.hset(self.physical_topic_star, data["id"], data["content_level"])
if data["content_level"] and int(data["content_level"]) >= 4:
topic_data_high_star_list.append(data)
elif self.type == "tag" or self.type == "tag_v1":
(res, begin_res) = data
data_list.append(res)
data_list.append(begin_res)
else:
data_list.append(data)
return (data_list, topic_data_high_star_list)
def elasticsearch_bulk_insert_data(self, sub_index_name, data_list, es=None):
......@@ -186,29 +216,37 @@ class TypeInfo(object):
else:
qs = self.model.objects.all()
end = time.time()
time0 = end - begin
begin = time.time()
instance_list = qs.filter(pk__in=pk_list)
end = time.time()
time1 = end - begin
begin = time.time()
data_list, topic_data_high_star_list = self.bulk_get_data(instance_list)
end = time.time()
time2 = end - begin
begin = time.time()
logging.info("get sub_index_name:%s"%sub_index_name)
logging.info("get data_list:%s"%data_list)
# logging.info("get sub_index_name:%s"%sub_index_name)
# logging.info("get data_list:%s"%data_list)
self.elasticsearch_bulk_insert_data(
sub_index_name=sub_index_name,
data_list=data_list,
es=es,
)
if sub_index_name == "topic":
self.elasticsearch_bulk_insert_data(
sub_index_name="topic-star-routing",
data_list=data_list,
es=es,
)
# 同时写4星及以上的帖子
if len(topic_data_high_star_list) > 0:
self.elasticsearch_bulk_insert_data(
sub_index_name="topic-high-star",
data_list=topic_data_high_star_list,
......@@ -216,9 +254,8 @@ class TypeInfo(object):
)
end = time.time()
time3 = end - begin
logging.info("duan add,insert_table_by_pk_list time cost:%ds,%ds,%ds,%ds" % (time0, time1, time2, time3))
def insert_table_chunk(self, sub_index_name, table_chunk, es=None):
try:
......@@ -240,7 +277,7 @@ class TypeInfo(object):
auto_create_index=True
)
logging.info("es_helpers_bulk,sub_index_name:%s,data_list len:%d" % (sub_index_name,len(data_list)))
logging.info("es_helpers_bulk,sub_index_name:%s,data_list len:%d" % (sub_index_name, len(data_list)))
stage_3_time = time.time()
end_clock = time.clock()
......@@ -274,6 +311,16 @@ def get_type_info_map():
return _get_type_info_map_result
type_info_list = [
TypeInfo(
name='topic-star',
type='topic-star',
model=topic.Topic,
query_deferred=lambda: topic.Topic.objects.all().query, # 假的
get_data_func=TopicTransfer.get_topic_data, # 假的
bulk_insert_chunk_size=100,
round_insert_chunk_size=5,
round_insert_period=2,
),
TypeInfo(
name='topic-star-routing',
type='topic-star-routing',
......@@ -294,6 +341,16 @@ def get_type_info_map():
round_insert_chunk_size=5,
round_insert_period=2,
),
TypeInfo(
name='excellect-topic', # 优质帖子
type='excellect-topic',
model=topic.ExcellentTopic,
query_deferred=lambda: topic.ExcellentTopic.objects.all().query,
get_data_func=ExcellectTopicTransfer.get_excellect_topic_data,
bulk_insert_chunk_size=100,
round_insert_chunk_size=5,
round_insert_period=2,
),
TypeInfo(
name='topic', # 日记
type='topic',
......@@ -369,6 +426,26 @@ def get_type_info_map():
round_insert_chunk_size=5,
round_insert_period=2,
),
TypeInfo(
name="tag_v1", # 标签
type="tag_v1",
model=tag.Tag,
query_deferred=lambda: tag.Tag.objects.all().query,
get_data_func=TagTransfer.get_tag_data,
bulk_insert_chunk_size=100,
round_insert_chunk_size=5,
round_insert_period=2,
),
TypeInfo(
name="tag-name", # 标签名字
type="tag-name",
model=tag.Tag,
query_deferred=lambda: tag.Tag.objects.all().query,
get_data_func=TagTransfer.get_tag_name_data,
bulk_insert_chunk_size=100,
round_insert_chunk_size=5,
round_insert_period=2,
),
TypeInfo(
name='contrast_similar', # facesimilar
type='contrast_similar',
......@@ -388,8 +465,27 @@ def get_type_info_map():
bulk_insert_chunk_size=100,
round_insert_chunk_size=5,
round_insert_period=2,
),
# TypeInfo(
# name="account_user_tag", # 用户标签
# type="account_user_tag",
# model=pictorial.Pictorial,
# query_deferred=lambda: pictorial.Pictorial.objects.all().query,
# get_data_func=PictorialTransfer.get_poctorial_data,
# bulk_insert_chunk_size=100,
# round_insert_chunk_size=5,
# round_insert_period=2,
# )
TypeInfo(
name="product", # 商品
type="product",
model=product.CommodityProduct,
query_deferred=lambda: product.CommodityProduct.objects.all().query,
get_data_func=ProductTransfer.get_product_data,
bulk_insert_chunk_size=100,
round_insert_chunk_size=5,
round_insert_period=2,
)
]
type_info_map = {
......@@ -399,4 +495,3 @@ def get_type_info_map():
_get_type_info_map_result = type_info_map
return type_info_map
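# A minimal sketch of the redis-hash star versioning in TypeInfo.bulk_get_data
# above: keep the last indexed content_level per topic, and when it changes,
# emit a tombstone copy of the old document before the current one. Assumes
# redis_client from libs.cache; the function name is illustrative.
def star_change_tombstone(data, hash_key="physical:topic_star"):
    old = redis_client.hget(hash_key, data["id"])
    redis_client.hset(hash_key, data["id"], data["content_level"])
    if old is None:
        return None
    old_level = int(old)  # redis returns bytes; int() accepts them directly
    if old_level == int(data["content_level"]):
        return None
    # Mark the stale document offline/deleted so the old star index drops it.
    return dict(data, is_online=False, is_deleted=True,
                content_level=old_level, is_history=True)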
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
import traceback
from libs.tools import tzlc
import time
import re
import datetime
from trans2es.models.user import User
from trans2es.models.topic import Topic
from trans2es.utils.topic_transfer import TopicTransfer
class ExcellectTopicTransfer(object):
@classmethod
def get_excellect_topic_data(cls, instance):
try:
topic_id = instance.topic_id
is_online = instance.is_online
is_deleted = instance.is_deleted
res = None
if is_online and not is_deleted:
topic_ins = Topic.objects.filter(id=topic_id).first()
if topic_ins:
res = TopicTransfer.get_topic_data(topic_ins, is_excellect=True)
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return None
......@@ -5,6 +5,7 @@ import sys
import logging
import traceback
from libs.tools import tzlc
from trans2es.models.topic import Topic
class PictorialTransfer(object):
......@@ -12,6 +13,32 @@ class PictorialTransfer(object):
def __init__(self):
pass
@classmethod
def get_offline_score(cls, instance, topic_id_list):
try:
total_offline_score = 0
topic_image_num = 0
for topic_id in topic_id_list:
topic_image_num += Topic.get_topic_image_num(topic_id)
if topic_image_num >= 6 and topic_image_num <= 10:
total_offline_score += 30
elif topic_image_num > 10 and topic_image_num <= 20:
total_offline_score += 60
elif topic_image_num > 20 and topic_image_num <= 50:
total_offline_score += 80
elif topic_image_num > 50:
total_offline_score += 100
total_offline_score += instance.add_score
if instance.is_recommend:
total_offline_score += 100
return total_offline_score
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return 0
@classmethod
def get_poctorial_data(cls, instance):
try:
......@@ -33,11 +60,22 @@ class PictorialTransfer(object):
tzlc_update_time = tzlc(update_time)
res["update_time"] = tzlc_update_time
res["high_quality_topic_num"] = instance.get_high_quality_topic_num()
tag_id_list = instance.get_tag_by_id()
res["tag_id"] = tag_id_list
res["tag_name"] = instance.get_tag_by_name(tag_id_list)
res["topic_id_list"] = instance.get_topic_id()
res["effective"] = instance.get_effective(res["topic_id_list"])
res["offline_score"] = cls.get_offline_score(instance, res["topic_id_list"])
res["is_default"] = instance.is_default
res["is_cover"] = instance.get_is_cover(res["topic_id_list"])
res["topic_vote_number"] = instance.get_topic_vote_number()
res["activity_join"] = instance.get_activity_join()
res["latest_real_reply_time"] = instance.get_latest_real_reply_time()
res["latest_real_topic_time"] = instance.get_latest_real_topic_time()
res["real_user_activate_time"] = instance.get_real_user_activate_time()
res["edit_tag_id"] = instance.get_edit_tag_id()
res["edit_tag_name"] = instance.get_edit_tag_name(res["edit_tag_id"])
logging.info("get data:%s" % res)
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......
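# The image-count banding in PictorialTransfer.get_offline_score above,
# restated as a pure function over the summed image count (a sketch for
# readability; boundaries copied from the code: 6-10 -> 30, 11-20 -> 60,
# 21-50 -> 80, more than 50 -> 100, otherwise 0).
def image_num_score(topic_image_num):
    if 6 <= topic_image_num <= 10:
        return 30
    if 10 < topic_image_num <= 20:
        return 60
    if 20 < topic_image_num <= 50:
        return 80
    if topic_image_num > 50:
        return 100
    return 0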
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import logging
import traceback
import time
from libs.tools import tzlc
from pypinyin import lazy_pinyin
class ProductTransfer(object):
@classmethod
def get_product_data(cls, instance):
try:
res = dict()
res["id"] = instance.id
res["is_online"] = instance.is_online
res["is_deleted"] = instance.is_deleted
res["create_time"] = tzlc(instance.create_time)
res["update_time"] = tzlc(instance.update_time)
res["create_time_val"] = int(time.mktime(instance.create_time.timetuple()))
res["update_time_val"] = int(time.mktime(instance.update_time.timetuple()))
res["price"] = instance.price
res["cn_name_sort"] = ''
for i in lazy_pinyin(instance.cn_name):
res["cn_name_sort"] += str(i[0])
res["cn_name_pre"] = instance.cn_name
res["en_name_pre"] = instance.en_name
res["alias"] = instance.alias
res["cn_name"] = instance.cn_name
res["en_name"] = instance.en_name
res["alias_pre"] = instance.alias
res['description'] = instance.description
res["have_image"] = True if instance.image else False
res["comment_nums"] = instance.comment_nums
result_name = instance.get_brand_name()
if result_name:
res["brand_cn_name"] = result_name.get("cn_name", "")
res["brand_en_name"] = result_name.get("en_name", "")
res["brand_alias"] = result_name.get("alias", "")
res["brand_cn_name_pre"] = result_name.get("cn_name", "")
res["brand_en_name_pre"] = result_name.get("en_name", "")
res["brand_alias_pre"] = result_name.get("alias", "")
else:
res["brand_cn_name"] = ""
res["brand_en_name"] = ""
res["brand_alias"] = ""
res["brand_cn_name_pre"] = ""
res["brand_en_name_pre"] = ""
res["brand_alias_pre"] = ""
category_cn_name = instance.get_category_en_name()
if category_cn_name:
res["category_cn_name"] = category_cn_name
res["category_cn_name_pre"] = category_cn_name
else:
res["category_cn_name"] = []
res["category_cn_name_pre"] = []
effect_cn_name = instance.get_effect_cn_name()
if effect_cn_name:
res['effect_cn_name'] = effect_cn_name
res['effect_cn_name_pre'] = effect_cn_name
else:
res["effect_cn_name"] = []
res["effect_cn_name_pre"] = []
logging.info("get product:%s" % res)
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return dict()
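# A minimal sketch of the cn_name_sort construction above: lazy_pinyin yields
# one pinyin syllable per character, and the first letter of each syllable is
# concatenated into a crude alphabetic sort key. The helper name is
# illustrative, not from the source.
from pypinyin import lazy_pinyin

def pinyin_sort_key(cn_name):
    # e.g. lazy_pinyin(u"口红") -> ["kou", "hong"] -> "kh"
    return "".join(syllable[0] for syllable in lazy_pinyin(cn_name) if syllable)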
......@@ -8,59 +8,78 @@ import logging
import traceback
from libs.tools import tzlc
from trans2es.models.topic import Topic
from trans2es.models.tag import TopicTag, CommunityTagType, CommunityTagTypeRelation
import datetime
from django.conf import settings
import copy
class TagTransfer(object):
@classmethod
def get_tag_name_data(cls, instance):
try:
res = dict()
res["name"] = instance.name
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return dict()
@classmethod
def get_tag_data(cls, instance):
try:
res = dict()
res["id"] = instance.id
begin_tag_name_terms_list = list()
tag_name_terms_list = list()
for i in range(len(instance.name)):
for j in range(i, len(instance.name) + 1):
name_term = instance.name[i:j].strip()
if name_term:
if i == 0:
begin_tag_name_terms_list.append(name_term.lower())
else:
tag_name_terms_list.append(name_term.lower())
res["suggest"] = {
"input":tag_name_terms_list,
"weight": 1,
"contexts":{
"is_online": [instance.is_online],
"is_deleted": [instance.is_deleted]
}
}
res["name"] = instance.name
res["name_pre"] = instance.name
res["is_online"] = instance.is_online
res["is_deleted"] = instance.is_deleted
topic_num = 0
res["near_new_topic_num"] = topic_num
if instance.is_online and not instance.is_deleted:
topic_id_list = list()
sql_result_results = list(TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(
tag_id=instance.id).values_list("topic_id", "is_online"))
for topic_id, is_online in sql_result_results:
if is_online:
topic_id_list.append(topic_id)
time_base_val = datetime.datetime.strftime(datetime.datetime.now() + datetime.timedelta(-7), "%Y-%m-%d")
for topic_begin_index in range(0, len(topic_id_list), 100):
cur_topic_num = Topic.objects.using(settings.SLAVE_DB_NAME).filter(
id__in=topic_id_list[topic_begin_index:topic_begin_index + 100],
create_time__gte=time_base_val).count()
topic_num += cur_topic_num
res["near_new_topic_num"] = topic_num
tag_type_sql_list = CommunityTagTypeRelation.objects.using(settings.SLAVE_DB_NAME).filter(
tag_id=instance.id).values_list("tag_type_id", flat=True)
tag_type_list = list()
for tag_type_id in tag_type_sql_list:
tag_type_list.append(tag_type_id)
......@@ -69,7 +88,13 @@ class TagTransfer(object):
res["collection"] = instance.collection
res["is_ai"] = instance.is_ai
res["is_own"] = instance.is_own
begin_res = copy.deepcopy(res)
begin_res["id"] = begin_res["id"] + 600000000
begin_res["suggest"]["input"] = begin_tag_name_terms_list
begin_res["suggest"]["weight"] = 10
return (res, begin_res)
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
return (dict(), dict())
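# A minimal sketch of querying the suggest documents built above with an
# elasticsearch completion suggester filtered by the same contexts. The index
# name, prefix, and connection details here are assumptions, not taken from
# the source.
import elasticsearch

es = elasticsearch.Elasticsearch()  # connection details are an assumption
suggest_body = {
    "suggest": {
        "tag_suggest": {
            "prefix": u"口红",
            "completion": {
                "field": "suggest",
                "contexts": {"is_online": [True], "is_deleted": [False]},
            },
        }
    }
}
result = es.search(index="tag", body=suggest_body)  # index name is an assumption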
......@@ -9,11 +9,13 @@ import time
import re
import datetime
from trans2es.models.user import User
from trans2es.models.topic import ExcellentTopic, TopicHomeRecommend
class TopicTransfer(object):
@classmethod
def get_topic_data(cls, instance, is_excellect=False):
try:
res = dict()
......@@ -28,6 +30,10 @@ class TopicTransfer(object):
res["content_level"] = instance.content_level
res["user_id"] = instance.user_id
res["user_nick_name"] = User.get_user_nick_name(instance.user_id)
try:
res["user_nick_name_pre"] = res["user_nick_name"].lower()
except:
res["user_nick_name_pre"] = ""
if instance.group:
res["group_id"] = instance.group.id
......@@ -42,25 +48,24 @@ class TopicTransfer(object):
begin = time.time()
res["pick_id_list"] = instance.get_pick_id_info()
end = time.time()
time0 = (end - begin)
begin = time.time()
(topic_tag_id_list, edit_tag_id_list) = instance.get_topic_tag_id_list()
res["tag_list"] = topic_tag_id_list
res["edit_tag_list"] = edit_tag_id_list
res["edit_tag_list"] = instance.get_edit_tag_id_list()
end = time.time()
time1 = (end - begin)
begin = time.time()
res["tag_name_list"] = instance.get_tag_name_list(res["tag_list"])
end = time.time()
time2 = (end - begin)
begin = time.time()
res["offline_score"] = instance.get_topic_offline_score()
end = time.time()
time3 = (end - begin)
begin = time.time()
res["manual_score"] = instance.drop_score
......@@ -68,8 +73,16 @@ class TopicTransfer(object):
res["has_video"] = instance.has_video
res["language_type"] = instance.language_type
end = time.time()
time4 = (end - begin)
# begin = time.time()
topic_useful_tag_id_list = instance.get_topic_useful_tag_id_list()
res["useful_tag_list"] = topic_useful_tag_id_list
latest_reply_time = instance.get_latest_reply_date()
res["latest_reply_time"] = latest_reply_time
# end = time.time()
# time5 = (end - begin)
# # 片假名
# re_jp_pian_words = re.compile(u"[\u30a0-\u30ff]+")
# m_pian = re_jp_pian_words.search(instance.content, 0)
......@@ -89,7 +102,7 @@ class TopicTransfer(object):
res["virtual_content_level"] = instance.virtual_content_level
res["like_num_crawl"] = instance.like_num_crawl
res["comment_num_crawl"]= instance.comment_num_crawl
res["comment_num_crawl"] = instance.comment_num_crawl
res["is_crawl"] = instance.is_crawl
res["platform"] = instance.platform
......@@ -103,7 +116,6 @@ class TopicTransfer(object):
res["create_time"] = tzlc_create_time
res["create_time_val"] = int(time.mktime(tzlc_create_time.timetuple()))
update_time = instance.update_time
tzlc_update_time = tzlc(update_time)
res["update_time"] = tzlc_update_time
......@@ -111,7 +123,28 @@ class TopicTransfer(object):
res["total_vote_num"] = instance.get_virtual_vote_num() + instance.vote_num
logging.info("test topic transfer time cost,time0:%d,time1:%d,time2:%d,time3:%d,time4:%d" % (time0,time1,time2,time3,time4))
if is_excellect:
res["is_excellent"] = 1
else:
# excelllect_object = ExcellentTopic.objects.filter(topic_id=instance.id).first()
# if excelllect_object and excelllect_object.is_online and not excelllect_object.is_deleted:
is_excellent = instance.judge_if_excellent_topic(instance.id)
if is_excellent:
res["is_excellent"] = 1
else:
res["is_excellent"] = 0
res["is_operation_home_recommend"] = False
operation_home_recommend = TopicHomeRecommend.objects.filter(topic_id=instance.id).first()
if operation_home_recommend and operation_home_recommend.is_online and not operation_home_recommend.is_deleted:
res["is_operation_home_recommend"] = True
logging.info("test topic transfer time cost,time0:%d,time1:%d,time2:%d,time3:%d,time4:%d" % (
time0, time1, time2, time3, time4))
# 榜单关联的投票
res["related_billboard"] = instance.get_related_billboard()
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......
......@@ -21,8 +21,9 @@ class UserTransfer(object):
follow_user_detail_list = list()
for i in range(0, len(follow_user_id_list), 1000):
sql_data_list = User.objects.using(settings.SLAVE_DB_NAME).filter(
user_id__in=follow_user_id_list[i:i + 1000], is_online=True,
is_deleted=False)
for detail_data in sql_data_list:
item = {
"user_id": detail_data.user_id,
......@@ -40,13 +41,15 @@ class UserTransfer(object):
res["id"] = instance.id
res["user_id"] = instance.user_id
res["nick_name"] = instance.nick_name
res["nick_name_pre"] = instance.nick_name
res["nick_pre"] = instance.nick_name
res["profile_pic"] = instance.profile_pic
res["gender"] = instance.gender
res["city_id"] = instance.city_id
res["country_id"] = instance.country_id
res["is_online"] = instance.is_online
res["is_deleted"] = instance.is_deleted
res["count_topic"] = instance.get_topic_number()
try:
(is_recommend, is_shadow) = instance.get_is_recommend_flag()
res["is_recommend"] = is_recommend
......@@ -73,6 +76,7 @@ class UserTransfer(object):
try:
res["tag_list"] = instance.get_user_tag_id_list()
res["useful_tag_list"] = instance.get_user_useful_tag_id_list()
res["attention_user_id_list"] = cls.get_follow_user_id_list(userInstance=instance)
# res["attention_group_id_list"] = instance.get_attention_group_id_list()
......@@ -94,7 +98,6 @@ class UserTransfer(object):
res["attention_pictorial_id_list"] = []
res["same_pictorial_user_id_list"] = []
return res
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
......
import time
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, click, time_convs
from vest.request.auto_request import host, user, db, passwd
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT distinct(user_id),id FROM topic WHERE is_online=1 and content_level in (4,5,6) and (create_time >= '%s' and create_time <= '%s')" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
def one_seven_star_topic():
try:
numtime, numtime2 = time_convs(7, 1)
user_id = get_data(numtime, numtime2)
logging.info("get user_id:%s" % user_id)
for i in user_id:
rand_num = random.randint(0, 1)
if rand_num == 1:
cook = login()
if cook is not None:
click(cook, i[1])
except:
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, click, time_conv
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
logging.info("get numtime:%s" % numtime)
logging.info("get numtime2:%s" % numtime2)
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time >= '%s' and create_time <= '%s')" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
logging.info("get topic_id:%s" % topic_id)
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
pass
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def true_click_five():
try:
logging.info("click five")
numtime, numtime2 = time_conv(51, 41)
user_id = get_data(numtime, numtime2)
for i in user_id:
rand_num = random.randint(0, 1)
if rand_num == 1:
cook = login()
if cook is not None:
click(cook, i[1])
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, click, time_conv
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
logging.info("get numtime:%s" % numtime)
logging.info("get numtime2:%s" % numtime2)
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time >= '%s' and create_time <= '%s')" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
logging.info("get topic_id:%s" % topic_id)
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
pass
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def true_click_four():
try:
logging.info("click four")
numtime, numtime2 = time_conv(47, 37)
user_id = get_data(numtime, numtime2)
for i in user_id:
rand_num = random.randint(0, 1)
if rand_num == 1:
cook = login()
if cook is not None:
click(cook, i[1])
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import traceback
import logging
from vest.request.auto_request import login, click, time_conv
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
logging.info("get numtime:%s" % numtime)
logging.info("get numtime2:%s" % numtime2)
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time >= '%s' and create_time <= '%s')" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
logging.info("get topic_id:%s" % topic_id)
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
logging.info("get user_id:%s" % user_id)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def true_click_one():
try:
numtime, numtime2 = time_conv(5, 0)
user_id = get_data(numtime, numtime2)
for i in user_id:
cook = login()
logging.info("get cook:%s" % cook)
if cook is not None:
click(cook, i[1])
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, click, time_conv
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
logging.info("get numtime:%s" % numtime)
logging.info("get numtime2:%s" % numtime2)
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time >= '%s' and create_time <= '%s')" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
logging.info("get topic_id:%s" % topic_id)
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
pass
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def true_click_three():
try:
logging.info("click three")
numtime, numtime2 = time_conv(45, 35)
user_id = get_data(numtime, numtime2)
for i in user_id:
rand_num = random.randint(0, 1)
if rand_num == 1:
cook = login()
if cook is not None:
click(cook, i[1])
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import time
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, click, time_conv
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
logging.info("get numtime:%s" % numtime)
logging.info("get numtime2:%s" % numtime2)
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time >= '%s' and create_time <= '%s')" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
logging.info("get topic_id:%s" % topic_id)
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
pass
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def true_click_two():
    try:
        numtime, numtime2 = time_conv(27, 17)
        # each row is (user_id, topic_id); click roughly half of them
        topic_rows = get_data(numtime, numtime2)
        for i in topic_rows:
            rand_num = random.randint(0, 1)
            if rand_num == 1:
                cook = login()
                if cook is not None:
                    click(cook, i[1])
    except:
        logging_exception()
        logging.error("catch exception,main:%s" % traceback.format_exc())
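# random.randint(0, 1) == 1 gates each click with probability 1/2. A sketch of
# the same idea generalized to an arbitrary click-through rate (should_click
# and p are hypothetical, not part of the modules above):
import random

def should_click(p=0.5):
    return random.random() < p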
求分享购买方式啦
这个还有其他款式嘛
这个围巾好复古的感觉
有适合古着的饰品推荐嘛
点睛之笔呀
好时尚的单品呢
最好的搭配是饰品
贵嘛
想戴锁骨链没有锁骨呢
丝巾怎么系才好看呢
我和作者有一样的品味呢
好喜欢复古的饰品呢
同款分享啦
太适合这套穿搭啦
想问一下,如何准确选择饰品搭配呢
小姐姐的腰带我要收啦
点赞好评,小姐姐的眼镜想求同款
最爱的腰带款式啦
这也太好看了吧
被小姐姐的配饰吸引
好吸睛的配饰呢
好个性呀
喜欢小姐姐的配饰
这款腰带是几孔的呀
剁手买买买
又要剁手啦
准备双十一入的呢
这个戒指真好看
就是有点贵
看起来很适合你呀
我的皮肤黑怎么办呢
可以多推荐平价好看的饰品嘛
对小饰品有瘾呀
太漂亮啦,闪闪的
我喜欢个性的呢
想要这个定制款的呢
手工最近太火啦
大家都爱手工的吧
好看的,气质挂的呢
又被种草啦
希望有平价替代呢
适合学生党呢
东大门风格的
这对耳环有点好看呢
我最爱的风格啦
戴上饰品瞬间有气质啦
盘他,买买买
比口红都深得我心啊
真是本命饰品呢
\ No newline at end of file
我是干皮,美妆小白一个😂不知道怎么选😁就是想让自己的肤色看上去白一点,本身肤色偏黄,小仙女能不能推荐一款啊谢谢:-)
想要想要想要想要
卧槽也太好看了。。
太好看了!!!想入手!
这个色调透露一种高贵冷艳的气质,也太显白了叭
我加入购物车了!
冲冲冲!!!好爱了喔
妈耶!太好看了吧!黄皮女孩的福音
一直在犹豫买哪个色号结果到最后手里零花钱所剩无几
没用过他家的东西诶,怎么样啊
看了就想买系列
感觉很润的样子
你太美了 唇色好喜欢
妈呀!太干净舒服了吧!果断收藏、转发
用的是什么相机啊!!
颜色看起来还不错,哈哈哈
这个好漂亮啊,我要种草了
这个无敌是最爱了
这个系列超级难卸啊啊啊,我用Mac眼唇卸妆液,擦到脸发麻了还卸不掉
我有这个、不知道是不是买到假货了、感觉颜色久了会很深
好看,很适合日常哇!
我比较想看粉底液的,毕竟底妆真的很重要
想看你试试国货橘朵的唇釉,最近经常看到小姐姐在推
听说这个系列的质地不是很好,不敢下手
瞬间种草了,这个颜色发现也很适合我、买买
想知道夏天干皮用什么底妆,技能持久,又不脱妆
好温柔的颜色!好好看!想看美宝莲才出的小灯管SRD04或56
诶嘛,终于更新了
这个颜色,我先下单了,然后又退……
怎么这么好看(。・ω・。)ノ♡
太好看了吧!!!我喜欢眼影!!!可以推一下吗!!!!
omg!!
妆效感人
哇撒,太美了
很有参考价值
已经种草,这个是真的好用
紧紧捂着钱包看你的帖子哈哈哈哈哈哈
每次看你发的东西,我都超级超级想买呢!!!
好喜欢,可是又是我愿望清单之一,买不起系列愿望清单
嘻嘻嘻码住,最近正在纠结粉底液。
最近真的需要。家里囤货基本没了
好激动!!终于可以出现在前排了呀~~好好看哦~~测评也是超级用心~
我的妈呀被种草啦 要去找代购啦
皮肤状态也太好了吧
这个颜色好好看啊!是什么色啊
她来了她来了,她带着美貌来啦
这个颜色好配你啊!
蹲个色号!
想问一下 你平时定妆都用什么
它家的东西真的强 我还没买过不喜欢的
这太考验化妆技术了,,,
\ No newline at end of file
谁能不爱这颜值
每日看好看的脸回血
你真是生活动力
我太爱你的颜值了
舔屏
长得好看,横着竖着斜着拍都好看
太优越了
优越本人
暴击
盛世美颜
这是什么绝世颜值啊
来品品这个style
来品品这个风格
除了哇我还能说什么
真的是好看呢
这颜值,可真的是顶不住了
咋就这么好看呢
这颜值超级超级棒哦!
你真的好好看
好看到起鸡皮疙瘩
好看的立刻粉了
这个照片不够看的,求更多
太击中我了
诚邀各位欣赏这盛世美颜啊
请问这是什么神仙颜值
颜值真的太高了
好看到合不拢嘴,想要联系方式
哇,一直在感叹
火速存到手机,想拿去做头像
我真的看不够,多发几张来看看
看一万遍还是看不够
破音呐喊,太好看啦
咦,谁在发光哦
发什么都美呆了!
好看的人就是治愈良药
期待你的每天更新
看你就是是生活必备品
每日看一万遍好看的颜值
不愧是你
真的很吃这种颜
不愧是宁
点进来就出不去了
谁看了能不爱
真的喜欢这个风格
对这种风格无法抵抗
我没法冷静了
都给我看!
神仙颜值没错了
我真实的发出尖叫声
我来了!!!这个系列都好看
神仙颜值!摇旗呐喊
对这种颜无法抵抗
颜值大发
是什么神仙颜值呢
当场昏厥
这是什么绝色啊
绝色!!
颜狗来了!
真的可太棒了呢
好看到词穷
随便拍都会好看的
在发光啊
怎么拍都好看啊
这什么颜值!
为你喊破喉咙
什么都做得好
你好看的让我魂牵梦绕
顶不住啊顶不住
想把你的照片偷来当头像
好看到我分分钟昏厥
每一张都想拿去当头像
超超超超美的
颜值模板吧你,太好看了
很想知道滤镜,太好看了吧!
你咋这么好看
疯狂鸡叫
好看的人怎么拍都好看
一秒内爱上
awsl
进来看颜王的照片
美美哒
你怎么这么好看呢
我天,真好看
我的妈!你超美啊!
爱辽
心动辽
你怎么可以这么好看
同款姿势可以拥有,颜值拥有不了了。
可惜国家不分配颜值
长得好看是王道
好看的人随便怎么拍都在这么好看
这个颜值,怕是要在线夺命
谁能不喜欢呢
你简直是就是优越本身
你简直是是优越代名词
你好看到仿佛是来索命的
你又来索命了吗!这个颜值,献上小命
哇惊叹着
你真的是宝藏,太好看了
你是什么宝藏哦,偷偷摸摸关注一下
神仙颜值!!!
你也太好看了吧!!!
你好好看啊,好喜欢你的颜值
你真的好好看!!
哇,你也太好看了
哇,你是什么神仙颜值!
图二超级好看!
图一的角度,非常好看了吧
图一真的太好看了吧
你的颜是我吃的那一挂哎
这颜值是我喜欢的类型了
这颜值是长在我的上审美了
这颜真是长我审美点上了
哇塞,好可爱啊
好可爱啊
喜欢!太可爱了
哇塞,喜欢这个颜值
粉了粉了
我是你的颜值粉
这颜值戳到我了
神仙颜值
是戳我审美点的颜了
是戳我审美点的可爱哇
是戳我审美点的好看哇
喜欢这个发色
脸型好棒
这个鼻子是我的爱
颜值好好看
这个颜值一定是穿什么都好看
想学习这个妆容
是戳我审美点的眼睛了
是戳我审美点的高鼻梁了
是戳我审美点的唇形了
是戳我审美点的眉毛了
五官好精致啊
眼睛真的好看
脸型好棒啊,喜欢这个脸型
鼻子好好看
整体看着好舒服
喜欢这种风格
这构图很好啊
想学习你的拍照技巧
这什么神仙颜值!!!!
人好看还会还会拍
人这么好看还会拍,慕了!
羡慕长得好看的人
羡慕给你拍照的人
颜粉儿来报道
你是我的私藏神仙颜值!
怎么会这么好看的人!
这世上怎么会有这么好看的人呢!
是戳我审美点的发色了
是戳我审美点的发型了
你的头发,眼睛,鼻子,仿佛都长成了我最爱的样子
怎么会有人长得和我喜欢的样子这么像
喜欢你的颜值!!
这是什么神仙颜值诶~
好看到直戳我心
看完你的照片。心跳漏一拍
可以得到你的联系方式吗。
立刻粉了!喜欢这个颜值
等到你的新照片,开心
颜值一级!
请你每天多更几张照片好吗
图片是加滤镜了吗 真好看呀
这个颜值我真的是欲罢不能,看了好几遍
我会告诉你我看了好几遍吗
立刻粉了,为了这个颜值
ღ( ´・ᴗ・` )比心
给你笔芯
在线比心漂亮姐姐
给这个颜值在线打call
给你,我的小心心
请收下我的小心心
请收下我的疯狂笔芯
给你我所有的爱意,喜欢!
怎么会这么好看。老天造人不公平
这颜值,人生肯定特别顺利
为你在线打call
喜欢你的每一张照片!请多更新
可盐可甜的风格!
什么风格都好看!
这无处安放的魅力啊
立刻粉了!!
第一张也太好看了吧!
喜欢喜欢,在线打call
我是你的颜值粉,请多更新好吗
这张和我的同学好像!
我,我,我,可以认识你吗
又好看又会拍
喜欢了!
神仙颜值,每天都想催更新
为这个神仙颜值打call!!!
这个颜值真的是我的最喜欢的那一卦!
平时不发言,但这个颜值我必须说话!
不爱说话的我,必须发言了。喜欢你
这个角度能hold,长得好看果然可以随便拍
这个角度能抗住,这得什么神仙颜值
这个角度还这么好看啊
又好看又会拍照,厉害了
你怎么这么好看
真的太美了我很喜欢!!
你以为你长得美就可以不回我吗
我是新来的,都是神仙照片啊,放图吧,我还扛得住
好好看 我哭 我什么时候可以再好看一点
真的是 喜欢到不行!
好好看呀 我也想拍这么好看的
我对你的爱意值高达百分之两百噢
喜欢各种各样的你  嘿嘿
哇哦 这是谁的神仙大宝贝啊
为什么会有这么好看的人
超爱这种感jio!!!!!
每天看到这么好看的颜,美梦鸭
是我梦想的颜值
完全就是我喜欢的类型吧!
吼吼看呀美颜暴击
总是莫名的 怎么照都好看
人美怎样拍都好看
这是什么神仙颜值 爱liao 爱liao
该有的感觉你都有
不说啥了。只想夸夸这神仙颜值
博主的颜真耐打
都好看到哭啊,小北鼻
第一瞬间就戳中了我
嘤嘤嘤 我都爱死了!!
我要有这么好看我也很爱自拍了
因为好看 所以爱自拍
真的长的超级精致
一直都超喜欢看你的照片哎
不行了 我 你太好看了我要晕倒了
您也是真的hin好看诶
会拍你就多拍点 顺便也多发点哈哈哈哈 可爱^_^
看来老天确实不是公平的
这颜值让我变成了柠檬精
实名制羡慕!
天啦!我第一次看到这么好看的人!!!
到底为什么会这么好看!!!
这也太好看啦叭 一眼沦陷 我对你 就是爱!死!了!
大概是从天而降的仙女吧
太美了 我宣布我已经血空
为什么会有这么好看的人在人间
天啦噜!!!这也太好看了
这谁顶得住啊太好看了!
我希望我在梦里可以长这个样子
这到底是什么仙女了啦
啊啊啊啊啊啊看到这么美的你!俺心情也好好!
看到美女心情就更好啦!!!
甜甜的你 温暖我的心 
请问如何才能像博主一样美
这么美丽冻人直戳人心真的好吗
哭唧唧看这里!!!!好看哭泣
究竟怎么做到这种妆容的 有点爱
你是人间天使吗?
果然人好看怎么拍都好看呢
您真是好看极了!!
请告诉我怎么拍都好看的秘诀
太好看了吧 皮卡皮卡
亲亲 那么好看建议你多拍哦
我真的夸不动了,为什么有这么可爱美丽的女孩纸
真的好美,你到底要让我夸多少遍才够
太好看了8 永远看不够
看你我就觉得可以开心一年接着一年
我的宝藏女孩 啊啊真滴太美了 人间仙女
真的太美了!怎么会有这么美的女孩
这是什么美丽的精灵
嘻嘻,好美一女的,爱了爱了
被天使亲吻过的脸
太美了,我要窒息了
未闻花名 但知其人
太美了!!!一眼沦陷!
爱死了你这个颜值
上辈子肯定拯救了上帝,让你这么好看
哇哇哇,你也太好看了叭
太好看了吧!!救命!!
上帝是不是造你的时候偏心啦!
好了 这个真的美哭了
啊我心中的天使
妈呀,你也太好看了吧
说不出哪里喜欢 就是哪哪都喜欢
求求您 停止散发你的魅力!!!!!
特别有气质,加油
好美啊,初恋的面孔
我天,真的太好看了
看到你,就看到了生活的希望,看到人间还有如此好看的人
本颜粉儿来报道啦
是神颜没错了
为什么可以这么好看
我死了 你为什么这么好看,我就地死亡
画里走出来的叭
今天的您仍然是我的心动嘉宾呦
用来当壁纸真的贼好看
太美了吧我看呆了555
我也要像你一样闪闪发光
这个真的超级美丽了
这是什么散落人间的仙子
我天……这是什么神仙颜值
这神仙颜值,立刻粉了
wei,110吗,这里有人好看到让我想报警诶!
太好看了吧 !哭泣了
好看到爆炸(。・ω・。)ノ♡
\ No newline at end of file
楼主辛苦啦
心动,我要去试试
新技能get√
感觉不错mark一下
翻我翻我~
关注啦O(∩_∩)O
这个好,学习了!
持续好感中
日常夸奖博主
支持一下!
转需留存
种草了!!!
求被小姐姐翻牌!
终于更新了啦~
成功引起了我的注意!
get√
爱你哟~(づ ̄3 ̄)づ╭❤~
我觉得很赞!!
不管怎么样先收藏了
被小姐姐圈粉
期待小姐姐更新
来啦来啦来啦来啦来啦
楼主真的好棒
学习啦!!!
干货满满~
学习新技能中!
码了~谢谢小姐姐
这个必须点赞
默默关注中!
刚刚学了下!
好实用的内容,学习了
被你种草了!
mark啦~
哇,心动\(^o^)/~
太喜欢啦~
新技能出现!
真心喜欢呢~
小姐姐觉得适合我不
真的很适合我了
太适合我了!
幸好关注了
完美~\(^o^)/~
喜欢哦~
火速关注!
迅速关注!
博主看我~
支持
马✧(≖ ◡ ≖✿)
种草!(⊙o⊙)
博主终于更新了
日常求更新!
get!O(∩_∩)O
不错哦!
你辛苦啦~
决定向你学习了!
想试试!
必须m住!
我的天(⊙o⊙)
=w=
确实8错!
很喜欢你发的图
好实用,我也试试看~
pick!!
收藏收藏收藏
太心动了
满满的干货
立即mark了
夸赞你!!
这个真的好
日常表白博主
支持哦~
马一下✧(≖ ◡ ≖✿)
又双叒被种草了
博主你来了
收!!!
不错哎
辛苦啦
好好向楼主学习
尝试一下
点赞收藏+评论,爱你
解锁新技能
mark起来
赶紧赞起来
悄悄关注了
哇,赶紧get起来
干货收藏了
关注关注!
好棒的内容,学习了
心动了呢
真的是干货
果断get
果断马了
wow
立马关注
这个真棒
一会再看
给作者卖个萌
忠实粉丝
看看我吧
支持一下下
马了!!!
又被种草了!
终于等到更新了
超好看~
求更新!
不错呢!
整理这些太辛苦了
值得学习
种草了
哇!!!
比心~
mark住!
好喜欢你~
很棒的内容
看起来好棒
一定要试试了
收藏好了
果断保存
天哪!
很棒
被戳中了
日常夸博主
日常求勾搭
支持一下下哦
Mark
日常求被翻牌
博主我终于等到你了
get到了
今日份催更(1/1)
真不错哦
辛苦整理这些了
实名心动了
腻害
马住!
真是厉害了我的楼主
好棒的推荐
好实用,收藏啦
被安利了
全都是干货呢
立即保存了
真的很合适诶
omg
日常夸赞博主
勾搭
支持一下哦
M一下
又双叒叕被种草了
你终于来了
超级好看
get了
决定去试试
好好研究下
实用诶
666这个牛
真的棒
这个好
这个真的很有用哎
希望能学会
很棒的干货
mark成功
真的赞
我只能赞叹着
实名称赞
表白博主
围观一下
有用的,马一下
又是被安利的一天
涨知识了
终于更新了,好开心
我来催更了
谢谢亲
感谢分享
向楼主大大学习
很妙!
又学会一个新技能
好好好
好用心啊 喜欢
我要把你的帖子分享跟我的朋友
马住有时间再看
圈你啦~
太好用了!
辛苦♪(・ω・)ノ
可以说非常需要了
啾啾
完美安利
喜欢这个
好帖保存
哇哇哇
很需要的干货
nice
我的天太棒了吧
吹爆
看起来很不错
表扬表扬
称赞称赞
日常赞一波博主
占楼围观
有用呢,马一下
美好的一天从种草开始
更新撒花
我又要催更了
哇,很不错呢
学到啦
学习了
mark!mark!mark!
超级棒哦!
很想要学会
真的是有用的干货
mark一下下
太赞了吧
圈粉了
博主真好
前来占楼
有用呢,马
又是种草的一天
学到了!!
终于更新了,快乐
催更来了~
辛苦亲~
努力学习但是不会
看见干货就会收藏
果断马住不带犹豫的
绝对的大发
疯狂推荐给小伙伴
博主真棒!
求勾搭嗷
有用哦,马
小姐姐求翻牌
好,get到了
不错不错~
辛苦大大了
回头有空试试
这个要马住
真实心动了
果断收藏啦~
感觉很不错呢!
日常夸博主!
来支持一下~
存一下!
又双叒被安利了
今日份打卡
哇,get√
辛苦辛苦
努力学习中
不错不错很实用~
收藏起来学习
可以可以
立马来个同款
我也要试试看
给你圈粉啦~
简单有用
种草一波
果断get试试
分享给了朋友
晕,真的好看
真的很棒
满分哦~
真棒!!!
日常夸夸博主
前来支持
又被安利了
等被翻牌
搭配的超美腻
哇,get!
太辛苦了
学到了!
太棒辣!
感谢分享!收
厉害,哈哈哈
好帖好帖,收藏学习啦
存啦,好内容
谢谢分享呢
我来抢沙发了!
向你学习呢!
正在尝试~
收藏好了这个干货
厉害厉害,果断mark
疯狂夸赞
眼前一亮哇!
我来夸一下博主
日常勾搭
火速围观
有用呢,存
行走的种草机
不愧是大大
大赞!!!
厉害厉害~
要好好学习啦
先收藏了回看
pick!
这么棒的内容
日常关注!
火速去试试
真的看起来很不错
给你比个心
夸一波博主
支持一下子
有用,保存
终于等到你
已get!
坐等更新
很不错哦~
大大真的好辛苦
好棒的分享
给你小心心
好实用!!
真的想学习
真爱干货~
感觉很棒!
我酸了,太好看了
我好喜欢
先马一下
夸一下博主
前来支持一下下
有用,存了
终于等到博主了
在线催更
真不错鸭
辛苦了加油!
学习中
必须收藏啊
火速来get
好帖!!!
手动点赞,收藏学习啦
超实用内容马住
喜欢博主的举个爪爪
用心的学习中
试一下同款
比起别的更喜欢干货
立即马住
疯狂表博主!
每日赞叹
我来夸赞一下博主
火速占楼
有用的,马!
新手涨知识了
今日的我被翻牌了吗?没有!
终于终于更新了
太厉害了大大
收藏学习了
get了~
为了封面而来
马不停蹄收藏
最好的推荐,没有之一~
先保存啦,有空会看的
火钳路名
我看了好多遍
夸一夸博主
博主看我一下鸭
前来围观
有用的,存
太厉害了吧
超超超级棒
方法真棒
被种草了!
so cool
超级棒诶
有用有用~
真好,学习
太心动了吧
收藏成功
这个颜我可以!
表白一下博主
围观~
有用,马一下
楼主棒棒哒!
有意思,学习了
吃下安利
哇 果断马住
立刻收藏
想拥有!
吃下这枚安利!
马住回家看
充分的学习啦
准备去试试了
最实用的干货
太喜欢了所以看了很多次
同款get中
最佳!!
火速存下
我来夸一波博主
日常求关注
前排围观
这波安利我吃!
厉害了我的朋友
太喜欢啦
这个方法真不错
收藏学习起来
马住!!!
很不错,学习一下
想去试试
已经收藏好~
点赞并保存
每天看一百遍
表白一波博主
前来支持一下子哦
有用,保存一下
这安利我吃了
求更新啊啊啊
这安利我吃!
楼主太厉害了
不错学习了~
优秀!!!
又学到了一招
先收藏了,有时间一定要试试~
好精彩的内容
正在努力学习
真的很了不起呢
赞赞!!
给你点个赞
给你点赞
看见这个帖子好幸运哦
感谢这个帖子哦
赞一下博主
支持一下子哦
有用,保存了
种草惹~
棒!!!
真棒啊
真不错啊
这个有点棒
这个有点不错
觉得还不错
觉得很棒呢
觉得真不错
觉得真棒呢
我很喜欢
我很喜欢这个
我真的很喜欢这个
各种get
三百六十度夸赞
三百六十度无死角夸赞
激情点赞
喜欢这个呢~
真的很爱了
爱了爱了爱了
火前留名
火速前来围观
火速前来占坑
前排留名
来留个名
围观一下哦!
前排占坑
点进来准备种草
这个必须get
这我好爱
这太棒了
这个绝了
这个适合我
这真心适合我
太赞了!!
赞一个~
赞得很!
终于等到了
更新了,好开心
今天又学到了
支持博主
又被种草了
点赞收藏
又是收获的一天
真的很动心了
每天跟着博主学知识
博主更新就是我每天的动力
又是被种草的一天
真是我的心头爱了
超级喜欢
不愧是我爱的博主
爱了!!!
真的很实用了
超赞~
超级值得关注
不得不为博主点赞
完整看完
想get很多
第一次关注
真的太棒了
我不管,都是我爱的
一定要早一点更啊
这个更新太棒了
超心动
这个厉害了
我超级喜欢
看起来就是很舒服
好看,好看
好看啊!!!
好心水
我心水啦
心水!
我收了!
多发哦~
多发啊
要多发哦✧(≖ ◡ ≖✿)
心动了!
心动!!!
学习到了!
新技能GET!
粉了粉了~
小姐姐太美啦!
赞的呢!
不错不错~~
好好看啊~
收了收了!
太美啦!
哇!感觉不错
很棒的赶脚了!
笔芯给小姐姐!
不错不错!
今日份的喜欢,小姐姐收到请回复!
已学会!
太漂亮了
看到封面果断点进来!
po主太厉害了!
相当赞的了!
很棒的分享呢~
喜欢!不得不赞!
好看耶~~
你是仙女吗?
喜欢这种!
100昏~喜欢
悄悄来点赞!
为你打call!
好棒!!
小姐姐好用心啊!
收藏啦!!
喜欢你的分享
赞赞赞!!
pick啦
哇哇哇哇!
这一波分享很满足了
好赞!!
有点棒哦
爱辽爱辽~
默默围观中
要不要这么棒!
楼主为何如此优秀
好美哦!
超级漂亮!
好看的
第二张图我超级喜欢
喜欢这个风格
前排表白小姐姐
冒个泡!
果断赞一下
美der~
很喜欢哦~
渣渣前来学习
哇。好可爱呐
emmm,还不错
美美哒
也太优秀了吧
哈哈,可爱!
炒鸡好看哦!
好喜欢这个风格
嘿嘿不错哦
手动赞!
手动赞一个哦~
厉害的~
手动打电话!
手动打call!
笔芯点赞!
哇哦!有点喜欢!
围观就位!
看到封面果断点进来
可以说很好看啦
喜欢!为你点赞
学会啦~
捞一下,很喜欢
手动点赞+笔芯
太好看了吧!
这个必须要赞一下了
大大的赞一个
喜欢喜欢
超级喜欢!
立刻粉了
爱了!!!
好喜欢
好喜欢!!
手动赞!!!
太心动
好心动!
手动点赞
手动笔芯!
点赞!!
已收藏!
疯狂笔芯
疯狂笔芯中!
疯狂比心
心水!!!
心水。。2333
赞诶!
这一波分享很满足了!
很棒的赶脚
哇哇,喜欢
先赞为敬!
还不错的赶脚
感觉很棒诶
强势围观!
哈哈!稀饭!
超级稀饭!
\ No newline at end of file
一看就会,一学就废 ╥﹏╥
这个发量,有护发的东西安利吗?
别人家的头发,做了都很好看系列
婴儿肥怎么办,适合这个发型吗
老师!我没有学会!
手残党真是看着就知道自己做不到了
学渣想问,打理起来有手把手分步骤讲解图吗!!
我决定把图留着,等去找tony老师!
一个发型狗啃一般的我,也不知道为啥,认认真真看完了每一个字。。
wow 真的好看哎,一进来就被这张美到了
发际线高,额头凸出,适合什么发型或者刘海啊
不知道自己的脸型适合什么发型。。。
秃头少女想问,有发际线技巧吗!
正在苦苦留长的我……
讲真……奉劝各位 不要剪不要烫……除非你长的很时尚
请问 我头大呢 适合什么发型?
这发型真适合走活力少女路线!
每次看小姐姐们的发型,都觉得是自己颜值水平拉低了tony的技术水平
我之前剪了这个发型!全部人都说好看,回家我妈还以为我打了瘦脸针!!!
细软少还脸圆plus。。。我只能留着中分。
求推荐发型啊,想要蓬松一点的,现在长发有点贴头皮
我的TONY严肃地叫我不要剪。。然后我不信,他就拿出了假发给我戴上看大概效果
但我头发还是细软少,哭了
看了一天贴有点动心。。。谢谢!!
我剪了类似这种,不过自己吹出来没有tony吹的那么美!
想起来上次剪头发,发型师帮我卷了像高晓松,第二天睡醒,直了像刘胡兰,现在像马大姐╥﹏╥
太少女了吧!!
今天的彩虹屁我要全部给你!太好看了吧!!!
这是谁家的仙女!!
小个子女生该留怎样的发型好!留了六年刘海最近近年留中分 想剪短发又怕不好看!求支招
中分发型不化妆是否会显得人没精神啊?中分适合什么脸型吗?
脸大有点方 应该留什么发型呀= =
换了个发型,被嘲笑了半个月了
求问,头发少,留什么发型显头发多?
头发少到tony都说好心酸的地步了,羡慕小姐姐
今天剪了个巨丑的发型,想哭
换了十万种发型以后发现,头发靠吹,不会打理都见鬼 *^▽^*
贾玲适合什么发型啊?不过贾玲还比我好看。
我是细软,我最近两次烫的都是那种自然的大卷,烫回家我爸妈都说我没烫…
不知道秃头女孩适合什么发型
头发又多又硬又自来卷,折腾头发都不敢想,哭了
头发老贴头皮咋办?
发际线如何打碎?
艾玛 马住以后剪头发时候用
发量不多不少,大圆脸,适合斜刘海还是无刘海?
刚烫了羊毛卷,风太大太让人失去理智了!
我至今没有遇到 我可以顺利交流的tony… 导致我至今没有过合理的发型,全靠自然生长o(╥﹏╥)o
最近想换发型了,做一个新的发型,新鲜感溢出天际那种!小姐姐有推荐的吗~
我也种草了这个发型,但是我头发细软还容易油 呜呜呜
小姐姐看我!!!我现在就是这种发型!我觉得还行,染了一点颜色,还蛮洋气的
\ No newline at end of file
你的手好漂亮啊
我要的是这个色吗,我需要的是这个手!
看了看自己的鸡爪。。。。
第一眼看的时候,哇塞这美甲好看啊,冷静下来之后发现是手太好看了
已经做了同款,非常满意
神仙手
请问在哪做的
看了看我的小猪蹄。。。
手和指甲都很赞耶
这个颜色这样搭配好看的
仙到爆炸,超级好看
底料是什么颜色的呀
好看呀
啊啊啊啊 喜欢这个 想看细节图
我也做了耶好好看
目前为止看到最好看的指甲图了
真的超好看!特别显白 出门感觉自己是最靓的仙女
爱了
会显手黑吗
预算多少呢?
小清新款
手黑可咋办
我的天啊,也太好看了吧!!!
太好看了吧
好喜欢呢!
超级温柔的美甲
我也要做一个精致的小仙女!
好美丽的手呀
主要是手还有指甲盖好看,美甲是其次的
这个我也做了,非常好看
做了同款,喜欢
超喜欢
天呐,我关注了一个什么样的小仙女
我做啦!!!好看!!嘻嘻
这个是啥甲油啊,好稀饭
好喜欢你的颜色搭配,太好看了
这撩到我了~
太好看了吧 让我一个不做美甲的人 都心动了
小姐姐,请问都是哪些颜色,能说一下吗
你很赞哦
手也太好看了吧~羡慕
显白哦
简单好看,还是那种万年不过时的款
哇哇哇,同款同款
做了同款 太好看了 我的妈耶
美甲师路过,昨天刚刚给一个顾客做过
这一个造型多少钱
小姐姐你的手是真的好看
同款指甲
嘻嘻嘻做了同款指甲好好看美爆
\ No newline at end of file
想问下小清新用哪款
妈耶 这个香水暴露了年龄
没用过香水的,应该选哪个呀?
想问一下,有么有那种擦肩而过的回头香
渣女香
你们都是在哪里买的香水呢
哪款香水香味持久一点的,求推荐
最喜欢这款了 不好撞香
各位大神,夜场上班适合什么香水
我觉得这个味道还是阔以的
这款是真的好闻
哪款香水留香久呢
这款简直就是我的挚爱了
回购3瓶的路过
喜欢甜甜的香水
看了看自己的钱包,再见
竟然没有我用的那款,差评
买不起、过
爱了爱了,这款颜值也很高呀
买不起系列
买起来
味道挺好闻的 特别香 保持也很久
求一款淡香持久的香水,可否推荐一款
请问持久性如何?
好看
这款瓶子好好看
穷人只配路过
好闻呢
好是好,就不太留香的感觉
可以推荐一下那种淡淡的、比较温柔的香水嘛?
男的适合哪一款呢?
现在官网还能买到么?
哪种适合秋冬喷呢?
想知道现在在哪里能买到
想买试管分装 以免踩雷
分装在哪里能买到呢
香味持久又好闻 好评
能推荐一款小众香水么
我也有这款
刚下单了,还没到手
想问下香水会过期么
每天最大的困惑就是不知道应该宠幸哪瓶香水
好闻鸭
有什么适合秋冬香水的推荐嘛
有同款香水的路过
我也是香水控
好漂亮,颜值控
之前还在纠结买哪款,看完文章果断入手了
楼楼 瓶子好不好看不要紧 给我推荐一款你觉得最好闻的呗
准备入手这款香水了
\ No newline at end of file
整理这个榜单的人也太优秀了吧
这个榜单我爱了
这个榜单是干货啊干货
整理这个榜单的人也太棒了吧
这个榜单我完全心动
真的是非常厉害了
这个榜单是我的菜
宁这个榜单一定会火的
超级棒了!!!!!!
这个榜单完全是我的取向狙击
整理的好棒,加油~
给贴主加油!
这么棒的榜单我一定要站前排!
火前留个名吧
超爱看这个类型的
疯狂夸赞贴主
收藏成功
真心瑞思拜
我真的超爱这个类型的榜单
感谢干货
支持支持
这是什么神仙榜单啊
整理的真的很棒啦
这个榜单有点屌
干货最实用了
比起别的更喜欢干货
在这里面得到了一些启发哈哈哈哈哈
真爱干货
断头推荐这个榜单
刚看到,好可惜
已经收藏好~
偶买噶,这个也太棒了
干货真棒
断头安利这个榜单
看榜单太欢乐了
pick这个榜单
被吸粉了
我要立刻把这个榜单分享给朋友
有人在一起看么?
各种榜单里面有宝藏
看见干货就会收藏
我立马把这个榜单分享给小伙伴
求更新下一个榜单
马一下,等会一起看
看见的第一件事儿就是点赞
看到很多感兴趣的就很幸福了
绝对不能错过
选择题真的好难做
您更新的下一个榜单我也一定追
看到很多感兴趣的就很开心了
成功安利给小伙伴
激情留名
好了,我要追到底了
respect!
偷偷看很多遍
瑞思拜
敬佩敬佩~
看见的第一件事儿就是收藏
真的很了不起呢
真的很佩服哇
太佩服你了
赞赞赞
激情夸赞这个榜单
悄咪咪的看很多遍
不收藏还在等啥呢
这是我不断翻阅的榜单
看各种榜单是我的快乐源泉
我最爱榜单没错了
果断pick
看见你更新我就很喜欢
你更新我立马来看了
我莫得感情,只想点赞
前排留个名吧
我宣布这是我的最爱了
给你点个赞
希望总能看见你更新
我在这个榜单住下了
这个榜单治愈了我
消除疲惫感的一榜单
感谢这个榜单哦
咋这么厉害呢
我吹爆这个榜单
给你点一万个赞
这是我最爱的榜单无疑
这个榜单是我留下的理由
这个榜单太了不起了
这个榜单所有内容我都爱!
很喜欢这个榜单,看了好几遍
吹爆这个榜单
我宣布我住在这个榜单了
先马一下
翻看了好几遍
整理的很全啊
请保持这个速度继续更新
宁整理的是对的
这个有点nice
先马一下一会看
噢哟,有点厉害的
太优秀了
看见这个榜单我决定留在这个app
这个好棒呀!
这个榜单整理的厉害了~
这个榜单好酷哦!!!
果断收藏了
真的很棒了呢
这个真的很酷哦
这个很棒啊
这个有点厉害
这个很不错的样子
就喜欢这种看这种榜单
果断码了
很厉害的样子
这个很需要了
这是什么好登西
wow!so cool
哇哇哇,我要化身尖叫鸡了
前来打call
在线打call哇
默默地占个前排
火速前来围观
火速前来占坑
前排留名
火前留名
前排围观
前排占坑
默默的钱来围观
默默地占个坑
这又是什么好东西哦
这个好像很厉害的样子
这个好像还不错哦
这个好像很不错哦
这个有点点厉害
这个有些厉害哦
这个感觉很棒哎
这个真心不错哦
这个真心喜欢呢
这个真心可以
这个真的可以哎!
不错,支持一下
这个好哎
真棒呀,支持!
特意前来打call
这个必须收藏
这个必须支持
这个我爱了
哇,真的不错
可以哎,相当可以
很棒啊,支持!
为你点赞!
前排火速留名
前排火速占坑
前排火速围观
我又看到了什么好东西哦
真好呀,支持!
很不错哎,支持一下
很OK啊,支持!
很OK啊,必须支持
很棒啊,必须支持!!!!
很不错,必须支持
很好啊,必须支持
哇,相当可以哎
哇,非常OK呢
哇,真的棒
哇,很厉害哎
是我喜欢的没错了
是戳我点的了
是我欣赏的呢
是我的菜了~~~
看着就好棒的样子
看着就不错的样子
看着很不错的样子
怎么办 心动了呢
非常OK哇
是很棒的内容
是很不错的内容
这真的是超级棒的榜单啊
好用心的榜单啊
真心喜欢这个榜单啊
榜单看着很厉害的样子
这个榜单很不错啊
这个必须支持了
这个必须收藏哇
感觉很不错呢
感觉很OK啊
感觉很喜欢呢
哇塞,可以呀!
哇塞,我爱了
相当不错哎
真的很OK呢
前排前来打call
前排火速来打call
前排火速赶来支持
前排火速赶来给排面
前排表白这个榜单
必须夸夸这个榜单了
前排火速赶来夸赞这个榜单
前排占楼夸下这个榜单
前排占坑表白此榜
很不错哎,夸一波这个榜单
这波必须夸一下这个榜单
下一次不要让我们等得太久好吗?
反正你的每一期我都看着呢。
恭喜你成功的吸引了我的注意力
呦呵 最近更的挺勤啊
啊啊啊。。。快把你的库存全部交出来!!!
超喜欢这种风格!!!!好好看!!!!
查收查收查收 嘻嘻
我是么得感情的舔狗机器
啊啊啊啊啊啊啊,终于逮住你啦!!!!
这才多久我就赶不上前排了
妈呀这么靠前!
看了一眼,好懵,我是前排
卑微小粉在线关注
为了贡献一个粉丝数上来关注一下
我怎就觉得我要开始转粉了呢
希望以后我可以成为你粉丝中的一员哦!
哈哈哈哈在线等你
今天热情营业中啊
我也在线给你彩虹屁
我早就关注你了 只是少评论 我要变铁粉
闲来无事就来看看
看到这么优秀的你我好开心!
今天刚关注你
我是一个不经常给你评论点赞的安安静静的粉儿
完了完了沦陷了沦陷了
又营业啦!!!好棒
最近辛苦啦,一直支持你
好吧。。。入坑
这营业频率也太开心了吧
天哪,这个今日营业很奈斯啊
这么多人评论你都回了宝贝考虑一下回我吗
我想使劲夸你!
我来了我来了我带着一天好心情来了
最近的更博速度好喜欢啊!!
每天营业也太棒了!把之前的都补回来
粉丝速度给我冲啊!
看到更新开心的我老泪纵横
一直默默地关注你,现在真的藏不住了!
刚上想看看你就发更新了哈哈哈啊哈
请保持这个速度哦 迷晕我
说实话我是新来的
这种营业速度我真的喜欢!!!!
热评坐稳了送我上去啊
我来啦我来啦!前排让让! 
我又来晚了?哇啊……
为你沦陷了~
考不考虑翻个牌啥的
请保持这个营业速度,不要停
我错过了前排,为什么!
我的愿望是你能越来越好
啊啊啊啊啊第一次这么前不知道说什么好啊啊
最近营业速度真的很优秀了!!!冲鸭!!!
我的愿望是你能每天更新
这营业速度我可以
我现在都抢不到前排惹
不知为什么真的很喜欢你,尽管没有尽头
就酱子更新,不要停
我的喜欢溢于言表
哇 等到你来了
更新速度保持喔
更新频率太棒了
我还能抢到你的热评嘛?
你永远都会被人发现,你是我们珍贵的宝藏啊
老大你的粉丝涨的有点快
继续加油,我们一直支持你
我也想成为你的铁粉
我的妈呀涨粉速度吓人
每日必做:到这来支持你一下
你终于被更多人知道了
听说你最近被圈粉很严重啊
我疯了第一次这么早!!!
你知不知道每天看到你 就超级开心的
哼 看过了 要新鲜的!!
前排送来彩虹屁!
今天更新好勤快呀!!!我爱了
你也一定会闪闪发光呀
我们永远在你身边
你真的火了,但是你好就好
前排混个眼熟。
你的粉丝越来越多了 老粉在背后默默支持你
天哪你居然更新了!!!!
你一定也会越来越好啊!!!冲鸭!!
你火了这么多评论 我再也上不了你的热评了
一个迟来的留言!
保持这个营业速度
老粉表示不努力再也抢不到前排了
我不管!反正我是第一!
啊啊啊啊啊啊,福利来啦,太暖了
会一如既往的支持您
管他呢 先夸再说
我觉得我上不了热评
终于想到发福利了,都要哭了!
继续加油,期待你的好作品
我真的打卡好多天了,我是不是很听话!!!!!
啊啊啊啊啊啊啊先尖叫了再说!!!
他来了他来了他带着欠着一屁股的粉丝福利来了
愿你走过的每一条路都平坦,度过的每一天艳阳满天
我对你很满意,你会越来越好的
看来我需要特别关注你了
啊啊啊啊啊啊,太突然了~我以为是假的
\ No newline at end of file
果断被种草了
有机会去拔个草
一定要找机会去拔草了
有点喜欢诶
这个有点棒的
我很喜欢这个宝贝诶
感觉这个东西有点适合我
被种草的死死地
这个我觉得还挺不错的
有链接吗小姐姐?喜欢
好好好!我买就是啦
我想要拥有这个!
我也要去买来试试
收藏好了,准备去买
我一定要拥有这个
我有点心动啊
好想试试这个
正在使用ing
这个我正在用,超级好用
试一下同款
哇,真是心动,被种草了,码住
希望这个价格我能接受,哈哈哈哈哈哈哈
心动并火速行动!!种草本人
心动了呢,然后决定立刻入手
这个真的很适合我!
我默默地打开了某宝
一定要试试了,想要一个连接
立刻被种草了,看完我要去买了
太心动了,必入
你是什么种草机器码?我决定搞一个
准备去试试。喜欢这个
这个必买了!
搞一个试试
这个宝贝真的好喜欢
OMG!是魔鬼吗,也太适合我了吧
弄个试试还是可以的
太适合我了
如果能适合我就好了
希望这个适合我
OMG!买它!
这个价格我可以入手了!
告知价格让我死心
这个一定要赞一下叭
看见这个我觉得我很幸运了
还想被小姐姐种草更多的物品
害!觉得自己又要花钱了
我的天,我竟然破天荒的被种草了
你种草能力太强了吧
我的钱包又要空了吗
我的金钱没的理所当然
我要护好我的钱包
一边被种草一边看了看我的蚂蚁花呗
只想默默地守护好我的钱包
买了别的,后悔没早看见!这个明明更适合我
在小姐姐的引导下,钱包日渐消瘦
我的蚂蚁花呗要承受不住了
刚买了个相似的,不知道哪个更适合我
这个商品是我的取向狙击没错了。
想想就觉得不错
早看见就好了,刚买了个相似的
贫穷限制了我
非常好的安利贴
宁是对的
撞了同款了
我也用过这个
贫穷使我哭泣
这个我试过
我超爱这个的
等我get了,也要搞个回馈贴
这个看起来还不错的。
默默收藏,有机会试试
这个我一定要拥有了
果断点了收藏,我早晚会get的
一会就去买一个试试
我一定也会get一下的
生活不易,看啥都想要
真的是看啥都想试试
貌似之前也被安利过这个
之前貌似看到过相似的
看什么都想买,疯了
我立马去get同款
咋买咋买
要去哪里买嘞?
求个链接
我该去哪里买呢?
看着就心动
哪里买比较合适
之前在别的地方看过,一直很心动
最近好多人都在写这个
这个真的是干货
看见这个这么适合我没关注错
求个链接噻
幸好关注了不然错过这么适合我的东西
果断搜索同款
我去搜索同款了
最实用的干货
幸好关注了你不然就错过了
收藏好了这个干货
我真的迅速get
立即马住
帖主666,我又被种草了
感谢帖主分享
感谢分享,辛苦
这真的优秀,感谢分享
这真心适合我,感谢分享
这个真不错,感谢分享
完美安利啊
点进来就是吃安利的
这个真的可以哎
这个真的可以哎,感谢分享
这个厉害了,感谢分享
是我喜欢的了
是我喜欢的了,感谢分享
帖主这个魔鬼,我都好喜欢啊
魔鬼帖主,我是躺着被安利
行走的种草机,说的就是帖主这种吧
又被在线种草
明知道会被种草,我还是要点进来
明知道点进来我的钱包就又得被掏空
在线激动!这个我真喜欢
终于看到适合我的了
绝美系列啊,必须get
这个我要分享给我朋友
这个适合我闺蜜哦,拿去安利了
害!又被种草了
终于看到我爱的了!激动啊
这个适合我闺蜜
我要拿去安利给我闺蜜了
OMG!终于看到适合我的了
这个6!我爱了
我这个吃土少女为什么要点进来
天知道我看了多少帖子,才看到这么喜欢的
本吃土少女扪心自问,为什么要点进来
哇哇哇,这个真的好喜欢啊
真是夸爆小姐姐!分享的我都好喜欢
天知道我看了多少帖子,才看到我想找的
点进来的那一刻,我就知道我的钱包又要被掏空了
这个必须get了!炒鸡喜欢啊
超级超级超级喜欢了
太棒了吧!都是神仙单品
我这个被种草机又自动走过来了
超级赞了,迅速get
想拥有!我说的超大声!
这是我最喜欢的系列了
真是有永远都种不完的草
种草了!
又被种草了!
又双叒被种草了!
又双叒叕被种草了!
就知道会被种草
吃土的我为什么要点进来
帖主真是行走的种草机
最近的愿望清单+1
这个月的愿望清单+1
今年的愿望清单+1
就算吃土也要get同款
存钱!买!
同款安排!
同款get
同款必须安排
同款必须get
这个适合我哦
这个总该适合我了吧
这个应该适合我
这个真心适合我
这个好喜欢
这个真心喜欢
这个必须get
这个我好爱
这个真心爱了
这个太戳我了
这个直戳我心
心动了
怎么办,心动了!
在线心动
疯狂心动
整天都在被种草!
真是种不完的草
成功引起了我的注意
成功吸引到我了
成功诱惑到我了
成功引起了我的兴趣
心动
心动+10086
心动ing
心动了,我要去试试
心动了,同款安排!
我都这么穷了,但依然想get
感觉不错,mark一下
真心不错,mark一下
先收藏准没错
回头试试
决定试试
喜欢!下次试试
太喜欢了,一定要试试
超棒的,回头试试啊
心动,我要去试试
可以可以,试试
这个一定要试试了
无敌了,必须试试
哎哟不错哦,很好
我也一直想买,就是觉得好看
我不管,都是我要买的
看到你推荐的这些东西,我都很想要。
很不错的宝贝,谢谢分享
已被种草
来惹来惹,安排一下
便宜又好看有木有
这些好物一定要收藏,然后买买买
我知道你又要掏空我的钱包 但是我心甘情愿
强烈推荐啊!简直不要太棒了这次安利
这些东西真的非常棒。全都是我想要的。
看了好多都挺需要的,是时候填满我的购物车啦啦
感谢整理省的我们自己再去一个一个找好用的东西了
又被种草了!真的要去看看这些哦
发的这些都是真心好用的,赶快安利一下
都是好东西,赶快种草起来吧
我已经推荐给身边各种小伙伴啦 
我都买了好几波了!真的生活里不可或缺了 
又要让我种草了。不过觉得挺开心的。
是真的很不错,我都想要。
好像都没用过在评论里求一波安利
这些推荐的真的是太有用了,而且我感觉都非常的适合我,买起来买起来
可以可以,你推荐的我都很放心
这些好物已经被推荐过好多次了,确实好用
这些东西都好好的啊哈哈准备去淘宝一波啊
看起来很不错的样子,支持一下哦
真的是不错呀,一定会推荐的,真好
这些东西真的都非常好用,表示已经行动了
已经买过一波啦 身边的小伙伴都推荐一遍啦
是的呢,我也是通过室友推荐过来看看的。
那么问题来了去哪可以买到呢
我正打算买买买正好赶上发工资
见过但是没有买,因为不知道好不好用
这个都很实用呀,赶紧收藏总有有用的时候
东西都是良心推荐哦,感觉都不错,
真的是太棒了,很值得我们推荐一下
这么好的东西,钱包又要瘦了,但是我高兴呀
谢谢推荐,我去剁手了
真的是太棒了,很值得推荐一下
东西很不错呢,如果是好东西我都会推荐给我的友友的
很不错,都推荐给朋友一起买!
高贵而迷人的产品,让人心仪。
哈哈,简直是超赞的呢
好看!感觉会很适合我!
今天就可以下单了,开心
哇哦~这个真的是好好看,好喜欢
哇哇哇哇哇哇噻~不知道怎么形容好了
好东西转给大家~~
没后悔 围观你了
还是很支持这样的东西的
每次看到安利,就会忍不住想剁手
一定要买!忍不住了
总是推荐好物  我都忍不住买买买
这个推荐真的是挺不错的了
朋友圈一直都在推荐,这次赶紧安排上
这个推荐太好啦,又可以入手好宝贝啦
你就不能看我发工资天天撺掇我花钱,我都瘦了
这波安利我真的爱了,购物车就绪
每次看见你发的,我都想买,实在是忍不住呀
次能不能把价格打上好让我死心
这么好的分享看来这月工资又保不住了
我太需要了,必须得安排上了。
有好多自己最近正想要呢,必须要买啊。
看了之后就知道自己又要剁手啦。
确实有一种相见恨晚的感觉 可能钱包又不愿意了
正好看看有没有需要的呀?必须要买的啦
来的真的是太及时的了,我要买
棒了简直是,真的是爱了,哈哈
先收藏了呀,万一有用呢?
完蛋了,又被安利了
真的是太棒了呢,哈哈,超级喜欢的呢
特别好的东西,谢谢博主安利
又推荐出好东西了吗,我又忍不住想买买买啦
断头安利各位姐妹,真的良心
感谢博主幸幸苦苦的整理,再也不怕买错东西啦
好想入手,又要吃土的一个月了 
又想骗我买买买,等等快发工资了
一到有推荐,腰包极度缩水
拿起小本本记下来了 
这个真的是厉害了,妥妥的已经收藏
福利贴啊^_^
确实很好,盘它
啊那个叫什么来着找了好久了,终于知道是什么了
我觉得我被安利的真的需要入手了
精致的猪猪女孩确实需要马一下!安排一波
我感觉我的花呗又要超了 
我准备入手,然后试试效果
对于这种好用又实惠的东西完全抵抗不住诱惑
又想骗我买买买,但是这么物美价廉的东西谁不想安利呢
哇博主这么优秀的吗?看来是要一个个买起来没有错了
不用自己去找了,上面的需要的都买了,谢谢
对对就喜欢这个
这个确实是好用的,我应该囤点
我承认我心动了
确认过眼神,是我喜欢的
这都是什么神仙推荐,好合我心意
你这个坏人,我又想剁手了
我发誓,以后要少看这类的东西,太费钱了
辛辛苦苦挣点钱,看个帖子又没了
\ No newline at end of file
真令人羡慕
想要同款
完美啊~
这样挺好的,很时尚。
来来来还等啥,过来盘它!
看完以后,准备入手了
我不知道能在哪里买呀。
第一次关注,看着很舒服
一定要去囤货
上等社会的既视感
喜欢看了!!看得心情好好
很喜欢小姐姐写的帖子
楼主看着还是那么的美丽迷人
我不管,都是我要买的
记得以后也一定要多多的更新,大家都表示很喜欢
已经关注你了,期待你更多精彩内容
想每天都看到你的帖子
看到你推荐的这些东西,我都很想要。
这分享真心也是挺不错的
这个更新频率太棒了
哇,是真的超心动啦
安排一下
整一个?
很干净的颜色搭配哦
清新爆了
简单粗暴,我是被脸吸引进来的
我超级想要这个的
我的钱钱钱钱钱~渐渐的~渐渐的~它就离我远去了
看起来就是很舒服
多少钱?好让我死心
想要拥有~
宝宝我前排
铁粉铁粉来了 
来了来了 感觉莫名开心的一匹
前排啊啊
我的天哪!
冲鸭
天呐太可爱了吧
想想就激动
冲啊!!!
就喜欢单纯不做作的
我要有你一半颜值我就满意了
我又要换头像了
我该说什么呢…好看!!!
有我们陪着你
是我喜欢的人呢
往后余生 全部都是你
我来了宝宝
太好看了啊,忍不住点个小心心
你说啥都行
宝贝你来啦!
好美啊,我要融化掉了
哈哈!我又来了
你是怎么做到这么优秀的
忍不住就夸夸你
真的很可以
你说滴dei
说啥都爱你
哈哈哈哈哈 懂你这种感觉
画面超美 
好好看啊
越来越好看
也太美了吧
我我我第几了
缺助理吗?
好看死了
小姐姐简直就是我偶像
还能这样,好厉害
真好看。随便一穿,随便一拍
色调超级和谐
我是铁粉
就爱你这种不藏着掖着的
你怎么这么好看
好有范儿
前排!!!快捉我
这是谁 这么好看 能不能回我啊
好漂亮呀 这些角度我都好爱。
不知道哪个点戳中我了
哇了个塞?这么酷?
日常来打卡
真的好期待哦
来个偶遇好不好
哈哈哈 来网恋吗?
值得我们期待
就是喜欢你没道理
话不多说,就支持
你怎么这么好看
好看!是最靓的崽了
第一次评论
可把您盼回来了
欢迎回来营业
庆祝小姐姐回来营业
认认真真跟你表白
小姐姐简直是我的小可爱
这个好棒
实不相瞒,我喜欢你
我超喜欢你,很久很久了
给你美坏了吧
好实用的内容
我这么喜欢你,你说怎么办吧
你是我心中最美的一道风景
我什么时候才能偶遇你啊 
呜呜呜,我可以
一直在!
想你啦!
太閃了呀喂 
哇哦,激动的心❤,想偶遇
我每张都想放大看!
第一啊啊啊
我来的时候明明没有人的 你们怎么肥事 能让我抢一次前排吗
时光不老,我们不散
福利时间到,给boss比心
等来啦,我的宝贝 
小可愛你更新啦
你是我的私藏宝藏了!
我的天,吼吼看!
感觉会被翻牌
哎妈呀,太激动了
过于可爱(*╹▽╹*)
媽呀 我愛你!!![愛你][愛你]
喜欢小姐姐的分享
猝不及防!!!
一直在等你更新
北北冲鸭!
你怎么那么棒
小东西,你来啦
我刷到了什么!!!!
哇哇哇,我想死你啦
我恋爱啦 
知道我们有多想你吗
好久不见,甚是想念
第一名手快准狠吗 
看到你啦,开心就好!
今天给你赞
我又来晚了
我的天来晚了
这个sense羡慕哭了
word天,太美了叭!
美得我眼泪PradaPrada的掉
啊啊啊啊啊终于想起我们
 我该怎样才能引起你得注意
还好我没放弃,终于等到你了
好有感觉哦
小姐姐看这里,我是你的小粉丝
哇塞。。等你好久了。。
第一次!让我上前排吧!
终于知道露个面儿了
啊!想Shi你啦
哇塞,总算等到了
哇塞,想被小姐姐关注
我要分享给我的小伙伴
嗯嗯嗯,,喜欢小姐姐
疯狂打call 
一路支持你
你说什么都支持
宝贝你终于发帖了
突如其来的惊喜
这是什么神仙小姐姐!关注了
路过,刚下班的我
喜欢你的每一个分享
一直爱你 用行动表示
哈哈,用行动爱你
啊啊啊啊我有点意外
想被小姐姐回复
❤你很棒!
你是我的万里挑一
被小姐姐的帖子激励了
爱的魔力转圈圈(为了抢热评)
失去了我的前排
再评一次,能看到吗?
评论里真热闹
我好喜欢你咋办呀
第一个回复!
好喜欢小姐姐~
超有感觉,好喜欢呀
这周就要按照你的方法试试看
喜欢小姐姐,求回复
手动点赞,收藏学习啦
么么么么|。・㉨・)っ♡ 喜欢你♪
偷偷承包女神,笔芯
超级棒!!!!
这次我来的早 
好棒,好棒,真的好棒
头一回这么前
元气满满哦
好喜欢你~
趁着还没成大明星赶紧关注一下
啊啊啊啊啊宝贝!你来啦!!!!!!
你一直默默承受着这个年龄不应有的颜值和机智,你不累吗?
最好的推荐,没有之一~
哇哇哇前排
存啦,好内容
拍的好棒
我现在都不敢夸你 因为我怕被赞到明年
是好看的
这个太好用了!夸爆 学起来
收藏到画报啦
翻我牌翻我牌
我天前排啊!!!
大型收图现场
简直太令人发指了,这世界上怎么可以有这么好看的人!
偶遇吗?
火速前来围观
火速前来占坑
前排留名
顶我上去
前排围观
前排占坑
点进来准备种草
就知道会被种草
博主真是行走的种草机
吃土的我为什么要点进来
存钱买
就算吃土也要get同款
同款已在路上
同款安排上
这个必须get
这我好爱
这太棒了
这个绝了
这个适合我
这太适合我了
这真心适合我
这总该适合我了吧
太赞了
赞一个
赞赞赞
今日种草了一位颜值满分小姐姐
超级赞
这个必须点赞
打call
疯狂打call
前来打call
心动
在线心动
心动,我要去试试
mark一下
感觉不错,mark一下
马住啦,有用的内容
真心不错,mark一下
立马去get同款
这个好,学习了
酷爆了!
种草了!
又被种草了!
整天都在被种草!
真是种不完的草
成功引起了我的注意
成功吸引到我了
成功诱惑到我了
get√
加到我的画报里
完美
简直完美!
太完美了!
不错哟
确实不错
绝了
关注关注!
疯狂心动
还能这样,好厉害
真不错
必须码住!
学到了
这下真学到了
这个我可以!
我也可以的
图都好喜欢
品位好棒呀
图都好好看哦
很喜欢你的图
超喜欢你的图
这构图,完美
好看哦
pick
疯狂pick
日常关注!
种草一波
喜欢你的每一个分享
每天都来看看你~
想被小姐姐回复
很有feel
可以的
真心可以哦
爱了
非常OK
喜欢这个分享
可以说很OK了
这个角度美呆
这个真的好哎
很喜欢小姐姐的帖子
这个真的棒
这个真心好棒
又双叒被种草了…
ღ( ´・ᴗ・` )比心
在线比心
比个小心心
这个搭配可以呀
比个大心心
疯狂比心
小姐姐请收下我的小心心
小心心送给小姐姐
哇,赶紧get
喜欢小姐姐
这是在哪儿
求同款链接
哇哦!
哇!同款安排上了
干货贴
哇塞,get
wow!迅速get
mark!mark!mark!
又双叒叕被种草了
哇,立马get
哇塞,立马get
被戳中了
这个直戳我心
这个太戳我心了
wow!喜欢
心脏狙击
简直心脏狙击
果断存了
这真的优秀
优秀啊
wow!心动
太优秀了吧
哇!get
哇!爱了
简直优秀
哇,速度get
哇塞,速度get
这个厉害
这个厉害了
回头试试
下次试试
可以可以,试试
心水啊,可以尝试
喜欢!下次试试
全都是干货呀
干货满满!超赞的
决定试试
这个一定要试试了
简直无敌了
无敌了,必须试试
哇!绝了
太喜欢了,一定要试试
太赞了,必须get
超棒的,回头试试啊
疯狂心动,立马get
心动不如行动,同款安排!
超棒的干货
双击666
感谢分享
帖主就是行走的种草机
帖主666,我又被种草了
感谢帖主分享
哇!心动
哇!喜欢
感谢分享,辛苦
这真的优秀,感谢分享
这真心适合我,感谢分享
这个真不错,感谢分享
完美安利啊
点进来就是吃安利的
这个真的可以哎
这个真的可以哎,感谢分享
这个厉害了,感谢分享
是我喜欢的了
是我喜欢的,感谢分享
这个我爱了!
帖主这个魔鬼,我都好喜欢啊
躺着被安利
这个我可以!感谢分享
这个我爱了,感谢分享!
这个超赞的,谢谢分享
这个6,我爱了!
这真的棒!是适合我的
wow!
行走的种草机,说的就是帖主这种吧
又被在线种草
明知道会被种草,我还是要点进来
明知道点进来我的钱包就又得被掏空
在线激动!这个我真喜欢
终于看到适合我的了
绝美系列啊,必须get
这个我要分享给我朋友
这个适合我闺蜜哦,拿去安利了
又被种草了
终于看到我爱的了!激动啊
这个适合我闺蜜
我要拿去安利给我闺蜜了
OMG!终于看到适合我的了
666666!这个我爱了
我这个吃土少女为什么要点进来
天知道我看了多少帖子,才看到这么喜欢的
本吃土少女扪心自问,为什么要点进来
哇哇哇,这个真的好喜欢啊
真是夸爆小姐姐!分享的我都好喜欢
我都感觉我要爱上帖主了
终于找到这个帖子了
点进来的那一刻,我就知道我的钱包又要被掏空了
这个必须get了!炒鸡喜欢啊
图一好好看
666666
吃下这枚安利
吃下小姐姐的安利
一人血书求全身连接
求分享滤镜
好喜欢你的分享
好好看,背景求分享
仙女!
求同款摄影师,拍的好棒
围观就位
人美,怎么拍都好
人好看怎么拍都好呢
喜欢你的风格
这简直是保姆级的攻略诶
你拍的都好专业啊
博主真有心
不错诶
喜欢图1,2,3
好可爱n(*≧▽≦*)n
圈粉啦
拍的好腻害
好有感觉
记下了记下了
很ok的分享诶
厉害了厉害了
求连接
我也觉得这样的自拍姿势好好看
爱了ლ(°◕‵ƹ′◕ლ)
博主还会继续分享吗
nice
自拍不会找角度,有什么分享的技巧吗
妈呀好看的
围观就位
美~
这个有点喜欢
你接下来还会更新吗
被击中
这么美的小姐姐。我们能交朋友吗
点赞点赞,不错
什么滤镜呀
什么时候拍的呀
心动哦,小姐姐是哪里人呀
技术流23333
被封面图吸引
很喜欢你的分享哦
小姐姐我想要衣服链接
好美啊
小姐姐,是不是男朋友把你拍的这么美
人美景美,超级爱
支持你
先赞再看
火钳刘明
这个很不错诶
谢谢博主的安利啦
非常优秀了诶
请问是用什么设备拍的呢
我想承包小姐姐的盛世美颜
这个小姐姐的美颜盛世我承包了
喜欢最后一张
不错不错,风格很喜欢,很少女
你好会拍
顶一个
好好看的小姐姐啊(我也想这么好看)
啊啊啊啊啊啊,小姐姐我好喜欢你,好喜欢漂亮的小姐姐
美腻的小姐姐呢
跪求滤镜
喜欢这种干净又漂亮的小姐姐
美女!爱辽
太好看了吧!
小姐姐,我可以拿你的照片当头像吗
美女小姐姐
超喜欢你呢
妈耶,这是什么神仙颜值
我要是这么美,出门得横着走
好可爱的小姐姐呢
你好阔爱
好看呐。我也喜欢这个
敲好看
求被关注,求博主回复
博主可真好看!Get
博主,前两张都好好看啊
好看鸭!
小姐姐是少数民族吗
求问第二张图是什么滤镜呀
美翻了
想要连接
可以发个链接吗
先收藏一波
我的少女心
老夫的少女心被激发了
心水
妈耶。可都是我的菜
看起来好棒哦
被小姐姐种下一片草原
小姐姐可真是行走的种草机
又学到了一招!
想有同款
同求
超羡慕
时尚时尚最时尚
不盘不行
买买买!
我有,等等发给你
写的真的好棒呢
小手已经动起来了
想学都学不会
心情好才是真的好
我也是~
小心楼主飘起来了
买买买!
希望博主越来越好
我也关注啦
楼主,写了好几篇有用的内容呢
装进购物车吧
对大家都有帮助
还想看到更多,嘻嘻
心动不如行动
安排上了
必须整一个
我也超爱这个色系
可爱的要命
我也是
实不相瞒,我也是
没事,它还会回来的
我觉得看看就够了
看的有些心塞
你值得拥有
总能看到你
送你上去
WINK~ 
优秀
怎么啦?
一起冲
做梦都想变成这样的人
稳住
冲鸭!
比纯净水还纯
那你现在应该就挺满意的吧哈哈
换成小姐姐么哈哈哈
就这么简单吗
我们都会在的
也是我喜欢的
土掉渣的情话么?
捞你上去
我要给小姐姐大心心哈哈
那你想让小姐姐说啥
不许叫我的小姐姐宝贝
确实好看
我见过你!!
真是花式夸奖
根本不用忍
棒棒的
赞同
爱的不行
自恋的感觉吗
真的挺好看的
我也这么觉得
一直好看好不好
必须的
你自己数数
选我选我选我
词穷了都
还有我还有我!!
真会玩
差的可能是颜值
想学哈哈哈
不,钻石粉
我也喜欢
日常夸赞是么哈哈
就这个feel
送你上去
额滴个亲娘啊,这也太可爱了吧!同问这是哪个小仙女
哪个最喜欢
但是就是很喜欢是不是哈哈
酷!
滴!沙瓦迪卡
我也期待
我说好有用么
这么直接的吗
很喜欢小姐姐的内容
没道理的赞同你
支持支持
我们和小姐姐差的就是颜值
这条街最靓的
期待你的第二次
哈哈哈哈,催更+1
小姐姐要多发发帖子呀
小姐姐的帖子都超棒
认认真真的听着
也是我的小可爱
我也学习了
实不相瞒,我也喜欢
很有品位哦 要继续喜欢我的小姐姐
美坏了应该
喜欢这一类的分享
楼上就是彩虹屁本屁了
啧啧啧,甜度满分
我也想遇到小姐姐
我也可!!!
一直在+1
我不管,我也想了
毕竟是我爱的宝藏女孩
想想就行╭(╯^╰)╮
放进榜单!
来晚了,想插队
下次你一定会前排的
时光老了也不散
比心队形!
你是住在like吗,这都被你等到了!
是谁,跟我一样发现小可爱更新了!
才没有,她是大家的宝藏
看吼吼,plmm太棒了!
翻我!翻我!
淡定淡定,小姐姐的日常操作
同意
唉呀妈呀,贼稀罕
我也好喜欢这一类的诶
终于等到更新啦
同等ing
一起向前冲鸭
棒当然是天生的
什么小东西,人家是小可爱
醒醒,你刷到了,别怀疑自己
冯巩附体?
哪哪都有你,到处跟plmm恋爱!
比一点点还多一点点!
me too
木有感情的第一名
看见你我也很开心
双击666嘛
记好这是第几次迟到了
永远不会晚
前面的姐妹一起哭
我永远喜欢她.jpg
哈哈,我Gucci的时候,眼泪也Prada的掉
她肯定没忘记我萌♪(^∇^*)
首先,你得跟我一样优秀
辛亏没放弃
小姐姐文笔还是蛮好的
小粉丝+1
这不是来了嘛
来来来,给你让开了
小姐姐经常又在发呀
土味肉麻
你好夸张啊
我就看看不说话
咋分享
还能更直白么
你还可以更疯狂
酷!
真的假的
小姐姐经常发帖的呀
惊到了嘛?
小姐姐就是神仙本仙哈哈
下班快乐
me2
OMG 这是在公开告白么
666。。。
意外的点在于…?
小姐姐估计没戏,我勉强回复你一下吧
小桃心好可爱
土味情话?
一起做更好的自己
想我想到黑夜白天?
那我给你当前排
这是在找存在感么
凑热闹
这就有人告白了?
你还能更快么
同喜欢
➕1
试完了等你的帖子
来来来,我来回复你
你可以给小姐姐一个小红心
给小姐姐笔芯
想一同承包哈哈
棒呆
早早早
没有最棒,只有更棒
奖励你一个赞
充实每一天
我就看看小姐姐会不会给你翻牌
你真会说话
咦,小姐姐才不是你的宝贝
你也太会夸人了
小姐姐还有别的帖子也不错
借着你的楼上去哈哈
这个小姐姐之前的文章也超棒
我也很喜欢
你不想被赞到明年吗
肯定啊
等着你的分享
这个可以收藏么
小姐姐一般不回复
这个有难度么?
咋保存?
可以确定了,确实有这么好看的
你想得美
围观+1
占坑+1
借楼留名
挤挤
瓜子汽水小板凳要哇
带我一个
我也是
但是还是点进来了哈哈
是的!
我也在问我自己这个问题
贵么
有钱有钱
这么速度
这个能找到嘛?
拿起我的小本本
喜欢
棒棒哒
哈哈哈哈,真有意思
我觉得也适合我
羡慕,我也想找个适合我的
真好
哈哈哈哈哈哈希望是真的
好帖,保存了
一个够嘛
重要的话说三遍是么哈哈
不要抢我的小姐姐
是哦
必须的必
我也来打call
用给你递个电话么
带上我
为小姐姐心动过很多次了
心动ing
怎么试?教教我
蛮有用的
同码
我也很喜欢呢
同码!
找到告诉我
一起学习,天天向上
cool~
我也是呢
我也是哈哈哈
谁不是呢
我希望有花不完的钱
哈哈哈我也是
同被吸引到了
同被诱惑到了
get+1
怎么加?
完美!
是呢
我同意
好看哦
酷的一匹
超绝
你好,我是小姐姐后援会会长哈哈
我也是!
我也没想到
小姐姐的帖子都挺不错的
蛮实用的
下回我也试试
感谢帖主呢
我也可以!
我不信~
是的都好好看
太逆生长了叭
我也觉得是呢
我也喜欢
看到志同道合的胖友了
可惜学不会~
那当然了
真的是被小姐姐的点戳中了
喜欢的嘞
找到了另一个小迷妹
哈哈,我也是
都很有用呢
不要抢我的小姐姐
带上我
喜欢这种风格
可以+1
喜欢
爱了爱了
说的是呢
这个小姐姐很厉害的
真的好
差的不是角度是颜值
很实用呢
小粉丝路过
➕1
棒呆
真是种不完的草
这个好可爱
笔芯
加上我
相当可以
大心心哈哈哈哈
biubiubiu
加个我的
带上我
get了
赶紧拉起粉丝群哈哈
同问
同求
哈哈哈 你这是一条语音评论么
有链接嘛?
说的是呢
我也是
get了
码住
你不是一个人,小姐姐分享的我也很喜欢呢
学起来
哈哈我也是
我也是
喜欢呢
喜欢
是的了
真的是!
写的好棒
加个“!”
真的秀
还是小姐姐写的好吧
秀出新高度
get.
超爱
赞同
哈哈我也是
码住
腻害
真的厉害
同想试试
我也这么想
真可以试试
米兔
我这次就要试一试
我好喜欢
最喜欢干货了
嗯嗯 我也是
是的是的!
哈哈哈是无敌了
是的是的!
好玩
我也是超喜欢
夸爆
炒鸡棒
心动到不行
安排上了
棒棒棒
666
感谢帖主
是的!我都不知道在这里吃了多少安利了
没能逃过被种草
辛苦帖主
喜欢呢
我也是
谢谢帖主
感谢帖主
也适合我,谢谢帖主
是的哦 感谢楼主
是的喂
哈哈哈一样一样
可以可以相当可以
超棒的帖子
棒棒的帖子
也是我喜欢的
我也喜欢这个帖子哈哈
我也爱了
哈哈哈哈哈我也是
已经打开了某宝
干货
我也超爱
赞赞赞
666
我也终于看到适合我的了
wow!
没错,是她是她就是她
谁不是呢
哈哈哈哈哈哈我也是
吃土少女的自虐哈哈哈哈哈
我也是 好喜欢啊
我也是!终于!
必须的!
感觉蛮有用的
你闺蜜:我不吃。哈哈哈哈哈
呼呼,我已经是第二次被种草了
激动!我也是
我也要安利给我朋友
哈哈哈哈哈好巧,我也在安利
我也是!激动
我也爱!
我也在问自己这个问题
我也是呢!超级喜欢
因为要花钱了哈哈
我也超级喜欢
➕1
我也是我也是
我也是!真不容易
哈哈哈我也是!
我也炒鸡喜欢 get!
也戳中我了
wow
要去安利我的姐妹
立刻去分享给姐妹
同求
这个滤镜真好看
小姐姐太棒了
➕1我也想要
仙女➕1
谁不想要呢
瓜子准备啦
同意
我缺的是摄影师吗。我缺的是颜值
也太棒了吧
我也想收藏!
是啊,我也这种感觉
是的呢,我最喜欢的博主
点赞
我最喜欢第二张
喜欢
我先!
好像专业摄影师拍的诶
姐妹,我也能拍
码住哦
okok
d=====( ̄▽ ̄*)b厉害
我也想要连接
我要多学习
小姐姐是我的!
同期待
hhhh
长得美就行
想换头
前排瓜子准备好了
美就一个字
那我就不一样了。我特别喜欢
搓手期待
biubiubiu
举手报名,我也想和小姐姐当朋友
前排先赞一个
这个滤镜可真好看
天气可真好啊
你是调查户口的吗
2333333
我喜欢图2
我家小姐姐最棒
我也是我也是
想知道怎么拍的
我也觉得,一定是男朋友拍的
这是哪里呀
我是你的粉头
火前留名
赞一个赞一个
同感同感
吃下这枚安利喽
赞一个
感觉是专业的相机诶
我先来承包
哼。我先来的
我喜欢封面!
嘿嘿,是可爱的小姐姐没错了
你也可以的
前排留名
小声bb)我也想
我也是!不许跟我抢
嗷嗷嗷我也觉得
同求
我也喜欢!
你走开,小姐姐是我的
爱辽
我也想用。。。
是我的小姐姐
嗷嗷,你不喜欢我了吗
太可爱喽,我喜欢的颜值类型
我也想横着走!
好甜啊
你怎么也这么可爱啊
点头同意
美丽
哼。我先来的
拍照新姿势!
我最喜欢封面
手残党,学不会
哇塞小姐姐好好看,爱了爱了
求教程
➕11111
求连接呀
直接被种草了
感觉很有用
好可爱
同感
种草了哦
咱俩这么多同款吗
可以看看我家的产品哦
行走的种草机
是的呢
新技能get!
看完会省不少钱
超有用的!
心动惹
哈哈哈哈哈哈护肤博主=大型自主实验田
马一下
什么时候开始用眼霜比较好呀?
有什么好用的精华推荐吗?
有什么适合混油皮的护肤品推荐吗?
课代表在哪里
求出个油皮护肤大法
终于更新了!!
怎么知道自己是哪种类型的皮肤鸭
一键get
功课不怕多,我又学到了好多~~
满满的干货,辛苦啦~
干皮还不停长痘怎么办
油皮防晒,求推荐~
求适合干皮的身体乳,冬夏都可以用的
已经不晓得自己是什么皮了,一会油一会干的感觉
敏感肌真的太南了呜呜…不知道用什么好
羡慕皮肤好的人
真的很实用啊
敏感肌可以用吗?
干货!
感觉最主要的还是吃好睡好!睡眠充足了,饮食健康了,皮肤自然就好了
真心求问,黑眼圈还有得救吗?
种草了
有没有适合学生党的水乳呀?
想试试
喜欢!!!
来啦来啦,又来种草啦
性价比很高了!
我爱了
安排上!
看起来不错
最近正好缺,可以试下
感觉还是要用适合自己肤质的护肤品,才能有效果
夏天油,冬天干,有的救吗?
看起来很好用,种草小大人
比心❤
赞赞赞
哇哇哇!想要
实名心动
学习了
默默加入了购物车
我也有同款诶
良心推荐爱了爱了
真的很用心~
求个适合干皮还容易脸泛红的
又想尝试又怕不适合自己…
\ No newline at end of file
好想瘦呀
这个方法真的有效嘛
我想知道有没有试过的朋友呢
效果到底怎样呀
小姐姐从什么时候开始的呀
目前瘦了多少呀
想知道坚持下来的动力是什么呢
需要配合运动嘛
不想控制饮食怎么瘦呀
不想运动可以瘦嘛
健身饮食控制呢
哇,居然有瘦身教程哎
女人一定要瘦呀
活到老瘦到老啦
小姐姐的方法很有用的呢
感谢小姐姐的分享呢
棒棒的啦
点赞和手动笔芯呀
一个月瘦了十几斤瘦下来真爽
甩肉肉
吃糖果也可以瘦的
从130到95的我
本来就很瘦 可是长得不好看也没什么卵用 高三暑假在家按摩鼻子和脸骨逆袭 去了大学才是真的开了挂
过低碳生活呢
不错的方法,get
尝试一下
练起来
瘦下来的感觉真好
我马上也要变瘦啦
这个最快什么时候能瘦下来呢
这是我见过最有效果的啦
我就是运动就瘦很快的呀
这个方法真的能瘦,我试过
瘦了又胖回来啦
就是容易反弹呢
介绍的很是详细啦
这个有什么重要的注意事项吗
可以坚持看一下啦
佩服小姐姐的毅力啦
马住啦
跟着作者一起瘦起来了
瘦身但是还嫌累怎么办
只要方法对,瘦身很简单
顽固体质好难呀
要控制好饮食哦
瘦了怎么都好看
要瘦下来真的好难呢
少吃就会瘦嘛
瘦下来才是好身材呢
体重从来没到过100斤
\ No newline at end of file
import traceback
import logging
from trans2es.models.topic import TopicTag, TopicImage, CommunityTopicProduct, Topic, CommunityCategoryTagRelation
from django.conf import settings
from trans2es.models.pictorial import PictorialTag
from random import randint
from trans2es.models.user_extra import UserExtra
from django.db import models
def get_edit_tag_id_list(topic_id):
    try:
        tag_id_list = TopicTag.objects.using(settings.SLAVE_DB_NAME).filter(
            topic_id=topic_id, is_collection=1).values_list("tag_id", flat=True)
        return tag_id_list
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return []
def topic_has_image(topic_id):
    try:
        has_image = False
        query_list = TopicImage.objects.using(settings.SLAVE_DB_NAME).filter(
            topic_id=topic_id, is_deleted=False, is_online=True)
        if len(query_list) > 0:
            has_image = True
        return has_image
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return False
def get_topic_product_info(topic_id):
    try:
        has_product = False
        query_list = CommunityTopicProduct.objects.using(settings.SLAVE_DB_NAME).filter(
            topic_id=topic_id, is_deleted=False)
        if len(query_list) > 0:
            has_product = True
        return has_product
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        # return a bool on failure too, matching the success path
        return False
def get_pictorial_tag_by_id(pictorial_id):
    try:
        tag_id_list = list(
            PictorialTag.objects.filter(pictorial_id=pictorial_id, is_online=True, is_collection=1).values_list(
                "tag_id", flat=True))
        return tag_id_list
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return []
# def get_topic_data(numtime, numitme2):
#     try:
#         result_topic = Topic.objects.filter(create_time__lte=numitme2, create_time__gte=numtime,
#                                             is_online=True).values("user_id", "id")
#
#         if len(result_topic) > 0:
#             for i in result_topic:
#                 shadow_judge = UserExtra.objects.filter(user_id=i[0], is_online=True, is_deleted=False).values_list(
#                     "is_shadow", flat=True)
#
#                 return shadow_judge
#     except:
#         logging.error("catch exception,err_msg:%s" % traceback.format_exc())
#         return []
def get_category_tag_id(edit_tag_list):
    try:
        # fetch the search category tags related to the given edit tags
        all_category_tag_list = CommunityCategoryTagRelation.objects.filter(tag_id__in=edit_tag_list, is_online=True,
                                                                            is_deleted=False).values("category_tag_id")
        # pick one at random
        if len(all_category_tag_list) > 0:
            index = randint(0, len(all_category_tag_list) - 1)
            logging.info("get get_category_tag_id:%s" % all_category_tag_list[index])
            return all_category_tag_list[index]
        else:
            return 0
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return -1
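# topic_has_image and get_topic_product_info above evaluate the whole queryset
# just to test len(...) > 0. Django's QuerySet.exists() asks the database the
# same question with a LIMIT 1 query. An equivalent sketch for topic_has_image,
# reusing the imports at the top of this module (topic_has_image_exists is a
# hypothetical name):
def topic_has_image_exists(topic_id):
    try:
        return TopicImage.objects.using(settings.SLAVE_DB_NAME).filter(
            topic_id=topic_id, is_deleted=False, is_online=True).exists()
    except:
        logging.error("catch exception,err_msg:%s" % traceback.format_exc())
        return False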
s_sgMyOm@shadow.com,q6e7mubvavvrxpd99vbegxphb1uwa8o1
s_4HRYFK@shadow.com,7xrdxc2j3ysers1woj9sl4ued4o9mp0b
s_KaRMJO@shadow.com,2ey68fbras579cky919eh0s9im1ygycr
s_07ssdt@shadow.com,dayvhfinn7vep32ubr986q9pg3hwdg2k
s_zMc7wq@shadow.com,vdl009c25x0q7opb83e7ttztysg27wij
s_zA014B@shadow.com,ckr5mwswjrc83hyigdh8zg2irwttlg29
s_akgPyg@shadow.com,ico3dctr7n362vu6mhb3414a7mlfbi46
s_vKSOXA@shadow.com,f54zvb9by32pgckw57phglgi2l1flanb
s_oi7M2a@shadow.com,6e1xjtqi1bymvqdvqizwr33dsp788y0j
s_lOTICx@shadow.com,ukj94xw8kfxyv31iqne8oq688nhn6xfv
s_i4rFhk@shadow.com,haa78thx4mzwg6tw5i8ue4cq4mfq6mtv
s_UPqB4b@shadow.com,wa8bcg98m622vp5pizwldmzombvrbgw7
s_do9K3h@shadow.com,izh9pv7ppowzy7sbpfj7m0wd40cptc9e
s_gXRMNW@shadow.com,l6n36kyvef1uisf419ciqwsy2p9numen
s_GGopQR@shadow.com,1airpxf6mo5gmdnmxh7gjvodb9e081ml
s_be9Zi8@shadow.com,qkyadh7im4h2p019wc7qkbhtvf9u58ei
s_hDAJDM@shadow.com,88s3s9jre3sqm1qmqdctbfbo52q25fvp
s_yQJV10@shadow.com,w540re4anzs2arilzgzsx9ga13oma74u
s_axSkdT@shadow.com,jehghh03lj2kgalw6saqpqarepcqdf39
s_QI5QFa@shadow.com,lzov68m8ewzhwvon34gzajkw5ubf8duh
s_ERIRTt@shadow.com,1saqarawjncshadlfb5mg89lonomoy0c
s_zNA0qR@shadow.com,4jr40f3pq8j6xzfi2lygffk0cmrj520h
s_by2EKN@shadow.com,22esu0b9cduq73sheiv19i1zl4qdvo6c
s_yRDyTe@shadow.com,8i7xtftg46dgr3havreg0yazq6ov5kan
s_mAu8xe@shadow.com,5vp91yszbsxcxja5nad3i5ln2x4zlgmg
s_xNXLax@shadow.com,iywthxarr7utzivt79cernagk6uy3gj8
s_xkRtne@shadow.com,vnwpxxwmnai45vx4b680ewlst4m47id5
s_Dpguo5@shadow.com,lt0ywfrfrivaf64uv91vsxmdhzzkchys
s_i7ILAu@shadow.com,20ynh8x55lvhq69wpsa9176xai5m47n6
s_fjgn6l@shadow.com,r1yvjodxg69n9iy6n3jo0q1byc8hfhh8
s_iwSVzk@shadow.com,0cewo3qve1pymlg1658t6d6fr8ruyi04
s_u39tAC@shadow.com,2cvqq3vnkwylcv3iaz1o3any7v7dbsoo
s_BqeGFr@shadow.com,pnfyi0bp7rwpnwnkcf4g2tltqklfn1ai
s_cA91fz@shadow.com,mcw2oy43409xrpoad77is00qq3d47kca
s_ESqCxg@shadow.com,ybyzurf3jq213e6xa0dyevynu5ugwef6
s_8G7dcC@shadow.com,gzcubkhtx9ls8pdwa5jm2l7msntadc2o
s_yUirRE@shadow.com,3fm5cv37y1ura53klt5a78ekn7bnx8pv
s_upU0XW@shadow.com,e518tby0wcfw9tv05q9h2yv66urbpril
s_e1gsjY@shadow.com,2fk1pcjj5qvd7bfckxntvcve6vkt5ndd
s_ShQyke@shadow.com,jt0mia5s42b3wmbiw9c1lbl1xszdzgdg
s_KzuAYn@shadow.com,uy87gofyorbmpphi5858xxvkvin0x145
s_hsG5eH@shadow.com,zucrgbfb3bzy0lfgd5qa3eq5s2t60rwc
s_cF7TuX@shadow.com,jwp20fa7fvbl25o17l1815tco9d3q9ip
s_bNzT3y@shadow.com,u4jkfqkce797yqlmf9zty84m03dsg2d7
s_E4I3Xs@shadow.com,a3mg07kxritcita9temn165sovth2jm8
s_AdUmQr@shadow.com,b2gfwxf8epm42v18hshqg0ysgbh49sma
s_JU1HJp@shadow.com,5rl71tt2c1k7n1qfu9v9gma1tqxjwpgg
s_e6gQH3@shadow.com,q74zxcymf3y31kwro8sjgmoeddny9zjd
s_YSpR4I@shadow.com,nj0q1v96026rg3qm1lz0gcwo8vgafc52
s_36xYLw@shadow.com,89z4gud4yqzaulklc3oo7v0i8k5aesyd
s_b5CSos@shadow.com,yj4r2mplaih29bzche7svrarjk8f1bdp
s_fGrHfu@shadow.com,kyr4ldujpum2xglqb2zuxemv5khcptv5
s_OTNbWY@shadow.com,aq0enggi72cl08npe6zva2jo3khdkurv
s_BmjUyS@shadow.com,wgfipgk0opq0763gk0dgvfdemcex3nur
s_5gjbBq@shadow.com,hsgeuoqxupaqoq4gpdvhwajbue8iin0j
s_o1KuZK@shadow.com,f5dnbpfv92yzmpoormmwwf0g516geotl
s_4cYuPa@shadow.com,lhanqc8grtesf3j3o5f5mtvh0s7vhv2z
s_ql2jNt@shadow.com,qdk41ahnixsq1vf6aj9l316o64yc0j71
s_TlQiUV@shadow.com,q3mn6rm3icwam02vfanq2u5qd5la52wu
s_oD1Hmy@shadow.com,2c8ruombvfw2g6dlcbdhkq86wk8n0ugk
s_lRgLWN@shadow.com,p2gpnilvggf8c4omtdn7mhso38woj10w
s_hO4VFC@shadow.com,mealdrlv8pmtois96bbpucant4sud62w
s_AviTFG@shadow.com,dr0gabkvktzd9y60ls4hqgtktkw2cqxr
s_2Lago0@shadow.com,t6qvoiy9f02tsfwnlrkcbt84duxbcckd
s_IN3CIK@shadow.com,ghcqfa1g9skknnq5tiobvcf409yrgll3
s_49T1iu@shadow.com,ac52x0yrciyeuyrlj421yssmudat5euo
s_QZ5xev@shadow.com,9mjcrc34jumh14fthz1vba052acolca5
s_f0ULrl@shadow.com,yw8z4md1875apbp6n7wmjo2vfwx7jbzx
s_XubfZG@shadow.com,itqau9wfonqsfne0epgxj1wujf2kwb5z
s_PzH35A@shadow.com,d27k8tyljxopz324c8f3i51b58wjp2n7
s_3chHX1@shadow.com,df9818lmy1iv7sk3b2g0ds2ld3tz9wjq
s_Eh80vb@shadow.com,0jqcyzd2ilgv3uxnn5r25z0e8wsb13zs
s_XAkiWi@shadow.com,08ieghnl0pxs9q8e3usdlvsrax570tfx
s_DsRsnp@shadow.com,erqxz3yppp0ievto77p7gxlu5k1gpfws
s_S4WZXB@shadow.com,88nvgu4nfaips2i2ujbgfsbnfnpott2h
s_QqYjNu@shadow.com,jmcn6msp0p7hzsmnke4x44zmq15xg6w6
s_FMZSoT@shadow.com,wqxus6p59znufy1ebp3aibbh0ohc9gec
s_NTvlOB@shadow.com,xi73z17enxvzhmwl2vwxra7wy1y1li3l
s_ei0zs6@shadow.com,trri4yzhkm4riyaciqqc9qgzrgkxrfk6
s_KnFg4t@shadow.com,wx4n4jsfb0e0x07rmnu0qiyya4kgrswg
s_ropbJR@shadow.com,ugmhxwnzjwm5oq948d7kye8b48ipmar7
s_3Q98tn@shadow.com,fe2rud4k1hpqo0a3q2qss9cp1tr42ef3
s_EbB7e0@shadow.com,o4t7nbfnt0bf1izl2h9hc4261bg8zatn
s_NdwviG@shadow.com,m7plp4u2eifpmmlf03x6y60rm4gpf57e
s_fIH36h@shadow.com,fh684k6a6c9hclx83dc94wkj77yw1v1w
s_HrFCH6@shadow.com,ondb93yt62fnhoqq3z90inj6ptvkndg4
s_3ixLfv@shadow.com,8ld47ldlo290xqyjdzxqflfnn33rsjqc
s_IqssSO@shadow.com,uk3faynqsjqyc14gxglltcah3t794um4
s_V1GHJz@shadow.com,8nhlvej27jtlzd0zpie5bbc208h302im
s_Lt4V85@shadow.com,oul24dusne5fbbncy736pb67uboaecha
s_exsZWG@shadow.com,9pba61a72xhoxecw1tk9k6fbekou379l
s_Py0oZ4@shadow.com,dd9p3w6580zrs3csbxn1axyq43nqkx7y
s_W4RqbA@shadow.com,asn3dftvyuer1yaygqsqdr2ylizpjekq
s_s1L2lv@shadow.com,y4jqrabd1vx5ie7srnamorb15hut7dcb
s_hgZehK@shadow.com,2jwyu925a9o0yz7d36041k8pepjsfw00
s_KU3D7m@shadow.com,wx7zvih9j5lify6kfk6b79itm6n3b3ij
s_JwVmFg@shadow.com,4td9y6t2shht9jkkwsxu4hey5ncn6kfv
s_uZRiM7@shadow.com,glv1oo8j50vn817fpz4lmdw82l00gw4t
s_LNhnOE@shadow.com,s6q9oiw6pwknrsm1ssr5f892i9am4gtw
s_YcQsmL@shadow.com,56q89kuwlr3ad048z2y4f8qu9cjt5gpr
s_AAXg2x@shadow.com,a4ta3l3l1hvcknywp23ruazrobjhve2u
s_1N8qG8@shadow.com,dgivx3ija9tk5jclcijcvjihy173v8aq
s_hpC1IP@shadow.com,71rpxjsdqfrdaanzsrmazofje0ld5m0e
s_VWL5Pk@shadow.com,5g2u01lkodeazrfh2vy7vx3vjt86o7la
s_y8atXB@shadow.com,p5fq5irhg007ds56ujijndb7tvnl6rii
s_igjXaA@shadow.com,hakpaiwehf97zmgx3b0rs7hmwwatz5tg
s_axJVqe@shadow.com,ijwqlf7z9m2aza6c5u2a3y2homvf9zql
s_hhSSqd@shadow.com,waif67iix2v43hm3l4tvg1ikt5uo886m
s_8Evf2c@shadow.com,idgvd0unhqthen7j4oaae5t47r9odzp3
s_EvDs7H@shadow.com,toyecpfr4sjzj89wbdl60hqwyqt0i0z0
s_F6FHfH@shadow.com,6etiw6pbhdy140wmnz5dirwujhvyy7qu
s_RGun9G@shadow.com,fr8pvj8h1eltry7hhrlfwvjdcputxy3r
s_qVKF9Q@shadow.com,wpkfw64sonqdlj7artnt9mio61qao5ny
s_i6ffeZ@shadow.com,p8pohcur8dhg63t90wxcvx5a8jykz563
s_E0txbx@shadow.com,faxgcpedzjts87t3tdvhze6oni1jjast
s_DVwDK3@shadow.com,0wp1yi6svv90t7omympr1d3jnnppv0up
s_gwVryk@shadow.com,nyow8667n7etwtf78f06nthiyrqfpdnk
s_Cm7btb@shadow.com,b1dalla42pnf5vvy7g14myo4lgbf0nsk
s_WP3s7w@shadow.com,793mh9sjjvwzvojfaifhk6j5mmewkbou
s_ZdLDZo@shadow.com,mfp2s2i932wqp4xe64jwszasz66wip14
s_C2PiMv@shadow.com,sucxkpax2yc582vrdwingsnkbhzipd71
s_GPR0C1@shadow.com,khh1tdp47f9jy3yqkhjoevmp1nny9oj4
s_wnrxFo@shadow.com,sw4zhq4s1c2meqhq4x8n11s353ncafai
s_sNyLO4@shadow.com,vxfdgm1f0etg8bj879tu1s9arjn9pvjn
s_08DlWX@shadow.com,4nt550s7nlw0vmqymza33b64drann668
s_rTS8qV@shadow.com,12c5qaiztuswzlo8b3nsnssdfc6zpamv
s_nd6HiH@shadow.com,qm0wl1sfzifkbxdhoko1vadep0820ze1
s_a9cIf1@shadow.com,gy2s3aa73jrax3om2agd409fwdfmvami
s_5I3fN3@shadow.com,eu28nj4t65mcrycewe7yavommn3q891p
s_TWwKNi@shadow.com,gmwv0e4ogg5l9emh797r102onsyf6ep7
s_WrVSmD@shadow.com,brpoxhdk8oopkrt6nq8u0iqgyqcnsphl
s_Twwxb8@shadow.com,xlg1s4jin4re0whqsf9wgqzixxiliq1p
s_DJvlFY@shadow.com,bhgj778pobifc61ura50hxbd6wtth7fl
s_KTcosq@shadow.com,58xe9zb9s90qigvps4bigp7d1xtd5lyx
s_5o4RJL@shadow.com,rt88azpiv0e96kh5k2t2511xr9fwce0p
s_hsj9pq@shadow.com,k7zfdqyx3kaxho37przth5h2bb35csu1
s_7TOm5v@shadow.com,g2qut2bv4nk0ottrfg3jhakcidsd1csa
s_enXoFU@shadow.com,v4lzg1cex6g2q4pm4br7g3nmsht2cg6w
s_F9hH2t@shadow.com,ll6u3eeuazhf4csk1q6y6j9vgxxv52l9
s_fJD9SK@shadow.com,fqq290itpdhj9zbmpo6uswlhescmyklf
s_895iAz@shadow.com,bcli5ah0yq3n55drruyxdew7cgoq1ctg
s_oniMPE@shadow.com,d5yxi2y1qzrb3saclzz1ayfaqen6tm3j
s_y6GymP@shadow.com,ji98p40cuqjj6h82psonfjt7f5vr6fhh
s_UGN1aM@shadow.com,bdmuq1u16k5ajsxobd5lu45mmhqlbvei
s_1RCzUi@shadow.com,iw9f3sph9n9iaxxhgk9fjh4b6w3gf89x
s_GPok7I@shadow.com,hpzfmbn901jf4l31ppm462dx2vtls9sm
s_hX41uJ@shadow.com,8ktb1gf4nu8o3ntit3gaswvg88qwxogi
s_jYDXVu@shadow.com,wmp8o8e046id0vdlo4e4vgy2av8y8q98
s_5M8FPz@shadow.com,n1twbjpip4ueo7vudmsv6cwf044e7qi2
s_0KyxOu@shadow.com,ul9owhtt9gghvi5mn35w8erq5tci67yl
s_wWtI7X@shadow.com,mpreg09cc2b0xt0zy3h0dw4v04tafcaz
s_upwHbj@shadow.com,3fl332d667u8ofgwi8o1cwjdnrhscvbw
s_kWGe6S@shadow.com,meuma11q5ld7q5wus1uz7mldl0hez84o
s_GtpUFG@shadow.com,e4k74bgzka3ixfs5rnax2h3apwg3kdi9
s_yMfJP3@shadow.com,qs4qag6gh9yqb0sgg8pypsn1xuomnioz
s_g3zIuG@shadow.com,y4ih9v3upt54rxbe8718q15cmxd3qgsq
s_bLymnh@shadow.com,z5o49tg7clz9n7sm02d3kqyszdpac3q7
s_xvwj7J@shadow.com,yjdcmxqz45rmzbvr6b4e0s5fk78y2w5t
s_n2rgzI@shadow.com,8dcfx23h5kyobwgbqxluca19fea8ke6f
s_kfGtFE@shadow.com,w17uk8ea2hgxhipn81scx9zo4kramet2
s_lK7QJJ@shadow.com,8dll7je4l7i5195i12ltw0tbfoo0s9ec
s_bOQCNQ@shadow.com,7ynurjv5g2nhjg6jlkpwipgvlz6oapsb
s_1mypap@shadow.com,3o89z53sggju07ao5hnl2pa3zwdr34ay
s_ux8AJL@shadow.com,xj1ume77azacqn5iboz8858mx01fawwv
s_twoHks@shadow.com,djrx2qfnaomgoccfmxa9d09pvq6u1vn1
s_xgZqN4@shadow.com,w0p6zv3e7j24l9k20i1p5js9gl8hz4lc
s_MaChwe@shadow.com,1xpwo5kp4y5ja76qa6ay37829w9le1u4
s_NzQnI9@shadow.com,f1i4gmed1weh0c474rr8aek4kwqbv2i9
s_87PpDu@shadow.com,b6byxvn5ck910b8enrucd590o8i9zx67
s_UTQ2Lb@shadow.com,vd10nl0ox149edndvjfgk5iwp5mj5avs
s_2ZlLCx@shadow.com,pps6epyfxwt721ckh6e79mssq7elg5lz
s_5FAGd4@shadow.com,666c32c5e5ga8cd1ccp62uk60m671fh8
s_TNhLPD@shadow.com,jnpsd6tq5h2ao8q33cfdfew3iyb8cz3z
s_uujHEb@shadow.com,ol86l94d2gk7o4ifwzwscgjqsr1z59qx
s_wBqKNY@shadow.com,247vkegrtkxo5rftqw892m24p82m8jez
s_MrbRuR@shadow.com,r1zo9eq5qqxe7ycvpvhupifo2caob7eg
s_FPb33o@shadow.com,dcznk66kvmkml3bho6aw4hlx0bu581kt
s_3c7tUk@shadow.com,rikum5mxnpfdm4x0cctuhh5vjrrqixjn
s_s7uz0U@shadow.com,78houu5vaz2s9vnw7ghhy4aoroz6naoe
s_v3TI1p@shadow.com,746l01xdb8bl2pm4memxg2maqhcy0oph
s_djjDaF@shadow.com,fysaanrulvuf28d77k8h9rown9toheeg
s_NjoRTW@shadow.com,pdvetscv2huouwkdrd0yz20gh7a0mf69
s_CDooAN@shadow.com,9i4a1f1gjy77fqvmk5l693d8qf2gfasu
s_pkMFSS@shadow.com,kohxlgnes88hva6t1pwz2kh0wy8uj0q1
s_OU8QCt@shadow.com,1rqwvfbvhctbywmkj6bveaqwsiz0vuva
s_rSlV4T@shadow.com,25hkkyyk13yt5xp8qouywtb1j5ird3an
s_MtxuFr@shadow.com,3vz0bq304xbhln2ec687x22p0j70c30n
s_H36e9F@shadow.com,vdnvv5nunv8ewdd43ianjp0qpew4vl75
s_xnXhe5@shadow.com,mnyi850sy4ami1g04pirnat8vx2pxzlv
s_7nWGkq@shadow.com,v5je9bgxch4jyhe0exdh2uo8uw9soa56
s_KtWEu9@shadow.com,hki55vd2g2sfqknlchkpqw2mn9s137em
s_FpzO9Q@shadow.com,uy9qg3uyukgat8zrz5z9xzcy9kx4kfcv
s_zhOQcV@shadow.com,pbohttjeszd4o68efmnef6c30cn4sm6c
s_T8Gcjy@shadow.com,zuhks2a3xfax80u84v7cwjviefb5djzh
s_ZsMr7d@shadow.com,qplahnk9tjb1muod1y09o42aen8ozna2
s_XgWX4z@shadow.com,pcvl37f2e00tdq1yunvki7j1ievgrzoj
s_1ozsiy@shadow.com,75sqqw5ugwcy6dxjcrdk7ftx3y7pws92
s_UPD6bx@shadow.com,2cj1adi70rtsdlhs8o5gxkd7v7xk4oyx
s_zsvmqU@shadow.com,dlyechyzmlxdu6fmzccjnzfelt0gg59w
s_HfHwKX@shadow.com,r114svymrk8z8ajfo220f1i9xme7s4wc
s_FihASr@shadow.com,z6fdy7t3ngw5jie30dbi7z66plpdfffb
s_KRN7h9@shadow.com,6bfmtcwtkwrm6nojznf9934607rl9wfr
s_Jmkcc9@shadow.com,vfpcs5z1jmxwbw4iy2vzrh9qmphst5yi
s_d7TIgK@shadow.com,148qh4l1adcbc6743a2xxosce8ykytsa
s_gtMV3b@shadow.com,o1oh86422l4kywsq6bknyjlcb1afa7uq
s_XVsXeA@shadow.com,zutmuptoh6pnwy3odakacmnjnvyl7fve
s_edvBYb@shadow.com,62llgh0s18pls7uvqiqfglbg70fij89k
s_YiX7OV@shadow.com,slpvx2n3s38vyzd2b73ds7g1ftbcl708
s_EAYAMe@shadow.com,81r0y4zu5e9sickbh5ary3dijlwjmz0d
s_IR0Jmz@shadow.com,if9204katxboe38omdyijes4qmbe7klk
s_KphQ2M@shadow.com,2urx117z1a9mpybi0xsz2gf0fpw9uh0z
s_lPRv0u@shadow.com,31t9f21x31wx67b8x7s2wbfsxr6xjhf1
s_7FUcFf@shadow.com,8dhinuap0vab16mxqpmcujtmy1wqdil9
s_FGgy48@shadow.com,mqczl4p5v2ht5am4pkshral524onpzeb
s_ltsupK@shadow.com,hef1d43epcznxz5p1jn3rw6b0yybdrv4
s_Ee4QC1@shadow.com,ye0pjcwuzp2ot111diw4m9hckeuu74rh
s_gqBA3v@shadow.com,det55wtxiailldhio9fqj2qrfsl7o22a
s_embU8E@shadow.com,3idla6uxyidv3q5uteqryusv46gpskhk
s_0I4C0g@shadow.com,cqa37y10fb9tk6d8lbqgom0p8px9fla8
s_EzpzqV@shadow.com,z3xu4qt7mxsp1t0fwjx1fnlsk6y1eji8
s_98A1Z2@shadow.com,41w3blyzjwng2e0bry148gntq5au7d7o
s_p5kFP1@shadow.com,ouk4sua14zpbwbq3pkkv9vmw7dllu7f2
s_6srtjM@shadow.com,pjt90s8qdqnll2vv96g3w2xau8jfxv82
s_FJyYaA@shadow.com,wmguhltzrwzbckb721qzmqxl36f7t9ps
s_TSJPo5@shadow.com,0upuq37e1ax1adii4l7xott6pdci6f4z
s_8nvaAF@shadow.com,dzuj6oa57ammiviquc38qx54ics6cgzm
s_6rkYkC@shadow.com,odsw2cy06gp1vhr7mnk8nluzigxql4tm
s_rUnoMo@shadow.com,h7xtmgttorkqmcr6r185ctsaehrtzwqo
s_Z8PP5J@shadow.com,64bcbk2oemgirhzmr8iue0oip1av7uo9
s_Q5LUMj@shadow.com,vtnyzuvfpklrhzsj9ujy7ld7yood8q4p
s_v26xAA@shadow.com,f6ua53j7h8aidru12jw9t93nn0zd7ji3
s_HqZEg7@shadow.com,bb3z9ja12yxev4dnd2wbtzh9ntygu3y1
s_8gSWmC@shadow.com,aw3ca0nzaiod6yl1kjar4x114s0s5dws
s_fCQcu1@shadow.com,ud26iadqiv23jwr9pgie5dv7t4i34735
s_91xnox@shadow.com,9l99mnko5pm43r7oufaie2lrh7g6i2pb
s_gvlFut@shadow.com,foidf232uwpwpzd69xzmyj7pxp2g4z9f
s_nyfbCS@shadow.com,cbso0y7etx7nbryvhq11thqyqu4xzh67
s_uA9TOF@shadow.com,ic7xrqpb0d81jfkb222nr89sbo7rvhn6
s_JLlz1L@shadow.com,yij77pjdrziqlsegrkbyxtd08rl28qj6
s_vm9MFh@shadow.com,avrn6qt9s16afhj7zi4nfykyznfhiu2j
s_Lqxoqm@shadow.com,rwqo8phr5mtleh7eqf4xj1lcym1v2i8x
s_uhXH6g@shadow.com,etpw1fxncl700qu9y7bkkvr5f7e3i2zk
s_Wyn2gW@shadow.com,z2ijf5qz8ga99vra8mdfmo9vsowglhwm
s_rtCnmO@shadow.com,0gw5id0iv6sp0twne39gha6ykwsl5jw5
s_RyIPYo@shadow.com,jy4fqisl0huxunlpzizg647ed8cuamno
s_bS8mIT@shadow.com,bc0yujyzwdg54bz8dqu2lgwx45gt1gx7
s_eUL2wx@shadow.com,9zyhsr7bd4rduz4eed17bjpxt8240rr3
s_rggaRa@shadow.com,bwg02suxav71nub5eukpq4u2msh9bml1
s_T93ZyL@shadow.com,gh2b50alox6oqvj94ohbrx89xzqh3ufy
s_qnVpkI@shadow.com,mzusq8y9cc5j6yblbssyfcnaqmsqnn8k
s_hHpKxO@shadow.com,w2zgpvk9o93jxp1e4hn63cs26zhza2li
s_OgaFKI@shadow.com,5wgw3wvwbsubmsyi9snbpplf4o8v9zz4
s_kJWvzA@shadow.com,muxvjqsv2dv2ev1gjs9qah69z6oxz5fz
s_ilPCgr@shadow.com,tsa7fjhvu1k8n1nw0nxvmgxtos89uxt1
s_0bCas9@shadow.com,uw3tvsb6mv9n8endyxcixefqqwfxdffg
s_gJ4l7J@shadow.com,vim8av4whoosc2k457ufmygg9xqe7zgk
s_jBC0ve@shadow.com,u7x6hy9imwwpmtqjaevym7ctqti12vdh
s_8e80N0@shadow.com,p97jkw5jq7fr8tnbkojkv61m3gjaxpls
s_4jPHc1@shadow.com,j5daxz2ccodn9c2flfvp7d8bcczgkrd9
s_2LjC8I@shadow.com,l1zjxqqfch3t7ssc0wv41wlzb5o8uuvs
s_XXGEwN@shadow.com,j9m47bndqyk9q7wdaofild4a3rhn8qxz
s_VbZq9G@shadow.com,42dqn4qtge4iwz19prb8e6oygicjcyvz
s_5rfk9c@shadow.com,llz8lyblip806o9dichfmwx361woegdw
s_kzuGZj@shadow.com,j2gamapr96oc5e0es9c78jusf2vy5mf8
s_yUUJmV@shadow.com,2m3xesb4z3efe3w5i4m7ro76oi3tb85w
s_Kmd5N3@shadow.com,7pu2hnms0qwaz2bcqhz7650jaoz1w1ia
s_wvsmbJ@shadow.com,5vngv8bfgdp417ve4svu3ngtw4jhq1hw
s_ROiMbb@shadow.com,p9tzj2jern0gl0ggrscr2qxmn1hi25b5
s_sAZ7L8@shadow.com,zrmcbamfb0nk0s6kx553klilkc4obxqu
s_UJWzA6@shadow.com,cp7yx5etum0l2weersuf3sstw2mdm3cf
s_mCazhk@shadow.com,va0sea916qribcjaksnw7gldw550rgt3
s_cuQRSf@shadow.com,50bh9jnpe0kt9774g143vd5cs0ps7ump
s_3Us8vf@shadow.com,j4gdzt8wzy9row5o3npvm6eq2qq69fzb
s_pxACJM@shadow.com,g8ubkoqor9elcs1rzco31zct17vj2yd9
s_jRQhe1@shadow.com,qg7m9beu9wv2knr3wmg5onttrprtocz1
s_O8qfPf@shadow.com,7oz4dk68muw997wa2kgvj0usags06lna
s_rQC4Tn@shadow.com,4yxriq8lgmghg8qfi1ejqm2a3orfspeb
s_09lKhD@shadow.com,78yfum8m6k79mlu935sce0je8mr1trmn
s_J3uR9F@shadow.com,35ntc7fwz3eesg1fnuc48k6h0kuwuvf9
s_8vCW0n@shadow.com,ghpuw56jn1tc32l08mpyrcnil8hsoq0r
s_53EyN7@shadow.com,f05d1gfau3cano47otqzy7hc8o9tyigi
s_jlNKoI@shadow.com,t4rnd01lxtk9rxao6aiagy24invset3w
s_prpHwa@shadow.com,1av46vm3ebnpfhlcv4oiluuzlevmefy4
s_NBpqo6@shadow.com,e1l7mahst0x72qbnekxl2cp4enxmj8eq
s_0YjkyC@shadow.com,s2pqk1r29hj9v3hh107fnyvf46gjq0ar
s_oD0SPa@shadow.com,czrdhsl5nubzm3huwv0ubiw6txnhkhnn
s_cRZ820@shadow.com,fck3nygi4e7ajb6juruiw2iukfllgbqi
s_QuSUc9@shadow.com,yqnvuibqgxqxu8ou4vevxglhoc4c17hk
s_1aQRIO@shadow.com,pbkjmshl74eyoyscqrfn9q6qequix8lg
s_8WpiMk@shadow.com,zetzxhidkaise4zc2pv062nz3b2xghug
s_mFbRwb@shadow.com,623ci4p5arf4uax8sgkg2rk9c32xc2ca
s_P0Y9Tk@shadow.com,2gzy7349a4xshjwtyk947st1y950mxdg
s_GakHBe@shadow.com,oql90qsv40u40v7w1l5dunoikhcix4ry
s_XQ3BZh@shadow.com,ub2ykjue42tlfhc2mmiarxa9nfrxvfea
s_gJDa8J@shadow.com,srgbwf6qjhshdmfye3d4drhd7g6s00ya
s_6MhNqA@shadow.com,ygkfsatjj1myhbf3z00dzeia1af70dl3
s_fvdNZW@shadow.com,e0rlhe2sbkv9vnokm8e9prgg31kk2kk3
s_5J2GxW@shadow.com,ar5g9aioxqurl8j6yxby7xahobqxqlzk
s_JluV3t@shadow.com,o6lit7wfkpp3ml4lqc8zzu2qw6w0qd25
s_Iblrkj@shadow.com,p1xu2u8p2msw2z2fwhig5iel4kh1fwe6
s_335ZV4@shadow.com,vxronco8v00mebn0u7pi9l96lpddpmo9
s_CHiVph@shadow.com,l9ne3cxrbn32roqbcrj8eqmljvsb9ogf
s_nYtmvx@shadow.com,0iyo1x9k2ab3ivtqoow6mecdxje4ag04
s_KPbWLr@shadow.com,51l35t3sqhlzr4zf8z4nj7t93440tciz
s_lQKSDY@shadow.com,9ma0rebf4kzr26drn5uokh5vth4dba76
s_Du0nUG@shadow.com,b2ud9r112esnjonpt49iwhp0wwhm5yae
s_DTILiI@shadow.com,27uou8hj5ybeslrquv4s6bp3mubo2g56
s_NzzDIB@shadow.com,a4xfcs8yz9dkbyip0jpvjx29xdkmggqd
s_IJ4WKW@shadow.com,eu7wxcc8ftchhpfwhjqwibo0il0ttrxx
s_ChO3b1@shadow.com,ld3zj62cxuvfhczmnglc1zfy92t2ycjw
s_kNYvxm@shadow.com,628uiy8g3qzo5fqyu8se2y3ymwrnfkns
s_UJDVhN@shadow.com,c5yebunl886nyfjlpqfz1cbf03hekt5d
s_pPYeUT@shadow.com,ui43508y912hz2tbpxhg2u9s23cc5vuy
s_85Kgkq@shadow.com,t1qlqq3anrq5knytt6t2qilktchl22sd
s_uTPfeO@shadow.com,0c739i1m1ybgr9fh3hppgqo7t582k61b
s_nO8pMn@shadow.com,pxbuxk3oo3hmt26auqeifpoxwnnp2xmf
s_Jsgrmu@shadow.com,i8juskdkgv2hw00ao9a4gzck4sffmaul
s_BXqhZU@shadow.com,tuf86hb629xedo500z5rf52gil6152ht
s_V11qVt@shadow.com,ol5qecmca7scoa2djto6h2r9nhh2ddtj
s_dIHw1i@shadow.com,ozz6f42s2dqzgixgzixvnrsdd6fe4uum
s_nh6qPY@shadow.com,a50re90a4gejo9xomedkj9op4lhi0ds3
s_OKKdnO@shadow.com,l7ytmhua4keuemru986o0a5gc0opksxo
s_68WhjX@shadow.com,vdxjd8mmtgpv8mugx42x8hvsdmeiw4bk
s_8FXzfc@shadow.com,b77wyj4v2sifc1vngvavh5sbt2gu54ai
s_FJSZGg@shadow.com,icm69ctqr20wzjp5m4h2lbsgtxu5ozl0
s_EkQYST@shadow.com,lqa7u1iux8ssuaja9c71jglov76n8wkg
s_LbXj8I@shadow.com,efingb8fqf9atxo9kx38w6dm8ggz9y1b
s_PewDvT@shadow.com,flyc0ka2y8r01w9tfjjgsvx6voq72ogo
s_1wfiXX@shadow.com,v5zza8bx22gxk1di9x7mu1edpcg3o1vl
s_tIpF2Z@shadow.com,8grxmrg5bwa7uvfw10trmxkhe6hcmi6z
s_KifGet@shadow.com,pvyuf6gxfi5t440rwltakqjodimx94ik
s_RpvPdS@shadow.com,nmtnwa5b4vwz0drrck7doge2ylcgxr82
s_a30n4A@shadow.com,258963bqu5e05xt5e14w3oemvx9l0dq8
s_KExWNT@shadow.com,lxjmgp6dqprq5uyjtyb54ir81v3f9rda
s_eGi9Rv@shadow.com,9envlkgetbgp7m0llu7ygf2rox5tf32x
s_De6CKT@shadow.com,jfuiuybikmh6y87fwcx8aor06urq5qrg
s_o4Fr6f@shadow.com,7vjo6gtv1hx4hupt0cyp3i16e1apnuzc
s_vDC1eS@shadow.com,ulgzvxg55w0py7z5j33tglyhi47kuthf
s_pl3E7A@shadow.com,i97ne79frqg74ojxcmdewvew8jixzi4b
s_HttzBh@shadow.com,wvxxzro825gi9qckhvpo402k0216vjuz
s_vAhvMd@shadow.com,e9pc5aaxcz48kq2b1ie7yw1vw0josklf
s_w1S8S8@shadow.com,v4tt13cydo401aux2wxsj23l2j5inqj2
s_UcMCt1@shadow.com,6m81boe2efpmv6ujm3xs8d2dhykonotx
s_8ZpViX@shadow.com,ch2wxb0908s1zixjc7gdfmn7t3g4vxx8
s_UGjdzs@shadow.com,ic2lzpuwow6i9yqu9o9hiusmu14upyry
s_17kAoA@shadow.com,clyhyq1ggqdele7lyp7recl7jphpl406
s_741wfB@shadow.com,u0dui0clsa5vsdpef2uefg62nuqk7m4y
s_f5Jj7b@shadow.com,aijgedm7byypbt5l30po13gzsi7jxylk
s_CJiMB7@shadow.com,4ywqvild2xuvd9lb75ttarrd36ilge0o
s_FfHEiT@shadow.com,ngnl78g9ltad7vsaahwdnutrwkwsmm9h
s_VI2Imw@shadow.com,kvnx34jwfheyhyguq8n8nv37797p99kc
s_UdLIDf@shadow.com,agoy835yxe92c517fn91qs8c2v8nidgy
s_k77N6c@shadow.com,aujl6oul2e1eg7tbf3o88zmow9wjykga
s_zKJf4X@shadow.com,42uvmlxy7kmsfh4cgxwwnhd2de4iah37
s_dGjQK4@shadow.com,pf6j9y3f6zifvbybijwkds9neoidfjli
s_15Du4D@shadow.com,j7yoy7yhj26hqn2rqkqd1il35dnlwlst
s_oGmTi8@shadow.com,8tyfsiprlvvdd76qaaj9yzl9vqmww22s
s_65gIdM@shadow.com,ldecbehnj6dqugfo56y915e362p41kt3
s_qOqqzJ@shadow.com,09vkw8q74ra5xnmgzlwzgg3bxyryitu8
s_mUZqQt@shadow.com,3m4kj03pdw3nk9d87dut0ismmipteuv9
s_aI9Ku7@shadow.com,ramndj154d1zuuy3gw3vciva758pyas6
s_PdBQu7@shadow.com,otaasm8ndpaavwq5maezd0mhodi2ktrs
s_UhTVV5@shadow.com,93a4dsfyqyeiqpb6s8cxcinzbqwe31bf
s_bzRObE@shadow.com,a3iy6e6vhpcqkynfmpebct384g4111hx
s_ntnHXm@shadow.com,bhbgvtr94cmn552mcxy1vi04lsboea4v
s_w3ZZJn@shadow.com,xjrbt53witsm80t8zoft0p1hgys8p35s
s_uMjxpT@shadow.com,1cu38obtkejp03iewzrq7dlvxghz6hhz
s_BxSAPH@shadow.com,fpvc8z549fs4wiphfmawhqbqib3r0vt7
s_dsJUNE@shadow.com,5gobn4pzwg04l1xe9z6gpo5nsc5aod53
s_sfLGzM@shadow.com,kx0ocanpdxu1vupj0n8awwg766knmtuv
s_Tao00Z@shadow.com,26ffpkcptx96k0erc4ia62a3451bd0k8
s_o92L1i@shadow.com,nsmn9m7cdd0h6q234s9he090v5g863y9
s_rRrUE5@shadow.com,7ddjoaxjrdglpob6ne3is3z2dz5z5mt8
s_x5DTTo@shadow.com,zgcx0s7os5g91txs2nek0o8s1w2gy2iu
s_55RYhl@shadow.com,c4h61817ztxq0pglet67wbqjva8y3knl
s_2Yteeu@shadow.com,fg2sonl1nt49ezejeiipdbivhawyduj4
s_06fWcj@shadow.com,eatahic9tvks6pwdwha2o2v5loqqkgue
s_BEa7Nm@shadow.com,u6u9s1t0kx6nvqmzgyn0drbq8mjusr5t
s_Zli3Rp@shadow.com,suycarc1mold8xlafqto9t69a0c4f6ay
s_p1PEOV@shadow.com,6jawtkc0bk6huki8j8fy8cwljfhfzgg7
s_ISOl1S@shadow.com,p8z3e4d28q2300uvjf97d3v6o5ovc2bp
s_gFzMYy@shadow.com,msfwxirytqrlba33kko49lseiwcp9zul
s_lRjEm4@shadow.com,vpwfc8eq5j0wvrp48r8ckbid32s76nbm
s_7UkikS@shadow.com,85icrc61kkhohob5fuy4r5feb71nsxf1
s_ZFISK2@shadow.com,5fpvqbdb5wr2dtwtapj94dcp1nirowzf
s_MaqNBa@shadow.com,7crqsrrg3akvcritepmq2q7qc3mfrb1t
s_PoKDa9@shadow.com,yol999to2c6uk6bcd9udjhw8xdu0w6ip
s_hFMaHa@shadow.com,q0f3cvvbbkgeq3afa6caium2n1s5awfn
s_hveGbT@shadow.com,pdb89zxyggez4u7496imcjknxhpr4nf0
s_OXrBq7@shadow.com,j4j66i9ho82ov2li3mkad191tv340zbd
s_IYyGo8@shadow.com,kxilqywyip61v7ttt2to2wkmeoiudgfg
s_BP84Ls@shadow.com,47aid7e2r1ku6wykdb1u67gn6f39o6uc
s_rexvsp@shadow.com,oq1r1k5sqgh5z99nt0kfmt6sa56gkpgh
s_86vJCD@shadow.com,sztpcmknnjfk1lpnxckv1chkxcueqghk
s_J2n2rf@shadow.com,bwlay6bxl7reep9e2kntajtun2gz58ve
s_wrajLj@shadow.com,afjkcetkkgbdgu4xb22rily9kuhjrh8r
s_p7moGs@shadow.com,o2nmq0s7zzblfdtkn4fntsume53zclsx
s_WsbZFz@shadow.com,uwfvnrjk3qqj4jij82i2uv91hb74ch9b
s_0LpqA0@shadow.com,geq356u2ir0san3vdks13gvyqdu4k2ng
s_DXhqau@shadow.com,mvdlv7cajgv4362b7mfqh2g3jt2ce7tt
s_wLJpFr@shadow.com,ssoow3m8cydgz0ziqcsnm136e2xm4gj1
s_elsxmB@shadow.com,u3r4npjtdvlavo3dbejzj02n5aoyld4l
s_PbDamE@shadow.com,3vgq4du1e9ctjnz0236a8hdfgdf3fky0
s_gsAitD@shadow.com,zi0d7n9pczxj0k23cro35aowk31j2m4t
s_iAO3TP@shadow.com,vxwljddp7ub7uubygc7a8oh22nrxp5g6
s_ITbTqk@shadow.com,tucpg0a6psl69882ouizvqufpumhlrce
s_4O4rSo@shadow.com,kg3y9el1bw05diev0xwc21erzmjhs6eh
s_f4PIek@shadow.com,xjqxd9c02rs1cimqykrwvrny1asf4ehi
s_kAafF1@shadow.com,8or646wga1o4gbogcq4lgezxfubul5fc
s_UlbokC@shadow.com,r1hx1ax23e5t0u712rqug1gyosrd5zo9
s_StVyIN@shadow.com,jh1u0b7wn4boeiysprcj3qki0r3hnwxx
s_Qny9jd@shadow.com,5hvatnnrp09calygs74e22aayf1mj20j
s_yLXjZH@shadow.com,3pilzgp7a9qgw7ka5xcjvf8fx64l5b0a
s_A206OU@shadow.com,ojvvjy12pvxw7918qvxdogbjl1rxqtqx
s_PA9kIT@shadow.com,i01j7j3ekypm3ie2gvb0i4udk0mmt1w1
s_ZCN9cu@shadow.com,q1aephw9f37oy78gffjb6isukbfotojh
s_ukKc8S@shadow.com,m1myw3njnq8i2f7okxyce2a57u574bbi
s_MhRJYd@shadow.com,s4zpp9dkkyqkagdslad7dh5wwm15fm01
s_aYvHUZ@shadow.com,hjqx6zdry9egvahi8ujkwlz9ftem9lpz
s_sQTGcP@shadow.com,4r4cb7u8a5mkx20p4tdeqvyit9ighcgm
s_vqyczy@shadow.com,szlfq0hy7h3og00838l3jcyy8m9v3td7
s_lqerll@shadow.com,ml4a0i39cxlx7b7wk1sxn35vry9xcmwo
s_2dIy2D@shadow.com,etqxp9vb5u1a0zr3rr94n9ci7uknj7y1
s_sqPBhm@shadow.com,q2060qemovy1vbvgla58ecjxm6rm8pzl
s_rxeL87@shadow.com,ipnphtoof34r8yai8iwrkl9p53oksobx
s_T7Zllb@shadow.com,3pg2u2wna6p9lga84ojgagkh848068ie
s_A1prFK@shadow.com,cf0qhgrbgprnkuw4e2pb78djcrynjkgq
s_kqy0Ti@shadow.com,gezulayw1su7mb81ac1lek2ib9oxwthr
s_yRxvCl@shadow.com,bl5b3ugo813is18ucq28gnnzlpp5fv8h
s_bAf12Y@shadow.com,4cf0npcv9h3so7zqwcenzpndlcqyv9sh
s_mSDYSZ@shadow.com,y186c2upmc5hhvmt6u6fxl9x2iaoibab
s_XQa53g@shadow.com,lm9372t8fb1puoai4evbdgps3gf4ttb0
s_ifZ8Nd@shadow.com,hgx60tfmqlzxiqoq3vxqgb0krmbcgex4
s_JnVz5P@shadow.com,2aouk5e8o6gylkt192nw6e9kf3oe8spe
s_YQAkOm@shadow.com,r0ura6wuxuds7e1smqqnrp87n2j9z085
s_LrWglN@shadow.com,bfhome8s4j6kpntqgog9t58oh2ucead4
s_20d8Bf@shadow.com,us7kwxsub817amx3kjpnwsqgvz25zoc0
s_FOhoT3@shadow.com,hic6v099gx6o6scgy9kqbmlwafcktqv5
s_87duIN@shadow.com,yqa7myqmtxp1lwby0l3wgby2oxa6sk4f
s_f7Wbt1@shadow.com,16ro7lvya3ubyq5h1nl22eogb3jz555m
s_0rotjy@shadow.com,of5wkuljo6b7ywllajcdb6qcxynkvzht
s_95vHay@shadow.com,a9afco2huzoziofbq3orpe3a623akuvv
s_xjCKlA@shadow.com,fb9xvm0e2tjmjw2706awbf902gxyqi6n
s_roJThz@shadow.com,r9snwsk7j091kupoifc6o0hv7txd1jbo
s_cIoVmE@shadow.com,cjuop7w8oeh33dnq3g2315vuhv0pr9w3
s_F7Kobr@shadow.com,78jhfdctwuc6amxn1gd37st2fufcsqg3
s_Ye2HV6@shadow.com,552sdubujb3jp5axhjtjzs917ijeo48k
s_Zq0esd@shadow.com,ljduclms85ni7rwavkof6cp86wzrn47b
s_qtIoFr@shadow.com,wh4x9hk1oj2cgf7mf4q8dzjmho9tww31
s_9SUFAA@shadow.com,znber8cz0gftf8dxvc1iktj8czm8a1hy
s_wWfMdw@shadow.com,vbfu0yatc321g2pgmv8zuo4nbzxunwp6
s_fSjdY4@shadow.com,xm31xyakr9eh85qemal8wsu8ly1ppq6q
s_6COY94@shadow.com,riwsll7n6nmmdiplj4pto41e6orbhmd2
s_Y4eWHx@shadow.com,v9vgkpeetyvm7yrzk8q8opnp5tb2qu8e
s_oLysj9@shadow.com,50n50084ay9i1042c24kx81jeijumwe8
s_6Sve8x@shadow.com,f8zufbh4lal58idjhpgbu4sfno4oow4t
s_fai50t@shadow.com,oeewhmfgxngctbefs77a0jt00owbf4qy
s_LxFO8U@shadow.com,mp2t67xwaef9ilummduvw17u6rn5umj6
s_rha2wA@shadow.com,1izt9jls41vor4h0a5zbn3m3k3rv90np
s_jb6wqN@shadow.com,rl2btbno0p5xuge59wb2ygswtdyjqkmp
s_16PqdV@shadow.com,py64x4m7pwkgiruq3zt72gngy24w510k
s_I89v8b@shadow.com,2acyarxo4vabmwz399u5x639ecmzrxdv
s_kTP2tk@shadow.com,j24px14x8myx56706fpk1ongmrhiop4a
s_58S40h@shadow.com,tzwqekhj7k1fqdiexwuxpmiv8xs8jxh1
s_8wiQXC@shadow.com,baq7h6qq0o2am6o6uhek5di56nkuz8kx
s_2n6PoJ@shadow.com,sqrmg2j3fea4vhf5s3hpg9715tl8hnas
s_gsvhGu@shadow.com,37yupl618d474gm1cv6td9gejqlr1isj
s_njbBDy@shadow.com,bobtu8g1yic0x2khn070okly7a370zq0
s_xMzp0h@shadow.com,c8ug0ym27php0b5e4d3fkhhw2zu8abai
s_4g7YHw@shadow.com,x88xvzo5n9i2jlecoc465mxz1zypeidl
s_cgbaLZ@shadow.com,cio38l64hnpm2ueajgosce21udshbctm
s_8hDFBn@shadow.com,vcy1vgpbq5tlum2xz703wle5ao5hcqwo
s_aOIRXe@shadow.com,yhl5jeofm352iiqz42m474btte5uwbe4
s_vKHZZk@shadow.com,ntbwtedy4j36129rqsoewp5xh19elsrn
s_Mbo1lq@shadow.com,seazsj6wg54gtjom0aamtif4gn63koex
s_1RRDLt@shadow.com,xrjwf3j1heaxo4mrmkzd7to90s1ccuxr
s_YbKv8b@shadow.com,8c76738njneerb28j2yn6qnl6v2c4dj3
s_oLPWVd@shadow.com,29mo5t8pzfqjf9ik0dnmw7200ahpuu2x
s_1L5VHC@shadow.com,f4m9up82tkrqof56yajvdvuy58heje51
s_dt8GRU@shadow.com,6cn7cvgibfb3azredg0n4ica7gg04p3f
s_q8Plkv@shadow.com,yvmio7dtkgwr76gecp3go1yqu2q9ocnr
s_M4qo0a@shadow.com,huw2u3k02ljmpeuk3c5qgmm8bzda1wrs
s_8TOaIC@shadow.com,mda873zm819ko9nowjlalaijcaonbu8g
s_Axbdwo@shadow.com,jvl1u3w1is1awk25r46neeh6slf840p6
s_DsgJyQ@shadow.com,id3ub3fnl1xnhocydlfp10m7kttznh1e
s_KcE9m4@shadow.com,ekotfo48yh019vjvqxule8m59p6hbdz1
s_cxUoTi@shadow.com,i8g1vgy6nd7t182qxrllr9kg85mppbip
s_yf5F2d@shadow.com,p70e0nn7qxj6pe6ldn3r1mk9vjfb4lby
s_WWBin1@shadow.com,31i8k57l7s5ou4feyrefgvr7eyp4hiwz
s_4hNpGq@shadow.com,k3fuhwk937mu1bs4px5qviwsmeqeo91r
s_Y0DdxY@shadow.com,vjuidbwj25vsemozkrjqlwrmno8o06zd
s_q4AaZH@shadow.com,ijg26wuwgraxis7vpxla80hnacvwumn5
s_GnhyAn@shadow.com,van9frxb3pvsauj24ldpccdfnrwd788z
s_ULfH0b@shadow.com,nbs01y9o2wyysxaw0eicpuzantewhvpv
s_GUt0uv@shadow.com,9quzqoelnxgl73rmq1ouq5u6k2yb2gqx
s_nh7lJ6@shadow.com,hoztrum0qmc3eopqolvca4g0ifrw34o8
s_dDXEx1@shadow.com,5cc6i65sr1aaq9vfeufqw62e4sjvjaog
s_d6tSOl@shadow.com,rnsy384qlrtya2bazwop0ya84cs59ccb
s_ZUKIJB@shadow.com,vg242kiyp2ysrth1ak2k4v8peb1n29b4
s_YQizji@shadow.com,qigcj7226sah63nfxpq914tvh7y4hcgm
s_sgMyOm@shadow.com
s_4HRYFK@shadow.com
s_KaRMJO@shadow.com
s_07ssdt@shadow.com
s_zMc7wq@shadow.com
s_zA014B@shadow.com
s_akgPyg@shadow.com
s_vKSOXA@shadow.com
s_oi7M2a@shadow.com
s_lOTICx@shadow.com
s_i4rFhk@shadow.com
s_UPqB4b@shadow.com
s_do9K3h@shadow.com
s_gXRMNW@shadow.com
s_GGopQR@shadow.com
s_be9Zi8@shadow.com
s_hDAJDM@shadow.com
s_yQJV10@shadow.com
s_axSkdT@shadow.com
s_QI5QFa@shadow.com
s_ERIRTt@shadow.com
s_zNA0qR@shadow.com
s_by2EKN@shadow.com
s_yRDyTe@shadow.com
s_mAu8xe@shadow.com
s_xNXLax@shadow.com
s_xkRtne@shadow.com
s_Dpguo5@shadow.com
s_i7ILAu@shadow.com
s_fjgn6l@shadow.com
s_iwSVzk@shadow.com
s_u39tAC@shadow.com
s_BqeGFr@shadow.com
s_cA91fz@shadow.com
s_ESqCxg@shadow.com
s_8G7dcC@shadow.com
s_yUirRE@shadow.com
s_upU0XW@shadow.com
s_e1gsjY@shadow.com
s_ShQyke@shadow.com
s_KzuAYn@shadow.com
s_hsG5eH@shadow.com
s_cF7TuX@shadow.com
s_bNzT3y@shadow.com
s_E4I3Xs@shadow.com
s_AdUmQr@shadow.com
s_JU1HJp@shadow.com
s_e6gQH3@shadow.com
s_YSpR4I@shadow.com
s_36xYLw@shadow.com
s_b5CSos@shadow.com
s_fGrHfu@shadow.com
s_OTNbWY@shadow.com
s_BmjUyS@shadow.com
s_5gjbBq@shadow.com
s_o1KuZK@shadow.com
s_4cYuPa@shadow.com
s_ql2jNt@shadow.com
s_TlQiUV@shadow.com
s_oD1Hmy@shadow.com
s_lRgLWN@shadow.com
s_hO4VFC@shadow.com
s_AviTFG@shadow.com
s_2Lago0@shadow.com
s_IN3CIK@shadow.com
s_49T1iu@shadow.com
s_QZ5xev@shadow.com
s_f0ULrl@shadow.com
s_XubfZG@shadow.com
s_PzH35A@shadow.com
s_3chHX1@shadow.com
s_Eh80vb@shadow.com
s_XAkiWi@shadow.com
s_DsRsnp@shadow.com
s_S4WZXB@shadow.com
s_QqYjNu@shadow.com
s_FMZSoT@shadow.com
s_NTvlOB@shadow.com
s_ei0zs6@shadow.com
s_KnFg4t@shadow.com
s_ropbJR@shadow.com
s_3Q98tn@shadow.com
s_EbB7e0@shadow.com
s_NdwviG@shadow.com
s_fIH36h@shadow.com
s_HrFCH6@shadow.com
s_3ixLfv@shadow.com
s_IqssSO@shadow.com
s_V1GHJz@shadow.com
s_Lt4V85@shadow.com
s_exsZWG@shadow.com
s_Py0oZ4@shadow.com
s_W4RqbA@shadow.com
s_s1L2lv@shadow.com
s_hgZehK@shadow.com
s_KU3D7m@shadow.com
s_JwVmFg@shadow.com
s_uZRiM7@shadow.com
s_LNhnOE@shadow.com
s_YcQsmL@shadow.com
s_AAXg2x@shadow.com
s_1N8qG8@shadow.com
s_hpC1IP@shadow.com
s_VWL5Pk@shadow.com
s_y8atXB@shadow.com
s_igjXaA@shadow.com
s_axJVqe@shadow.com
s_hhSSqd@shadow.com
s_8Evf2c@shadow.com
s_EvDs7H@shadow.com
s_F6FHfH@shadow.com
s_RGun9G@shadow.com
s_qVKF9Q@shadow.com
s_i6ffeZ@shadow.com
s_E0txbx@shadow.com
s_DVwDK3@shadow.com
s_gwVryk@shadow.com
s_Cm7btb@shadow.com
s_WP3s7w@shadow.com
s_ZdLDZo@shadow.com
s_C2PiMv@shadow.com
s_GPR0C1@shadow.com
s_wnrxFo@shadow.com
s_sNyLO4@shadow.com
s_08DlWX@shadow.com
s_rTS8qV@shadow.com
s_nd6HiH@shadow.com
s_a9cIf1@shadow.com
s_5I3fN3@shadow.com
s_TWwKNi@shadow.com
s_WrVSmD@shadow.com
s_Twwxb8@shadow.com
s_DJvlFY@shadow.com
s_KTcosq@shadow.com
s_5o4RJL@shadow.com
s_hsj9pq@shadow.com
s_7TOm5v@shadow.com
s_enXoFU@shadow.com
s_F9hH2t@shadow.com
s_fJD9SK@shadow.com
s_895iAz@shadow.com
s_oniMPE@shadow.com
s_y6GymP@shadow.com
s_UGN1aM@shadow.com
s_1RCzUi@shadow.com
s_GPok7I@shadow.com
s_hX41uJ@shadow.com
s_jYDXVu@shadow.com
s_5M8FPz@shadow.com
s_0KyxOu@shadow.com
s_wWtI7X@shadow.com
s_upwHbj@shadow.com
s_kWGe6S@shadow.com
s_GtpUFG@shadow.com
s_yMfJP3@shadow.com
s_g3zIuG@shadow.com
s_bLymnh@shadow.com
s_xvwj7J@shadow.com
s_n2rgzI@shadow.com
s_kfGtFE@shadow.com
s_lK7QJJ@shadow.com
s_bOQCNQ@shadow.com
s_1mypap@shadow.com
s_ux8AJL@shadow.com
s_twoHks@shadow.com
s_xgZqN4@shadow.com
s_MaChwe@shadow.com
s_NzQnI9@shadow.com
s_87PpDu@shadow.com
s_UTQ2Lb@shadow.com
s_2ZlLCx@shadow.com
s_5FAGd4@shadow.com
s_TNhLPD@shadow.com
s_uujHEb@shadow.com
s_wBqKNY@shadow.com
s_MrbRuR@shadow.com
s_FPb33o@shadow.com
s_3c7tUk@shadow.com
s_s7uz0U@shadow.com
s_v3TI1p@shadow.com
s_djjDaF@shadow.com
s_NjoRTW@shadow.com
s_CDooAN@shadow.com
s_pkMFSS@shadow.com
s_OU8QCt@shadow.com
s_rSlV4T@shadow.com
s_MtxuFr@shadow.com
s_H36e9F@shadow.com
s_xnXhe5@shadow.com
s_7nWGkq@shadow.com
s_KtWEu9@shadow.com
s_FpzO9Q@shadow.com
s_zhOQcV@shadow.com
s_T8Gcjy@shadow.com
s_ZsMr7d@shadow.com
s_XgWX4z@shadow.com
s_1ozsiy@shadow.com
s_UPD6bx@shadow.com
s_zsvmqU@shadow.com
s_HfHwKX@shadow.com
s_FihASr@shadow.com
s_KRN7h9@shadow.com
s_Jmkcc9@shadow.com
s_d7TIgK@shadow.com
s_gtMV3b@shadow.com
s_XVsXeA@shadow.com
s_edvBYb@shadow.com
s_YiX7OV@shadow.com
s_EAYAMe@shadow.com
s_IR0Jmz@shadow.com
s_KphQ2M@shadow.com
s_lPRv0u@shadow.com
s_7FUcFf@shadow.com
s_FGgy48@shadow.com
s_ltsupK@shadow.com
s_Ee4QC1@shadow.com
s_gqBA3v@shadow.com
s_embU8E@shadow.com
s_0I4C0g@shadow.com
s_EzpzqV@shadow.com
s_98A1Z2@shadow.com
s_p5kFP1@shadow.com
s_6srtjM@shadow.com
s_FJyYaA@shadow.com
s_TSJPo5@shadow.com
s_8nvaAF@shadow.com
s_6rkYkC@shadow.com
s_rUnoMo@shadow.com
s_Z8PP5J@shadow.com
s_Q5LUMj@shadow.com
s_v26xAA@shadow.com
s_HqZEg7@shadow.com
s_8gSWmC@shadow.com
s_fCQcu1@shadow.com
s_91xnox@shadow.com
s_gvlFut@shadow.com
s_nyfbCS@shadow.com
s_uA9TOF@shadow.com
s_JLlz1L@shadow.com
s_vm9MFh@shadow.com
s_Lqxoqm@shadow.com
s_uhXH6g@shadow.com
s_Wyn2gW@shadow.com
s_rtCnmO@shadow.com
s_RyIPYo@shadow.com
s_bS8mIT@shadow.com
s_eUL2wx@shadow.com
s_rggaRa@shadow.com
s_T93ZyL@shadow.com
s_qnVpkI@shadow.com
s_hHpKxO@shadow.com
s_OgaFKI@shadow.com
s_kJWvzA@shadow.com
s_ilPCgr@shadow.com
s_0bCas9@shadow.com
s_gJ4l7J@shadow.com
s_jBC0ve@shadow.com
s_8e80N0@shadow.com
s_4jPHc1@shadow.com
s_2LjC8I@shadow.com
s_XXGEwN@shadow.com
s_VbZq9G@shadow.com
s_5rfk9c@shadow.com
s_kzuGZj@shadow.com
s_yUUJmV@shadow.com
s_Kmd5N3@shadow.com
s_wvsmbJ@shadow.com
s_ROiMbb@shadow.com
s_sAZ7L8@shadow.com
s_UJWzA6@shadow.com
s_mCazhk@shadow.com
s_cuQRSf@shadow.com
s_3Us8vf@shadow.com
s_pxACJM@shadow.com
s_jRQhe1@shadow.com
s_O8qfPf@shadow.com
s_rQC4Tn@shadow.com
s_09lKhD@shadow.com
s_J3uR9F@shadow.com
s_8vCW0n@shadow.com
s_53EyN7@shadow.com
s_jlNKoI@shadow.com
s_prpHwa@shadow.com
s_NBpqo6@shadow.com
s_0YjkyC@shadow.com
s_oD0SPa@shadow.com
s_cRZ820@shadow.com
s_QuSUc9@shadow.com
s_1aQRIO@shadow.com
s_8WpiMk@shadow.com
s_mFbRwb@shadow.com
s_P0Y9Tk@shadow.com
s_GakHBe@shadow.com
s_XQ3BZh@shadow.com
s_gJDa8J@shadow.com
s_6MhNqA@shadow.com
s_fvdNZW@shadow.com
s_5J2GxW@shadow.com
s_JluV3t@shadow.com
s_Iblrkj@shadow.com
s_335ZV4@shadow.com
s_CHiVph@shadow.com
s_nYtmvx@shadow.com
s_KPbWLr@shadow.com
s_lQKSDY@shadow.com
s_Du0nUG@shadow.com
s_DTILiI@shadow.com
s_NzzDIB@shadow.com
s_IJ4WKW@shadow.com
s_ChO3b1@shadow.com
s_kNYvxm@shadow.com
s_UJDVhN@shadow.com
s_pPYeUT@shadow.com
s_85Kgkq@shadow.com
s_uTPfeO@shadow.com
s_nO8pMn@shadow.com
s_Jsgrmu@shadow.com
s_BXqhZU@shadow.com
s_V11qVt@shadow.com
s_dIHw1i@shadow.com
s_nh6qPY@shadow.com
s_OKKdnO@shadow.com
s_68WhjX@shadow.com
s_8FXzfc@shadow.com
s_FJSZGg@shadow.com
s_EkQYST@shadow.com
s_LbXj8I@shadow.com
s_PewDvT@shadow.com
s_1wfiXX@shadow.com
s_tIpF2Z@shadow.com
s_KifGet@shadow.com
s_RpvPdS@shadow.com
s_a30n4A@shadow.com
s_KExWNT@shadow.com
s_eGi9Rv@shadow.com
s_De6CKT@shadow.com
s_o4Fr6f@shadow.com
s_vDC1eS@shadow.com
s_pl3E7A@shadow.com
s_HttzBh@shadow.com
s_vAhvMd@shadow.com
s_w1S8S8@shadow.com
s_UcMCt1@shadow.com
s_8ZpViX@shadow.com
s_UGjdzs@shadow.com
s_17kAoA@shadow.com
s_741wfB@shadow.com
s_f5Jj7b@shadow.com
s_CJiMB7@shadow.com
s_FfHEiT@shadow.com
s_VI2Imw@shadow.com
s_UdLIDf@shadow.com
s_k77N6c@shadow.com
s_zKJf4X@shadow.com
s_dGjQK4@shadow.com
s_15Du4D@shadow.com
s_oGmTi8@shadow.com
s_65gIdM@shadow.com
s_qOqqzJ@shadow.com
s_mUZqQt@shadow.com
s_aI9Ku7@shadow.com
s_PdBQu7@shadow.com
s_UhTVV5@shadow.com
s_bzRObE@shadow.com
s_ntnHXm@shadow.com
s_w3ZZJn@shadow.com
s_uMjxpT@shadow.com
s_BxSAPH@shadow.com
s_dsJUNE@shadow.com
s_sfLGzM@shadow.com
s_Tao00Z@shadow.com
s_o92L1i@shadow.com
s_rRrUE5@shadow.com
s_x5DTTo@shadow.com
s_55RYhl@shadow.com
s_2Yteeu@shadow.com
s_06fWcj@shadow.com
s_BEa7Nm@shadow.com
s_Zli3Rp@shadow.com
s_p1PEOV@shadow.com
s_ISOl1S@shadow.com
s_gFzMYy@shadow.com
s_lRjEm4@shadow.com
s_7UkikS@shadow.com
s_ZFISK2@shadow.com
s_MaqNBa@shadow.com
s_PoKDa9@shadow.com
s_hFMaHa@shadow.com
s_hveGbT@shadow.com
s_OXrBq7@shadow.com
s_IYyGo8@shadow.com
s_BP84Ls@shadow.com
s_rexvsp@shadow.com
s_86vJCD@shadow.com
s_J2n2rf@shadow.com
s_wrajLj@shadow.com
s_p7moGs@shadow.com
s_WsbZFz@shadow.com
s_0LpqA0@shadow.com
s_DXhqau@shadow.com
s_wLJpFr@shadow.com
s_elsxmB@shadow.com
s_PbDamE@shadow.com
s_gsAitD@shadow.com
s_iAO3TP@shadow.com
s_ITbTqk@shadow.com
s_4O4rSo@shadow.com
s_f4PIek@shadow.com
s_kAafF1@shadow.com
s_UlbokC@shadow.com
s_StVyIN@shadow.com
s_Qny9jd@shadow.com
s_yLXjZH@shadow.com
s_A206OU@shadow.com
s_PA9kIT@shadow.com
s_ZCN9cu@shadow.com
s_ukKc8S@shadow.com
s_MhRJYd@shadow.com
s_aYvHUZ@shadow.com
s_sQTGcP@shadow.com
s_vqyczy@shadow.com
s_lqerll@shadow.com
s_2dIy2D@shadow.com
s_sqPBhm@shadow.com
s_rxeL87@shadow.com
s_T7Zllb@shadow.com
s_A1prFK@shadow.com
s_kqy0Ti@shadow.com
s_yRxvCl@shadow.com
s_bAf12Y@shadow.com
s_mSDYSZ@shadow.com
s_XQa53g@shadow.com
s_ifZ8Nd@shadow.com
s_JnVz5P@shadow.com
s_YQAkOm@shadow.com
s_LrWglN@shadow.com
s_20d8Bf@shadow.com
s_FOhoT3@shadow.com
s_87duIN@shadow.com
s_f7Wbt1@shadow.com
s_0rotjy@shadow.com
s_95vHay@shadow.com
s_xjCKlA@shadow.com
s_roJThz@shadow.com
s_cIoVmE@shadow.com
s_F7Kobr@shadow.com
s_Ye2HV6@shadow.com
s_Zq0esd@shadow.com
s_qtIoFr@shadow.com
s_9SUFAA@shadow.com
s_wWfMdw@shadow.com
s_fSjdY4@shadow.com
s_6COY94@shadow.com
s_Y4eWHx@shadow.com
s_oLysj9@shadow.com
s_6Sve8x@shadow.com
s_fai50t@shadow.com
s_LxFO8U@shadow.com
s_rha2wA@shadow.com
s_jb6wqN@shadow.com
s_16PqdV@shadow.com
s_I89v8b@shadow.com
s_kTP2tk@shadow.com
s_58S40h@shadow.com
s_8wiQXC@shadow.com
s_2n6PoJ@shadow.com
s_gsvhGu@shadow.com
s_njbBDy@shadow.com
s_xMzp0h@shadow.com
s_4g7YHw@shadow.com
s_cgbaLZ@shadow.com
s_8hDFBn@shadow.com
s_aOIRXe@shadow.com
s_vKHZZk@shadow.com
s_Mbo1lq@shadow.com
s_1RRDLt@shadow.com
s_YbKv8b@shadow.com
s_oLPWVd@shadow.com
s_1L5VHC@shadow.com
s_dt8GRU@shadow.com
s_q8Plkv@shadow.com
s_M4qo0a@shadow.com
s_8TOaIC@shadow.com
s_Axbdwo@shadow.com
s_DsgJyQ@shadow.com
s_KcE9m4@shadow.com
s_cxUoTi@shadow.com
s_yf5F2d@shadow.com
s_WWBin1@shadow.com
s_4hNpGq@shadow.com
s_Y0DdxY@shadow.com
s_q4AaZH@shadow.com
s_GnhyAn@shadow.com
s_ULfH0b@shadow.com
s_GUt0uv@shadow.com
s_nh7lJ6@shadow.com
s_dDXEx1@shadow.com
s_d6tSOl@shadow.com
s_ZUKIJB@shadow.com
s_YQizji@shadow.com
每一个搭配都好好看呀
 哪有什么上身好看的裤子 只有上身好看的腿
时髦青春我统统都要!
这波穿搭真的很有逼格我太喜欢
有木有肩宽胯窄女孩的穿搭
想要链接!
秋天露腿真的不冷么?
我缺的是搭配法则吗,我缺的是穿这些衣服的身材
女人没有四季 只有衣服好看
穿的这么Fashion 去逛该吗
瘦就是最有效的穿搭法则
有不露腿长裤的穿搭吗
确认过眼神 是我喜欢的风格
请问这些衣服哪里买?有链接吗?
我把这套锁的死死滴了哟
小个子适合可爱风
其实主要就是要突出高腰线啊
短衣配长裤!好看炸!!
超舒服的一套
就想问问157 的大胸小个子怎么穿
这个风格 简单好看
可爱在性感面前不值一提 
我不管.穿出自己的感觉.我也是超膜~
显瘦满分的穿搭 
好看的衣服就应该配一个好看的包包,不怕买不起那种
身材不好的话一件hold不住
喜欢这套穿搭
我再说一遍!我没有这个身材!!!!!
可盐可甜了.童话公主的配色了
这个是我的style了
?真心发问,1米8的姐妹都在哪里买的裤子
真好看~今天我的穿搭是阔腿裤嘻嘻
不是穿搭好看,真是人好看
来来来.我的腿长一米七的裤子,该上场了!!
55555今年真的爱这条!!!
挺可爱挺精神的 很有朝气
有点复古的感觉哦
想穿上漂亮的仙女裙去谈一场甜甜的恋爱
大长腿无敌了
超爱这样的搭配
衣服那么多,实名羡慕
这个女人穿什么都好看
好想知道楼主体重哇 感觉身材超棒
找到最适合自己的穿衣风格,才是日常穿搭的第一步啊
 正好要开学了,赶紧买几件备着,到时候好穿
这种五颜六色的我真的驾驭不住
白颜色真的怎么搭配都好看
适合我这种不会穿衣服的
看别人穿在身上是挺好看的,自己买回来穿在身上又是另一回事了
哈哈,不知道高中会不会允许我这样穿
特别喜欢这种穿搭 希望多出一点
\ No newline at end of file
import pymysql
import traceback
import logging
from vest.request.auto_request import login, time_convs, reply, get_comment, click
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
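# Topics created inside the window that received no reply inside the same window.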
def get_topic_id(numtime1, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT id,user_id FROM topic WHERE is_online=1 and create_time > '%s' and create_time < '%s' and id not in "
"(select topic_id from reply where create_time > '%s' and create_time < '%s')" % (
numtime1, numtime2, numtime1, numtime2)
)
data = cursor.fetchall()
topic_id = list(data)
logging.info("Database version : %s " % topic_id)
return topic_id
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
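# Cron entry: each such topic (roughly the past week, per time_convs(8, 0)) gets one like and one comment from a random shadow account.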
def fix_no_comment_click():
try:
numtime1, numtime2 = time_convs(8, 0)
user_id = get_topic_id(numtime1, numtime2)
for i in user_id:
cook = login()
if cook is not None:
click(cook, i[0])
comment = get_comment()
reply(cook, i[0], comment)
except:
logging_exception()
logging.error("catch exception,main :%s" % traceback.format_exc())
import pymysql
import traceback
import logging
from vest.request.auto_request import login, time_convs, follow
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
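# Distinct authors of topics created in the window, keeping only non-shadow users (or users with no user_extra row); each one gets followed by a shadow account.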
def get_data(numtime1, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT distinct(user_id) FROM topic WHERE is_online=1 and create_time > '%s' and create_time < '%s'" % (
numtime1, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
if topic_id:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def auto_follow():
try:
numtime1, numtime2 = time_convs(8, 0)
user_id = get_data(numtime1, numtime2)
for j in user_id:
id = int(j[0])
cookies = login()
if cookies is not None:
follow(cookies, id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, time_convs, follow
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
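# Collect users active since the cutoff (commenters, replied-to topic authors, repliers, new group members, new accounts); auto_follow_new then gives each non-shadow user a 50% chance of gaining one shadow follower.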
def get_comment_id(numtime):
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT user_id FROM reply WHERE create_time > '%s' " % numtime)
data_comment = cursor.fetchall()
cursor.execute(
"SELECT t.user_id from topic t left join reply r on r.topic_id = t.id WHERE r.create_time > '%s'" % numtime)
data_reply_topic = cursor.fetchall()
cursor.execute(
"SELECT r.user_id from reply r left join reply p on r.replied_id = p.replied_id WHERE p.create_time > '%s'" % numtime)
data_reply_reply = cursor.fetchall()
cursor.execute(
"SELECT user_id FROM group_user_role WHERE create_time > '%s' " % numtime)
data_group_follow = cursor.fetchall()
cursor.execute(
"select user_id from account_user WHERE create_time > '%s'" % numtime)
data_new_user = cursor.fetchall()
data_id = list(data_comment)
data1_id = list(data_reply_topic)
data2_id = list(data_group_follow)
data3_id = list(data_new_user)
data_reply = list(data_reply_reply)
all_data = []
all_data.extend(data_id)
all_data.extend(data2_id)
all_data.extend(data3_id)
all_data.extend(data_reply)
all_data.extend(data1_id)
topic_id_list = []
for i in all_data:
cursor.execute(
"select user_id from user_extra where user_id =" + str(i[0]) + " and is_shadow =0 ")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
return topic_id_list
def auto_follow_new():
try:
numtime1, numtime2 = time_convs(1, 2)
user_id = get_comment_id(numtime1)
try:
for j in user_id:
id = int(j[0])
follow_num = random.randint(0, 1)
for i in range(follow_num):
cookies = login()
if cookies is not None:
follow(cookies, id)
except:
pass
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import logins, pictorial_reply, get_majia_reply, get_pictorial_comment
from vest.request.auto_request import host, user, db, passwd
"""
Offline one-off job: insert {1,4} comments per pictorial.
"""
def get_data():
try:
print("....")
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute("select count(*),pictorial_id from reply group by pictorial_id")
data = cursor.fetchall()
topic_id = list(data)
data_all = []
for i in topic_id:
if i[0] <= 4:
data_all.append(i)
logging.info("Database version : %s " % topic_id)
return data_all
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def no_reply_principal():
try:
logging.info("comment offline pictorial")
user_id = get_data()
for i in user_id:
follow_num = random.randint(1, 5)
for num in range(follow_num):
majia_user_id = get_majia_reply(i[0])
cook = logins(majia_user_id)
comment = get_pictorial_comment()
if cook is not None:
logging.info("get cook:%s" % cook)
logging.info("get i[1]:%s" % i[1])
logging.info("get comment:%s" % comment)
pictorial_reply(cook, i[1], comment)
else:
logging.info("no get cookies-------------")
except:
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import logins, pictorial_reply, get_majia_reply, get_pictorial_comment
from vest.request.auto_request import host, user, db, passwd
"""
Offline one-off job: insert {1,4} comments per pictorial.
"""
def get_data():
try:
print("....")
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT creator_id,id FROM community_pictorial where is_online =True ")
data = cursor.fetchall()
topic_id = list(data)
logging.info("Database version : %s " % topic_id)
return topic_id
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def principal_offline_comment1():
try:
logging.info("comment offline pictorial")
user_id = get_data()
for i in user_id:
follow_num = random.randint(5, 20)
for num in range(follow_num):
majia_user_id = get_majia_reply(i[0])
cook = logins(majia_user_id)
comment = get_pictorial_comment()
if cook is not None:
logging.info("get cook:%s" % cook)
logging.info("get i[1]:%s" % i[1])
logging.info("get comment:%s" % comment)
pictorial_reply(cook, i[1], comment)
else:
logging.info("no get cookies-------------")
except:
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import logins, pictorial_reply, get_majia, get_pictorial_comment, time_conv_minute
from vest.request.auto_request import host, user, db, passwd
from vest.data.topic_models import get_pictorial_tag_by_id
"""
New board ("pictorial") content, online content only.
Runs every half hour.
Within the past half hour, if
1 new online topic was added (counting both shadow and normal users): insert {1,2} comments
2-5 new online topics were added (counting both shadow and normal users): insert {2,3} comments
more than 5 new online topics were added (counting both shadow and normal users): insert {3,5} comments
(note: comment authors must be shadow accounts)
Within the past half hour, if
the board received new votes (from both shadow and normal users): insert {1,2} comments
Comments come from a dedicated board-comment list and are randomly attributed to shadow accounts.
Note: the topic's own author must be barred from commenting.
"""
def get_data(numtime1, numtime2):
try:
print("....")
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT pictorial_id ,count(*) as count FROM community_pictorial_topic WHERE is_online=1 and (create_time >= '%s' and create_time < '%s') group by pictorial_id " % (
numtime1, numtime2))
data = cursor.fetchall()
cursor.execute(
"SELECT pictorial_id FROM topic_vote_cnt WHERE is_deleted = 0 and (update_time >= '%s' and update_time < '%s') group by pictorial_id" % (
numtime1, numtime2))
data_vote = cursor.fetchall()
pictorial_data = list(data)
pictorial_vote = list(data_vote)
logging.info("Database version : %s " % pictorial_data)
logging.info("Database version : %s " % pictorial_vote)
return pictorial_data, pictorial_vote
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def principal_online_comment1():
try:
logging.info("comment offline pictorial")
numtime1, numtime2, minute = time_conv_minute(30, 0)
user_id, data_vote = get_data(numtime1, numtime2)
random_num = 0
if user_id:
for i in user_id:
if i[1] == 1:
random_num = random.randint(1, 2)
if i[1] >= 2 and i[1] <= 5:
random_num = random.randint(2, 3)
if i[1] > 5:
random_num = random.randint(3, 5)
for num in range(random_num):
majia_user_id = get_majia()
cook = logins(majia_user_id)
comment = ""
# check whether the pictorial carries editor tags
edit_judge = get_pictorial_tag_by_id(i[0])
if edit_judge:
rand_tag_id = random.randint(0, len(edit_judge) - 1)
comment = get_pictorial_comment()
else:
comment = get_pictorial_comment()
if cook is not None:
pictorial_reply(cook, i[0], comment)
if data_vote:
for i in data_vote:
random_num = random.randint(1, 2)
for num in range(random_num):
majia_user_id = get_majia()
cook = logins(majia_user_id)
edit_judge = get_pictorial_tag_by_id(i[0])
if edit_judge:
rand_tag_id = random.randint(0, len(edit_judge) - 1)
comment = get_pictorial_comment()
else:
comment = get_pictorial_comment()
if cook is not None:
pictorial_reply(cook, i[0], comment)
else:
logging.error("catch exception,main:%s" % traceback.format_exc())
except:
logging.error("catch exception,main:%s" % traceback.format_exc())
import time
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, click, time_convs
from vest.request.auto_request import host, user, db, passwd
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT distinct(user_id),id FROM topic WHERE is_online=1 and (create_time >= '%s' and create_time <= '%s')" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
def one_seven_topic_comment():
try:
numtime, numtime2 = time_convs(7, 1)
user_id = get_data(numtime, numtime2)
logging.info("get user_id:%s" % user_id)
for i in user_id:
cook = login()
if cook is not None:
click(cook, i[1])
except:
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import traceback
import logging
from vest.request.auto_request import login, time_conv, get_comment, reply, get_product_comment, get_face_comment, \
get_category_tag_id, get_category_reply_commtent,judge_topic_info_get_comment
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
from vest.data.topic_models import get_edit_tag_id_list, get_pictorial_tag_by_id, get_topic_product_info, \
topic_has_image
import random
def get_data(numtime, numtime2):
try:
print("....")
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time > '%s' and create_time < '%s' )" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def true_comment_one():
try:
logging.info("comment one")
numtime1, numtime2 = time_conv(5, 0)
user_id = get_data(numtime1, numtime2)
if user_id:
for i in user_id:
cook = login()
comment = judge_topic_info_get_comment(i[1])
if cook is not None:
reply(cook, i[1], comment)
else:
logging.error("catch exception,main:%s" % traceback.format_exc())
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import time
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, time_conv, get_comment, reply
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
print("....")
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time > '%s' and create_time < '%s' )" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def true_comment_three():
try:
logging.info(" answer_reply_three")
numtime1, numtime2 = time_conv(45, 35)
user_id = get_data(numtime1, numtime2)
if user_id:
for i in user_id:
rand_num = random.randint(0, 1)
if rand_num == 1:
cook = login()
logging.info("get cook;%s" % cook)
comment = get_comment()
if cook is not None:
reply(cook, i[1], comment)
else:
logging.error("catch exception,main:%s" % traceback.format_exc())
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, time_conv, get_comment, reply
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
print("....")
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time > '%s' and create_time < '%s' )" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def true_comment_two():
try:
logging.info("answer_reply_two")
numtime1, numtime2 = time_conv(27, 17)
user_id = get_data(numtime1, numtime2)
if user_id:
for i in user_id:
rand_num = random.randint(0, 1)
if rand_num == 1:
cook = login()
comment = get_comment()
if cook is not None:
reply(cook, i[1], comment)
else:
logging.error("catch exception,main:%s" % traceback.format_exc())
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import traceback
import logging
import json
from vest.request.auto_request import time_conv_minute, get_answer_data, reply_answer, set_reply_to_redis, \
reply2, logins, \
get_majia, get_majia_reply
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time > '%s' and create_time < '%s' )" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
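# answer_reply1: post the question half of a Q/A pair on fresh topics, stash the
# answer half in a per-minute redis hash bucket, and replay the bucket written
# five minutes earlier as second-level replies from other shadow accounts.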
def answer_reply1():
try:
logging.info("answer_reply_one")
numtime1, numtime2, minute = time_conv_minute(5, 0)
user_id = get_data(numtime1, numtime2)
redis_key1 = "cybertron:set_reply_id:one"
reply_data_comment = []
if user_id:
for i in user_id:
majia_user_id = get_majia()
logging.info("get majia_user-id:%s" % majia_user_id)
cook = logins(majia_user_id)
comment1, comment2 = get_answer_data()
logging.info("get comment1:%s" % comment1)
logging.info("get comment2:%s" % comment2)
if cook is not None:
response = reply_answer(cook, i[1], comment1)
if response and len(response) > 1:
data_dict = json.loads(response)
reply_id = data_dict["data"]["id"]
reply_user_id = data_dict["data"]["user"]["id"]
topic_id = i[1]
reply_data_comment.append(
{"id": reply_id, "reply_user_id": reply_user_id, "answer": comment2, "topic_id": topic_id,
"majia_user_id": majia_user_id})
redis_client = set_reply_to_redis()
redis_client.hset(redis_key1, str(minute), json.dumps(reply_data_comment))
logging.info("get eeeeeeee:%s" % str(minute - 5))
have_reply1 = redis_client.hget(redis_key1, str(minute - 5))
logging.info("get have_reply1:%s" % have_reply1)
if have_reply1:
result = json.loads(str(have_reply1, encoding="utf-8"))
redis_client.hdel(redis_key1, prev_minute)
if result:
for item in result:
majia_user_id = get_majia_reply(item["majia_user_id"])
cook = logins(majia_user_id)
reply2(cook, item["topic_id"], item["answer"], item["id"])
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import traceback
import logging
import json
from vest.request.auto_request import logins, time_conv, get_answer_data, reply_answer, get_majia, \
set_reply_to_redis
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time > '%s' and create_time < '%s' )" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
print(user_id)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def answer_reply2():
try:
logging.info("comment answer one")
numtime1, numtime2 = time_conv(27, 17)
user_id = get_data(numtime1, numtime2)
redis_key = "cybertron:set_reply_id:two"
reply_data_comment = []
if user_id:
for i in user_id:
majia_user_id = get_majia()
cook = logins(majia_user_id)
comment1, comment2 = get_answer_data()
if cook is not None:
response = reply_answer(cook, i[1], comment1)
if response and len(response) > 1:
data_dict = json.loads(response)
reply_id = data_dict["data"]["id"]
reply_user_id = data_dict["data"]["user"]["id"]
reply_data_comment.append(
{"id": reply_id, "reply_user_id": reply_user_id, "answer": comment2, "topic_id": i[1],
"majia_user_id": majia_user_id})
redis_client = set_reply_to_redis()
redis_client.set(redis_key, json.dumps(reply_data_comment))
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import traceback
import logging
import json
from vest.request.auto_request import logins, time_conv, get_answer_data, reply_answer, set_reply_to_redis, get_majia
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
def get_data(numtime, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT user_id,id FROM topic WHERE is_online=1 and (create_time > '%s' and create_time < '%s' )" % (
numtime, numtime2))
data = cursor.fetchall()
topic_id = list(data)
topic_id_list = []
try:
for i in topic_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
print(user_id)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return []
def answer_reply3():
try:
logging.info("comment answer one")
numtime1, numtime2 = time_conv(45, 35)
user_id = get_data(numtime1, numtime2)
redis_key1 = "cybertron:set_reply_id:three"
reply_data_comment = []
if user_id:
for i in user_id:
majia_user_id = get_majia()
cook = logins(majia_user_id)
comment1, comment2 = get_answer_data()
if cook is not None:
response = reply_answer(cook, i[1], comment1)
if response and len(response) > 1:
data_dict = json.loads(response)
reply_id = data_dict["data"]["id"]
reply_user_id = data_dict["data"]["user"]["id"]
reply_data_comment.append(
{"id": reply_id, "reply_user_id": reply_user_id, "answer": comment2, "topic_id": i[1],
"majia_user_id": majia_user_id})
redis_client = set_reply_to_redis()
redis_client.set(redis_key1, json.dumps(reply_data_comment))
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import traceback
import logging
import json
from vest.request.auto_request import set_reply_to_redis, reply2, logins, get_majia_reply
from libs.error import logging_exception
def reply_comment2():
try:
redis_key1 = "cybertron:set_reply_id:two"
redis_client = set_reply_to_redis()
have_reply1 = redis_client.get(redis_key1)
if have_reply1 is None:
return
result = json.loads(str(have_reply1, encoding="utf-8"))
if result:
for item in result:
majia_user_id = get_majia_reply(item["majia_user_id"])
cook = logins(majia_user_id)
reply2(cook, item["topic_id"], item["answer"], item["id"])
redis_client.delete(redis_key1)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import traceback
import json
import logging
from vest.request.auto_request import set_reply_to_redis, reply2, logins, get_majia_reply
from libs.error import logging_exception
def reply_comment3():
try:
redis_key1 = "cybertron:set_reply_id:three"
redis_client = set_reply_to_redis()
have_reply1 = redis_client.get(redis_key1)
if have_reply1 is None:
return
result = json.loads(str(have_reply1, encoding="utf-8"))
if result:
for item in result:
majia_user_id = get_majia_reply(item["majia_user_id"])
cook = logins(majia_user_id)
reply2(cook, item["topic_id"], item["answer"], item["id"])
redis_client.delete(redis_key1)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import requests
import time
import datetime
import random
import traceback
import logging
import json
import redis
import smtplib
from libs.cache import redis_client
from email.mime.text import MIMEText
from email.utils import formataddr
from physical.settings_local import DATABASES
from physical.settings_local import REDIS_URL
from vest.data.topic_models import get_pictorial_tag_by_id, get_topic_product_info, get_edit_tag_id_list, \
get_category_tag_id, topic_has_image
my_sender = 'lixiaofang@igengmei.com'
my_pass = 'tg5AVKBB8jLQGBET'
my_user6 = "lixiaofang@igengmei.com"
# auto_click_url = "http://earth.gmapp.env/api/v1/like"
# auto_reply_url = "http://earth.gmapp.env/api/v1/reply/create"
# auto_follow_url = "http://earth.gmapp.env/api/v1/follow"
# auto_urge_url = "http://earth.gmapp.env/api/v1/user/urge"
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.mysql',
# 'NAME': 'alpha',
# 'USER': 'work',
# 'PASSWORD': 'Gengmei123!',
# 'HOST': 'bj-cdb-55ejhsjy.sql.tencentcdb.com',
# 'PORT': '62177',
# 'OPTIONS': {
# "init_command": "SET foreign_key_checks = 0;",
# "charset": "utf8mb4",
# },
# }
# }
auto_click_url = "http://earth.iyanzhi.com/api/v1/like"
auto_reply_url = "http://earth.iyanzhi.com/api/v1/reply/create"
auto_follow_url = "http://earth.iyanzhi.com/api/v1/follow"
auto_urge_url = "http://earth.iyanzhi.com/api/v1/user/urge"
host = DATABASES['default']['HOST']
user = DATABASES['default']['USER']
port = DATABASES['default']['PORT']
db = DATABASES['default']['NAME']
passwd = DATABASES['default']['PASSWORD']
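# Shared helpers for the vest cron jobs: shadow ("majia") account selection,
# cached-session login, and the HTTP actions (like / reply / follow / urge).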
def get_majia_reply(user_id):
try:
data = open("/srv/apps/physical/vest/data/vest_user_email.txt")
list = []
for i in data:
if i != user_id:
list.append(i.strip('\n').strip(','))
maj = random.randint(1, len(list))
user_id = list[maj - 1]
return user_id
except:
logging.error("catch exception,get_majia:%s" % traceback.format_exc())
return None
def get_majia():
try:
data = open("/srv/apps/physical/vest/data/vest_user_email.txt")
list = []
for i in data:
list.append(i.strip('\n').strip(','))
maj = random.randint(1, len(list))
user_id = list[maj - 1]
return user_id
except:
logging.error("catch exception,get_majia:%s" % traceback.format_exc())
return None
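# Login sessions are cached in the redis hash "majia_login_session"; see get_session() further down, which populates it.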
def get_cookies(user_id):
try:
session_key = "majia_login_session"
data = redis_client.hget(session_key, user_id)
logging.info("get data:%s" % data)
logging.info("get data:%s" % json.loads(data))
return json.loads(data)
except:
logging.error("catch exception,get_majia:%s" % traceback.format_exc())
return None
def login():
try:
user_id = get_majia()
logging.info("get user_id:%s" % user_id)
cookies = get_cookies(user_id)
if cookies is not None:
return {'sessionid': cookies}
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
return None
def logins(user_id):
try:
cookies = get_cookies(user_id)
if cookies is not None:
return {'sessionid': cookies}
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
return None
def click(cookies_get, id):
try:
post_dict = {
'type': 0,
'id': id
}
response = requests.post(url=auto_click_url,
cookies=cookies_get,
data=post_dict)
logging.info("response.text:%s" % response.text)
get_error(response.text, "click", id)
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
def reply(cookies_get, id, content):
try:
post_dict = {
'topic_id': id,
'content': content,
"type": 4
}
response = requests.post(url=auto_reply_url,
cookies=cookies_get,
data=post_dict)
logging.info("response.text:%s" % response.text)
get_error(response.text, "reply", id)
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
def time_conv(minutest, minutest2):
try:
now = datetime.datetime.now()
yes_time = now - datetime.timedelta(minutes=minutest)
yes_time2 = now - datetime.timedelta(minutes=minutest2)
return yes_time, yes_time2
except:
return None
def time_conv_minute(minutest, minutest2):
try:
now = datetime.datetime.now()
minute = datetime.datetime.now().minute
yes_time = now - datetime.timedelta(minutes=minutest)
yes_time2 = now - datetime.timedelta(minutes=minutest2)
return yes_time, yes_time2, minute
except:
return None
def time_now(minutest):
try:
now = datetime.datetime.now()
yes_time = now - datetime.timedelta(hours=minutest)
return yes_time, now
except:
return None
def time_convs(numtime, numtime2):
try:
now = datetime.datetime.now()
yes_time = now - datetime.timedelta(days=numtime)
yes_time_str = yes_time.strftime('%Y-%m-%d')
yes_time_str = yes_time_str + ' 23:59:59.000000'
yes_time2 = now - datetime.timedelta(days=numtime2)
yes_time_str2 = yes_time2.strftime('%Y-%m-%d')
yes_time_str2 = yes_time_str2 + ' 00:00:00.000000'
return yes_time_str, yes_time_str2
except:
return None
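# Example: with now = 2019-08-20 10:00, time_convs(8, 0) returns
# ('2019-08-12 23:59:59.000000', '2019-08-20 00:00:00.000000'),
# i.e. end of day eight days ago through start of today.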
def get_comment():
try:
data = open("/srv/apps/physical/vest/data/guanshui.txt", "r")
list_guanshui = []
for i in data:
list_guanshui.append(i)
num = random.randint(1, len(list_guanshui))
comment = list_guanshui[num - 1]
return comment
except:
return None
def get_comments():
try:
data = open("/srv/apps/physical/vest/data/guanshui.txt", "r")
list_guanshui = []
for i in data:
list_guanshui.append(i)
num1 = random.randint(1, 200)
num2 = random.randint(200, 400)
num3 = random.randint(400, len(list_guanshui))
comment1 = list_guanshui[num1 - 1]
comment2 = list_guanshui[num2 - 1]
comment3 = list_guanshui[num3 - 1]
comment_list = [comment1, comment2, comment3]
return comment_list
except:
return None
def follow(cookies_get, id):
try:
post_dict = {
'type': 1,
'id': id
}
response = requests.post(url=auto_follow_url,
cookies=cookies_get,
data=post_dict)
logging.info("response.text:%s" % response.text)
get_error(response.text, "follow", id)
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
def Urge(cookies_get, id):
try:
post_dict = {
'id': id
}
response = requests.post(url=auto_urge_url,
cookies=cookies_get,
data=post_dict)
logging.info("response.text:%s" % response.text)
get_error(response.text, "Urge", id)
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
def get_offline_comment():
try:
data = open("/srv/apps/physical/vest/data/offline_comment.txt", "r")
list_guanshui = []
for i in data:
list_guanshui.append(i)
num1 = random.randint(1, 200)
num2 = random.randint(200, 300)
num3 = random.randint(300, 400)
num4 = random.randint(400, len(list_guanshui))
comment1 = list_guanshui[num1 - 1]
comment2 = list_guanshui[num2 - 1]
comment3 = list_guanshui[num3 - 1]
comment4 = list_guanshui[num4 - 1]
comment_list = [comment1, comment2, comment3, comment4]
return comment_list
except:
return None
def get_user_id():
user_data = open("/srv/apps/physical/vest/data/get_user_id.txt", "r")
user_list = []
for i in user_data.readlines():
user_list.append(int(i.strip()))
return user_list
def send_email(stat_data):
ret = True
now = datetime.datetime.now()
yes_time = now - datetime.timedelta(days=1)
yes_time_str = yes_time.strftime('%Y%m%d')
try:
msg = MIMEText(stat_data, 'plain', 'utf-8')
msg['From'] = formataddr(["李小芳", my_sender])
msg["To"] = formataddr(["李小芳", my_user6])
msg['Subject'] = str(datetime.date.today()) + " auto-like in the like test environment ran into a problem"
server = smtplib.SMTP_SSL("smtp.exmail.qq.com", 465)
server.login(my_sender, my_pass)
server.sendmail(my_sender, [my_user6], msg.as_string())
server.quit()
return ret
except Exception:
ret = False
logging.error("catch exception,main:%s" % traceback.format_exc())
return ret
def get_error(line, type, id):
try:
if len(line) > 1:
data_dict = json.loads(line)
if data_dict["error"] != 0:
stat_data = str(line) + str(type) + str(id)
ret = send_email(stat_data)
if ret:
logging.info('like query stats email sent successfully')
else:
logging.info('like query stats email failed to send')
else:
pass
except:
logging.error("catch exception,err_msg:%s" % traceback.format_exc())
def get_answer_data():
try:
# pick the first comment (the question half of the pair)
data = open("/srv/apps/physical/vest/data/reply_answer_data1.txt", "r")
answer_data1 = []
for i in data:
answer_data1.append(i)
num1 = random.randint(1, len(answer_data1))
comment1 = answer_data1[num1 - 1]
# pick the paired second comment (same line index in the answer file)
data = open("/srv/apps/physical/vest/data/reply_answer_data2.txt", "r")
answer_data2 = []
for i in data:
answer_data2.append(i)
comment2 = answer_data2[num1 - 1]
logging.info("get comment1:%s" % comment1)
logging.info("get comment2:%s" % comment2)
return comment1, comment2
except:
return ["", ""]
def reply_answer(cookies_get, id, content):
try:
post_dict = {
'topic_id': id,
'content': content,
"type": 4
}
response = requests.post(url=auto_reply_url,
cookies=cookies_get,
data=post_dict)
logging.info("response.text:%s" % response.text)
get_error(response.text, "reply", id)
return response.text
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
def set_reply_to_redis():
try:
redis_client = redis.StrictRedis.from_url(REDIS_URL)
return redis_client
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
def reply2(cookies_get, id, content, replied_id):
try:
post_dict = {
'topic_id': id,
'content': content,
"type": 4,
"replied_id": replied_id
}
response = requests.post(url=auto_reply_url,
cookies=cookies_get,
data=post_dict)
logging.info("response.text:%s" % response.text)
get_error(response.text, "reply2", id)
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
def pictorial_reply(cookies_get, id, content):
try:
post_dict = {
'pictorial_id': id,
'content': content,
"type": 4
}
response = requests.post(url=auto_reply_url,
cookies=cookies_get,
data=post_dict)
logging.info("response.text:%s" % response.text)
get_error(response.text, "pictorial_reply", id)
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
def get_pictorial_comment():
try:
data = open("/srv/apps/physical/vest/data/pictorial_reply_data.txt", "r")
list_guanshui = []
for i in data:
list_guanshui.append(i)
num = random.randint(1, len(list_guanshui))
comment = list_guanshui[num - 1]
return comment
except:
return None
def get_product_comment():
try:
data = open("/srv/apps/physical/vest/data/product_reply_data.txt", "r")
list_guanshui = []
for i in data:
list_guanshui.append(i)
num = random.randint(1, len(list_guanshui))
comment = list_guanshui[num - 1]
return comment
except:
return None
def get_face_comment():
try:
data = open("/srv/apps/physical/vest/data/face_reply_data.txt", "r")
list_guanshui = []
for i in data:
list_guanshui.append(i)
num = random.randint(1, len(list_guanshui))
comment = list_guanshui[num - 1]
return comment
except:
return None
def get_category_reply_commtent(category_id):
try:
# category ids: hairstyle 42, outfit 46, makeup 3, skincare 4, manicure 21972, perfume 17576, slimming 32605, accessories 5305608
file = ""
if category_id == 5305608:  # accessories
file = "/srv/apps/physical/vest/data/accessories_reply_data.txt"
if category_id == 3:  # makeup
file = "/srv/apps/physical/vest/data/beauty_reply_data.txt"
if category_id == 42:  # hairstyle
file = "/srv/apps/physical/vest/data/hairstyle_reply_data.txt"
if category_id == 21972:  # manicure
file = "/srv/apps/physical/vest/data/manicure_reply_data.txt"
if category_id == 17576:  # perfume
file = "/srv/apps/physical/vest/data/perfume_reply_data.txt"
if category_id == 4:  # skincare
file = "/srv/apps/physical/vest/data/skincare_reply_data.txt"
if category_id == 32605:  # slimming
file = "/srv/apps/physical/vest/data/slimming_reply_data.txt"
if category_id == 46:  # outfit
file = "/srv/apps/physical/vest/data/wear_repy_data.txt"
data = open(file, "r")
list_guanshui = []
for i in data:
list_guanshui.append(i)
num = random.randint(1, len(list_guanshui))
comment = list_guanshui[num - 1]
logging.info("get comment:%s" % comment)
return comment
except:
return None
def judge_topic_info_get_comment(topic_id):
try:
comment = ""
# first check whether the topic carries editor tags
edit_tag_list = get_edit_tag_id_list(topic_id)
if len(edit_tag_list) > 0:
# the topic has editor tags:
# resolve their category and draw a random comment from that category's pool
category_tag_id = get_category_tag_id(edit_tag_list)
if category_tag_id > 0:
comment = get_category_reply_commtent(category_tag_id)
else:
product_judge = get_topic_product_info(topic_id)
if product_judge:
comment = get_product_comment()
else:
# check whether the topic's image contains a face
face_judge = topic_has_image(topic_id)
if face_judge:
comment = get_face_comment()
# fall back to the generic comment pool
else:
comment = get_comment()
else:
# no editor tags: check for product info
product_judge = get_topic_product_info(topic_id)
if product_judge:
comment = get_product_comment()
else:
# check whether the topic's image contains a face
face_judge = topic_has_image(topic_id)
if face_judge:
comment = get_face_comment()
# fall back to the generic comment pool
else:
comment = get_comment()
logging.info("get judge_topic_info_get_comment:%s" % comment)
return comment
except:
logging.error("catch exception,logins:%s" % traceback.format_exc())
return None
import pymysql
import traceback
import logging
import json
from vest.request.auto_request import host, user, db, passwd
from libs.cache import redis_client
from libs.error import logging_exception
def get_data():
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute("select user_id from user_extra where is_shadow = 1 ")
data = cursor.fetchall()
user_id = list(data)
logging.info("Database version : %s " % user_id)
user_id_list = []
if user_id:
for i in user_id:
cursor.execute("select user_id from topic where user_id = " + str(
i[0]) + " group by user_id having count(user_id) >5")
data = cursor.fetchall()
user_id = list(data)
if len(user_id):
user_id_list.append(user_id[0])
pc.close()
return user_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return None
def auto_user_id():
try:
data = get_data()
file = open("/srv/apps/physical/vest/data/get_user_id.txt", "w")
if data:
for i in data:
file.write(str(i[0]))
file.write("\n")
redis_client.set("get_user_id_data", json.dumps(data))
except:
logging_exception()
logging.error("index_first:%s" % traceback.format_exc())
import requests
import traceback
import logging
import json
from libs.cache import redis_client
from libs.error import logging_exception
# login_url = "http://earth.iyanzhi.com/api/account/login_pwd"
login_url = "http://earth.gmapp.env/api/account/login_pwd"
def index_first():
try:
r1 = requests.get(login_url)
return r1.cookies.get_dict()
except:
logging.error("index_first:%s" % traceback.format_exc())
return None
def get_cook():
try:
data = open("/srv/apps/physical/vest/data/vest_user_email.txt")
list = []
dicts = {}
for i in data:
list.append(i.strip('\n').strip(','))
for i in list:
cookies = index_first()
post_dict = {
'account_type': 2,
'pwd': '123456',
'email': i
}
response = requests.post(
url=login_url,
data=post_dict,
cookies=cookies
)
headers = response.headers
print(response.text)
cook = headers['Set-Cookie'].split(";")
cook = cook[0].split('=')[1]
logging.info("response.text :%s" % response.text)
dicts[i] = cook
return dicts
except:
logging.error("index_first:%s" % traceback.format_exc())
return None
def get_session():
try:
session_key = "majia_login_session"
dicts = get_cook()
for key, value in dicts.items():
redis_client.hset(session_key, key, json.dumps(value))
except:
logging_exception()
logging.error("index_first:%s" % traceback.format_exc())
import pymysql
import traceback
import logging
from vest.request.auto_request import login, time_convs, Urge
from vest.request.auto_request import host, user, passwd
from libs.error import logging_exception
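# Non-shadow users whose last visit predates the cutoff day; auto_lunch_app sends each an "urge" nudge from a shadow account.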
def get_data(numtime):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db="physical", port=3306)
cursor = pc.cursor()
cursor.execute(
"select user_id from sl_user_login_status where last_visit_day < '%s' and is_shadow =0 " % numtime)
data = cursor.fetchall()
user_id = list(data)
logging.info("Database version : %s " % user_id)
pc.close()
return user_id
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return None
def auto_lunch_app():
try:
numtime1, numtime2 = time_convs(1, 3)
user_id = get_data(numtime2)
for i in user_id:
id = int(i[0])
cook = login()
if cook is not None:
Urge(cook, id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, time_convs, Urge
from vest.request.auto_request import host, user, passwd
from libs.error import logging_exception
def get_data(numtime):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db="physical", port=3306)
cursor = pc.cursor()
cursor.execute(
"select user_id from sl_user_login_status where last_visit_day < '%s' and is_shadow = 0 " % numtime)
data = cursor.fetchall()
user_id = list(data)
logging.info("Database version : %s " % user_id)
pc.close()
return user_id
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return None
def auto_lunch_app2():
try:
numtime1, numtime2 = time_convs(1, 3)
user_id = get_data(numtime2)
for i in user_id:
num = random.randint(0, 1)
if num == 1:
id = int(i[0])
cook = login()
if cook is not None:
Urge(cook, id)
except:
logging_exception()
logging.error("catch exception,main:%s" % traceback.format_exc())
import pymysql
import traceback
import logging
from vest.request.auto_request import login, time_convs, Urge
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
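# Authors of high-level topics (content_level 4 or 5) in the window; auto_star_urge sends each one urge from a shadow account.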
def get_star_userid(numtime1, numtime2):
try:
pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
cursor = pc.cursor()
cursor.execute(
"SELECT distinct(user_id) FROM topic WHERE is_online=1 and content_level in (4,5) and create_time > '%s' and create_time < '%s'" % (
numtime1, numtime2))
data = cursor.fetchall()
user_id = list(data)
topic_id_list = []
for i in user_id and user_id:
cursor.execute(
"select user_id,is_shadow from user_extra where user_id =" + str(
i[0]) + " and is_online =1 and is_deleted =0")
data = cursor.fetchall()
user_id = list(data)
if (user_id and user_id[0][1] == 0) or len(user_id) == 0:
topic_id_list.append(i)
logging.info("Database version : %s " % topic_id_list)
pc.close()
return topic_id_list
except:
logging.error("catch exception,get_data:%s" % traceback.format_exc())
return None
def auto_star_urge():
    # Urge every real author who posted a level-4/5 topic in the window.
    try:
        numtime1, numtime2 = time_convs(8, 0)
        user_id = get_star_useid(numtime1, numtime2)
        for j in user_id or []:  # get_star_useid may return None on error
            id = int(j[0])
            cookies = login()
            if cookies is not None:
                Urge(cookies, id)
    except Exception:
        logging_exception()
        logging.error("catch exception,auto_star_urge:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, time_convs, Urge
from vest.request.auto_request import host, port, passwd, user, db
from libs.error import logging_exception
def get_data(numtime1, numtime2):
    # Collect authors of online topics in the window, then keep only
    # real (non-shadow) accounts.
    try:
        pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
        cursor = pc.cursor()
        cursor.execute(
            "SELECT distinct(user_id) FROM topic WHERE is_online=1 and create_time > '%s' and create_time < '%s'" % (
                numtime1, numtime2))
        author_ids = list(cursor.fetchall())
        topic_id_list = []
        for i in author_ids:
            cursor.execute(
                "select user_id,is_shadow from user_extra where user_id =" + str(
                    i[0]) + " and is_online =1 and is_deleted =0")
            rows = list(cursor.fetchall())
            # Keep the author when user_extra has no row for them or the
            # row says is_shadow = 0.
            if len(rows) == 0 or rows[0][1] == 0:
                topic_id_list.append(i)
        logging.info("real author ids: %s" % topic_id_list)
        pc.close()
        return topic_id_list
    except Exception:
        logging.error("catch exception,get_data:%s" % traceback.format_exc())
        return None
def auto_urge1():
    # One coin flip for the whole run: either every selected author is
    # urged or none are.
    try:
        numtime1, numtime2 = time_convs(8, 0)
        logging.info("get numtime1:%s" % numtime1)
        logging.info("get numtime2:%s" % numtime2)
        user_id = get_data(numtime1, numtime2)
        urge_num = random.randint(0, 1)
        if urge_num == 1:
            for j in user_id or []:  # get_data may return None on error
                id = int(j[0])
                cookies = login()
                if cookies is not None:
                    Urge(cookies, id)
    except Exception:
        logging_exception()
        logging.error("catch exception,auto_urge1:%s" % traceback.format_exc())
import pymysql
import random
import traceback
import logging
from vest.request.auto_request import login, time_convs, Urge
from vest.request.auto_request import host, port, passwd, user, db
from libs.error import logging_exception
def get_data(numtime1, numtime2):
    # Collect authors of online topics in the window, then keep only
    # real (non-shadow) accounts.
    try:
        pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
        cursor = pc.cursor()
        cursor.execute(
            "SELECT distinct(user_id) FROM topic WHERE is_online=1 and create_time > '%s' and create_time < '%s'" % (
                numtime1, numtime2))
        author_ids = list(cursor.fetchall())
        topic_id_list = []
        for i in author_ids:
            cursor.execute(
                "select user_id,is_shadow from user_extra where user_id =" + str(
                    i[0]) + " and is_online =1 and is_deleted =0")
            rows = list(cursor.fetchall())
            # Keep the author when user_extra has no row for them or the
            # row says is_shadow = 0.
            if len(rows) == 0 or rows[0][1] == 0:
                topic_id_list.append(i)
        logging.info("real author ids: %s" % topic_id_list)
        pc.close()
        return topic_id_list
    except Exception:
        logging.error("catch exception,get_data:%s" % traceback.format_exc())
        return None
def auto_urge2():
    # Per-author coin flip: each selected author is urged with
    # probability 1/2.
    try:
        numtime1, numtime2 = time_convs(8, 0)
        logging.info("get numtime1:%s" % numtime1)
        logging.info("get numtime2:%s" % numtime2)
        user_id = get_data(numtime1, numtime2)
        for j in user_id or []:  # get_data may return None on error
            urge_num = random.randint(0, 1)
            if urge_num == 1:
                id = int(j[0])
                cookies = login()
                if cookies is not None:
                    Urge(cookies, id)
    except Exception:
        logging_exception()
        logging.error("catch exception,auto_urge2:%s" % traceback.format_exc())
import requests
import pymysql
import random
import traceback
import logging
import json
import datetime
from libs.cache import redis_client
from vest.request.auto_request import get_offline_comment
from vest.request.auto_request import host, user, db, passwd
from libs.error import logging_exception
auto_reply_url = "http://saturn.iyanzhi.com/api/v1/reply/create_for_inner"
def reply(id, content, user_id):
    # Post a comment on the topic through the inner reply API as the
    # given user.
    try:
        post_dict = {
            'user_id': user_id,
            'topic_id': id,
            'content': content,
            "type": 4
        }
        response = requests.post(url=auto_reply_url,
                                 data=post_dict)
        logging.info("response.text:%s" % response.text)
    except Exception:
        logging.error("catch exception,reply:%s" % traceback.format_exc())
def get_data(now, noww):
    # Fetch recent online level-4/5/6 topics and keep only those whose
    # author is a shadow (vest) account.
    try:
        pc = pymysql.connect(host=host, user=user, passwd=passwd, db=db, port=3306)
        cursor = pc.cursor()
        cursor.execute(
            "SELECT id,user_id FROM topic WHERE ( create_time > '%s' and create_time <= '%s' ) and is_online = 1 and content_level in (4,5,6) " % (
                now, noww))
        topic_id = list(cursor.fetchall())
        topic_id_list = []
        for id in topic_id:
            cursor.execute(
                "select user_id,is_shadow from user_extra where user_id =" + str(
                    id[1]) + " and is_online =1 and is_deleted =0")
            rows = list(cursor.fetchall())
            if rows and rows[0][1] == 1:
                topic_id_list.append(id)
        pc.close()
        logging.info("get majia topic_id_list:%s" % topic_id_list)
        return topic_id_list
    except Exception:
        logging.error("catch exception,get_data:%s" % traceback.format_exc())
        return None
def vest_click_reply():
    # For each vest-authored topic from the last five minutes, post up
    # to four canned comments from randomly chosen vest accounts.
    try:
        now = datetime.datetime.now()
        yes_time = now - datetime.timedelta(minutes=5)
        # "get_user_id_data" is expected to hold a JSON list of vest
        # user rows; if the key is missing, json.loads raises and we
        # land in the except below.
        data = json.loads(redis_client.get("get_user_id_data"))
        user_list = []
        for i in data:
            user_list.append(i[0])
        topic_id = get_data(yes_time, now)
        if topic_id:
            for id in topic_id:
                rand_num = random.randint(0, 4)
                for i in range(rand_num):
                    num = random.randint(0, len(user_list) - 1)
                    user_id = user_list[num]
                    # Skip the topic's own author so accounts do not
                    # reply to themselves.
                    if user_id != id[1]:
                        # Assumes get_offline_comment() returns at least
                        # rand_num (<= 4) comments; fewer would raise
                        # IndexError here.
                        comment_list = get_offline_comment()
                        comment = comment_list[i]
                        reply(id[0], comment, user_id)
    except Exception:
        logging_exception()
        logging.error("catch exception,vest_click_reply:%s" % traceback.format_exc())