Commit 36608059 authored by 张彦钊

test

parent 33cb78a9
@@ -18,6 +18,19 @@ def con_sql(db,sql):
     return df


+def multi_hot(df,column,n):
+    df[column] = df[column].fillna("lost_na")
+    app_list_value = [i.split(",") for i in df[column].unique()]
+    app_list_unique = []
+    for i in app_list_value:
+        app_list_unique.extend(i)
+    app_list_unique = list(set(app_list_unique))
+    number = len(app_list_unique)
+    app_list_map = dict(zip(app_list_unique, list(range(n, number + n))))
+    df[column] = df[column].apply(app_list_func, args=(app_list_map,))
+    return number,app_list_map
+
+
 def get_data():
     db = pymysql.connect(host='10.66.157.22', port=4000, user='root', passwd='3SYz54LS9#^9sBvC', db='jerry_test')
     sql = "select max(stat_date) from {}".format(train_data_set)
@@ -27,17 +40,18 @@ def get_data():
     start = (temp - datetime.timedelta(days=20)).strftime("%Y-%m-%d")
     print(start)
     db = pymysql.connect(host='10.66.157.22', port=4000, user='root', passwd='3SYz54LS9#^9sBvC', db='jerry_test')
-    sql = "select e.y,e.z,e.stat_date,e.ucity_id,e.clevel1_id,e.ccity_name," \
-          "u.device_type,u.manufacturer,u.channel,c.top,cl.l1,cl.l2,e.device_id,cut.time,dl.app_list " \
+    sql = "select e.y,e.z,e.stat_date,e.ucity_id,feat.level2_ids,e.ccity_name," \
+          "u.device_type,u.manufacturer,u.channel,c.top,e.device_id,cut.time,dl.app_list " \
          "from {} e left join user_feature u on e.device_id = u.device_id " \
          "left join cid_type_top c on e.device_id = c.device_id " \
          "left join cid_level2 cl on e.cid_id = cl.cid " \
          "left join cid_time_cut cut on e.cid_id = cut.cid " \
          "left join device_app_list dl on e.device_id = dl.device_id " \
+          "left join diary_feat feat on e.cid_id = feat.diary_id " \
          "where e.stat_date >= '{}'".format(train_data_set,start)
     df = con_sql(db, sql)
     # print(df.shape)
-    df = df.rename(columns={0: "y", 1: "z", 2: "stat_date", 3: "ucity_id", 4: "clevel1_id", 5: "ccity_name",
+    df = df.rename(columns={0: "y", 1: "z", 2: "stat_date", 3: "ucity_id", 4: "clevel2_id", 5: "ccity_name",
                             6: "device_type", 7: "manufacturer", 8: "channel", 9: "top", 10: "l1",11: "l2",
                             12: "device_id", 13: "time",14:"app_list"})
     print("esmm data ok")
@@ -46,30 +60,23 @@ def get_data():
     print(df.shape)
     print("after")
     df = df.drop_duplicates()
-    df = df.drop_duplicates(["ucity_id", "clevel1_id", "ccity_name", "device_type", "manufacturer",
+    df = df.drop_duplicates(["ucity_id", "clevel2_id", "ccity_name", "device_type", "manufacturer",
                              "channel", "top", "l1","l2", "time", "stat_date","app_list"])
-    df["app_list"] = df["app_list"].fillna("lost_na")
-    app_list_value = [i.split(",") for i in df["app_list"].unique()]
-    app_list_unique = []
-    for i in app_list_value:
-        app_list_unique.extend(i)
-    app_list_unique = list(set(app_list_unique))
-    app_list_map = dict(zip(app_list_unique, list(range(1, len(app_list_unique) + 1))))
-    df["app_list"] = df["app_list"].apply(app_list_func,args=(app_list_map,))
-    print("after applist map")
-    print(df.head(2))
-    # print(df.shape)
-    # print("exp numbers:")
-    # print(df[df["y"] == 0].shape)
-    # print("click numbers")
-    # print(df[(df["y"] == 1)&(df["z"] == 0)].shape)
-    # print("buy numbers")
-    # print(df[(df["y"] == 1) & (df["z"] == 1)].shape)
+    app_list_number,app_list_map = multi_hot(df,"app_list",1)
+    level2_number,level2_map = multi_hot(df,"clevel2_id",1+app_list_number)
+    # df["app_list"] = df["app_list"].fillna("lost_na")
+    # app_list_value = [i.split(",") for i in df["app_list"].unique()]
+    # app_list_unique = []
+    # for i in app_list_value:
+    #     app_list_unique.extend(i)
+    # app_list_unique = list(set(app_list_unique))
+    # app_list_map = dict(zip(app_list_unique, list(range(1, len(app_list_unique) + 1))))
+    # df["app_list"] = df["app_list"].apply(app_list_func,args=(app_list_map,))
     unique_values = []
-    features = ["ucity_id", "clevel1_id", "ccity_name", "device_type", "manufacturer",
+    features = ["ucity_id", "ccity_name", "device_type", "manufacturer",
                 "channel", "top", "time", "stat_date"]
     for i in features:
         df[i] = df[i].astype("str")
@@ -88,13 +95,13 @@ def get_data():
     print(len(unique_values))
     print(df.head(2))
-    temp = list(range(len(app_list_unique) + 1,len(app_list_unique) + len(unique_values)+1))
+    temp = list(range(1+app_list_number+level2_number, 1 + app_list_number+level2_number + len(unique_values)))
     value_map = dict(zip(unique_values,temp))
     df = df.drop("device_id", axis=1)
     train = df[df["stat_date"] != validate_date+"stat_date"]
     test = df[df["stat_date"] == validate_date+"stat_date"]
-    for i in ["ucity_id", "clevel1_id", "ccity_name", "device_type", "manufacturer",
+    for i in ["ucity_id", "ccity_name", "device_type", "manufacturer",
               "channel", "top", "l1", "time", "stat_date","l2"]:
         train[i] = train[i].map(value_map)
         test[i] = test[i].map(value_map)
...
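After the two multi_hot calls, app_list tokens occupy ids 1 .. app_list_number and clevel2_id tokens occupy the next level2_number ids, so the remaining single-valued categorical features are mapped into the range that starts right after both. Every feature value then has a unique id in one shared space, which is what lets the model use a single embedding table. A rough illustration of that bookkeeping, with made-up counts:

# Illustrative numbers only; the real counts come from multi_hot above.
app_list_number = 5000                 # app_list tokens   -> ids 1 .. 5000
level2_number = 300                    # clevel2_id tokens -> ids 5001 .. 5300
unique_values = ["beijing", "android", "xiaomi"]   # remaining feature values

start = 1 + app_list_number + level2_number        # 5301
temp = list(range(start, start + len(unique_values)))
value_map = dict(zip(unique_values, temp))
print(value_map)   # {'beijing': 5301, 'android': 5302, 'xiaomi': 5303}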
@@ -110,10 +110,7 @@ def model_fn(features, labels, mode, params):
     with tf.variable_scope("Shared-Embedding-layer"):
         embedding_id = tf.nn.embedding_lookup(Feat_Emb,feat_ids)
         app_id = tf.nn.embedding_lookup_sparse(Feat_Emb, sp_ids=app_list, sp_weights=None, combiner="sum")
-        print("a")
-        print(embedding_id.shape)
-        print("b")
-        print(app_id.shape)
         # x_concat = tf.reshape(embedding_id,shape=[-1, common_dims])  # None * (F * K)
         x_concat = tf.concat([tf.reshape(embedding_id,shape=[-1,common_dims]),app_id], axis=1)
...
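In the Shared-Embedding-layer, the fixed set of per-example feature ids is looked up densely and flattened to F*K values, while the variable-length app_list arrives as a SparseTensor and is pooled into a single K-dimensional vector by tf.nn.embedding_lookup_sparse with combiner="sum"; the two are then concatenated per example. A minimal sketch of that shape flow, assuming TensorFlow 1.x (matching the tf.variable_scope style used in this file) and made-up sizes:

import tensorflow as tf

embedding_size = 8                       # K (illustrative)
field_size = 3                           # F (illustrative)
common_dims = field_size * embedding_size

# One embedding table shared by the dense field ids and the app_list ids.
Feat_Emb = tf.get_variable("emb", shape=[10000, embedding_size])

feat_ids = tf.constant([[11, 42, 7], [3, 42, 9]])            # batch * F
app_list = tf.SparseTensor(indices=[[0, 0], [0, 1], [1, 0]],
                           values=tf.constant([101, 205, 307], dtype=tf.int64),
                           dense_shape=[2, 2])               # ragged app ids

embedding_id = tf.nn.embedding_lookup(Feat_Emb, feat_ids)    # batch * F * K
app_id = tf.nn.embedding_lookup_sparse(Feat_Emb, sp_ids=app_list,
                                       sp_weights=None, combiner="sum")  # batch * K
x_concat = tf.concat([tf.reshape(embedding_id, shape=[-1, common_dims]),
                      app_id], axis=1)                       # batch * (F*K + K)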