Commit 6ef594e9 authored by Your Name

test local predict with sample id

parent e93b3862
@@ -15,6 +15,7 @@ import subprocess
import time
import glob
import random
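# pandas is used at the bottom of the script to turn local prediction rows into a DataFrame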
import pandas as pd
#################### CMD Arguments ####################
FLAGS = tf.app.flags.FLAGS
@@ -65,7 +66,10 @@ def input_fn(filenames, batch_size=32, num_epochs=1, perform_shuffle=False):
"tag5_list": tf.VarLenFeature(tf.int64),
"tag6_list": tf.VarLenFeature(tf.int64),
"tag7_list": tf.VarLenFeature(tf.int64),
"number": tf.VarLenFeature(tf.int64)
"number": tf.VarLenFeature(tf.int64),
"uid": tf.VarLenFeature(tf.string),
"city": tf.VarLenFeature(tf.string),
"cid_id": tf.VarLenFeature(tf.string)
}
parsed = tf.parse_single_example(record, features)
y = parsed.pop('y')
@@ -133,6 +137,9 @@ def model_fn(features, labels, mode, params):
tag6_list = features['tag6_list']
tag7_list = features['tag7_list']
number = features['number']
uid = features['uid']
city = features['city']
cid_id = features['cid_id']
if FLAGS.task_type != "infer":
y = labels['y']
@@ -157,6 +164,9 @@ def model_fn(features, labels, mode, params):
tag2, tag3, tag4, tag5, tag6, tag7], axis=1)
sample_id = tf.sparse.to_dense(number)
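# densify the sparse uid/city/cid_id tensors so every prediction row carries its string identifiers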
uid = tf.sparse.to_dense(uid,default_value="")
city = tf.sparse.to_dense(city,default_value="")
cid_id = tf.sparse.to_dense(cid_id,default_value="")
with tf.name_scope("CVR_Task"):
if mode == tf.estimator.ModeKeys.TRAIN:
@@ -203,7 +213,7 @@ def model_fn(features, labels, mode, params):
pctcvr = pctr*pcvr
predictions={"pcvr": pcvr, "pctr": pctr, "pctcvr": pctcvr, "sample_id": sample_id}
predictions={"pctcvr": pctcvr, "sample_id": sample_id, "uid":uid, "city":city, "cid_id":cid_id}
export_outputs = {tf.saved_model.signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY: tf.estimator.export.PredictOutput(predictions)}
# Provide an estimator spec for `ModeKeys.PREDICT`
if mode == tf.estimator.ModeKeys.PREDICT:
@@ -224,11 +234,11 @@ def model_fn(features, labels, mode, params):
# Provide an estimator spec for `ModeKeys.EVAL`
eval_metric_ops = {
"CTR_AUC": tf.metrics.auc(y, pctr),
# "CTR_AUC": tf.metrics.auc(y, pctr),
#"CTR_F1": tf.contrib.metrics.f1_score(y,pctr),
#"CTR_Precision": tf.metrics.precision(y,pctr),
#"CTR_Recall": tf.metrics.recall(y,pctr),
"CVR_AUC": tf.metrics.auc(z, pcvr),
# "CVR_AUC": tf.metrics.auc(z, pcvr),
"CTCVR_AUC": tf.metrics.auc(z, pctcvr)
}
if mode == tf.estimator.ModeKeys.EVAL:
@@ -311,7 +321,7 @@ def set_dist_env():
print(json.dumps(tf_config))
os.environ['TF_CONFIG'] = json.dumps(tf_config)
def main(_):
def main(te_files):
#------check Arguments------
if FLAGS.dt_dir == "":
FLAGS.dt_dir = (date.today() + timedelta(-1)).strftime('%Y%m%d')
@@ -321,7 +331,6 @@ def main(_):
tr_files = ["hdfs://172.16.32.4:8020/strategy/esmm/test_tr/part-r-00000"]
va_files = ["hdfs://172.16.32.4:8020/strategy/esmm/va/part-r-00000"]
# te_files = ["%s/part-r-00000" % FLAGS.hdfs_dir]
te_files = ["hdfs://172.16.32.4:8020/strategy/esmm/test_nearby/part-r-00000"]
if FLAGS.clear_existing_model:
try:
@@ -360,10 +369,11 @@ def main(_):
for key,value in sorted(result.items()):
print('%s: %s' % (key,value))
elif FLAGS.task_type == 'infer':
preds = Estimator.predict(input_fn=lambda: input_fn(te_files, num_epochs=1, batch_size=FLAGS.batch_size), predict_keys=["pctcvr","pctr","pcvr","sample_id"])
with open(FLAGS.local_dir + "/pred.txt", "w") as fo:
preds = Estimator.predict(input_fn=lambda: input_fn(te_files, num_epochs=1, batch_size=FLAGS.batch_size), predict_keys=["pctcvr","sample_id","uid","city","cid_id"])
result = []
for prob in preds:
fo.write("%f\t%f\t%f\t%s\n" % (prob['pctr'], prob['pcvr'], prob['pctcvr'], prob["sample_id"][0]))
result.append([str(prob["sample_id"][0]),str(prob["uid"][0]),str(prob["city"][0]),str(prob["cid_id"][0]),str(prob['pctcvr'])])
return result
elif FLAGS.task_type == 'export':
print("Not Implemented, Do It Yourself!")
@@ -373,8 +383,11 @@ if __name__ == "__main__":
b = time.time()
path = "hdfs://172.16.32.4:8020/strategy/esmm/"
tf.logging.set_verbosity(tf.logging.INFO)
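# nearby test set read straight from HDFS for local inference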
te_files = ["hdfs://172.16.32.4:8020/strategy/esmm/test_nearby/part-r-00000"]
print("hello up")
tf.app.run()
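# call main() directly (rather than via tf.app.run()) so the prediction rows can be collected here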
result = main(te_files)
df = pd.DataFrame(result, columns=["sample_id","uid","city","cid_id","pctcvr"])
print(df.head(10))
print("hello down")
print("耗时(分钟):")
print((time.time()-b)/60)
\ No newline at end of file
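A minimal follow-up sketch, assuming FLAGS.local_dir still points to a writable local directory (as in the earlier pred.txt path), showing how the assembled DataFrame could be persisted for inspection:

# hypothetical usage: dump the local predictions to a tab-separated file
# (the file name local_pred.csv is illustrative, not taken from the commit)
df.to_csv(FLAGS.local_dir + "/local_pred.csv", sep="\t", index=False)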