Commit ba135498 authored by 赵威

add dropout layer for tractate

parent fec1d7f2
@@ -16,6 +16,7 @@ def build_deep_layer(net, params):
units=num_hidden_units,
activation=tf.nn.relu,
kernel_initializer=tf.glorot_uniform_initializer())
+net = tf.layers.dropout(net, rate=0.1, training=True)
return net
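The added line is the only change in this hunk. Note that it passes training=True, so dropout stays active at inference time as well as during training. Below is a minimal sketch of the same layer gated on a training flag; the surrounding loop over params["hidden_units"] and the tf.layers.dense call are assumed from the visible context and are not taken verbatim from the repository.

import tensorflow as tf  # TF 1.x API, as used in the diff

def build_deep_layer(net, params, is_training):
    # Stack of fully connected ReLU layers; params["hidden_units"] is assumed
    # to be a list such as [256, 128, 64].
    for num_hidden_units in params["hidden_units"]:
        net = tf.layers.dense(
            net,
            units=num_hidden_units,
            activation=tf.nn.relu,
            kernel_initializer=tf.glorot_uniform_initializer())
        # Same dropout rate as the commit, but only active while training,
        # so serving-time predictions stay deterministic.
        net = tf.layers.dropout(net, rate=0.1, training=is_training)
    return net

With tf.estimator, is_training would typically be derived from the mode, e.g. mode == tf.estimator.ModeKeys.TRAIN inside the model_fn.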
@@ -73,7 +73,8 @@ def main():
set_essm_model_save_path("tractate", save_path)
print("============================================================")
save_path = get_essm_model_save_path("tractate")
# TODO save
# save_path = get_essm_model_save_path("tractate")
print("load path: " + save_path)
# save_path = str(Path("~/data/models/tractate/1598236893").expanduser()) # local
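The commented-out local path ("~/data/models/tractate/1598236893") points at a timestamp-named export directory, which is how TF estimator SavedModel exports are usually laid out. As a hedged illustration only (this is not the repository's get_essm_model_save_path helper, and the base path is assumed from that comment), resolving the newest export could look like:

from pathlib import Path

def latest_export_dir(base="~/data/models/tractate"):
    # Hypothetical helper: pick the newest numeric (timestamp-named)
    # subdirectory under the export base path.
    base_path = Path(base).expanduser()
    exports = [p for p in base_path.iterdir() if p.is_dir() and p.name.isdigit()]
    return str(max(exports, key=lambda p: int(p.name)))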