ML / ffm-baseline

Commit 80e00192, authored Dec 21, 2018 by 张彦钊
Parent: 02589ea3

    add doctor

Showing 1 changed file with 5 additions and 5 deletions.

tensnsorflow/ffm.py (+5, -5)

Relative to the parent, the diff drops s.hospital_id from the training query and the downstream column maps (keeping s.doctor_id), and redirects the train/validation dumps from tr.csv/va.csv to tr_new_doc.csv/va_new_doc.csv.
...
@@ -148,7 +148,7 @@ def get_data():
     print(start)
     db = pymysql.connect(host='10.66.157.22', port=4000, user='root', passwd='3SYz54LS9#^9sBvC', db='jerry_test')
     sql = "select e.y,e.z,e.stat_date,e.ucity_id,e.clevel1_id,e.ccity_name," \
-          "u.device_type,u.manufacturer,u.channel,c.top,cid_time.time,s.doctor_id,s.hospital_id " \
+          "u.device_type,u.manufacturer,u.channel,c.top,cid_time.time,s.doctor_id " \
           "from esmm_train_data e left join user_feature u on e.device_id = u.device_id " \
           "left join cid_type_top c on e.device_id = c.device_id left join cid_time on e.cid_id = cid_time.cid_id " \
           "left join service_hospital s on e.diary_service_id = s.id " \
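For context, get_data() materializes the training frame by running this query over a bare pymysql cursor, which is why the next hunk renames integer column labels. A minimal sketch of that load path, not part of the commit (the fetch_esmm_frame helper name is hypothetical; the connection values are the ones shown in the diff):

    import pymysql
    import pandas as pd

    # Hypothetical helper, not in the commit: execute the query with a plain
    # cursor and wrap the result tuples in a DataFrame. Rows arrive as tuples,
    # so the frame gets integer column labels (0, 1, 2, ...) until get_data()
    # renames them.
    def fetch_esmm_frame(sql):
        db = pymysql.connect(host='10.66.157.22', port=4000, user='root',
                             passwd='3SYz54LS9#^9sBvC', db='jerry_test')
        try:
            cursor = db.cursor()
            cursor.execute(sql)
            rows = cursor.fetchall()
        finally:
            db.close()
        return pd.DataFrame(list(rows))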
...
@@ -157,11 +157,11 @@ def get_data():
     print(df.shape)
     df = df.rename(columns={0:"y",1:"z",2:"stat_date",3:"ucity_id",4:"clevel1_id",5:"ccity_name",
                             6:"device_type",7:"manufacturer",8:"channel",9:"top",10:"time",
-                            11:"doctor_id",12:"hospital_id"})
+                            11:"doctor_id"})
     print("esmm data ok")
     print(df.head(2))
     features = 0
-    category = ["ucity_id","clevel1_id","ccity_name","device_type","manufacturer","channel","top","hospital_id","doctor_id"]
+    category = ["ucity_id","clevel1_id","ccity_name","device_type","manufacturer","channel","top","doctor_id"]
     for i in category:
         df[i] = df[i].fillna("na")
         features = features + len(df[i].unique())
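The loop above sizes the categorical feature space: each category column is null-filled and its distinct-value count is added to features. An equivalent sketch under the post-commit column names, assuming df is the renamed frame:

    # Same computation as the loop in this hunk, written against the
    # post-commit category list; assumes `df` is the renamed frame.
    category = ["ucity_id", "clevel1_id", "ccity_name", "device_type",
                "manufacturer", "channel", "top", "doctor_id"]
    df[category] = df[category].fillna("na")
    features = sum(df[c].nunique() for c in category)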
...
@@ -199,8 +199,8 @@ def transform(a,validate_date):
     test = test.drop("stat_date",axis=1)
     # print("train shape")
     # print(train.shape)
-    train.to_csv(path+"tr.csv",sep="\t",index=False)
-    test.to_csv(path+"va.csv",sep="\t",index=False)
+    train.to_csv(path+"tr_new_doc.csv",sep="\t",index=False)
+    test.to_csv(path+"va_new_doc.csv",sep="\t",index=False)
     return model
...
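transform() now writes the train/validation splits as tab-separated files with a header row, so a downstream reader would load them along these lines (a sketch only; path is whatever prefix the script configures, shown here as a placeholder):

    import pandas as pd

    path = "/path/to/output/"  # placeholder; the real prefix is set elsewhere in ffm.py
    train = pd.read_csv(path + "tr_new_doc.csv", sep="\t")
    test = pd.read_csv(path + "va_new_doc.csv", sep="\t")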