Commit e9c8f08

Author: 浅梦

v0.8.2
Refactor DNN Layer
1 parent f9b07e4 commit e9c8f08

39 files changed (+71 lines, -97 lines)
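
Every estimator-model diff below makes the same call-site change: the `DNN` layer's `seed` argument moves from positional to keyword form (`seed=seed`), and each call is collapsed onto a single line. Passing `seed` by keyword keeps these call sites correct even if the refactored `DNN` layer gains new parameters ahead of `seed` (the new layer signature itself is not part of this diff). A minimal, self-contained sketch of the new call style, assuming deepctr 0.8.2 and its `deepctr.layers.core.DNN` layer; the hidden units, regularization, and dropout values are illustrative only:

```python
import tensorflow as tf
from deepctr.layers.core import DNN  # the layer refactored in this commit

# Toy dense input; the shape is arbitrary and only for illustration.
dnn_input = tf.keras.layers.Input(shape=(16,))

# Old call style (removed lines below): seed passed positionally,
#   DNN(hidden_units, activation, l2_reg, dropout_rate, use_bn, seed)(x)
# New call style (added lines below): seed passed by keyword, so the call no
# longer depends on where seed sits in the refactored DNN signature.
deep_out = DNN((128, 64), 'relu', l2_reg=1e-5, dropout_rate=0.5,
               use_bn=False, seed=1024)(dnn_input)

model = tf.keras.Model(inputs=dnn_input, outputs=deep_out)
model.summary()
```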

.github/ISSUE_TEMPLATE/bug_report.md

Lines changed: 2 additions & 2 deletions
@@ -19,8 +19,8 @@ Steps to reproduce the behavior:
 
 **Operating environment(运行环境):**
 - python version [e.g. 3.5, 3.7]
-- tensorflow version [e.g. 1.4.0, 1.15.0, 2.2.0]
-- deepctr version [e.g. 0.8.0,]
+- tensorflow version [e.g. 1.4.0, 1.15.0, 2.3.0]
+- deepctr version [e.g. 0.8.2,]
 
 **Additional context**
 Add any other context about the problem here.

.github/ISSUE_TEMPLATE/question.md

Lines changed: 2 additions & 2 deletions
@@ -16,5 +16,5 @@ Add any other context about the problem here.
 
 **Operating environment(运行环境):**
 - python version [e.g. 3.6]
-- tensorflow version [e.g. 1.4.0, 1.5.0, 2.2.0]
-- deepctr version [e.g. 0.8.0,]
+- tensorflow version [e.g. 1.4.0, 1.5.0, 2.3.0]
+- deepctr version [e.g. 0.8.2,]

README.md

Lines changed: 4 additions & 5 deletions
@@ -57,16 +57,16 @@ Let's [**Get Started!**](https://deepctr-doc.readthedocs.io/en/latest/Quick-Star
 
 ## Citation
 
-- Weichen Shen. (2018). DeepCTR: Easy-to-use,Modular and Extendible package of deep-learning based CTR models. https://github.com/shenweichen/deepctr.
+- Weichen Shen. (2017). DeepCTR: Easy-to-use,Modular and Extendible package of deep-learning based CTR models. https://github.com/shenweichen/deepctr.
 
 
 If you find this code useful in your research, please cite it using the following BibTeX:
 
 ```bibtex
-@misc{shen2018deepctr,
+@misc{shen2017deepctr,
   author = {Weichen Shen},
   title = {DeepCTR: Easy-to-use,Modular and Extendible package of deep-learning based CTR models},
-  year = {2018},
+  year = {2017},
   publisher = {GitHub},
   journal = {GitHub Repository},
   howpublished = {\url{https://github.com/shenweichen/deepctr}},
@@ -86,7 +86,6 @@ For more information about the recommendation system, such as **feature engineer
 
 更多关于推荐系统的内容,如**特征工程,用户画像,召回,排序和多目标优化,在线学习与实时计算以及更多前沿技术和实战项目**等可参考:
 
-
 - [推荐系统实战](https://www.julyedu.com/course/getDetail/181?ccode=5ee751d37278c)
-- [推荐系统就业小班](https://www.julyedu.com/course/getDetail/321?ccode=5ee751d37278c)
+- [互联网计算广告实战](https://www.julyedu.com/course/getDetail/158?ccode=5ee751d37278c)
 
deepctr/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 from .utils import check_version
 
-__version__ = '0.8.1'
+__version__ = '0.8.2'
 check_version(__version__)
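
As a quick sanity check after upgrading (for example via `pip install -U deepctr==0.8.2`), the bumped version string from `deepctr/__init__.py` can be read back directly; as the diff shows, `check_version(__version__)` runs at import time:

```python
import deepctr

# __version__ is set in deepctr/__init__.py (bumped to '0.8.2' in this commit)
# and is passed to check_version() when the package is imported.
print(deepctr.__version__)  # expected: 0.8.2
```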

deepctr/estimator/models/autoint.py

Lines changed: 2 additions & 4 deletions
@@ -73,14 +73,12 @@ def _model_fn(features, labels, mode, config):
     dnn_input = combined_dnn_input(sparse_embedding_list, dense_value_list)
 
     if len(dnn_hidden_units) > 0 and att_layer_num > 0:  # Deep & Interacting Layer
-        deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
-                       dnn_use_bn, seed)(dnn_input, training=train_flag)
+        deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout, dnn_use_bn, seed=seed)(dnn_input, training=train_flag)
         stack_out = tf.keras.layers.Concatenate()([att_output, deep_out])
         final_logit = tf.keras.layers.Dense(
             1, use_bias=False, kernel_initializer=tf.keras.initializers.glorot_normal(seed))(stack_out)
     elif len(dnn_hidden_units) > 0:  # Only Deep
-        deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
-                       dnn_use_bn, seed)(dnn_input, training=train_flag)
+        deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout, dnn_use_bn, seed=seed)(dnn_input, training=train_flag)
         final_logit = tf.keras.layers.Dense(
             1, use_bias=False, kernel_initializer=tf.keras.initializers.glorot_normal(seed))(deep_out)
     elif att_layer_num > 0:  # Only Interacting Layer
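
For context, a generic (non-deepctr) illustration of why the `seed=seed` form is safer than the old positional call: if a refactor inserts a new parameter ahead of `seed`, positional call sites silently bind the wrong value, while keyword call sites keep working. The `output_activation` parameter below is purely hypothetical and is not taken from this commit:

```python
# Hypothetical before/after signatures, for illustration only.
def dnn_before_refactor(hidden_units, activation, l2_reg, dropout_rate, use_bn, seed=1024):
    return seed

def dnn_after_refactor(hidden_units, activation, l2_reg, dropout_rate, use_bn,
                       output_activation=None,  # hypothetical new parameter
                       seed=1024):
    return seed

# Against the old signature, the positional call bound seed correctly:
print(dnn_before_refactor((128, 64), 'relu', 0.0, 0.5, False, 2020))      # -> 2020
# Against the new signature, the same positional call silently mis-binds seed:
print(dnn_after_refactor((128, 64), 'relu', 0.0, 0.5, False, 2020))       # -> 1024
# The keyword form used throughout this commit stays correct:
print(dnn_after_refactor((128, 64), 'relu', 0.0, 0.5, False, seed=2020))  # -> 2020
```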

deepctr/estimator/models/ccpm.py

Lines changed: 1 addition & 2 deletions
@@ -80,8 +80,7 @@ def _model_fn(features, labels, mode, config):
                              k=min(k, int(conv_result.shape[1])), axis=1)(conv_result)
 
     flatten_result = tf.keras.layers.Flatten()(pooling_result)
-    dnn_out = DNN(dnn_hidden_units, l2_reg=l2_reg_dnn,
-                  dropout_rate=dnn_dropout, seed=seed)(flatten_result, training=train_flag)
+    dnn_out = DNN(dnn_hidden_units, l2_reg=l2_reg_dnn, dropout_rate=dnn_dropout, seed=seed)(flatten_result, training=train_flag)
     dnn_logit = tf.keras.layers.Dense(1, use_bias=False, kernel_initializer=tf.keras.initializers.glorot_normal(seed))(dnn_out)
 
     logits = linear_logits + dnn_logit

deepctr/estimator/models/dcn.py

Lines changed: 2 additions & 4 deletions
@@ -63,15 +63,13 @@ def _model_fn(features, labels, mode, config):
     dnn_input = combined_dnn_input(sparse_embedding_list, dense_value_list)
 
     if len(dnn_hidden_units) > 0 and cross_num > 0:  # Deep & Cross
-        deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
-                       dnn_use_bn, seed)(dnn_input, training=train_flag)
+        deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout, dnn_use_bn, seed=seed)(dnn_input, training=train_flag)
         cross_out = CrossNet(cross_num, l2_reg=l2_reg_cross)(dnn_input)
         stack_out = tf.keras.layers.Concatenate()([cross_out, deep_out])
         final_logit = tf.keras.layers.Dense(
             1, use_bias=False, kernel_initializer=tf.keras.initializers.glorot_normal(seed))(stack_out)
     elif len(dnn_hidden_units) > 0:  # Only Deep
-        deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
-                       dnn_use_bn, seed)(dnn_input, training=train_flag)
+        deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout, dnn_use_bn, seed=seed)(dnn_input, training=train_flag)
         final_logit = tf.keras.layers.Dense(
             1, use_bias=False, kernel_initializer=tf.keras.initializers.glorot_normal(seed))(deep_out)
     elif cross_num > 0:  # Only Cross

deepctr/estimator/models/deepfm.py

Lines changed: 1 addition & 2 deletions
@@ -63,8 +63,7 @@ def _model_fn(features, labels, mode, config):
 
     fm_logit = FM()(concat_func(sparse_embedding_list, axis=1))
 
-    dnn_output = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
-                     dnn_use_bn, seed)(dnn_input, training=train_flag)
+    dnn_output = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout, dnn_use_bn, seed=seed)(dnn_input, training=train_flag)
     dnn_logit = tf.keras.layers.Dense(
         1, use_bias=False, kernel_initializer=tf.keras.initializers.glorot_normal(seed=seed))(dnn_output)

deepctr/estimator/models/fibinet.py

Lines changed: 1 addition & 2 deletions
@@ -68,8 +68,7 @@ def _model_fn(features, labels, mode, config):
 
     dnn_input = combined_dnn_input(
         [Flatten()(concat_func([senet_bilinear_out, bilinear_out]))], dense_value_list)
-    dnn_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
-                  False, seed)(dnn_input, training=train_flag)
+    dnn_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout, False, seed=seed)(dnn_input, training=train_flag)
     dnn_logit = Dense(
         1, use_bias=False, kernel_initializer=tf.keras.initializers.glorot_normal(seed))(dnn_out)
 
deepctr/estimator/models/fnn.py

Lines changed: 1 addition & 2 deletions
@@ -53,8 +53,7 @@ def _model_fn(features, labels, mode, config):
     sparse_embedding_list, dense_value_list = input_from_feature_columns(features, dnn_feature_columns,
                                                                          l2_reg_embedding=l2_reg_embedding)
     dnn_input = combined_dnn_input(sparse_embedding_list, dense_value_list)
-    deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn,
-                   dnn_dropout, False, seed)(dnn_input, training=train_flag)
+    deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout, False, seed=seed)(dnn_input, training=train_flag)
     dnn_logit = tf.keras.layers.Dense(
         1, use_bias=False, kernel_initializer=tf.keras.initializers.glorot_normal(seed))(deep_out)
 