# --- DeepFM-style builder body: linear + FM + DNN logits over grouped embeddings ---
# (`features`, `inputs_list` and the hyper-parameters are defined earlier in the
# original function; this is only the tail of the builder.)
group_embedding_dict, dense_value_list = input_from_feature_columns(features, dnn_feature_columns,
                                                                    l2_reg_embedding, init_std, seed,
                                                                    support_group=True)

linear_logit = get_linear_logit(features, linear_feature_columns, init_std=init_std, seed=seed,
                                prefix='linear', l2_reg=l2_reg_linear)

# FM term: one FM layer per feature group listed in fm_group, summed.
fm_logit = add_func([FM()(concat_func(v, axis=1))
                     for k, v in group_embedding_dict.items() if k in fm_group])

# DNN term: all grouped embeddings plus dense values feed a shared MLP.
dnn_input = combined_dnn_input(list(chain.from_iterable(group_embedding_dict.values())),
                               dense_value_list)
dnn_output = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
                 dnn_use_bn, seed)(dnn_input)
dnn_logit = tf.keras.layers.Dense(1, use_bias=False, activation=None)(dnn_output)

final_logit = add_func([linear_logit, fm_logit, dnn_logit])
output = PredictionLayer(task)(final_logit)
model = tf.keras.models.Model(inputs=inputs_list, outputs=output)
return model
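
# Usage sketch (illustrative addition, not part of the original source). It assumes
# the public DeepCTR API: deepctr.models.DeepFM plus SparseFeat / DenseFeat feature
# columns. The feature-column module path varies by DeepCTR version
# (deepctr.feature_column in newer releases, deepctr.inputs in older ones), and the
# feature names, sizes and data below are made up for the example.
import numpy as np
from deepctr.feature_column import SparseFeat, DenseFeat
from deepctr.models import DeepFM

feature_columns = [SparseFeat('user_id', vocabulary_size=1000, embedding_dim=8),
                   SparseFeat('item_id', vocabulary_size=5000, embedding_dim=8),
                   DenseFeat('price', 1)]

model = DeepFM(feature_columns, feature_columns, task='binary')
model.compile('adam', 'binary_crossentropy')

n = 256
data = {'user_id': np.random.randint(0, 1000, n),
        'item_id': np.random.randint(0, 5000, n),
        'price': np.random.rand(n)}
labels = np.random.randint(0, 2, n)
model.fit(data, labels, batch_size=64, epochs=1)
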
# --- AFM-style builder body: linear logit plus attentional (or plain) FM interactions ---
inputs_list = list(features.values())

group_embedding_dict, _ = input_from_feature_columns(features, dnn_feature_columns, l2_reg_embedding,
                                                     init_std, seed, support_dense=False,
                                                     support_group=True)

linear_logit = get_linear_logit(features, linear_feature_columns, init_std=init_std, seed=seed,
                                prefix='linear', l2_reg=l2_reg_linear)

if use_attention:
    # Attentional FM: pairwise interactions reweighted by a learned attention net.
    fm_logit = add_func([AFMLayer(attention_factor, l2_reg_att, afm_dropout,
                                  seed)(list(v))
                         for k, v in group_embedding_dict.items() if k in fm_group])
else:
    # Plain FM second-order term.
    fm_logit = add_func([FM()(concat_func(v, axis=1))
                         for k, v in group_embedding_dict.items() if k in fm_group])

final_logit = add_func([linear_logit, fm_logit])
output = PredictionLayer(task)(final_logit)
model = tf.keras.models.Model(inputs=inputs_list, outputs=output)
return model
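
# Both branches above reduce a (batch, fields, dim) stack of embeddings to a scalar
# interaction logit. Below is a minimal sketch of the classic FM second-order pooling
# that an FM()-style layer is generally assumed to compute,
# 0.5 * sum((sum_i v_i)^2 - sum_i v_i^2); this is the textbook formula, not
# necessarily the exact layer implementation used here.
import tensorflow as tf

def fm_second_order(stacked_embeddings):
    """stacked_embeddings: (batch, num_fields, embedding_dim) -> (batch, 1)."""
    square_of_sum = tf.square(tf.reduce_sum(stacked_embeddings, axis=1))   # (B, D)
    sum_of_square = tf.reduce_sum(tf.square(stacked_embeddings), axis=1)   # (B, D)
    return 0.5 * tf.reduce_sum(square_of_sum - sum_of_square, axis=1, keepdims=True)
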
# --- Wide & Deep-style builder body: linear logit plus a DNN logit ---
inputs_list = list(features.values())

sparse_embedding_list, dense_value_list = input_from_feature_columns(features, dnn_feature_columns,
                                                                     l2_reg_embedding, init_std, seed)

linear_logit = get_linear_logit(features, linear_feature_columns, init_std=init_std, seed=seed,
                                prefix='linear', l2_reg=l2_reg_linear)

dnn_input = combined_dnn_input(sparse_embedding_list, dense_value_list)
dnn_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
              False, seed)(dnn_input)
dnn_logit = Dense(1, use_bias=False, activation=None)(dnn_out)

final_logit = add_func([dnn_logit, linear_logit])
output = PredictionLayer(task)(final_logit)
model = Model(inputs=inputs_list, outputs=output)
return model
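
# add_func and concat_func are used throughout these builders to merge logit and
# embedding tensors. The helpers below are a hedged sketch of the behaviour these
# call sites appear to rely on (sum a list of tensors / concatenate a list of
# tensors, passing a single tensor through unchanged); the real DeepCTR utilities
# may differ in detail.
import tensorflow as tf

def add_func_sketch(tensor_list):
    # Sum a list of logit tensors; a single element is returned as-is.
    if len(tensor_list) == 1:
        return tensor_list[0]
    return tf.keras.layers.add(tensor_list)

def concat_func_sketch(tensor_list, axis=-1):
    # Concatenate a list of tensors along `axis`; a single element is returned as-is.
    if len(tensor_list) == 1:
        return tensor_list[0]
    return tf.keras.layers.concatenate(tensor_list, axis=axis)
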
# --- FiBiNET-style builder body: SENET-reweighted and raw bilinear interactions feed a DNN ---
# (`sparse_embedding_list`, `senet_embedding_list`, `dense_value_list` and `inputs_list`
# are built earlier in the original function.)
senet_bilinear_out = BilinearInteraction(
    bilinear_type=bilinear_type, seed=seed)(senet_embedding_list)
bilinear_out = BilinearInteraction(
    bilinear_type=bilinear_type, seed=seed)(sparse_embedding_list)

linear_logit = get_linear_logit(features, linear_feature_columns, init_std=init_std, seed=seed,
                                prefix='linear', l2_reg=l2_reg_linear)

dnn_input = combined_dnn_input(
    [Flatten()(concat_func([senet_bilinear_out, bilinear_out]))], dense_value_list)
dnn_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
              False, seed)(dnn_input)
dnn_logit = Dense(1, use_bias=False, activation=None)(dnn_out)

final_logit = add_func([linear_logit, dnn_logit])
output = PredictionLayer(task)(final_logit)
model = Model(inputs=inputs_list, outputs=output)
return model
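
# BilinearInteraction above crosses field embeddings through a learned weight. Below
# is a minimal sketch of the bilinear product from the FiBiNET paper for a single
# field pair, p_ij = (v_i . W) o v_j, under the shared-weight ('all') setting; the
# layer used above also supports per-field and per-pair weights, which are not shown.
import tensorflow as tf

def bilinear_pair(v_i, v_j, w):
    """v_i, v_j: (batch, dim) embeddings of two fields; w: (dim, dim) shared weight.
    Returns the FiBiNET-style bilinear interaction of shape (batch, dim)."""
    return tf.matmul(v_i, w) * v_j  # (v_i . W) Hadamard v_j
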
# --- Wide & Deep-style builder body (tf.keras variant) ---
# (The fragment opened mid-statement; the first line is restored to the usual
# `build_input_features` call that these builders start with.)
features = build_input_features(linear_feature_columns + dnn_feature_columns)
inputs_list = list(features.values())

sparse_embedding_list, dense_value_list = input_from_feature_columns(features, dnn_feature_columns,
                                                                     l2_reg_embedding, init_std, seed)

linear_logit = get_linear_logit(features, linear_feature_columns, init_std=init_std, seed=seed,
                                prefix='linear', l2_reg=l2_reg_linear)

dnn_input = combined_dnn_input(sparse_embedding_list, dense_value_list)
deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn,
               dnn_dropout, False, seed)(dnn_input)
dnn_logit = tf.keras.layers.Dense(1, use_bias=False, activation=None)(deep_out)

final_logit = add_func([dnn_logit, linear_logit])
output = PredictionLayer(task)(final_logit)
model = tf.keras.models.Model(inputs=inputs_list, outputs=output)
return model
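
# combined_dnn_input appears in every builder here. The helper below is a hedged
# sketch of the behaviour these call sites rely on: flatten each sparse embedding,
# concatenate them with the dense value tensors, and return one flat DNN input.
# The real DeepCTR helper may handle empty lists and layer naming differently.
import tensorflow as tf

def combined_dnn_input_sketch(sparse_embedding_list, dense_value_list):
    # Flatten (B, 1, D) embeddings to (B, D) and concatenate with dense values.
    flat_sparse = [tf.keras.layers.Flatten()(e) for e in sparse_embedding_list]
    parts = flat_sparse + list(dense_value_list)
    if len(parts) == 1:
        return parts[0]
    return tf.keras.layers.concatenate(parts, axis=-1)
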
# --- FGCNN-style builder body: CNN-generated features + inner products + DNN ---
# (The opening `if` that guards feature generation is truncated in this fragment;
# `fg_input`, `origin_input`, `linear_logit` and `inputs_list` are built earlier.)
    new_features = FGCNNLayer(
        conv_filters, conv_kernel_width, new_maps, pooling_width)(fg_input)
    combined_input = concat_func([origin_input, new_features], axis=1)
else:
    combined_input = origin_input

# Pairwise inner products over all (original + generated) field embeddings.
inner_product = tf.keras.layers.Flatten()(InnerProductLayer()(
    tf.keras.layers.Lambda(unstack, mask=[None] * int(combined_input.shape[1]))(combined_input)))
linear_signal = tf.keras.layers.Flatten()(combined_input)
dnn_input = tf.keras.layers.Concatenate()([linear_signal, inner_product])
dnn_input = tf.keras.layers.Flatten()(dnn_input)

final_logit = DNN(dnn_hidden_units, dropout_rate=dnn_dropout,
                  l2_reg=l2_reg_dnn)(dnn_input)
final_logit = tf.keras.layers.Dense(1, use_bias=False)(final_logit)
final_logit = add_func([final_logit, linear_logit])
output = PredictionLayer(task)(final_logit)
model = tf.keras.models.Model(inputs=inputs_list, outputs=output)
return model
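
# The Lambda(unstack, ...) call above splits the (batch, fields, dim) tensor back
# into a list of per-field embeddings before the InnerProductLayer; `unstack` itself
# is a helper defined elsewhere in the original module. Below is a hedged sketch of
# the pairwise inner products that combination is assumed to produce.
import tensorflow as tf

def pairwise_inner_products(stacked_embeddings):
    """stacked_embeddings: (batch, num_fields, dim) -> (batch, num_pairs)
    dot products for every unordered field pair (i < j)."""
    fields = tf.unstack(stacked_embeddings, axis=1)
    products = []
    for i in range(len(fields)):
        for j in range(i + 1, len(fields)):
            products.append(tf.reduce_sum(fields[i] * fields[j], axis=-1, keepdims=True))
    return tf.concat(products, axis=-1)
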
# --- AutoInt-style head: combine the interacting-layer output with an optional DNN ---
# (The opening of the first branch is truncated in the original fragment; it is
# restored here to match the elif chain below.)
if len(dnn_hidden_units) > 0 and att_layer_num > 0:  # Deep & Interacting Layer
    deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
                   dnn_use_bn, seed)(dnn_input)
    stack_out = tf.keras.layers.Concatenate()([att_output, deep_out])
    final_logit = tf.keras.layers.Dense(
        1, use_bias=False, activation=None)(stack_out)
elif len(dnn_hidden_units) > 0:  # Only Deep
    deep_out = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
                   dnn_use_bn, seed)(dnn_input)
    final_logit = tf.keras.layers.Dense(
        1, use_bias=False, activation=None)(deep_out)
elif att_layer_num > 0:  # Only Interacting Layer
    final_logit = tf.keras.layers.Dense(
        1, use_bias=False, activation=None)(att_output)
else:  # Error
    raise NotImplementedError

final_logit = add_func([final_logit, linear_logit])
output = PredictionLayer(task)(final_logit)
model = tf.keras.models.Model(inputs=inputs_list, outputs=output)
return model
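
# Usage sketch (illustrative addition). It assumes the exported deepctr.models.AutoInt
# constructor accepts att_layer_num and dnn_hidden_units keywords, matching the three
# branches above; the feature columns are made up, and the feature-column module path
# varies by DeepCTR version.
from deepctr.feature_column import SparseFeat
from deepctr.models import AutoInt

cols = [SparseFeat('f%d' % i, vocabulary_size=100, embedding_dim=8) for i in range(4)]

model_both = AutoInt(cols, cols, att_layer_num=3, dnn_hidden_units=(256, 128))  # Deep & Interacting
model_dnn = AutoInt(cols, cols, att_layer_num=0, dnn_hidden_units=(256, 128))   # Only Deep
model_att = AutoInt(cols, cols, att_layer_num=3, dnn_hidden_units=())           # Only Interacting
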
# --- ONN/FFM-style builder body ---
# (The first statements sit inside a nested loop over field pairs, truncated here;
# `fc_i_embedding` / `fc_j_embedding` are the field-aware embeddings of one pair.)
        element_wise_prod = multiply([fc_i_embedding, fc_j_embedding])
        if reduce_sum:
            element_wise_prod = Lambda(lambda element_wise_prod: K.sum(
                element_wise_prod, axis=-1))(element_wise_prod)
        embed_list.append(element_wise_prod)

ffm_out = tf.keras.layers.Flatten()(concat_func(embed_list, axis=1))
if use_bn:
    ffm_out = tf.keras.layers.BatchNormalization()(ffm_out)

dnn_input = combined_dnn_input([ffm_out], dense_value_list)
dnn_out = DNN(dnn_hidden_units, l2_reg=l2_reg_dnn, dropout_rate=dnn_dropout)(dnn_input)
dnn_logit = Dense(1, use_bias=False)(dnn_out)

final_logit = add_func([dnn_logit, linear_logit])
output = PredictionLayer(task)(final_logit)
model = Model(inputs=inputs_list, outputs=output)
return model
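
# The loop at the top of this fragment multiplies each pair of field-aware embeddings
# element-wise and, when reduce_sum is set, collapses the pair to a scalar. Below is
# a minimal standalone illustration of that single step with plain tensors.
import tensorflow as tf

def field_pair_interaction(fc_i_embedding, fc_j_embedding, reduce_sum=False):
    """fc_i_embedding, fc_j_embedding: (batch, 1, dim) embeddings of one field pair."""
    element_wise_prod = fc_i_embedding * fc_j_embedding              # (batch, 1, dim)
    if reduce_sum:
        element_wise_prod = tf.reduce_sum(element_wise_prod, axis=-1)  # (batch, 1)
    return element_wise_prod
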
# --- xDeepFM-style builder body: linear + DNN logits, plus an optional CIN branch ---
# (`sparse_embedding_list`, `dense_value_list`, `linear_logit` and `inputs_list`
# are built earlier in the original function.)
fm_input = concat_func(sparse_embedding_list, axis=1)

dnn_input = combined_dnn_input(sparse_embedding_list, dense_value_list)
dnn_output = DNN(dnn_hidden_units, dnn_activation, l2_reg_dnn, dnn_dropout,
                 dnn_use_bn, seed)(dnn_input)
dnn_logit = tf.keras.layers.Dense(1, use_bias=False, activation=None)(dnn_output)

final_logit = add_func([linear_logit, dnn_logit])

if len(cin_layer_size) > 0:
    # Compressed Interaction Network on the stacked field embeddings.
    exFM_out = CIN(cin_layer_size, cin_activation,
                   cin_split_half, l2_reg_cin, seed)(fm_input)
    exFM_logit = tf.keras.layers.Dense(1, activation=None)(exFM_out)
    final_logit = add_func([final_logit, exFM_logit])

output = PredictionLayer(task)(final_logit)
model = tf.keras.models.Model(inputs=inputs_list, outputs=output)
return model
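
# Usage sketch (illustrative addition). It assumes the exported deepctr.models.xDeepFM
# constructor accepts a cin_layer_size keyword matching the guard above; an empty
# tuple skips the CIN branch entirely. Feature columns and sizes are made up.
from deepctr.feature_column import SparseFeat
from deepctr.models import xDeepFM

cols = [SparseFeat('f%d' % i, vocabulary_size=100, embedding_dim=8) for i in range(4)]

model_with_cin = xDeepFM(cols, cols, cin_layer_size=(128, 128), task='binary')
model_no_cin = xDeepFM(cols, cols, cin_layer_size=(), task='binary')  # linear + DNN only
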
# --- get_linear_logit-style helper: build one linear logit per output unit ---
linear_logit_list = []
for i in range(units):
    if len(linear_emb_list[0]) > 0 and len(dense_input_list) > 0:
        # Both sparse linear embeddings and dense inputs: Linear mode 2.
        sparse_input = concat_func(linear_emb_list[i])
        dense_input = concat_func(dense_input_list)
        linear_logit = Linear(l2_reg, mode=2, use_bias=use_bias)([sparse_input, dense_input])
    elif len(linear_emb_list[0]) > 0:
        # Sparse linear embeddings only: Linear mode 0.
        sparse_input = concat_func(linear_emb_list[i])
        linear_logit = Linear(l2_reg, mode=0, use_bias=use_bias)(sparse_input)
    elif len(dense_input_list) > 0:
        # Dense inputs only: Linear mode 1.
        dense_input = concat_func(dense_input_list)
        linear_logit = Linear(l2_reg, mode=1, use_bias=use_bias)(dense_input)
    else:
        # No linear features at all.
        # raise NotImplementedError
        return add_func([])
    linear_logit_list.append(linear_logit)

return concat_func(linear_logit_list)
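
# The mode argument selects which inputs the Linear layer consumes, as inferred from
# the branches above: mode 0 = sparse linear embeddings only, mode 1 = dense values
# only, mode 2 = both. Below is a simplified, hedged stand-in with that contract; it
# is not the actual DeepCTR Linear implementation.
import tensorflow as tf

def linear_logit_sketch(sparse_input=None, dense_input=None, dense_kernel=None, bias=None, mode=2):
    """Simplified stand-in for a Linear(mode=...) call, inferred from the call sites above.

    mode 0: sparse_input only  -- (batch, 1, n_sparse) one-dim linear embeddings, summed.
    mode 1: dense_input only   -- (batch, n_dense) values projected by dense_kernel (n_dense, 1).
    mode 2: both, added together.
    """
    logit = 0.0
    if mode in (0, 2):
        logit += tf.reduce_sum(sparse_input, axis=-1)   # (batch, 1)
    if mode in (1, 2):
        logit += tf.matmul(dense_input, dense_kernel)   # (batch, 1)
    if bias is not None:
        logit += bias
    return logit
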