Skip to content

Commit

Permalink
demo for adam opt
Browse files Browse the repository at this point in the history
  • Loading branch information
fuhailin committed Nov 17, 2023
1 parent f7e9aaa commit e7247a8
Show file tree
Hide file tree
Showing 5 changed files with 49 additions and 6 deletions.
8 changes: 5 additions & 3 deletions deepray/core/base_trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@
if FLAGS.use_dynamic_embedding:
from tensorflow_recommenders_addons import dynamic_embedding as de
from tensorflow_recommenders_addons.dynamic_embedding.python.ops.dynamic_embedding_ops import TrainableWrapper, DEResourceVariable
# tf.train.Checkpoint = de.train.checkpoint.DEHvdCheckpoint
else:
TrainableWrapper, DEResourceVariable = type(None), type(None)

Expand Down Expand Up @@ -255,10 +256,11 @@ def __init__(
self.global_batch_size *= get_world_size()
learning_rate *= get_world_size()

if isinstance(optimizer, optimizers.Optimizer):
# TODO: fuhailin
# if isinstance(optimizer, optimizers.Optimizer):
self.optimizer = optimizer
else:
raise ValueError("Not support opt.")
# else:
# raise ValueError("Not support opt.")
self.use_float16 = common_flags.use_float16()
if self.use_float16:
self.optimizer = tf.keras.mixed_precision.LossScaleOptimizer(self.optimizer, dynamic=True)
Expand Down
1 change: 1 addition & 0 deletions deepray/optimizers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,3 +49,4 @@
)
from deepray.optimizers.yogi import Yogi
from deepray.optimizers.cocob import COCOB
from deepray.optimizers.adam import AdamOptimizer
3 changes: 3 additions & 0 deletions deepray/utils/export/export.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,9 @@ def serving_input_receiver_fn():


def export_to_checkpoint(saver: Union[tf.train.Checkpoint, tf.train.CheckpointManager], checkpoint_number=None):
if FLAGS.use_dynamic_embedding:
# TFRA not support ckpt yet.
return

def helper(name, _saver):
"""Saves model to with provided checkpoint prefix."""
Expand Down
37 changes: 36 additions & 1 deletion modelzoo/Recommendation/criteo_ctr/feature_map_small.csv
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,40 @@ name,dtype,ftype,dim,length,voc_size
feature_0,int32,Label,1,1,
feature_1,float64,Numerical,1,1,
feature_2,float64,Numerical,1,1,
feature_3,float64,Numerical,1,1,
feature_4,float64,Numerical,1,1,
feature_5,float64,Numerical,1,1,
feature_6,float64,Numerical,1,1,
feature_7,float64,Numerical,1,1,
feature_8,float64,Numerical,1,1,
feature_9,float64,Numerical,1,1,
feature_10,float64,Numerical,1,1,
feature_11,float64,Numerical,1,1,
feature_12,float64,Numerical,1,1,
feature_13,float64,Numerical,1,1,
feature_14,int32,Categorical,16,1,7912888
feature_15,int32,Categorical,16,1,33822
feature_16,int32,Categorical,16,1,17138
feature_17,int32,Categorical,16,1,7338
feature_18,int32,Categorical,16,1,20045
feature_19,int32,Categorical,16,1,3
feature_20,int32,Categorical,16,1,7104
feature_21,int32,Categorical,16,1,1381
feature_22,int32,Categorical,16,1,62
feature_23,int32,Categorical,16,1,5554113
feature_24,int32,Categorical,16,1,582468
feature_25,int32,Categorical,16,1,245827
feature_26,int32,Categorical,16,1,10
feature_27,int32,Categorical,16,1,2208
feature_28,int32,Categorical,16,1,10666
feature_29,int32,Categorical,16,1,103
feature_30,int32,Categorical,16,1,3
feature_31,int32,Categorical,16,1,967
feature_32,int32,Categorical,16,1,14
feature_33,int32,Categorical,16,1,8165895
feature_34,int32,Categorical,16,1,2675939
feature_35,int32,Categorical,16,1,7156452
feature_36,int32,Categorical,16,1,302515
feature_37,int32,Categorical,16,1,12021
feature_38,int32,Categorical,16,1,96
feature_39,int32,Categorical,16,1,34
6 changes: 4 additions & 2 deletions modelzoo/Recommendation/criteo_ctr/train.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
import sys

import tensorflow as tf
import deepray as dp
from absl import app, flags
from tensorflow.keras import backend as K
from tensorflow_recommenders_addons import dynamic_embedding as de
Expand All @@ -34,9 +35,10 @@


def main(_):
model = Ranking(interaction="cross")
optimizer = tf.keras.optimizers.Adam(learning_rate=FLAGS.learning_rate, amsgrad=False)
optimizer = de.DynamicEmbeddingOptimizer(optimizer, synchronous=FLAGS.use_horovod)
if FLAGS.use_dynamic_embedding:
optimizer = de.DynamicEmbeddingOptimizer(optimizer, synchronous=FLAGS.use_horovod)
trainer = Trainer(model=model, optimizer=optimizer, loss="binary_crossentropy", metrics=['AUC'])
data_pipe = CriteoTsvReader(use_synthetic_data=True)
train_input_fn = data_pipe(FLAGS.train_data, FLAGS.batch_size, is_training=True)
Expand Down

0 comments on commit e7247a8

Please sign in to comment.