Skip to content

Commit

Permalink
Fix FP16 O2 (pure-fp16 AMP) support for F.normalize in PartialFC (PaddlePaddle#131)
Browse files Browse the repository at this point in the history
  • Loading branch information
GuoxiaWang authored Oct 18, 2022
1 parent 7baba0d commit 7321b10
Show file tree
Hide file tree
Showing 2 changed files with 5 additions and 5 deletions.
4 changes: 2 additions & 2 deletions plsc/metric/metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ def __init__(self, flip_test=True, nrof_folds=10, pca=0):
self.nrof_folds = nrof_folds
self.pca = pca

def forward(self, embeddings, actual_issame, flip_test=False):
def forward(self, embeddings, actual_issame):

metric_dict = dict()

Expand Down Expand Up @@ -147,7 +147,7 @@ def forward(self, embeddings, actual_issame, flip_test=False):

metric_dict["std"] = std2
metric_dict["xnorm"] = _xnorm
if flip_test:
if self.flip_test:
metric_dict["accuracy-flip"] = acc2
else:
metric_dict["accuracy"] = acc2
Expand Down
6 changes: 3 additions & 3 deletions plsc/models/layers/partialfc.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,7 @@ def backward(ctx, grad):
for _op in dist_ops:
_op.wait()

#grad_out *= len(grad_list) # cooperate with distributed loss function
grad_out *= len(grad_list) # cooperate with distributed loss function
return grad_out


Expand Down Expand Up @@ -192,8 +192,8 @@ def sparse_grad_hook_fn():
else:
self.sub_weight = self.weight

norm_feature = paddle.nn.functional.normalize(total_feature, axis=1)
norm_weight = paddle.nn.functional.normalize(self.sub_weight, axis=0)
norm_feature = paddle.fluid.layers.l2_normalize(total_feature, axis=1)
norm_weight = paddle.fluid.layers.l2_normalize(self.sub_weight, axis=0)

local_logit = paddle.matmul(norm_feature, norm_weight)
return local_logit, total_label

0 comments on commit 7321b10

Please sign in to comment.