The pictures below show embeddings of the MNIST dataset after training.
### CurricularFace Loss
![download](https://user-images.githubusercontent.com/48349693/182266736-44cb7dc4-b273-45cc-9ff6-e3fd7672b0a5.png)

---

# Code Explanation

```python
import tensorflow as tf
import numpy as np

# Load MNIST: 60,000 training and 10,000 test images of shape (28, 28).
(train_x, train_y), (test_x, test_y) = tf.keras.datasets.mnist.load_data()
print(train_x.shape, train_y.shape)
print(train_x.dtype, train_y.dtype)

# Scale pixel values from [0, 255] to [0, 1].
train_x = train_x.astype(np.float32) / 255.0
test_x = test_x.astype(np.float32) / 255.0

# One-hot encode the integer labels, e.g. 3 -> [0,0,0,1,0,0,0,0,0,0].
train_y = tf.keras.utils.to_categorical(train_y, num_classes=10)
test_y = tf.keras.utils.to_categorical(test_y, num_classes=10)
print(train_y.shape, train_y.dtype)
print(test_y.shape, test_y.dtype)
```
Load the MNIST dataset, scale the pixel values to [0, 1], and one-hot encode the labels.

```python
class CurricularFaceLoss(tf.keras.losses.Loss):
    def __init__(self, scale=30, margin=0.5, alpha=0.99, name="CurricularFaceLoss", **kwargs):
        super().__init__(name=name, **kwargs)
        self.scale = scale    # scale factor s applied to the adjusted cosine logits
        self.margin = margin  # additive angular margin m for the target class
        self.alpha = alpha    # momentum of the moving statistic t
        self.t = tf.Variable(0.)  # running mean of the positive cosine similarity
        self.eps = 1e-7

    def positive_forward(self, y_logit):
        # cos(theta_y + m): add the angular margin to the target-class angle.
        cosine_sim = y_logit
        theta_margin = tf.math.acos(cosine_sim) + self.margin
        y_logit_pos = tf.math.cos(theta_margin)
        return y_logit_pos

    def negative_forward(self, y_logit_pos_masked, y_logit):
        # A negative class is a hard sample when its cosine exceeds the margined
        # target logit; such logits are modulated to cos(theta_j)*(t + cos(theta_j)).
        hard_sample_mask = y_logit_pos_masked < y_logit  # (N, n_classes)
        y_logit_neg = tf.where(hard_sample_mask, tf.square(y_logit)+self.t*y_logit, y_logit)
        return y_logit_neg

    def forward(self, y_true, y_logit):
        y_logit = tf.clip_by_value(y_logit, -1.0+self.eps, 1.0-self.eps)  # keep acos numerically stable
        y_logit_masked = tf.expand_dims(tf.reduce_sum(y_true*y_logit, axis=1), axis=1)  # (N, 1) target cosine
        y_logit_pos_masked = self.positive_forward(y_logit_masked)  # (N, 1)
        y_logit_neg = self.negative_forward(y_logit_pos_masked, y_logit)  # (N, n_classes)

        # Update t as an exponential moving average of the mean positive cosine.
        # The momentum alpha weights the previous value, as in the official
        # CurricularFace implementation, so t drifts slowly toward r.
        r = tf.reduce_mean(y_logit_pos_masked)
        self.t.assign(self.alpha*self.t + (1-self.alpha)*r)

        y_true = tf.cast(y_true, dtype=tf.bool)
        return tf.where(y_true, y_logit_pos_masked, y_logit_neg)

    def __call__(self, y_true, y_logit):  # y_true, y_logit: (N, n_classes)
        y_logit_fixed = self.forward(y_true, y_logit)
        loss = tf.nn.softmax_cross_entropy_with_logits(y_true, y_logit_fixed*self.scale)
        loss = tf.reduce_mean(loss)
        return loss
```

`CurricularFaceLoss` replaces the target logit cos(theta_y) with the margined value cos(theta_y + m). Every negative class whose cosine exceeds this margined target logit counts as a hard sample, and its logit is modulated to cos(theta_j)*(t + cos(theta_j)); easy negatives pass through unchanged. Since t is a moving average of the positive cosine similarity, it grows as the embeddings improve, so hard negatives are suppressed early in training (while t + cos(theta_j) < 1) and emphasized later. This adaptive curriculum is what gives the loss its name.
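To use this loss, the model's last layer has to output cosine similarities rather than ordinary logits, since the loss clips its input to [-1, 1] and applies `acos`. Below is a minimal sketch of one way to wire it up; `CosineLayer`, the architecture, and the hyperparameters are illustrative assumptions, not code from this repository.

```python
class CosineLayer(tf.keras.layers.Layer):
    # Hypothetical layer: cosine similarity between L2-normalized embeddings
    # and L2-normalized class-center weights, so outputs lie in [-1, 1].
    def __init__(self, n_classes, **kwargs):
        super().__init__(**kwargs)
        self.n_classes = n_classes

    def build(self, input_shape):
        self.w = self.add_weight(shape=(int(input_shape[-1]), self.n_classes),
                                 initializer="glorot_uniform", trainable=True)

    def call(self, x):
        x = tf.math.l2_normalize(x, axis=1)       # unit-length embeddings
        w = tf.math.l2_normalize(self.w, axis=0)  # unit-length class centers
        return tf.matmul(x, w)                    # cosine similarities, (N, n_classes)

model = tf.keras.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(128, activation="relu"),
    tf.keras.layers.Dense(2),  # 2-D embedding, convenient for plotting
    CosineLayer(10),
])

loss_fn = CurricularFaceLoss()
optimizer = tf.keras.optimizers.Adam()

@tf.function
def train_step(x, y):
    with tf.GradientTape() as tape:
        cos_logits = model(x, training=True)  # (N, 10) cosine similarities
        loss = loss_fn(y, cos_logits)         # also updates loss_fn.t
    grads = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    return loss

dataset = tf.data.Dataset.from_tensor_slices((train_x, train_y)).shuffle(1024).batch(128)
for epoch in range(5):
    for x, y in dataset:
        loss = train_step(x, y)
    print(f"epoch {epoch}: loss={float(loss):.4f}, t={loss_fn.t.numpy():.4f}")
```

Because the class overrides `__call__` without Keras's `sample_weight` argument, a custom training loop like the one above is a safer fit than passing the loss to `model.compile`.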
---

### Reference

https://emkademy.medium.com/angular-margin-losses-for-representative-embeddings-training-arcface-2018-vs-mv-arc-softmax-96b54bcd030b

https://linuxtut.com/en/a9dadc68a5cd0a2747c0/




