
Commit ec2d81c
Merge pull request #1086 from NLGithubWP/add_model
Add the backward implementation for sum error loss
lzjpaul authored Sep 6, 2023
2 parents 210d7a6 + 0c151e7 · commit ec2d81c
Showing 1 changed file with 16 additions and 2 deletions.

examples/model_selection_psql/msmlp/model.py
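In brief, the change adds the missing backward pass for the sum-error loss: with loss = Σᵢ xᵢ (the mean-style 1/n reduction is commented out in the diff), the gradient ∂loss/∂xᵢ is 1 for every element, so backward() returns a ones tensor of the input's shape scaled by the upstream gradient dy.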
@@ -52,7 +52,21 @@ def forward(self, x):
        # self.n *= s
        # loss /= self.n
        return loss

    def backward(self, dy=1.0):
        # The loss is an unreduced sum, so d(loss)/dx is 1 for every
        # element of x; build a ones tensor on the device and scale it
        # by the upstream gradient dy.
        # dx = self.err
        dev = device.get_default_device()
        dx = tensor.Tensor(self.data_x.shape, dev, singa_dtype['float32'])
        dx.copy_from_numpy(np.ones(self.data_x.shape))
        # dx *= float(2 / self.n)
        dx *= dy
        return dx

### called in the MSMLP class for sum error loss gradients
def se_loss(x):
    # assert x.shape == t.shape, "input and target shape different: %s, %s" % (
    #     x.shape, t.shape)
    return SumError()(x)[0]

class MSMLP(model.Model):

@@ -66,7 +80,7 @@ def __init__(self, data_size=10, perceptron_size=100, num_classes=10):
        self.linear2 = layer.Linear(num_classes)
        self.softmax_cross_entropy = layer.SoftMaxCrossEntropy()
        self.sum_error = SumErrorLayer()

    def forward(self, inputs):
        y = self.linear1(inputs)
        y = self.relu(y)
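As a quick sanity check of the new gradient, here is a hedged sketch in plain numpy; it is not part of the commit and only mirrors what backward() computes.

    import numpy as np

    # loss = x.sum(), so d(loss)/dx is a ones tensor scaled by dy --
    # the same value SumError.backward() constructs on the device.
    x = np.random.randn(4, 10).astype(np.float32)
    loss = x.sum()                    # forward pass of the sum-error loss
    dy = 1.0                          # upstream gradient (backward's default)
    dx = np.ones(x.shape, dtype=np.float32) * dy

    # finite-difference spot check on one element
    eps = 1e-3
    x_pert = x.copy()
    x_pert[0, 0] += eps
    assert abs((x_pert.sum() - loss) / eps - dx[0, 0]) < 1e-2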

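Finally, a sketch of how the new pieces might be exercised during training. The train_one_batch body below is an assumption for illustration, not code from this file; only self.softmax_cross_entropy, self.sum_error, and se_loss come from the diff.

    # Hypothetical wiring (NOT from the commit): combining the primary
    # classification loss with the new sum-error term inside MSMLP.
    def train_one_batch(self, x, y):               # assumed signature
        out = self.forward(x)
        loss = self.softmax_cross_entropy(out, y)  # layer from __init__
        aux = self.sum_error(out)                  # SumErrorLayer from __init__
        # se_loss(out) would call SumError directly; propagating gradients
        # through either path exercises the new backward().
        return out, loss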