"git@developer.sourcefind.cn:gaoqiong/migraphx.git" did not exist on "2edffaf59c072bb7132a346a5467130790ce1377"
test.py 1.3 KB
Newer Older
liangjing's avatar
liangjing committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
# test.py: sanity check of PaddlePaddle's CrossEntropyLoss against
# manual log-softmax + NLL reference computations.
import paddle
import paddle.nn.functional as F

# 20 integer class labels, one per class index 0..19
y_true = paddle.to_tensor(
    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19],
    dtype="int64")
# one row of 20 class logits
y_pred = paddle.to_tensor(
    [0.21167, -0.593262, -0.314209, -0.436035, 0.0211029, 0.0109406,
     -0.0210571, 0.588379, 0.215088, -0.277588, -0.0982666, -0.168579,
     -0.00405884, -0.290283, 0.105896, 0.0933228, 0.0889893, 0.0363159,
     0.53418, 0.552734],
    dtype="float32")
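# NOTE (assumption, not in the original script): CrossEntropyLoss expects
# logits of shape [N, C] and integer labels of shape [N]. The 1-D tensors
# above do not satisfy that contract, so as a minimal fix the single row of
# 20 logits is repeated once per label.
y_pred = paddle.tile(y_pred.unsqueeze(0), repeat_times=[20, 1])  # [20, 20]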

# Reference computation 1: manual softmax -> log -> NLL
#one_hot = F.one_hot(y_true, num_classes=20)
#res = paddle.sum(paddle.exp(y_pred), axis=1)
#res = paddle.reshape(res, [-1, 1])
#softmax = paddle.exp(y_pred) / res
#logsoftmax = paddle.log(softmax)
#nllloss = -paddle.sum(one_hot * logsoftmax) / y_true.shape[0]
#print(nllloss)

# Reference computation 2: F.log_softmax followed by F.nll_loss
#logsoftmax = F.log_softmax(y_pred)
#nllloss = F.nll_loss(logsoftmax, y_true, ignore_index=-1)
#print(nllloss)
# Fused path: CrossEntropyLoss applies log_softmax and NLL in one op
loss_fct = paddle.nn.CrossEntropyLoss(ignore_index=-1)
nllloss = loss_fct(input=y_pred, label=y_true)
print(nllloss)
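
# Cross-check (an addition, assuming the tiled shapes above): the fused
# CrossEntropyLoss result should match the explicit two-step
# log_softmax + nll_loss pipeline on the same tensors.
two_step = F.nll_loss(F.log_softmax(y_pred, axis=-1), y_true, ignore_index=-1)
print(two_step)
assert paddle.allclose(nllloss, two_step), "fused and two-step losses should agree"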