"csrc/vscode:/vscode.git/clone" did not exist on "0e541cc9175945634ef946568104829175fbdc4e"
test.py 1.07 KB
Newer Older
Hang Zhang's avatar
init  
Hang Zhang committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
## Created by: Hang Zhang
## ECE Department, Rutgers University
## Email: zhang.hang@rutgers.edu
## Copyright (c) 2017
##
## This source code is licensed under the MIT-style license found in the
## LICENSE file in the root directory of this source tree 
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++

import torch
import torch.nn as nn
from torch.autograd import Variable
from encoding import Aggregate
from encoding import Encoding
from torch.autograd import gradcheck

# declare dims (batch B, inputs N, codewords K, channels D) and test variables
B, N, K, D = 1, 2, 3, 4
A = Variable(torch.randn(B, N, K).cuda(), requires_grad=True)     # assignment weights
R = Variable(torch.randn(B, N, K, D).cuda(), requires_grad=True)  # residuals
X = Variable(torch.randn(B, D, 3, 3).cuda(), requires_grad=True)  # input feature map

# check the Aggregate operation against numerical gradients
# (loose eps/atol because the inputs are single-precision CUDA tensors)
test = gradcheck(Aggregate(), (A, R), eps=1e-4, atol=1e-3)
print('Gradcheck of Aggregate() returns ', test)
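
# sanity check (a sketch, assuming Aggregate computes the Encoding-layer
# aggregation E[b,k,d] = sum_n A[b,n,k] * R[b,n,k,d]): compare its output
# against a plain broadcast-and-sum reference in PyTorch
E_agg = Aggregate()(A, R)
E_ref = (A.unsqueeze(3) * R).sum(1)
print('Aggregate() max abs error vs. reference:', (E_agg - E_ref).abs().max())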

# check Encoding operation
encoding = Encoding(D=D, K=K).cuda()
print(encoding)
E = encoding(X)
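# shape check (a sketch, assuming the layer maps (B, D, H, W) -> (B, K, D))
print('E size:', E.size())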
loss = E.view(B, -1).pow(2).sum()
loss.backward()
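
# quick sanity check (a sketch): backward() through the Encoding layer
# should populate the gradient of the input feature map
assert X.grad is not None
print('X.grad size:', X.grad.size())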