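"""Unit test comparing DeepSpeedCPUAdam against torch.optim.Adam."""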
import torch

import numpy as np
import pytest

import deepspeed
from deepspeed.ops.adam import DeepSpeedCPUAdam

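# Skip the whole module if the cpu-adam extension op has not been built/installed.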
if not deepspeed.ops.__installed_ops__['cpu-adam']:
    pytest.skip("cpu-adam is not installed", allow_module_level=True)


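# Helper: assert that two tensors agree elementwise within `atol`.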
def check_equal(first, second, atol=1e-2, verbose=False):
    x = first.detach().numpy()
    y = second.detach().numpy()
    if verbose:
        print("x = {}".format(x.flatten()))
        print("y = {}".format(y.flatten()))
        print('-' * 80)
    np.testing.assert_allclose(x, y, err_msg="param-update mismatch!", atol=atol)

@pytest.mark.parametrize('model_size',
                         [
                             64,
                             22,
                             55,
                             127,
                             1024,
                             1048576,
                         ]) # yapf: disable
def test_cpu_adam_opt(model_size):
    device = 'cpu'
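    # Save/restore the RNG state so `param` and `param1` are initialized with
    # identical random values.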
    rng_state = torch.get_rng_state()
    param = torch.nn.Parameter(torch.randn(model_size, device=device))
    torch.set_rng_state(rng_state)
    param1 = torch.nn.Parameter(torch.randn(model_size, device=device))

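    # Reference torch.optim.Adam vs. the DeepSpeed fused CPU Adam implementation,
    # both with default hyperparameters.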
    optimizer1 = torch.optim.Adam([param1])
    optimizer = DeepSpeedCPUAdam([param])

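    # Take several optimizer steps, feeding both optimizers identical gradients
    # (again via RNG state save/restore).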
    for i in range(10):
        rng_state = torch.get_rng_state()
        param.grad = torch.randn(model_size, device=device)
        torch.set_rng_state(rng_state)
        param1.grad = torch.randn(model_size, device=device)

        optimizer.step()
        optimizer1.step()

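    # The fused CPU kernel should match torch's Adam update within tolerance.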
    check_equal(param, param1, atol=1e-2, verbose=True)
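
# A minimal standalone sketch (not part of the test) of how DeepSpeedCPUAdam can
# be driven directly; the hyperparameter values below are illustrative only.
if __name__ == "__main__":
    p = torch.nn.Parameter(torch.randn(1024))
    opt = DeepSpeedCPUAdam([p], lr=1e-3, weight_decay=0)
    for _ in range(3):
        p.grad = torch.randn(1024)
        opt.step()
    print("param norm after 3 steps:", p.norm().item())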