import torch
import colossalai
from typing import Any, Callable, Dict, List, Tuple

try:
    from torch.fx.node import Node, Argument, map_arg, _type_repr, _get_qualified_name
    from torch.fx.graph import (_Namespace, PythonCode, _custom_builtins, _is_from_torch, _format_target,
                                magic_methods, CodeGen, _origin_type_map, inplace_methods, _CustomBuiltin)
    CODEGEN_AVAILABLE = True
except ImportError:
    from torch.fx.graph import (_Namespace, PythonCode, _custom_builtins, _is_from_torch, _format_target,
                                magic_methods, _origin_type_map, _format_args, _CustomBuiltin)
    from torch.fx.node import Node, Argument, map_arg, _type_repr, _get_qualified_name
    CODEGEN_AVAILABLE = False

if CODEGEN_AVAILABLE:
    __all__ = ['ActivationCheckpointCodeGen']
else:
    __all__ = ['python_code_with_activation_checkpoint']


def _gen_saved_tensors_hooks():
    """
    Generate saved tensors hooks
    """

    pack_hook = """def pack_hook(self, x):
    if getattr(x, "offload", None):
        return (x.device, x.cpu())
    else:
        return x
"""

    unpack_hook = """def unpack_hook(self, packed):
    if isinstance(packed, tuple):
        device, tensor = packed
        return tensor.to(device)
    else:
        return packed
"""

    return pack_hook, unpack_hook


def _gen_save_tensors_hooks_context():
    """
    Generate save tensors hooks context
    """

    context = "with torch.autograd.graph.saved_tensors_hooks(self.pack_hook, self.unpack_hook):\n"
    return context
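
# Illustration (not emitted by this module) of how the generated hook and context
# snippets behave at runtime; all names below are local to this sketch. Tensors
# tagged with `offload = True` are packed to CPU when autograd saves them and moved
# back to their original device when the backward pass unpacks them. Requires
# torch >= 1.10 for torch.autograd.graph.saved_tensors_hooks.
def _example_offload_with_hooks():    # illustrative helper, never called by this module
    def pack_hook(x):
        if getattr(x, "offload", None):
            return (x.device, x.cpu())
        return x

    def unpack_hook(packed):
        if isinstance(packed, tuple):
            device, tensor = packed
            return tensor.to(device)
        return packed

    x = torch.randn(4, requires_grad=True)
    x.offload = True    # annotate the activation, as the generated `setattr` lines do
    with torch.autograd.graph.saved_tensors_hooks(pack_hook, unpack_hook):
        y = x.pow(2).sum()    # pow saves its input `x`, so `x` goes through pack_hook
    y.backward()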


def _find_input_and_output_nodes(nodes: List[Node]):
    """
    Find the input and output node names which are not found in the given list of nodes.
    """
    input_nodes = []
    output_nodes = []

    # if a node has an input node which is not in the node list
    # we treat that input node as the input of the checkpoint function
    for node in nodes:
        for input_node in node._input_nodes.keys():
            node_repr = repr(input_node)
            if input_node not in nodes and node_repr not in input_nodes:
                input_nodes.append(node_repr)

    # if a node has a user node which is not in the node list
    # we treat that user node as the node receiving the current node output
    for node in nodes:
        for output_node in node.users.keys():
            node_repr = repr(node)
            if output_node not in nodes and node_repr not in output_nodes:
                output_nodes.append(node_repr)

    return input_nodes, output_nodes
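
# For example (illustrative): if the region contains nodes [linear1, relu] and the
# graph is x -> linear1 -> relu -> linear2, then `x` is produced outside the region
# (an input) and `relu` is consumed by linear2 outside the region (an output), so
# the function returns (['x'], ['relu']).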


def _find_ckpt_regions(nodes: List[Node]):
    """
    Find the checkpoint regions given a list of consecutive nodes. The outputs will be list
    of tuples, each tuple is in the form of (start_index, end_index).
    """
    ckpt_nodes = []
    ckpt_regions = []
    start = -1
    end = -1
    current_region = None

    for idx, node in enumerate(nodes):
        if hasattr(node, 'activation_checkpoint'):
            act_ckpt_label = node.activation_checkpoint

            # no region is being tracked yet,
            # so this node starts a new activation ckpt region
            if current_region is None:
                current_region = act_ckpt_label
                start = idx

            # if activation checkpoint has changed
            # we restart the tracking
            # e.g. node ckpt states = [ckpt1, ckpt2, ckpt2, ckpt2]
            if act_ckpt_label != current_region:
                assert start != -1
                ckpt_regions.append((start, idx - 1))
                current_region = act_ckpt_label
                start = idx
                end = -1
        elif current_region is not None and not hasattr(node, 'activation_checkpoint'):
            # used to check the case below
            # node ckpt states = [ckpt, ckpt, non-ckpt]
            end = idx - 1
            assert start != -1 and end != -1
            ckpt_regions.append((start, end))
            start = end = -1
            current_region = None
        else:
            pass
    return ckpt_regions
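
# For example (illustrative): for node labels [None, 0, 0, 1, 1, None] the tracked
# regions are nodes 1-2 (label 0) and nodes 3-4 (label 1), so the function returns
# [(1, 2), (3, 4)].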


def _gen_ckpt_fn_def(label, free_vars: List[str]) -> str:
    """
    Generate the checkpoint function definition
    """
    return f"def checkpoint_{label}({', '.join(['self'] + free_vars)}):"


def _gen_ckpt_output(output_vars: List[str]) -> str:
    """
    Generate the return statement for checkpoint region
    """
    return f"return {', '.join(output_vars)}"


def _gen_ckpt_usage(label, activation_offload, input_vars, output_vars, use_reentrant=True):
    """
    Generate the checkpoint function call code text
    """
    outputs = ', '.join(output_vars)
    inputs = ', '.join(input_vars)
    return f'{outputs} = colossalai.utils.activation_checkpoint.checkpoint(self.checkpoint_{label}, {activation_offload}, {inputs}, use_reentrant={use_reentrant})'
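
# For example (illustrative):
#   _gen_ckpt_usage(1, False, ['x1', 'x2'], ['out'], use_reentrant=False)
# returns:
#   "out = colossalai.utils.activation_checkpoint.checkpoint(self.checkpoint_1, False, x1, x2, use_reentrant=False)"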


def _end_of_ckpt(node: Node, check_idx: int) -> bool:
    """Check if the node could end the ckpt region

    Args:
        node (Node): torch.fx.Node
        check_idx (int): the index of the checkpoint level, used for nested checkpoints

    Returns:
        bool
    """
    if hasattr(node, "activation_checkpoint"):
        if isinstance(node.activation_checkpoint, list):
            return node.activation_checkpoint[check_idx] is None
        else:
            return False
    else:
        return True


def _find_nested_ckpt_regions(nodes, check_idx=0):
    """
    Find the nested checkpoint regions given a list of consecutive nodes. The outputs 
    will be list of tuples, each tuple is in the form of (start_index, end_index).
    """
    ckpt_regions = []
    start = -1
    end = -1
    current_region = None

    for idx, node in enumerate(nodes):
        if hasattr(node, 'activation_checkpoint'):
            if isinstance(getattr(node, 'activation_checkpoint'), int):
                act_ckpt_label = node.activation_checkpoint
            else:
                act_ckpt_label = node.activation_checkpoint[check_idx]

            # no region is being tracked yet,
            # so this node starts a new activation ckpt region
            if current_region is None:
                current_region = act_ckpt_label
                start = idx

            # if activation checkpoint has changed
            # we restart the tracking
            # e.g. node ckpt states = [ckpt1, ckpt2, ckpt2, ckpt2]
            if act_ckpt_label != current_region:
                assert start != -1
                ckpt_regions.append((start, idx - 1))
                current_region = act_ckpt_label
                start = idx
                end = -1
        elif current_region is not None and _end_of_ckpt(node, check_idx):
            # used to check the case below
            # node ckpt states = [ckpt, ckpt, non-ckpt]
            end = idx - 1
            assert start != -1 and end != -1
            ckpt_regions.append((start, end))
            start = end = -1
            current_region = None
        else:
            pass

    if current_region is not None:
        end = len(nodes) - 1
        ckpt_regions.append((start, end))
    return ckpt_regions
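
# For example (illustrative): for labels [[0, 0], [0, 1], [0, 1], None] a call with
# check_idx=0 treats the first three nodes as one region with label 0 and returns
# [(0, 2)]; recursing into that region with check_idx=1 then splits it into
# [(0, 0), (1, 2)] for the inner labels 0 and 1.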


def emit_ckpt_func(body,
                   ckpt_func,
                   node_list: List[Node],
                   emit_node_func,
                   delete_unused_value_func,
                   level=0,
                   in_ckpt=False):
    """Emit ckpt fuction in nested way

    Args:
        body: forward code, in recursive calls, this part will be checkpoint
        functions code
        ckpt_func: checkpoint functions code, in recursive calls, this part
        will be a buffer
        node_list (List[Node]): list of torch.fx.Node
        emit_node_func: function to emit a node
        delete_unused_value_func: function to delete unused value
        level (int, optional): checkpoint level. Defaults to 0.
        in_ckpt (bool, optional): indicates wether the func is in recursive
        call. Defaults to False.
    """
    inputs, outputs = _find_input_and_output_nodes(node_list)

    # if the current checkpoint function uses an int as its label, use the old generation method
    if isinstance(node_list[0].activation_checkpoint, int):
        label = node_list[0].activation_checkpoint
        ckpt_fn_def = _gen_ckpt_fn_def(label, inputs)
        ckpt_func.append(f'{ckpt_fn_def}\n')
        for node in node_list:
            emit_node_func(node, ckpt_func)
            ckpt_func[-1] = '    ' + ckpt_func[-1]
            delete_unused_value_func(node, ckpt_func)

        ckpt_func.append('    ' + _gen_ckpt_output(outputs) + '\n\n')
        activation_offload = getattr(node_list[0], "activation_offload", False)
        usage = _gen_ckpt_usage(label, activation_offload, inputs, outputs, False)
        usage += "\n"
        body.append(usage)

    # use nested ckpt function codegen
    else:
        # label given by each layer, e.g. if you are currently at level [0, 1, 1]
        # the label will be '0_1_1'
        label = "_".join([str(idx) for idx in node_list[0].activation_checkpoint[:level + 1]])
        ckpt_fn_def = _gen_ckpt_fn_def(label, inputs)
        ckpt_func.append(f'{ckpt_fn_def}\n')

        # if there is more level to fetch
        if level + 1 < len(node_list[0].activation_checkpoint):
            ckpt_regions = _find_nested_ckpt_regions(node_list, level + 1)
            start_idx = [item[0] for item in ckpt_regions]
            end_idx = [item[1] for item in ckpt_regions]

            # use ckpt_func_buffer to store nested checkpoint functions
            ckpt_func_buffer = []
            node_idx = 0
            while node_idx < len(node_list):

                if node_idx in start_idx:
                    ckpt_node_list = node_list[node_idx:end_idx[start_idx.index(node_idx)] + 1]
                    emit_ckpt_func(ckpt_func, ckpt_func_buffer, ckpt_node_list, emit_node_func,
                                   delete_unused_value_func, level + 1, True)
                    node_idx += len(ckpt_node_list)

                else:
                    node = node_list[node_idx]
                    emit_node_func(node, ckpt_func)
                    ckpt_func[-1] = '    ' + ckpt_func[-1]
                    delete_unused_value_func(node, ckpt_func)
                    node_idx += 1

            ckpt_func.append('    ' + _gen_ckpt_output(outputs) + '\n\n')
            ckpt_func += ckpt_func_buffer
            activation_offload = getattr(node_list[0], "activation_offload", False)
            usage = _gen_ckpt_usage(label, activation_offload, inputs, outputs, False) + '\n'
            if in_ckpt:
                usage = '    ' + usage
            body.append(usage)

        # last level
        else:
            for node in node_list:
                emit_node_func(node, ckpt_func)
                ckpt_func[-1] = '    ' + ckpt_func[-1]
                delete_unused_value_func(node, ckpt_func)

            ckpt_func.append('    ' + _gen_ckpt_output(outputs) + '\n\n')
            activation_offload = getattr(node_list[0], "activation_offload", False)
            usage = _gen_ckpt_usage(label, activation_offload, inputs, outputs, False) + '\n'
            if in_ckpt:
                usage = '    ' + usage
            body.append(usage)
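
# Shape of the text emitted for a two-level region labelled [0, 1] (illustrative;
# real node names depend on the traced graph). The outer function calls the inner
# one through the checkpoint wrapper:
#
#   def checkpoint_0(self, x):
#       y = colossalai.utils.activation_checkpoint.checkpoint(self.checkpoint_0_1, False, x, use_reentrant=False)
#       return y
#
#   def checkpoint_0_1(self, x):
#       ...
#       return y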


def emit_code_with_nested_activation_checkpoint(body, ckpt_func, nodes, emit_node_func, delete_unused_value_func):
    """Emit code with nested activation checkpoint
    When we detect some of the node.activation_checkpoint is a List, we will use
    this function to emit the activation checkpoint codes.

    Args:
        body: forward code
        ckpt_func: checkpoint functions code
        nodes: graph.nodes
        emit_node_func: function to emit node
        delete_unused_value_func: function to remove the unused value
    """
    ckpt_regions = _find_nested_ckpt_regions(nodes, 0)
    start_idx = [item[0] for item in ckpt_regions]
    end_idx = [item[1] for item in ckpt_regions]

    node_list = list(nodes)

    # this flag prevents inserting the saved tensors hooks
    # definition into ckpt_func more than once
    is_hook_inserted = False
    node_idx = 0
    # iterate until we have processed all the nodes
    while node_idx < len(node_list):

        # process ckpt_regions
        if node_idx in start_idx:
            ckpt_node_list = node_list[node_idx:end_idx[start_idx.index(node_idx)] + 1]
            emit_ckpt_func(body, ckpt_func, ckpt_node_list, emit_node_func, delete_unused_value_func)
            node_idx += len(ckpt_node_list)

        # process node in forward function
        else:
            node = node_list[node_idx]

            # if a node outside of a checkpoint region wants to offload
            # its input activations, we use torch.saved_tensors_hooks
            # to complete the offload process.
            if getattr(node, "activation_offload", False):
                if not is_hook_inserted:
                    pack_hook, unpack_hook = _gen_saved_tensors_hooks()
                    ckpt_func.insert(0, "\n".join([pack_hook, unpack_hook]) + "\n")
                    is_hook_inserted = True

                for par in node.all_input_nodes:
                    # annotate the input tensor for pack hook
                    body.append(f"setattr({repr(par)}, 'offload', True)\n")

                body.append(_gen_save_tensors_hooks_context())
                emit_node_func(node, body)
                body[-1] = '    ' + body[-1]
                delete_unused_value_func(node, body)

            else:
                emit_node_func(node, body)
                delete_unused_value_func(node, body)
            node_idx += 1


def emit_code_with_activation_checkpoint(body, ckpt_func, nodes, emit_node_func, delete_unused_value_func):
    """Emit code with (non-nested) activation checkpointing.
    Used when every node.activation_checkpoint label is a plain int.

    Args:
        body: forward code
        ckpt_func: checkpoint functions code
        nodes: graph.nodes
        emit_node_func: function to emit a node
        delete_unused_value_func: function to remove unused values
    """
    # find the activation checkpoint regions
    ckpt_regions = _find_ckpt_regions(nodes)
    start_idx = [item[0] for item in ckpt_regions]
    end_idx = [item[1] for item in ckpt_regions]
    input_vars = []
    output_vars = []
    within_ckpt_region = False

    node_list = list(nodes)

    # use this variable to avoid inserting hook functions
    # into ckpt_func repeatedly
    is_hook_inserted = False

    # find the input and output var names for each region
    for idx, (start, end) in enumerate(ckpt_regions):
        ckpt_node_list = node_list[start:end + 1]
        inputs, outputs = _find_input_and_output_nodes(ckpt_node_list)
        input_vars.append(inputs)
        output_vars.append(outputs)

    # append code text to body
    for idx, node in enumerate(node_list):
        # if this is the first node of the ckpt region
        # append the ckpt function definition
        if idx in start_idx:
            label = start_idx.index(idx)
            ckpt_fn_def = _gen_ckpt_fn_def(label, input_vars[label])
            ckpt_func.append(f'{ckpt_fn_def}\n')
            within_ckpt_region = True

        # NOTE: emit_node does not emit a string with newline. It depends
        # on delete_unused_values to append one
        # NOTE: currently we separate body and ckpt_func definition
        if within_ckpt_region:
            emit_node_func(node, ckpt_func)
            ckpt_func[-1] = '    ' + ckpt_func[-1]
            delete_unused_value_func(node, ckpt_func)
        else:
            # if a node outside of a checkpoint region wants to offload
            # its input activations, we use torch.saved_tensors_hooks
            # to complete the offload process.
            if getattr(node, "activation_offload", False):
                if not is_hook_inserted:
                    pack_hook, unpack_hook = _gen_saved_tensors_hooks()
                    ckpt_func.insert(0, "\n".join([pack_hook, unpack_hook]) + "\n")
                    is_hook_inserted = True

                for par in node.all_input_nodes:
                    # annotate the input tensor for pack hook
                    body.append(f"setattr({repr(par)}, 'offload', True)\n")

                body.append(_gen_save_tensors_hooks_context())
                emit_node_func(node, body)
                body[-1] = '    ' + body[-1]
                delete_unused_value_func(node, body)

            else:
                emit_node_func(node, body)
                delete_unused_value_func(node, body)

        if idx in end_idx:
            # if this is the last node of the ckpt region
            # generate return statement
            label = end_idx.index(idx)
            return_statement = _gen_ckpt_output(output_vars[label])
            return_statement = f'    {return_statement}\n\n'
            ckpt_func.append(return_statement)

            # we need to check whether this checkpoint region needs to offload its input
            start_node_idx = start_idx[label]
            if hasattr(node_list[start_node_idx], 'activation_offload'):
                activation_offload = node_list[start_node_idx].activation_offload
            else:
                activation_offload = False

            # we need to check whether this checkpoint region needs use_reentrant=False
            use_reentrant = True
            non_leaf_input = 0
            for var in input_vars[label]:
                input_node = next(item for item in node_list if item.name == var)
                if input_node.op != "placeholder":
                    non_leaf_input = 1
                for user in input_node.users:
                    if hasattr(user, "activation_checkpoint"):
                        if user.activation_checkpoint == label:
                            if user.op == "call_module":
                                if hasattr(user.graph.owning_module.get_submodule(user.target), "inplace"):
                                    use_reentrant = not user.graph.owning_module.get_submodule(user.target).inplace

                            elif user.op == "call_function":
                                if "inplace" in user.kwargs:
                                    use_reentrant = not user.kwargs["inplace"]

            # if all the inputs are leaf nodes, we need to set use_reentrant = False
            if not non_leaf_input:
                use_reentrant = False

            # generate checkpoint function call in a new line
            usage = _gen_ckpt_usage(label, activation_offload, input_vars[label], output_vars[label], use_reentrant)
            usage += '\n'
            body.append(usage)
            within_ckpt_region = False


if CODEGEN_AVAILABLE:

    class ActivationCheckpointCodeGen(CodeGen):

        def _gen_python_code(self, nodes, root_module: str, namespace: _Namespace) -> PythonCode:
            free_vars: List[str] = []
            body: List[str] = []
            globals_: Dict[str, Any] = {}
            wrapped_fns: Dict[str, None] = {}

            # Wrap string in list to pass by reference
            maybe_return_annotation: List[str] = ['']

            def add_global(name_hint: str, obj: Any):
                """Add an obj to be tracked as a global.

                We call this for names that reference objects external to the
                Graph, like functions or types.

                Returns: the global name that should be used to reference 'obj' in generated source.
                """
                if _is_from_torch(obj) and obj != torch.device:    # to support registering torch.device
                    # HACK: workaround for how torch custom ops are registered. We
                    # can't import them like normal modules so they must retain their
                    # fully qualified name.
                    return _get_qualified_name(obj)

                # normalize the name hint to get a proper identifier
                global_name = namespace.create_name(name_hint, obj)

                if global_name in globals_:
                    assert globals_[global_name] is obj
                    return global_name
                globals_[global_name] = obj
                return global_name

            # set _custom_builtins here so that we needn't import colossalai in forward
            _custom_builtins["colossalai"] = _CustomBuiltin("import colossalai", colossalai)

            # Pre-fill the globals table with registered builtins.
            for name, (_, obj) in _custom_builtins.items():
                add_global(name, obj)

            def type_repr(o: Any):
                if o == ():
                    # Empty tuple is used for empty tuple type annotation Tuple[()]
                    return '()'

                typename = _type_repr(o)

                if hasattr(o, '__origin__'):
                    # This is a generic type, e.g. typing.List[torch.Tensor]
                    origin_type = _origin_type_map.get(o.__origin__, o.__origin__)
                    origin_typename = add_global(_type_repr(origin_type), origin_type)

                    if hasattr(o, '__args__'):
                        # Assign global names for each of the inner type variables.
                        args = [type_repr(arg) for arg in o.__args__]

                        if len(args) == 0:
                            # Bare type, such as `typing.Tuple` with no subscript
                            # This code-path used in Python < 3.9
                            return origin_typename

                        return f'{origin_typename}[{",".join(args)}]'
                    else:
                        # Bare type, such as `typing.Tuple` with no subscript
                        # This code-path used in Python 3.9+
                        return origin_typename

                # Common case: this is a regular module name like 'foo.bar.baz'
                return add_global(typename, o)

            def _format_args(args: Tuple[Argument, ...], kwargs: Dict[str, Argument]) -> str:

                def _get_repr(arg):
                    # Handle NamedTuples (if it has `_fields`) via add_global.
                    if isinstance(arg, tuple) and hasattr(arg, '_fields'):
                        qualified_name = _get_qualified_name(type(arg))
                        global_name = add_global(qualified_name, type(arg))
                        return f"{global_name}{repr(tuple(arg))}"
                    return repr(arg)

                args_s = ', '.join(_get_repr(a) for a in args)
                kwargs_s = ', '.join(f'{k} = {_get_repr(v)}' for k, v in kwargs.items())
                if args_s and kwargs_s:
                    return f'{args_s}, {kwargs_s}'
                return args_s or kwargs_s

            # Run through reverse nodes and record the first instance of a use
            # of a given node. This represents the *last* use of the node in the
            # execution order of the program, which we will use to free unused
            # values
            node_to_last_use: Dict[Node, Node] = {}
            user_to_last_uses: Dict[Node, List[Node]] = {}

            def register_last_uses(n: Node, user: Node):
                if n not in node_to_last_use:
                    node_to_last_use[n] = user
                    user_to_last_uses.setdefault(user, []).append(n)

            for node in reversed(nodes):
                map_arg(node.args, lambda n: register_last_uses(n, node))
                map_arg(node.kwargs, lambda n: register_last_uses(n, node))

            # NOTE: we add a variable to distinguish body and ckpt_func
            def delete_unused_values(user: Node, body):
                """
                Delete values after their last use. This ensures that values that are
                not used in the remainder of the code are freed and the memory usage
                of the code is optimal.
                """
                if user.op == 'placeholder':
                    return
                if user.op == 'output':
                    body.append('\n')
                    return
                nodes_to_delete = user_to_last_uses.get(user, [])
                if len(nodes_to_delete):
                    to_delete_str = ' = '.join([repr(n) for n in nodes_to_delete] + ['None'])
                    body.append(f';  {to_delete_str}\n')
                else:
                    body.append('\n')

            # NOTE: we add a variable to distinguish body and ckpt_func
            def emit_node(node: Node, body):
                maybe_type_annotation = '' if node.type is None else f' : {type_repr(node.type)}'
                if node.op == 'placeholder':
                    assert isinstance(node.target, str)
                    maybe_default_arg = '' if not node.args else f' = {repr(node.args[0])}'
                    free_vars.append(f'{node.target}{maybe_type_annotation}{maybe_default_arg}')
                    raw_name = node.target.replace('*', '')
                    if raw_name != repr(node):
                        body.append(f'{repr(node)} = {raw_name}\n')
                    return
                elif node.op == 'call_method':
                    assert isinstance(node.target, str)
                    body.append(
                        f'{repr(node)}{maybe_type_annotation} = {_format_target(repr(node.args[0]), node.target)}'
                        f'({_format_args(node.args[1:], node.kwargs)})')
                    return
                elif node.op == 'call_function':
                    assert callable(node.target)
                    # pretty print operators
                    if node.target.__module__ == '_operator' and node.target.__name__ in magic_methods:
                        assert isinstance(node.args, tuple)
                        body.append(f'{repr(node)}{maybe_type_annotation} = '
                                    f'{magic_methods[node.target.__name__].format(*(repr(a) for a in node.args))}')
                        return

                    # pretty print inplace operators; required for jit.script to work properly
                    # not currently supported in normal FX graphs, but generated by torchdynamo
                    if node.target.__module__ == '_operator' and node.target.__name__ in inplace_methods:
                        body.append(f'{inplace_methods[node.target.__name__].format(*(repr(a) for a in node.args))};  '
                                    f'{repr(node)}{maybe_type_annotation} = {repr(node.args[0])}')
                        return

                    qualified_name = _get_qualified_name(node.target)
                    global_name = add_global(qualified_name, node.target)
                    # special case for getattr: node.args could be 2-argument or 3-argument
                    # 2-argument: attribute access; 3-argument: fall through to attrib function call with default value
                    if global_name == 'getattr' and \
                    isinstance(node.args, tuple) and \
                    isinstance(node.args[1], str) and \
                    node.args[1].isidentifier() and \
                    len(node.args) == 2:
                        body.append(
                            f'{repr(node)}{maybe_type_annotation} = {_format_target(repr(node.args[0]), node.args[1])}')
                        return
                    body.append(
                        f'{repr(node)}{maybe_type_annotation} = {global_name}({_format_args(node.args, node.kwargs)})')
                    if node.meta.get('is_wrapped', False):
                        wrapped_fns.setdefault(global_name)
                    return
                elif node.op == 'call_module':
                    assert isinstance(node.target, str)
                    body.append(f'{repr(node)}{maybe_type_annotation} = '
                                f'{_format_target(root_module, node.target)}({_format_args(node.args, node.kwargs)})')
                    return
                elif node.op == 'get_attr':
                    assert isinstance(node.target, str)
                    body.append(f'{repr(node)}{maybe_type_annotation} = {_format_target(root_module, node.target)}')
                    return
                elif node.op == 'output':
                    if node.type is not None:
                        maybe_return_annotation[0] = f" -> {type_repr(node.type)}"
                    body.append(self.generate_output(node.args[0]))
                    return
                raise NotImplementedError(f'node: {node.op} {node.target}')

            # Modified for activation checkpointing
            ckpt_func = []

            # if any node has a list of labels for activation_checkpoint, we
            # will use nested type of activation checkpoint codegen
            if any(isinstance(getattr(node, "activation_checkpoint", None), list) for node in nodes):
                emit_code_with_nested_activation_checkpoint(body, ckpt_func, nodes, emit_node, delete_unused_values)
            else:
                emit_code_with_activation_checkpoint(body, ckpt_func, nodes, emit_node, delete_unused_values)

            if len(body) == 0:
                # If the Graph has no non-placeholder nodes, no lines for the body
                # have been emitted. To continue to have valid Python code, emit a
                # single pass statement
                body.append('pass\n')

            if len(wrapped_fns) > 0:
                wrap_name = add_global('wrap', torch.fx.wrap)
                wrap_stmts = '\n'.join([f'{wrap_name}("{name}")' for name in wrapped_fns])
            else:
                wrap_stmts = ''

            if self._body_transformer:
                body = self._body_transformer(body)

            for name, value in self.additional_globals():
                add_global(name, value)

            # the generated code calls colossalai.utils.activation_checkpoint.checkpoint,
            # so colossalai needs to be available in the forward function's globals
            prologue = self.gen_fn_def(free_vars, maybe_return_annotation[0])
            prologue = ''.join(ckpt_func) + prologue

            code = ''.join(body)
            code = '\n'.join('    ' + line for line in code.split('\n'))
            fn_code = f"""
{wrap_stmts}

{prologue}
{code}"""
            return PythonCode(fn_code, globals_)
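
    # Minimal usage sketch (assumed setup; `ColoTracer` and the annotation
    # predicate are illustrative, not defined in this file):
    #
    #   import torch.fx
    #   from colossalai.fx import ColoTracer
    #
    #   graph = ColoTracer().trace(model)
    #   for node in graph.nodes:
    #       if should_checkpoint(node):             # user-defined predicate
    #           node.activation_checkpoint = 0      # label the node into ckpt region 0
    #   graph.set_codegen(ActivationCheckpointCodeGen())
    #   gm = torch.fx.GraphModule(model, graph)     # codegen runs during recompile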

else:

    def python_code_with_activation_checkpoint(self, root_module: str, namespace: _Namespace) -> PythonCode:
        """
        This method is copied from the _python_code method of torch.fx.graph.Graph. Modifications
        are made so that it can generate code for activation checkpointing.
        """
        free_vars: List[str] = []
        body: List[str] = []
        globals_: Dict[str, Any] = {}
        wrapped_fns: Dict[str, None] = {}

        # Wrap string in list to pass by reference
        maybe_return_annotation: List[str] = ['']

        def add_global(name_hint: str, obj: Any):
            """Add an obj to be tracked as a global.

            We call this for names that reference objects external to the
            Graph, like functions or types.

            Returns: the global name that should be used to reference 'obj' in generated source.
            """
            if _is_from_torch(obj) and obj != torch.device:    # to support registering torch.device
                # HACK: workaround for how torch custom ops are registered. We
                # can't import them like normal modules so they must retain their
                # fully qualified name.
                return _get_qualified_name(obj)

            # normalize the name hint to get a proper identifier
            global_name = namespace.create_name(name_hint, obj)

            if global_name in globals_:
                assert globals_[global_name] is obj
                return global_name
            globals_[global_name] = obj
            return global_name

        # set _custom_builtins here so that we needn't import colossalai in forward
        _custom_builtins["colossalai"] = _CustomBuiltin("import colossalai", colossalai)

        # Pre-fill the globals table with registered builtins.
        for name, (_, obj) in _custom_builtins.items():
            add_global(name, obj)

        def type_repr(o: Any):
            if o == ():
                # Empty tuple is used for empty tuple type annotation Tuple[()]
                return '()'

            typename = _type_repr(o)

            if hasattr(o, '__origin__'):
                # This is a generic type, e.g. typing.List[torch.Tensor]
                origin_type = _origin_type_map.get(o.__origin__, o.__origin__)
                origin_typename = add_global(_type_repr(origin_type), origin_type)

                # Assign global names for each of the inner type variables.
                args = [type_repr(arg) for arg in o.__args__]

                return f'{origin_typename}[{",".join(args)}]'

            # Common case: this is a regular module name like 'foo.bar.baz'
            return add_global(typename, o)

        # Run through reverse nodes and record the first instance of a use
        # of a given node. This represents the *last* use of the node in the
        # execution order of the program, which we will use to free unused
        # values
        node_to_last_use: Dict[Node, Node] = {}
        user_to_last_uses: Dict[Node, List[Node]] = {}

        def register_last_uses(n: Node, user: Node):
            if n not in node_to_last_use:
                node_to_last_use[n] = user
                user_to_last_uses.setdefault(user, []).append(n)

        for node in reversed(self.nodes):
            map_arg(node.args, lambda n: register_last_uses(n, node))
            map_arg(node.kwargs, lambda n: register_last_uses(n, node))

        # NOTE: we add a variable to distinguish body and ckpt_func
        def delete_unused_values(user: Node, body):
            """
            Delete values after their last use. This ensures that values that are
            not used in the remainder of the code are freed and the memory usage
            of the code is optimal.
            """
            if user.op == 'placeholder':
                return
            if user.op == 'output':
                body.append('\n')
                return
            nodes_to_delete = user_to_last_uses.get(user, [])
            if len(nodes_to_delete):
                to_delete_str = ' = '.join([repr(n) for n in nodes_to_delete] + ['None'])
                body.append(f';  {to_delete_str}\n')
            else:
                body.append('\n')

        # NOTE: we add a variable to distinguish body and ckpt_func
        def emit_node(node: Node, body):
            maybe_type_annotation = '' if node.type is None else f' : {type_repr(node.type)}'
            if node.op == 'placeholder':
                assert isinstance(node.target, str)
                maybe_default_arg = '' if not node.args else f' = {repr(node.args[0])}'
                free_vars.append(f'{node.target}{maybe_type_annotation}{maybe_default_arg}')
                raw_name = node.target.replace('*', '')
                if raw_name != repr(node):
                    body.append(f'{repr(node)} = {raw_name}\n')
                return
            elif node.op == 'call_method':
                assert isinstance(node.target, str)
                body.append(f'{repr(node)}{maybe_type_annotation} = {_format_target(repr(node.args[0]), node.target)}'
                            f'({_format_args(node.args[1:], node.kwargs)})')
                return
            elif node.op == 'call_function':
                assert callable(node.target)
                # pretty print operators
                if node.target.__module__ == '_operator' and node.target.__name__ in magic_methods:
                    assert isinstance(node.args, tuple)
                    body.append(f'{repr(node)}{maybe_type_annotation} = '
                                f'{magic_methods[node.target.__name__].format(*(repr(a) for a in node.args))}')
                    return
                qualified_name = _get_qualified_name(node.target)
                global_name = add_global(qualified_name, node.target)
                # special case for getattr: node.args could be 2-argument or 3-argument
                # 2-argument: attribute access; 3-argument: fall through to attrib function call with default value
                if global_name == 'getattr' and \
                   isinstance(node.args, tuple) and \
                   isinstance(node.args[1], str) and \
                   node.args[1].isidentifier() and \
                   len(node.args) == 2:
                    body.append(
                        f'{repr(node)}{maybe_type_annotation} = {_format_target(repr(node.args[0]), node.args[1])}')
                    return
                body.append(
                    f'{repr(node)}{maybe_type_annotation} = {global_name}({_format_args(node.args, node.kwargs)})')
                if node.meta.get('is_wrapped', False):
                    wrapped_fns.setdefault(global_name)
                return
            elif node.op == 'call_module':
                assert isinstance(node.target, str)
                body.append(f'{repr(node)}{maybe_type_annotation} = '
                            f'{_format_target(root_module, node.target)}({_format_args(node.args, node.kwargs)})')
                return
            elif node.op == 'get_attr':
                assert isinstance(node.target, str)
                body.append(f'{repr(node)}{maybe_type_annotation} = {_format_target(root_module, node.target)}')
                return
            elif node.op == 'output':
                if node.type is not None:
                    maybe_return_annotation[0] = f" -> {type_repr(node.type)}"
                if self._pytree_info is None:
                    body.append(f'return {repr(node.args[0])}')
                else:
                    body.append(f'return pytree.tree_unflatten({repr(node.args[0])}, self._out_spec)')
                return
            raise NotImplementedError(f'node: {node.op} {node.target}')

        # Modified for activation checkpointing
        ckpt_func = []

        # if any node has a list of labels for activation_checkpoint, we
        # will use nested type of activation checkpoint codegen
        if any(isinstance(getattr(node, "activation_checkpoint", None), list) for node in self.nodes):
            emit_code_with_nested_activation_checkpoint(body, ckpt_func, self.nodes, emit_node, delete_unused_values)
        else:
            emit_code_with_activation_checkpoint(body, ckpt_func, self.nodes, emit_node, delete_unused_values)

        if len(body) == 0:
            # If the Graph has no non-placeholder nodes, no lines for the body
            # have been emitted. To continue to have valid Python code, emit a
            # single pass statement
            body.append('pass\n')
        if self._pytree_info is not None:
            orig_args = self._pytree_info.orig_args
            has_orig_self = (orig_args[0] == 'self')
            if has_orig_self:
                free_vars.insert(0, 'self')
            if len(free_vars) > 0:    # pytree has placeholders in it
                body.insert(
                    0,
                    f"{', '.join(free_vars)}, = fx_pytree.tree_flatten_spec([{', '.join(orig_args)}], self._in_spec)\n")
        else:
            orig_args = free_vars

        if len(wrapped_fns) > 0:
            wrap_name = add_global('wrap', torch.fx.wrap)
            wrap_stmts = '\n'.join([f'{wrap_name}("{name}")' for name in wrapped_fns])
        else:
            wrap_stmts = ''

        ckpt_func = ''.join(ckpt_func)

        # If the original function didn't have self as its first argument, we
        # would have added it.
        if len(orig_args) == 0 or orig_args[0] != 'self':
            orig_args.insert(0, 'self')
        code = ''.join(body)
        code = '\n'.join('    ' + line for line in code.split('\n'))

        # the generated code calls colossalai.utils.activation_checkpoint.checkpoint,
        # so colossalai needs to be available in the forward function's globals
        fn_code = f"""
{wrap_stmts}

{ckpt_func}
def forward({', '.join(orig_args)}){maybe_return_annotation[0]}:
{code}"""
        return PythonCode(fn_code, globals_)
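
    # Usage sketch for this fallback path (illustrative; the exact attribute that
    # ColossalAI patches is an assumption based on this function's signature, which
    # mirrors torch.fx.Graph._python_code on older torch versions):
    #
    #   import types
    #   graph = tracer.trace(model)
    #   graph._python_code = types.MethodType(python_code_with_activation_checkpoint, graph)
    #   gm.recompile()    # regenerates forward() using the patched code generator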