"src/git@developer.sourcefind.cn:renzhc/diffusers_dcu.git" did not exist on "3159e60d59819ae874ea3cdbd28e02d9e6c57321"
Commit b24daa66 authored by Minjie Wang

Change to relative imports within dgl

parent 842d3768
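
This commit converts the package's absolute self-imports (import dgl.backend as F, from dgl.graph import DGLGraph, and so on) into explicit relative imports, so each module resolves its siblings through the package itself rather than through whichever dgl happens to sit on sys.path. A minimal sketch of the pattern applied in the hunks below; the two module names are taken from the diff, everything else is illustrative:

# Before: absolute imports that re-enter the package from the outside
import dgl.backend as F
from dgl.graph import DGLGraph

# After: explicit relative imports; the modules also carry
# "from __future__ import absolute_import" so plain imports stay absolute on Python 2
from . import backend as F
from .graph import DGLGraph
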
from __future__ import absolute_import
import os
__backend__ = os.environ.get('DGLBACKEND', 'pytorch').lower()
if __backend__ == 'numpy':
-    from dgl.backend.numpy import *
+    from .numpy import *
elif __backend__ == 'pytorch':
-    from dgl.backend.pytorch import *
+    from .pytorch import *
else:
    raise Exception("Unsupported backend %s" % __backend__)
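
The backend is still chosen at import time from the DGLBACKEND environment variable, defaulting to pytorch; only the location of the backend subpackages changes, from dgl.backend.numpy / dgl.backend.pytorch to the relative .numpy / .pytorch. A minimal usage sketch, assuming the variable is set before dgl is first imported (the script name and comments are illustrative):

# Equivalent shell form: DGLBACKEND=numpy python script.py
import os
os.environ['DGLBACKEND'] = 'numpy'   # or 'pytorch'; any other value raises the Exception above

import dgl   # modules inside the package then pick the backend up via "from . import backend as F"
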
@@ -4,7 +4,7 @@ import torch as th
from .._ffi.runtime_ctypes import TVMType, TVMContext, TVMArray
from .._ffi.runtime_ctypes import TypeCode, tvm_shape_index_t
-from dgl.context import cpu, gpu
+from ..context import cpu, gpu
# Tensor types
Tensor = th.Tensor
......
@@ -3,9 +3,8 @@ from __future__ import absolute_import
import numpy as np
-from dgl.graph import DGLGraph
-import dgl.backend as F
-import dgl
+from .graph import DGLGraph
+from . import backend as F
class BatchedDGLGraph(DGLGraph):
    def __init__(self, graph_list, node_attrs=None, edge_attrs=None, **attr):
......
@@ -7,9 +7,9 @@ from __future__ import absolute_import
import igraph
-import dgl.backend as F
-from dgl.backend import Tensor
-import dgl.utils as utils
+from . import backend as F
+from .backend import Tensor
+from . import utils
class CachedGraph:
    def __init__(self):
......
from __future__ import absolute_import
from ._ffi.function import _init_api
-import dgl.backend as F
+from . import backend as F
class DGLGraph(object):
    def __init__(self):
......
@@ -11,7 +11,7 @@ import networkx as nx
import scipy.sparse as sp
import os, sys
-from dgl.data.utils import download, extract_archive, get_download_dir
+from .utils import download, extract_archive, get_download_dir
_urls = {
'cora' : 'https://www.dropbox.com/s/3ggdpkj7ou8svoc/cora.zip?dl=1',
......
@@ -10,9 +10,9 @@ from nltk.tree import Tree
from nltk.corpus.reader import BracketParseCorpusReader
import networkx as nx
-import dgl
-import dgl.backend as F
-from dgl.data.utils import download, extract_archive, get_download_dir
+from .. import backend as F
+from ..graph import DGLGraph
+from .utils import download, extract_archive, get_download_dir
_urls = {
'sst' : 'https://www.dropbox.com/s/dw8kr2vuq7k4dqi/sst.zip?dl=1',
......
@@ -4,9 +4,9 @@ from __future__ import absolute_import
from collections import MutableMapping
import numpy as np
-import dgl.backend as F
-from dgl.backend import Tensor
-import dgl.utils as utils
+from . import backend as F
+from .backend import Tensor
+from . import utils
class Frame(MutableMapping):
    def __init__(self, data=None):
......
-from .message import *
+"""DGL builtin functors"""
+from __future__ import absolute_import
+from .message import *
+from .reducer import *
"""Built-in reducer function."""
from __future__ import absolute_import
-import dgl.backend as F
+from .. import backend as F
__all__ = ["ReduceFunction", "sum", "max"]
......
"""Package for graph generators"""
from __future__ import absolute_import
from .line import *
@@ -4,9 +4,9 @@ from __future__ import absolute_import
import networkx as nx
import numpy as np
-import dgl.backend as F
-from dgl.graph import DGLGraph
-from dgl.frame import FrameRef
+from .. import backend as F
+from ..graph import DGLGraph
+from ..frame import FrameRef
def line_graph(G, no_backtracking=False):
    """Create the line graph that shares the underlying features.
......
@@ -6,15 +6,15 @@ import networkx as nx
from networkx.classes.digraph import DiGraph
-import dgl
-from dgl.base import ALL, is_all, __MSG__, __REPR__
-import dgl.backend as F
-from dgl.backend import Tensor
-from dgl.cached_graph import CachedGraph, create_cached_graph
-import dgl.context as context
-from dgl.frame import FrameRef, merge_frames
-from dgl.nx_adapt import nx_init
-import dgl.scheduler as scheduler
-import dgl.utils as utils
+from .base import ALL, is_all, __MSG__, __REPR__
+from . import backend as F
+from .backend import Tensor
+from .cached_graph import CachedGraph, create_cached_graph
+from . import context
+from .frame import FrameRef, merge_frames
+from .nx_adapt import nx_init
+from . import scheduler
+from . import utils
class DGLGraph(DiGraph):
    """Base graph class specialized for neural networks on graphs.
......
"""Package nn modules"""
from __future__ import absolute_import
import os
__backend__ = os.environ.get('DGLBACKEND', 'pytorch').lower()
if __backend__ == 'numpy':
......
@@ -7,9 +7,8 @@ GCN with SPMV specialization.
"""
import torch.nn as nn
-import dgl
-import dgl.function as fn
-from dgl.base import ALL, is_all
+from ... import function as fn
+from ...base import ALL, is_all
class NodeUpdateModule(nn.Module):
    def __init__(self, in_feats, out_feats, activation=None):
......
@@ -3,10 +3,10 @@ from __future__ import absolute_import
import numpy as np
-import dgl.backend as F
-import dgl.function.message as fmsg
-import dgl.function.reducer as fred
-import dgl.utils as utils
+from . import backend as F
+from .function import message as fmsg
+from .function import reducer as fred
+from . import utils
__all__ = ["degree_bucketing", "get_executor"]
......
@@ -2,11 +2,12 @@
from __future__ import absolute_import
import networkx as nx
-import dgl.backend as F
-from dgl.frame import Frame, FrameRef
-from dgl.graph import DGLGraph
-from dgl.nx_adapt import nx_init
-import dgl.utils as utils
+from . import backend as F
+from .frame import Frame, FrameRef
+from .graph import DGLGraph
+from .nx_adapt import nx_init
+from . import utils
class DGLSubGraph(DGLGraph):
    # TODO(gaiyu): ReadOnlyGraph
......
@@ -5,8 +5,8 @@ from collections import Mapping
from functools import wraps
import numpy as np
-import dgl.backend as F
-from dgl.backend import Tensor, SparseTensor
+from . import backend as F
+from .backend import Tensor, SparseTensor
def is_id_tensor(u):
    """Return whether the input is a supported id tensor."""
......