OpenDAS / torch-scatter

Commit 778f6245
authored Aug 05, 2021 by rusty1s

allow loading of CPU wheels in PyTorch CUDA installation

parent 66f87e5f
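Before this commit, a single `suffix` computed from `torch.cuda.is_available()` decided which compiled extension to import, so a CUDA-enabled PyTorch always looked for the `*_cuda` modules and failed when only the CPU wheel of torch-scatter was installed. The new code looks for the CUDA extension first and falls back to the CPU one. Below is a minimal sketch of that fallback pattern, using the same PathFinder-based lookup as torch_scatter/__init__.py; the helper name `_find_extension` is invented here for illustration and is not part of the package.

```python
import importlib.machinery
import os.path as osp

import torch


def _find_extension(library: str, base_dir: str):
    # Hypothetical helper: prefer the CUDA build of a compiled extension,
    # fall back to the CPU build if the CUDA one is not shipped.
    cuda_spec = importlib.machinery.PathFinder().find_spec(
        f'{library}_cuda', [base_dir])
    cpu_spec = importlib.machinery.PathFinder().find_spec(
        f'{library}_cpu', [base_dir])
    return cuda_spec or cpu_spec


spec = _find_extension('_scatter', osp.dirname(__file__))
if spec is not None:
    # load_library registers the custom ops, e.g. under torch.ops.torch_scatter
    torch.ops.load_library(spec.origin)
```

With a CUDA build of PyTorch but a CPU-only torch-scatter wheel, `cuda_spec` is `None`, so the CPU extension is loaded instead of the import failing, which is what the commit title describes.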
Changes: 1 changed file with 38 additions and 39 deletions

torch_scatter/__init__.py  (+38, -39)
@@ -6,50 +6,49 @@ import torch
 __version__ = '2.0.8'
 
-suffix = 'cuda' if torch.cuda.is_available() else 'cpu'
-
 for library in ['_version', '_scatter', '_segment_csr', '_segment_coo']:
-    try:
-        torch.ops.load_library(importlib.machinery.PathFinder().find_spec(
-            f'{library}_{suffix}', [osp.dirname(__file__)]).origin)
-    except AttributeError as e:
-        if os.getenv('BUILD_DOCS', '0') != '1':
-            raise AttributeError(e)
-
+    cuda_spec = importlib.machinery.PathFinder().find_spec(
+        f'{library}_cuda', [osp.dirname(__file__)])
+    cpu_spec = importlib.machinery.PathFinder().find_spec(
+        f'{library}_cpu', [osp.dirname(__file__)])
+    spec = cuda_spec or cpu_spec
+    if spec is not None:
+        torch.ops.load_library(spec.origin)
+    elif os.getenv('BUILD_DOCS', '0') != '1':
+        raise ImportError(f"Could not find module '{library}_cpu' in "
+                          f"{osp.dirname(__file__)}")
+    else:
         from .placeholder import cuda_version_placeholder
         torch.ops.torch_scatter.cuda_version = cuda_version_placeholder
 
         from .placeholder import scatter_placeholder
         torch.ops.torch_scatter.scatter_mul = scatter_placeholder
 
         from .placeholder import scatter_arg_placeholder
         torch.ops.torch_scatter.scatter_min = scatter_arg_placeholder
         torch.ops.torch_scatter.scatter_max = scatter_arg_placeholder
 
         from .placeholder import segment_csr_placeholder
         from .placeholder import segment_csr_arg_placeholder
         from .placeholder import gather_csr_placeholder
         torch.ops.torch_scatter.segment_sum_csr = segment_csr_placeholder
         torch.ops.torch_scatter.segment_mean_csr = segment_csr_placeholder
         torch.ops.torch_scatter.segment_min_csr = segment_csr_arg_placeholder
         torch.ops.torch_scatter.segment_max_csr = segment_csr_arg_placeholder
         torch.ops.torch_scatter.gather_csr = gather_csr_placeholder
 
         from .placeholder import segment_coo_placeholder
         from .placeholder import segment_coo_arg_placeholder
         from .placeholder import gather_coo_placeholder
         torch.ops.torch_scatter.segment_sum_coo = segment_coo_placeholder
         torch.ops.torch_scatter.segment_mean_coo = segment_coo_placeholder
         torch.ops.torch_scatter.segment_min_coo = segment_coo_arg_placeholder
         torch.ops.torch_scatter.segment_max_coo = segment_coo_arg_placeholder
         torch.ops.torch_scatter.gather_coo = gather_coo_placeholder
 
-if torch.cuda.is_available():  # pragma: no cover
-    cuda_version = torch.ops.torch_scatter.cuda_version()
-
-    if cuda_version == -1:
-        major = minor = 0
-    elif cuda_version < 10000:
-        major, minor = int(str(cuda_version)[0]), int(str(cuda_version)[2])
-    else:
-        major, minor = int(str(cuda_version)[0:2]), int(str(cuda_version)[3])
+cuda_version = torch.ops.torch_scatter.cuda_version()
+if torch.cuda.is_available() and cuda_version != -1:  # pragma: no cover
+    if cuda_version < 10000:
+        major, minor = int(str(cuda_version)[0]), int(str(cuda_version)[2])
+    else:
+        major, minor = int(str(cuda_version)[0:2]), int(str(cuda_version)[3])
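The tail of the hunk extracts the CUDA major/minor version from an integer that follows nvcc's CUDA_VERSION encoding (major * 1000 + minor * 10), which is why the string slicing differs for single-digit and double-digit major versions. A quick worked example of that parsing; the helper name `parse_cuda_version` is only for illustration:

```python
def parse_cuda_version(cuda_version):
    # Mirrors the slicing in torch_scatter/__init__.py:
    # cuda_version is encoded as major * 1000 + minor * 10.
    if cuda_version < 10000:   # single-digit major, e.g. 9020 -> (9, 2)
        return int(str(cuda_version)[0]), int(str(cuda_version)[2])
    else:                      # double-digit major, e.g. 11010 -> (11, 1)
        return int(str(cuda_version)[0:2]), int(str(cuda_version)[3])


assert parse_cuda_version(9020) == (9, 2)     # CUDA 9.2
assert parse_cuda_version(10020) == (10, 2)   # CUDA 10.2
assert parse_cuda_version(11010) == (11, 1)   # CUDA 11.1
```

The new guard `torch.cuda.is_available() and cuda_version != -1` skips this check when the loaded extension reports no CUDA support (presumably the CPU build returns -1 from `cuda_version()`), so a CPU wheel no longer trips the version comparison under a CUDA build of PyTorch.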