Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
OpenDAS
ColossalAI
Commits
81e0da7f
Unverified
Commit
81e0da7f
authored
Nov 30, 2022
by
Frank Lee
Committed by
GitHub
Nov 30, 2022
Browse files
[setup] supported conda-installed torch (#2048)
* [setup] supported conda-installed torch * polish code
parent
e37f3db4
Changes
4
Show whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
36 additions
and
19 deletions
+36
-19
.gitignore
.gitignore
+3
-0
colossalai/__init__.py
colossalai/__init__.py
+8
-1
requirements/requirements.txt
requirements/requirements.txt
+1
-2
setup.py
setup.py
+24
-16
No files found.
.gitignore
View file @
81e0da7f
...
...
@@ -141,3 +141,6 @@ docs/.build
# pytorch checkpoint
*.pt
# ignore version.py generated by setup.py
colossalai/version.py
colossalai/__init__.py
View file @
81e0da7f
...
...
@@ -7,4 +7,11 @@ from .initialize import (
launch_from_torch
,
)
# NOTE(review): this span is a GitLab inline-diff rendering of
# colossalai/__init__.py — the literal version assignment below appears to be
# the pre-commit line and the try/except its post-commit replacement; both
# are present only because the page shows the diff. Confirm against the repo.
__version__ = '0.1.11rc4'
try:
    # .version will be created by setup.py
    from .version import __version__
except ModuleNotFoundError:
    # this will only happen if the user did not run `pip install`
    # and directly set PYTHONPATH to use Colossal-AI which is a bad practice
    __version__ = '0.0.0'
    print('please install Colossal-AI from https://www.colossalai.org/download or from source')
requirements/requirements.txt
View file @
81e0da7f
torch>=1.8
numpy
tqdm
psutil
...
...
setup.py
View file @
81e0da7f
...
...
@@ -4,6 +4,19 @@ import subprocess
from setuptools import Extension, find_packages, setup

# NOTE(review): this appears to be the pre-commit (removed) top of setup.py —
# torch is imported eagerly here, so a missing torch aborts installation
# immediately with ModuleNotFoundError; the commit relaxes this further down.
try:
    import torch
    # CUDA_HOME / BuildExtension / CUDAExtension come from torch's C++
    # extension tooling and are used elsewhere in setup.py to build CUDA ops.
    from torch.utils.cpp_extension import CUDA_HOME, BuildExtension, CUDAExtension
    # surface the detected torch version prominently in the build log
    print("\n\ntorch.__version__ = {}\n\n".format(torch.__version__))
    TORCH_MAJOR = int(torch.__version__.split('.')[0])
    TORCH_MINOR = int(torch.__version__.split('.')[1])
    # hard minimum of torch 1.10 in this (pre-commit) version of the check
    if TORCH_MAJOR < 1 or (TORCH_MAJOR == 1 and TORCH_MINOR < 10):
        raise RuntimeError("Colossal-AI requires Pytorch 1.10 or newer.\n"
                           "The latest stable release can be obtained from https://pytorch.org/")
except ImportError:
    raise ModuleNotFoundError('torch is not found. You need to install PyTorch before installing Colossal-AI.')

# ninja build does not work unless include_dirs are abs path
this_dir = os.path.dirname(os.path.abspath(__file__))
build_cuda_ext = True
...
...
@@ -93,29 +106,24 @@ def fetch_readme():
# NOTE(review): GitLab inline-diff residue — pre-commit and post-commit lines
# of setup.py are interleaved below. Indentation is reconstructed only far
# enough to keep the token stream syntactically valid; do NOT treat the
# statement nesting here as the real program. Verify against the repository.
def get_version():
    with open('version.txt') as f:  # pre-commit line: presumably relied on the current working dir
        # post-commit lines: resolve all paths relative to setup.py itself,
        # so installation works regardless of the invocation directory
        setup_file_path = os.path.abspath(__file__)
        project_path = os.path.dirname(setup_file_path)
        version_txt_path = os.path.join(project_path, 'version.txt')
        version_py_path = os.path.join(project_path, 'colossalai/version.py')
    with open(version_txt_path) as f:
        version = f.read().strip()
    if build_cuda_ext:
        # append the torch/CUDA versions to the package version string,
        # producing a local version tag like +torch1.12cu11.3
        torch_version = '.'.join(torch.__version__.split('.')[:2])
        cuda_version = '.'.join(get_cuda_bare_metal_version(CUDA_HOME)[1:])
        version += f'+torch{torch_version}cu{cuda_version}'
    return version  # presumably the pre-commit return; a second one appears below
    # write version into version.py
    with open(version_py_path, 'w') as f:
        f.write(f"__version__ = '{version}'\n")
    # NOTE(review): from here on this looks like the post-commit module-level
    # torch check (replacing the eager check at the top of setup.py); it is
    # nested here only because the diff rendering interleaved it with
    # get_version() — in the real file it is top-level code.
    if build_cuda_ext:
        try:
            import torch
            from torch.utils.cpp_extension import CUDA_HOME, BuildExtension, CUDAExtension
            print("\n\ntorch.__version__ = {}\n\n".format(torch.__version__))
            TORCH_MAJOR = int(torch.__version__.split('.')[0])
            TORCH_MINOR = int(torch.__version__.split('.')[1])
            return version  # presumably the post-commit end of get_version()
            # minimum lowered to torch 1.8 in this (post-commit) check,
            # matching requirements.txt's torch>=1.8
            if TORCH_MAJOR < 1 or (TORCH_MAJOR == 1 and TORCH_MINOR < 8):
                raise RuntimeError("Colossal-AI requires Pytorch 1.8 or newer.\n"
                                   "The latest stable release can be obtained from https://pytorch.org/")
        except ImportError:
            # torch missing is no longer fatal: fall back to a CPU-only build
            print('torch is not found. CUDA extension will not be installed')
            build_cuda_ext = False
        if build_cuda_ext:
            # only build CUDA ops when a usable CUDA toolkit matches the torch binary
            build_cuda_ext = check_cuda_availability(CUDA_HOME) and check_cuda_torch_binary_vs_bare_metal(CUDA_HOME)
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment