Commit 21d47d0e authored by yuguo

Oneflow 0.8 for DCU

# Changelog for OneFlow v0.8.0
## v0.8.0-dev
### 1. Enhancements
#### Eager
- placeholder
#### System
- placeholder
#### Build
### 2. Bug fixes
#### Placeholder
### 3. Deprecations
#### Single client
## v0.7.0
The CHANGELOG for v0.7.0 releases can be found [in the v0.7.0 tag](https://github.com/Oneflow-Inc/oneflow/releases/tag/v0.7.0).
# Minimum CMake required
cmake_minimum_required(VERSION 3.18.0)
set(CMAKE_INSTALL_MESSAGE LAZY CACHE STRING "")
set(CMAKE_EXPORT_COMPILE_COMMANDS ON CACHE BOOL "")
option(THIRD_PARTY "Build third party" ON)
option(ONEFLOW "Build oneflow" ON)
if(NOT THIRD_PARTY AND NOT ONEFLOW)
message(FATAL_ERROR "at least one of flags THIRD_PARTY and ONEFLOW should be ON")
endif()
option(USE_CLANG_FORMAT "" OFF)
option(USE_CLANG_TIDY "" OFF)
option(BUILD_PYTHON "" ON)
option(BUILD_CPP_API "Option to build OneFlow C++ API (beta)" OFF)
option(BUILD_RDMA "" OFF)
option(BUILD_CUDA "" ON)
option(BUILD_ROCM "" OFF)
option(BUILD_TESTING "" OFF)
option(BUILD_GIT_VERSION "" ON)
option(BUILD_PROFILER "" OFF)
option(BUILD_FOR_CI "" OFF)
option(WITH_COCOAPI "Option to build with COCO API" ON)
option(WITH_ZLIB "" ON)
option(WITH_ONEDNN "" ON)
option(WITH_MLIR "" OFF)
option(WITH_MLIR_CUDA_CODEGEN "" OFF)
option(OF_SOFTMAX_USE_FAST_MATH "" ON)
option(OF_LAYER_NORM_USE_FAST_MATH "" ON)
option(TREAT_WARNINGS_AS_ERRORS "" ON)
option(MAYBE_NEED_ERROR_MSG_CHECK "" OFF)
# Reference:
# https://medium.com/@alasher/colored-c-compiler-output-with-ninja-clang-gcc-10bfe7f2b949
option(OF_FORCE_COLORED_DIAGNOSTICS "Always produce ANSI-colored diagnostics (GNU/Clang only)." ON)
if (BUILD_CUDA AND BUILD_ROCM)
message(FATAL_ERROR "Compile with cuda and rocm enabled simultaneously are not supported right now.")
endif()
if (BUILD_ROCM)
# Search for rocm in common locations
list(APPEND CMAKE_PREFIX_PATH ${ROCM_PATH}/hip ${ROCM_PATH} ${ROCM_PATH}/llvm)
endif()
set(ONEFLOW_CURRENT_VERSION 0.7.0.dev CACHE STRING "")
if(BUILD_FOR_CI)
set(ONEFLOW_CURRENT_VERSION ci)
endif()
set(LLVM_PROVIDER "in-tree" CACHE STRING "in-tree, install")
if(NOT WITH_MLIR)
set(LLVM_PROVIDER "install"
CACHE STRING "in-tree will build LLVM's ALL, not what we want when not building MLIR" FORCE)
endif(NOT WITH_MLIR)
set(RPC_BACKEND "GRPC,LOCAL" CACHE STRING "")
set(THIRD_PARTY_MIRROR "" CACHE STRING "")
set(PIP_INDEX_MIRROR "" CACHE STRING "")
set(CPU_THREADING_RUNTIME "SEQ" CACHE STRING "")
if(APPLE)
set(RPC_BACKEND "LOCAL")
set(BUILD_CUDA OFF)
set(WITH_COCOAPI OFF)
set(WITH_ONEDNN OFF)
endif()
set(CUDNN_STATIC OFF CACHE BOOL "")
project(oneflow C CXX)
if(NOT CMAKE_BUILD_TYPE)
message(STATUS "No build type selected, default to Release")
set(CMAKE_BUILD_TYPE "Release" CACHE STRING "Build type (default Release)" FORCE)
endif()
if(NOT CMAKE_BUILD_TYPE MATCHES "^(Debug|Release|RelWithDebInfo|MinSizeRel)$")
message(
FATAL_ERROR
"Expected CMAKE_BUILD_TYPE is Debug, Release, RelWithDebInfo or MinSizeRel, got ${CMAKE_BUILD_TYPE}"
)
endif()
message(STATUS "CMAKE_BUILD_TYPE: ${CMAKE_BUILD_TYPE}")
set(COMPILER_VERSION_ERROR_MSG "At least gcc 7, clang 5, or Apple clang 12 is required.")
if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
if("${CMAKE_CXX_COMPILER_VERSION}" VERSION_LESS 7)
message(FATAL_ERROR ${COMPILER_VERSION_ERROR_MSG})
endif()
if(CPU_THREADING_RUNTIME STREQUAL "OMP")
set(OMP_FLAGS "-fopenmp")
endif()
elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang")
# Reference:
# https://releases.llvm.org/11.0.0/tools/clang/docs/OpenMPSupport.html
if("${CMAKE_CXX_COMPILER_VERSION}" VERSION_LESS 11)
if(CPU_THREADING_RUNTIME STREQUAL "OMP")
message(
FATAL_ERROR
"libopenmp is not supported under clang10, please use TBB with '-DCPU_THREADING_RUNTIME=TBB'."
)
endif()
endif()
if("${CMAKE_CXX_COMPILER_VERSION}" VERSION_LESS 5)
message(FATAL_ERROR ${COMPILER_VERSION_ERROR_MSG})
endif()
elseif("${CMAKE_CXX_COMPILER_ID}" STREQUAL "AppleClang")
if("${CMAKE_CXX_COMPILER_VERSION}" VERSION_LESS 12)
message(FATAL_ERROR ${COMPILER_VERSION_ERROR_MSG})
endif()
else()
message(WARNING "Unknown compiler \"${CMAKE_CXX_COMPILER_ID}\".")
endif()
set(oneflow_cmake_dir ${PROJECT_SOURCE_DIR}/cmake)
get_filename_component(real_src_dir "${CMAKE_SOURCE_DIR}" REALPATH)
get_filename_component(real_bin_dir "${CMAKE_BINARY_DIR}" REALPATH)
if("${real_src_dir}" STREQUAL "${real_bin_dir}")
message(FATAL_ERROR "In-source build not allowed")
endif()
# Modules
list(APPEND CMAKE_MODULE_PATH ${oneflow_cmake_dir}/third_party)
list(APPEND CMAKE_MODULE_PATH ${oneflow_cmake_dir})
include(util)
include(proto2cpp)
if(NOT DEFINED USE_CXX11_ABI)
check_cxx11_abi(CXX11_ABI_AVAILABLE)
set(USE_CXX11_ABI ${CXX11_ABI_AVAILABLE})
elseif(USE_CXX11_ABI)
check_cxx11_abi(CXX11_ABI_AVAILABLE)
if(NOT CXX11_ABI_AVAILABLE)
message(FATAL_ERROR "cxx11 abi is not available for current compiler")
endif()
endif()
message(STATUS "USE_CXX11_ABI: ${USE_CXX11_ABI}")
if(WITH_MLIR)
add_definitions(-DWITH_MLIR)
if(WITH_MLIR_CUDA_CODEGEN)
add_definitions(-DWITH_MLIR_CUDA_CODEGEN)
endif()
endif()
if(WITH_COCOAPI)
add_definitions(-DWITH_COCOAPI)
endif()
if(USE_CXX11_ABI)
add_definitions(-D_GLIBCXX_USE_CXX11_ABI=1)
else()
add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0)
endif()
if(BUILD_PROFILER)
add_definitions(-DOF_ENABLE_PROFILER)
endif()
if(OF_SOFTMAX_USE_FAST_MATH)
add_definitions(-DOF_SOFTMAX_USE_FAST_MATH)
endif()
if(OF_LAYER_NORM_USE_FAST_MATH)
add_definitions(-DOF_LAYER_NORM_USE_FAST_MATH)
endif()
if(CPU_THREADING_RUNTIME STREQUAL "TBB")
add_definitions(-DOF_CPU_THREADING_RUNTIME=OF_RUNTIME_TBB)
elseif(CPU_THREADING_RUNTIME STREQUAL "OMP")
add_definitions(-DOF_CPU_THREADING_RUNTIME=OF_RUNTIME_OMP)
elseif(CPU_THREADING_RUNTIME STREQUAL "SEQ")
add_definitions(-DOF_CPU_THREADING_RUNTIME=OF_RUNTIME_SEQ)
else()
message(FATAL_ERROR "CPU_THREADING_RUNTIME must be one of: TBB, OMP, SEQ")
endif()
if(OF_FORCE_COLORED_DIAGNOSTICS)
add_compile_options(
$<$<COMPILE_LANGUAGE:CXX>:$<$<CXX_COMPILER_ID:GNU>:-fdiagnostics-color=always>>
$<$<COMPILE_LANGUAGE:CXX>:$<$<CXX_COMPILER_ID:Clang>:-fcolor-diagnostics>>
$<$<COMPILE_LANGUAGE:CUDA>:$<$<CUDA_COMPILER_ID:Clang>:-fcolor-diagnostics>>)
endif()
if(RPC_BACKEND MATCHES "GRPC")
add_definitions(-DRPC_BACKEND_GRPC)
message(STATUS "RPC backend enabled: gRPC")
set(SUPPORTED_RPC_BACKEND_FOUND 1)
endif()
if(WITH_ONEDNN)
add_definitions(-DWITH_ONEDNN)
endif()
add_definitions(-DRPC_BACKEND_LOCAL)
message(STATUS "RPC backend enabled: local")
enable_testing()
set(CMAKE_CXX_STANDARD 14)
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
set(THIRD_PARTY_DIR "${PROJECT_BINARY_DIR}/third_party_install"
CACHE PATH "Where to install third party headers and libs")
set(ONEFLOW_PYTHON_DIR "${PROJECT_SOURCE_DIR}/python" CACHE PATH "oneflow python src dir")
if(WIN32)
set(CMAKE_BUILD_TYPE Debug)
add_definitions(-DNOMINMAX -D_WIN32_WINNT=0x0A00 -DLANG_CXX11 -DCOMPILER_MSVC
-D__VERSION__=\"MSVC\")
add_definitions(
-DWIN32
-DOS_WIN
-D_MBCS
-DWIN64
-DWIN32_LEAN_AND_MEAN
-DNOGDI
-DPLATFORM_WINDOWS
-D_ITERATOR_DEBUG_LEVEL=0)
add_definitions(
/bigobj
/nologo
/EHsc
/GF
/FC
/MP
/Gm-)
add_definitions(-DGOOGLE_GLOG_DLL_DECL=)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /MP")
foreach(
flag_var
CMAKE_C_FLAGS
CMAKE_C_FLAGS_DEBUG
CMAKE_C_FLAGS_RELEASE
CMAKE_CXX_FLAGS
CMAKE_CXX_FLAGS_DEBUG
CMAKE_CXX_FLAGS_RELEASE
CMAKE_CXX_FLAGS_MINSIZEREL
CMAKE_CXX_FLAGS_RELWITHDEBINFO)
if(${flag_var} MATCHES "/MD")
string(REGEX REPLACE "/MD" "/MT" ${flag_var} "${${flag_var}}")
endif()
endforeach()
#set(CMAKE_EXE_LINKER_FLAGS_DEBUG "${CMAKE_EXE_LINKER_FLAGS} /DEBUG:FASTLINK")
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /D_ITERATOR_DEBUG_LEVEL=0")
else()
set(EXTRA_CXX_FLAGS "-std=c++14 -Wall -Wno-sign-compare -Wno-unused-function -fPIC")
if(APPLE)
set(EXTRA_CXX_FLAGS "${EXTRA_CXX_FLAGS} -Wno-deprecated-declarations")
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${EXTRA_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} ${EXTRA_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} ${EXTRA_CXX_FLAGS}")
set(CMAKE_CXX_FLAGS_RELWITHDEBINFO "${CMAKE_CXX_FLAGS_RELWITHDEBINFO} ${EXTRA_CXX_FLAGS}")
endif()
if(BUILD_PYTHON)
set(ONEFLOW_INCLUDE_DIR "${ONEFLOW_PYTHON_DIR}/oneflow/include")
endif(BUILD_PYTHON)
if(CPU_THREADING_RUNTIME STREQUAL "TBB")
set(ONETBB_URL
https://github.com/oneapi-src/oneTBB/archive/3db67b5ba2a81bd1288325c5847e09e13c46f4d7.zip)
use_mirror(VARIABLE ONETBB_URL URL ${ONETBB_URL})
set(ONETBB_MD5 7545d4084baff17af73da2dae5ab8005)
endif()
set(ROBIN_HOOD_HASHING_URL
https://github.com/martinus/robin-hood-hashing/archive/refs/tags/3.11.5.tar.gz)
use_mirror(VARIABLE ROBIN_HOOD_HASHING_URL URL ${ROBIN_HOOD_HASHING_URL})
set(ROBIN_HOOD_HASHING_MD5 a78bd30a7582f25984f8592652836467)
set(FMT_URL https://github.com/fmtlib/fmt/archive/48b7e3dafb27ece02cd6addc8bd1041c79d59c2c.zip)
use_mirror(VARIABLE FMT_URL URL ${FMT_URL})
set(FMT_MD5 45925a979ed7195e0c88a70be691de09)
set(KINETO_URL
https://github.com/pytorch/kineto/archive/ff8dba20499a660650632952be76450bd70a52a6.zip)
use_mirror(VARIABLE KINETO_URL URL ${KINETO_URL})
set(KINETO_MD5 f9b550591b3899fb267270c19484933f)
include(cuda)
add_subdirectory(external)
include(third_party)
if(BUILD_CUDA)
# NOTE: if you want to embed PTX for a compute capability different from the PTX/binaries produced below, add the corresponding flags here
if(NOT DEFINED CMAKE_CUDA_ARCHITECTURES)
list(APPEND CMAKE_CUDA_ARCHITECTURES 60-real)
# Tesla P40/P4, Quadro Pxxx/Pxxxx, GeForce GTX 10xx, TITAN X/Xp
list(APPEND CMAKE_CUDA_ARCHITECTURES 61-real)
# V100, TITAN V
list(APPEND CMAKE_CUDA_ARCHITECTURES 70-real)
if(CUDA_VERSION VERSION_GREATER_EQUAL "10.0")
# T4, Quadro RTX xxxx, Txxxx, Geforce RTX 20xx, TITAN RTX
list(APPEND CMAKE_CUDA_ARCHITECTURES 75-real)
endif()
if(CUDA_VERSION VERSION_GREATER_EQUAL "11.0")
# A100
list(APPEND CMAKE_CUDA_ARCHITECTURES 80-real)
endif()
if(CUDA_VERSION VERSION_GREATER_EQUAL "11.1")
# GeForce RTX 30xx
list(APPEND CMAKE_CUDA_ARCHITECTURES 86-real)
endif()
if(CUDA_VERSION VERSION_GREATER_EQUAL "11.0")
list(APPEND CMAKE_CUDA_ARCHITECTURES 80-virtual)
elseif(CUDA_VERSION VERSION_GREATER_EQUAL "10.0")
list(APPEND CMAKE_CUDA_ARCHITECTURES 75-virtual)
else()
list(APPEND CMAKE_CUDA_ARCHITECTURES 70-virtual)
endif()
endif()
enable_language(CUDA)
include_directories(${CMAKE_CUDA_TOOLKIT_INCLUDE_DIRECTORIES})
message(STATUS "CMAKE_CUDA_ARCHITECTURES: ${CMAKE_CUDA_ARCHITECTURES}")
set(CUDA_SEPARABLE_COMPILATION OFF)
if("${CMAKE_CUDA_COMPILER_ID}" STREQUAL "NVIDIA")
if(CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER_EQUAL "11.2")
set(CUDA_NVCC_THREADS_NUMBER "4" CACHE STRING "")
list(APPEND CUDA_NVCC_FLAGS -t ${CUDA_NVCC_THREADS_NUMBER})
endif()
message(STATUS "CUDA_NVCC_FLAGS: " ${CUDA_NVCC_FLAGS})
list(JOIN CUDA_NVCC_FLAGS " " CMAKE_CUDA_FLAGS)
endif()
endif()
message(STATUS "CMAKE_CXX_COMPILER_VERSION: " ${CMAKE_CXX_COMPILER_VERSION})
add_custom_target(oneflow_deps ALL DEPENDS prepare_oneflow_third_party)
# skip oneflow cmake to avoid errors caused by the absence of python-dev and proto src
if(ONEFLOW)
include(oneflow)
endif()
add_subdirectory(ci)
Copyright 2020 The OneFlow Authors. All rights reserved.
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
# OneFlow
**OneFlow is a performance-centered and open-source deep learning framework.**
[![Simple CI](https://github.com/Oneflow-Inc/oneflow/actions/workflows/simple.yml/badge.svg)](https://github.com/Oneflow-Inc/oneflow/actions/workflows/simple.yml)
[![Nightly Docker Image](https://github.com/Oneflow-Inc/docker-images/actions/workflows/oneflow-nightly.yml/badge.svg)](https://github.com/Oneflow-Inc/docker-images/actions/workflows/oneflow-nightly.yml)
[![Nightly Release](https://github.com/Oneflow-Inc/oneflow/actions/workflows/release.yml/badge.svg)](https://github.com/Oneflow-Inc/oneflow/actions/workflows/release.yml)
[![Documentation](https://readthedocs.org/projects/oneflow/badge/?version=master)](https://oneflow.readthedocs.io/en/master/?badge=master)
## Latest News
- Version 0.7.0 is out!
- Introducing global tensor
- Semi-auto parallelization has landed
- [Full changelog](https://github.com/Oneflow-Inc/oneflow/releases/tag/v0.7.0)
## Publication
- [OneFlow: Redesign the Distributed Deep Learning Framework from Scratch](https://arxiv.org/abs/2110.15032)
- Bibtex Citation
```
@misc{yuan2021oneflow,
title={OneFlow: Redesign the Distributed Deep Learning Framework from Scratch},
author={Jinhui Yuan and Xinqi Li and Cheng Cheng and Juncheng Liu and Ran Guo and Shenghang Cai and Chi Yao and Fei Yang and Xiaodong Yi and Chuan Wu and Haoran Zhang and Jie Zhao},
year={2021},
eprint={2110.15032},
archivePrefix={arXiv},
primaryClass={cs.DC}
}
```
## Install OneFlow
### System Requirements
- Linux. As of now, there are no pre-built releases for macOS or Windows.
- Python 3.6, 3.7, 3.8, 3.9, 3.10
- (**Highly recommended**) Upgrade pip
```
python3 -m pip install --upgrade pip #--user
```
- CUDA Toolkit Linux x86_64 Driver
- CUDA runtime is statically linked into OneFlow. OneFlow will work with the minimum supported driver and any newer driver. For more information, please refer to [CUDA compatibility documentation](https://docs.nvidia.com/deploy/cuda-compatibility/index.html).
- Please upgrade your Nvidia driver to version 440.33 or above and install OneFlow for CUDA 10.2 if possible.
### Install with Pip Package
- To install the latest stable release of OneFlow with CUDA support:
```bash
python3 -m pip install -f https://release.oneflow.info oneflow==0.7.0+cu102
```
- To install the nightly release of OneFlow with CUDA support:
```bash
python3 -m pip install --pre oneflow -f https://staging.oneflow.info/branch/master/cu102
```
- To install other available builds for different variants:
- Stable
```bash
python3 -m pip install --find-links https://release.oneflow.info oneflow==0.7.0+[PLATFORM]
```
- Nightly
```
python3 -m pip install --pre oneflow -f https://staging.oneflow.info/branch/master/[PLATFORM]
```
- All available `[PLATFORM]`:
| Platform |CUDA Driver Version| Supported GPUs |
|---|---|---|
| cu112 | >= 450.80.02 | GTX 10xx, RTX 20xx, A100, RTX 30xx |
| cu102 | >= 440.33 | GTX 10xx, RTX 20xx |
| cpu | N/A | N/A |
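- To confirm which variant was installed, a minimal check (a sketch only; it assumes the installed version string carries the platform suffix, e.g. `+cu102`):
```
import oneflow
print(oneflow.__version__)  # e.g. "0.7.0+cu102" for the CUDA 10.2 build
```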
- If you are in China, you can run the following to have pip download packages from a domestic PyPI mirror:
```
python3 -m pip config set global.index-url https://pypi.tuna.tsinghua.edu.cn/simple
```
For more information, please refer to [PyPI mirror usage help (Tsinghua TUNA, in Chinese)](https://mirror.tuna.tsinghua.edu.cn/help/pypi/).
### Use docker image
```
docker pull oneflowinc/oneflow:nightly-cuda10.2
docker pull oneflowinc/oneflow:nightly-cuda11.2
```
### Build from Source
<details>
<summary>Clone Source Code</summary>
- #### Option 1: Clone source code from GitHub
```bash
git clone https://github.com/Oneflow-Inc/oneflow --depth=1
```
- #### Option 2: Download from Aliyun
If you are in China, please download OneFlow source code from: https://oneflow-public.oss-cn-beijing.aliyuncs.com/oneflow-src.zip
```bash
curl https://oneflow-public.oss-cn-beijing.aliyuncs.com/oneflow-src.zip -o oneflow-src.zip
unzip oneflow-src.zip
```
</details>
<details>
<summary>Build OneFlow</summary>
- #### Option 1: Build with Conda (recommended)
Please refer to [this repo](https://github.com/Oneflow-Inc/conda-env)
- #### Option 2: Build in docker container (recommended)
- Pull the docker image:
```bash
docker pull oneflowinc/manylinux2014_x86_64_cuda11.2
```
- Follow the instructions in the bare metal build guide below.
- #### Option 3: Build on bare metal
- Install dependencies (not required if you are using docker):
- on Ubuntu 20.04, run:
```
sudo apt install -y libopenblas-dev nasm g++ gcc python3-pip cmake autoconf libtool
```
- on macOS, run:
```
brew install nasm
```
- In the root directory of OneFlow source code, run:
```
mkdir build
cd build
```
- Configure the project. Inside the `build` directory:
- If you are in China
run this to configure for CUDA:
```
cmake .. -C ../cmake/caches/cn/cuda.cmake
```
run this to configure for CPU-only:
```
cmake .. -C ../cmake/caches/cn/cpu.cmake
```
- If you are not in China
run this to configure for CUDA:
```
cmake .. -C ../cmake/caches/international/cuda.cmake
```
run this to configure for CPU-only:
```
cmake .. -C ../cmake/caches/international/cpu.cmake
```
- Build the project. Inside the `build` directory, run:
```
make -j$(nproc)
```
- Add oneflow to your PYTHONPATH. Inside the `build` directory, run:
```
source source.sh
```
Please note that this change is not permanent.
- Simple validation
```
python3 -m oneflow --doctor
```
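- For a slightly fuller smoke test, a minimal sketch (assumes the standard tensor API such as `oneflow.ones` is available in your build):
```
import oneflow as flow
print(flow.__version__)
# run one tiny op to confirm the runtime actually executes
print(flow.ones(2, 3).sum())
```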
</details>
### Troubleshooting
Please refer to [troubleshooting](docs/source/troubleshooting.md) for common issues you might encounter when compiling and running OneFlow.
### Advanced features
- [OneFlow-XRT](https://github.com/Oneflow-Inc/oneflow-xrt): an extension for OneFlow that targets third-party compilers such as XLA, TensorRT, and OpenVINO.
## Getting Started
- Please refer to [QUICKSTART](https://docs.oneflow.org/en/master/basics/01_quickstart.html)
- For the Chinese version, please see [快速上手 (Quick Start)](https://docs.oneflow.org/master/basics/01_quickstart.html)
## Documentation
- [API Reference](https://oneflow.readthedocs.io/en/master/)
- [Usage & Design Docs](http://docs.oneflow.org/)
- [System Design](https://docs.oneflow.org/en/v0.4.0/basics_topics/essentials_of_oneflow.html)
## Model Zoo and Benchmark
- [Libai (Toolbox for Parallel Training Large-Scale Transformer Models)](https://github.com/Oneflow-Inc/libai)
- [BERT-large](https://libai.readthedocs.io/en/latest/tutorials/get_started/quick_run.html)
- [GPT](https://libai.readthedocs.io/en/latest/modules/libai.models.html#id5)
- [T5](https://libai.readthedocs.io/en/latest/modules/libai.models.html#id4)
- [VisionTransformer](https://libai.readthedocs.io/en/latest/modules/libai.models.html#id1)
- [SwinTransformer](https://libai.readthedocs.io/en/latest/modules/libai.models.html#id2)
- [FlowVision (Toolbox for Computer Vision Datasets, SOTA Models and Utils)](https://github.com/Oneflow-Inc/vision)
- [OneFlow-Models (Examples of How to Implement Models in Various Fields with OneFlow)](https://github.com/Oneflow-Inc/models)
- [ResNet-50](https://github.com/Oneflow-Inc/models/tree/main/Vision/classification/image/resnet50)
- [Wide&Deep](https://github.com/Oneflow-Inc/models/tree/main/RecommenderSystems/wide_and_deep)
- [OneFlow-Benchmark (Outdated)](https://github.com/Oneflow-Inc/OneFlow-Benchmark)
## Communication
- [GitHub issues](https://github.com/Oneflow-Inc/oneflow/issues): any install, bug, or feature issues.
- [www.oneflow.org](http://www.oneflow.org): brand-related information.
- ### Chinese
- QQ group: 331883
- WeChat ID (add as a friend to join the discussion group): OneFlowXZS
- [Zhihu](https://www.zhihu.com/org/oneflow-17)
- ### International
- [Discord](https://discord.gg/4kpjGA5bZY)
- [Twitter](https://twitter.com/OneFlowNews)
- [LinkedIn](https://www.linkedin.com/company/oneflow-inc)
- [Medium](https://oneflow2020.medium.com)
## The Team
OneFlow was originally developed by [OneFlow Inc](http://www.oneflow.org) and [Zhejiang Lab](http://www.zhejianglab.com/).
## License
[Apache License 2.0](LICENSE)
# Monkey patch auditwheel's policy whitelist so that ROCm runtime libraries are not bundled into the pypi wheels
import sys
from auditwheel.main import main
from auditwheel.policy import _POLICIES as POLICIES
# these ROCm runtime libraries are loaded dynamically from the system installation; do not include them
for p in POLICIES:
p['lib_whitelist'].append('librccl.so.1')
p['lib_whitelist'].append('libhipblas.so.0')
p['lib_whitelist'].append('libhiprand.so.1')
p['lib_whitelist'].append('librocrand.so.1')
p['lib_whitelist'].append('libMIOpen.so.1')
p['lib_whitelist'].append('libgalaxyhip.so.4')
p['lib_whitelist'].append('librocm_smi64.so.2')
p['lib_whitelist'].append('librocsolver.so.0')
p['lib_whitelist'].append('librocblas.so.0')
if __name__ == "__main__":
sys.exit(main())
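# Illustrative invocation of this wrapper (the path "ci/fix_wheel.py" is hypothetical; adjust it
# to wherever this file lives). Arguments are forwarded to auditwheel's main(), so the usual
# auditwheel CLI applies, e.g.:
#   python3 ci/fix_wheel.py repair --wheel-dir wheelhouse dist/oneflow-*.whl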
add_subdirectory(test)
import os
import argparse
from pathlib import Path
import re
import json
import subprocess
def check_and_download(tag, url):
img_dir = os.path.join(os.path.expanduser("~"), "imgs")
if not os.path.exists(img_dir):
os.makedirs(img_dir)
returncode = subprocess.run(
f"docker image inspect {tag}",
shell=True,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL,
).returncode
if returncode == 0:
print("[OK]", tag)
else:
basename = os.path.basename(url)
dst = os.path.join(img_dir, basename)
subprocess.check_call(f"wget -c {url} -O {dst}", shell=True)
subprocess.check_call(f"docker load -i {dst}", shell=True)
base = os.path.basename(dst)
base = os.path.splitext(base)[0]
base = os.path.splitext(base)[0]
keep_tag = f"ofkeep:{base}"
subprocess.check_call(f"docker tag {tag} {keep_tag}", shell=True)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--create_index", action="store_true", required=False, default=False
)
args = parser.parse_args()
imgs = [
{
"tag": "nvidia/cuda:10.0-cudnn7-devel-centos7",
"url": "https://oneflow-static.oss-cn-beijing.aliyuncs.com/img/nvidiacuda10.0-cudnn7-devel-centos7.tar.gz",
},
{
"tag": "nvidia/cuda:10.1-cudnn7-devel-centos7",
"url": "https://oneflow-static.oss-cn-beijing.aliyuncs.com/img/nvidiacuda10.1-cudnn7-devel-centos7.tar.gz",
},
{
"tag": "nvidia/cuda:10.2-cudnn7-devel-centos7",
"url": "https://oneflow-static.oss-cn-beijing.aliyuncs.com/img/nvidiacuda10.2-cudnn7-devel-centos7.tar.gz",
},
{
"tag": "nvidia/cuda:11.0-cudnn8-devel-centos7",
"url": "https://oneflow-static.oss-cn-beijing.aliyuncs.com/img/nvidiacuda11.0-cudnn8-devel-centos7.tar.gz",
},
{
"tag": "nvidia/cuda:11.1-cudnn8-devel-centos7",
"url": "https://oneflow-static.oss-cn-beijing.aliyuncs.com/img/nvidiacuda11.1-cudnn8-devel-centos7.tar.gz",
},
]
for img in imgs:
check_and_download(img["tag"], img["url"])
set -ex
src_dir=${ONEFLOW_SRC_DIR:-"$PWD"}
tmp_dir=${ONEFLOW_CI_TMP_DIR:-"$HOME/ci-tmp"}
extra_oneflow_cmake_args=${ONEFLOW_CI_EXTRA_ONEFLOW_CMAKE_ARGS:-""}
package_suffix=${ONEFLOW_CI_PACKAGE_SUFFIX:-""}
cuda_version=${ONEFLOW_CI_CUDA_VERSION:-"10.2"}
python_version_args=${ONEFLOW_CI_PYTHON_VERSION_ARGS:-"--python3.6"}
build_wheel_bash_args=${ONEFLOW_CI_BUILD_WHEEL_BASH_ARGS:-"-l"}
mkdir -p $tmp_dir
docker_tag=${ONEFLOW_CI_DOCKER_TAG:-"oneflow:ci-manylinux2014-cuda10.2"}
docker_proxy_build_args=""
docker_proxy_build_args+="--build-arg http_proxy=${ONEFLOW_CI_HTTP_PROXY} --build-arg https_proxy=${ONEFLOW_CI_HTTPS_PROXY}"
docker_proxy_run_args=""
docker_proxy_run_args+="--env http_proxy=${ONEFLOW_CI_HTTP_PROXY} --env https_proxy=${ONEFLOW_CI_HTTPS_PROXY}"
docker_it=""
if [[ -t 1 ]]; then
docker_it="-it"
fi
# build manylinux image
cd $src_dir
docker build -f $src_dir/docker/package/manylinux/Dockerfile \
--build-arg from=nvidia/cuda:${cuda_version}-cudnn7-devel-centos7 \
$docker_proxy_build_args -t $docker_tag .
cd -
# build function
function build() {
set -x
docker run --rm \
-v $tmp_dir:/ci-tmp \
-w /ci-tmp busybox rm -rf /ci-tmp/wheelhouse
docker run \
$docker_proxy_run_args \
--rm $docker_it \
-v $src_dir:/oneflow-src \
-v $tmp_dir:/ci-tmp \
-w /ci-tmp \
"$docker_tag" \
bash ${build_wheel_bash_args} /oneflow-src/docker/package/manylinux/build_wheel.sh \
${python_version_args} \
--house-dir /ci-tmp/wheelhouse \
--package-name oneflow${package_suffix} \
$extra_oneflow_cmake_args
}
set +e
# reuse cache
build
cached_build_ret=$?
# clean cache and retry
set -e
if [ $cached_build_ret -ne 0 ] && [[ ! -t 1 ]]; then
echo "retry after cleaning build dir"
docker run --rm -v $tmp_dir:/ci-tmp busybox sh -c "rm -rf /ci-tmp/*"
build
fi
*,-maybe-glog-fatal,-clang-analyzer-alpha.*,-clang-analyzer-cplusplus.NewDelete,-clang-diagnostic-*
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import multiprocessing as mp
import os
from fnmatch import fnmatch
from subprocess import Popen
def chunk(seq, n):
"""
divide a sequence into equal sized chunks
(the last chunk may be smaller, but won't be empty)
"""
chunks = []
some = []
for element in seq:
if len(some) == n:
chunks.append(some)
some = []
some.append(element)
if len(some) > 0:
chunks.append(some)
return chunks
def dechunk(chunks):
"flatten chunks into a single list"
seq = []
for chunk in chunks:
seq.extend(chunk)
return seq
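# Illustrative example (not part of the original script): chunk/dechunk round-trip.
#   chunk([1, 2, 3, 4, 5], 2)           -> [[1, 2], [3, 4], [5]]
#   dechunk(chunk([1, 2, 3, 4, 5], 2))  -> [1, 2, 3, 4, 5]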
def run_parallel(cmds, **kwargs):
"""
Run each of cmds (with shared **kwargs) using subprocess.Popen
then wait for all of them to complete.
Runs batches of multiprocessing.cpu_count() * 2 commands from cmds.
Returns a list of tuples containing each process'
returncode, stdout, stderr.
"""
complete = []
for cmds_batch in chunk(cmds, mp.cpu_count() * 2):
procs_batch = [Popen(cmd, **kwargs) for cmd in cmds_batch]
for proc in procs_batch:
stdout, stderr = proc.communicate()
complete.append((proc.returncode, stdout, stderr))
return complete
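# Illustrative usage (assumes argv-style command lists, as accepted by Popen):
#   results = run_parallel([["clang-format", "--version"], ["clang-format", "-i", "a.cc"]])
#   failed = [ret for ret, _, _ in results if ret != 0]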
_source_extensions = """
.h
.cc
.cpp
.cu
.cuh
""".split()
def get_sources(source_dir, exclude_globs=[]):
sources = []
for directory, subdirs, basenames in os.walk(source_dir):
for path in [os.path.join(directory, basename) for basename in basenames]:
# filter out non-source files
if os.path.splitext(path)[1] not in _source_extensions:
continue
path = os.path.abspath(path)
# filter out files that match the globs in the globs file
if any([fnmatch(path, glob) for glob in exclude_globs]):
continue
sources.append(path)
return sources
def stdout_pathcolonline(completed_process, filenames):
"""
given a completed process which may have reported some files as problematic
by printing the path name followed by ':' then a line number, examine
stdout and return the set of actually reported file names
"""
returncode, stdout, stderr = completed_process
bfilenames = set()
for filename in filenames:
bfilenames.add(filename.encode("utf-8") + b":")
problem_files = set()
for line in stdout.splitlines():
for filename in bfilenames:
if line.startswith(filename):
problem_files.add(filename.decode("utf-8"))
bfilenames.remove(filename)
break
return problem_files, stdout
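# Illustrative example: if the process printed lines such as
#   b"/abs/path/foo.cc:42:10: error: ..."
# then stdout_pathcolonline((ret, stdout, stderr), ["/abs/path/foo.cc"])
# returns ({"/abs/path/foo.cc"}, stdout).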
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import asyncio
import argparse
import pathlib
import multiprocessing
import subprocess
import os
import platform
def split_and_print(prefix, text):
lines = text.decode().splitlines(keepends=True)
prefixed = ""
for l in lines:
prefixed += f"{prefix} {l.strip()}"
if l.strip():
print(prefixed, flush=True)
async def handle_stream(stream, cb):
while True:
line = await stream.readline()
if line:
cb(line)
else:
break
async def run_command(cmd=None, dry=False, name=None):
if dry:
print(f"[dry] {cmd}")
return 0
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE,
)
l = lambda x: split_and_print(f"[{name}]" if name else "", x)
# l = lambda x: x
await asyncio.gather(
handle_stream(process.stdout, l), handle_stream(process.stderr, l),
)
await process.wait()
return process.returncode
def chunks(lst, n):
"""Yield successive n-sized chunks from lst."""
for i in range(0, len(lst), n):
yield lst[i : i + n]
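# Illustrative example: list(chunks([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]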
def check_version(bin):
try:
out = subprocess.check_output(["bash", "-c", f"{bin} --version"]).decode()
print(out)
return "version 11.0.0" in out
except:
return False
def download(dry=False):
if platform.system() != "Linux":
raise ValueError("Please install clang format 11.0.0")
url = "https://oneflow-static.oss-cn-beijing.aliyuncs.com/bin/clang-format/linux-x86/clang-format-11"
if os.getenv("CI"):
url = "https://github.com/Oneflow-Inc/oneflow-fmt/raw/master/clang-format/linux-x86/clang-format-11"
dst_dir = ".cache/bin"
dst = f"{dst_dir}/clang-format"
if dry:
if os.path.isfile(dst):
return dst
else:
return None
else:
assert subprocess.call(f"mkdir -p {dst_dir}", shell=True) == 0
assert subprocess.call(f"curl -L {url} -o {dst}", shell=True) == 0
assert subprocess.call(f"chmod +x {dst}", shell=True) == 0
return dst
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Runs clang-format on all of the source "
"files. If --fix is specified enforce format by "
"modifying in place, otherwise compare the output "
"with the existing file and output any necessary "
"changes as a patch in unified diff format"
)
parser.add_argument(
"--clang_format_binary",
required=False,
help="Path to the clang-format binary.",
default="clang-format",
)
parser.add_argument(
"--source_dir", required=True, help="Root directory of the source code"
)
parser.add_argument(
"--fix",
default=False,
action="store_true",
help="If specified, will re-format the source "
"code instead of comparing the re-formatted "
"output, defaults to %(default)s",
)
parser.add_argument(
"--quiet",
default=False,
action="store_true",
help="If specified, only print errors",
)
args = parser.parse_args()
exts = [".h", ".cc", ".cpp", ".cu", ".cuh"]
files = filter(
lambda p: p.suffix in exts, pathlib.Path(args.source_dir).rglob("*"),
)
loop = asyncio.get_event_loop()
files = [str(f) for f in files]
clang_fmt_args = "-dry-run --Werror"
if args.fix:
clang_fmt_args = "-i"
results = []
if not check_version(args.clang_format_binary):
downloaded = download(dry=True)
if downloaded:
assert check_version(downloaded)
args.clang_format_binary = downloaded
else:
args.clang_format_binary = download()
assert check_version(args.clang_format_binary)
for chunk in chunks(files, multiprocessing.cpu_count() * 2):
promises = [
run_command(f"{args.clang_format_binary} {clang_fmt_args} {f}")
for f in chunk
]
chunk_results = loop.run_until_complete(asyncio.gather(*promises))
results.extend(chunk_results)
print(len(results), "files checked")
assert len(results) == len(files)
for (r, f) in zip(results, files):
if r != 0:
print("[fail]", f)
assert sum(results) == 0
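# Illustrative invocations (the path "ci/check/run_clang_format.py" is hypothetical; adjust to
# wherever this script lives in the tree):
#   python3 ci/check/run_clang_format.py --source_dir oneflow         # report format violations
#   python3 ci/check/run_clang_format.py --source_dir oneflow --fix   # reformat in place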
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import asyncio
import argparse
import subprocess
import os
from typing import List, Optional
from pathlib import Path
def split_and_print(prefix, text):
lines = text.decode().splitlines(keepends=True)
prefixed = ""
for l in lines:
prefixed += f"{prefix} {l.strip()}"
if l.strip():
print(prefixed, flush=True)
async def handle_stream(stream, cb):
while True:
line = await stream.readline()
if line:
cb(line)
else:
break
async def run_command(cmd=None, dry=False, name=None):
if dry:
print(f"[dry] {cmd}")
return 0
process = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE,
)
l = lambda x: split_and_print(f"[{name}]" if name else "", x)
await asyncio.gather(
handle_stream(process.stdout, l), handle_stream(process.stderr, l),
)
await process.wait()
return process.returncode
def download(build_dir, dry=False) -> Optional[List[str]]:
urls = [
"https://github.com/Oneflow-Inc/llvm-project/releases/download/llvmorg-13.0.0-maybe/clang-tidy-13.AppImage"
if os.getenv("CI")
else "https://oneflow-static.oss-cn-beijing.aliyuncs.com/bin/clang-tidy/linux-x86_64/clang-tidy-13.AppImage",
"https://raw.githubusercontent.com/oneflow-inc/llvm-project/maybe/clang-tools-extra/clang-tidy/tool/clang-tidy-diff.py",
]
dst_dir = f"{build_dir}/cache/bin"
dst = [f"{dst_dir}/clang-tidy", f"{dst_dir}/clang-tidy-diff.py"]
if dry:
if os.path.isfile(dst[0]) and os.path.isfile(dst[1]):
return dst
else:
return None
else:
assert subprocess.call(f"mkdir -p {dst_dir}", shell=True) == 0
for i, _dst in enumerate(dst):
assert subprocess.call(f"curl -L {urls[i]} -o {_dst}", shell=True) == 0
assert subprocess.call(f"chmod +x {_dst}", shell=True) == 0
return dst
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Runs clang-tidy on all of the source files."
)
parser.add_argument(
"--build_dir", required=True,
)
parser.add_argument(
"--check-error-msg", action="store_true", default=False,
)
args = parser.parse_args()
loop = asyncio.get_event_loop()
downloaded = download(args.build_dir, dry=True)
if downloaded is None:
downloaded = download(args.build_dir)
assert downloaded is not None
warnings_as_errors = (
(Path(__file__).parent / "clang_tidy_warnings_as_errors_on_diff")
.read_text()
.strip()
)
cmd = f"git diff -U0 master | {downloaded[1]} -clang-tidy-binary {downloaded[0]} -path {args.build_dir} -j $(nproc) -p1 -allow-enabling-alpha-checkers -extra-arg=-Xclang -extra-arg=-analyzer-config -extra-arg=-Xclang -extra-arg=aggressive-binary-operation-simplification=true"
if args.check_error_msg:
command = f" cd .. && {cmd} -warnings-as-errors='{warnings_as_errors}' && {cmd} -checks=-*,maybe-need-error-msg -warnings-as-errors=* -skip-line-filter"
else:
command = f"cd .. && {cmd} -warnings-as-errors='{warnings_as_errors}'"
ret_code = loop.run_until_complete(run_command(command))
exit(ret_code)
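# Illustrative invocation (the script path is hypothetical). --build_dir should point at a
# configured build directory containing compile_commands.json; the checks run only on the
# diff against master:
#   python3 ci/check/run_clang_tidy.py --build_dir build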
from subprocess import call
from argparse import ArgumentParser
from glob import glob
from pathlib import Path
from multiprocessing.pool import ThreadPool
from multiprocessing import cpu_count
if __name__ == "__main__":
parser = ArgumentParser(
description="Runs cmake-format on all of the cmake source files."
)
parser.add_argument(
"--bin", default="cmake-format", help="Path of cmake-format binary"
)
parser.add_argument(
"--fix", default=False, action="store_true", help="Format all sources in place"
)
parser.add_argument(
"--source_dir", default=".", help="Root directory of the source code"
)
parser.add_argument(
"-j",
"--jobs",
type=int,
default=cpu_count(),
help="Specifies the number of jobs (commands) to run simultaneously",
)
args = parser.parse_args()
patterns = [
"cmake/**/*.cmake",
"oneflow/**/*.cmake",
"oneflow/**/CMakeLists.txt",
"tools/**/*.cmake",
"tools/**/CMakeLists.txt",
"CMakeLists.txt",
]
files = []
for pattern in patterns:
files.extend(glob(str(Path(args.source_dir) / pattern), recursive=True))
def gen_cmd(file):
cmd = [args.bin, file]
cmd.append("-i" if args.fix else "--check")
return cmd
tp = ThreadPool(args.jobs)
res = tp.map_async(call, [gen_cmd(file) for file in files])
tp.close()
tp.join()
count = sum(map(lambda x: 0 if x == 0 else 1, res.get()))
total = len(files)
if args.fix:
print(f"cmake-format -i done. {total} total")
else:
print(f"cmake-format --check done. {count} failed / {total} total")
exit(0 if count == 0 else 1)
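# Illustrative invocations (the script path is hypothetical):
#   python3 ci/check/run_cmake_format.py --source_dir .          # check only
#   python3 ci/check/run_cmake_format.py --source_dir . --fix    # rewrite in place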
import argparse
import os
import glob
from multiprocessing import Pool
LICENSE_TXT = """Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
CPP_TXT = "/*\n{}*/\n".format(LICENSE_TXT)
PY_TXT = '"""\n{}"""\n'.format(LICENSE_TXT)
def get_txt(path: str):
if path.endswith((".cpp", ".h", ".hpp", ".cu", ".cuh")):
return CPP_TXT
elif path.endswith((".py")):
return PY_TXT
else:
return None
def check_file(path):
with open(path, "r", encoding="utf-8") as f:
content = f.read()
txt = get_txt(path)
if (
"import doctest" in content
and "raise_on_error=True" not in content
and "doctest.DebugRunner" not in content
):
return ("please add 'doctest.testmod(raise_on_error=True)'", content)
elif content.count("The OneFlow Authors. All rights reserved.") > 1:
return ("license_duplicated", content)
elif content.startswith(txt) or (not content):
return ("ok", content)
else:
return ("license_absent", content)
def format_file(path):
txt = get_txt(path)
with open(path, "r", encoding="utf-8") as r:
content = r.read()
format_status, content = check_file(path)
if format_status == "ok":
return True
elif format_status == "license_absent":
with open(path, "w") as w:
new_content = txt + content
w.write(new_content)
return False
else:
raise ValueError(f"{format_status} {path}")
def do_check(x):
format_status, _ = check_file(x)
return (x, format_status)
def do_format(x):
return (x, format_file(x))
def glob_files(path: str = None, excludes=None):
files = []
for ext in ("**/*.cpp", "**/*.h", "**/*.hpp", "**/*.cu", "**/*.cuh", "**/*.py"):
joined = os.path.join(path, ext)
files.extend(glob.glob(joined, recursive=True))
files = [
f
for f in files
if "version.py" not in f and all([not e in f for e in excludes])
]
print("[files]", len(files))
return files
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--root_path", type=str, required=True)
parser.add_argument(
"-v", "--verbose", default=False, action="store_true", required=False
)
parser.add_argument("--silent", default=False, action="store_true", required=False)
parser.add_argument(
"-c", "--check", default=False, action="store_true", required=False
)
parser.add_argument(
"-f", "--fix", default=False, action="store_true", required=False
)
parser.add_argument("--exclude", action="append", default=[])
args = parser.parse_args()
files = glob_files(args.root_path, excludes=args.exclude)
assert args.check != args.fix
with Pool(10) as p:
if args.check:
any_absence = False
for (p, format_status) in p.map(do_check, files):
if format_status != "ok":
print(f"{format_status}:", p)
any_absence = True
if any_absence:
exit(1)
if args.fix:
for (p, format_result) in p.map(do_format, files):
if format_result:
if args.verbose:
print("license already added:", p)
else:
if not args.silent:
print("license just added:", p)
import argparse
import sys
import platform
from subprocess import Popen
import os
if __name__ == "__main__":
major = platform.sys.version_info.major
minor = platform.sys.version_info.minor
if major == 3 and minor < 6:
print("WARNING: python >= 3.6 required, python source format won't run")
exit(0)
parser = argparse.ArgumentParser(
description="Runs py-format on all of the source files."
"If --fix is specified enforce format by modifying in place."
)
parser.add_argument(
"--source_dir", required=True, help="Root directory of the source code"
)
parser.add_argument(
"--fix",
default=False,
action="store_true",
help="If specified, will re-format the source",
)
arguments = parser.parse_args()
os.chdir(arguments.source_dir)
version_cmd = sys.executable + " -m {} --version | grep {} > /dev/null"
BLACK_VER = "19.10b0"
if os.system(version_cmd.format("black", BLACK_VER)):
print(
f"Please install black {BLACK_VER}. For instance, run 'python3 -m pip install black=={BLACK_VER} --user'"
)
sys.exit(1)
cmd_line = sys.executable + " -m black " + "."
if not arguments.fix:
cmd_line += " --check"
if os.system(cmd_line):
sys.exit(1)
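# Illustrative invocations (the script path is hypothetical; black 19.10b0 must be installed,
# as checked above):
#   python3 ci/check/run_py_format.py --source_dir .          # check only
#   python3 ci/check/run_py_format.py --source_dir . --fix    # reformat in place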
set -ex
export PATH=/usr/lib/llvm-12/bin:/usr/lib/llvm-13/bin:/usr/lib64/ccache:/root/.local/bin:$PATH
# clean python dir
cd ${ONEFLOW_CI_SRC_DIR}
${ONEFLOW_CI_PYTHON_EXE} -m pip install -i https://mirrors.aliyun.com/pypi/simple --user -r ci/fixed-dev-requirements.txt
cd python
git clean -nXd -e \!dist -e \!dist/**
git clean -fXd -e \!dist -e \!dist/**
# cmake config
mkdir -p ${ONEFLOW_CI_BUILD_DIR}
cd ${ONEFLOW_CI_BUILD_DIR}
find ${ONEFLOW_CI_BUILD_DIR} -name CMakeCache.txt
find ${ONEFLOW_CI_BUILD_DIR} -name CMakeCache.txt -delete
if [ ! -f "$ONEFLOW_CI_CMAKE_INIT_CACHE" ]; then
echo "$ONEFLOW_CI_CMAKE_INIT_CACHE does not exist."
exit 1
fi
cmake -S ${ONEFLOW_CI_SRC_DIR} -C ${ONEFLOW_CI_CMAKE_INIT_CACHE} -DPython3_EXECUTABLE=${ONEFLOW_CI_PYTHON_EXE}
# cmake build
cd ${ONEFLOW_CI_BUILD_DIR}
cmake --build . -j $(nproc)
# build pip
cd ${ONEFLOW_CI_SRC_DIR}
cd python
${ONEFLOW_CI_PYTHON_EXE} setup.py bdist_wheel
set -ex
conda activate oneflow-dev-clang10-v2
mkdir -p build
cd build
cmake .. -C ../cmake/caches/cn/fast/cpu-clang.cmake
cmake --build . -j $(nproc)
cd -
cd python
python setup.py bdist_wheel
echo "wheelhouse_dir=$PWD/dist" >> $GITHUB_ENV
channels:
- defaults
show_channel_urls: true
default_channels:
- https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/main
- https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/r
- https://mirrors.tuna.tsinghua.edu.cn/anaconda/pkgs/msys2
custom_channels:
conda-forge: https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud
msys2: https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud
bioconda: https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud
menpo: https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud
pytorch: https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud
simpleitk: https://mirrors.tuna.tsinghua.edu.cn/anaconda/cloud
numpy==1.22.1 ; python_version >= "3.10"
numpy==1.20.0 ; python_version >= "3.9" and python_version < "3.10"
numpy==1.18.0 ; python_version >= "3.8" and python_version < "3.9"
numpy==1.17.0 ; python_version >= "3.6" and python_version < "3.8"
source scl_source enable devtoolset-7
set -ex
ONEFLOW_CI_BUILD_PARALLEL=${ONEFLOW_CI_BUILD_PARALLEL:-$(nproc)}
gcc --version
ld --version
# clean python dir
cd ${ONEFLOW_CI_SRC_DIR}
${ONEFLOW_CI_PYTHON_EXE} -m pip install -i https://mirrors.aliyun.com/pypi/simple --user -r ci/fixed-dev-requirements.txt
cd python
git clean -nXd -e \!dist -e \!dist/**
git clean -fXd -e \!dist -e \!dist/**
# cmake config
mkdir -p ${ONEFLOW_CI_BUILD_DIR}
cd ${ONEFLOW_CI_BUILD_DIR}
find ${ONEFLOW_CI_BUILD_DIR} -name CMakeCache.txt
find ${ONEFLOW_CI_BUILD_DIR} -name CMakeCache.txt -delete
if [ ! -f "$ONEFLOW_CI_CMAKE_INIT_CACHE" ]; then
echo "$ONEFLOW_CI_CMAKE_INIT_CACHE does not exist."
exit 1
fi
export PATH="${PATH}:$(dirname ${ONEFLOW_CI_PYTHON_EXE})"
export PYTHON_BIN_PATH=${ONEFLOW_CI_PYTHON_EXE}
cmake -S ${ONEFLOW_CI_SRC_DIR} -C ${ONEFLOW_CI_CMAKE_INIT_CACHE} -DPython3_EXECUTABLE=${ONEFLOW_CI_PYTHON_EXE}
# cmake build
cd ${ONEFLOW_CI_BUILD_DIR}
cmake --build . --parallel ${ONEFLOW_CI_BUILD_PARALLEL}
# build pip
cd ${ONEFLOW_CI_SRC_DIR}
cd python
${ONEFLOW_CI_PYTHON_EXE} setup.py bdist_wheel