"GRUB2/MOD_SRC/vscode:/vscode.git/clone" did not exist on "1c0b4ee90324bb38369d4d4ad59d793bbeff8fc7"
Commit 1fb0017a authored by dugupeiwen's avatar dugupeiwen
Browse files

init 0.58

parents
#!/bin/bash
# CI environment-setup script: configures conda, creates the test environment
# named $CONDA_ENV and installs the build/test dependencies.
# -v echoes each line as it is read; -e aborts on the first failing command.
set -v -e
# first configure conda to have more tolerance of network problems, these
# numbers are not scientifically chosen, just merely larger than defaults
conda config --write-default
conda config --set remote_connect_timeout_secs 30.15
conda config --set remote_max_retries 10
conda config --set remote_read_timeout_secs 120.2
conda config --set show_channel_urls true
# Dump conda state into the CI log for debugging
conda info
conda config --show
# Command prefixes reused throughout this script
CONDA_INSTALL="conda install -q -y"
PIP_INSTALL="pip install -q"
EXTRA_CHANNELS=""
if [ "${USE_C3I_TEST_CHANNEL}" == "yes" ]; then
    EXTRA_CHANNELS="${EXTRA_CHANNELS} -c c3i_test"
fi
# Deactivate any environment
source deactivate
# Display root environment (for debugging)
conda list
# If VANILLA_INSTALL is yes, then only Python, NumPy and pip are installed, this
# is to catch tests/code paths that require an optional package and are not
# guarding against the possibility that it does not exist in the environment.
# Create a base env first and then add to it...
# NOTE: gitpython is needed for CI testing to do the test slicing
# NOTE: pyyaml is used to ensure that the Azure CI config is valid
conda create -n $CONDA_ENV -q -y ${EXTRA_CHANNELS} python=$PYTHON numpy=$NUMPY pip gitpython pyyaml
# Activate first
set +v
source activate $CONDA_ENV
set -v
# Install optional packages into activated env
echo "PYTHON=$PYTHON"
echo "VANILLA_INSTALL=$VANILLA_INSTALL"
if [ "${VANILLA_INSTALL}" != "yes" ]; then
    # Scipy, CFFI, jinja2, IPython and pygments are optional
    # dependencies, but exercised in the test suite
    # pexpect is used to run the gdb tests.
    # ipykernel is used for testing ipython behaviours.
    $CONDA_INSTALL ${EXTRA_CHANNELS} cffi jinja2 ipython ipykernel scipy pygments pexpect
fi
# Install the compiler toolchain and gdb (if available)
if [[ $(uname) == Linux ]]; then
    # NOTE(review): 'gdb-pretty-printer' is not a well-known conda package
    # name -- confirm it actually resolves on the configured channels.
    $CONDA_INSTALL gcc_linux-64 gxx_linux-64 gdb gdb-pretty-printer
elif [[ $(uname) == Darwin ]]; then
    $CONDA_INSTALL clang_osx-64 clangxx_osx-64
    # Install llvm-openmp on OSX for headers during build and runtime during
    # testing
    $CONDA_INSTALL llvm-openmp
fi
# Install latest correct build (llvmlite pinned to the matching release line)
$CONDA_INSTALL -c numba/label/dev llvmlite=0.41
if [ $PYTHON \< "3.9" ]; then $CONDA_INSTALL importlib_metadata; fi
# Install dependencies for building the documentation
if [ "$BUILD_DOC" == "yes" ]; then $CONDA_INSTALL sphinx docutils sphinx_rtd_theme pygments numpydoc; fi
if [ "$BUILD_DOC" == "yes" ]; then $PIP_INSTALL rstcheck; fi
# Install dependencies for code coverage
if [ "$RUN_COVERAGE" == "yes" ]; then $CONDA_INSTALL coverage; fi
# Install SVML (Intel short-vector math library runtime, via the numba channel)
if [ "$TEST_SVML" == "yes" ]; then $CONDA_INSTALL -c numba icc_rt; fi
# Install Intel TBB parallel backend
if [ "$TEST_THREADING" == "tbb" ]; then $CONDA_INSTALL "tbb>=2021.6" "tbb-devel>=2021.6"; fi
# Install typeguard (pinned version used for run-time type checking in tests)
if [ "$RUN_TYPEGUARD" == "yes" ]; then $CONDA_INSTALL "conda-forge::typeguard==3.0.1"; fi
# Install RVSDG (experimental frontend dependency)
if [ "$TEST_RVSDG" == "yes" ]; then $PIP_INSTALL numba-rvsdg; fi
# environment dump for debug
# echo "DEBUG ENV:"
# echo "-------------------------------------------------------------------------"
# conda env export
# echo "-------------------------------------------------------------------------"
@rem Windows CI test driver: activates the conda test env, optionally builds
@rem the docs, sets Numba debug env vars and runs the test suite.
call activate %CONDA_ENV%
@rem Ensure that the documentation builds without warnings
if "%BUILD_DOC%" == "yes" python setup.py build_doc
@rem Run system info tool
pushd bin
numba -s
popd
@rem switch off color messages
set NUMBA_DISABLE_ERROR_MESSAGE_HIGHLIGHTING=1
@rem switch on developer mode
set NUMBA_DEVELOPER_MODE=1
@rem enable the faulthandler
set PYTHONFAULTHANDLER=1
@rem enable new style error handling
set NUMBA_CAPTURED_ERRORS=new_style
@rem First check that the test discovery works
python -m numba.tests.test_runtests
@rem Now run the Numba test suite
@rem Note that coverage is run from the checkout dir to match the "source"
@rem directive in .coveragerc
if "%RUN_COVERAGE%" == "yes" (
    set PYTHONPATH=.
    coverage erase
    coverage run runtests.py -b --exclude-tags='long_running' -m -- numba.tests
) else (
    set NUMBA_ENABLE_CUDASIM=1
    python -m numba.runtests -b --exclude-tags='long_running' -m -- numba.tests
)
@rem Propagate a test-suite failure as this script's exit code
if %errorlevel% neq 0 exit /b %errorlevel%
#!/bin/bash
# CI test-phase script (Linux/macOS): prepares the activated environment and
# runs documentation checks before the test suite below.
source activate $CONDA_ENV
# Make sure any error below is reported as such
set -v -e
# If the build is a "Vanilla" variant, then remove the setuptools package. It
# was installed at build time for setup.py to use, but is an _optional_ runtime
# dependency of Numba and therefore shouldn't be present in "Vanilla" testing.
# This package is "force" removed so that its removal doesn't uninstall
# things that might depend on it (the dependencies are present but are not of
# interest to Numba as those code paths are not used by Numba).
if [ "${VANILLA_INSTALL}" == "yes" ]; then
    conda remove --force -y setuptools
fi
# Ensure the README is correctly formatted
if [ "$BUILD_DOC" == "yes" ]; then rstcheck README.rst; fi
# Ensure that the documentation builds without warnings
pushd docs
if [ "$BUILD_DOC" == "yes" ]; then make SPHINXOPTS=-W clean html; fi
popd
# Run system and gdb info tools
if [ "${VANILLA_INSTALL}" == "yes" ]; then
    # Vanilla install has no access to pkg_resources as setuptools is removed,
    # so run these via their modules.
    python -m numba -s
    python -m numba -g
else
    pushd bin
    numba -s
    numba -g
    popd
fi
# switch off color messages
export NUMBA_DISABLE_ERROR_MESSAGE_HIGHLIGHTING=1
# switch on developer mode
export NUMBA_DEVELOPER_MODE=1
# enable the fault handler
export PYTHONFAULTHANDLER=1
# enable new style error handling
export NUMBA_CAPTURED_ERRORS="new_style"
# Disable NumPy dispatching to AVX512_SKX feature extensions if the chip is
# reported to support the feature and NumPy >= 1.22 as this results in the use
# of low accuracy SVML libm replacements in ufunc loops.
# NOTE(review): the `>="1.22"` below is a lexicographic string comparison; it
# happens to order the 1.2x/2.x versions in use correctly but would misorder
# e.g. "1.9" -- confirm against the supported NumPy range. The trailing
# backslashes are literal inside the single quotes and act as Python line
# continuations when the snippet is executed.
_NPY_CMD='from numba.misc import numba_sysinfo;\
sysinfo=numba_sysinfo.get_sysinfo();\
print(sysinfo["NumPy AVX512_SKX detected"] and
sysinfo["NumPy Version"]>="1.22")'
NUMPY_DETECTS_AVX512_SKX_NP_GT_122=$(python -c "$_NPY_CMD")
echo "NumPy >= 1.22 with AVX512_SKX detected: $NUMPY_DETECTS_AVX512_SKX_NP_GT_122"
if [[ "$NUMPY_DETECTS_AVX512_SKX_NP_GT_122" == "True" ]]; then
    export NPY_DISABLE_CPU_FEATURES="AVX512_SKX"
fi
# deal with threading layers
if [ -z ${TEST_THREADING+x} ]; then
    echo "INFO: Threading layer not explicitly set."
else
    # Only accept the three supported layer names; anything else is a CI
    # configuration error and fails the job immediately.
    case "${TEST_THREADING}" in "workqueue"|"omp"|"tbb")
        export NUMBA_THREADING_LAYER="$TEST_THREADING"
        echo "INFO: Threading layer set as: $TEST_THREADING"
        ;;
    *)
        echo "INFO: Threading layer explicitly set to bad value: $TEST_THREADING."
        exit 1
        ;;
    esac
fi
# If TEST_THREADING is set in the env, then check that Numba agrees that the
# environment can support the requested threading.
# check_sysinfo LAYER
#   LAYER is the human-readable layer name ("TBB", "OpenMP", "Workqueue") as
#   it appears in numba's sysinfo keys; asserts the layer is usable in the
#   current environment, exiting non-zero via the failed assert otherwise.
#   (The backslash-continued lines below are inside a double-quoted string
#   and must stay unindented to keep the embedded Python source intact.)
function check_sysinfo() {
    cmd="import os;\
from numba.misc.numba_sysinfo import get_sysinfo;\
assert get_sysinfo()['$1 Threading'] is True, 'Threading layer $1 '\
'is not supported';\
print('Threading layer $1 is supported')"
    python -c "$cmd"
}
if [[ "$TEST_THREADING" ]]; then
    if [[ "$TEST_THREADING" == "tbb" ]]; then
        check_sysinfo "TBB"
    elif [[ "$TEST_THREADING" == "omp" ]]; then
        check_sysinfo "OpenMP"
    elif [[ "$TEST_THREADING" == "workqueue" ]]; then
        check_sysinfo "Workqueue"
    else
        echo "Unknown threading layer requested: $TEST_THREADING"
        exit 1
    fi
fi
# Find catchsegv (glibc) so test runs can be wrapped to produce a backtrace
# on segfault. It only exists on Linux; on macOS tests run unwrapped.
# NOTE: previously an unsupported platform only printed the word "Error" to
# stdout and carried on with SEGVCATCH unset; now it reports to stderr and
# aborts, since nothing later in this script can work on such a platform.
unamestr=$(uname)
if [[ "$unamestr" == 'Linux' ]]; then
    SEGVCATCH=catchsegv
elif [[ "$unamestr" == 'Darwin' ]]; then
    SEGVCATCH=""
else
    echo "Error: unsupported platform: $unamestr" >&2
    exit 1
fi
# limit CPUs in use on PPC64LE, fork() issues
# occur on high core count systems
archstr=$(uname -m)
if [[ "$archstr" == 'ppc64le' ]]; then
    TEST_NPROCS=16
fi
# setup SDKROOT on Mac (reuse the uname result captured above)
if [[ "$unamestr" == 'Darwin' ]]; then
    export SDKROOT="$(pwd)/MacOSX10.10.sdk"
fi
# Run the test suite: either the RVSDG-frontend subset, or the full suite
# (optionally under coverage or typeguard) sharded by
# $TEST_START_INDEX:$TEST_COUNT.
# NOTE(review): TEST_NPROCS is only assigned above for ppc64le; elsewhere it
# must come from the CI environment -- if unset, "-m $TEST_NPROCS" collapses
# to a bare "-m". Confirm the CI always exports it.
if [[ "$TEST_RVSDG" == "yes" ]]; then
    echo "Running RVSDG tests..."
    # Now run tests based on the changes identified via git
    NUMBA_USE_RVSDG_FRONTEND=1 NUMBA_CAPTURED_ERRORS=new_style NUMBA_ENABLE_CUDASIM=1 $SEGVCATCH python -m numba.runtests -b -v -m $TEST_NPROCS -- numba.tests.test_usecases
else
    # First check that the test discovery works
    python -m numba.tests.test_runtests
    # Now run tests based on the changes identified via git
    NUMBA_ENABLE_CUDASIM=1 $SEGVCATCH python -m numba.runtests -b -v -g -m $TEST_NPROCS -- numba.tests
    # List the tests found
    echo "INFO: All discovered tests:"
    python -m numba.runtests -l
    # Now run the Numba test suite with sharding
    # Note that coverage is run from the checkout dir to match the "source"
    # directive in .coveragerc
    echo "INFO: Running shard of discovered tests: ($TEST_START_INDEX:$TEST_COUNT)"
    if [ "$RUN_COVERAGE" == "yes" ]; then
        export PYTHONPATH=.
        coverage erase
        $SEGVCATCH coverage run runtests.py -b -j "$TEST_START_INDEX:$TEST_COUNT" --exclude-tags='long_running' -m $TEST_NPROCS -- numba.tests
    elif [ "$RUN_TYPEGUARD" == "yes" ]; then
        echo "INFO: Running with typeguard"
        NUMBA_USE_TYPEGUARD=1 NUMBA_ENABLE_CUDASIM=1 PYTHONWARNINGS="ignore:::typeguard" $SEGVCATCH python runtests.py -b -j "$TEST_START_INDEX:$TEST_COUNT" --exclude-tags='long_running' -m $TEST_NPROCS -- numba.tests
    else
        NUMBA_ENABLE_CUDASIM=1 $SEGVCATCH python -m numba.runtests -b -j "$TEST_START_INDEX:$TEST_COUNT" --exclude-tags='long_running' -m $TEST_NPROCS -- numba.tests
    fi
fi
# Configuration for codecov.io
# When editing this file, please validate its contents using:
# curl -X POST --data-binary @- https://codecov.io/validate < codecov.yml
# NOTE(review): the YAML nesting below was reconstructed -- the copy this was
# reviewed from had lost all leading whitespace. Verify the structure against
# the original codecov.yml (and the validator above) before relying on it.
comment:
  layout: "header, diff, changes, uncovered"
coverage:
  ignore:
    - "numba/cuda/.*"
    - "numba/hsa/.*"
  status:
    project:
      default:
        # The build fails if total project coverage drops by more than 3%
        target: auto
        threshold: "3%"
    # These checks can mark a build failed if too much new code
    # is not covered (which happens often with JITted functions).
    changes: false
    patch: false
# Valgrind Memcheck suppressions for known-benign reports hit when running
# Numba under valgrind: LLVM's host-CPU detection (reached via llvmlite) and
# Intel OpenMP runtime start-up.

# llvm::sys::getHostCPUName branches on uninitialised bytes (harmless).
{
   <llvmpy_get_cpu_name_cond>
   Memcheck:Cond
   fun:_ZN4llvm3sys14getHostCPUNameEv
   fun:LLVMPY_GetHostCPUName
}
# Same call path; an 8-byte value is built from uninitialised data.
{
   <llvmpy_get_cpu_name_value8>
   Memcheck:Value8
   fun:_ZN4llvm3sys14getHostCPUNameEv
   fun:LLVMPY_GetHostCPUName
}
# Intel OpenMP/KMP static initialiser reads uninitialised memory in strrchr.
{
   <openmp_init_cond>
   Memcheck:Cond
   fun:__intel_sse2_strrchr
   fun:_ZN67_INTERNAL_45_______src_thirdparty_tbb_omp_dynamic_link_cpp_c306cade5__kmp12init_dl_dataEv
   fun:__sti__$E
}
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
# -j1: build with a single process (some extensions are not parallel-safe)
SPHINXOPTS = -j1
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
# NOTE(review): in the copy reviewed, the recipe lines below had lost their
# leading TAB characters (make requires a hard TAB before every recipe line);
# they are restored here. Each target simply invokes the corresponding Sphinx
# builder into $(BUILDDIR).
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo " html to make standalone HTML files"
	@echo " dirhtml to make HTML files named index.html in directories"
	@echo " singlehtml to make a single large HTML file"
	@echo " pickle to make pickle files"
	@echo " json to make JSON files"
	@echo " htmlhelp to make HTML files and a HTML help project"
	@echo " qthelp to make HTML files and a qthelp project"
	@echo " devhelp to make HTML files and a Devhelp project"
	@echo " epub to make an epub"
	@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo " latexpdf to make LaTeX files and run them through pdflatex"
	@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo " text to make text files"
	@echo " man to make manual pages"
	@echo " texinfo to make Texinfo files"
	@echo " info to make Texinfo files and run them through makeinfo"
	@echo " gettext to make PO message catalogs"
	@echo " changes to make an overview of all changed/added/deprecated items"
	@echo " xml to make Docutils-native XML files"
	@echo " pseudoxml to make pseudoxml-XML files for display purposes"
	@echo " linkcheck to check all external links for integrity"
	@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
	rm -rf $(BUILDDIR)/*
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Numba.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Numba.qhc"
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/Numba"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Numba"
	@echo "# devhelp"
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	"(use \`make latexpdf' here to do that automatically)."
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	"(use \`make info' here to do that automatically)."
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	"results in $(BUILDDIR)/doctest/output.txt."
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
/* modernizr.min.js is unused but causes a reflow on load. In firefox, this
* manifests as the Numba logo flashing up across the whole browser window for a
* split second every time the page is loaded or a documentation link is
* clicked. This empty file overrides the version included by the theme.
*
* Reference: https://github.com/readthedocs/sphinx_rtd_theme/issues/724
*/
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 20.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 500 500" style="enable-background:new 0 0 500 500;" xml:space="preserve">
<style type="text/css">
.st0{fill:#00A3E0;}
.st1{fill:none;}
.st2{fill:url(#SVGID_1_);}
</style>
<g>
<path class="st0" d="M333.2,310.3c0,0,136.3-100.5,81.1-165.5c-64-75.4-243.4,84.5-263,52S310.5,86.4,329.9,73.4
c2.1-1.4,2.7-4.1,2.1-7.2l20.7-2.4l-12.9-6.5l3.3-12.9l-15,11.7c-5.6-10.1-15.2-21-20.7-24.8c-19.5-12.9-55.2-9.8-107.1,16.2
S28,154.6,89.6,245.5c55.2,81.1,213.8-58.3,240.3-35.8c22.7,19.5-152.5,139.6-152.5,139.6s74.4,0,74.7,0
C239.2,381.8,177.4,476,177.4,476l224.8-162.4C378.6,310.1,333.2,310.3,333.2,310.3z M272.1,41c8-0.6,14.9,5.6,15.5,13.5
c0.6,8-5.6,14.9-13.5,15.5c-8,0.6-14.9-5.6-15.5-13.5C257.9,48.5,264.1,41.6,272.1,41z"/>
</g>
<rect x="22.5" y="22.5" class="st1" width="455" height="455"/>
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="301.6" y1="435.7" x2="301.6" y2="435.7" gradientTransform="matrix(1 0 0 -1 0 500)">
<stop offset="0" style="stop-color:#243746"/>
<stop offset="9.525055e-02" style="stop-color:#223442"/>
<stop offset="0.1897" style="stop-color:#1C2B36"/>
<stop offset="0.2839" style="stop-color:#121B22"/>
<stop offset="0.3773" style="stop-color:#030507"/>
<stop offset="0.3961" style="stop-color:#000000"/>
</linearGradient>
<path class="st2" d="M301.6,64.3"/>
</svg>
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 20.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 500 500" style="enable-background:new 0 0 500 500;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
.st1{fill:none;}
.st2{fill:url(#SVGID_1_);}
</style>
<g>
<path class="st0" d="M333.2,310.3c0,0,136.3-100.5,81.1-165.5c-64-75.4-243.4,84.5-263,52S310.5,86.4,329.9,73.4
c2.1-1.4,2.7-4.1,2.1-7.2l20.7-2.4l-12.9-6.5l3.3-12.9l-15,11.7c-5.6-10.1-15.2-21-20.7-24.8c-19.5-12.9-55.2-9.8-107.1,16.2
S28,154.6,89.6,245.5c55.2,81.1,213.8-58.3,240.3-35.8c22.7,19.5-152.5,139.6-152.5,139.6s74.4,0,74.7,0
C239.2,381.8,177.4,476,177.4,476l224.8-162.4C378.6,310.1,333.2,310.3,333.2,310.3z M272.1,41c8-0.6,14.9,5.6,15.5,13.5
c0.6,8-5.6,14.9-13.5,15.5c-8,0.6-14.9-5.6-15.5-13.5C257.9,48.5,264.1,41.6,272.1,41z"/>
</g>
<rect x="22.5" y="22.5" class="st1" width="455" height="455"/>
<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="301.6" y1="435.7" x2="301.6" y2="435.7" gradientTransform="matrix(1 0 0 -1 0 500)">
<stop offset="0" style="stop-color:#243746"/>
<stop offset="9.525055e-02" style="stop-color:#223442"/>
<stop offset="0.1897" style="stop-color:#1C2B36"/>
<stop offset="0.2839" style="stop-color:#121B22"/>
<stop offset="0.3773" style="stop-color:#030507"/>
<stop offset="0.3961" style="stop-color:#000000"/>
</linearGradient>
<path class="st2" d="M301.6,64.3"/>
</svg>
/* Widen the RTD theme's main content column beyond the theme's narrower
   default so wide tables and code samples in the docs stay readable. */
.wy-nav-content {
    max-width: 1200px
}
# DAG Roadmap
This directory includes a representation of the Numba roadmap in the form of a
DAG. We have done this to enable a highly granular display of enhancements to
Numba that also shows the relationships between these tasks. Many tasks have
prerequisites, and we've found that issue trackers, Kanban boards, and
time-bucketed roadmap documentation all fail to represent this information in
different ways.
## Requirements
```
conda install jinja2 python-graphviz pyyaml
```
## Usage
```
./render.py -o dagmap.html dagmap.yaml
```
The generated HTML file will look for `jquery.graphviz.svg.js` in the same
directory.
## Updating the DAG
Copy one of the existing tasks and edit:
* `label`: text appears on the node. Embed `\n` for line breaks.
* `id`: Referenced to indicate a dependency
* `description`: Shown in the tooltip. Automatically word-wrapped.
* `depends_on`: Optional list of task IDs which this task depends on.
The `style` section of the file is not used yet.
## Notes
The HTML rendering of the graph is based on a slightly modified version of
[jquery.graphviz.svg](https://github.com/mountainstorm/jquery.graphviz.svg/).
Its license is:
```
Copyright (c) 2015 Mountainstorm
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
```
\ No newline at end of file
meta:
version: 1
style:
tags:
performance:
border: red
tasks:
- label: Track allocations in functions
id: track_alloc
description: |
Maintain a list of allocations inside each function which can be used
for freeing things on return, and also for debugging memory usage.
- label: Catch exceptions
id: catch_exceptions
description: |
Allow exceptions raised in nopython mode to be caught in nopython mode.
depends_on:
- track_alloc
- label: New IR
id: new_ir
description: |
New intermediate representation for Numba that is backed by a dictionary
- label: New Type Matching DSL
id: type_matching
description: |
Replace the current DSL for Numba types with something more expressive
that can match type patterns
- label: Declarative type signatures\nfor @overload/@overload_method
id: declarative_overload
description: |
Replace the current DSL for Numba types with something more expressive
that can match type patterns
depends_on:
- type_matching
- label: Rewrite "old-style" implementations
id: rewrite_old_impls
description: |
Rewrite implementations of functions that use the old extension API that
separates typing from implementation, and often uses the LLVM builder
unnecessarily.
depends_on:
- declarative_overload
- improve_test_suite_tooling
- faster_pr_testing
- label: Unify and add more test suite tooling
id: improve_test_suite_tooling
description: |
Add tools to help with common patterns in testing and unify the ones we
have, there's no need for 12 spellings of "is this Python 3" Also decide
on "what to test", do all types need testing if inputs are being
"as_array"'d?
- label: Pipeline pass formalisation
id: pass_formalisation
description: |
Decide on a formal description of a compiler pass and create supporting
code for it
- label: Array expression fusion pass
id: new_array_expr_fusion_pass
description:
From parfors extract out the array expression fusion pass
depends_on:
- parfors_clean_up
- pass_formalisation
- label: LICM Pass
id: new_licm_pass
description: |
Create a LICM Pass
depends_on:
- parfors_clean_up
- pass_formalisation
- label: Clean up Parfors
id: parfors_clean_up
description: |
General clean up and refactoring of parfors ahead of any additional work
- label: Mode based pipeline
id: mode_based_pipeline
description: |
Switch the jit decorator to use a mode based pipeline with
`nopython=True` equivalent as default.
- label: Remove object mode fallback
id: remove_objmode_fallback
description: |
Remove the deprecated object mode fallback
depends_on:
- mode_based_pipeline
- label: Switch to ORC JIT
id: orc_jit
description: |
MCJIT has been deprecated for some time. Need to switch to the newer
ORC JIT class.
- label: Performance analysis suite
id: perform_analysis_suite
description: |
Meta task for all performance analysis related functionality
depends_on:
- line_profiling
- assembly_analysis_tooling
- vectorisation_analysis
- label: Vectorisation analysis
id: vectorisation_analysis
description: |
Obtain LLVMs vectorisation reports and present these in a user friendly
manner
- label: Line profiling
id: line_profiling
description: |
Support collection of profiling statistics from compiled machine code
and map back to lines of Python.
depends_on:
- orc_jit
- assembly_analysis_tooling
- label: Assembly analysis tooling
id: assembly_analysis_tooling
description: |
Tie generated assembly back to python lines and annotate instruction
quality
depends_on:
- capstone
- label: Build capstone against llvmdev
id: capstone
description: |
Build capstone against llvmdev and create conda packages/wheels
- label: Increase JIT class method performance
id: jit_class_method_performance
description: |
Increase the performance of jitclass methods
depends_on:
- llvm_ref_count_pruning
- new_licm_pass
- label: LLVM level ref count pruning
id: llvm_ref_count_pruning
description: |
Add a LLVM compiler pass to prune refcounts across entire functions
- label: JITted coverage information
id: jitted_coverage_info
description: |
Work out how to leverage gcov support in LLVM to enable coverage
information
depends_on:
- compiler_rt
- label: LLVM compiler_rt support
id: compiler_rt
description: |
Work out how to build compiler_rt into LLVM and how to use it in Numba
- label: Switch to pytest
id: pytest
description: |
Make it possible to use pytest as test runner for Numba
- label: Option to run modified tests only
id: run_new_tests
description: |
Use / make pytest plugin to detect all test files which are new /
changed relative to a given branch, and run only those tests
depends_on:
- pytest
- label: Option to run 1/N slice of tests
id: run_test_slice
description: |
Use / make pytest plugin to run 1/N of enumerated tests.
depends_on:
- pytest
- label: Faster PR testing
id: faster_pr_testing
description: |
Make automated PR testing with public CI services give faster feedback.
depends_on:
- run_new_tests
- run_test_slice
/*
* Copyright (c) 2015 Mountainstorm
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
+function ($) {
'use strict'
// Cross Browser starts/endsWith support
// =====================================
// Only install the polyfills when the engine does not already provide the
// native ES6 implementations: the previous code overwrote them
// unconditionally, replacing fast natives and losing their edge-case
// behaviour (e.g. natives throw on RegExp arguments).
if (!String.prototype.startsWith) {
  String.prototype.startsWith = function (prefix) {
    // True when the string begins with `prefix`.
    return this.indexOf(prefix) == 0
  }
}
if (!String.prototype.endsWith) {
  String.prototype.endsWith = function (suffix) {
    // True when the string terminates with `suffix`.
    return this.indexOf(suffix, this.length - suffix.length) !== -1
  }
}
// GRAPHVIZSVG PUBLIC CLASS DEFINITION
// ===================================
// Plugin object bound to a host DOM element that will receive a
// Graphviz-generated <svg>. Fields are placeholders here; init() fills
// them all in.
var GraphvizSvg = function (element, options) {
this.type = null      // plugin name, assigned by init()
this.options = null   // merged option set, assigned by init()
this.enabled = null
this.$element = null  // jQuery wrapper of the host element
this.init('graphviz.svg', element, options)
}
GraphvizSvg.VERSION = '1.0.1'
// Graphviz point-to-pixel scaling constant.
GraphvizSvg.GVPT_2_PX = 32.5 // used to ease removal of extra space
// Default options; any of these can be overridden via data-* attributes on
// the host element or the options object passed to the plugin (see
// getOptions()). `tooltips` expects Bootstrap's tooltip plugin to be loaded.
GraphvizSvg.DEFAULTS = {
  url: null,      // fetch the SVG document from this URL (XML)
  svg: null,      // ...or use this inline SVG markup instead
  shrink: '0.125pt',
  tooltips: {
    // Attach a Bootstrap tooltip to a graph anchor element.
    init: function ($graph) {
      var $a = $(this)
      $a.tooltip({
        container: $graph,
        placement: 'auto left',
        animation: false,
        viewport: null
      }).on('hide.bs.tooltip', function() {
        // keep them visible even if you accidentally mouse over
        if ($a.attr('data-tooltip-keepvisible')) {
          return false
        }
      })
    },
    show: function () {
      var $a = $(this)
      $a.attr('data-tooltip-keepvisible', true)
      $a.tooltip('show')
    },
    hide: function () {
      var $a = $(this)
      $a.removeAttr('data-tooltip-keepvisible')
      $a.tooltip('hide')
    },
    update: function () {
      var $this = $(this)
      if ($this.attr('data-tooltip-keepvisible')) {
        $this.tooltip('show')
        return
      }
    }
  },
  zoom: true,   // enable mouse-wheel zooming (see setupZoom)
  highlight: {
    // Colour used for a selected element (unchanged by default).
    selected: function (col, bg) {
      return col
    },
    // Fade unselected elements towards the background colour
    // (requires the jQuery Color plugin).
    unselected: function (col, bg) {
      return jQuery.Color(col).transition(bg, 0.9)
    }
  },
  ready: null   // callback invoked once setup() has finished
}
// Initialise the plugin on `element`: resolve the option set, then either
// fetch the SVG document from options.url (asynchronously) or use the inline
// options.svg markup, and finally run setup().
// NOTE(review): the branches read `options.url`/`options.svg` from the raw
// argument rather than the merged `this.options` -- confirm whether values
// supplied via data-* attributes are meant to be honoured here.
GraphvizSvg.prototype.init = function (type, element, options) {
  this.enabled = true
  this.type = type
  this.$element = $(element)
  this.options = this.getOptions(options)
  if (options.url) {
    var that = this
    $.get(options.url, null, function(data) {
      var svg = $("svg", data)
      // adoptNode moves the <svg> node out of the fetched XML document so it
      // can be inserted into this page's DOM
      that.$element.html(document.adoptNode(svg[0]))
      that.setup()
    }, "xml")
  } else {
    if (options.svg) {
      this.$element.html(options.svg)
    }
    this.setup()
  }
}
GraphvizSvg.prototype.getDefaults = function () {
  return GraphvizSvg.DEFAULTS
}
// Merge (in increasing priority) the class defaults, data-* attributes on
// the host element, and the caller-supplied options. A scalar `shrink` is
// normalised to an {x, y} pair and converted to pixels.
// NOTE(review): convertToPx is defined elsewhere in this file (not visible
// in this excerpt).
GraphvizSvg.prototype.getOptions = function (options) {
  options = $.extend({}, this.getDefaults(), this.$element.data(), options)
  if (options.shrink) {
    if (typeof options.shrink != 'object') {
      options.shrink = {
        x: options.shrink,
        y: options.shrink
      }
    }
    options.shrink.x = this.convertToPx(options.shrink.x)
    options.shrink.y = this.convertToPx(options.shrink.y)
  }
  return options
}
// Wire up the freshly inserted <svg>: cache the key jQuery handles, index
// nodes/edges, apply background colour, strip Graphviz <title> elements into
// data-name attributes, and enable zooming. Calls options.ready when done.
GraphvizSvg.prototype.setup = function () {
  var options = this.options
  // save key elements in the graph for easy access
  var $svg = $(this.$element.children('svg'))
  var $graph = $svg.children('g:first')
  this.$svg = $svg
  this.$graph = $graph
  this.$background = $graph.children('polygon:first') // might not exist
  this.$nodes = $graph.children('.node')
  this.$edges = $graph.children('.edge')
  this._nodesByName = {}   // populated by setupNodesEdges()
  this._edgesByName = {}
  // add top level class and copy background color to element
  this.$element.addClass('graphviz-svg')
  if (this.$background.length) {
    this.$element.css('background', this.$background.attr('fill'))
  }
  // setup all the nodes and edges
  var that = this
  this.$nodes.each(function () { that.setupNodesEdges($(this), true) })
  this.$edges.each(function () { that.setupNodesEdges($(this), false) })
  // remove the graph title element
  var $title = this.$graph.children('title')
  this.$graph.attr('data-name', $title.text())
  $title.remove()
  if (options.zoom) {
    this.setupZoom()
  }
  // tell people we're done
  if (options.ready) {
    options.ready.call(this)
  }
}
// Prepare a single node or edge group: record its original colours (so
// highlight()/restoreElement() can put them back), optionally shrink
// node shapes, index the element by its <title> text, pick up any user
// XML comment preceding it, and convert xlink:title attributes into
// regular title attributes for tooltip support.
GraphvizSvg.prototype.setupNodesEdges = function ($el, isNode) {
  var that = this
  var options = this.options
  // save the colors of the paths, ellipses and polygons
  $el.find('polygon, ellipse, path').each(function () {
    var $this = $(this)
    // save original colors
    $this.data('graphviz.svg.color', {
      fill: $this.attr('fill'),
      stroke: $this.attr('stroke')
    })
    // shrink it if it's a node
    if (isNode && options.shrink) {
      that.scaleNode($this)
    }
  })
  // save the node name and check if theres a comment above; save it
  var $title = $el.children('title')
  if ($title[0]) {
    // remove any compass points:
    var title = $title.text().replace(/:[snew][ew]?/g,'')
    $el.attr('data-name', title)
    $title.remove()
    if (isNode) {
      this._nodesByName[title] = $el[0]
    } else {
      this._edgesByName[title] = $el[0]
    }
    // without a title we can't tell if its a user comment or not
    // walk backwards to the nearest preceding comment node (nodeType 8)
    var previousSibling = $el[0].previousSibling
    while (previousSibling && previousSibling.nodeType != 8) {
      previousSibling = previousSibling.previousSibling
    }
    if (previousSibling != null && previousSibling.nodeType == 8) {
      // decode HTML entities by bouncing the text through a detached div
      var htmlDecode = function (input) {
        var e = document.createElement('div')
        e.innerHTML = input
        return e.childNodes[0].nodeValue
      }
      var value = htmlDecode(previousSibling.nodeValue.trim())
      // graphviz emits a comment equal to the element name; anything
      // different must be a comment the user added to the dot source
      if (value != title) {
        // user added comment
        $el.attr('data-comment', value)
      }
    }
  }
  // remove namespace from a[xlink:title]
  $el.find('a').filter(function () {
    return $(this).attr('xlink:title') }).each(function () {
    var $a = $(this)
    $a.attr('title', $a.attr('xlink:title'))
    $a.removeAttr('xlink:title')
    if (options.tooltips) {
      options.tooltips.init.call(this, that.$element)
    }
  })
}
// Enable shift+scroll zooming: remember the svg's natural size, then on
// each wheel event rescale the svg and adjust the container's scroll
// position so the point under the pointer stays fixed.
// Requires the jquery-mousewheel plugin for the "mousewheel" event.
GraphvizSvg.prototype.setupZoom = function() {
  var that = this
  var $element = this.$element
  var $svg = this.$svg
  this.zoom = {width: $svg.attr('width'), height: $svg.attr('height'), percentage: null }
  this.scaleView(100.0)
  $element.mousewheel(function (evt) {
    if (evt.shiftKey) {
      var percentage = that.zoom.percentage
      percentage -= evt.deltaY * evt.deltaFactor
      // never zoom out below the natural (100%) size
      if (percentage < 100.0) {
        percentage = 100.0
      }
      // get pointer offset in view
      // ratio offset within svg
      var dx = evt.pageX - $svg.offset().left
      var dy = evt.pageY - $svg.offset().top
      var rx = dx / $svg.width()
      var ry = dy / $svg.height()
      // offset within frame ($element)
      var px = evt.pageX - $element.offset().left
      var py = evt.pageY - $element.offset().top
      that.scaleView(percentage)
      // scroll so pointer is still in same place
      $element.scrollLeft((rx * $svg.width()) + 0.5 - px)
      $element.scrollTop((ry * $svg.height()) + 0.5 - py)
      return false // stop propagation
    }
  })
}
// Resize the svg to the given percentage of its container, record the
// new zoom level, then let the tooltip plugin reposition any tooltips
// attached to nodes or edges.
GraphvizSvg.prototype.scaleView = function (percentage) {
  var self = this
  this.$svg.attr({ width: percentage + '%', height: percentage + '%' })
  this.zoom.percentage = percentage
  // now callback to update tooltip position
  var $all = this.$nodes.add(this.$edges)
  $all.children('a[title]').each(function () {
    self.options.tooltips.update.call(this)
  })
}
// Shrink a node shape by the configured options.shrink.{x,y} pixel
// amounts. Ellipses simply reduce their radii; polygons are rescaled
// vertex-by-vertex towards their bounding-box centre.
GraphvizSvg.prototype.scaleNode = function($node) {
  var dx = this.options.shrink.x
  var dy = this.options.shrink.y
  var tagName = $node.prop('tagName')
  if (tagName == 'ellipse') {
    $node.attr('rx', parseFloat($node.attr('rx')) - dx)
    $node.attr('ry', parseFloat($node.attr('ry')) - dy)
  } else if (tagName == 'polygon') {
    // this is more complex - we need to scale it manually
    var bbox = $node[0].getBBox()
    var cx = bbox.x + (bbox.width / 2)
    var cy = bbox.y + (bbox.height / 2)
    var pts = $node.attr('points').split(' ')
    var points = '' // new value
    for (var i in pts) {
      var xy = pts[i].split(',')
      var ox = parseFloat(xy[0])
      var oy = parseFloat(xy[1])
      // move each vertex towards the centre proportionally to its
      // distance from it, so the polygon keeps its proportions
      points += (((cx - ox) / (bbox.width / 2) * dx) + ox) +
        ',' +
        (((cy - oy) / (bbox.height / 2) * dy) + oy) +
        ' '
    }
    $node.attr('points', points)
  }
}
// Convert a dimension given as a number, "<n>px" or "<n>pt" string into
// a pixel count; non-string values pass through unchanged.
GraphvizSvg.prototype.convertToPx = function (val) {
  if (typeof val !== 'string') {
    return val
  }
  var cut = val.length
  var scale = 1.0
  if (val.endsWith('px')) {
    cut -= 2
  } else if (val.endsWith('pt')) {
    cut -= 2
    scale = GraphvizSvg.GVPT_2_PX // graphviz points -> pixels
  }
  return parseFloat(val.substring(0, cut)) * scale
}
// Run testEdge(nodeName, edgeName) over every known edge. Truthy
// results (the name of the node on the other side) are collected and
// returned; when $retval is supplied the matching edge elements are
// also pushed onto that jQuery set.
GraphvizSvg.prototype.findEdge = function (nodeName, testEdge, $retval) {
  var matches = []
  var edgeNames = Object.keys(this._edgesByName)
  for (var i = 0; i < edgeNames.length; ++i) {
    var edgeName = edgeNames[i]
    var other = testEdge(nodeName, edgeName)
    if (other) {
      if ($retval) {
        $retval.push(this._edgesByName[edgeName])
      }
      matches.push(other)
    }
  }
  return matches
}
// Recursively collect the nodes reachable from "node" through edges
// accepted by testEdge(nodeName, edgeName) -> other-node-name (or null).
// Matching nodes (and, when includeEdges is set, the edges themselves)
// are accumulated into the caller-supplied jQuery set $retval.
GraphvizSvg.prototype.findLinked = function (node, includeEdges, testEdge, $retval) {
  var that = this
  var $node = $(node)
  var $edges = null
  if (includeEdges) {
    $edges = $retval
  }
  var names = this.findEdge($node.attr('data-name'), testEdge, $edges)
  for (var i in names) {
    var n = this._nodesByName[names[i]]
    // only recurse into nodes not already collected (guards cycles)
    if (!$retval.is(n)) {
      $retval.push(n)
      that.findLinked(n, includeEdges, testEdge, $retval)
    }
  }
}
GraphvizSvg.prototype.colorElement = function ($el, getColor) {
var bg = this.$element.css('background')
$el.find('polygon, ellipse, path').each(function() {
var $this = $(this)
var color = $this.data('graphviz.svg.color')
if (color.fill && $this.prop('tagName') != 'path') {
$this.attr('fill', getColor(color.fill, bg)) // don't set fill if it's a path
}
if (color.stroke) {
$this.attr('stroke', getColor(color.stroke, bg))
}
})
}
// Restore the original colours saved by setupNodesEdges(), undoing any
// colorElement() dim/highlight.
GraphvizSvg.prototype.restoreElement = function ($el) {
  $el.find('polygon, ellipse, path').each(function() {
    var $this = $(this)
    var color = $this.data('graphviz.svg.color')
    if (color.fill) {
      // NOTE(review): unlike colorElement() there is no tagName guard
      // here; harmless in practice because colorElement() never changes
      // a path's fill, so restoring it is a no-op for paths
      $this.attr('fill', color.fill)
    }
    if (color.stroke) {
      $this.attr('stroke', color.stroke)
    }
  })
}
// methods users can actually call
// Public: jQuery set of all node groups in the graph.
GraphvizSvg.prototype.nodes = function () {
  return this.$nodes
}
// Public: jQuery set of all edge groups in the graph.
GraphvizSvg.prototype.edges = function () {
  return this.$edges
}
// Public: map from node name (title text) to its DOM element.
GraphvizSvg.prototype.nodesByName = function () {
  return this._nodesByName
}
// Public: map from edge name (e.g. "A->B") to its DOM element.
GraphvizSvg.prototype.edgesByName = function () {
  return this._edgesByName
}
// Return the jQuery set of nodes (and, when includeEdges, edges) with a
// directed path INTO the given node, i.e. followers of "X->node" edges,
// transitively.
GraphvizSvg.prototype.linkedTo = function (node, includeEdges) {
  var $results = $()
  this.findLinked(node, includeEdges, function (nodeName, edgeName) {
    // an edge "A->B" points into B; report A when this edge ends at nodeName
    var suffix = '->' + nodeName
    if (!edgeName.endsWith(suffix)) {
      return null
    }
    return edgeName.substring(0, edgeName.length - suffix.length)
  }, $results)
  return $results
}
// Return the jQuery set of nodes (and, when includeEdges, edges)
// reachable FROM the given node, i.e. targets of "node->X" edges,
// transitively.
GraphvizSvg.prototype.linkedFrom = function (node, includeEdges) {
  var $results = $()
  this.findLinked(node, includeEdges, function (nodeName, edgeName) {
    // an edge "A->B" leaves A; report B when this edge starts at nodeName
    var prefix = nodeName + '->'
    if (!edgeName.startsWith(prefix)) {
      return null
    }
    return edgeName.substring(prefix.length)
  }, $results)
  return $results
}
// Return the jQuery set of nodes (and, when includeEdges, edges)
// connected to the given node by undirected edges ("A--B"), in either
// direction, transitively.
//
// BUG FIX: the callbacks previously referenced the undefined global
// "name" (window.name) and returned the regex pattern string itself —
// an always-truthy value that is not a node name — so every edge
// "matched" and the node on the other side was never extracted.
GraphvizSvg.prototype.linked = function (node, includeEdges) {
  var $retval = $()
  this.findLinked(node, includeEdges, function (nodeName, edgeName) {
    var m = edgeName.match(new RegExp('^' + nodeName + '--(.*)$'))
    return m ? m[1] : null
  }, $retval)
  this.findLinked(node, includeEdges, function (nodeName, edgeName) {
    var m = edgeName.match(new RegExp('^(.*)--' + nodeName + '$'))
    return m ? m[1] : null
  }, $retval)
  return $retval
}
// Show (show == true) or hide (otherwise) the tooltip on every a[title]
// link inside each element of $elements, via the configured tooltip
// callbacks.
GraphvizSvg.prototype.tooltip = function ($elements, show) {
  var options = this.options
  $elements.each(function () {
    $(this).find('a[title]').each(function () {
      var handler = show ? options.tooltips.show : options.tooltips.hide
      handler.call(this)
    })
  })
}
// Re-append $elements so they paint above everything else (SVG paints
// strictly in document order).
GraphvizSvg.prototype.bringToFront = function ($elements) {
  $elements.detach().appendTo(this.$graph)
}
// Move $elements behind everything else in the graph. When a background
// polygon exists they are placed just after it (so the background stays
// at the very back); otherwise they are prepended to the graph group.
//
// BUG FIX: previously referenced the undefined variable "$element"
// instead of the "$elements" parameter (a ReferenceError at runtime)
// and did not detach the elements before re-inserting them.
GraphvizSvg.prototype.sendToBack = function ($elements) {
  if (this.$background.length) {
    $elements.detach().insertAfter(this.$background)
  } else {
    $elements.detach().prependTo(this.$graph)
  }
}
// Highlight a selection: every other node/edge is dimmed through the
// options.highlight.unselected colour transform and the selection is
// recoloured with options.highlight.selected; "tooltips" toggles
// tooltips on the selection. Called with no/empty selection it restores
// the whole graph to its original colours.
GraphvizSvg.prototype.highlight = function ($nodesEdges, tooltips) {
  var that = this
  var options = this.options
  var $everything = this.$nodes.add(this.$edges)
  if ($nodesEdges && $nodesEdges.length > 0) {
    // create set of all other elements and dim them
    $everything.not($nodesEdges).each(function () {
      that.colorElement($(this), options.highlight.unselected)
      $(this).css('font-weight', 'normal')
      that.tooltip($(this)) // no "show" arg -> hides their tooltips
    })
    $nodesEdges.each(function () {
      that.colorElement($(this), options.highlight.selected)
      $(this).css('font-weight', 'normal')
    })
    this.tooltip($nodesEdges, tooltips)
  } else {
    // no selection: restore original colours everywhere
    $everything.each(function () {
      that.restoreElement($(this))
      $(this).css('font-weight', 'normal')
    })
    this.tooltip($everything)
  }
}
// Tear down the plugin: unbind namespaced events and drop the stored
// instance data once hidden.
// NOTE(review): this.hide() is not defined anywhere in this file —
// presumably provided elsewhere; confirm before relying on destroy().
GraphvizSvg.prototype.destroy = function () {
  var that = this
  this.hide(function () {
    that.$element.off('.' + that.type).removeData(that.type)
  })
}
// GRAPHVIZSVG PLUGIN DEFINITION
// =============================
// jQuery plugin entry point: $(sel).graphviz(options | 'methodName').
// Instantiates GraphvizSvg on first call (stored in the element's data)
// and dispatches string arguments to instance methods.
function Plugin(option) {
  return this.each(function () {
    var $this = $(this)
    var data = $this.data('graphviz.svg')
    var options = typeof option == 'object' && option
    // calling destroy on an element that was never initialised is a no-op
    if (!data && /destroy/.test(option)) return
    if (!data) $this.data('graphviz.svg', (data = new GraphvizSvg(this, options)))
    if (typeof option == 'string') data[option]()
  })
}
// Register the plugin, keeping a reference to any previous owner of
// $.fn.graphviz so noConflict() can restore it.
var old = $.fn.graphviz
$.fn.graphviz = Plugin
$.fn.graphviz.Constructor = GraphvizSvg
// GRAPHVIZ NO CONFLICT
// ====================
// Hand $.fn.graphviz back to its previous owner and return this plugin
// function so the caller can keep their own reference.
$.fn.graphviz.noConflict = function () {
  $.fn.graphviz = old
  return this
}
}(jQuery)
#!/usr/bin/env python
import os.path
import json
import collections
import yaml
import graphviz
from jinja2 import Environment, FileSystemLoader
# Parsed representation of a dagmap YAML document: "meta" is the
# document metadata (minus its version), "style" the rendering styles,
# and "tasks" the list of task dicts.
Dagmap = collections.namedtuple('Dagmap',
                                ['version', 'meta', 'style', 'tasks'])
def parse_yaml(filename):
    """Load a dagmap YAML file and return its contents as a Dagmap.

    Raises Exception for an unsupported meta version or when "tasks" is
    not a list.
    """
    with open(filename, 'r') as f:
        contents = yaml.safe_load(f)

    meta = contents['meta']
    version = meta['version']
    if version > 1:
        raise Exception('Unsupported version %d' % version)
    # the version is reported via its own field, not through meta
    del meta['version']

    style = contents['style']
    tasks = contents['tasks']
    if not isinstance(tasks, list):
        raise Exception('"tasks" must be a list')

    return Dagmap(version=version, meta=meta, style=style, tasks=tasks)
def to_graphviz(dagmap):
    """Render a Dagmap as a graphviz.Digraph (svg output, neato layout)."""
    graph_attr = dict(bgcolor="#f4f4f4", pad="0.5", overlap="false")
    node_attr = dict(width="0.6", style="filled",
                     fillcolor="#83c6de", color="#83c6de", penwidth="3",
                     label="", fontname="helvetica Neue Ultra Light",
                     fontsize="28")
    edge_attr = dict(color="#616a72", arrowsize="2.0", penwidth="4",
                     fontname="helvetica Neue Ultra Light")
    graph = graphviz.Digraph(format='svg', engine='neato',
                             graph_attr=graph_attr,
                             node_attr=node_attr,
                             edge_attr=edge_attr)
    # hidden anchor node acting as the parent for tasks with no dependencies
    graph.node(name='_nothing', label='', style='invis')
    for task in dagmap.tasks:
        graph.node(name=task['id'], label=task['label'],
                   tooltip=task['description'].strip())
        for dep in task.get('depends_on', ['_nothing']):
            # edges from the anchor are invisible so root tasks float freely
            attrs = {'style': 'invis'} if dep == '_nothing' else {}
            graph.edge(dep, task['id'], **attrs)
    return graph
def main(argv):
    """Command line entry point: render a dagmap YAML file to HTML.

    Returns 0 on success (suitable for sys.exit).
    """
    import argparse
    parser = argparse.ArgumentParser(description='Render Dagmap to Graphviz')
    parser.add_argument('-o', '--output', required=True,
                        help='output svg filename')
    parser.add_argument('-t', '--template', default='template.html',
                        help='HTML rendering template')
    parser.add_argument('input', metavar='INPUT', type=str,
                        help='YAML input filename')
    args = parser.parse_args(argv[1:])

    graph = to_graphviz(parse_yaml(args.input))
    svg = graph.pipe().decode('utf-8')

    # embed the rendered SVG as a JSON string inside the HTML template,
    # which lives next to this script
    env = Environment(loader=FileSystemLoader(os.path.dirname(__file__)))
    html = env.get_template(args.template).render(svg=json.dumps(svg))
    with open(args.output, 'w') as f:
        f.write(html)
    return 0
# Script entry point when executed directly.
if __name__ == '__main__':
    import sys
    sys.exit(main(sys.argv))
<!--
* Copyright (c) 2015 Mountainstorm
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
-->
<html>
<head>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.3.4/css/bootstrap.min.css">
</head>
<style>
#instructions {
color: #0a0a0a;
position: absolute;
z-index: 100;
bottom: 0px;
left: 0px;
}
/* this element needs tooltip positioning to work */
.graphviz-svg {
position: relative;
}
/* stop people selecting text on nodes */
.graphviz-svg text {
-webkit-touch-callout: none;
-webkit-user-select: none;
-khtml-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
cursor: default;
}
</style>
<body>
<h4 id="instructions">Click node to highlight; Shift-scroll to zoom; Esc to unhighlight</h4>
<div id="details" style="float: left; width: 20%; height: 100%; background-color:lightgray; color: black; overflow: scroll; padding: 1em;">
<h5>Details</h5>
<p>(Click on a node for details)</p>
</div>
<div id="graph" style="float: right; width: 80%; height: 100%; overflow: scroll;"></div>
<script type="text/javascript" src="https://code.jquery.com/jquery-2.1.3.min.js"></script>
<script type="text/javascript"
src="https://cdn.rawgit.com/jquery/jquery-mousewheel/master/jquery.mousewheel.min.js"></script>
<script type="text/javascript" src="https://cdn.rawgit.com/jquery/jquery-color/master/jquery.color.js"></script>
<script type="text/javascript"
src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.3.4/js/bootstrap.min.js"></script>
<script type="text/javascript" src="jquery.graphviz.svg.js"></script>
<script type="text/javascript">
var svg = {{ svg }}
</script>
<script type="text/javascript">
// Wire up the graph once the page has loaded: clicking a node
// highlights it plus everything linked to/from it and copies its
// tooltip text into the details pane; Esc clears the highlight.
$(document).ready(function () {
  $("#graph").graphviz({
    svg: svg,
    ready: function () {
      var gv = this
      gv.nodes().click(function () {
        // build the set: the clicked node and its transitive links
        var $set = $()
        $set.push(this)
        $set = $set.add(gv.linkedFrom(this, true))
        $set = $set.add(gv.linkedTo(this, true))
        gv.highlight($set, false)
        gv.tooltip($(this), true)
        gv.bringToFront($set)
        // copy tooltip text to pane
        // (the tooltip must be shown first so bootstrap creates the
        // aria-describedby element whose HTML we read)
        var tooltip_id = $(this).find("a").attr("aria-describedby")
        var tooltip_box = $("#"+tooltip_id).find(".tooltip-inner")
        var tooltip_data = tooltip_box.html()
        $("#details p").html(tooltip_data)
        gv.tooltip($(this), false)
        $(this).css('font-weight', 'bold')
      })
      // Esc (keyCode 27) restores the whole graph
      $(document).keydown(function (evt) {
        if (evt.keyCode == 27) {
          gv.highlight()
        }
      })
    }
  });
});
</script>
</body>
</html>
# This environment is used by the RTD config for PR builds. RTD uses this as the
# base environment and then adds in the sphinx etc tools on top.
# See: https://docs.readthedocs.io/en/stable/guides/conda.html
name: rtd
channels:
- numba/label/dev
dependencies:
- python=3.8
- llvmlite=0.41
- numpy
- numpydoc
- setuptools
# https://stackoverflow.com/questions/67542699/readthedocs-sphinx-not-rendering-bullet-list-from-rst-file
- docutils==0.16
# The following is needed to fix RTD.
- conda
@ECHO OFF
REM Command file for Sphinx documentation
REM Allow the sphinx-build executable to be overridden via SPHINXBUILD.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% source
set I18NSPHINXOPTS=%SPHINXOPTS% source
REM Propagate an optional PAPER size (a4/letter) to the latex builders.
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)
REM With no target specified just print the help text.
if "%1" == "" goto help
if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and a HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  texinfo    to make Texinfo files
	echo.  gettext    to make PO message catalogs
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  xml        to make Docutils-native XML files
	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	goto end
)
REM "clean" empties the build directory instead of invoking sphinx.
if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)
REM Probe for sphinx-build and fail with guidance when it is missing
REM (errorlevel 9009 is Windows' "command not found").
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)
REM One clause per builder target: run sphinx-build with the matching
REM -b builder, bail out on error, then report where the output went.
if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)
if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)
if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)
if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)
if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)
if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)
REM Build Qt help files and print the follow-up commands to compile and
REM view the collection.
REM BUG FIX: qcollectiongenerator produces a .qhc collection file; the
REM instructions previously pointed "assistant" at a non-existent
REM Numba.ghc.
if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Numba.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Numba.qhc
	goto end
)
REM Remaining builder targets; same pattern as above. The latexpdf
REM targets additionally run make inside the latex output directory.
if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)
if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)
if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)
if "%1" == "latexpdf" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf
	cd %BUILDDIR%/..
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)
if "%1" == "latexpdfja" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf-ja
	cd %BUILDDIR%/..
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)
if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)
if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)
if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
)
if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
)
if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)
if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)
if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)
if "%1" == "xml" (
	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The XML files are in %BUILDDIR%/xml.
	goto end
)
if "%1" == "pseudoxml" (
	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
	goto end
)
:end
numpydoc
import os.path as path
import subprocess
import shlex
from sphinx.util import logging
from docutils import nodes
logger = logging.getLogger(__name__)
# use an old git trick, to get the top-level, could have used ../ etc.. but
# this will be fine..
# NOTE(review): this runs at import time and assumes the docs build is
# executed from inside a git checkout with "git" on PATH.
top = subprocess.check_output(shlex.split(
    "git rev-parse --show-toplevel")).strip().decode("utf-8")
def make_ref(text):
    """Make hyperlink to Github for ``text``, a path relative to the repo root.

    Files map to .../blob/main/<text>, directories to .../tree/main/<text>;
    a path that does not exist locally falls back to the project root URL
    (with a warning so broken repomap entries show up in the build log).
    """
    full_path = path.join(top, text)
    if path.isfile(full_path):
        ref = "https://www.github.com/numba/numba/blob/main/" + text
    elif path.isdir(full_path):
        ref = "https://www.github.com/numba/numba/tree/main/" + text
    else:
        # logger.warn() is deprecated; logging API prefers warning()
        logger.warning("Failed to find file in repomap: " + text)
        ref = "https://www.github.com/numba/numba"
    return ref
def intersperse(lst, item):
    """Return a new list with ``item`` inserted between consecutive
    elements of ``lst``.

    Copied under CC-BY-SA from stackoverflow at:
    https://stackoverflow.com/questions/5920643/
    add-an-item-between-each-item-already-in-the-list
    """
    result = []
    for elem in lst:
        result.append(elem)
        result.append(item)
    if result:
        result.pop()  # drop the trailing separator
    return result
def ghfile_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    """ Emit hyperlink nodes for a given file in repomap.

    Supports three spellings of ``text``: brace expansion ("myfile.{c,h}"
    makes one link per extension), a glob ("path/*_files.py" links to the
    containing directory), and a verbatim path. Multiple links are
    separated with " | ".
    """
    # Mutable default arguments ({} and []) replaced with None to avoid
    # the shared-state pitfall; behaviour is unchanged for all callers.
    if options is None:
        options = {}
    my_nodes = []
    if "{" in text: # myfile.{c,h} - make two nodes
        # could have used regexes, but this will be fine..
        base = text[:text.find(".") + 1]
        exts = text[text.find("{") + 1:text.find("}")].split(",")
        for e in exts:
            node = nodes.reference(rawtext,
                                   base + e,
                                   refuri=make_ref(base + e),
                                   **options)
            my_nodes.append(node)
    elif "*" in text: # path/*_files.py - link to directory
        # Could have used something from os.path, but this will be fine..
        ref = path.dirname(text) + path.sep
        node = nodes.reference(rawtext, text, refuri=make_ref(ref), **options)
        my_nodes.append(node)
    else: # everything else is taken verbatim
        node = nodes.reference(rawtext, text, refuri=make_ref(text), **options)
        my_nodes.append(node)
    # insert separators if needed
    if len(my_nodes) > 1:
        my_nodes = intersperse(my_nodes, nodes.Text(" | "))
    return my_nodes, []
def setup(app):
    # Sphinx extension entry point: registers the :ghfile: role and
    # declares the extension safe for parallel reads and writes.
    logger.info('Initializing ghfiles plugin')
    app.add_role('ghfile', ghfile_role)
    metadata = {'parallel_read_safe': True, 'parallel_write_safe': True}
    return metadata
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment