Commit 8f8fbb9f authored by Hang Zhang's avatar Hang Zhang
Browse files

v1.0.1

parent aa9af7fd
......@@ -3,3 +3,4 @@
*.pyc
build/
data/
docs/html/
......@@ -13,6 +13,7 @@ year = {2017}
```
## [Documentation](http://hangzh.com/PyTorch-Encoding/)
Please visit the [**Docs**](http://hangzh.com/PyTorch-Encoding/) for detailed instructions on installation and usage.
(If you would like to reproduce the texture recognition benchmark in the paper, please visit our original [Torch implementation](https://github.com/zhanghang1989/Deep-Encoding).)
- Please visit the [**Docs**](http://hangzh.com/PyTorch-Encoding/) for detailed instructions on installation and usage.
- [**Link**](http://hangzh.com/PyTorch-Encoding/experiments/texture.html) to the experiments and pre-trained models.
......@@ -40,9 +40,9 @@ defines = [('WITH_CUDA', None)]
with_cuda = True
include_path = [os.path.join(lib_path, 'include'),
os.path.join(os.environ['HOME'],'pytorch/torch/lib/THC'),
os.path.join(lib_path,'include/ENCODING'),
os.path.join(this_file,'encoding/src/')]
os.path.join(os.environ['HOME'],'pytorch/torch/lib/THC'),
os.path.join(lib_path,'include/ENCODING'),
os.path.join(this_file,'encoding/src/')]
def make_relative_rpath(path):
if platform.system() == 'Darwin':
......
body {
  font-family: "Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;
}

/* Default header fonts are ugly */
h1, h2, .rst-content .toctree-wrapper p.caption, h3, h4, h5, h6, legend, p.caption {
  font-family: "Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;
}

/* Use white for docs background */
.wy-side-nav-search {
  background-color: #a0e2ff;
}

.wy-nav-content-wrap, .wy-menu li.current > a {
  background-color: #fff;
}

@media screen and (min-width: 1400px) {
  .wy-nav-content-wrap {
    background-color: rgba(0, 0, 0, 0.0470588);
  }

  .wy-nav-content {
    background-color: #fff;
  }
}

/* Fixes for mobile */
.wy-nav-top {
  background-color: #fff;
  background-repeat: no-repeat;
  background-position: center;
  padding: 0;
  margin: 0.4045em 0.809em;
  color: #333;
}

.wy-nav-top > a {
  display: none;
}

@media screen and (max-width: 768px) {
  .wy-side-nav-search>a img.logo {
    height: 60px;
  }
}

/* This is needed to ensure that logo above search scales properly */
.wy-side-nav-search a {
  display: block;
}

/* This ensures that multiple constructors will remain in separate lines. */
.rst-content dl:not(.docutils) dt {
  display: table;
}

/* Use our blue for literals (duplicate `.rst-content tt.literal` selector removed) */
.rst-content tt.literal, .rst-content code.literal {
  color: #4080bf;
}

/* Keep cross-reference literals neutral (duplicate `a .rst-content tt` selector removed) */
.rst-content tt.xref, .rst-content code.xref,
a .rst-content tt, a .rst-content code {
  color: #404040;
}

/* Change link colors (except for the menu) */
a {
  color: #4080bf;
}

a:hover {
  color: #4080bf;
}

a:visited {
  color: #306293;
}

.wy-menu a {
  color: #b3b3b3;
}

.wy-menu a:hover {
  color: #b3b3b3;
}

/* Default footer text is quite big */
footer {
  font-size: 80%;
}

footer .rst-footer-buttons {
  font-size: 125%; /* revert footer settings - 1/80% = 125% */
}

footer p {
  font-size: 100%;
}

/* For hidden headers that appear in TOC tree */
/* see http://stackoverflow.com/a/32363545/3343043 */
.rst-content .hidden-section {
  display: none;
}

nav .hidden-section {
  display: inherit;
}

.wy-side-nav-search>div.version {
  color: #000;
}
<?xml version="1.0"?>
<!DOCTYPE svg PUBLIC '-//W3C//DTD SVG 1.0//EN'
'http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd'>
<svg xmlns:xlink="http://www.w3.org/1999/xlink" style="fill-opacity:1; color-rendering:auto; color-interpolation:auto; stroke:black; text-rendering:auto; stroke-linecap:square; stroke-miterlimit:10; stroke-opacity:1; shape-rendering:auto; fill:black; stroke-dasharray:none; font-weight:normal; stroke-width:1; font-family:'Dialog'; font-style:normal; stroke-linejoin:miter; font-size:12px; stroke-dashoffset:0; image-rendering:auto;" width="560" height="420" xmlns="http://www.w3.org/2000/svg"
><!--Generated by the Batik Graphics2D SVG Generator--><defs id="genericDefs"
/><g
><defs id="defs1"
><clipPath clipPathUnits="userSpaceOnUse" id="clipPath1"
><path d="M0 0 L560 0 L560 420 L0 420 L0 0 Z"
/></clipPath
><font horiz-adv-x="50.0" id="font1"
><font-face ascent="95.200005" descent="21.3" units-per-em="100" style="font-style:normal; font-family:Helvetica Neue; font-weight:normal;"
/><missing-glyph horiz-adv-x="50.0" d="M40.9 66.6 L10.2 66.6 L10.2 4.7 L40.9 4.7 ZM46 71.2 L46 0.1 L5.1 0.1 L5.1 71.2 Z"
/><glyph unicode="0" horiz-adv-x="55.6" d="M13.2 34.9 Q13.2 37.5 13.25 40.65 Q13.3 43.8 13.75 46.95 Q14.2 50.1 15.05 53.1 Q15.9 56.1 17.55 58.35 Q19.2 60.6 21.7 62 Q24.2 63.4 27.8 63.4 Q31.4 63.4 33.9 62 Q36.4 60.6 38.05 58.35 Q39.7 56.1 40.55 53.1 Q41.4 50.1 41.85 46.95 Q42.3 43.8 42.35 40.65 Q42.4 37.5 42.4 34.9 Q42.4 30.9 42.15 25.95 Q41.9 21 40.55 16.65 Q39.2 12.3 36.2 9.3 Q33.2 6.3 27.8 6.3 Q22.4 6.3 19.4 9.3 Q16.4 12.3 15.05 16.65 Q13.7 21 13.45 25.95 Q13.2 30.9 13.2 34.9 ZM4.2 34.8 Q4.2 30.9 4.4 26.7 Q4.6 22.5 5.4 18.5 Q6.2 14.5 7.7 10.95 Q9.2 7.4 11.8 4.7 Q14.4 2 18.35 0.45 Q22.3 -1.1 27.8 -1.1 Q33.4 -1.1 37.3 0.45 Q41.2 2 43.8 4.7 Q46.4 7.4 47.9 10.95 Q49.4 14.5 50.2 18.5 Q51 22.5 51.2 26.7 Q51.4 30.9 51.4 34.8 Q51.4 38.7 51.2 42.9 Q51 47.1 50.2 51.1 Q49.4 55.1 47.9 58.7 Q46.4 62.3 43.8 65 Q41.2 67.7 37.25 69.3 Q33.3 70.9 27.8 70.9 Q22.3 70.9 18.35 69.3 Q14.4 67.7 11.8 65 Q9.2 62.3 7.7 58.7 Q6.2 55.1 5.4 51.1 Q4.6 47.1 4.4 42.9 Q4.2 38.7 4.2 34.8 Z"
/><glyph unicode="5" horiz-adv-x="55.6" d="M47 62.2 L47 69.7 L12 69.7 L5.4 32.9 L12.7 32.5 Q15.2 35.5 18.45 37.35 Q21.7 39.2 25.9 39.2 Q29.5 39.2 32.45 38 Q35.4 36.8 37.5 34.65 Q39.6 32.5 40.75 29.55 Q41.9 26.6 41.9 23.1 Q41.9 18.9 40.7 15.75 Q39.5 12.6 37.45 10.5 Q35.4 8.4 32.65 7.35 Q29.9 6.3 26.9 6.3 Q23.7 6.3 21.05 7.25 Q18.4 8.2 16.45 9.95 Q14.5 11.7 13.35 14.05 Q12.2 16.4 12 19.1 L3.5 19.1 Q3.6 14.3 5.4 10.5 Q7.2 6.7 10.3 4.15 Q13.4 1.6 17.45 0.25 Q21.5 -1.1 26.1 -1.1 Q32.3 -1.1 36.95 0.85 Q41.6 2.8 44.7 6.1 Q47.8 9.4 49.35 13.65 Q50.9 17.9 50.9 22.4 Q50.9 28.5 49.1 33.05 Q47.3 37.6 44.2 40.65 Q41.1 43.7 36.9 45.2 Q32.7 46.7 28 46.7 Q24.4 46.7 20.75 45.45 Q17.1 44.2 14.8 41.6 L14.6 41.8 L18.4 62.2 Z"
/><glyph unicode="1" horiz-adv-x="55.6" d="M35.6 0 L35.6 70.9 L29.1 70.9 Q28.4 66.9 26.5 64.3 Q24.6 61.7 21.85 60.2 Q19.1 58.7 15.7 58.15 Q12.3 57.6 8.7 57.6 L8.7 50.8 L27.1 50.8 L27.1 0 Z"
/><glyph unicode="2" horiz-adv-x="55.6" d="M4.4 45.8 L12.9 45.8 Q12.8 49 13.55 52.15 Q14.3 55.3 16 57.8 Q17.7 60.3 20.35 61.85 Q23 63.4 26.7 63.4 Q29.5 63.4 32 62.5 Q34.5 61.6 36.35 59.9 Q38.2 58.2 39.3 55.85 Q40.4 53.5 40.4 50.6 Q40.4 46.9 39.25 44.1 Q38.1 41.3 35.85 38.9 Q33.6 36.5 30.2 34.15 Q26.8 31.8 22.3 29 Q18.6 26.8 15.2 24.3 Q11.8 21.8 9.1 18.5 Q6.4 15.2 4.65 10.75 Q2.9 6.3 2.4 0 L48.7 0 L48.7 7.5 L12.3 7.5 Q12.9 10.8 14.85 13.35 Q16.8 15.9 19.55 18.1 Q22.3 20.3 25.6 22.25 Q28.9 24.2 32.2 26.2 Q35.5 28.3 38.6 30.6 Q41.7 32.9 44.1 35.75 Q46.5 38.6 47.95 42.2 Q49.4 45.8 49.4 50.5 Q49.4 55.5 47.65 59.3 Q45.9 63.1 42.9 65.65 Q39.9 68.2 35.85 69.55 Q31.8 70.9 27.2 70.9 Q21.6 70.9 17.2 69 Q12.8 67.1 9.85 63.75 Q6.9 60.4 5.5 55.8 Q4.1 51.2 4.4 45.8 Z"
/><glyph unicode="3" horiz-adv-x="55.6" d="M21.8 40.5 L21.8 33.3 Q24.2 33.6 26.9 33.6 Q30.1 33.6 32.85 32.75 Q35.6 31.9 37.6 30.15 Q39.6 28.4 40.8 25.85 Q42 23.3 42 20 Q42 16.8 40.75 14.25 Q39.5 11.7 37.4 9.95 Q35.3 8.2 32.5 7.25 Q29.7 6.3 26.6 6.3 Q19.3 6.3 15.5 10.65 Q11.7 15 11.5 21.9 L3 21.9 Q2.9 16.4 4.55 12.1 Q6.2 7.8 9.3 4.85 Q12.4 1.9 16.8 0.4 Q21.2 -1.1 26.6 -1.1 Q31.6 -1.1 36.05 0.25 Q40.5 1.6 43.8 4.3 Q47.1 7 49.05 11.05 Q51 15.1 51 20.4 Q51 26.8 47.85 31.5 Q44.7 36.2 38.2 37.6 L38.2 37.8 Q42.4 39.7 45.2 43.4 Q48 47.1 48 51.9 Q48 56.8 46.35 60.4 Q44.7 64 41.8 66.3 Q38.9 68.6 34.95 69.75 Q31 70.9 26.4 70.9 Q21.1 70.9 17.05 69.2 Q13 67.5 10.3 64.5 Q7.6 61.5 6.15 57.3 Q4.7 53.1 4.5 48 L13 48 Q13 51.1 13.8 53.9 Q14.6 56.7 16.25 58.8 Q17.9 60.9 20.45 62.15 Q23 63.4 26.4 63.4 Q31.8 63.4 35.4 60.55 Q39 57.7 39 52 Q39 49.2 37.9 47 Q36.8 44.8 34.95 43.35 Q33.1 41.9 30.65 41.15 Q28.2 40.4 25.5 40.4 L23.7 40.4 Q23.2 40.4 22.7 40.4 Q22.3 40.4 21.8 40.5 Z"
/><glyph unicode="4" horiz-adv-x="55.6" d="M33.9 24.1 L10.1 24.1 L33.7 58.8 L33.9 58.8 ZM41.9 24.1 L41.9 70.9 L35.1 70.9 L2.8 24.8 L2.8 16.6 L33.9 16.6 L33.9 0 L41.9 0 L41.9 16.6 L51.5 16.6 L51.5 24.1 Z"
/><glyph unicode="s" horiz-adv-x="50.0" d="M11.6 16.3 L3.1 16.3 Q3.3 11.5 5.1 8.15 Q6.9 4.8 9.9 2.75 Q12.9 0.7 16.8 -0.2 Q20.7 -1.1 25 -1.1 Q28.9 -1.1 32.85 -0.35 Q36.8 0.4 39.95 2.3 Q43.1 4.2 45.05 7.3 Q47 10.4 47 15.1 Q47 18.8 45.55 21.3 Q44.1 23.8 41.75 25.45 Q39.4 27.1 36.35 28.1 Q33.3 29.1 30.1 29.8 Q27.1 30.5 24.1 31.15 Q21.1 31.8 18.7 32.75 Q16.3 33.7 14.75 35.15 Q13.2 36.6 13.2 38.8 Q13.2 40.8 14.2 42.05 Q15.2 43.3 16.8 44.05 Q18.4 44.8 20.35 45.1 Q22.3 45.4 24.2 45.4 Q26.3 45.4 28.35 44.95 Q30.4 44.5 32.1 43.5 Q33.8 42.5 34.9 40.85 Q36 39.2 36.2 36.7 L44.7 36.7 Q44.4 41.4 42.7 44.55 Q41 47.7 38.15 49.55 Q35.3 51.4 31.6 52.15 Q27.9 52.9 23.5 52.9 Q20.1 52.9 16.65 52.05 Q13.2 51.2 10.45 49.45 Q7.7 47.7 5.95 44.9 Q4.2 42.1 4.2 38.2 Q4.2 33.2 6.7 30.4 Q9.2 27.6 12.95 26.05 Q16.7 24.5 21.1 23.65 Q25.5 22.8 29.25 21.75 Q33 20.7 35.5 19 Q38 17.3 38 14 Q38 11.6 36.8 10.05 Q35.6 8.5 33.75 7.7 Q31.9 6.9 29.7 6.6 Q27.5 6.3 25.5 6.3 Q22.9 6.3 20.45 6.8 Q18 7.3 16.05 8.45 Q14.1 9.6 12.9 11.55 Q11.7 13.5 11.6 16.3 Z"
/><glyph unicode="h" horiz-adv-x="55.6" d="M6.4 71.4 L6.4 0 L14.9 0 L14.9 29.2 Q14.9 32.7 15.85 35.65 Q16.8 38.6 18.7 40.8 Q20.6 43 23.45 44.2 Q26.3 45.4 30.2 45.4 Q35.1 45.4 37.9 42.6 Q40.7 39.8 40.7 35 L40.7 0 L49.2 0 L49.2 34 Q49.2 38.2 48.35 41.65 Q47.5 45.1 45.4 47.6 Q43.3 50.1 39.9 51.5 Q36.5 52.9 31.4 52.9 Q29.1 52.9 26.65 52.4 Q24.2 51.9 21.95 50.85 Q19.7 49.8 17.9 48.15 Q16.1 46.5 15.1 44.1 L14.9 44.1 L14.9 71.4 Z"
/><glyph unicode="c" horiz-adv-x="53.7" d="M41.5 35.1 L50.3 35.1 Q49.8 39.7 47.9 43.05 Q46 46.4 43.05 48.6 Q40.1 50.8 36.25 51.85 Q32.4 52.9 28 52.9 Q21.9 52.9 17.3 50.75 Q12.7 48.6 9.65 44.85 Q6.6 41.1 5.1 36.05 Q3.6 31 3.6 25.2 Q3.6 19.4 5.15 14.55 Q6.7 9.7 9.75 6.2 Q12.8 2.7 17.35 0.8 Q21.9 -1.1 27.8 -1.1 Q37.7 -1.1 43.45 4.1 Q49.2 9.3 50.6 18.9 L41.9 18.9 Q41.1 12.9 37.55 9.6 Q34 6.3 27.7 6.3 Q23.7 6.3 20.8 7.9 Q17.9 9.5 16.1 12.15 Q14.3 14.8 13.45 18.2 Q12.6 21.6 12.6 25.2 Q12.6 29.1 13.4 32.75 Q14.2 36.4 16.05 39.2 Q17.9 42 21 43.7 Q24.1 45.4 28.7 45.4 Q34.1 45.4 37.3 42.7 Q40.5 40 41.5 35.1 Z"
/><glyph unicode="o" horiz-adv-x="57.4" d="M12.6 25.8 Q12.6 30.5 13.85 34.15 Q15.1 37.8 17.3 40.3 Q19.5 42.8 22.45 44.1 Q25.4 45.4 28.7 45.4 Q32 45.4 34.95 44.1 Q37.9 42.8 40.1 40.3 Q42.3 37.8 43.55 34.15 Q44.8 30.5 44.8 25.8 Q44.8 21.1 43.55 17.45 Q42.3 13.8 40.1 11.35 Q37.9 8.9 34.95 7.6 Q32 6.3 28.7 6.3 Q25.4 6.3 22.45 7.6 Q19.5 8.9 17.3 11.35 Q15.1 13.8 13.85 17.45 Q12.6 21.1 12.6 25.8 ZM3.6 25.8 Q3.6 20.1 5.2 15.2 Q6.8 10.3 10 6.65 Q13.2 3 17.9 0.95 Q22.6 -1.1 28.7 -1.1 Q34.9 -1.1 39.55 0.95 Q44.2 3 47.4 6.65 Q50.6 10.3 52.2 15.2 Q53.8 20.1 53.8 25.8 Q53.8 31.5 52.2 36.45 Q50.6 41.4 47.4 45.05 Q44.2 48.7 39.55 50.8 Q34.9 52.9 28.7 52.9 Q22.6 52.9 17.9 50.8 Q13.2 48.7 10 45.05 Q6.8 41.4 5.2 36.45 Q3.6 31.5 3.6 25.8 Z"
/><glyph unicode="p" horiz-adv-x="59.3" d="M6.7 51.7 L6.7 -19.7 L15.2 -19.7 L15.2 6.9 L15.4 6.9 Q16.8 4.6 18.85 3.05 Q20.9 1.5 23.2 0.6 Q25.5 -0.3 27.85 -0.7 Q30.2 -1.1 32.1 -1.1 Q38 -1.1 42.45 1 Q46.9 3.1 49.85 6.75 Q52.8 10.4 54.25 15.3 Q55.7 20.2 55.7 25.7 Q55.7 31.2 54.2 36.1 Q52.7 41 49.75 44.75 Q46.8 48.5 42.35 50.7 Q37.9 52.9 31.9 52.9 Q26.5 52.9 22 50.95 Q17.5 49 15.4 44.7 L15.2 44.7 L15.2 51.7 ZM46.7 26.3 Q46.7 22.5 45.9 18.9 Q45.1 15.3 43.3 12.5 Q41.5 9.7 38.55 8 Q35.6 6.3 31.2 6.3 Q26.8 6.3 23.7 7.95 Q20.6 9.6 18.65 12.3 Q16.7 15 15.8 18.55 Q14.9 22.1 14.9 25.9 Q14.9 29.5 15.75 33 Q16.6 36.5 18.5 39.25 Q20.4 42 23.4 43.7 Q26.4 45.4 30.7 45.4 Q34.8 45.4 37.85 43.8 Q40.9 42.2 42.85 39.5 Q44.8 36.8 45.75 33.35 Q46.7 29.9 46.7 26.3 Z"
/><glyph unicode="E" horiz-adv-x="61.1" d="M7.8 71.4 L7.8 0 L57.4 0 L57.4 8 L17.3 8 L17.3 32.7 L54.4 32.7 L54.4 40.7 L17.3 40.7 L17.3 63.4 L57.1 63.4 L57.1 71.4 Z"
/><glyph unicode="r" horiz-adv-x="33.3" d="M6.1 51.7 L6.1 0 L14.6 0 L14.6 23 Q14.6 28 15.6 31.85 Q16.6 35.7 18.8 38.4 Q21 41.1 24.6 42.5 Q28.2 43.9 33.3 43.9 L33.3 52.9 Q26.4 53.1 21.9 50.1 Q17.4 47.1 14.3 40.8 L14.1 40.8 L14.1 51.7 Z"
/><glyph unicode="n" horiz-adv-x="55.6" d="M6.4 51.7 L6.4 0 L14.9 0 L14.9 29.2 Q14.9 32.7 15.85 35.65 Q16.8 38.6 18.7 40.8 Q20.6 43 23.45 44.2 Q26.3 45.4 30.2 45.4 Q35.1 45.4 37.9 42.6 Q40.7 39.8 40.7 35 L40.7 0 L49.2 0 L49.2 34 Q49.2 38.2 48.35 41.65 Q47.5 45.1 45.4 47.6 Q43.3 50.1 39.9 51.5 Q36.5 52.9 31.4 52.9 Q19.9 52.9 14.6 43.5 L14.4 43.5 L14.4 51.7 Z"
/><glyph unicode="i" horiz-adv-x="22.2" d="M15.4 61 L15.4 71.4 L6.9 71.4 L6.9 61 ZM6.9 51.7 L6.9 0 L15.4 0 L15.4 51.7 Z"
/><glyph unicode="a" horiz-adv-x="53.7" d="M52.2 0.2 Q50 -1.1 46.1 -1.1 Q42.8 -1.1 40.85 0.75 Q38.9 2.6 38.9 6.8 Q35.4 2.6 30.75 0.75 Q26.1 -1.1 20.7 -1.1 Q17.2 -1.1 14.05 -0.3 Q10.9 0.5 8.6 2.2 Q6.3 3.9 4.95 6.65 Q3.6 9.4 3.6 13.3 Q3.6 17.7 5.1 20.5 Q6.6 23.3 9.05 25.05 Q11.5 26.8 14.65 27.7 Q17.8 28.6 21.1 29.2 Q24.6 29.9 27.75 30.25 Q30.9 30.6 33.3 31.25 Q35.7 31.9 37.1 33.15 Q38.5 34.4 38.5 36.8 Q38.5 39.6 37.45 41.3 Q36.4 43 34.75 43.9 Q33.1 44.8 31.05 45.1 Q29 45.4 27 45.4 Q21.6 45.4 18 43.35 Q14.4 41.3 14.1 35.6 L5.6 35.6 Q5.8 40.4 7.6 43.7 Q9.4 47 12.4 49.05 Q15.4 51.1 19.25 52 Q23.1 52.9 27.5 52.9 Q31 52.9 34.45 52.4 Q37.9 51.9 40.7 50.35 Q43.5 48.8 45.2 46 Q46.9 43.2 46.9 38.7 L46.9 12.1 Q46.9 9.1 47.25 7.7 Q47.6 6.3 49.6 6.3 Q50.7 6.3 52.2 6.8 ZM38.4 26.7 Q36.8 25.5 34.2 24.95 Q31.6 24.4 28.75 24.05 Q25.9 23.7 23 23.25 Q20.1 22.8 17.8 21.8 Q15.5 20.8 14.05 18.95 Q12.6 17.1 12.6 13.9 Q12.6 11.8 13.45 10.35 Q14.3 8.9 15.65 8 Q17 7.1 18.8 6.7 Q20.6 6.3 22.6 6.3 Q26.8 6.3 29.8 7.45 Q32.8 8.6 34.7 10.35 Q36.6 12.1 37.5 14.15 Q38.4 16.2 38.4 18 Z"
/><glyph unicode="t" horiz-adv-x="31.5" d="M18.2 51.7 L18.2 67.2 L9.7 67.2 L9.7 51.7 L0.9 51.7 L0.9 44.2 L9.7 44.2 L9.7 11.3 Q9.7 7.7 10.4 5.5 Q11.1 3.3 12.55 2.1 Q14 0.9 16.35 0.45 Q18.7 0 22 0 L28.5 0 L28.5 7.5 L24.6 7.5 Q22.6 7.5 21.35 7.65 Q20.1 7.8 19.4 8.3 Q18.7 8.8 18.45 9.7 Q18.2 10.6 18.2 12.1 L18.2 44.2 L28.5 44.2 L28.5 51.7 Z"
/><glyph unicode="l" horiz-adv-x="22.2" d="M6.9 71.4 L6.9 0 L15.4 0 L15.4 71.4 Z"
/><glyph unicode="v" horiz-adv-x="50.0" d="M29.7 0 L48.6 51.7 L39.7 51.7 L25.6 8.6 L25.4 8.6 L10.9 51.7 L1.4 51.7 L20.6 0 Z"
/></font
></defs
><g style="fill:white; stroke:white;"
><rect x="0" y="0" width="560" style="clip-path:url(#clipPath1); stroke:none;" height="420"
/></g
><g style="fill:white; text-rendering:optimizeSpeed; color-rendering:optimizeSpeed; image-rendering:optimizeSpeed; shape-rendering:crispEdges; color-interpolation:sRGB; stroke:white;"
><rect x="0" width="560" height="420" y="0" style="stroke:none;"
/><path style="stroke:none;" d="M73 374 L507 374 L507 31.5 L73 31.5 Z"
/></g
><g style="fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; stroke-linejoin:round; color-interpolation:linearRGB; stroke:rgb(38,38,38); stroke-width:0.5;"
><line y2="374" style="fill:none;" x1="73" x2="507" y1="374"
/><line y2="31.5" style="fill:none;" x1="73" x2="507" y1="31.5"
/><line y2="369.66" style="fill:none;" x1="73" x2="73" y1="374"
/><line y2="369.66" style="fill:none;" x1="117.2857" x2="117.2857" y1="374"
/><line y2="369.66" style="fill:none;" x1="161.5714" x2="161.5714" y1="374"
/><line y2="369.66" style="fill:none;" x1="205.8571" x2="205.8571" y1="374"
/><line y2="369.66" style="fill:none;" x1="250.1429" x2="250.1429" y1="374"
/><line y2="369.66" style="fill:none;" x1="294.4286" x2="294.4286" y1="374"
/><line y2="369.66" style="fill:none;" x1="338.7143" x2="338.7143" y1="374"
/><line y2="369.66" style="fill:none;" x1="383" x2="383" y1="374"
/><line y2="369.66" style="fill:none;" x1="427.2857" x2="427.2857" y1="374"
/><line y2="369.66" style="fill:none;" x1="471.5714" x2="471.5714" y1="374"
/><line y2="35.84" style="fill:none;" x1="73" x2="73" y1="31.5"
/><line y2="35.84" style="fill:none;" x1="117.2857" x2="117.2857" y1="31.5"
/><line y2="35.84" style="fill:none;" x1="161.5714" x2="161.5714" y1="31.5"
/><line y2="35.84" style="fill:none;" x1="205.8571" x2="205.8571" y1="31.5"
/><line y2="35.84" style="fill:none;" x1="250.1429" x2="250.1429" y1="31.5"
/><line y2="35.84" style="fill:none;" x1="294.4286" x2="294.4286" y1="31.5"
/><line y2="35.84" style="fill:none;" x1="338.7143" x2="338.7143" y1="31.5"
/><line y2="35.84" style="fill:none;" x1="383" x2="383" y1="31.5"
/><line y2="35.84" style="fill:none;" x1="427.2857" x2="427.2857" y1="31.5"
/><line y2="35.84" style="fill:none;" x1="471.5714" x2="471.5714" y1="31.5"
/></g
><g transform="translate(73,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-3" xml:space="preserve" y="10" style="stroke:none;"
>0</text
></g
><g transform="translate(117.2857,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-3" xml:space="preserve" y="10" style="stroke:none;"
>5</text
></g
><g transform="translate(161.5714,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-6" xml:space="preserve" y="10" style="stroke:none;"
>10</text
></g
><g transform="translate(205.8571,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-6" xml:space="preserve" y="10" style="stroke:none;"
>15</text
></g
><g transform="translate(250.1429,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-6" xml:space="preserve" y="10" style="stroke:none;"
>20</text
></g
><g transform="translate(294.4286,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-6" xml:space="preserve" y="10" style="stroke:none;"
>25</text
></g
><g transform="translate(338.7143,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-6" xml:space="preserve" y="10" style="stroke:none;"
>30</text
></g
><g transform="translate(383,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-6" xml:space="preserve" y="10" style="stroke:none;"
>35</text
></g
><g transform="translate(427.2857,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-6" xml:space="preserve" y="10" style="stroke:none;"
>40</text
></g
><g transform="translate(471.5714,378)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-6" xml:space="preserve" y="10" style="stroke:none;"
>45</text
></g
><g transform="translate(290.0002,391.5)" style="font-size:11px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-19" xml:space="preserve" y="11" style="stroke:none;"
>Epochs</text
></g
><g style="fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; stroke-linejoin:round; color-interpolation:linearRGB; stroke:rgb(38,38,38); stroke-width:0.5;"
><line y2="31.5" style="fill:none;" x1="73" x2="73" y1="374"
/><line y2="31.5" style="fill:none;" x1="507" x2="507" y1="374"
/><line y2="374" style="fill:none;" x1="73" x2="77.34" y1="374"
/><line y2="331.1875" style="fill:none;" x1="73" x2="77.34" y1="331.1875"
/><line y2="288.375" style="fill:none;" x1="73" x2="77.34" y1="288.375"
/><line y2="245.5625" style="fill:none;" x1="73" x2="77.34" y1="245.5625"
/><line y2="202.75" style="fill:none;" x1="73" x2="77.34" y1="202.75"
/><line y2="159.9375" style="fill:none;" x1="73" x2="77.34" y1="159.9375"
/><line y2="117.125" style="fill:none;" x1="73" x2="77.34" y1="117.125"
/><line y2="74.3125" style="fill:none;" x1="73" x2="77.34" y1="74.3125"
/><line y2="31.5" style="fill:none;" x1="73" x2="77.34" y1="31.5"
/><line y2="374" style="fill:none;" x1="507" x2="502.66" y1="374"
/><line y2="331.1875" style="fill:none;" x1="507" x2="502.66" y1="331.1875"
/><line y2="288.375" style="fill:none;" x1="507" x2="502.66" y1="288.375"
/><line y2="245.5625" style="fill:none;" x1="507" x2="502.66" y1="245.5625"
/><line y2="202.75" style="fill:none;" x1="507" x2="502.66" y1="202.75"
/><line y2="159.9375" style="fill:none;" x1="507" x2="502.66" y1="159.9375"
/><line y2="117.125" style="fill:none;" x1="507" x2="502.66" y1="117.125"
/><line y2="74.3125" style="fill:none;" x1="507" x2="502.66" y1="74.3125"
/><line y2="31.5" style="fill:none;" x1="507" x2="502.66" y1="31.5"
/></g
><g transform="translate(69,374)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12" xml:space="preserve" y="3.5" style="stroke:none;"
>10</text
></g
><g transform="translate(69,331.1875)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12" xml:space="preserve" y="3.5" style="stroke:none;"
>15</text
></g
><g transform="translate(69,288.375)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12" xml:space="preserve" y="3.5" style="stroke:none;"
>20</text
></g
><g transform="translate(69,245.5625)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12" xml:space="preserve" y="3.5" style="stroke:none;"
>25</text
></g
><g transform="translate(69,202.75)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12" xml:space="preserve" y="3.5" style="stroke:none;"
>30</text
></g
><g transform="translate(69,159.9375)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12" xml:space="preserve" y="3.5" style="stroke:none;"
>35</text
></g
><g transform="translate(69,117.125)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12" xml:space="preserve" y="3.5" style="stroke:none;"
>40</text
></g
><g transform="translate(69,74.3125)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12" xml:space="preserve" y="3.5" style="stroke:none;"
>45</text
></g
><g transform="translate(69,31.5)" style="font-size:10px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12" xml:space="preserve" y="3.5" style="stroke:none;"
>50</text
></g
><g transform="translate(54.5,202.7498) rotate(-90)" style="font-size:11px; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB; stroke:rgb(38,38,38);"
><text x="-12.5" xml:space="preserve" y="-3" style="stroke:none;"
>Error</text
></g
><g style="stroke-linecap:butt; fill:rgb(0,114,189); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; stroke-linejoin:round; color-interpolation:linearRGB; stroke:rgb(0,114,189); stroke-width:3;"
><path d="M87.0045 31.4592 L90.7143 80.0719 L99.5714 109.8195 L108.4286 121.4697 L117.2857 139.4269 L126.1429 147.696 L135 152.6889 L143.8571 158.8031 L152.7143 167.2649 L161.5714 171.9776 L170.4286 173.5718 L179.2857 180.8773 L188.1429 181.6832 L197 185.8528 L205.8571 191.1961 L214.7143 193.5262 L223.5714 195.3306 L232.4286 196.5044 L241.2857 196.1015 L250.1429 197.7833 L259 197.7658 L267.8571 197.5731 L276.7143 193.351 L285.5714 193.824 L294.4286 196.592 L303.2857 191.4239 L312.1429 200.9893 L321 203.7749 L329.8571 210.1694 L338.7143 210.9052 L347.5714 213.498 L356.4286 214.2864 L365.2857 216.3712 L374.1429 218.6136 L383 220.9086 L391.8571 221.9072 L400.7143 216.8442 L409.5714 220.5758 L418.4286 223.5891 L427.2857 226.9177 L436.1429 269.3141 L445 284.2405 L453.8571 296.5389 L462.7143 304.4926 L471.5714 306.3322 L480.4286 309.3279 L489.2857 312.096 L498.1429 312.9719 L507 317.8948" style="fill:none; fill-rule:evenodd;"
/><path d="M81.8571 132.9098 L90.7143 167.0109 L99.5714 185.625 L108.4286 200.8141 L117.2857 214.0674 L126.1429 216.0033 L135 224.3424 L143.8571 221.0663 L152.7143 236.4044 L161.5714 232.6815 L170.4286 230.8946 L179.2857 242.8076 L188.1429 245.3391 L197 247.5728 L205.8571 252.487 L214.7143 235.213 L223.5714 230.15 L232.4286 247.1261 L241.2857 242.8076 L250.1429 239.2337 L259 240.7228 L267.8571 238.638 L276.7143 230.4478 L285.5714 240.2761 L294.4286 240.425 L303.2857 235.213 L312.1429 243.2543 L321 242.212 L329.8571 252.9337 L338.7143 243.5522 L347.5714 237.8935 L356.4286 240.7228 L365.2857 253.0826 L374.1429 239.5315 L383 242.5098 L391.8571 239.8293 L400.7143 238.3402 L409.5714 240.8717 L418.4286 234.9152 L427.2857 244.8924 L436.1429 281.3761 L445 288.375 L453.8571 288.5239 L462.7143 287.0348 L471.5714 287.9283 L480.4286 287.0348 L489.2857 283.9076 L498.1429 284.95 L507 283.7587" style="fill:none; fill-rule:evenodd; stroke:rgb(217,83,25);"
/></g
><g style="fill:white; text-rendering:optimizeSpeed; color-rendering:optimizeSpeed; image-rendering:optimizeSpeed; shape-rendering:crispEdges; color-interpolation:sRGB; stroke:white;"
><path style="stroke:none;" d="M438 68 L438 41.5 L497 41.5 L497 68 Z"
/></g
><g transform="translate(475,49)" style="font-size:9px; text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB;"
><text x="0" xml:space="preserve" y="3.5" style="stroke:none;"
>train</text
></g
><g style="stroke-linecap:butt; fill:rgb(0,114,189); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; stroke-linejoin:round; color-interpolation:linearRGB; stroke:rgb(0,114,189); stroke-width:3;"
><line y2="49" style="fill:none;" x1="442" x2="472" y1="49"
/></g
><g transform="translate(475,60.5)" style="font-size:9px; text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; font-family:'Helvetica Neue'; color-interpolation:linearRGB;"
><text x="0" xml:space="preserve" y="3.5" style="stroke:none;"
>val</text
></g
><g style="stroke-linecap:butt; fill:rgb(217,83,25); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; stroke-linejoin:round; color-interpolation:linearRGB; stroke:rgb(217,83,25); stroke-width:3;"
><line y2="60.5" style="fill:none;" x1="442" x2="472" y1="60.5"
/></g
><g style="stroke-linecap:butt; fill:rgb(38,38,38); text-rendering:geometricPrecision; color-rendering:optimizeQuality; image-rendering:optimizeQuality; color-interpolation:linearRGB; stroke:rgb(38,38,38); stroke-width:0.5;"
><path d="M438 68 L438 41.5 L497 41.5 L497 68 Z" style="fill:none; fill-rule:evenodd;"
/></g
></g
></svg
>
......@@ -20,6 +20,7 @@
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import encoding
import sphinx_rtd_theme
......@@ -47,7 +48,7 @@ extensions = [
napoleon_use_ivar = True
googleanalytics_id = 'UA-90545585-1'
googleanalytics_id = 'UA-54746507-1'
googleanalytics_enabled = True
# Add any paths that contain templates here, relative to this directory.
......@@ -56,8 +57,8 @@ templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
source_suffix = ['.rst', '.md']
#source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
......@@ -72,7 +73,7 @@ author = 'Hang Zhang'
# built documents.
#
# The short X.Y version.
version = 'master (0.0.1)'
version = 'master (' + encoding.__version__ + ')'
# The full version, including alpha/beta/rc tags.
# TODO: verify this works as expected
release = 'master'
......@@ -124,6 +125,7 @@ html_static_path = ['_static']
html_context = {
'css_files': [
'https://fonts.googleapis.com/css?family=Lato',
'_static/css/encoding.css'
],
}
#'_static/css/hangzh.css'
......
.. role:: hidden
:class: hidden-section
Dilated Networks
================
We provide correct dilated pre-trained ResNet and DenseNet for semantic segmentation.
For dilation of ResNet, we replace the stride-2 Conv3x3 at the beginning of certain stages and update the dilation of the conv layers afterwards.
For dilation of DenseNet, we provide DilatedAvgPool2d that handles the dilation of the transition layers, then update the dilation of the conv layers afterwards.
All provided models have been verified.
.. automodule:: encoding.dilated
.. currentmodule:: encoding.dilated
ResNet
------
:hidden:`ResNet`
~~~~~~~~~~~~~~~~
.. autoclass:: ResNet
:members:
:hidden:`resnet18`
~~~~~~~~~~~~~~~~~~
.. autofunction:: resnet18
:hidden:`resnet34`
~~~~~~~~~~~~~~~~~~
.. autofunction:: resnet34
:hidden:`resnet50`
~~~~~~~~~~~~~~~~~~
.. autofunction:: resnet50
:hidden:`resnet101`
~~~~~~~~~~~~~~~~~~~
.. autofunction:: resnet101
:hidden:`resnet152`
~~~~~~~~~~~~~~~~~~~
.. autofunction:: resnet152
DenseNet
--------
:hidden:`DenseNet`
~~~~~~~~~~~~~~~~~~
.. autoclass:: DenseNet
:members:
:hidden:`densenet161`
~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: densenet161
:hidden:`densenet121`
~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: densenet121
:hidden:`densenet169`
~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: densenet169
:hidden:`densenet201`
~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: densenet201
.. role:: hidden
:class: hidden-section
Encoding Layer
==============
.. automodule:: encoding
My NN Layers
============
Modules
-------
.. currentmodule:: encoding
.. currentmodule:: encoding.nn
:hidden:`Encoding`
~~~~~~~~~~~~~~~~~~
......@@ -18,6 +15,24 @@ Modules
.. autoclass:: Encoding
:members:
:hidden:`Inspiration`
~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: Inspiration
:members:
:hidden:`DilatedAvgPool2d`
~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: DilatedAvgPool2d
:members:
:hidden:`GramMatrix`
~~~~~~~~~~~~~~~~~~~~
.. autoclass:: GramMatrix
:members:
:hidden:`Aggregate`
~~~~~~~~~~~~~~~~~~~
......@@ -27,23 +42,22 @@ Modules
Functions
---------
.. currentmodule:: encoding.functions
:hidden:`aggregate`
~~~~~~~~~~~~~~~~~~~
.. autoclass:: aggregate
:members:
.. autofunction:: aggregate
:hidden:`scaledL2`
~~~~~~~~~~~~~~~~~~~
.. autoclass:: scaledL2
:members:
.. autofunction:: scaledL2
:hidden:`residual`
~~~~~~~~~~~~~~~~~~~
.. autoclass:: residual
:members:
.. autofunction:: residual
:hidden:`assign`
......
MSG-Net Style Transfer Example
==============================
.. image:: https://raw.githubusercontent.com/zhanghang1989/MSG-Net/master/images/figure1.jpg
:width: 55%
:align: left
We provide a PyTorch implementation of `MSG-Net`_ and `Neural Style`_ in the `GitHub repo <https://github.com/zhanghang1989/PyTorch-Style-Transfer>`_.
We also provide `Torch <https://github.com/zhanghang1989/MSG-Net/>`_ and
`MXNet <https://github.com/zhanghang1989/MXNet-Gluon-Style-Transfer>`_ implementations.
Table of Contents
-----------------
- Real-time Style Transfer using `MSG-Net`_
* `Stylize Images using Pre-trained Model`_
* `Train Your Own MSG-Net Model`_
- `Neural Style`_
MSG-Net
-------
.. note::
Hang Zhang, and Kristin Dana. "Multi-style Generative Network for Real-time Transfer."::
@article{zhang2017multistyle,
title={Multi-style Generative Network for Real-time Transfer},
author={Zhang, Hang and Dana, Kristin},
journal={arXiv preprint arXiv:1703.06953},
year={2017}
}
Stylize Images Using Pre-trained Model
--------------------------------------
- Clone the repo and download the pre-trained model::
git clone git@github.com:zhanghang1989/PyTorch-Style-Transfer.git
cd PyTorch-Style-Transfer/experiments
bash models/download_model.sh
- Camera Demo::
python camera_demo.py demo --model models/9styles.model
.. image:: https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/myimage.gif
- Test the model::
python main.py eval --content-image images/content/venice-boat.jpg --style-image images/9styles/candy.jpg --model models/9styles.model --content-size 1024
If you don't have a GPU, simply set ``--cuda=0``. For a different style, set ``--style-image path/to/style``.
If you would like to stylize your own photo, change the ``--content-image path/to/your/photo``. More options:
* ``--content-image``: path to content image you want to stylize.
* ``--style-image``: path to style image (typically covered during the training).
* ``--model``: path to the pre-trained model to be used for stylizing the image.
* ``--output-image``: path for saving the output image.
* ``--content-size``: the content image size to test on.
* ``--cuda``: set it to 1 for running on GPU, 0 for CPU.
.. raw:: html
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/1.jpg" width="260px" /> <img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/2.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/3.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/4.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/5.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/6.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/7.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/8.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/9.jpg" width="260px" />
Train Your Own MSG-Net Model
----------------------------
- Download the dataset::
bash dataset/download_dataset.sh
- Train the model::
python main.py train --epochs 4
If you would like to customize styles, set ``--style-folder path/to/your/styles``. More options:
* ``--style-folder``: path to the folder style images.
* ``--vgg-model-dir``: path to folder where the vgg model will be downloaded.
* ``--save-model-dir``: path to folder where trained model will be saved.
* ``--cuda``: set it to 1 for running on GPU, 0 for CPU.
Neural Style
-------------
`Image Style Transfer Using Convolutional Neural Networks <http://www.cv-foundation.org/openaccess/content_cvpr_2016/papers/Gatys_Image_Style_Transfer_CVPR_2016_paper.pdf>`_ by Leon A. Gatys, Alexander S. Ecker, and Matthias Bethge::
python main.py optim --content-image images/content/venice-boat.jpg --style-image images/9styles/candy.jpg
* ``--content-image``: path to content image.
* ``--style-image``: path to style image.
* ``--output-image``: path for saving the output image.
* ``--content-size``: the content image size to test on.
* ``--style-size``: the style image size to test on.
* ``--cuda``: set it to 1 for running on GPU, 0 for CPU.
.. raw:: html
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/g1.jpg" width="260px" /> <img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/g2.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/g3.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/g4.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/g5.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/g6.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/g7.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/g8.jpg" width="260px" />
<img src ="https://raw.githubusercontent.com/zhanghang1989/PyTorch-Style-Transfer/master/images/g9.jpg" width="260px" />
Deep TEN: Deep Texture Encoding Network Example
===============================================
.. image:: http://hangzh.com/figure/cvpr17.svg
:width: 100%
:align: left
In this section, we show an example of training/testing Encoding-Net for texture recognition on MINC-2500 dataset. Comparing to original Torch implementation, we use *different learning rate* for pre-trained base network and encoding layer (10x), disable color jittering after reducing lr and adopt much *smaller training image size* (224 instead of 352).
.. note::
**Make Sure** to `Install PyTorch Encoding <../notes/compile.html>`_ First.
Test Pre-trained Model
----------------------
- Clone the GitHub repo (I am sure you did during the installation)::
git clone git@github.com:zhanghang1989/PyTorch-Encoding.git
- Download the `MINC-2500 <http://opensurfaces.cs.cornell.edu/publications/minc/>`_ dataset to ``$HOME/data/minc`` folder. Download the pre-trained model (training `curve`_ as below, pre-trained on the train-1 split using a single training size of 224, with an error rate of :math:`19.98\%` using a single crop on the test-1 set)::
cd PyTorch-Encoding/experiments
bash model/download_models.sh
.. _curve:
.. image:: ../_static/img/deep_ten_curve.svg
:width: 70%
- Test pre-trained model on MINC-2500::
>>> python main.py --dataset minc --model encodingnet --resume model/minc.pth.tar --eval
        # Terminal Output:
#[======================================== 23/23 ===================================>...] Step: 104ms | Tot: 3s256ms | Loss: 0.719 | Err: 19.983% (1149/5750)
Train Your Own Model
--------------------
- Example training command::
python main.py --dataset minc --model encodingnet --batch-size 64 --lr 0.01 --epochs 60
- Training options::
-h, --help show this help message and exit
--dataset DATASET training dataset (default: cifar10)
--model MODEL network model type (default: densenet)
--widen N widen factor of the network (default: 4)
--batch-size N batch size for training (default: 128)
--test-batch-size N batch size for testing (default: 1000)
--epochs N number of epochs to train (default: 300)
--start_epoch N the epoch number to start (default: 0)
--lr LR learning rate (default: 0.1)
--momentum M SGD momentum (default: 0.9)
--weight-decay M SGD weight decay (default: 1e-4)
--no-cuda disables CUDA training
--plot matplotlib
--seed S random seed (default: 1)
--resume RESUME put the path to resuming file if needed
--checkname set the checkpoint name
--eval evaluating
.. todo::
Provide example code for extracting features.
Extending the Software
----------------------
This code includes an integrated pipeline and some visualization tools (progress bar, real-time training curve plots). It is easy to use and extend for your own model or dataset:
- Write your own Dataloader ``mydataset.py`` to ``dataset/`` folder
- Write your own Model ``mymodel.py`` to ``model/`` folder
- Run the program::
python main.py --dataset mydataset --model mymodel
Citation
--------
.. note::
* Hang Zhang, Jia Xue, and Kristin Dana. "Deep TEN: Texture Encoding Network." *The IEEE Conference on Computer Vision and Pattern Recognition (CVPR) 2017*::
@InProceedings{Zhang_2017_CVPR,
author = {Zhang, Hang and Xue, Jia and Dana, Kristin},
title = {Deep TEN: Texture Encoding Network},
booktitle = {The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
month = {July},
year = {2017}
}
.. role:: hidden
:class: hidden-section
Other Functions
===============
.. automodule:: encoding.functions
.. currentmodule:: encoding.functions
:hidden:`dilatedavgpool2d`
~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: dilatedavgpool2d
.. Encoding documentation master file, created by
sphinx-quickstart on Fri Dec 23 13:31:47 2016.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
.. Encoding documentation master file
:github_url: https://github.com/zhanghang1989/PyTorch-Encoding
Encoding documentation
Encoding Documentation
======================
PyTorch-Encoding is an optimized PyTorch package using GPU, including Encoding Layer, Multi-GPU Synchronized Batch Normalization.
Created by `Hang Zhang <http://hangzh.com/>`_
PyTorch-Encoding is an optimized PyTorch package with CUDA backend, including Encoding Layer, Multi-GPU Synchronized Batch Normalization and useful util functions. Example systems are also provided in `experiments section <experiments/texture.html>`_. We hope this software will accelerate your research, please cite our `papers <notes/compile.html>`_.
.. toctree::
:glob:
......@@ -18,13 +17,23 @@ PyTorch-Encoding is an optimized PyTorch package using GPU, including Encoding L
notes/*
.. toctree::
:maxdepth: 3
:maxdepth: 1
:caption: Package Reference
encoding
syncbn
parallel
dilated
nn
functions
utils
.. toctree::
:glob:
:maxdepth: 1
:caption: Experiment Systems
experiments/*
Indices and tables
==================
......
.. role:: hidden
:class: hidden-section
Other NN Layers
===============
.. automodule:: encoding.nn
Customized Layers
-----------------
:hidden:`Normalize`
~~~~~~~~~~~~~~~~~~~
.. autoclass:: Normalize
:members:
:hidden:`View`
~~~~~~~~~~~~~~
.. autoclass:: View
:members:
Standard Layers
---------------
Standard Layers as in PyTorch but in :class:`encoding.parallel.SelfDataParallel` mode. Use together with SyncBN.
:hidden:`Conv1d`
~~~~~~~~~~~~~~~~
.. autoclass:: Conv1d
:members:
:hidden:`Conv2d`
~~~~~~~~~~~~~~~~
.. autoclass:: Conv2d
:members:
:hidden:`ConvTranspose2d`
~~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: ConvTranspose2d
:members:
:hidden:`ReLU`
~~~~~~~~~~~~~~
.. autoclass:: ReLU
:members:
:hidden:`Sigmoid`
~~~~~~~~~~~~~~~~~
.. autoclass:: Sigmoid
:members:
:hidden:`MaxPool2d`
~~~~~~~~~~~~~~~~~~~
.. autoclass:: MaxPool2d
:members:
:hidden:`AvgPool2d`
~~~~~~~~~~~~~~~~~~~
.. autoclass:: AvgPool2d
:members:
:hidden:`AdaptiveAvgPool2d`
~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: AdaptiveAvgPool2d
:members:
:hidden:`Dropout2d`
~~~~~~~~~~~~~~~~~~~
.. autoclass:: Dropout2d
:members:
:hidden:`Linear`
~~~~~~~~~~~~~~~~
.. autoclass:: Linear
:members:
......@@ -16,17 +16,21 @@ Install PyTorch-Encoding
python setup.py install
* On MAC OSX::
* On Mac OSX::
MACOSX_DEPLOYMENT_TARGET=10.9 CC=clang CXX=clang++ python setup.py install
- Reference:
Hang Zhang, Jia Xue, and Kristin Dana. "Deep TEN: Texture Encoding Network." *The IEEE Conference on Computer Vision and Pattern Recognition (CVPR) 2017*::
@InProceedings{Zhang_2017_CVPR,
author = {Zhang, Hang and Xue, Jia and Dana, Kristin},
title = {Deep TEN: Texture Encoding Network},
booktitle = {The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
month = {July},
year = {2017}
}
.. note::
If using the code in your research, please cite our paper.
* Hang Zhang, Jia Xue, and Kristin Dana. "Deep TEN: Texture Encoding Network." *The IEEE Conference on Computer Vision and Pattern Recognition (CVPR) 2017*::
@InProceedings{Zhang_2017_CVPR,
author = {Zhang, Hang and Xue, Jia and Dana, Kristin},
title = {Deep TEN: Texture Encoding Network},
booktitle = {The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
month = {July},
year = {2017}
}
Implementing Synchronized Multi-GPU Batch Normalization
=======================================================
We will release the implementation detail of Multi-GPU Batch Normalization in later version.
Why Synchronize?
----------------
- Standard Implementation
How to Synchronize?
-------------------
- Forward and Backward Pass
- Synchronized DataParallel
- Cross GPU Autograd
Comparing Performance
---------------------
......@@ -7,10 +7,8 @@ Data Parallel
Current PyTorch DataParallel Table does not support multi-GPU loss calculation, which makes the GPU memory usage very inefficient. We address this issue here by doing CriterionDataParallel.
The DataParallel compatible with SyncBN will be released later.
Modules
-------
.. currentmodule:: encoding
.. automodule:: encoding.parallel
.. currentmodule:: encoding.parallel
:hidden:`ModelDataParallel`
~~~~~~~~~~~~~~~~~~~~~~~~~~~
......@@ -24,3 +22,21 @@ Modules
.. autoclass:: CriterionDataParallel
:members:
:hidden:`SelfDataParallel`
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: SelfDataParallel
:members:
:hidden:`AllReduce`
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: AllReduce
:members:
:hidden:`Broadcast`
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: Broadcast
:members:
......@@ -8,26 +8,46 @@ The current BN implementation is unsynchronized across the GPUs, which is a big
To synchronize the batchnorm across multiple GPUs is not easy to implement within the current DataParallel framework. We address this difficulty by making each layer 'self-parallel', that is, accepting the inputs from multiple GPUs. Therefore, we can handle different layers separately when synchronizing them across GPUs.
We will release the whole SyncBN Module and compatible DataParallel later.
.. currentmodule:: encoding.nn
Modules
-------
:hidden:`BatchNorm1d`
~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: BatchNorm1d
:members:
:hidden:`BatchNorm2d`
~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: BatchNorm2d
:members:
.. currentmodule:: encoding
Functions
---------
.. currentmodule:: encoding.functions
:hidden:`batchnormtrain`
~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: batchnormtrain
:members:
.. autofunction:: batchnormtrain
:hidden:`batchnormeval`
~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: batchnormeval
:members:
.. autofunction:: batchnormeval
:hidden:`sum_square`
~~~~~~~~~~~~~~~~~~~~
.. autoclass:: sum_square
:members:
.. autofunction:: sum_square
.. role:: hidden
:class: hidden-section
My PyTorch Utils
================
Useful util functions.
.. automodule:: encoding.utils
.. currentmodule:: encoding.utils
:hidden:`CosLR_Scheduler`
~~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: CosLR_Scheduler
:members:
:hidden:`get_optimizer`
~~~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: get_optimizer
:hidden:`save_checkpoint`
~~~~~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: save_checkpoint
:hidden:`progress_bar`
~~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: progress_bar
......@@ -75,6 +75,8 @@ IF(ENCODING_SO_VERSION)
SOVERSION ${ENCODING_SO_VERSION})
ENDIF(ENCODING_SO_VERSION)
FILE(GLOB src-header kernel/generic/*.h)
INSTALL(TARGETS ENCODING LIBRARY DESTINATION ${ENCODING_INSTALL_LIB_SUBDIR})
INSTALL(FILES kernel/thc_encoding.h DESTINATION "${ENCODING_INSTALL_INCLUDE_SUBDIR}/ENCODING")
INSTALL(FILES kernel/generic/encoding_kernel.h DESTINATION "${ENCODING_INSTALL_INCLUDE_SUBDIR}/ENCODING/generic")
INSTALL(FILES ${src-header} DESTINATION "${ENCODING_INSTALL_INCLUDE_SUBDIR}/ENCODING/generic")
......@@ -8,8 +8,10 @@
## LICENSE file in the root directory of this source tree
##+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
from .functions import *
from .modules import *
from .syncbn import sum_square, batchnormtrain, batchnormeval
from .parallel import ModelDataParallel, CriterionDataParallel
__version__ = '1.0.1'
import encoding.nn
import encoding.functions
import encoding.dilated
import encoding.parallel
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment