title: DeepSpeed
email: deepspeed@microsoft.com
description: >-
  DeepSpeed is a deep learning optimization library that makes distributed
  training easy, efficient, and effective.

locale: "en-US"
logo: /assets/images/deepspeed-logo-uppercase-bold-white-1.15.svg

repository: microsoft/DeepSpeed
baseurl: "/" # the subpath of your site, e.g. /blog
url: "https://www.deepspeed.ai" # the base hostname & protocol for your site, e.g. http://example.com

# Build settings
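# The `remote_theme` setting below pulls the Minimal Mistakes theme at build
# time (via the jekyll-remote-theme plugin) and pins it to a release tag so
# upstream changes cannot break the site; "air" is one of the theme's
# built-in skins.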
remote_theme: "mmistakes/minimal-mistakes@4.19.0"
minimal_mistakes_skin: "air"
search: true
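# Enables the theme's built-in site search (Minimal Mistakes defaults to Lunr
# when no search_provider is configured).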

plugins:
  - jekyll-feed
  - jekyll-include-cache
  - jekyll-paginate

#paginate: 10
#paginate_path: /blog/page:num
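# Pagination is currently disabled; uncommenting the two lines above would
# have jekyll-paginate split the post listing into pages of 10 posts at
# /blog/page2, /blog/page3, and so on.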

include: ["_pages"]
exclude: ["code-docs"]
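# Jekyll normally skips underscore-prefixed directories, so _pages must be
# listed in `include` to be built; `exclude` keeps code-docs (presumably the
# separately generated API documentation) out of the Jekyll build.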

collections:
  tutorials:
    output: true
    permalink: /:collection/:path/
    order:
      - advanced-install.md
      - getting-started.md
      - azure.md
      - bert-finetuning.md
      - bert-pretraining.md
      - cifar-10.md
      - curriculum-learning.md
      - flops-profiler.md
      - pytorch-profiler.md
      - autotuning.md
      - gan.md
      - lrrt.md
      - megatron.md
      - mixture-of-experts.md
      - mixture-of-experts-nlg.md
      - mixture-of-experts-inference.md
      - one-cycle.md
      - onebit-adam.md
      - zero-one-adam.md
      - onebit-lamb.md
      - pipeline.md
      - progressive_layer_dropping.md
      - sparse-attention.md
      - transformer_kernel.md
      - zero-offload.md
      - zero.md
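# `output: true` renders every tutorial above as its own page at
# /tutorials/<name>/ per the permalink pattern, and the `order` list (a
# Jekyll 4 feature) fixes the sequence of documents in site.tutorials rather
# than sorting by date or path.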

defaults:
  - scope:
      path: ""
    values:
      layout: single
      author_profile: false
      read_time: false
      comments: false
      share: false
      related: false
      sneak_preview: false
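      # Note: sneak_preview is not a standard Minimal Mistakes setting; it
      # appears to be a site-specific flag read by this site's own templates.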
      toc: true
      toc_label: "Contents"
      sidebar:
        nav: "lnav"
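        # "lnav" refers to a navigation list defined in _data/navigation.yml,
        # the standard location for Minimal Mistakes sidebar navs.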
  - scope:
      path: "_pages"
    values:
      permalink: /docs/:basename/
  - scope:
      path: ""
      type: posts
    values:
      layout: single-full
      author_profile: false
      read_time: false
      comments: false
      share: true
      related: false
      toc: true
      toc_label: "Contents"
      toc_sticky: true
      show_date: true
  - scope:
      path: ""
      type: tutorials
    values:
      layout: single
      toc_sticky: true

analytics:
  provider: "google-gtag"
  google:
    tracking_id: "UA-169781858-1"
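    # "google-gtag" loads gtag.js. UA-prefixed IDs belong to Universal
    # Analytics, which Google retired in July 2023, so a GA4 measurement ID
    # (G-XXXXXXX) would be the modern replacement here.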

timezone: America/Los_Angeles
breadcrumbs: true
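# `timezone` controls how post dates are rendered at build time;
# `breadcrumbs` turns on the theme's breadcrumb navigation above page titles.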

press_release_v3: https://www.microsoft.com/en-us/research/blog/deepspeed-extreme-scale-model-training-for-everyone/
press_release_v5: https://www.microsoft.com/en-us/research/blog/deepspeed-powers-8x-larger-moe-model-training-with-high-performance/
press_release_v6: https://www.microsoft.com/en-us/research/blog/deepspeed-advancing-moe-inference-and-training-to-power-next-generation-ai-scale/