version: 2.1
orbs:
    gcp-gke: circleci/gcp-gke@1.0.4
    go: circleci/go@1.3.0

# TPU REFERENCES
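# The YAML anchors below (&checkout_ml_testing, &build_push_docker, &deploy_cluster,
# &delete_gke_jobs) are reused via *name by the TPU jobs defined at the bottom of this file.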
references:
    checkout_ml_testing: &checkout_ml_testing
        run:
            name: Checkout ml-testing-accelerators
            command: |
                git clone https://github.com/GoogleCloudPlatform/ml-testing-accelerators.git
                cd ml-testing-accelerators
                git fetch origin 5e88ac24f631c27045e62f0e8d5dfcf34e425e25:stable
                git checkout stable
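    # Builds the TPU test image from docker/transformers-pytorch-tpu and pushes it to GCR,
    # tagged with the CircleCI workflow job ID. On pull requests the Dockerfile is pointed
    # at pull/<PR number>/head via the GITHUB_REF build arg.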
    build_push_docker: &build_push_docker
        run:
            name: Configure Docker
            command: |
                gcloud --quiet auth configure-docker
                cd docker/transformers-pytorch-tpu
                if [ -z "$CIRCLE_PR_NUMBER" ]; then docker build --tag "$GCR_IMAGE_PATH:$CIRCLE_WORKFLOW_JOB_ID" -f Dockerfile --build-arg "TEST_IMAGE=1" . ; else docker build --tag "$GCR_IMAGE_PATH:$CIRCLE_WORKFLOW_JOB_ID" -f Dockerfile --build-arg "TEST_IMAGE=1" --build-arg "GITHUB_REF=pull/$CIRCLE_PR_NUMBER/head" . ; fi
                docker push "$GCR_IMAGE_PATH:$CIRCLE_WORKFLOW_JOB_ID"
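    # Renders docker/transformers-pytorch-tpu/bert-base-cased.jsonnet into a Kubernetes Job,
    # polls its status every 30s (up to 30 checks), streams the training container's logs,
    # then deletes the pushed image and exits with the job's status code.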
    deploy_cluster: &deploy_cluster
        run:
            name: Deploy the job on the kubernetes cluster
            command: |
                go get github.com/google/go-jsonnet/cmd/jsonnet && \
                export PATH=$PATH:$HOME/go/bin && \
                kubectl create -f docker/transformers-pytorch-tpu/dataset.yaml || true && \
                job_name=$(jsonnet -J ml-testing-accelerators/ docker/transformers-pytorch-tpu/bert-base-cased.jsonnet --ext-str image=$GCR_IMAGE_PATH --ext-str image-tag=$CIRCLE_WORKFLOW_JOB_ID | kubectl create -f -) && \
                job_name=${job_name#job.batch/} && \
                job_name=${job_name% created} && \
                echo "Waiting on kubernetes job: $job_name" && \
                i=0 && \
                # 30 checks spaced 30s apart = 900s total.
                max_checks=30 && \
                status_code=2 && \
                # Check on the job periodically. Set the status code depending on what
                # happened to the job in Kubernetes. If we try max_checks times and
                # still the job hasn't finished, give up and return the starting
                # non-zero status code.
                while [ $i -lt $max_checks ]; do ((i++)); if kubectl get jobs $job_name -o jsonpath='Failed:{.status.failed}' | grep "Failed:1"; then status_code=1 && break; elif kubectl get jobs $job_name -o jsonpath='Succeeded:{.status.succeeded}' | grep "Succeeded:1" ; then status_code=0 && break; else echo "Job not finished yet"; fi; sleep 30; done && \
                echo "Done waiting. Job status code: $status_code" && \
                pod_name=$(kubectl get po -l controller-uid=`kubectl get job $job_name -o "jsonpath={.metadata.labels.controller-uid}"` | awk 'match($0,!/NAME/) {print $1}') && \
                echo "GKE pod name: $pod_name" && \
                kubectl logs -f $pod_name --container=train
                echo "Done with log retrieval attempt." && \
                gcloud container images delete "$GCR_IMAGE_PATH:$CIRCLE_WORKFLOW_JOB_ID" --force-delete-tags && \
                exit $status_code
    delete_gke_jobs: &delete_gke_jobs
        run:
            name: Delete GKE Jobs
            command: |
                # Match jobs whose age matches patterns like '1h' or '1d', i.e. any job
                # that has been around longer than 1hr. First print all columns for
                # matches, then execute the delete.
                kubectl get job | awk 'match($4,/[0-9]+[dh]/) {print $0}'
                kubectl delete job $(kubectl get job | awk 'match($4,/[0-9]+[dh]/) {print $1}')
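# Most test jobs below come in two flavours: the plain job runs on pull requests and uses
# utils/tests_fetcher.py to build test_list.txt (pytest is skipped when that file is absent),
# while the *_all variant runs the full `tests` directory and is scheduled by the nightly workflow.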




jobs:
    run_tests_torch_and_tf:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            RUN_PT_TF_CROSS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch_and_tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng git-lfs
            - run: git lfs install
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tf-cpu,torch,testing,sentencepiece,torch-speech,vision]
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
            - run: pip install tensorflow_probability
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - run: pip install git+https://github.com/huggingface/accelerate
            - save_cache:
                key: v0.5-{{ checksum "setup.py" }}
                paths:
                    - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_tf $(cat test_list.txt) -m is_pt_tf_cross_test --durations=0 | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_torch_and_tf_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            RUN_PT_TF_CROSS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch_and_tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng git-lfs
            - run: git lfs install
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tf-cpu,torch,testing,sentencepiece,torch-speech,vision]
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
            - run: pip install tensorflow_probability
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - run: pip install git+https://github.com/huggingface/accelerate
            - save_cache:
                key: v0.5-{{ checksum "setup.py" }}
                paths:
                    - '~/.cache/pip'
            - run: |
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_tf tests -m is_pt_tf_cross_test --durations=0 | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_torch_and_flax:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            RUN_PT_FLAX_CROSS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch_and_flax-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,flax,torch,testing,sentencepiece,torch-speech,vision]
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - run: pip install git+https://github.com/huggingface/accelerate
            - save_cache:
                key: v0.5-{{ checksum "setup.py" }}
                paths:
                    - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_flax $(cat test_list.txt) -m is_pt_flax_cross_test --durations=0 | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_torch_and_flax_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            RUN_PT_FLAX_CROSS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch_and_flax-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,flax,torch,testing,sentencepiece,torch-speech,vision]
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - run: pip install git+https://github.com/huggingface/accelerate
            - save_cache:
                key: v0.5-{{ checksum "setup.py" }}
                paths:
                    - '~/.cache/pip'
            - run: |
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_flax tests -m is_pt_flax_cross_test --durations=0 | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_torch:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng time
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm]
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - run: pip install git+https://github.com/huggingface/accelerate
            - save_cache:
                  key: v0.5-torch-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 3 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_torch $(cat test_list.txt) | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_torch_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm]
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - run: pip install git+https://github.com/huggingface/accelerate
            - save_cache:
                  key: v0.5-torch-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  python -m pytest -n 3 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_torch tests | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_tf:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tf-cpu,testing,sentencepiece,tf-speech,vision]
            - run: pip install tensorflow_probability
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - save_cache:
                  key: v0.5-tf-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_tf $(cat test_list.txt) | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_tf_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tf-cpu,testing,sentencepiece,tf-speech,vision]
            - run: pip install tensorflow_probability
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - save_cache:
                  key: v0.5-tf-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_tf tests | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_flax:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                keys:
                    - v0.5-flax-{{ checksum "setup.py" }}
                    - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[flax,testing,sentencepiece,flax-speech,vision]
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - save_cache:
                  key: v0.5-flax-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_flax $(cat test_list.txt) | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_flax_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                keys:
                    - v0.5-flax-{{ checksum "setup.py" }}
                    - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[flax,testing,sentencepiece,vision,flax-speech]
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - save_cache:
                  key: v0.5-flax-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_flax tests | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_pipelines_torch:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            RUN_PIPELINE_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm]
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - save_cache:
                  key: v0.5-torch-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_torch -m is_pipeline_test $(cat test_list.txt) | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_pipelines_torch_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            RUN_PIPELINE_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm]
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - save_cache:
                  key: v0.5-torch-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_torch -m is_pipeline_test tests | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_pipelines_tf:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            RUN_PIPELINE_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tf-cpu,testing,sentencepiece]
            - run: pip install tensorflow_probability
            - save_cache:
                  key: v0.5-tf-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_tf $(cat test_list.txt) -m is_pipeline_test | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_pipelines_tf_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            RUN_PIPELINE_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tf-cpu,testing,sentencepiece]
            - run: pip install tensorflow_probability
            - save_cache:
                  key: v0.5-tf-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_tf tests -m is_pipeline_test | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_custom_tokenizers:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            RUN_CUSTOM_TOKENIZERS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-custom_tokenizers-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[ja,testing,sentencepiece,jieba,spacy,ftfy,rjieba]
            - run: python -m unidic download
            - save_cache:
                  key: v0.5-custom_tokenizers-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest --max-worker-restart=0 -s --make-reports=tests_custom_tokenizers ./tests/test_tokenization_bert_japanese.py ./tests/test_tokenization_openai.py | tee tests_output.txt
                  fi
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 1 --max-worker-restart=0 tests/test_tokenization_clip.py --dist=loadfile -s --make-reports=tests_tokenization_clip --durations=100 | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_examples_torch:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch_examples-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,torch,sentencepiece,testing,torch-speech]
            - run: pip install -r examples/pytorch/_tests_requirements.txt
            - save_cache:
                  key: v0.5-torch_examples-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py --filters examples tests | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_torch ./examples/pytorch/ | tee examples_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_examples_torch_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch_examples-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,torch,sentencepiece,testing,torch-speech]
            - run: pip install -r examples/pytorch/_tests_requirements.txt
            - save_cache:
                  key: v0.5-torch_examples-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  TRANSFORMERS_IS_CI=1 python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_torch ./examples/pytorch/ | tee examples_output.txt
            - store_artifacts:
                  path: ~/transformers/examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_examples_tensorflow:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-tensorflow_examples-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tensorflow,sentencepiece,testing]
            - run: pip install -r examples/tensorflow/_tests_requirements.txt
            - save_cache:
                  key: v0.5-tensorflow_examples-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py --filters examples tests | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_tensorflow ./examples/tensorflow/ | tee tensorflow_examples_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tensorflow_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_examples_tensorflow_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-tensorflow_examples-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tensorflow,sentencepiece,testing]
            - run: pip install -r examples/tensorflow/_tests_requirements.txt
            - save_cache:
                  key: v0.5-tensorflow_examples-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  TRANSFORMERS_IS_CI=1 python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_tensorflow ./examples/tensorflow/ | tee tensorflow_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/tensorflow_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_examples_flax:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                keys:
                    - v0.5-flax_examples-{{ checksum "setup.py" }}
                    - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[flax,testing,sentencepiece]
            - run: pip install -r examples/flax/_tests_requirements.txt
            - save_cache:
                  key: v0.5-flax_examples-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py --filters examples tests | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_flax ./examples/flax/ | tee flax_examples_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/flax_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_examples_flax_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                keys:
                    - v0.5-flax_examples-{{ checksum "setup.py" }}
                    - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[flax,testing,sentencepiece]
            - run: pip install -r examples/flax/_tests_requirements.txt
            - save_cache:
                  key: v0.5-flax_examples-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  TRANSFORMERS_IS_CI=1 python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_flax ./examples/flax/ | tee flax_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/flax_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_hub:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            HUGGINGFACE_CO_STAGING: yes
            RUN_GIT_LFS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-hub-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install git-lfs
            - run: |
                git config --global user.email "ci@dummy.com"
                git config --global user.name "ci"
            - run: pip install --upgrade pip
            - run: pip install .[torch,sentencepiece,testing]
            - save_cache:
                  key: v0.5-hub-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest --max-worker-restart=0 -sv --make-reports=tests_hub $(cat test_list.txt) -m is_staging_test | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_hub_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            HUGGINGFACE_CO_STAGING: yes
            RUN_GIT_LFS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-hub-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install git-lfs
            - run: |
                git config --global user.email "ci@dummy.com"
                git config --global user.name "ci"
            - run: pip install --upgrade pip
            - run: pip install .[torch,sentencepiece,testing]
            - save_cache:
                  key: v0.5-hub-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  python -m pytest --max-worker-restart=0 -sv --make-reports=tests_hub tests -m is_staging_test | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_onnxruntime:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[torch,tf,testing,sentencepiece,onnxruntime,vision,rjieba]
            - save_cache:
                  key: v0.5-onnx-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 1 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_onnx $(cat test_list.txt) -k onnx | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_tests_onnxruntime_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[torch,tf,testing,sentencepiece,onnxruntime,vision]
            - save_cache:
                  key: v0.5-onnx-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  python -m pytest -n 1 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_onnx tests -k onnx | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
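    # Static analysis: formatters, linters and the repo's own style/consistency scripts,
    # all run in check-only mode so violations fail the build without modifying files.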

    check_code_quality:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        resource_class: large
        environment:
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-code_quality-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[all,quality]
            - save_cache:
                  key: v0.5-code_quality-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: black --check --preview examples tests src utils
            - run: isort --check-only examples tests src utils
            - run: python utils/custom_init_isort.py --check_only
            - run: python utils/sort_auto_mappings.py --check_only
            - run: flake8 examples tests src utils
            - run: doc-builder style src/transformers docs/source --max_len 119 --check_only --path_to_docs docs/source
            - run: python utils/check_doc_toc.py

    check_repository_consistency:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        resource_class: large
        environment:
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-repository_consistency-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[all,quality]
            - save_cache:
                  key: v0.5-repository_consistency-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/check_copies.py
            - run: python utils/check_table.py
            - run: python utils/check_dummies.py
            - run: python utils/check_repo.py
            - run: python utils/check_inits.py
            - run: python utils/check_config_docstrings.py
            - run: make deps_table_check_updated
            - run: python utils/tests_fetcher.py --sanity_check

    run_tests_layoutlmv2_and_v3:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev
            - run: pip install --upgrade pip
            - run: pip install .[torch,testing,vision]
            - run: pip install torchvision
            # The commit `36a65a0907d90ed591479b2ebaa8b61cfa0b4ef0` in `detectron2` break things.
            # See https://github.com/facebookresearch/detectron2/commit/36a65a0907d90ed591479b2ebaa8b61cfa0b4ef0#comments.
            # TODO: Revert this change back once the above issue is fixed.
            - run: python -m pip install 'git+https://github.com/facebookresearch/detectron2.git'
            - run: sudo apt install tesseract-ocr
            - run: pip install pytesseract
            - save_cache:
                  key: v0.5-torch-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 1 --max-worker-restart=0 tests/models/*layoutlmv* --dist=loadfile -s --make-reports=tests_layoutlmv2_and_v3 --durations=100
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

# TPU JOBS
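    # Builds and pushes the TPU image, then deploys the bert-base-cased test job on the GKE
    # cluster using the reference anchors defined at the top of this file.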
    run_examples_tpu:
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - go/install
            - *checkout_ml_testing
            - gcp-gke/install
            - gcp-gke/update-kubeconfig-with-credentials:
                  cluster: $GKE_CLUSTER
                  perform-login: true
            - setup_remote_docker
            - *build_push_docker
            - *deploy_cluster

    cleanup-gke-jobs:
        docker:
            - image: cimg/python:3.7.12
        steps:
            - gcp-gke/install
            - gcp-gke/update-kubeconfig-with-credentials:
                  cluster: $GKE_CLUSTER
                  perform-login: true
            - *delete_gke_jobs
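# &workflow_filters can be attached to a job entry (as *workflow_filters) to restrict it to
# the main branch.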

workflow_filters: &workflow_filters
    filters:
        branches:
            only:
                - main
workflows:
    version: 2
    build_and_test:
        jobs:
            - check_code_quality
            - check_repository_consistency
            - run_examples_torch
            - run_examples_tensorflow
            - run_examples_flax
            - run_tests_custom_tokenizers
            - run_tests_torch_and_tf
            - run_tests_torch_and_flax
            - run_tests_torch
            - run_tests_tf
            - run_tests_flax
            - run_tests_pipelines_torch
            - run_tests_pipelines_tf
            - run_tests_onnxruntime
            - run_tests_hub
            - run_tests_layoutlmv2_and_v3
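    # The nightly workflow re-runs the example and test jobs in their *_all form against the
    # full test suite once a day (cron "0 0 * * *") on main.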
    nightly:
        triggers:
            - schedule:
                cron: "0 0 * * *"
                filters:
                    branches:
                        only:
                            - main
        jobs:
            - run_examples_torch_all
            - run_examples_tensorflow_all
            - run_examples_flax_all
            - run_tests_torch_and_tf_all
            - run_tests_torch_and_flax_all
            - run_tests_torch_all
            - run_tests_tf_all
            - run_tests_flax_all
            - run_tests_pipelines_torch_all
            - run_tests_pipelines_tf_all
            - run_tests_onnxruntime_all
            - run_tests_hub_all

#    tpu_testing_jobs:
#        triggers:
#            - schedule:
#                # Set to run at the first minute of every hour.
#                cron: "0 8 * * *"
#                filters:
#                    branches:
#                        only:
#                            - main
#        jobs:
#            - cleanup-gke-jobs
#            - run_examples_tpu