version: 2.1
orbs:
    gcp-gke: circleci/gcp-gke@1.0.4
    go: circleci/go@1.3.0

# TPU REFERENCES
# Reusable step anchors for the TPU test pipeline (build/push the docker image,
# deploy a training job to GKE, stream its logs, and garbage-collect old jobs).
references:
    checkout_ml_testing: &checkout_ml_testing
        run:
            name: Checkout ml-testing-accelerators
            command: |
                git clone https://github.com/GoogleCloudPlatform/ml-testing-accelerators.git
                cd ml-testing-accelerators
                git fetch origin 5e88ac24f631c27045e62f0e8d5dfcf34e425e25:stable
                git checkout stable
    build_push_docker: &build_push_docker
        run:
            name: Configure Docker
            command: |
                gcloud --quiet auth configure-docker
                cd docker/transformers-pytorch-tpu
                # On PRs, build from the PR head ref; otherwise build the default branch.
                if [ -z "$CIRCLE_PR_NUMBER" ]; then docker build --tag "$GCR_IMAGE_PATH:$CIRCLE_WORKFLOW_JOB_ID" -f Dockerfile --build-arg "TEST_IMAGE=1" . ; else docker build --tag "$GCR_IMAGE_PATH:$CIRCLE_WORKFLOW_JOB_ID" -f Dockerfile --build-arg "TEST_IMAGE=1" --build-arg "GITHUB_REF=pull/$CIRCLE_PR_NUMBER/head" . ; fi
                docker push "$GCR_IMAGE_PATH:$CIRCLE_WORKFLOW_JOB_ID"
    deploy_cluster: &deploy_cluster
        run:
            name: Deploy the job on the kubernetes cluster
            command: |
                go get github.com/google/go-jsonnet/cmd/jsonnet && \
                export PATH=$PATH:$HOME/go/bin && \
                kubectl create -f docker/transformers-pytorch-tpu/dataset.yaml || true && \
                job_name=$(jsonnet -J ml-testing-accelerators/ docker/transformers-pytorch-tpu/bert-base-cased.jsonnet --ext-str image=$GCR_IMAGE_PATH --ext-str image-tag=$CIRCLE_WORKFLOW_JOB_ID | kubectl create -f -) && \
                job_name=${job_name#job.batch/} && \
                job_name=${job_name% created} && \
                echo "Waiting on kubernetes job: $job_name" && \
                i=0 && \
                # 30 checks spaced 30s apart = 900s total.
                max_checks=30 && \
                status_code=2 && \
                # Check on the job periodically. Set the status code depending on what
                # happened to the job in Kubernetes. If we try max_checks times and
                # still the job hasn't finished, give up and return the starting
                # non-zero status code.
                while [ $i -lt $max_checks ]; do ((i++)); if kubectl get jobs $job_name -o jsonpath='Failed:{.status.failed}' | grep "Failed:1"; then status_code=1 && break; elif kubectl get jobs $job_name -o jsonpath='Succeeded:{.status.succeeded}' | grep "Succeeded:1" ; then status_code=0 && break; else echo "Job not finished yet"; fi; sleep 30; done && \
                echo "Done waiting. Job status code: $status_code" && \
                pod_name=$(kubectl get po -l controller-uid=`kubectl get job $job_name -o "jsonpath={.metadata.labels.controller-uid}"` | awk 'match($0,!/NAME/) {print $1}') && \
                echo "GKE pod name: $pod_name" && \
                kubectl logs -f $pod_name --container=train
                echo "Done with log retrieval attempt." && \
                gcloud container images delete "$GCR_IMAGE_PATH:$CIRCLE_WORKFLOW_JOB_ID" --force-delete-tags && \
                exit $status_code
    delete_gke_jobs: &delete_gke_jobs
        run:
            name: Delete GKE Jobs
            command: |
                # Match jobs whose age matches patterns like '1h' or '1d', i.e. any job
                # that has been around longer than 1hr. First print all columns for
                # matches, then execute the delete.
                kubectl get job | awk 'match($4,/[0-9]+[dh]/) {print $0}'
                kubectl delete job $(kubectl get job | awk 'match($4,/[0-9]+[dh]/) {print $1}')




jobs:
    # PyTorch <-> TF cross tests, limited to tests impacted by the PR
    # (test_list.txt is produced by utils/tests_fetcher.py; skipped when absent).
    run_tests_torch_and_tf:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            RUN_PT_TF_CROSS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch_and_tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng git-lfs
            - run: git lfs install
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tf-cpu,torch,testing,sentencepiece,torch-speech,vision]
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
            - run: pip install tensorflow_probability
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
            - run: pip install git+https://github.com/huggingface/accelerate
            - save_cache:
                  # Save under the job-specific key so the first restore key above can hit.
                  key: v0.5-torch_and_tf-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_tf $(cat test_list.txt) -m is_pt_tf_cross_test --durations=0 | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

106
107
108
    run_tests_torch_and_tf_all:
        working_directory: ~/transformers
        docker:
109
            - image: cimg/python:3.7.12
110
111
112
113
        environment:
            OMP_NUM_THREADS: 1
            RUN_PT_TF_CROSS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
114
            PYTEST_TIMEOUT: 120
115
116
117
118
119
120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
121
122
                      - v0.5-torch_and_tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
123
124
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng git-lfs
            - run: git lfs install
125
            - run: pip install --upgrade pip
126
            - run: pip install .[sklearn,tf-cpu,torch,testing,sentencepiece,torch-speech,vision]
127
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
Kamal Raj's avatar
Kamal Raj committed
128
            - run: pip install tensorflow_probability
129
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
130
            - run: pip install git+https://github.com/huggingface/accelerate
131
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
132
                key: v0.5-{{ checksum "setup.py" }}
133
134
135
                paths:
                    - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
136
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_tf tests -m is_pt_tf_cross_test --durations=0 | tee tests_output.txt
137
138
139
140
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
141

142
143
144
    run_tests_torch_and_flax:
        working_directory: ~/transformers
        docker:
145
            - image: cimg/python:3.7.12
146
147
        environment:
            OMP_NUM_THREADS: 1
148
149
            RUN_PT_FLAX_CROSS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
150
            PYTEST_TIMEOUT: 120
151
152
153
154
155
156
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
157
158
                      - v0.5-torch_and_flax-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
159
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
160
            - run: pip install --upgrade pip
161
            - run: pip install .[sklearn,flax,torch,testing,sentencepiece,torch-speech,vision]
162
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
163
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
164
            - run: pip install git+https://github.com/huggingface/accelerate
165
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
166
                key: v0.5-{{ checksum "setup.py" }}
167
168
                paths:
                    - '~/.cache/pip'
169
170
171
172
173
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
174
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_flax $(cat test_list.txt) -m is_pt_flax_cross_test --durations=0 | tee tests_output.txt
175
                  fi
176
177
178
179
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
180

181
182
183
    run_tests_torch_and_flax_all:
        working_directory: ~/transformers
        docker:
184
            - image: cimg/python:3.7.12
185
186
187
188
        environment:
            OMP_NUM_THREADS: 1
            RUN_PT_FLAX_CROSS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
189
            PYTEST_TIMEOUT: 120
190
191
192
193
194
195
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
196
197
                      - v0.5-torch_and_flax-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
198
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
199
            - run: pip install --upgrade pip
200
            - run: pip install .[sklearn,flax,torch,testing,sentencepiece,torch-speech,vision]
201
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
202
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
203
            - run: pip install git+https://github.com/huggingface/accelerate
204
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
205
                key: v0.5-{{ checksum "setup.py" }}
206
207
208
                paths:
                    - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
209
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_torch_and_flax tests -m is_pt_flax_cross_test --durations=0 | tee tests_output.txt
210
211
212
213
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
214

Aymeric Augustin's avatar
Aymeric Augustin committed
215
    run_tests_torch:
216
        working_directory: ~/transformers
Julien Chaumond's avatar
Julien Chaumond committed
217
        docker:
218
            - image: cimg/python:3.7.12
219
220
        environment:
            OMP_NUM_THREADS: 1
221
            TRANSFORMERS_IS_CI: yes
222
            PYTEST_TIMEOUT: 120
223
        resource_class: xlarge
224
        parallelism: 1
Julien Chaumond's avatar
Julien Chaumond committed
225
226
        steps:
            - checkout
227
228
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
229
230
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
231
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng time
232
            - run: pip install --upgrade pip
233
            - run: pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm]
234
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
235
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
236
            - run: pip install git+https://github.com/huggingface/accelerate
237
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
238
                  key: v0.5-torch-{{ checksum "setup.py" }}
239
240
                  paths:
                      - '~/.cache/pip'
241
242
243
244
245
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
246
                    python -m pytest -n 3 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_torch $(cat test_list.txt) | tee tests_output.txt
247
                  fi
248
            - store_artifacts:
249
250
251
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
252

253
254
255
    run_tests_torch_all:
        working_directory: ~/transformers
        docker:
256
            - image: cimg/python:3.7.12
257
258
259
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
260
            PYTEST_TIMEOUT: 120
261
262
263
264
265
266
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
267
268
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
269
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
270
            - run: pip install --upgrade pip
271
            - run: pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm]
272
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
273
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
274
            - run: pip install git+https://github.com/huggingface/accelerate
275
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
276
                  key: v0.5-torch-{{ checksum "setup.py" }}
277
278
279
                  paths:
                      - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
280
                  python -m pytest -n 3 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_torch tests | tee tests_output.txt
281
282
283
284
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
Lysandre Debut's avatar
Lysandre Debut committed
285

Aymeric Augustin's avatar
Aymeric Augustin committed
286
    run_tests_tf:
287
        working_directory: ~/transformers
thomwolf's avatar
thomwolf committed
288
        docker:
289
            - image: cimg/python:3.7.12
290
291
        environment:
            OMP_NUM_THREADS: 1
292
            TRANSFORMERS_IS_CI: yes
293
            PYTEST_TIMEOUT: 120
thomwolf's avatar
thomwolf committed
294
295
296
297
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
298
299
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
300
301
                      - v0.5-tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
302
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
303
            - run: pip install --upgrade pip
304
            - run: pip install .[sklearn,tf-cpu,testing,sentencepiece,tf-speech,vision]
Kamal Raj's avatar
Kamal Raj committed
305
            - run: pip install tensorflow_probability
306
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
307
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
308
                  key: v0.5-tf-{{ checksum "setup.py" }}
309
310
                  paths:
                      - '~/.cache/pip'
311
312
313
314
315
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
316
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_tf $(cat test_list.txt) | tee tests_output.txt
317
                  fi
318
            - store_artifacts:
319
320
321
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
322

323
324
325
    run_tests_tf_all:
        working_directory: ~/transformers
        docker:
326
            - image: cimg/python:3.7.12
327
328
329
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
330
            PYTEST_TIMEOUT: 120
331
332
333
334
335
336
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
337
338
                      - v0.5-tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
339
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
340
            - run: pip install --upgrade pip
341
            - run: pip install .[sklearn,tf-cpu,testing,sentencepiece,tf-speech,vision]
Kamal Raj's avatar
Kamal Raj committed
342
            - run: pip install tensorflow_probability
343
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
344
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
345
                  key: v0.5-tf-{{ checksum "setup.py" }}
346
347
348
                  paths:
                      - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
349
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_tf tests | tee tests_output.txt
350
351
352
353
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
354

355
356
357
    run_tests_flax:
        working_directory: ~/transformers
        docker:
358
            - image: cimg/python:3.7.12
359
360
        environment:
            OMP_NUM_THREADS: 1
361
            TRANSFORMERS_IS_CI: yes
362
            PYTEST_TIMEOUT: 120
363
364
365
366
367
368
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                keys:
Yih-Dar's avatar
Yih-Dar committed
369
370
                    - v0.5-flax-{{ checksum "setup.py" }}
                    - v0.5-{{ checksum "setup.py" }}
371
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
372
            - run: pip install --upgrade pip
373
374
            - run: pip install .[flax,testing,sentencepiece,flax-speech,vision]
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
375
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
376
                  key: v0.5-flax-{{ checksum "setup.py" }}
377
378
                  paths:
                      - '~/.cache/pip'
379
380
381
382
383
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
384
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_flax $(cat test_list.txt) | tee tests_output.txt
385
                  fi
386
            - store_artifacts:
387
388
389
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
390

391
392
393
    run_tests_flax_all:
        working_directory: ~/transformers
        docker:
394
            - image: cimg/python:3.7.12
395
396
397
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
398
            PYTEST_TIMEOUT: 120
399
400
401
402
403
404
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                keys:
Yih-Dar's avatar
Yih-Dar committed
405
406
                    - v0.5-flax-{{ checksum "setup.py" }}
                    - v0.5-{{ checksum "setup.py" }}
407
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
408
            - run: pip install --upgrade pip
409
410
            - run: pip install .[flax,testing,sentencepiece,vision,flax-speech]
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
411
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
412
                  key: v0.5-flax-{{ checksum "setup.py" }}
413
414
415
                  paths:
                      - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
416
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_flax tests | tee tests_output.txt
417
418
419
420
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
421

422
423
424
    run_tests_pipelines_torch:
        working_directory: ~/transformers
        docker:
425
            - image: cimg/python:3.7.12
426
427
        environment:
            OMP_NUM_THREADS: 1
428
429
            RUN_PIPELINE_TESTS: yes
            TRANSFORMERS_IS_CI: yes
430
            PYTEST_TIMEOUT: 120
431
432
433
434
435
436
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
437
438
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
439
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
440
            - run: pip install --upgrade pip
441
            - run: pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm]
442
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
443
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
444
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
445
                  key: v0.5-torch-{{ checksum "setup.py" }}
446
447
                  paths:
                      - '~/.cache/pip'
448
449
450
451
452
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
453
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_torch -m is_pipeline_test $(cat test_list.txt) | tee tests_output.txt
454
                  fi
455
            - store_artifacts:
456
457
458
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
459

460
461
462
    run_tests_pipelines_torch_all:
        working_directory: ~/transformers
        docker:
463
            - image: cimg/python:3.7.12
464
465
466
467
        environment:
            OMP_NUM_THREADS: 1
            RUN_PIPELINE_TESTS: yes
            TRANSFORMERS_IS_CI: yes
468
            PYTEST_TIMEOUT: 120
469
470
471
472
473
474
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
475
476
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
477
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
478
            - run: pip install --upgrade pip
479
            - run: pip install .[sklearn,torch,testing,sentencepiece,torch-speech,vision,timm]
480
            - run: pip install torch-scatter -f https://pytorch-geometric.com/whl/torch-1.12.0+cpu.html
481
            - run: pip install https://github.com/kpu/kenlm/archive/master.zip
482
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
483
                  key: v0.5-torch-{{ checksum "setup.py" }}
484
485
486
                  paths:
                      - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
487
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_torch -m is_pipeline_test tests | tee tests_output.txt
488
489
490
491
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
492

493
494
495
    run_tests_pipelines_tf:
        working_directory: ~/transformers
        docker:
496
            - image: cimg/python:3.7.12
497
498
        environment:
            OMP_NUM_THREADS: 1
499
500
            RUN_PIPELINE_TESTS: yes
            TRANSFORMERS_IS_CI: yes
501
            PYTEST_TIMEOUT: 120
502
503
504
505
506
507
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
508
509
                      - v0.5-tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
510
            - run: pip install --upgrade pip
511
            - run: pip install .[sklearn,tf-cpu,testing,sentencepiece]
Kamal Raj's avatar
Kamal Raj committed
512
            - run: pip install tensorflow_probability
513
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
514
                  key: v0.5-tf-{{ checksum "setup.py" }}
515
516
                  paths:
                      - '~/.cache/pip'
517
518
519
520
521
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
522
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_tf $(cat test_list.txt) -m is_pipeline_test | tee tests_output.txt
523
                  fi
524
525
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
526
            - store_artifacts:
527
528
                  path: ~/transformers/reports

529
530
531
    run_tests_pipelines_tf_all:
        working_directory: ~/transformers
        docker:
532
            - image: cimg/python:3.7.12
533
534
535
536
        environment:
            OMP_NUM_THREADS: 1
            RUN_PIPELINE_TESTS: yes
            TRANSFORMERS_IS_CI: yes
537
            PYTEST_TIMEOUT: 120
538
539
540
541
542
543
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
544
545
                      - v0.5-tf-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
546
547
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tf-cpu,testing,sentencepiece]
Kamal Raj's avatar
Kamal Raj committed
548
            - run: pip install tensorflow_probability
549
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
550
                  key: v0.5-tf-{{ checksum "setup.py" }}
551
552
553
                  paths:
                      - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
554
                  python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -rA -s --make-reports=tests_pipelines_tf tests -m is_pipeline_test | tee tests_output.txt
555
556
557
558
559
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

Aymeric Augustin's avatar
Aymeric Augustin committed
560
    run_tests_custom_tokenizers:
561
562
        working_directory: ~/transformers
        docker:
563
            - image: cimg/python:3.7.12
564
565
        environment:
            RUN_CUSTOM_TOKENIZERS: yes
566
            TRANSFORMERS_IS_CI: yes
567
            PYTEST_TIMEOUT: 120
568
569
        steps:
            - checkout
570
571
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
572
573
                      - v0.5-custom_tokenizers-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
574
            - run: pip install --upgrade pip
575
            - run: pip install .[ja,testing,sentencepiece,jieba,spacy,ftfy,rjieba]
576
            - run: python -m unidic download
577
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
578
                  key: v0.5-custom_tokenizers-{{ checksum "setup.py" }}
579
580
                  paths:
                      - '~/.cache/pip'
581
582
583
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
584
585
            - run: |
                  if [ -f test_list.txt ]; then
586
                    python -m pytest --max-worker-restart=0 -s --make-reports=tests_custom_tokenizers ./tests/models/bert_japanese/test_tokenization_bert_japanese.py ./tests/models/openai/test_tokenization_openai.py ./tests/models/clip/test_tokenization_clip.py | tee tests_output.txt
587
                  fi
588
589
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
590
            - store_artifacts:
591
592
                  path: ~/transformers/reports

593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
    run_tests_custom_tokenizers_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            RUN_CUSTOM_TOKENIZERS: yes
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-custom_tokenizers-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[ja,testing,sentencepiece,jieba,spacy,ftfy,rjieba]
            - run: python -m unidic download
            - save_cache:
                  key: v0.5-custom_tokenizers-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python -m pytest --max-worker-restart=0 -s --make-reports=tests_custom_tokenizers ./tests/models/bert_japanese/test_tokenization_bert_japanese.py ./tests/models/openai/test_tokenization_openai.py ./tests/models/clip/test_tokenization_clip.py | tee tests_output.txt
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

Aymeric Augustin's avatar
Aymeric Augustin committed
620
    run_examples_torch:
621
622
        working_directory: ~/transformers
        docker:
623
            - image: cimg/python:3.7.12
624
625
        environment:
            OMP_NUM_THREADS: 1
626
            TRANSFORMERS_IS_CI: yes
627
            PYTEST_TIMEOUT: 120
628
629
630
631
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
632
633
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
634
635
                      - v0.5-torch_examples-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
636
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
637
            - run: pip install --upgrade pip
638
            - run: pip install .[sklearn,torch,sentencepiece,testing,torch-speech]
Sylvain Gugger's avatar
Sylvain Gugger committed
639
            - run: pip install -r examples/pytorch/_tests_requirements.txt
640
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
641
                  key: v0.5-torch_examples-{{ checksum "setup.py" }}
642
643
                  paths:
                      - '~/.cache/pip'
644
            - run: python utils/tests_fetcher.py --filters examples tests | tee test_preparation.txt
645
646
647
648
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
649
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_torch ./examples/pytorch/ | tee tests_output.txt
650
                  fi
651
            - store_artifacts:
652
653
654
                  path: ~/transformers/examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
655

656
657
658
    run_examples_torch_all:
        working_directory: ~/transformers
        docker:
659
            - image: cimg/python:3.7.12
660
661
662
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
663
            PYTEST_TIMEOUT: 120
664
665
666
667
668
669
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
670
671
                      - v0.5-torch_examples-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
672
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev espeak-ng
673
            - run: pip install --upgrade pip
674
            - run: pip install .[sklearn,torch,sentencepiece,testing,torch-speech]
675
676
            - run: pip install -r examples/pytorch/_tests_requirements.txt
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
677
                  key: v0.5-torch_examples-{{ checksum "setup.py" }}
678
679
680
                  paths:
                      - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
681
                  TRANSFORMERS_IS_CI=1 python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_torch ./examples/pytorch/ | tee examples_output.txt
682
683
684
685
            - store_artifacts:
                  path: ~/transformers/examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
686

Matt's avatar
Matt committed
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
    run_examples_tensorflow:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-tensorflow_examples-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tensorflow,sentencepiece,testing]
            - run: pip install -r examples/tensorflow/_tests_requirements.txt
            - save_cache:
                  key: v0.5-tensorflow_examples-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py --filters examples tests | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_tensorflow ./examples/tensorflow/ | tee tests_output.txt
                  fi
            - store_artifacts:
                  path: ~/transformers/tensorflow_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

    run_examples_tensorflow_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-tensorflow_examples-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: pip install --upgrade pip
            - run: pip install .[sklearn,tensorflow,sentencepiece,testing]
            - run: pip install -r examples/tensorflow/_tests_requirements.txt
            - save_cache:
                  key: v0.5-tensorflow_examples-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: |
                  TRANSFORMERS_IS_CI=1 python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_tensorflow ./examples/tensorflow/ | tee examples_output.txt
            - store_artifacts:
                  path: ~/transformers/tensorflow_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

Suraj Patil's avatar
Suraj Patil committed
752
753
754
    run_examples_flax:
        working_directory: ~/transformers
        docker:
755
            - image: cimg/python:3.7.12
Suraj Patil's avatar
Suraj Patil committed
756
757
758
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
759
            PYTEST_TIMEOUT: 120
Suraj Patil's avatar
Suraj Patil committed
760
761
762
763
764
765
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                keys:
Yih-Dar's avatar
Yih-Dar committed
766
767
                    - v0.5-flax_examples-{{ checksum "setup.py" }}
                    - v0.5-{{ checksum "setup.py" }}
Suraj Patil's avatar
Suraj Patil committed
768
            - run: pip install --upgrade pip
769
            - run: pip install .[flax,testing,sentencepiece]
Suraj Patil's avatar
Suraj Patil committed
770
771
            - run: pip install -r examples/flax/_tests_requirements.txt
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
772
                  key: v0.5-flax_examples-{{ checksum "setup.py" }}
Suraj Patil's avatar
Suraj Patil committed
773
774
775
776
777
778
779
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py --filters examples tests | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
780
                    python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_flax ./examples/flax/ | tee tests_output.txt
Suraj Patil's avatar
Suraj Patil committed
781
782
783
784
785
                  fi
            - store_artifacts:
                  path: ~/transformers/flax_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
786

Suraj Patil's avatar
Suraj Patil committed
787
788
789
    run_examples_flax_all:
        working_directory: ~/transformers
        docker:
790
            - image: cimg/python:3.7.12
Suraj Patil's avatar
Suraj Patil committed
791
792
793
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
794
            PYTEST_TIMEOUT: 120
Suraj Patil's avatar
Suraj Patil committed
795
796
797
798
799
800
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                keys:
Yih-Dar's avatar
Yih-Dar committed
801
802
                    - v0.5-flax_examples-{{ checksum "setup.py" }}
                    - v0.5-{{ checksum "setup.py" }}
Suraj Patil's avatar
Suraj Patil committed
803
            - run: pip install --upgrade pip
804
            - run: pip install .[flax,testing,sentencepiece]
Suraj Patil's avatar
Suraj Patil committed
805
806
            - run: pip install -r examples/flax/_tests_requirements.txt
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
807
                  key: v0.5-flax_examples-{{ checksum "setup.py" }}
Suraj Patil's avatar
Suraj Patil committed
808
809
810
                  paths:
                      - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
811
                  TRANSFORMERS_IS_CI=1 python -m pytest -n 8 --max-worker-restart=0 --dist=loadfile -s --make-reports=examples_flax ./examples/flax/ | tee examples_output.txt
Suraj Patil's avatar
Suraj Patil committed
812
813
814
815
816
            - store_artifacts:
                  path: ~/transformers/flax_examples_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

Sylvain Gugger's avatar
Sylvain Gugger committed
817
    run_tests_hub:
818
819
        working_directory: ~/transformers
        docker:
820
            - image: cimg/python:3.7.12
821
        environment:
Sylvain Gugger's avatar
Sylvain Gugger committed
822
            HUGGINGFACE_CO_STAGING: yes
823
824
            RUN_GIT_LFS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
825
            PYTEST_TIMEOUT: 120
826
827
828
829
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
Sylvain Gugger's avatar
Sylvain Gugger committed
830
831
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
832
833
                      - v0.5-hub-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
834
            - run: sudo apt-get -y update && sudo apt-get install git-lfs
835
836
837
838
            - run: |
                git config --global user.email "ci@dummy.com"
                git config --global user.name "ci"
            - run: pip install --upgrade pip
Sylvain Gugger's avatar
Sylvain Gugger committed
839
840
            - run: pip install .[torch,sentencepiece,testing]
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
841
                  key: v0.5-hub-{{ checksum "setup.py" }}
Sylvain Gugger's avatar
Sylvain Gugger committed
842
843
                  paths:
                      - '~/.cache/pip'
844
845
846
847
848
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
849
                    python -m pytest --max-worker-restart=0 -sv --make-reports=tests_hub $(cat test_list.txt) -m is_staging_test | tee tests_output.txt
850
                  fi
851
852
853
854
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
855

856
857
858
    run_tests_hub_all:
        working_directory: ~/transformers
        docker:
859
            - image: cimg/python:3.7.12
860
861
862
863
        environment:
            HUGGINGFACE_CO_STAGING: yes
            RUN_GIT_LFS_TESTS: yes
            TRANSFORMERS_IS_CI: yes
864
            PYTEST_TIMEOUT: 120
865
866
867
868
869
870
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
871
872
                      - v0.5-hub-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
873
            - run: sudo apt-get -y update && sudo apt-get install git-lfs
874
875
876
877
878
879
            - run: |
                git config --global user.email "ci@dummy.com"
                git config --global user.name "ci"
            - run: pip install --upgrade pip
            - run: pip install .[torch,sentencepiece,testing]
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
880
                  key: v0.5-hub-{{ checksum "setup.py" }}
881
882
883
                  paths:
                      - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
884
                  python -m pytest --max-worker-restart=0 -sv --make-reports=tests_hub tests -m is_staging_test | tee tests_output.txt
885
886
887
888
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
889

890
891
892
    run_tests_onnxruntime:
        working_directory: ~/transformers
        docker:
893
            - image: cimg/python:3.7.12
894
895
896
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
897
            PYTEST_TIMEOUT: 120
898
899
900
901
902
903
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
904
905
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
906
            - run: pip install --upgrade pip
907
            - run: pip install .[torch,tf,testing,sentencepiece,onnxruntime,vision,rjieba]
908
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
909
                  key: v0.5-onnx-{{ checksum "setup.py" }}
910
911
                  paths:
                      - '~/.cache/pip'
912
913
914
915
916
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
917
                    python -m pytest -n 1 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_onnx $(cat test_list.txt) -k onnx | tee tests_output.txt
918
                  fi
919
920
921
922
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports
923

924
925
926
    run_tests_onnxruntime_all:
        working_directory: ~/transformers
        docker:
927
            - image: cimg/python:3.7.12
928
929
930
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
931
            PYTEST_TIMEOUT: 120
932
933
934
935
936
937
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
938
939
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
940
            - run: pip install --upgrade pip
941
            - run: pip install .[torch,tf,testing,sentencepiece,onnxruntime,vision]
942
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
943
                  key: v0.5-onnx-{{ checksum "setup.py" }}
944
945
946
                  paths:
                      - '~/.cache/pip'
            - run: |
Yih-Dar's avatar
Yih-Dar committed
947
                  python -m pytest -n 1 --max-worker-restart=0 --dist=loadfile -s --make-reports=tests_onnx tests -k onnx | tee tests_output.txt
948
949
950
951
952
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

Aymeric Augustin's avatar
Aymeric Augustin committed
953
954
955
    check_code_quality:
        working_directory: ~/transformers
        docker:
956
            - image: cimg/python:3.7.12
Lysandre's avatar
Lysandre committed
957
        resource_class: large
958
959
        environment:
            TRANSFORMERS_IS_CI: yes
960
            PYTEST_TIMEOUT: 120
Aymeric Augustin's avatar
Aymeric Augustin committed
961
962
963
        parallelism: 1
        steps:
            - checkout
964
965
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
966
967
                      - v0.5-code_quality-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
968
            - run: pip install --upgrade pip
969
            - run: pip install .[all,quality]
970
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
971
                  key: v0.5-code_quality-{{ checksum "setup.py" }}
972
973
                  paths:
                      - '~/.cache/pip'
Sylvain Gugger's avatar
Sylvain Gugger committed
974
            - run: black --check --preview examples tests src utils
975
            - run: isort --check-only examples tests src utils
Sylvain Gugger's avatar
Sylvain Gugger committed
976
            - run: python utils/custom_init_isort.py --check_only
977
            - run: python utils/sort_auto_mappings.py --check_only
978
            - run: flake8 examples tests src utils
979
            - run: doc-builder style src/transformers docs/source --max_len 119 --check_only --path_to_docs docs/source
Sylvain Gugger's avatar
Sylvain Gugger committed
980
            - run: python utils/check_doc_toc.py
981

982
    check_repository_consistency:
R茅mi Louf's avatar
R茅mi Louf committed
983
984
        working_directory: ~/transformers
        docker:
985
            - image: cimg/python:3.7.12
Sylvain Gugger's avatar
Sylvain Gugger committed
986
987
988
        resource_class: large
        environment:
            TRANSFORMERS_IS_CI: yes
989
            PYTEST_TIMEOUT: 120
R茅mi Louf's avatar
R茅mi Louf committed
990
991
992
        parallelism: 1
        steps:
            - checkout
Sylvain Gugger's avatar
Sylvain Gugger committed
993
994
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
995
996
                      - v0.5-repository_consistency-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
Sylvain Gugger's avatar
Sylvain Gugger committed
997
998
999
            - run: pip install --upgrade pip
            - run: pip install .[all,quality]
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
1000
                  key: v0.5-repository_consistency-{{ checksum "setup.py" }}
Sylvain Gugger's avatar
Sylvain Gugger committed
1001
1002
1003
1004
1005
1006
1007
                  paths:
                      - '~/.cache/pip'
            - run: python utils/check_copies.py
            - run: python utils/check_table.py
            - run: python utils/check_dummies.py
            - run: python utils/check_repo.py
            - run: python utils/check_inits.py
1008
            - run: python utils/check_config_docstrings.py
Sylvain Gugger's avatar
Sylvain Gugger committed
1009
1010
            - run: make deps_table_check_updated
            - run: python utils/tests_fetcher.py --sanity_check
1011
            - run: python utils/update_metadata.py --check-only
1012

NielsRogge's avatar
NielsRogge committed
1013
    run_tests_layoutlmv2_and_v3:
1014
1015
        working_directory: ~/transformers
        docker:
1016
            - image: cimg/python:3.7.12
1017
1018
1019
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
1020
            PYTEST_TIMEOUT: 120
1021
1022
1023
1024
1025
1026
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
Yih-Dar's avatar
Yih-Dar committed
1027
1028
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
1029
1030
1031
1032
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev
            - run: pip install --upgrade pip
            - run: pip install .[torch,testing,vision]
            - run: pip install torchvision
1033
1034
1035
            # The commit `36a65a0907d90ed591479b2ebaa8b61cfa0b4ef0` in `detectron2` break things.
            # See https://github.com/facebookresearch/detectron2/commit/36a65a0907d90ed591479b2ebaa8b61cfa0b4ef0#comments.
            # TODO: Revert this change back once the above issue is fixed.
Yih-Dar's avatar
Yih-Dar committed
1036
            - run: python -m pip install 'git+https://github.com/facebookresearch/detectron2.git'
1037
1038
1039
            - run: sudo apt install tesseract-ocr
            - run: pip install pytesseract
            - save_cache:
Yih-Dar's avatar
Yih-Dar committed
1040
                  key: v0.5-torch-{{ checksum "setup.py" }}
1041
1042
1043
1044
1045
1046
1047
                  paths:
                      - '~/.cache/pip'
            - run: python utils/tests_fetcher.py | tee test_preparation.txt
            - store_artifacts:
                  path: ~/transformers/test_preparation.txt
            - run: |
                  if [ -f test_list.txt ]; then
Yih-Dar's avatar
Yih-Dar committed
1048
                    python -m pytest -n 1 --max-worker-restart=0 tests/models/*layoutlmv* --dist=loadfile -s --make-reports=tests_layoutlmv2_and_v3 --durations=100
1049
1050
1051
1052
1053
1054
                  fi
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
    run_tests_layoutlmv2_and_v3_all:
        working_directory: ~/transformers
        docker:
            - image: cimg/python:3.7.12
        environment:
            OMP_NUM_THREADS: 1
            TRANSFORMERS_IS_CI: yes
            PYTEST_TIMEOUT: 120
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - restore_cache:
                  keys:
                      - v0.5-torch-{{ checksum "setup.py" }}
                      - v0.5-{{ checksum "setup.py" }}
            - run: sudo apt-get -y update && sudo apt-get install -y libsndfile1-dev
            - run: pip install --upgrade pip
            - run: pip install .[torch,testing,vision]
            - run: pip install torchvision
            # The commit `36a65a0907d90ed591479b2ebaa8b61cfa0b4ef0` in `detectron2` break things.
            # See https://github.com/facebookresearch/detectron2/commit/36a65a0907d90ed591479b2ebaa8b61cfa0b4ef0#comments.
            # TODO: Revert this change back once the above issue is fixed.
            - run: python -m pip install 'git+https://github.com/facebookresearch/detectron2.git'
            - run: sudo apt install tesseract-ocr
            - run: pip install pytesseract
            - save_cache:
                  key: v0.5-torch-{{ checksum "setup.py" }}
                  paths:
                      - '~/.cache/pip'
            - run: python -m pytest -n 1 --max-worker-restart=0 tests/models/*layoutlmv* --dist=loadfile -s --make-reports=tests_layoutlmv2_and_v3 --durations=100
            - store_artifacts:
                  path: ~/transformers/tests_output.txt
            - store_artifacts:
                  path: ~/transformers/reports

1091
1092
1093
# TPU JOBS
    run_examples_tpu:
        docker:
1094
            - image: cimg/python:3.7.12
1095
1096
        environment:
            OMP_NUM_THREADS: 1
1097
            TRANSFORMERS_IS_CI: yes
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
        resource_class: xlarge
        parallelism: 1
        steps:
            - checkout
            - go/install
            - *checkout_ml_testing
            - gcp-gke/install
            - gcp-gke/update-kubeconfig-with-credentials:
                  cluster: $GKE_CLUSTER
                  perform-login: true
            - setup_remote_docker
            - *build_push_docker
            - *deploy_cluster
1111

1112
1113
    cleanup-gke-jobs:
        docker:
1114
            - image: cimg/python:3.7.12
1115
1116
1117
1118
1119
1120
        steps:
            - gcp-gke/install
            - gcp-gke/update-kubeconfig-with-credentials:
                  cluster: $GKE_CLUSTER
                  perform-login: true
            - *delete_gke_jobs
1121

LysandreJik's avatar
LysandreJik committed
1122
1123
1124
1125
workflow_filters: &workflow_filters
    filters:
        branches:
            only:
1126
                - main
1127
workflows:
    version: 2
    # Per-PR workflow: jobs gate their test runs on utils/tests_fetcher.py output.
    build_and_test:
        jobs:
            - check_code_quality
            - check_repository_consistency
            - run_examples_torch
            - run_examples_tensorflow
            - run_examples_flax
            - run_tests_custom_tokenizers
            - run_tests_torch_and_tf
            - run_tests_torch_and_flax
            - run_tests_torch
            - run_tests_tf
            - run_tests_flax
            - run_tests_pipelines_torch
            - run_tests_pipelines_tf
            - run_tests_onnxruntime
            - run_tests_hub
            - run_tests_layoutlmv2_and_v3
    # Nightly workflow: runs the *_all variants (full suites, no test_fetcher gating) on main at midnight UTC.
    nightly:
        triggers:
            - schedule:
                cron: "0 0 * * *"
                filters:
                    branches:
                        only:
                            - main
        jobs:
            - run_examples_torch_all
            - run_examples_tensorflow_all
            - run_examples_flax_all
            - run_tests_custom_tokenizers_all
            - run_tests_torch_and_tf_all
            - run_tests_torch_and_flax_all
            - run_tests_torch_all
            - run_tests_tf_all
            - run_tests_flax_all
            - run_tests_pipelines_torch_all
            - run_tests_pipelines_tf_all
            - run_tests_onnxruntime_all
            - run_tests_hub_all
            - run_tests_layoutlmv2_and_v3_all

#    tpu_testing_jobs:
#        triggers:
#            - schedule:
#                # Set to run at the first minute of every hour.
#                cron: "0 8 * * *"
#                filters:
#                    branches:
#                        only:
#                            - main
#        jobs:
#            - cleanup-gke-jobs
#            - run_examples_tpu