Unverified Commit 3714f3f8 authored by Yih-Dar's avatar Yih-Dar Committed by GitHub
Browse files

Upload (daily) CI results to Hub (#31168)



* build

* build

* build

* build

* fix

* fix

* fix

---------
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent 99de3a84
...@@ -19,6 +19,8 @@ on: ...@@ -19,6 +19,8 @@ on:
required: true required: true
type: string type: string
env:
TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN: ${{ secrets.TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN }}
jobs: jobs:
send_results: send_results:
...@@ -54,6 +56,7 @@ jobs: ...@@ -54,6 +56,7 @@ jobs:
# empty string, and the called script still gets one argument (which is the empty string).
run: | run: |
sudo apt-get install -y curl sudo apt-get install -y curl
pip install huggingface_hub
pip install slack_sdk pip install slack_sdk
pip show slack_sdk pip show slack_sdk
python utils/notification_service.py "${{ inputs.folder_slices }}" python utils/notification_service.py "${{ inputs.folder_slices }}"
...@@ -81,6 +84,7 @@ jobs: ...@@ -81,6 +84,7 @@ jobs:
# `quantization/bnb` to `quantization_bnb` is required, as the artifact names use `_` instead of `/`. # `quantization/bnb` to `quantization_bnb` is required, as the artifact names use `_` instead of `/`.
run: | run: |
sudo apt-get install -y curl sudo apt-get install -y curl
pip install huggingface_hub
pip install slack_sdk pip install slack_sdk
pip show slack_sdk pip show slack_sdk
python utils/notification_service_quantization.py "${{ inputs.quantization_matrix }}" python utils/notification_service_quantization.py "${{ inputs.quantization_matrix }}"
......
...@@ -14,6 +14,7 @@ ...@@ -14,6 +14,7 @@
import ast import ast
import collections import collections
import datetime
import functools import functools
import json import json
import operator import operator
...@@ -26,9 +27,11 @@ from typing import Dict, List, Optional, Union ...@@ -26,9 +27,11 @@ from typing import Dict, List, Optional, Union
import requests import requests
from get_ci_error_statistics import get_jobs from get_ci_error_statistics import get_jobs
from get_previous_daily_ci import get_last_daily_ci_reports from get_previous_daily_ci import get_last_daily_ci_reports
from huggingface_hub import HfApi
from slack_sdk import WebClient from slack_sdk import WebClient
api = HfApi()
client = WebClient(token=os.environ["CI_SLACK_BOT_TOKEN"]) client = WebClient(token=os.environ["CI_SLACK_BOT_TOKEN"])
NON_MODEL_TEST_MODULES = [ NON_MODEL_TEST_MODULES = [
...@@ -1154,12 +1157,25 @@ if __name__ == "__main__": ...@@ -1154,12 +1157,25 @@ if __name__ == "__main__":
if not os.path.isdir(os.path.join(os.getcwd(), f"ci_results_{job_name}")): if not os.path.isdir(os.path.join(os.getcwd(), f"ci_results_{job_name}")):
os.makedirs(os.path.join(os.getcwd(), f"ci_results_{job_name}")) os.makedirs(os.path.join(os.getcwd(), f"ci_results_{job_name}"))
target_workflow = "huggingface/transformers/.github/workflows/self-scheduled-caller.yml@refs/heads/main"
is_scheduled_ci_run = os.environ.get("CI_WORKFLOW_REF") == target_workflow
# Only the model testing job is concerned: this condition is to avoid other jobs to upload the empty list as # Only the model testing job is concerned: this condition is to avoid other jobs to upload the empty list as
# results. # results.
if job_name == "run_models_gpu": if job_name == "run_models_gpu":
with open(f"ci_results_{job_name}/model_results.json", "w", encoding="UTF-8") as fp: with open(f"ci_results_{job_name}/model_results.json", "w", encoding="UTF-8") as fp:
json.dump(model_results, fp, indent=4, ensure_ascii=False) json.dump(model_results, fp, indent=4, ensure_ascii=False)
# upload results to Hub dataset (only for the scheduled daily CI run on `main`)
if is_scheduled_ci_run:
api.upload_file(
path_or_fileobj=f"ci_results_{job_name}/model_results.json",
path_in_repo=f"{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/model_results.json",
repo_id="hf-internal-testing/transformers_daily_ci",
repo_type="dataset",
token=os.environ.get("TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN", None),
)
# Must have the same keys as in `additional_results`. # Must have the same keys as in `additional_results`.
# The values are used as the file names where to save the corresponding CI job results. # The values are used as the file names where to save the corresponding CI job results.
test_to_result_name = { test_to_result_name = {
...@@ -1172,10 +1188,19 @@ if __name__ == "__main__": ...@@ -1172,10 +1188,19 @@ if __name__ == "__main__":
with open(f"ci_results_{job_name}/{test_to_result_name[job]}_results.json", "w", encoding="UTF-8") as fp: with open(f"ci_results_{job_name}/{test_to_result_name[job]}_results.json", "w", encoding="UTF-8") as fp:
json.dump(job_result, fp, indent=4, ensure_ascii=False) json.dump(job_result, fp, indent=4, ensure_ascii=False)
# upload results to Hub dataset (only for the scheduled daily CI run on `main`)
if is_scheduled_ci_run:
api.upload_file(
path_or_fileobj=f"ci_results_{job_name}/{test_to_result_name[job]}_results.json",
path_in_repo=f"{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/{test_to_result_name[job]}_results.json",
repo_id="hf-internal-testing/transformers_daily_ci",
repo_type="dataset",
token=os.environ.get("TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN", None),
)
prev_ci_artifacts = None prev_ci_artifacts = None
if job_name == "run_models_gpu": if is_scheduled_ci_run:
target_workflow = "huggingface/transformers/.github/workflows/self-scheduled-caller.yml@refs/heads/main" if job_name == "run_models_gpu":
if os.environ.get("CI_WORKFLOW_REF") == target_workflow:
# Get the last previously completed CI's failure tables # Get the last previously completed CI's failure tables
artifact_names = [f"ci_results_{job_name}"] artifact_names = [f"ci_results_{job_name}"]
output_dir = os.path.join(os.getcwd(), "previous_reports") output_dir = os.path.join(os.getcwd(), "previous_reports")
......
...@@ -13,6 +13,7 @@ ...@@ -13,6 +13,7 @@
# limitations under the License. # limitations under the License.
import ast import ast
import datetime
import json import json
import os import os
import sys import sys
...@@ -20,6 +21,7 @@ import time ...@@ -20,6 +21,7 @@ import time
from typing import Dict from typing import Dict
from get_ci_error_statistics import get_jobs from get_ci_error_statistics import get_jobs
from huggingface_hub import HfApi
from notification_service import ( from notification_service import (
Message, Message,
handle_stacktraces, handle_stacktraces,
...@@ -31,6 +33,7 @@ from notification_service import ( ...@@ -31,6 +33,7 @@ from notification_service import (
from slack_sdk import WebClient from slack_sdk import WebClient
api = HfApi()
client = WebClient(token=os.environ["CI_SLACK_BOT_TOKEN"]) client = WebClient(token=os.environ["CI_SLACK_BOT_TOKEN"])
...@@ -249,6 +252,19 @@ if __name__ == "__main__": ...@@ -249,6 +252,19 @@ if __name__ == "__main__":
with open(f"ci_results_{job_name}/quantization_results.json", "w", encoding="UTF-8") as fp: with open(f"ci_results_{job_name}/quantization_results.json", "w", encoding="UTF-8") as fp:
json.dump(quantization_results, fp, indent=4, ensure_ascii=False) json.dump(quantization_results, fp, indent=4, ensure_ascii=False)
target_workflow = "huggingface/transformers/.github/workflows/self-scheduled-caller.yml@refs/heads/main"
is_scheduled_ci_run = os.environ.get("CI_WORKFLOW_REF") == target_workflow
# upload results to Hub dataset (only for the scheduled daily CI run on `main`)
if is_scheduled_ci_run:
api.upload_file(
path_or_fileobj=f"ci_results_{job_name}/quantization_results.json",
path_in_repo=f"{datetime.datetime.today().strftime('%Y-%m-%d')}/ci_results_{job_name}/quantization_results.json",
repo_id="hf-internal-testing/transformers_daily_ci",
repo_type="dataset",
token=os.environ.get("TRANSFORMERS_CI_RESULTS_UPLOAD_TOKEN", None),
)
message = QuantizationMessage( message = QuantizationMessage(
title, title,
results=quantization_results, results=quantization_results,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment