Unverified Commit 02f7f3e4 authored by Lianmin Zheng's avatar Lianmin Zheng Committed by GitHub
Browse files

Update the transformers version in CI (#1690)

parent 2782132b
...@@ -29,7 +29,7 @@ jobs: ...@@ -29,7 +29,7 @@ jobs:
run: | run: |
pip install --upgrade pip pip install --upgrade pip
pip install -e "python[dev]" pip install -e "python[dev]"
pip install transformers==4.44 pip install transformers==4.45.2
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
- name: Run test - name: Run test
...@@ -49,7 +49,7 @@ jobs: ...@@ -49,7 +49,7 @@ jobs:
run: | run: |
pip install --upgrade pip pip install --upgrade pip
pip install -e "python[dev]" pip install -e "python[dev]"
pip install transformers==4.44 pip install transformers==4.45.2
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
- name: Run test - name: Run test
...@@ -69,7 +69,7 @@ jobs: ...@@ -69,7 +69,7 @@ jobs:
run: | run: |
pip install --upgrade pip pip install --upgrade pip
pip install -e "python[dev]" pip install -e "python[dev]"
pip install transformers==4.44 pip install transformers==4.45.2
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
- name: Run test - name: Run test
...@@ -89,7 +89,7 @@ jobs: ...@@ -89,7 +89,7 @@ jobs:
run: | run: |
pip install --upgrade pip pip install --upgrade pip
pip install -e "python[dev]" pip install -e "python[dev]"
pip install transformers==4.44 pip install transformers==4.45.2
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
- name: Run test - name: Run test
...@@ -109,7 +109,7 @@ jobs: ...@@ -109,7 +109,7 @@ jobs:
run: | run: |
pip install --upgrade pip pip install --upgrade pip
pip install -e "python[all]" pip install -e "python[all]"
pip install transformers==4.44 pip install transformers==4.45.2
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
- name: Benchmark Single Latency - name: Benchmark Single Latency
...@@ -147,7 +147,7 @@ jobs: ...@@ -147,7 +147,7 @@ jobs:
run: | run: |
pip install --upgrade pip pip install --upgrade pip
pip install -e "python[all]" pip install -e "python[all]"
pip install transformers==4.44 pip install transformers==4.45.2
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
- name: Benchmark Offline Throughput (w/o RadixAttention) - name: Benchmark Offline Throughput (w/o RadixAttention)
...@@ -179,7 +179,7 @@ jobs: ...@@ -179,7 +179,7 @@ jobs:
run: | run: |
pip install --upgrade pip pip install --upgrade pip
pip install -e "python[all]" pip install -e "python[all]"
pip install transformers==4.44 pip install transformers==4.45.2
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
- name: Benchmark Offline Throughput (TP=2) - name: Benchmark Offline Throughput (TP=2)
...@@ -211,7 +211,7 @@ jobs: ...@@ -211,7 +211,7 @@ jobs:
run: | run: |
pip install --upgrade pip pip install --upgrade pip
pip install -e "python[all]" pip install -e "python[all]"
pip install transformers==4.44 pip install transformers==4.45.2
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
git clone https://github.com/merrymercy/human-eval.git git clone https://github.com/merrymercy/human-eval.git
...@@ -235,7 +235,7 @@ jobs: ...@@ -235,7 +235,7 @@ jobs:
run: | run: |
pip install --upgrade pip pip install --upgrade pip
pip install -e "python[all]" pip install -e "python[all]"
pip install transformers==4.44 pip install transformers==4.45.2
pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall pip install flashinfer -i https://flashinfer.ai/whl/cu121/torch2.4/ --force-reinstall
git clone https://github.com/merrymercy/human-eval.git git clone https://github.com/merrymercy/human-eval.git
......
...@@ -432,9 +432,11 @@ def launch_server( ...@@ -432,9 +432,11 @@ def launch_server(
LOGGING_CONFIG["formatters"]["default"][ LOGGING_CONFIG["formatters"]["default"][
"fmt" "fmt"
] = "[%(asctime)s] %(levelprefix)s %(message)s" ] = "[%(asctime)s] %(levelprefix)s %(message)s"
LOGGING_CONFIG["formatters"]["default"]["datefmt"] = "%Y-%m-%d %H:%M:%S"
LOGGING_CONFIG["formatters"]["access"][ LOGGING_CONFIG["formatters"]["access"][
"fmt" "fmt"
] = '[%(asctime)s] %(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s' ] = '[%(asctime)s] %(levelprefix)s %(client_addr)s - "%(request_line)s" %(status_code)s'
LOGGING_CONFIG["formatters"]["access"]["datefmt"] = "%Y-%m-%d %H:%M:%S"
uvicorn.run( uvicorn.run(
app, app,
host=server_args.host, host=server_args.host,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment