"docs/git@developer.sourcefind.cn:hehl2/torchaudio.git" did not exist on "de1cb83d8ba7ab1a7b99d9b711ca53c18e607440"
Commit 76259a80 authored by myhloli
Browse files

fix: update backend references from huggingface to transformers in client and predictor modules

parent 72340ba7
......@@ -22,7 +22,7 @@ try:
hf_loaded = True
except ImportError as e:
logger.warning("hf is not installed. If you are not using huggingface, you can ignore this warning.")
logger.warning("hf is not installed. If you are not using transformers, you can ignore this warning.")
engine_loaded = False
try:
......@@ -51,9 +51,9 @@ def get_predictor(
) -> BasePredictor:
start_time = time.time()
if backend == "huggingface":
if backend == "transformers":
if not model_path:
raise ValueError("model_path must be provided for huggingface backend.")
raise ValueError("model_path must be provided for transformers backend.")
if not hf_loaded:
raise ImportError(
"transformers is not installed, so huggingface backend cannot be used. "
......@@ -77,7 +77,7 @@ def get_predictor(
raise ImportError(
"sglang is not installed, so sglang-engine backend cannot be used. "
"If you need to use sglang-engine backend for inference, "
"please install sglang[all]==0.4.6.post4 or a newer version."
"please install sglang[all]==0.4.7 or a newer version."
)
predictor = SglangEnginePredictor(
server_args=ServerArgs(model_path, **kwargs),
......@@ -104,7 +104,7 @@ def get_predictor(
http_timeout=http_timeout,
)
else:
raise ValueError(f"Unsupported backend: {backend}. Supports: huggingface, sglang-engine, sglang-client.")
raise ValueError(f"Unsupported backend: {backend}. Supports: transformers, sglang-engine, sglang-client.")
elapsed = round(time.time() - start_time, 2)
logger.info(f"get_predictor cost: {elapsed}s")
......
......@@ -40,7 +40,7 @@ def doc_analyze(
pdf_bytes,
image_writer: DataWriter | None,
predictor: BasePredictor | None = None,
backend="huggingface",
backend="transformers",
model_path=ModelPath.vlm_root_hf,
server_url: str | None = None,
):
......@@ -66,7 +66,7 @@ async def aio_doc_analyze(
pdf_bytes,
image_writer: DataWriter | None,
predictor: BasePredictor | None = None,
backend="huggingface",
backend="transformers",
model_path=ModelPath.vlm_root_hf,
server_url: str | None = None,
):
......
......@@ -48,10 +48,10 @@ from .common import do_parse, read_fn, pdf_suffixes, image_suffixes
'-b',
'--backend',
'backend',
type=click.Choice(['pipeline', 'vlm-huggingface', 'vlm-sglang-engine', 'vlm-sglang-client']),
type=click.Choice(['pipeline', 'vlm-transformers', 'vlm-sglang-engine', 'vlm-sglang-client']),
help="""the backend for parsing pdf:
pipeline: More general.
vlm-huggingface: More general.
vlm-transformers: More general.
vlm-sglang-engine: Faster(engine).
vlm-sglang-client: Faster(client).
without method specified, pipeline will be used by default.""",
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment