Unverified commit 2c74c556 authored by drbh, committed by GitHub

fix: add merge-lora arg for model id (#2788)

parent a35d1e6f
@@ -1193,6 +1193,7 @@ fn download_convert_model(
huggingface_hub_cache: Option<&str>,
weights_cache_override: Option<&str>,
running: Arc<AtomicBool>,
merge_lora: bool,
) -> Result<(), LauncherError> {
// Enter download tracing span
let _span = tracing::span!(tracing::Level::INFO, "download").entered();
@@ -1207,6 +1208,10 @@ fn download_convert_model(
"--json-output".to_string(),
];
if merge_lora {
download_args.push("--merge-lora".to_string());
}
// Model optional revision
if let Some(revision) = &revision {
download_args.push("--revision".to_string());
@@ -1842,6 +1847,7 @@ fn main() -> Result<(), LauncherError> {
args.huggingface_hub_cache.as_deref(),
args.weights_cache_override.as_deref(),
running.clone(),
true, // if it's only a lora model - we should merge the lora adapters
)?;
// Download and convert lora adapters if any
@@ -1875,6 +1881,7 @@ fn main() -> Result<(), LauncherError> {
args.huggingface_hub_cache.as_deref(),
args.weights_cache_override.as_deref(),
running.clone(),
false, // avoid merging lora adapters if using multi-lora
)?;
} else {
return Err(LauncherError::ArgumentValidation(format!(
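
For context, the change follows a simple pattern: the launcher builds the downloader's argument vector and appends `--merge-lora` only when the caller asks for it (`true` when the model id itself is a pure LoRA model, `false` when multi-LoRA adapters are downloaded separately). The sketch below illustrates that pattern in isolation; `build_download_args` and its argument list are hypothetical scaffolding for the example, not the launcher's actual function.

```rust
// Minimal sketch (not the launcher's real code) of the conditional-argument
// pattern introduced in this commit.
fn build_download_args(model_id: &str, revision: Option<&str>, merge_lora: bool) -> Vec<String> {
    let mut download_args = vec![
        model_id.to_string(),
        "--json-output".to_string(),
    ];

    // Ask the downloader to merge LoRA weights into the base model only
    // when requested (mirrors the new `merge_lora` parameter in the diff).
    if merge_lora {
        download_args.push("--merge-lora".to_string());
    }

    // Model optional revision
    if let Some(revision) = revision {
        download_args.push("--revision".to_string());
        download_args.push(revision.to_string());
    }

    download_args
}

fn main() {
    // Mirrors the two call sites: merge when the model id itself is a LoRA,
    // skip merging when multi-LoRA adapters are fetched separately.
    let base = build_download_args("some/model-id", None, true);
    let adapters = build_download_args("some/adapter-id", Some("main"), false);

    assert!(base.contains(&"--merge-lora".to_string()));
    assert!(!adapters.contains(&"--merge-lora".to_string()));
}
```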