OpenDAS / AutoAWQ

Commit eb85f67d (unverified)
Authored Apr 06, 2024 by Roshiago; committed by GitHub on Apr 06, 2024

Add download_kwargs for load model (#302) (#399)

Parent: f8353793
Showing 2 changed files with 28 additions and 3 deletions:

awq/models/auto.py  +4  -0
awq/models/base.py  +24 -3
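For reference, a minimal usage sketch of the new parameter (the model id and kwarg values below are illustrative placeholders, not part of this commit). The dict is forwarded to huggingface_hub.snapshot_download when the model path is not a local directory, so options such as revision or cache_dir can be passed through:

    from awq import AutoAWQForCausalLM

    # Assumed quantized checkpoint on the Hugging Face Hub; substitute your own.
    model = AutoAWQForCausalLM.from_quantized(
        "org/some-model-awq",
        download_kwargs={
            "revision": "main",       # forwarded to snapshot_download
            "cache_dir": "/tmp/awq",  # forwarded to snapshot_download
        },
    )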
awq/models/auto.py
@@ -51,6 +51,7 @@ class AutoAWQForCausalLM:
         trust_remote_code=True,
         safetensors=True,
         device_map=None,
+        download_kwargs=None,
         **model_init_kwargs,
     ) -> BaseAWQForCausalLM:
         model_type = check_and_get_model_type(
@@ -63,6 +64,7 @@ class AutoAWQForCausalLM:
             trust_remote_code=trust_remote_code,
             safetensors=safetensors,
             device_map=device_map,
+            download_kwargs=download_kwargs,
             **model_init_kwargs,
         )
@@ -80,6 +82,7 @@ class AutoAWQForCausalLM:
         safetensors=True,
         device_map="balanced",
         offload_folder=None,
+        download_kwargs=None,
         **config_kwargs,
     ) -> BaseAWQForCausalLM:
         os.environ["AWQ_BATCH_SIZE"] = str(batch_size)
@@ -104,5 +107,6 @@ class AutoAWQForCausalLM:
             safetensors=safetensors,
             device_map=device_map,
             offload_folder=offload_folder,
+            download_kwargs=download_kwargs,
             **config_kwargs,
         )
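The same pass-through exists on the pre-quantization loading path changed in the first two hunks above; a short sketch, again with a placeholder model id and option:

    from awq import AutoAWQForCausalLM

    # Loading an unquantized FP16 model before quantization; the dict reaches
    # snapshot_download via self._load_config in awq/models/base.py.
    model = AutoAWQForCausalLM.from_pretrained(
        "org/some-fp16-model",
        download_kwargs={"local_files_only": True},  # placeholder option
    )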
awq/models/base.py
@@ -297,6 +297,9 @@ class BaseAWQForCausalLM(nn.Module):
                 "A device map that will be passed onto the model loading method from transformers."
             ),
         ] = None,
+        download_kwargs: Annotated[
+            Dict, Doc("Used for configure download model"),
+        ] = None,
         **model_init_kwargs: Annotated[
             Dict,
             Doc(
@@ -307,7 +310,9 @@ class BaseAWQForCausalLM(nn.Module):
         """A method for initialization of pretrained models, usually in FP16."""
         # Get weights path and quant config
         model_weights_path, config, quant_config = self._load_config(
-            self, model_path, "", safetensors, trust_remote_code=trust_remote_code
+            self, model_path, "", safetensors, trust_remote_code=trust_remote_code,
+            download_kwargs=download_kwargs
         )
         target_cls_name = TRANSFORMERS_AUTO_MAPPING_DICT[config.model_type]
@@ -390,6 +395,9 @@ class BaseAWQForCausalLM(nn.Module):
             str,
             Doc("The folder ot offload the model to."),
         ] = None,
+        download_kwargs: Annotated[
+            Dict, Doc("Used for configure download model"),
+        ] = None,
         **config_kwargs: Annotated[
             Dict,
             Doc(
@@ -406,6 +414,7 @@ class BaseAWQForCausalLM(nn.Module):
             safetensors,
             trust_remote_code,
             max_seq_len=max_seq_len,
+            download_kwargs=download_kwargs,
             **config_kwargs,
         )
@@ -477,6 +486,7 @@ class BaseAWQForCausalLM(nn.Module):
         safetensors=True,
         trust_remote_code=True,
         max_seq_len=4096,
+        download_kwargs=None,
         **config_kwargs,
     ):
         # [STEP 1] Download model if path is not a directory
@@ -487,7 +497,18 @@ class BaseAWQForCausalLM(nn.Module):
             else:
                 ignore_patterns.append("*.safetensors*")
 
-            model_path = snapshot_download(model_path, ignore_patterns=ignore_patterns)
+            if download_kwargs is None:
+                download_kwargs = {}
+
+            if "ignore_patterns" in download_kwargs:
+                download_kwargs_ignore_patterns = download_kwargs.pop("ignore_patterns")
+
+                if isinstance(download_kwargs_ignore_patterns, str):
+                    ignore_patterns.append(download_kwargs_ignore_patterns)
+                elif isinstance(download_kwargs_ignore_patterns, list):
+                    ignore_patterns.extend(download_kwargs_ignore_patterns)
+
+            model_path = snapshot_download(model_path, ignore_patterns=ignore_patterns, **download_kwargs)
 
         if model_filename != "":
             model_weights_path = model_path + f"/{model_filename}"
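The merge behaviour introduced in _load_config can be summarised in isolation. A minimal standalone sketch (the pattern values are illustrative, not the library's actual default list):

    # Illustration of how a user-supplied "ignore_patterns" entry in download_kwargs
    # is folded into the defaults that _load_config has already built up.
    ignore_patterns = ["*msgpack*", "*h5*"]          # stand-in for the defaults
    download_kwargs = {"ignore_patterns": ["*.onnx"], "revision": "main"}

    patterns = download_kwargs.pop("ignore_patterns")
    if isinstance(patterns, str):
        ignore_patterns.append(patterns)             # single glob string
    elif isinstance(patterns, list):
        ignore_patterns.extend(patterns)             # list of globs

    # Remaining keys (here "revision") are passed straight through:
    # snapshot_download(model_path, ignore_patterns=ignore_patterns, **download_kwargs)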