gaoqiong / lm-evaluation-harness, commit 3ccea2b2 (unverified)

Authored Oct 23, 2023 by Stella Biderman; committed by GitHub on Oct 23, 2023.

Merge pull request #918 from Muennighoff/patch-2

pass through low_cpu_mem_usage

Parents: 906ef948, d98cb3d5
Showing 1 changed file with 1 addition and 3 deletions.

lm_eval/models/huggingface.py (+1, -3)
@@ -19,7 +19,6 @@ _DeviceMapping = NewType("DeviceMapping", Mapping[str, Union[int, str, torch.device]])
 def _get_accelerate_args(
-    low_cpu_mem_usage: Optional[bool] = True,
     device_map_option: Optional[str] = "auto",
     max_memory_per_gpu: Optional[Union[int, str]] = None,
     max_cpu_memory: Optional[Union[int, str]] = None,
@@ -39,7 +38,6 @@ def _get_accelerate_args(
     args = {}
     if max_memory:
         args["max_memory"] = max_memory
-    args["low_cpu_mem_usage"] = low_cpu_mem_usage
     args["device_map"] = device_map_option
     args["offload_folder"] = offload_folder
     return args
@@ -222,7 +220,6 @@ class HuggingFaceAutoLM(BaseLM):
         model_kwargs = {}
         if use_accelerate:
             model_kwargs = _get_accelerate_args(
-                low_cpu_mem_usage,
                 device_map_option,
                 max_memory_per_gpu,
                 max_cpu_memory,
@@ -242,6 +239,7 @@ class HuggingFaceAutoLM(BaseLM):
             bnb_4bit_quant_type=bnb_4bit_quant_type,
             bnb_4bit_compute_dtype=bnb_4bit_compute_dtype,
             bnb_4bit_use_double_quant=bnb_4bit_use_double_quant,
+            low_cpu_mem_usage=low_cpu_mem_usage,
             **model_kwargs,
         )
         # note: peft_path can be different than pretrained model path
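Net effect of the change: low_cpu_mem_usage is no longer folded into the kwargs returned by _get_accelerate_args, where it only applied when use_accelerate was set; instead it is passed explicitly into the model-construction call alongside the quantization arguments, so it reaches from_pretrained in every configuration. The sketch below illustrates that call pattern with plain transformers; the wrapper function, its parameters, and its defaults are illustrative placeholders, not the harness's actual implementation.

    from typing import Optional

    import transformers


    def load_causal_lm(
        pretrained: str,
        low_cpu_mem_usage: Optional[bool] = True,
        use_accelerate: bool = False,
        device_map_option: str = "auto",
    ):
        # Illustrative sketch of the post-commit behaviour, not the harness's real code.
        model_kwargs = {}
        if use_accelerate:
            # Accelerate-specific kwargs (device_map, max_memory, offload_folder, ...)
            # are still collected only when use_accelerate is set ...
            model_kwargs["device_map"] = device_map_option
        return transformers.AutoModelForCausalLM.from_pretrained(
            pretrained,
            # ... but low_cpu_mem_usage is now forwarded unconditionally, matching
            # the low_cpu_mem_usage=low_cpu_mem_usage line added in the final hunk.
            low_cpu_mem_usage=low_cpu_mem_usage,
            **model_kwargs,
        )


    # Usage sketch: the flag now takes effect even without accelerate.
    # model = load_causal_lm("gpt2", low_cpu_mem_usage=True)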