Commit 10625bd8 authored by Jason Krone's avatar Jason Krone
Browse files

Fix "TypeError: 'tqdm' object is not subscriptable" error that occurs

in the Hugging Face model's loglikelihood_tokens and greedy_until functions
when batch-size is set to auto
parent 17095c83
...@@ -661,19 +661,13 @@ class HFLM(LM): ...@@ -661,19 +661,13 @@ class HFLM(LM):
# automatic (variable) batch size detection for vectorization # automatic (variable) batch size detection for vectorization
# pull longest context sample from request # pull longest context sample from request
for chunk in utils.chunks( chunks = utils.chunks(
tqdm(re_ord.get_reordered(), disable=(disable_tqdm or (self.rank != 0))), re_ord.get_reordered(),
n=self.batch_size n=self.batch_size if self.batch_size != "auto" else override_bs if override_bs is not None else 0,
if self.batch_size != "auto" fn=self._batch_scheduler if self.batch_size == "auto" and n_reordered_requests > 0 and not override_bs else None,
else override_bs )
if override_bs is not None
else 0, for chunk in tqdm(chunks, disable=(disable_tqdm or (self.rank != 0))):
fn=self._batch_scheduler
if self.batch_size == "auto"
and n_reordered_requests > 0
and not override_bs
else None,
):
inps = [] inps = []
cont_toks_list = [] cont_toks_list = []
inplens = [] inplens = []
...@@ -844,17 +838,12 @@ class HFLM(LM): ...@@ -844,17 +838,12 @@ class HFLM(LM):
adaptive_batch_size = batch_size adaptive_batch_size = batch_size
# for each different set of kwargs, we execute all requests, by batch. # for each different set of kwargs, we execute all requests, by batch.
for key, re_ord in re_ords.items(): for key, re_ord in re_ords.items():
for chunk in utils.chunks( chunks = utils.chunks(
tqdm(re_ord.get_reordered(), disable=self.rank != 0), re_ord.get_reordered(),
n=self.batch_size n=self.batch_size if self.batch_size != "auto" else adaptive_batch_size if adaptive_batch_size is not None else 0,
if self.batch_size != "auto" fn=self._batch_scheduler if self.batch_size == "auto" and not adaptive_batch_size else None,
else adaptive_batch_size )
if adaptive_batch_size is not None for chunk in tqdm(chunks, disable=self.rank != 0):
else 0,
fn=self._batch_scheduler
if self.batch_size == "auto" and not adaptive_batch_size
else None,
):
contexts, all_gen_kwargs = zip(*chunk) contexts, all_gen_kwargs = zip(*chunk)
# we assume all gen kwargs in the batch are the same # we assume all gen kwargs in the batch are the same
# this is safe to assume because the `grouper` object ensures it. # this is safe to assume because the `grouper` object ensures it.
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment