Unverified Commit b7a4ea06 authored by Lintang Sutawika, committed by GitHub

Merge pull request #952 from EleutherAI/fixup-logging

[Refactor] Logging fixes
parents 01227a7e 9e5648b8
@@ -2,7 +2,6 @@ import random
import itertools
import json
import collections
import logging
import sys
import torch
@@ -25,10 +24,6 @@ from lm_eval.utils import (
from lm_eval.logger import eval_logger
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler(sys.stdout))
@positional_deprecated
def simple_evaluate(
......
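For context (not part of the diff): the hunks above drop the per-module logger that evaluator.py used to configure for itself, `logging.getLogger(__name__)` plus its own stdout StreamHandler, in favor of the shared `eval_logger` imported from `lm_eval.logger`. Below is a minimal sketch of that shared-logger pattern; the logger name, format, and level here are illustrative assumptions, not the actual contents of `lm_eval/logger.py`.

```python
# logger.py -- one module configures logging once; everything else just imports it.
# Illustrative sketch only: the real lm_eval.logger may choose a different name,
# format, and level.
import logging
import sys

eval_logger = logging.getLogger("lm-eval")
if not eval_logger.handlers:  # avoid stacking duplicate handlers on repeated imports
    _handler = logging.StreamHandler(sys.stdout)
    _handler.setFormatter(
        logging.Formatter("%(asctime)s %(levelname)s [%(name)s] %(message)s")
    )
    eval_logger.addHandler(_handler)
    eval_logger.setLevel(logging.INFO)

# elsewhere in the package:
# from lm_eval.logger import eval_logger
# eval_logger.info("...")
```

Configuring handlers in exactly one place is what keeps log lines from being printed twice when several modules would otherwise each attach their own StreamHandler.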
@@ -675,7 +675,8 @@ class HFLM(LM):
 else None,
 )
-for chunk in tqdm(chunks, disable=(disable_tqdm or (self.rank != 0))):
+pbar = tqdm(total=len(requests), disable=(disable_tqdm or (self.rank != 0)))
+for chunk in chunks:
 inps = []
 cont_toks_list = []
 inplens = []
@@ -812,6 +813,9 @@
 res.append(answer)
 self.cache_hook.add_partial("loglikelihood", cache_key, answer)
+pbar.update(1)
+pbar.close()
 return re_ord.get_original(res)
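Not part of the diff: the two hunks above swap a tqdm wrapper around the batched `chunks` iterator for a bar that is created with `total=len(requests)`, advanced once per finished request, and closed explicitly. A stripped-down sketch of that pattern follows; `run_batched` and `handle` are hypothetical names standing in for the real HFLM method and its per-request work.

```python
from tqdm import tqdm

def run_batched(requests, batch_size, handle, disable_tqdm=False, rank=0):
    """Process `requests` in batches while reporting progress per request."""
    # Naive batching stands in for the harness's re-ordering/chunking logic.
    chunks = [requests[i:i + batch_size] for i in range(0, len(requests), batch_size)]
    results = []
    # Sizing the bar by requests (not batches) keeps its total meaningful even
    # when batch sizes are chosen adaptively; only rank 0 displays it.
    pbar = tqdm(total=len(requests), disable=(disable_tqdm or rank != 0))
    for chunk in chunks:
        for request in chunk:
            results.append(handle(request))
            pbar.update(1)  # advance once per finished request
    pbar.close()
    return results

# usage sketch:
# run_batched(list(range(10)), batch_size=4, handle=lambda r: r * 2)
```

Once the bar no longer wraps the iterator directly, closing it before returning is what tidies up the display.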
@@ -857,7 +861,7 @@
 if self.batch_size == "auto" and not adaptive_batch_size
 else None,
 )
-for chunk in tqdm(chunks, disable=self.rank != 0):
+for chunk in chunks:
 contexts, all_gen_kwargs = zip(*chunk)
 # we assume all gen kwargs in the batch are the same
 # this is safe to assume because the `grouper` object ensures it.
......
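Not part of the diff: the comment in the last hunk leans on an invariant, namely that every request in a chunk carries identical generation kwargs because requests are grouped by those kwargs before batching. The hypothetical `group_by_gen_kwargs` helper below illustrates that invariant; the harness's actual `grouper` utility is not shown in this diff and may work differently.

```python
import collections

def group_by_gen_kwargs(requests):
    """Group (context, gen_kwargs) pairs so each group shares identical gen_kwargs.

    Hypothetical illustration only; not the harness's real grouping utility.
    """
    groups = collections.defaultdict(list)
    for context, gen_kwargs in requests:
        # Key on a string form so unhashable values (e.g. lists of stop strings)
        # can still serve as a grouping key.
        key = str(sorted(gen_kwargs.items()))
        groups[key].append((context, gen_kwargs))
    return list(groups.values())

# usage sketch: any batch drawn from one group can read its gen kwargs from the
# first element, exactly as the loop in the hunk above does.
# groups = group_by_gen_kwargs([
#     ("prompt a", {"max_new_tokens": 32, "until": ["\n"]}),
#     ("prompt b", {"max_new_tokens": 32, "until": ["\n"]}),
# ])
```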