import collections
import fnmatch
import functools
import hashlib
import importlib.util
import inspect
import json
import logging
import os
import re
from dataclasses import asdict, is_dataclass
from itertools import islice
from pathlib import Path
from typing import Any, Callable, Generator, List, Optional, Tuple

import numpy as np
import yaml
from jinja2 import BaseLoader, Environment, StrictUndefined


SPACING = " " * 47

HIGHER_IS_BETTER_SYMBOLS = {
    True: "↑",
    False: "↓",
}


def wrap_text(string: str, width: int = 140, **kwargs) -> str:
    """
    Wraps the given string to the specified width.
    """
    import textwrap

    return textwrap.fill(
        inspect.cleandoc(string),
        width=width,
        initial_indent="",
        subsequent_indent=" " * 8,
        break_long_words=False,
        break_on_hyphens=False,
        **kwargs,
    )


def setup_logging(verbosity=logging.INFO):
    # Configure the root logger
    class CustomFormatter(logging.Formatter):
        def format(self, record):
            if record.name.startswith("lm_eval."):
                record.name = record.name[len("lm_eval.") :]
            return super().format(record)

    formatter = CustomFormatter(
        "%(asctime)s %(levelname)-8s [%(name)s:%(lineno)d] %(message)s",
        datefmt="%Y-%m-%d:%H:%M:%S",
    )

    log_level = os.environ.get("LOGLEVEL", verbosity) or verbosity

    level_map = {
        "DEBUG": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARNING,
        "ERROR": logging.ERROR,
        "CRITICAL": logging.CRITICAL,
    }

    log_level = level_map.get(str(log_level).upper(), logging.INFO)

    if not logging.root.handlers:
        handler = logging.StreamHandler()
        handler.setFormatter(formatter)

        root_logger = logging.getLogger()
        root_logger.addHandler(handler)
        root_logger.setLevel(log_level)

        if log_level == logging.DEBUG:
            third_party_loggers = ["urllib3", "filelock", "fsspec"]
            for logger_name in third_party_loggers:
                logging.getLogger(logger_name).setLevel(logging.INFO)
    else:
        logging.getLogger().setLevel(log_level)


def hash_string(string: str) -> str:
    return hashlib.sha256(string.encode("utf-8")).hexdigest()


def escaped_split(text, sep_char, maxsplit=-1):
    """Split text into a list on occurrences of the given separation
    character `sep_char`. The separation character may be escaped by a
    backslash to avoid splitting at that location.

    The separation character must be a string of size 1.

    If `maxsplit` is given, at most `maxsplit` splits are done (thus,
    the list will have at most `maxsplit + 1` elements). If `maxsplit`
    is not specified or less than 0, then there is no limit on the
    number of splits (all possible splits are made).
    """
    assert len(sep_char) == 1, (
        "separation string must be a single character for escaped splitting"
    )

    if maxsplit == 0:
        return text
    maxsplit = max(0, maxsplit)

    return re.split(r"(?<!\\)" + re.escape(sep_char), text, maxsplit=maxsplit)

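# Illustrative example (not part of the original module): the backslash escape
# prevents a split and is kept verbatim in the output, e.g.
#   escaped_split(r"a:b\:c:d", ":")  ->  ["a", "b\\:c", "d"]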

def handle_arg_string(arg):
    if arg.lower() == "true":
        return True
    elif arg.lower() == "false":
        return False
    elif arg.isnumeric():
        return int(arg)
    try:
        return float(arg)
    except ValueError:
        return arg


def handle_non_serializable(o):
    if isinstance(o, np.int64) or isinstance(o, np.int32):
        return int(o)
    elif isinstance(o, set):
        return list(o)
    else:
        return str(o)


def sanitize_list(sub):
    """
    Takes a possibly nested list and recursively converts all inner components to strings.
    """
    if isinstance(sub, list):
        return [sanitize_list(item) for item in sub]
    if isinstance(sub, tuple):
        return tuple(sanitize_list(item) for item in sub)
    else:
        return str(sub)


def simple_parse_args_string(args_string: Optional[str]) -> dict:
    """
    Parses something like
        arg1=val1,arg2=val2
    Into a dictionary
    """
    if args_string is None:
        return {}
    args_string = args_string.strip()
    if not args_string:
        return {}
    arg_list = [arg for arg in args_string.split(",") if arg]
    args_dict = {
        kv[0]: handle_arg_string("=".join(kv[1:]))
        for kv in [arg.split("=") for arg in arg_list]
    }
    return args_dict

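# Illustrative example (argument names are hypothetical): values are typed by
# handle_arg_string, so booleans and numbers come back converted, e.g.
#   simple_parse_args_string("pretrained=gpt2,dtype=float16,trust_remote_code=True")
#   ->  {"pretrained": "gpt2", "dtype": "float16", "trust_remote_code": True}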

def join_iters(iters):
    for it in iters:
        yield from it


def group(arr, fn):
    res = collections.defaultdict(list)

    for ob in arr:
        res[fn(ob)].append(ob)

    return list(res.values())


# Returns a list containing all values of the source_list that
# match at least one of the patterns
def pattern_match(patterns, source_list):
    if isinstance(patterns, str):
        patterns = [patterns]

    task_names = set()
    for pattern in patterns:
        for matching in fnmatch.filter(source_list, pattern):
            task_names.add(matching)
    return sorted(list(task_names))

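# Illustrative example (task names are hypothetical): wildcard patterns are
# expanded against the available task list, e.g.
#   pattern_match(["hellaswag*"], ["hellaswag", "hellaswag_de", "arc_easy"])
#   ->  ["hellaswag", "hellaswag_de"]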

def softmax(x) -> np.ndarray:
    """Compute softmax values for each sets of scores in x."""
    e_x = np.exp(x - np.max(x))
    return e_x / e_x.sum()


def general_detokenize(string) -> str:
    string = string.replace(" n't", "n't")
    string = string.replace(" )", ")")
    string = string.replace("( ", "(")
    string = string.replace('" ', '"')
    string = string.replace(' "', '"')
    string = re.sub(r" (['.,])", r"\1", string)
    return string

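# Illustrative example (made-up sentence):
#   general_detokenize("I do n't like it , he said ( laughing ) .")
#   ->  "I don't like it, he said (laughing)."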

def get_file_task_name(filename: str) -> str:
    """
    Given a sample results filename, extracts and returns the task name.
    """
    return filename[filename.find("_") + 1 : filename.rfind("_")]


def get_file_datetime(filename: str) -> str:
    """
    Given a results or sample results filename, extracts and returns the datetime.
    """
    return filename[filename.rfind("_") + 1 :].replace(".jsonl", "")
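
# Illustrative example, assuming the harness's "samples_<task>_<datetime>.jsonl"
# naming scheme for per-sample output files:
#   get_file_task_name("samples_arc_easy_2024-01-01T00-00-00.000000.jsonl")  ->  "arc_easy"
#   get_file_datetime("samples_arc_easy_2024-01-01T00-00-00.000000.jsonl")   ->  "2024-01-01T00-00-00.000000"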


def sanitize_model_name(model_name: str) -> str:
    """
    Given the model name, returns a sanitized version of it.
    """
    return re.sub(r"[\"<>:/\|\\?\*\[\]]+", "__", model_name)


def sanitize_task_name(task_name: str) -> str:
    """
    Given the task name, returns a sanitized version of it.
    """
    return re.sub(r"\W", "_", task_name)


def get_latest_filename(filenames: List[str]) -> str:
    """
    Given a list of filenames, returns the filename with the latest datetime.
    """
    return max(filenames, key=lambda f: get_file_datetime(f))


def get_results_filenames(filenames: List[str]) -> List[str]:
    """
    Extracts filenames that correspond to aggregated results.
    """
    return [f for f in filenames if "/results_" in f and ".json" in f]


def get_sample_results_filenames(filenames: List[str]) -> List[str]:
    """
    Extracts filenames that correspond to sample results.
    """
    return [f for f in filenames if "/samples_" in f and ".json" in f]


def get_rolling_token_windows(
    token_list: List[int], prefix_token: int, max_seq_len: int, context_len: int
) -> Generator[Tuple[List[int], List[int]], None, None]:
    """
    - context_len allows for a rolling window context, allowing each prediction window to potentially
      condition on some context

    :param token_list: list
        List of tokens to be PREDICTED
    :param max_seq_len: int
        max_seq_len of model (or max_seq_len we want to use)
    :param context_len: int
        Amount of desired token context for prediction. Needs to be at least 1.
    :param prefix_token: token
        Dummy token like <eos> so the first token has something to condition on
    :return: generator
        Generator of tuples
            (input_tokens, pred_tokens)
        Note: Score only the last len(pred_tokens) logits of the LM
    """
    assert 1 <= context_len <= max_seq_len
    if not token_list:
        return
    # +1 offset, going from input->preds
    pred_len = max_seq_len - context_len + 1
    predicted = 0

    # Special handling for first window: predict all tokens
    first_seq_len = min(max_seq_len, len(token_list))
    yield [prefix_token] + token_list[: first_seq_len - 1], token_list[:first_seq_len]
    predicted += first_seq_len

    while predicted < len(token_list):
        window_pred_len = min(len(token_list) - predicted, pred_len)
        window_end = predicted + window_pred_len

        yield (
            token_list[window_end - max_seq_len - 1 : window_end - 1],
            token_list[window_end - window_pred_len : window_end],
        )
        predicted += window_pred_len


def make_disjoint_window(
    pair: Tuple[List[int], List[int]],
) -> Tuple[List[int], List[int]]:
    """Takes output from get_rolling_token_windows and makes the context not overlap with the continuation"""
    a, b = pair
    return a[: len(a) - (len(b) - 1)], b

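# Worked example (illustrative numbers, not from the original source): with
# prefix_token=0, max_seq_len=4 and context_len=1, the tokens [1..10] yield
#   ([0, 1, 2, 3], [1, 2, 3, 4]), ([4, 5, 6, 7], [5, 6, 7, 8]), ([6, 7, 8, 9], [9, 10])
# and make_disjoint_window trims the last pair to ([6, 7, 8], [9, 10]) so the
# context never overlaps the continuation being scored.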

class EnhancedJSONEncoder(json.JSONEncoder):
    """
    Provides a proper json encoding for the loggers and trackers json dumps.
    Notably manages the json encoding of dataclasses.
    """

    def default(self, o):
        if is_dataclass(o):
            return asdict(o)
        return super().default(o)


class Reorderer:
    def __init__(self, arr: List[Any], fn: Callable) -> None:
        """Reorder an array according to some function

        Args:
            arr (List[Any]): The initial array
            fn (Callable[[Any], Any]): A function to determine the priority of elements
        """
        self.size = len(arr)
        arr = list(enumerate(arr))
        arr = group(arr, lambda x: fn(x[1]))
        # arr = [([y[0] for y in x], x[0][1]) for x in arr]
        # TODO: overhaul reorderer. It currently groups requests by content, but we don't want this
        arr = [([y[0]], x[0][1]) for x in arr for y in x]
        arr.sort(key=lambda x: fn(x[1]))

        self.arr = arr

    def get_reordered(self):
        """Gets the reordered array

        Returns:
            List[Any]: The reordered array
        """
        return [x[1] for x in self.arr]

    def get_original(self, newarr):
        """Restores the original order of a new array based on the old array's order

        Args:
            newarr (List[Any]): The array to be restored

        Returns:
            List[Any]: The array restored to the original order
        """
        res = [None] * self.size
        cov = [False] * self.size

        for (inds, _), v in zip(self.arr, newarr):
            for ind in inds:
                res[ind] = v
                cov[ind] = True

        assert all(cov)

        return res

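# Illustrative round trip (sketch): reorder by length, then restore the input order.
#   ro = Reorderer(["bb", "a", "ccc"], len)
#   ro.get_reordered()                   ->  ["a", "bb", "ccc"]
#   ro.get_original(["A", "BB", "CCC"])  ->  ["BB", "A", "CCC"]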

def make_table(result_dict, column: str = "results", sort_results: bool = False):
    """Generate table of results."""
    from pytablewriter import LatexTableWriter, MarkdownTableWriter

    if column == "results":
        column_name = "Tasks"
    elif column == "groups":
        column_name = "Groups"

    all_headers = [
        column_name,
        "Version",
        "Filter",
        "n-shot",
        "Metric",
        "",
        "Value",
        "",
        "Stderr",
    ]

    md_writer = MarkdownTableWriter()
    latex_writer = LatexTableWriter()
    md_writer.headers = all_headers
    latex_writer.headers = all_headers

    values = []

    keys = result_dict[column].keys()
    if sort_results:
        # sort entries alphabetically by task or group name.
        # NOTE: we default here to false, because order matters for multi-level table printing a la mmlu.
        # sorting here would mess that up
        keys = sorted(keys)
    for k in keys:
        dic = result_dict[column][k]
        version = result_dict["versions"].get(k, "    N/A")
        n = str(result_dict.get("n-shot", {}).get(k, " "))
        # TODO: fix this
        # higher_is_better = result_dict.get("higher_is_better", {}).get(k, {})

        if "alias" in dic:
            k = dic.pop("alias")

        metric_items = dic.items()
        metric_items = sorted(metric_items)

        for mf, v in metric_items:
            m, _, f = mf.partition(",")
            if m.endswith("_stderr"):
                continue

            # hib = HIGHER_IS_BETTER_SYMBOLS.get(higher_is_better.get(m), "")
            # TODO: fix
            hib = "↑"

            v = "%.4f" % v if isinstance(v, float) else v

            if m + "_stderr" + "," + f in dic:
                se = dic[m + "_stderr" + "," + f]
                se = "   N/A" if se == "N/A" else "%.4f" % se
                values.append([k, version, f, n, m, hib, v, "±", se])
            else:
                values.append([k, version, f, n, m, hib, v, "", ""])
            k = ""
            version = ""
    md_writer.value_matrix = values
    latex_writer.value_matrix = values

    # todo: make latex table look good
    # print(latex_writer.dumps())

    return md_writer.dumps()


def positional_deprecated(fn):
    """
    A decorator to nudge users into passing only keyword args (`kwargs`) to the
    wrapped function, `fn`.
    """

    @functools.wraps(fn)
    def _wrapper(*args, **kwargs):
        if len(args) != (1 if inspect.ismethod(fn) else 0):
            print(
                f"WARNING: using {fn.__name__} with positional arguments is "
                "deprecated and will be disallowed in a future version of "
                "lm-evaluation-harness!"
            )
        return fn(*args, **kwargs)

    return _wrapper

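# Illustrative usage (names are hypothetical): the wrapped function still runs,
# but positional calls print the deprecation warning.
#   @positional_deprecated
#   def run(model=None, tasks=None): ...
#   run(model="m")   # silent
#   run("m")         # works, but warns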

def ignore_constructor(loader, node):
    return node


def import_function(loader: yaml.Loader, node, yaml_path: Path):
    function_name = loader.construct_scalar(node)

    *module_name, function_name = function_name.split(".")
    if isinstance(module_name, list):
        module_name = ".".join(module_name)
    module_path = yaml_path.parent / f"{module_name}.py"

    spec = importlib.util.spec_from_file_location(module_name, module_path.as_posix())

    if spec is None:
        raise ImportError(f"Could not import module {module_name} from {module_path}.")
    module = importlib.util.module_from_spec(spec)

    if spec.loader is None:
        raise ImportError(f"Module loader is None, {module_name} from {module_path}.")
    spec.loader.exec_module(module)

    function = getattr(module, function_name)
    return function


def load_yaml_config(yaml_path=None, yaml_config=None, yaml_dir=None, mode="full"):
    if mode == "simple":
        constructor_fn = ignore_constructor
    elif mode == "full":
        if yaml_path is None:
            raise ValueError("yaml_path must be provided if mode is 'full'.")
        # Attach yaml_path to the import function so that it can be used later
        constructor_fn = functools.partial(import_function, yaml_path=Path(yaml_path))

    loader = yaml.CLoader if yaml.__with_libyaml__ else yaml.FullLoader
    # Add the import_function constructor to the YAML loader
    yaml.add_constructor("!function", constructor_fn, Loader=loader)
    if yaml_config is None:
        with open(yaml_path, "rb") as file:
            yaml_config = yaml.load(file, Loader=loader)

    if yaml_dir is None:
        yaml_dir = os.path.dirname(yaml_path)

    assert yaml_dir is not None

    if "include" in yaml_config:
        include_path = yaml_config["include"]
        del yaml_config["include"]

        if isinstance(include_path, str):
            include_path = [include_path]

        # Load from the last one first
        include_path.reverse()
        final_yaml_config = {}
        for path in include_path:
            # Assumes that path is a full path.
            # If not found, assume the included yaml
            # is in the same dir as the original yaml
            if not os.path.isfile(path):
                path = os.path.join(yaml_dir, path)

            try:
                included_yaml_config = load_yaml_config(yaml_path=path, mode=mode)
                final_yaml_config.update(included_yaml_config)
            except Exception as ex:
                # Re-raise so a broken include is surfaced to the caller
                raise ex

        final_yaml_config.update(yaml_config)
        return final_yaml_config
    return yaml_config

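# Illustrative task YAML (a sketch; the keys shown are assumptions, not defined in
# this module) exercising both features handled above: `include` merges another
# config, with the including file taking precedence, and `!function` resolves a
# dotted name to a callable in a .py file that sits next to the YAML:
#   include: _default_template.yaml
#   doc_to_text: !function utils.doc_to_text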

def regex_replace(string, pattern, repl, count: int = 0):
    """Implements the `re.sub` function as a custom Jinja filter."""
    return re.sub(pattern, repl, string, count=count)


env = Environment(
    loader=BaseLoader, undefined=StrictUndefined, keep_trailing_newline=True
)
env.filters["regex_replace"] = regex_replace


def apply_template(template: str, doc: dict) -> str:
    rtemplate = env.from_string(template)
    return rtemplate.render(**doc)

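# Illustrative usage (sketch): doc fields are rendered into the Jinja template,
# with the regex_replace filter registered above available inside it.
#   apply_template("Q: {{question}}\nA:", {"question": "2+2?"})  ->  "Q: 2+2?\nA:"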

def create_iterator(raw_iterator, *, rank=0, world_size=1, limit=None):
    """
    Method for creating a (potentially) sliced and limited
    iterator from a raw document iterator. Used for splitting data
    among ranks in a multi-GPU setting, or for pulling only a sample of documents.
    """
    return islice(raw_iterator, rank, limit, world_size)

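# Illustrative usage (sketch): each rank takes every world_size-th document,
# starting at its own offset, e.g.
#   list(create_iterator(iter(range(10)), rank=1, world_size=4))  ->  [1, 5, 9]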

def weighted_f1_score(items):
    from sklearn.metrics import f1_score

    unzipped_list = list(zip(*items))
    golds = unzipped_list[0]
    preds = unzipped_list[1]
    fscore = f1_score(golds, preds, average="weighted")
    return fscore


def convert_pil_to_hash(value):
    from io import BytesIO

    img_bytes = BytesIO()
    value.save(img_bytes, format="PNG")
    # hash the PNG-encoded image bytes (not the BytesIO object's repr)
    return hashlib.sha256(img_bytes.getvalue()).hexdigest()


def convert_bytes_to_hash(value):
    return hashlib.sha256(str(value).encode()).hexdigest()


def hash_dict_images(data_dict):
    """
    Create a deep copy of `data_dict` where all bytes and PIL.Image.Image values
    are replaced by their respective hashes using the provided converter functions.

    Parameters:
        data_dict (dict): The input dictionary with arbitrary nesting of dicts and lists.

    Returns:
        dict: A new dictionary with the same structure as `data_dict`, but with all
              bytes and PIL.Image.Image objects replaced by their hashes.
    """

    def _process_value(value):
        # Bytes -> hash
        from PIL import Image

        if isinstance(value, (bytes, bytearray)):
            return convert_bytes_to_hash(value)
        # PIL Image -> hash
        if isinstance(value, Image.Image):
            return convert_pil_to_hash(value)
        # Nested dictionary -> recurse
        if isinstance(value, dict):
            return {k: _process_value(v) for k, v in value.items()}
        # List or tuple -> recurse, preserving type
        if isinstance(value, list):
            return [_process_value(v) for v in value]
        if isinstance(value, tuple):
            return tuple(_process_value(v) for v in value)
        # Other types remain unchanged
        return value

    # Ensure the top-level is a dict
    if not isinstance(data_dict, dict):
        raise TypeError("Input must be a dictionary")

    return (
        {key: _process_value(val) for key, val in data_dict.items()}
        if importlib.util.find_spec("PIL")
        else data_dict
    )
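

# Illustrative usage (sketch): nested bytes and PIL images are replaced by sha256
# hex digests while the surrounding structure is preserved, e.g.
#   hash_dict_images({"id": 1, "image": b"\x89PNG..."})
#   ->  {"id": 1, "image": "<sha256 hexdigest>"}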