misc.py
# flake8: noqa: F401, F403
import abc
import argparse
import csv
import multiprocessing as mp
import os
import os.path as osp
from pathlib import Path
import copy as cp
import random as rd
import requests
import shutil
import subprocess
import warnings
import pandas as pd
from collections import OrderedDict, defaultdict
from multiprocessing import Pool, current_process
from tqdm import tqdm
import datetime
import matplotlib.pyplot as plt
from tabulate import tabulate
from json import JSONDecoder
from huggingface_hub import scan_cache_dir
from huggingface_hub.utils._cache_manager import _scan_cached_repo
from sty import fg, bg, ef, rs


def modelscope_flag_set():
    return os.environ.get('VLMEVALKIT_USE_MODELSCOPE', None) in ['1', 'True']


def process_punctuation(inText):
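    """Normalize punctuation in a string: strip the listed punctuation marks and periods not attached to digits."""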
    import re
    outText = inText
    punct = [
        ';', r'/', '[', ']', '"', '{', '}', '(', ')', '=', '+', '\\', '_', '-',
        '>', '<', '@', '`', ',', '?', '!'
    ]
    commaStrip = re.compile(r'(\d)(,)(\d)')
    periodStrip = re.compile(r'(?!<=\d)(\.)(?!\d)')
    for p in punct:
        if (p + ' ' in inText or ' ' + p in inText) or (re.search(
                commaStrip, inText) is not None):
            outText = outText.replace(p, '')
        else:
            outText = outText.replace(p, ' ')
    outText = periodStrip.sub('', outText)
    return outText

def h2r(value):
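    """Convert a hex color string such as '#ff0000' into an (r, g, b) tuple."""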
    if value[0] == '#':
        value = value[1:]
    assert len(value) == 6
    return tuple(int(value[i:i + 2], 16) for i in range(0, 6, 2))

def r2h(rgb):
    return '#%02x%02x%02x' % rgb

def colored(s, color):
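    """Wrap s in ANSI color codes; color may be a named sty foreground color, a hex string, or an RGB tuple."""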
    if isinstance(color, str):
        if hasattr(fg, color):
            return getattr(fg, color) + s + fg.rs
        color = h2r(color)
    return fg(*color) + s + fg.rs

def istype(s, type):
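    """Return True if s is an instance of type, or if s evaluates (via eval) to an instance of type."""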
    if isinstance(s, type):
        return True
    try:
        return isinstance(eval(s), type)
    except Exception as _:
        return False

def bincount(lst):
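    """Count occurrences of each item in lst and return the counts as a defaultdict."""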
    bins = defaultdict(lambda: 0)
    for item in lst:
        bins[item] += 1
    return bins

def get_cache_path(repo_id, branch='main', repo_type='datasets'):
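    """Return the local cache directory of a HuggingFace / ModelScope repo, or None if it cannot be resolved."""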
    try:
        if modelscope_flag_set():
            from modelscope.hub.file_download import create_temporary_directory_and_cache
            if repo_type == 'datasets':
                repo_type = 'dataset'
            _, cache = create_temporary_directory_and_cache(model_id=repo_id, repo_type=repo_type)
            cache_path = cache.get_root_location()
            return cache_path
        else:
            from .file import HFCacheRoot
            cache_path = HFCacheRoot()
            org, repo_name = repo_id.split('/')
            repo_path = Path(osp.join(cache_path, f'{repo_type}--{org}--{repo_name}/'))
            hf_cache_info = _scan_cached_repo(repo_path=repo_path)
            revs = {r.refs: r for r in hf_cache_info.revisions}
            if branch is not None:
                revs = {refs: r for refs, r in revs.items() if branch in refs}
            rev2keep = max(revs.values(), key=lambda r: r.last_modified)
            return str(rev2keep.snapshot_path)
    except Exception as e:
        import logging
        logging.warning(f'{type(e)}: {e}')
        return None

def proxy_set(s):
    import os
    for key in ['http_proxy', 'HTTP_PROXY', 'https_proxy', 'HTTPS_PROXY']:
        os.environ[key] = s

def get_rank_and_world_size():
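    """Read the distributed rank and world size from the RANK / WORLD_SIZE environment variables."""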
    rank = int(os.environ.get('RANK', 0))
    world_size = int(os.environ.get('WORLD_SIZE', 1))
    return rank, world_size

def splitlen(s, sym='/'):
    return len(s.split(sym))

def listinstr(lst, s):
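    """Return True if any item in lst occurs as a substring of s."""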
    assert isinstance(lst, list)
    for item in lst:
        if item in s:
            return True
    return False

def d2df(D):
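    """Convert a flat dict into a single-row pandas DataFrame."""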
    return pd.DataFrame({x: [D[x]] for x in D})

def cn_string(s):
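    """Return True if the string contains at least one Chinese (CJK Unified Ideograph) character."""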
    import re
    if re.search(u'[\u4e00-\u9fff]', s):
        return True
    return False

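# decord (video decoding) is optional: import it if available, otherwise continue without it.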
try:
    import decord
except ImportError:
    pass

def timestr(granularity='second'):
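    """Return the current time as a compact string, truncated to the given granularity (second/minute/hour/day)."""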
    s = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    assert granularity in ['second', 'minute', 'hour', 'day']
    if granularity == 'second':
        return s
    elif granularity == 'minute':
        return s[:-2]
    elif granularity == 'hour':
        return s[:-4]
    elif granularity == 'day':
        return s[:-6]

def _minimal_ext_cmd(cmd, cwd=None):
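    """Run an external command with a minimal, language-neutral environment and return its stdout as bytes."""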
    env = {}
    for k in ['SYSTEMROOT', 'PATH', 'HOME']:
        v = os.environ.get(k)
        if v is not None:
            env[k] = v
    env['LANGUAGE'] = 'C'
    env['LANG'] = 'C'
    env['LC_ALL'] = 'C'
    out = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env, cwd=cwd).communicate()[0]
    return out

def githash(fallback='unknown', digits=8):
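    """Return the current git commit hash of the vlmeval package (truncated to `digits` characters), or `fallback`."""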
    if digits is not None and not isinstance(digits, int):
        raise TypeError('digits must be None or an integer')
    try:
        import vlmeval
    except ImportError as e:
        import logging
        logging.error(f'ImportError: {str(e)}')
        return fallback
    try:
        out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD'], cwd=vlmeval.__path__[0])
        sha = out.strip().decode('ascii')
        if digits is not None:
            sha = sha[:digits]
    except OSError:
        sha = fallback
    return sha

def dict_merge(dct, merge_dct):
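    """Recursively merge merge_dct into dct in place; nested dicts are merged, other values are overwritten."""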
    for k, _ in merge_dct.items():
        if (k in dct and isinstance(dct[k], dict) and isinstance(merge_dct[k], dict)):  #noqa
            dict_merge(dct[k], merge_dct[k])
        else:
            dct[k] = merge_dct[k]

def youtube_dl(idx):
    cmd = f'youtube-dl -f best -f mp4 "{idx}" -o "{idx}.mp4"'
    os.system(cmd)

def run_command(cmd):
    if isinstance(cmd, str):
        cmd = cmd.split()
    return subprocess.check_output(cmd).decode()

def load_env():
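    """Load environment variables (e.g. API keys) from the .env file located next to the vlmeval package directory."""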
    import logging
    logging.basicConfig(
        format='[%(asctime)s] %(levelname)s - %(filename)s: %(funcName)s - %(lineno)d: %(message)s',
        datefmt='%Y-%m-%d %H:%M:%S')

    try:
        import vlmeval
    except ImportError:
        logging.error('VLMEval is not installed. Failed to import environment variables from .env file. ')
        return
    pth = osp.realpath(vlmeval.__path__[0])
    pth = osp.join(pth, '../.env')
    pth = osp.realpath(pth)
    if not osp.exists(pth):
        logging.error(f'Did not detect the .env file at {pth}, failed to load. ')
        return

    from dotenv import dotenv_values
    values = dotenv_values(pth)
    for k, v in values.items():
        if v is not None and len(v):
            os.environ[k] = v
    logging.info(f'API Keys successfully loaded from {pth}')

def pip_install_robust(package):
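    """Try to import `package`; if that fails, pip-install it (up to 3 retries). Return True on success."""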
    import sys
    retry = 3
    while retry > 0:
        try:
            package_base = package.split('=')[0]
            __import__(package_base)
            return True
        except ImportError:
            subprocess.check_call([sys.executable, '-m', 'pip', 'install', package])
            retry -= 1
    return False


def version_cmp(v1, v2, op='eq'):
    from packaging import version
    import operator
    op_func = getattr(operator, op)
    return op_func(version.parse(v1), version.parse(v2))


def toliststr(s):
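    """Coerce a string or list into a list of strings; a string that looks like a Python list is eval'ed first."""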
    if isinstance(s, str) and len(s) > 1 and (s[0] == '[') and (s[-1] == ']'):
        return [str(x) for x in eval(s)]
    elif isinstance(s, str):
        return [s]
    elif isinstance(s, list):
        return [str(x) for x in s]
    raise NotImplementedError


def extract_json_objects(text, decoder=JSONDecoder()):
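    """Yield every JSON object that can be decoded from the given text, scanning from each '{' position."""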
    pos = 0
    while True:
        match = text.find('{', pos)
        if match == -1: break
        try:
            result, index = decoder.raw_decode(text[match:])
            yield result
            pos = match + index
        except ValueError:
            pos = match + 1


def get_gpu_memory():
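    """Query free VRAM per GPU via `rocm-smi --showmeminfo vram`; return a list of free-memory values ([] on failure)."""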
    import subprocess
    try:
        command = "rocm-smi --showmeminfo vram"
        output = subprocess.check_output(command.split(), stderr=subprocess.STDOUT)
        memory_info = output.decode('ascii').split('\n')
        memory_free_values = []
        for line in memory_info:
            if "vram Total Memory" in line:
                total_memory = int(line.split(":")[-1].strip().split()[0])  # extract total VRAM
            elif "vram Total Used Memory" in line:
                used_memory = int(line.split(":")[-1].strip().split()[0])  # extract used VRAM
                free_memory = total_memory - used_memory  # compute free VRAM
                memory_free_values.append(free_memory)

        return memory_free_values
    except Exception as e:
        print(f'{type(e)}: {str(e)}')
        return []


def auto_split_flag():
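    """Return True if the AUTO_SPLIT environment variable is set to '1'."""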
    flag = os.environ.get('AUTO_SPLIT', '0')
    return flag == '1'