Commit ce34ef01 authored by mashun1

dynamicrafter

man fishing in a boat at sunset
a brown bear is walking in a zoo enclosure, some rocks around
boy walking on the street
two people dancing
a campfire on the beach and the ocean waves in the background
girl with fire and smoke on her head
girl talking and blinking
bear playing guitar happily, snowing
\ No newline at end of file
time-lapse of a blooming flower with leaves and a stem
a bonfire is lit in the middle of a field
a woman looking out in the rain
rotating view, small house
pouring honey onto some slices of bread
a sailboat sailing in rough seas with a dramatic sunset
a boat traveling on the ocean
a group of penguins walking on a beach
\ No newline at end of file
decord==0.6.0
einops==0.3.0
imageio==2.9.0
numpy==1.24.2
omegaconf==2.1.1
opencv_python
pandas==2.0.0
Pillow==9.5.0
pytorch_lightning==1.8.3
PyYAML==6.0
setuptools==65.6.3
# torch>=2.0.0
# torchvision
tqdm==4.65.0
transformers==4.25.1
moviepy
av
# xformers
gradio
timm
scikit-learn
open_clip_torch==2.22.0
kornia
\ No newline at end of file
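Note: torch/torchvision and xformers are left commented out above, presumably so they can be installed separately to match the local CUDA build; the remaining pins install in the usual way with pip install -r requirements.txt.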
import datetime
import argparse, importlib
from pytorch_lightning import seed_everything
import torch
import torch.distributed as dist


def setup_dist(local_rank):
    """Bind this process to its GPU and join the NCCL process group (env:// rendezvous)."""
    if dist.is_initialized():
        return
    torch.cuda.set_device(local_rank)
    torch.distributed.init_process_group('nccl', init_method='env://')


def get_dist_info():
    """Return (rank, world_size); fall back to (0, 1) when torch.distributed is not initialized."""
    if dist.is_available():
        initialized = dist.is_initialized()
    else:
        initialized = False
    if initialized:
        rank = dist.get_rank()
        world_size = dist.get_world_size()
    else:
        rank = 0
        world_size = 1
    return rank, world_size


if __name__ == '__main__':
    now = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    parser = argparse.ArgumentParser()
    parser.add_argument("--module", type=str, help="module name", default="inference")
    parser.add_argument("--local_rank", type=int, nargs="?", help="for ddp", default=0)
    args, unknown = parser.parse_known_args()

    # Import the inference module by name and let it parse its own command-line arguments.
    inference_api = importlib.import_module(args.module, package=None)
    inference_parser = inference_api.get_parser()
    inference_args, unknown = inference_parser.parse_known_args()

    seed_everything(inference_args.seed)
    setup_dist(args.local_rank)
    torch.backends.cudnn.benchmark = True
    rank, gpu_num = get_dist_info()

    # inference_args.savedir = inference_args.savedir+str('_seed')+str(inference_args.seed)
    print("@DynamiCrafter Inference [rank%d]: %s" % (rank, now))
    inference_api.run_inference(inference_args, gpu_num, rank)
\ No newline at end of file
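Usage note (a sketch, not part of the committed files): the wrapper above only assumes that the module named by --module exposes get_parser(), returning an argparse parser with at least a seed argument, and run_inference(args, gpu_num, rank). A minimal stub of that interface, with illustrative names such as savedir that are not taken from this commit, could look like:

import argparse


def get_parser():
    parser = argparse.ArgumentParser()
    parser.add_argument("--seed", type=int, default=123)  # the wrapper reads this via seed_everything(inference_args.seed)
    parser.add_argument("--savedir", type=str, default="results")  # hypothetical output directory
    return parser


def run_inference(args, gpu_num, rank):
    # gpu_num is the world size and rank the process index from get_dist_info();
    # a real implementation would shard its workload (e.g. the prompt list) across ranks here.
    print(f"[rank {rank}/{gpu_num}] writing results to {args.savedir}")

Because setup_dist uses init_method='env://', the wrapper is meant to be started by a torch.distributed launcher that sets the rendezvous environment variables and still passes --local_rank, for example something like python -m torch.distributed.launch --nproc_per_node=<num_gpus> <wrapper script> --module inference, with the exact launcher flags depending on the installed torch version.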