Unverified Commit cd524755 authored by Patrick von Platen's avatar Patrick von Platen Committed by GitHub
Browse files

[Examples] Remove datasets import that is not needed (#2267)

* [Examples] Remove datasets import that is not needed

* remove from lora tambien
parent 0f04e799
...@@ -28,7 +28,6 @@ import torch.nn.functional as F ...@@ -28,7 +28,6 @@ import torch.nn.functional as F
import torch.utils.checkpoint import torch.utils.checkpoint
from torch.utils.data import Dataset from torch.utils.data import Dataset
import datasets
import diffusers import diffusers
import transformers import transformers
from accelerate import Accelerator from accelerate import Accelerator
...@@ -509,11 +508,9 @@ def main(args): ...@@ -509,11 +508,9 @@ def main(args):
) )
logger.info(accelerator.state, main_process_only=False) logger.info(accelerator.state, main_process_only=False)
if accelerator.is_local_main_process: if accelerator.is_local_main_process:
datasets.utils.logging.set_verbosity_warning()
transformers.utils.logging.set_verbosity_warning() transformers.utils.logging.set_verbosity_warning()
diffusers.utils.logging.set_verbosity_info() diffusers.utils.logging.set_verbosity_info()
else: else:
datasets.utils.logging.set_verbosity_error()
transformers.utils.logging.set_verbosity_error() transformers.utils.logging.set_verbosity_error()
diffusers.utils.logging.set_verbosity_error() diffusers.utils.logging.set_verbosity_error()
......
...@@ -28,7 +28,6 @@ import torch.nn.functional as F ...@@ -28,7 +28,6 @@ import torch.nn.functional as F
import torch.utils.checkpoint import torch.utils.checkpoint
from torch.utils.data import Dataset from torch.utils.data import Dataset
import datasets
import diffusers import diffusers
import transformers import transformers
from accelerate import Accelerator from accelerate import Accelerator
...@@ -550,11 +549,9 @@ def main(args): ...@@ -550,11 +549,9 @@ def main(args):
) )
logger.info(accelerator.state, main_process_only=False) logger.info(accelerator.state, main_process_only=False)
if accelerator.is_local_main_process: if accelerator.is_local_main_process:
datasets.utils.logging.set_verbosity_warning()
transformers.utils.logging.set_verbosity_warning() transformers.utils.logging.set_verbosity_warning()
diffusers.utils.logging.set_verbosity_info() diffusers.utils.logging.set_verbosity_info()
else: else:
datasets.utils.logging.set_verbosity_error()
transformers.utils.logging.set_verbosity_error() transformers.utils.logging.set_verbosity_error()
diffusers.utils.logging.set_verbosity_error() diffusers.utils.logging.set_verbosity_error()
......
...@@ -27,7 +27,6 @@ import torch.nn.functional as F ...@@ -27,7 +27,6 @@ import torch.nn.functional as F
import torch.utils.checkpoint import torch.utils.checkpoint
from torch.utils.data import Dataset from torch.utils.data import Dataset
import datasets
import diffusers import diffusers
import PIL import PIL
import transformers import transformers
...@@ -486,11 +485,9 @@ def main(): ...@@ -486,11 +485,9 @@ def main():
) )
logger.info(accelerator.state, main_process_only=False) logger.info(accelerator.state, main_process_only=False)
if accelerator.is_local_main_process: if accelerator.is_local_main_process:
datasets.utils.logging.set_verbosity_warning()
transformers.utils.logging.set_verbosity_warning() transformers.utils.logging.set_verbosity_warning()
diffusers.utils.logging.set_verbosity_info() diffusers.utils.logging.set_verbosity_info()
else: else:
datasets.utils.logging.set_verbosity_error()
transformers.utils.logging.set_verbosity_error() transformers.utils.logging.set_verbosity_error()
diffusers.utils.logging.set_verbosity_error() diffusers.utils.logging.set_verbosity_error()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment