@@ -349,7 +349,8 @@ class StableDiffusionPipelineSafe(DiffusionPipeline):
             flagged_images = np.zeros((2, *image.shape[1:]))
             if any(has_nsfw_concept):
                 logger.warning(
-                    "Potential NSFW content was detected in one or more images. A black image will be returned instead."
+                    "Potential NSFW content was detected in one or more images. A black image will be returned"
+                    " instead."
                     f"{'You may look at this images in the `unsafe_images` variable of the output at your own discretion.' if enable_safety_guidance else 'Try again with a different prompt and/or seed.'}"