Skip to content

Commit 7d5d678

Browse files
Authored: Merge pull request #1 from huggingface/naykun-main
make fix-copies
2 parents 02851fa + 49e39f8 commit 7d5d678

File tree

2 files changed

+15
-2
lines changed

2 files changed

+15
-2
lines changed

src/diffusers/pipelines/qwenimage/pipeline_qwenimage_layered.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -393,7 +393,6 @@ def check_inputs(
393393
raise ValueError(f"`max_sequence_length` cannot be greater than 1024 but is {max_sequence_length}")
394394

395395
@staticmethod
396-
# Copied from diffusers.pipelines.qwenimage.pipeline_qwenimage.QwenImagePipeline._pack_latents
397396
def _pack_latents(latents, batch_size, num_channels_latents, height, width, layers):
398397
latents = latents.view(batch_size, layers, num_channels_latents, height // 2, 2, width // 2, 2)
399398
latents = latents.permute(0, 1, 3, 5, 2, 4, 6)
@@ -402,7 +401,6 @@ def _pack_latents(latents, batch_size, num_channels_latents, height, width, laye
402401
return latents
403402

404403
@staticmethod
405-
# Copied from diffusers.pipelines.qwenimage.pipeline_qwenimage.QwenImagePipeline._unpack_latents
406404
def _unpack_latents(latents, height, width, layers, vae_scale_factor):
407405
batch_size, num_patches, channels = latents.shape
408406

src/diffusers/utils/dummy_torch_and_transformers_objects.py

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2297,6 +2297,21 @@ def from_pretrained(cls, *args, **kwargs):
22972297
requires_backends(cls, ["torch", "transformers"])
22982298

22992299

2300+
class QwenImageLayeredPipeline(metaclass=DummyObject):
2301+
_backends = ["torch", "transformers"]
2302+
2303+
def __init__(self, *args, **kwargs):
2304+
requires_backends(self, ["torch", "transformers"])
2305+
2306+
@classmethod
2307+
def from_config(cls, *args, **kwargs):
2308+
requires_backends(cls, ["torch", "transformers"])
2309+
2310+
@classmethod
2311+
def from_pretrained(cls, *args, **kwargs):
2312+
requires_backends(cls, ["torch", "transformers"])
2313+
2314+
23002315
class QwenImagePipeline(metaclass=DummyObject):
23012316
_backends = ["torch", "transformers"]
23022317

0 commit comments

Comments (0)