import gradio as gr
from diffusers import DiffusionPipeline
import torch
from PIL import Image

MODEL_ID = "Phr00t/Qwen-Image-Edit-Rapid-AIO"
# Load the all-in-one pipeline for CPU inference (full precision, no safety
# checker, custom pipeline code allowed).
pipe = DiffusionPipeline.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.float32,
    safety_checker=None,
    trust_remote_code=True,
)
pipe.to("cpu")

def generate(prompt, image):
    if image is None:
        # text → image: no input image was provided
        result = pipe(prompt)
        return result.images[0]
    else:
        # image editing mode: Gradio delivers the upload as a NumPy array
        img = Image.fromarray(image)
        result = pipe(prompt=prompt, image=img)
        return result.images[0]

demo = gr.Interface(
    fn=generate,
    inputs=[
        gr.Textbox(label="Prompt"),
        gr.Image(label="Input Image (optional)"),
    ],
    outputs=gr.Image(label="Generated / Edited Image"),
    title="Qwen Image Edit Rapid AIO (CPU Friendly)",
    description="Works on CPU with Diffusers. Supports text-to-image AND image editing.",
)

demo.launch()