Update app.py
app.py CHANGED
@@ -188,11 +188,26 @@ def run_lora(
     if adapter_names:
         pipe.set_adapters(adapter_names, adapter_weights=adapter_weights)
         # Optional: fusing makes inference faster, but adapter weights can no longer be adjusted dynamically
-        pipe.fuse_lora(adapter_names=adapter_names)
+        # pipe.fuse_lora(adapter_names=adapter_names)
+    try:
+        active = pipe.get_active_adapters() if hasattr(pipe, "get_active_adapters") else []
+        print("Active adapters:", active)
+    except Exception as e:
+        print("Active adapters query failed:", e)
+
+    lora_layer_count = 0
+    for name, module in pipe.transformer.named_modules():
+        attrs = dir(module)
+        if any(a.startswith("lora_") for a in attrs) or "lora" in module.__class__.__name__.lower():
+            lora_layer_count += 1
+    print(f"[DEBUG] transformer LoRA layers: {lora_layer_count}")
+
+    # If the count is 0, surface an explicit warning
+    if lora_layer_count == 0:
+        gr.Warning("LoRA seems not injected (0 layers on transformer). Check whether the LoRA is trained for FLUX and `prefix=None` is set.")
 
-    pipe.enable_vae_slicing()
 
-
+    pipe.enable_vae_slicing()
     clip_side_prompt = safe_trim_for_clip(prompt, max_words=77)
     init_image = None
     error_message = ""
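For reference (not part of this commit): the dir()-based scan above matches on attribute names and class-name substrings, which can miscount. A more direct check is to count modules that PEFT has actually wrapped. The sketch below is an assumption-laden illustration: it presumes a recent diffusers/peft install where pipe.get_list_adapters(), pipe.get_active_adapters(), and peft.tuners.tuners_utils.BaseTunerLayer are available, and the helper name report_lora_state is made up for this example.

    # Hypothetical debugging helper (not part of the commit): inspect LoRA state on a
    # diffusers pipeline via its adapter APIs and PEFT's BaseTunerLayer marker class.
    from peft.tuners.tuners_utils import BaseTunerLayer

    def report_lora_state(pipe) -> int:
        # Adapters registered per component, e.g. {"transformer": ["my_lora"]}
        print("Loaded adapters:", pipe.get_list_adapters())
        # Adapters currently activated by set_adapters(...)
        print("Active adapters:", pipe.get_active_adapters())
        # Count transformer modules that PEFT replaced with LoRA-wrapped layers
        injected = sum(
            1 for _, m in pipe.transformer.named_modules() if isinstance(m, BaseTunerLayer)
        )
        print(f"[DEBUG] transformer LoRA layers (isinstance check): {injected}")
        return injected

Counting isinstance(m, BaseTunerLayer) only hits layers PEFT actually injected, so it avoids false positives from unrelated modules that merely expose a "lora"-like name.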