rahul7star committed (verified)
Commit 293e41a · 1 Parent(s): ef1bc30

Update app_lora.py

Files changed (1):
  1. app_lora.py  +54 -13
app_lora.py CHANGED
@@ -97,6 +97,46 @@ log(f"USE_CPU_OFFLOAD : {USE_CPU_OFFLOAD}")
 
 log_system_stats("BEFORE TRANSFORMER LOAD")
 
+# ============================================================
+# LORA SETTINGS
+# ============================================================
+loaded_loras = {}  # Keep track of which LoRAs are already loaded
+
+def apply_lora(repo_name, lora_filename, strength):
+    global pipe, loaded_loras
+    if pipe is None:
+        return "❌ Pipeline not initialized"
+    if not lora_filename:
+        return "⚠️ No LoRA file selected"
+
+    adapter_name = f"ui_lora_{lora_filename.replace('/', '_')}"  # unique adapter name per file
+
+    try:
+        # Load the LoRA only if not already loaded
+        if adapter_name not in loaded_loras:
+            pipe.load_lora_weights(
+                repo_name,
+                weight_name=lora_filename,
+                adapter_name=adapter_name
+            )
+            loaded_loras[adapter_name] = lora_filename
+            log(f"📥 LoRA loaded into memory: {repo_name}/{lora_filename}")
+        else:
+            log(f"♻️ LoRA already loaded: {repo_name}/{lora_filename}")
+
+        # Activate the adapter
+        pipe.set_adapters([adapter_name], [strength])
+        log(f"✅ Applied LoRA: {repo_name}/{lora_filename} (strength={strength})")
+
+        if hasattr(pipe, "peft_config"):
+            log(f"🎯 Active adapters: {list(pipe.peft_config.keys())}")
+
+        return "LoRA applied"
+
+    except Exception as e:
+        log(f"❌ Failed to apply LoRA: {e}")
+        return f"Failed: {e}"
+
 
 # ============================================================
 # FUNCTION TO CONVERT LATENTS TO IMAGE
@@ -994,6 +1034,9 @@ with gr.Blocks(title="Z-Image-Turbo") as demo:
     # =========================
     # CALLBACKS
     # =========================
+    # ------------------------
+    # CALLBACKS
+    # ------------------------
     def refresh_lora_list(repo_name):
         files = list_loras_from_repo(repo_name)
         if not files:
@@ -1005,29 +1048,27 @@ with gr.Blocks(title="Z-Image-Turbo") as demo:
     refresh_lora_btn.click(refresh_lora_list, inputs=[lora_repo], outputs=[lora_file])
 
     def apply_lora(repo_name, lora_filename, strength):
-        global pipe
+        global pipe, loaded_loras
         if pipe is None:
             return "❌ Pipeline not initialized"
         if not lora_filename:
             return "⚠️ No LoRA file selected"
 
+        adapter_name = f"ui_lora_{lora_filename.replace('/', '_')}"
         try:
-            pipe.load_lora_weights(
-                repo_name,
-                weight_name=lora_filename,
-                adapter_name="ui_lora"
-            )
-            pipe.set_adapters(["ui_lora"], [strength])
-            log(f"✅ Applied LoRA: {repo_name}/{lora_filename} (strength={strength})")
-            if hasattr(pipe, "peft_config"):
-                log(f"🎯 Active adapters: {list(pipe.peft_config.keys())}")
-            return "LoRA applied"
+            if adapter_name not in loaded_loras:
+                pipe.load_lora_weights(repo_name, weight_name=lora_filename, adapter_name=adapter_name)
+                loaded_loras[adapter_name] = lora_filename
+                log(f"📥 Loaded LoRA: {lora_filename}")
+
+            pipe.set_adapters([adapter_name], [strength])
+            log(f"✅ Applied LoRA adapter: {adapter_name} (strength={strength})")
+            return f"LoRA applied: {lora_filename}"
+
         except Exception as e:
             log(f"❌ Failed to apply LoRA: {e}")
             return f"Failed: {e}"
 
-    apply_lora_btn.click(apply_lora, inputs=[lora_repo, lora_file, lora_strength], outputs=[logs_box])
-
    def clear_lora():
         global pipe
         if pipe is None:
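
For context on the pattern this commit introduces: each LoRA file is loaded into the pipeline only once under a unique adapter name, cached in a dict, and re-activated at the requested strength with set_adapters() on later calls instead of being reloaded. Below is a minimal, self-contained sketch of that caching pattern, assuming a standard diffusers pipeline with PEFT-backed LoRA support; the model id, repo, and file names are placeholders, not the ones used by this Space.

# Minimal sketch of the LoRA-caching pattern from this commit.
# Assumptions: a diffusers pipeline with PEFT-backed LoRA support;
# "some-org/some-model" and the repo/file names below are placeholders.
import torch
from diffusers import DiffusionPipeline

pipe = DiffusionPipeline.from_pretrained(
    "some-org/some-model",          # placeholder model id
    torch_dtype=torch.float16,
)

loaded_loras = {}  # adapter_name -> weight file; mirrors the commit's cache

def apply_lora(repo_name: str, lora_filename: str, strength: float) -> str:
    """Load a LoRA once per file, then (re)activate it at the given strength."""
    adapter_name = f"ui_lora_{lora_filename.replace('/', '_')}"
    if adapter_name not in loaded_loras:
        # First use of this file: fetch the weights and register the adapter.
        pipe.load_lora_weights(
            repo_name,
            weight_name=lora_filename,
            adapter_name=adapter_name,
        )
        loaded_loras[adapter_name] = lora_filename
    # Whether freshly loaded or cached, switch the active adapter and its scale.
    pipe.set_adapters([adapter_name], [strength])
    return f"LoRA applied: {lora_filename}"

# Example call with placeholder repo/file names:
# apply_lora("some-user/my-loras", "style_lora.safetensors", 0.8)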