macgaga committed on
Commit
c269c5c
·
verified ·
1 Parent(s): e9acfdf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +45 -23
app.py CHANGED
@@ -1066,29 +1066,51 @@ lora_search_civitai_gallery.select(
1066
  )
1067
 
1068
 
1069
- for i, l in enumerate(lora_repo):
1070
- deselect_lora_button.click(lambda: ("", 1.0), None, [lora_repo[i], lora_wt[i]], queue=False, show_api=False)
1071
- gr.on(
1072
- triggers=[lora_download[i].click],
1073
- fn=download_my_lora_flux,
1074
- inputs=[lora_download_url, lora_repo[i]],
1075
- outputs=[lora_repo[i]],
1076
- scroll_to_output=True,
1077
- queue=True,
1078
- show_api=False,
1079
- )
1080
- gr.on(
1081
- triggers=[lora_repo[i].change, lora_wt[i].change],
1082
- fn=update_loras_flux,
1083
- inputs=[prompt, lora_repo[i], lora_wt[i]],
1084
- outputs=[prompt, lora_repo[i], lora_wt[i], lora_info[i], lora_md[i]],
1085
- queue=False,
1086
- trigger_mode="once",
1087
- show_api=False,
1088
- ).success(get_repo_safetensors, [lora_repo[i]], [lora_weights[i]], queue=False, show_api=False
1089
- ).success(apply_lora_prompt_flux, [lora_info[i]], [lora_trigger[i]], queue=False, show_api=False
1090
- ).success(compose_lora_json, [lora_repo_json, lora_num[i], lora_repo[i], lora_wt[i], lora_weights[i], lora_trigger[i]], [lora_repo_json], queue=False, show_api=False)
1091
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1092
  for i, m in enumerate(cn_mode):
1093
  gr.on(
1094
  triggers=[cn_mode[i].change, cn_scale[i].change],
 
1066
  )
1067
 
1068
 
1069
# Wire up the per-slot LoRA event handlers: each LoRA row gets its own
# deselect/download/change callbacks. All [slot] subscripts are evaluated
# eagerly at registration time, so each handler is bound to its own row.
for slot, _unused in enumerate(lora_repo):
    # "Deselect" clears this slot: empty repo id, weight reset to 1.0.
    deselect_lora_button.click(
        lambda: ("", 1.0),
        None,
        [lora_repo[slot], lora_wt[slot]],
        queue=False,
        show_api=False,
    )
    # Download button: runs download_my_lora_flux with the shared URL box,
    # writing the result back into this slot's repo field.
    gr.on(
        triggers=[lora_download[slot].click],
        fn=download_my_lora_flux,
        inputs=[lora_download_url, lora_repo[slot]],
        outputs=[lora_repo[slot]],
        scroll_to_output=True,
        queue=True,
        show_api=False,
    )
    # Any change to this slot's repo or weight re-runs update_loras_flux,
    # then chains on success:
    #   get_repo_safetensors -> apply_lora_prompt_flux -> compose_lora_json
    # (the last step folds this slot back into the aggregate lora_repo_json).
    gr.on(
        triggers=[lora_repo[slot].change, lora_wt[slot].change],
        fn=update_loras_flux,
        inputs=[prompt, lora_repo[slot], lora_wt[slot]],
        outputs=[prompt, lora_repo[slot], lora_wt[slot], lora_info[slot], lora_md[slot]],
        queue=False,
        trigger_mode="once",
        show_api=False,
    ).success(
        get_repo_safetensors,
        [lora_repo[slot]],
        [lora_weights[slot]],
        queue=False,
        show_api=False,
    ).success(
        apply_lora_prompt_flux,
        [lora_info[slot]],
        [lora_trigger[slot]],
        queue=False,
        show_api=False,
    ).success(
        compose_lora_json,
        [lora_repo_json, lora_num[slot], lora_repo[slot], lora_wt[slot], lora_weights[slot], lora_trigger[slot]],
        [lora_repo_json],
        queue=False,
        show_api=False,
    )
1114
  for i, m in enumerate(cn_mode):
1115
  gr.on(
1116
  triggers=[cn_mode[i].change, cn_scale[i].change],