From f9abe4cddcdc6704be02633d9d5ed9640d6b9008 Mon Sep 17 00:00:00 2001
From: Sayo
Date: Mon, 8 May 2023 20:38:10 +0800
Subject: Add api method to get LoRA models with prompt

---
 extensions-builtin/Lora/lora.py | 13 +++----------
 1 file changed, 3 insertions(+), 10 deletions(-)

(limited to 'extensions-builtin/Lora/lora.py')

diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index 8fc1ddca..05162e41 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -2,9 +2,8 @@ import glob
 import os
 import re
 import torch
-from typing import Union, List, Optional
-from fastapi import FastAPI
-import gradio as gr
+from typing import Union
+import scripts.api as api
 
 from modules import shared, devices, sd_models, errors, scripts
 
@@ -445,12 +444,6 @@ def infotext_pasted(infotext, params):
     if added:
         params["Prompt"] += "\n" + "".join(added)
 
-def api(_: gr.Blocks, app: FastAPI):
-    @app.get("/sdapi/v1/loras")
-    async def getloras():
-        return [{"name": name, "path": available_loras[name].filename, "prompt": ""} for name in available_loras]
-
-
 available_loras = {}
 available_lora_aliases = {}
 loaded_loras = []
@@ -458,6 +451,6 @@ loaded_loras = []
 list_available_loras()
 try:
     import modules.script_callbacks as script_callbacks
-    script_callbacks.on_app_started(api)
+    script_callbacks.on_app_started(api.api)
 except:
     pass
\ No newline at end of file
--
cgit v1.2.1
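
Note: the companion module referenced by the new "import scripts.api as api" line is not included in this commit. Below is a hypothetical sketch of what such an extensions-builtin/Lora/scripts/api.py might contain, reconstructed from the endpoint removed from lora.py and the commit subject; the module path, function names, and the prompt format are assumptions, not code from this patch.

# Hypothetical sketch of extensions-builtin/Lora/scripts/api.py -- not part of
# this commit. It mirrors the /sdapi/v1/loras endpoint removed from lora.py
# and, per the commit subject, fills in a "prompt" field for each LoRA model.
# Names and the prompt format are assumptions.
from fastapi import FastAPI
import gradio as gr

import lora  # extensions-builtin/Lora/lora.py, which defines available_loras


def api(_: gr.Blocks, app: FastAPI):
    # Registered via script_callbacks.on_app_started(api.api) in lora.py.
    @app.get("/sdapi/v1/loras")
    async def get_loras():
        return [
            {
                "name": name,
                "path": lora.available_loras[name].filename,
                # Assumed prompt syntax for activating a LoRA in a prompt.
                "prompt": f"<lora:{name}:1>",
            }
            for name in lora.available_loras
        ]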