From da828827b7755ce84ff8ff91ec6f200d2fa08940 Mon Sep 17 00:00:00 2001 From: lutzapps Date: Tue, 12 Nov 2024 17:45:20 +0700 Subject: [PATCH] app_configs bugfix online Url, app_utils gradio link support, port changes --- .../better-ai-launcher/.vscode/tasks.json | 8 +- .../better-ai-launcher/README.md | 2 +- .../better-ai-launcher/app/app.py | 19 ++++- .../app/templates/index.html | 39 ++++++++- .../app/utils/app_configs.py | 79 +++++++++++++------ .../better-ai-launcher/app/utils/app_utils.py | 35 +++++++- .../docker-compose.debug.yml | 2 +- .../better-ai-launcher/docker-compose.yml | 2 +- .../better-ai-launcher/nginx/readme.html | 1 + 9 files changed, 147 insertions(+), 40 deletions(-) diff --git a/official-templates/better-ai-launcher/.vscode/tasks.json b/official-templates/better-ai-launcher/.vscode/tasks.json index 6be2451..53cce64 100644 --- a/official-templates/better-ai-launcher/.vscode/tasks.json +++ b/official-templates/better-ai-launcher/.vscode/tasks.json @@ -74,10 +74,6 @@ "containerPort": 6006, // Tensorboard (needed by kohya_ss) "hostPort": 6006 }, - { - "containerPort": 7860, // Kohya-ss (lutzapps - added new Kohya app with FLUX support) - "hostPort": 7860 - }, { "containerPort": 7862, // Forge (aka Stable-Diffiusion-WebUI-Forge) "hostPort": 7862 @@ -85,6 +81,10 @@ { "containerPort": 7863, // A1111 (aka Stable-Diffiusion-WebUI) "hostPort": 7863 + }, + { + "containerPort": 7864, // Kohya-ss (lutzapps - added new Kohya app with FLUX support) + "hostPort": 7864 } ] }, diff --git a/official-templates/better-ai-launcher/README.md b/official-templates/better-ai-launcher/README.md index 8649f9f..e75ec3d 100644 --- a/official-templates/better-ai-launcher/README.md +++ b/official-templates/better-ai-launcher/README.md @@ -62,9 +62,9 @@ BASE_IMAGE=madiator2011/better-base:cuda12.4 - 3000/http (ComfyUI) - 6006/http (Tensorboard [needed by kohya_ss]) -- 7860/http (Kohya-ss) with FLUX.1 support - 7862/http (Forge) aka Stable-Diffiusion-WebUI-Forge - 7863/http (A1111) aka Stable-Diffiusion-WebUI +- 7864/http (Kohya-ss) with FLUX.1 support ## ENV Vars (System) diff --git a/official-templates/better-ai-launcher/app/app.py b/official-templates/better-ai-launcher/app/app.py index 871e8f0..99975d6 100644 --- a/official-templates/better-ai-launcher/app/app.py +++ b/official-templates/better-ai-launcher/app/app.py @@ -17,8 +17,10 @@ from utils.filebrowser_utils import configure_filebrowser, start_filebrowser, st from utils.app_utils import ( run_app, update_process_status, check_app_directories, get_app_status, force_kill_process_by_name, update_webui_user_sh, save_install_status, - get_install_status, download_and_unpack_venv, fix_custom_nodes, is_process_running, install_app #, update_model_symlinks + get_install_status, download_and_unpack_venv, fix_custom_nodes, is_process_running, install_app, # update_model_symlinks + get_bkohya_launch_url # lutzapps - support dynamic generated gradio url ) + # lutzapps - CHANGE #1 LOCAL_DEBUG = os.environ.get('LOCAL_DEBUG', 'False') # support local browsing for development/debugging @@ -67,7 +69,7 @@ running_processes = {} app_configs = get_app_configs() -S3_BASE_URL = "https://better.s3.madiator.com/" +#S3_BASE_URL = "https://better.s3.madiator.com/" # unused now SETTINGS_FILE = '/workspace/.app_settings.json' @@ -187,6 +189,19 @@ def get_logs(app_name): return jsonify({'logs': running_processes[app_name]['log'][-100:]}) return jsonify({'logs': []}) +# lutzapps - support bkohya gradio url +@app.route('/get_bkohya_launch_url', methods=['GET']) +def 
get_bkohya_launch_url_route():
+    command = app_configs['bkohya']['command']
+    is_gradio = ("--share" in command.lower()) # gradio share mode
+    if is_gradio:
+        mode = 'gradio'
+    else:
+        mode = 'local'
+
+    launch_url = get_bkohya_launch_url() # read from the app_utils global BKOHYA_LAUNCH_URL, which is captured from the kohya log
+    return jsonify({ 'mode': mode, 'url': launch_url }) # used by the index.html:openApp() button click function
+
 @app.route('/kill_all', methods=['POST'])
 def kill_all():
     try:
diff --git a/official-templates/better-ai-launcher/app/templates/index.html b/official-templates/better-ai-launcher/app/templates/index.html
index 4dae966..b0f08db 100644
--- a/official-templates/better-ai-launcher/app/templates/index.html
+++ b/official-templates/better-ai-launcher/app/templates/index.html
@@ -2768,16 +2768,47 @@
         await updateStatus();
     }
 
-    function openApp(appKey, port) {
-        // *** lutzapps - Change #3 - support to run locally
+    async function openApp(appKey, port) {
+        // *** lutzapps - support running locally and the new bkohya gradio url
         // NOTE: ` (back-ticks) are used here for template literals
         var url = `https://${podId}-${port}.proxy.runpod.net/`; // need to be declared as var
         if (`${enable_unsecure_localhost}` === 'True') {
             url = `http://localhost:${port}/`; // remove runpod.net proxy
-            //alert(`openApp URL=${url}`);
         }
-        window.open(url, '_blank');
+
+        // new: support gradio url for kohya_ss, e.g. https://b6365c256c395e755b.gradio.live
+        //if (`${appKey}` === 'bkohya' && `${app_status[appKey]['status']}` === 'running') { // get the latest data from the server
+        if (appKey == 'bkohya') { // get the latest data from the server
+            var response = await fetch('/get_bkohya_launch_url');
+            var result = await response.json();
+
+            var launch_mode = result['mode']; // 'gradio' or 'local'
+            //alert('launch_mode=' + launch_mode);
+
+            var launch_url = result['url'];
+            //alert('launch_url=' + launch_url);
+
+            if (launch_url !== '') { // when a launch url is defined, the app is initialized and ready to be opened
+                if (launch_mode === 'gradio') { // if it is a gradio url
+                    url = launch_url; // then use it, instead of the CF proxy url defined above
+                    //alert('using gradio for bkohya: ' + launch_url);
+                }
+                // else use the CF proxy url defined above for localhost, instead of the localhost url from the log
+            }
+            else { // empty launch_url, still waiting for the launch url from the log
+                if (launch_mode === 'gradio') {
+                    alert('Waiting for Gradio URL to be generated ...');
+                }
+                else {
+                    alert('Waiting for local Launch URL to be generated ...');
+                }
+                return; // no launch URL yet
+            }
+        }
+
+        //alert(`openApp URL=${url}`);
+
+        window.open(url, '_blank'); // open the app in a new browser tab
     }
 
     async function updateStatus() {
diff --git a/official-templates/better-ai-launcher/app/utils/app_configs.py b/official-templates/better-ai-launcher/app/utils/app_configs.py
index 47a1507..0b2e19e 100644
--- a/official-templates/better-ai-launcher/app/utils/app_configs.py
+++ b/official-templates/better-ai-launcher/app/utils/app_configs.py
@@ -1,6 +1,7 @@
 import os
 import xml.etree.ElementTree as ET
 import requests
+import urllib.request
 import json
 
 # this is the replacement for the XML manifest, and defines all app_configs in full detail
@@ -141,7 +142,13 @@ app_configs = {
     'bkohya': {
         'id': 'bkohya', # app_name
         'name': 'Better Kohya',
-        'command': 'cd /workspace/bkohya && . ./bin/activate && cd /workspace/kohya_ss && python ./kohya_gui.py --headless --share --server_port 7860', # TODO!! check ./kohya_gui.py
+        'command': 'cd /workspace/bkohya && . ./bin/activate && cd /workspace/kohya_ss && python ./kohya_gui.py --headless --share --server_port 7864', # TODO!! check other "./kohya_gui.py" command-line options
+        # need to check:
+        #   python ./kohya_gui.py --inbrowser --server_port 7864
+        # works for now:
+        #   python ./kohya_gui.py --headless --share --server_port 7864
+        #   creates a gradio link (valid for 72h), e.g. https://b6365c256c395e755b.gradio.live
+        #
         ### for Gradio supported reverse proxy:
         # --share -> Share the gradio UI
         # --root_path ROOT_PATH -> root_path` for Gradio to enable reverse proxy support. e.g. /kohya_ss
@@ -182,8 +189,8 @@ app_configs = {
 
         'venv_path': '/workspace/bkohya',
         'app_path': '/workspace/kohya_ss',
-        'port': 7860,
-        'download_url': 'https://better.s3.madiator.com/kohya.tar.gz', # (2024-11-08 13:13:00Z) - lutzapps
+        'port': 7864,
+        'download_url': 'https://better.s3.madiator.com/bkohya/kohya.tar.gz', # (2024-11-08 13:13:00Z) - lutzapps
         'venv_uncompressed_size': 12128345264, # uncompressed size of the tar-file (in bytes)
         'archive_size': 6314758227, # tar filesize (in bytes)
         'sha256_hash': '9a0c0ed5925109e82973d55e28f4914fff6728cfb7f7f028a62e2ec1a9e4f60a',
@@ -276,12 +283,12 @@ def write_dict_to_jsonfile(dict:dict, json_filepath:str, overwrite:bool=False) -> tuple[bool, str]:
 
             return False, error_msg # failure
 
-        # Write the JSON data to a file
+        # Write the JSON data to a file BUGBUG
         with open(json_filepath, 'w', encoding='utf-8') as output_file:
             json.dump(dict, output_file, ensure_ascii=False, indent=4, separators=(',', ': '))
 
     except Exception as e:
-        error_msg = f"ERROR in shared_models:write_dict_to_jsonfile() - loading JSON Map File '{json_filepath}'\nException: {str(e)}"
+        error_msg = f"ERROR in write_dict_to_jsonfile() - writing JSON Map File '{json_filepath}'\nException: {str(e)}"
         print(error_msg)
 
         return False, error_msg # failure
@@ -293,17 +300,20 @@ def read_dict_from_jsonfile(json_filepath:str) -> tuple [dict, str]:
 
     # Read JSON file from 'json_filepath' and return it as 'dict'
     try:
-        if os.path.exists(json_filepath):
+        if ":" in json_filepath: # filepath is an online Url containing ":" like http:/https:/ftp:
+            with urllib.request.urlopen(json_filepath) as url:
+                dict = json.load(url)
+        elif os.path.exists(json_filepath): # local file path, e.g. 
"/workspace/..."" with open(json_filepath, 'r') as input_file: dict = json.load(input_file) else: - error_msg = f"dictionary file '{json_filepath}' does not exist" - #print(error_msg) + error_msg = f"local dictionary file '{json_filepath}' does not exist" + print(error_msg) return {}, error_msg # failure except Exception as e: - error_msg = f"ERROR in shared_models:read_dict_from_jsonfile() - loading JSON Map File '{json_filepath}'\nException: {str(e)}" + error_msg = f"ERROR in read_dict_from_jsonfile() - loading JSON Map File '{json_filepath}'\nException: {str(e)}" print(error_msg) return {}, error_msg # failure @@ -317,30 +327,39 @@ def pretty_dict(dict:dict) -> str: return dict_string # helper function for "init_app_install_dirs(), "init_shared_model_app_map()", "init_shared_models_folders()" and "inir_DEBUG_SETTINGS()" -def load_global_dict_from_file(dict:dict, dict_filepath:str, dict_description:str, SHARED_MODELS_DIR:str="", write_file:bool=True) -> tuple[bool, dict]: +def load_global_dict_from_file(default_dict:dict, dict_filepath:str, dict_description:str, SHARED_MODELS_DIR:str="", write_file:bool=True) -> tuple[bool, dict]: # returns the 'dict' for 'dict_description' from 'dict_filepath' + success = False + return_dict = {} + try: if not SHARED_MODELS_DIR == "" and not os.path.exists(SHARED_MODELS_DIR): print(f"\nThe SHARED_MODELS_DIR '{SHARED_MODELS_DIR}' is not found!\nCreate it by clicking the 'Create Shared Folders' button from the WebUI 'Settings' Tab\n") - return + return False, return_dict - if os.path.isfile(dict_filepath) and os.path.exists(dict_filepath): + # read from file, if filepath is online url (http:/https:/ftp:) or local filepath exists + if ":" in dict_filepath or \ + os.path.isfile(dict_filepath) and os.path.exists(dict_filepath): dict_filepath_found = True # read the dict_description from JSON file - print(f"\nExisting '{dict_description}' found and read from file '{dict_filepath}'\nThe file overwrites the code defaults!") + print(f"\nExisting '{dict_description}' found online and read from file '{dict_filepath}'\nThe file overwrites the code defaults!") - dict, error_msg = read_dict_from_jsonfile(dict_filepath) - if not error_msg == "": - print(error_msg) + return_dict, error_msg = read_dict_from_jsonfile(dict_filepath) + + success = (not return_dict == {} and error_msg == "") # translate to success state + + if not success: # return_dict == {} + dict_filepath_found = False # handle 404 errors from online urls + return_dict = default_dict # use the code-defaults dict passed in else: # init the dict_description from app code dict_filepath_found = False print(f"No {dict_description}_FILE found, initializing default '{dict_description}' from code ...") # use already defined dict from app code # write the dict to JSON file - success, ErrorMsg = write_dict_to_jsonfile(dict, dict_filepath) + success, ErrorMsg = write_dict_to_jsonfile(default_dict, dict_filepath) if success: print(f"'{dict_description}' is initialized and written to file '{dict_filepath}'") @@ -348,14 +367,15 @@ def load_global_dict_from_file(dict:dict, dict_filepath:str, dict_description:st print(ErrorMsg) # Convert 'dict_description' dictionary to formatted JSON - print(f"\nUsing {'external' if dict_filepath_found else 'default'} '{dict_description}':\n{pretty_dict(dict)}") + print(f"\nUsing {'external' if dict_filepath_found else 'default'} '{dict_description}':\n{pretty_dict(return_dict)}") except Exception as e: - print(f"ERROR in shared_models:load_global_dict_from_file() - initializing dict 
Map File '{dict_filepath}'\nException: {str(e)}") + print(f"ERROR in load_global_dict_from_file() - initializing dict file '{dict_filepath}'\nException: {str(e)}") return False, {} - return True, dict # success + return success, return_dict + DEBUG_SETTINGS_FILE = "/workspace/_debug_settings.json" DEBUG_SETTINGS = { @@ -380,9 +400,9 @@ def init_debug_settings(): local_debug = os.environ.get('LOCAL_DEBUG', 'False') # support local browsing for development/debugging generate_debug_settings_file = os.environ.get('DEBUG_SETTINGS_FILE', 'False') # generate the DEBUG_SETTINGS_FILE, if not exist already - write_file_if_not_exist = local_debug == 'True' or generate_debug_settings_file == 'True' + write_file_if_not_exists = (local_debug == 'True' or local_debug == 'true' or generate_debug_settings_file == 'True' or generate_debug_settings_file == 'true') - success, dict = load_global_dict_from_file(DEBUG_SETTINGS, DEBUG_SETTINGS_FILE, "DEBUG_SETTINGS", write_file=write_file_if_not_exist) + success, dict = load_global_dict_from_file(DEBUG_SETTINGS, DEBUG_SETTINGS_FILE, "DEBUG_SETTINGS", write_file=write_file_if_not_exists) if success: DEBUG_SETTINGS = dict @@ -426,21 +446,28 @@ APP_CONFIGS_FILE = APP_CONFIGS_MANIFEST_URL # default is the online manifest url # when the IMAGE is used normally. def init_app_configs(): + global APP_CONFIGS_MANIFEST_URL global APP_CONFIGS_FILE global app_configs # check for overwrite of APP_CONFIGS_MANIFEST_URL - if not DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL'] == "": - APP_CONFIGS_FILE = DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL'] + debug_app_configs_manifest_url = DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL'] + if not debug_app_configs_manifest_url == "": + print(f"using APP_CONFIGS_MANIFEST_URL from DEBUG_SETTINGS: {debug_app_configs_manifest_url}") + APP_CONFIGS_MANIFEST_URL = debug_app_configs_manifest_url + APP_CONFIGS_FILE = APP_CONFIGS_MANIFEST_URL + + + print(f"\nUsing APP_CONFIGS_MANIFEST_URL={APP_CONFIGS_MANIFEST_URL}") local_debug = os.environ.get('LOCAL_DEBUG', 'False') # support local browsing for development/debugging generate_app_configs_file = os.environ.get('APP_CONFIGS_FILE', 'False') # generate the APP_CONFIGS_FILE, if not exist already - write_file_if_not_exists = local_debug == 'True' or generate_app_configs_file == 'True' + write_file_if_not_exists = (local_debug == 'True' or local_debug == 'true' or generate_app_configs_file == 'True' or generate_app_configs_file == 'true') success, dict = load_global_dict_from_file(app_configs, APP_CONFIGS_FILE, "APP_CONFIGS", write_file=write_file_if_not_exists) if success: - app_configs = dict # overwrite code-defaults (from local or external settings) + app_configs = dict # overwrite code-defaults (from local or external/online JSON settings file) #else app_configs = return diff --git a/official-templates/better-ai-launcher/app/utils/app_utils.py b/official-templates/better-ai-launcher/app/utils/app_utils.py index 3001754..c846a56 100644 --- a/official-templates/better-ai-launcher/app/utils/app_utils.py +++ b/official-templates/better-ai-launcher/app/utils/app_utils.py @@ -13,11 +13,19 @@ import xml.etree.ElementTree as ET import time import datetime import shutil -from utils.app_configs import (DEBUG_SETTINGS, pretty_dict, init_app_configs, init_debug_settings, write_debug_setting, ensure_kohya_local_venv_is_symlinked) +from utils.app_configs import (app_configs, DEBUG_SETTINGS, pretty_dict, init_app_configs, init_debug_settings, write_debug_setting, ensure_kohya_local_venv_is_symlinked) from utils.model_utils 
import (get_sha256_hash_from_file) INSTALL_STATUS_FILE = '/tmp/install_status.json' +# lutzapps - support for bkohya gradio url +BKOHYA_LAUNCH_URL = "" # will be captured during run_app('bkohya', ...) from bkohya log +# e.g. https://85f6f17d6d725c6cde.gradio.live + +def get_bkohya_launch_url() -> str: + global BKOHYA_LAUNCH_URL + return BKOHYA_LAUNCH_URL + def is_process_running(pid): try: process = psutil.Process(pid) @@ -33,8 +41,33 @@ def run_app(app_name, command, running_processes): 'log': [], 'status': 'running' } + + # lutzapps - capture the gradio-url for bkohya app + global BKOHYA_LAUNCH_URL + + BKOHYA_LAUNCH_URL = "" # will be captured during run_app('bkohya', ...) from bkohya log + # e.g. https://85f6f17d6d725c6cde.gradio.live for line in process.stdout: + # wait for gradio-url in bkohya log (the --share option generates a gradio url) + if app_name == 'bkohya' and BKOHYA_LAUNCH_URL == "": + gradio_mode = ("--share" in command.lower()) + if gradio_mode and ".gradio.live" in line: + # get the gradio url from the log line + # line = '* Running on public URL: https://85f6f17d6d725c6cde.gradio.live\n' + gradio_url_pattern = r"https://([\w.-]+(?:\.[\w.-]+)+)" + + match = re.search(gradio_url_pattern, line) + if match: + BKOHYA_LAUNCH_URL = match.group(0) # Full URL, e.g., "https://85f6f17d6d725c6cde.gradio.live" + print(f"Public Gradio-URL found in bkohya log: {BKOHYA_LAUNCH_URL}") + + elif not gradio_mode and "127.0.0.1" in line: # only wait for this when gradio_mode = False (=local URL mode) + port = app_configs[app_name]['port'] # read the configured port from app_configs + # line = '* Running on local URL: http://127.0.0.1:7864 + BKOHYA_LAUNCH_URL = f"http://127.0.0.1:{port}" + print(f"Local-URL found in bkohya log: {BKOHYA_LAUNCH_URL}") + running_processes[app_name]['log'].append(line.strip()) if len(running_processes[app_name]['log']) > 1000: running_processes[app_name]['log'] = running_processes[app_name]['log'][-1000:] diff --git a/official-templates/better-ai-launcher/docker-compose.debug.yml b/official-templates/better-ai-launcher/docker-compose.debug.yml index f68f72c..a647da6 100644 --- a/official-templates/better-ai-launcher/docker-compose.debug.yml +++ b/official-templates/better-ai-launcher/docker-compose.debug.yml @@ -18,9 +18,9 @@ services: # - 3000:3000 # ComfyUI # - 6006:6006 # Tensorboard (needed by kohya_ss) - # - 7860:7860 # Kohya-ss (lutzapps - added new Kohya app with FLUX support) # - 7862:7862 # Forge (aka Stable-Diffiusion-WebUI-Forge) # - 7863:7863 # A1111 (aka Stable-Diffiusion-WebUI) + # - 7864:7864 # Kohya-ss (lutzapps - added new Kohya app with FLUX support) env_file: - .env # pass additional env-vars (hf_token, civitai token, ssh public-key) from ".env" file to container diff --git a/official-templates/better-ai-launcher/docker-compose.yml b/official-templates/better-ai-launcher/docker-compose.yml index c4c9b34..e73a5bf 100644 --- a/official-templates/better-ai-launcher/docker-compose.yml +++ b/official-templates/better-ai-launcher/docker-compose.yml @@ -19,6 +19,6 @@ services: - 3000:3000 # ComfyUI - 6006:6006 # Tensorboard (needed by kohya_ss) - - 7860:7860 # Kohya-ss (lutzapps - added new Kohya app with FLUX support) - 7862:7862 # Forge (aka Stable-Diffiusion-WebUI-Forge) - 7863:7863 # A1111 (aka Stable-Diffiusion-WebUI) + - 7864:7864 # Kohya-ss (lutzapps - added new Kohya app with FLUX support) diff --git a/official-templates/better-ai-launcher/nginx/readme.html b/official-templates/better-ai-launcher/nginx/readme.html index 92d6b99..dd52108 100644 
--- a/official-templates/better-ai-launcher/nginx/readme.html +++ b/official-templates/better-ai-launcher/nginx/readme.html @@ -41,6 +41,7 @@
  • Better Comfy UI
  • Better Forge
  • Better A1111
+  • Better Kohya
  • Getting Started
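
Note: the gradio/local launch-url detection added to run_app() above boils down to one regex plus two substring checks on the kohya_ss log output. A minimal standalone sketch, assuming the log lines look like the examples quoted in the comments of this patch (the sample URL and port 7864 are taken from those examples):

    import re

    # sample kohya_ss log lines, as quoted in the patch comments above (assumed format)
    sample_lines = [
        "* Running on local URL:  http://127.0.0.1:7864",
        "* Running on public URL: https://b6365c256c395e755b.gradio.live",
    ]

    # same pattern run_app() uses to pull the public gradio url out of a log line
    gradio_url_pattern = r"https://([\w.-]+(?:\.[\w.-]+)+)"

    for line in sample_lines:
        if ".gradio.live" in line:
            match = re.search(gradio_url_pattern, line)
            if match:
                print("gradio mode ->", match.group(0))    # https://b6365c256c395e755b.gradio.live
        elif "127.0.0.1" in line:
            print("local mode  -> http://127.0.0.1:7864")  # fall back to the configured app port

The '/get_bkohya_launch_url' route then reports 'gradio' or 'local' (depending on whether '--share' is in the configured command) together with whatever URL has been captured so far, and openApp() in index.html shows an alert until that URL is non-empty.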