diff --git a/official-templates/better-ai-launcher/Dockerfile b/official-templates/better-ai-launcher/Dockerfile index ec31773..764a7b9 100644 --- a/official-templates/better-ai-launcher/Dockerfile +++ b/official-templates/better-ai-launcher/Dockerfile @@ -7,22 +7,75 @@ FROM ${BASE_IMAGE:-madiator2011/better-base:cuda12.4} AS base ARG BASE_IMAGE ENV BASE_IMAGE=$BASE_IMAGE +# lutzapps - replaced by above bake build-args #FROM madiator2011/better-base:cuda12.4 AS base # lutzapps - prepare for local developement and debugging # needed to change the ORDER of "apt-get commands" and move the "update-alternatives" for python3 # AFTER the "apt-get remove -y python3.10" cmd, OTHERWISE the symlink to python3 # is broken in the image and the VSCode debugger could not exec "python3" as CMD overwrite +# also fixed a boring "Blinker" blocking error -# Install Python 3.11, set it as default, and remove Python 3.10 RUN apt-get update && \ + ### ---> needed Tools for Installer # removed: 2x git nginx ffmpeg (as they are already installed with the base image) # added: pigz (for parallel execution of TAR files); zip (for easier folder compression) - apt-get install -y python3.11 python3.11-venv python3.11-dev python3.11-distutils \ - aria2 pigz zip pv rsync zstd libtcmalloc-minimal4 bc && \ - apt-get remove -y python3.10 python3.10-minimal libpython3.10-minimal libpython3.10-stdlib && \ + apt-get install -y aria2 pigz zip pv rsync zstd libtcmalloc-minimal4 bc \ + # add Python3.11 as system Python version, serving the Python Flask App + python3.11 python3.11-venv python3.11-dev python3.11-distutils && \ + # not remove Python3.10, as we need it for "official" app support (e.g. 
for kohya_ss VENV) + ###apt-get remove -y python3.10 python3.10-minimal libpython3.10-minimal libpython3.10-stdlib && \ + # + # setup an "alias" for "python" be symlinked to Python3.11 + # (which is the default anyway after this installation here of Python 3.11) update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1 && \ + # + # setup the "python3" alias for Python3.11, as this is what the debugger needs (not work with 3.10) update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1 && \ + # + # VENV will have their own "preferred/supported/recommended" Python version, as e.g. + # the "kohya_ss" app, only supports Python up to version 3.10 (but not 3.11) + # + # until here we have a broken "Blinker" installation from some base images before, + # and if we try to "update" "Blinker" for Python3.10 or via e.g. "pip install --upgrade blinker", + # or "pip install blinker==x.y.z.z", this breaks, as it was installed in an APT bundle, + # which can not be safely upgraded (= Uninstall/Reinstall)! It breaks as follows: + # we get a blinker 1.4 uninstall error chained by trying to uninstall "distutils": + # 8.568 Found existing installation: blinker 1.4 + # 8.782 error: uninstall-distutils-installed-package + # 8.782 + # 8.782 × Cannot uninstall blinker 1.4 + # 8.782 ╰─> It is a distutils installed project and thus we cannot accurately determine which files belong to it which would lead to only a partial uninstall. + # that not only blocks building the docker image, but later also breaks the "kohya_ss" app during setup, + # which try to install Blinker and Python310-venv from "setup-runpod.sh": + # # Install tk and python3.10-venv + # echo "Installing tk and python3.10-venv..." 
+ # apt update -y && apt install -y python3-tk python3.10-venv + # + # Python 3.10 needs to run as Kohya's "official" requirement, and is used in the VENV + # + # first uninstall the APT bundle package for "Blinker" + apt-get remove -y python3-blinker && \ + # then re-install the APT unbundled package of "Blinker back", + # together with Python3.10 venv, which we need to setup the kohya_ss VENV + apt-get install -y python3-tk python3.10-venv && \ + # this re-captures back the "python3" alias for Python3.10, but not the "python" alias (stays for Python3.11) + # the "Python3.11" and "Python3.10" cmds work too + # global PIP is 3.11, VENV pip is 3.10 + # + # ---> CUDA 12.4 Toolkit (is already in the 12.4 base-image) + wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/cuda-keyring_1.1-1_all.deb && \ + dpkg -i cuda-keyring_1.1-1_all.deb && \ + ### need to refresh the repository metatdata, after downloading this NVIDIA downloaded package list!!! + apt-get update && \ + apt-get -y install cuda-toolkit-12-4 && \ + # + # ---> get the latest cuDNN 9.x version supporting CUDA 12.x + # remove the current dev package which depends on libcudnn9-cuda-12 and breaks the upgrade otherwise + apt-get remove -y --allow-change-held-packages libcudnn9-cuda-12 libcudnn9-dev-cuda-12 && \ + apt-get -y --allow-change-held-packages install cudnn-cuda-12 && \ + # + # clean-up resources and caches apt-get autoremove -y && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* diff --git a/official-templates/better-ai-launcher/app/app.py b/official-templates/better-ai-launcher/app/app.py index 7021ad8..871e8f0 100644 --- a/official-templates/better-ai-launcher/app/app.py +++ b/official-templates/better-ai-launcher/app/app.py @@ -28,8 +28,7 @@ from utils.shared_models import ( SHARED_MODELS_DIR, SHARED_MODEL_FOLDERS, SHARED_MODEL_FOLDERS_FILE, ensure_shared_models_folders, APP_INSTALL_DIRS, APP_INSTALL_DIRS_FILE, init_app_install_dirs, # APP_INSTALL_DIRS dict/file/function 
MAP_APPS, sync_with_app_configs_install_dirs, # internal MAP_APPS dict and sync function - SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, init_shared_model_app_map, # SHARED_MODEL_APP_MAP dict/file/function - write_dict_to_jsonfile, read_dict_from_jsonfile, PrettyDICT # JSON helper functions + SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, init_shared_model_app_map # SHARED_MODEL_APP_MAP dict/file/function ) # the "update_model_symlinks()" function replaces the app.py function with the same same # and redirects to same function name "update_model_symlinks()" in the new "utils.shared_models" module @@ -340,92 +339,12 @@ def remove_existing_app_config(app_name): return jsonify({'status': 'success', 'message': f'App {app_name} removed successfully'}) return jsonify({'status': 'error', 'message': f'App {app_name} not found'}) -# unused function -def obsolate_update_model_symlinks(): - # lutzapps - CHANGE #3 - use the new "shared_models" module for app model sharing - # remove this whole now unused function - return "replaced by utils.shared_models.update_model_symlinks()" - # modified function def setup_shared_models(): # lutzapps - CHANGE #4 - use the new "shared_models" module for app model sharing jsonResult = update_model_symlinks() return SHARED_MODELS_DIR # shared_models_dir is now owned and managed by the "shared_models" utils module - # remove below unused code - - shared_models_dir = '/workspace/shared_models' - model_types = ['Stable-diffusion', 'VAE', 'Lora', 'ESRGAN'] - - # Create shared models directory if it doesn't exist - os.makedirs(shared_models_dir, exist_ok=True) - - for model_type in model_types: - shared_model_path = os.path.join(shared_models_dir, model_type) - - # Create shared model type directory if it doesn't exist - os.makedirs(shared_model_path, exist_ok=True) - - # Create a README file in the shared models directory - readme_path = os.path.join(shared_models_dir, 'README.txt') - if not os.path.exists(readme_path): - with 
open(readme_path, 'w') as f: - f.write("Upload your models to the appropriate folders:\n\n") - f.write("- Stable-diffusion: for Stable Diffusion models\n") - f.write("- VAE: for VAE models\n") - f.write("- Lora: for LoRA models\n") - f.write("- ESRGAN: for ESRGAN upscaling models\n\n") - f.write("These models will be automatically linked to all supported apps.") - - print(f"Shared models directory created at {shared_models_dir}") - print("Shared models setup completed.") - - return shared_models_dir - -# unused function -def obsolate_update_model_symlinks(): - # lutzapps - CHANGE #5 - use the new "shared_models" module for app model sharing - # remove this whole now unused function - return "replaced by utils.shared_models.update_model_symlinks()" - - shared_models_dir = '/workspace/shared_models' - apps = { - 'stable-diffusion-webui': '/workspace/stable-diffusion-webui/models', - 'stable-diffusion-webui-forge': '/workspace/stable-diffusion-webui-forge/models', - 'ComfyUI': '/workspace/ComfyUI/models' - } - model_types = ['Stable-diffusion', 'VAE', 'Lora', 'ESRGAN'] - - for model_type in model_types: - shared_model_path = os.path.join(shared_models_dir, model_type) - - if not os.path.exists(shared_model_path): - continue - - for app, app_models_dir in apps.items(): - if app == 'ComfyUI': - if model_type == 'Stable-diffusion': - app_model_path = os.path.join(app_models_dir, 'checkpoints') - elif model_type == 'Lora': - app_model_path = os.path.join(app_models_dir, 'loras') - elif model_type == 'ESRGAN': - app_model_path = os.path.join(app_models_dir, 'upscale_models') - else: - app_model_path = os.path.join(app_models_dir, model_type.lower()) - else: - app_model_path = os.path.join(app_models_dir, model_type) - - # Create the app model directory if it doesn't exist - os.makedirs(app_model_path, exist_ok=True) - - # Create symlinks for each file in the shared model directory - for filename in os.listdir(shared_model_path): - src = os.path.join(shared_model_path, 
filename) - dst = os.path.join(app_model_path, filename) - if os.path.isfile(src) and not os.path.exists(dst): - os.symlink(src, dst) - - print("Model symlinks updated.") def update_symlinks_periodically(): while True: @@ -436,57 +355,6 @@ def start_symlink_update_thread(): thread = threading.Thread(target=update_symlinks_periodically, daemon=True) thread.start() -# unused function -def obsolate_recreate_symlinks(): - # lutzapps - CHANGE #6 - use the new "shared_models" module for app model sharing - # remove this whole now unused function - return "replaced by utils.shared_models.update_model_symlinks()" - - shared_models_dir = '/workspace/shared_models' - apps = { - 'stable-diffusion-webui': '/workspace/stable-diffusion-webui/models', - 'stable-diffusion-webui-forge': '/workspace/stable-diffusion-webui-forge/models', - 'ComfyUI': '/workspace/ComfyUI/models' - } - model_types = ['Stable-diffusion', 'VAE', 'Lora', 'ESRGAN'] - - for model_type in model_types: - shared_model_path = os.path.join(shared_models_dir, model_type) - - if not os.path.exists(shared_model_path): - continue - - for app, app_models_dir in apps.items(): - if app == 'ComfyUI': - if model_type == 'Stable-diffusion': - app_model_path = os.path.join(app_models_dir, 'checkpoints') - elif model_type == 'Lora': - app_model_path = os.path.join(app_models_dir, 'loras') - elif model_type == 'ESRGAN': - app_model_path = os.path.join(app_models_dir, 'upscale_models') - else: - app_model_path = os.path.join(app_models_dir, model_type.lower()) - else: - app_model_path = os.path.join(app_models_dir, model_type) - - # Remove existing symlinks - if os.path.islink(app_model_path): - os.unlink(app_model_path) - elif os.path.isdir(app_model_path): - shutil.rmtree(app_model_path) - - # Create the app model directory if it doesn't exist - os.makedirs(app_model_path, exist_ok=True) - - # Create symlinks for each file in the shared model directory - for filename in os.listdir(shared_model_path): - src = 
os.path.join(shared_model_path, filename) - dst = os.path.join(app_model_path, filename) - if os.path.isfile(src) and not os.path.exists(dst): - os.symlink(src, dst) - - return "Symlinks recreated successfully." - # modified function @app.route('/recreate_symlinks', methods=['POST']) def recreate_symlinks_route(): @@ -494,13 +362,6 @@ def recreate_symlinks_route(): jsonResult = update_model_symlinks() return jsonResult - # remove below unused code - - try: - message = recreate_symlinks() - return jsonify({'status': 'success', 'message': message}) - except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) # modified function @app.route('/create_shared_folders', methods=['POST']) @@ -508,39 +369,6 @@ def create_shared_folders(): # lutzapps - CHANGE #8 - use the new "shared_models" module for app model sharing jsonResult = ensure_shared_models_folders() return jsonResult - # remove below unused code - - try: - shared_models_dir = '/workspace/shared_models' - model_types = ['Stable-diffusion', 'Lora', 'embeddings', 'VAE', 'hypernetworks', 'aesthetic_embeddings', 'controlnet', 'ESRGAN'] - - # Create shared models directory if it doesn't exist - os.makedirs(shared_models_dir, exist_ok=True) - - for model_type in model_types: - shared_model_path = os.path.join(shared_models_dir, model_type) - - # Create shared model type directory if it doesn't exist - os.makedirs(shared_model_path, exist_ok=True) - - # Create a README file in the shared models directory - readme_path = os.path.join(shared_models_dir, 'README.txt') - if not os.path.exists(readme_path): - with open(readme_path, 'w') as f: - f.write("Upload your models to the appropriate folders:\n\n") - f.write("- Stable-diffusion: for Stable Diffusion checkpoints\n") - f.write("- Lora: for LoRA models\n") - f.write("- embeddings: for Textual Inversion embeddings\n") - f.write("- VAE: for VAE models\n") - f.write("- hypernetworks: for Hypernetwork models\n") - f.write("- aesthetic_embeddings: for 
Aesthetic Gradient embeddings\n") - f.write("- controlnet: for ControlNet models\n") - f.write("- ESRGAN: for ESRGAN upscaling models\n\n") - f.write("These models will be automatically linked to all supported apps.") - - return jsonify({'status': 'success', 'message': 'Shared model folders created successfully.'}) - except Exception as e: - return jsonify({'status': 'error', 'message': str(e)}) def save_civitai_token(token): with open(CIVITAI_TOKEN_FILE, 'w') as f: @@ -644,7 +472,7 @@ def get_model_types_route(): 'desc': model_type_description } - i = i + 1 + i += 1 return model_types_dict diff --git a/official-templates/better-ai-launcher/app/templates/index.html b/official-templates/better-ai-launcher/app/templates/index.html index 0d2408c..4dae966 100644 --- a/official-templates/better-ai-launcher/app/templates/index.html +++ b/official-templates/better-ai-launcher/app/templates/index.html @@ -2418,7 +2418,7 @@
+
+ return
+
+init_app_configs() # load from JSON file (local or remote) with code-defaults otherwise
+
+# lutzapps - add kohya_ss support and handle the required local "venv" within the "kohya_ss" app folder
def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
+ ### create a folder symlink for kohya's "local" 'venv' dir
# as kohya_ss' "setup.sh" assumes a "local" VENV under "/workspace/kohya_ss/venv",
# we will create a folder symlink "/workspace/kohya_ss/venv" -> "/workspace/bkohya"
- # to our global VENV and rename the original "venv" folder to "venv(BAK)"
+ # to our global VENV and rename the original "venv" folder to "venv(BAK)", if any exists,
+ # which will not be the case normally.
- if not DEBUG_SETTINGS['use_bkohya_local_venv_symlink'] == "1":
- return True, "" # not fix the local KOHYA_SS VENV
+ if not DEBUG_SETTINGS['create_bkohya_to_local_venv_symlink']:
+ return True, "" # not fix the local KOHYA_SS VENV requirement
import shutil
@@ -251,13 +467,19 @@ def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
bapp_app_path = app_configs[bapp_name]["app_path"] # '/workspace/kohya_ss'
bapp_app_path_venv = f"{bapp_app_path}/venv" # '/workspace/kohya_ss/venv'
- if not os.path.exists(bapp_app_path): # kohya is not installed
- return True, "" # no need to fix the local KOHYA VENV
-
- # kohya installed and has a local "venv" folder
- if os.path.exists(bapp_app_path_venv) and os.path.isdir(bapp_app_path_venv):
+ name = app_configs[bapp_name]["name"]
- # check if this local VENV is a folderlink to target our bkohya global VENV to venv_path
+ if not os.path.exists(bapp_app_path): # kohya is not installed
+ return True, f"{name} is not installed." # no need to fix the local KOHYA VENV
+
+ # check the src-folder of 'bkohya' downloaded VENV exists
+ if not os.path.exists(bapp_venv_path): # src_path to bkohya downloaded venv does NOT exists
+ return True, f"{name} VENV is not installed." # no need to fix the local KOHYA VENV, as the global KOHYA VENV does not exist
+
+ # kohya_ss is installed
+ if os.path.isdir(bapp_app_path_venv): # and has a local "venv" folder
+
+ # check if this local VENV is a folderlink to target the bkohya global VENV to venv_path
if os.path.islink(bapp_app_path_venv):
success_message = f"kohya_ss local venv folder '{bapp_app_path_venv}' is already symlinked"
@@ -273,18 +495,17 @@ def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
i += 1
suffix = str(i)
- bak_venv_path += suffix # free target bame for "rename"
+ bak_venv_path += suffix # free target name for "rename"(move) operation of the folder
shutil.move(bapp_app_path_venv, bak_venv_path) # move=rename
print(f"local venv folder '{bapp_app_path_venv}' detected and renamed to '{bak_venv_path}'")
+ # now the path to the local "venv" is free; if a folder already existed there, it has been renamed
### create a folder symlink for kohya's "local" venv dir
- # check the src-folder to kohya downloaded venv exists
- if os.path.exists(bapp_venv_path): # src_path to bkohya downloaded venv exists
- # create a folder symlink for kohya local venv dir
- os.symlink(bapp_venv_path, bapp_app_path_venv, target_is_directory=True)
- success_message = f"created a symlink for kohya_ss local venv folder: '{bapp_venv_path}' -> '{bapp_app_path_venv}'"
- print(success_message)
+ # create a folder symlink for kohya local venv dir
+ os.symlink(bapp_venv_path, bapp_app_path_venv, target_is_directory=True)
+ success_message = f"created a symlink for kohya_ss local venv folder: '{bapp_app_path_venv}' -> '{bapp_venv_path}'"
+ print(success_message)
return True, success_message
@@ -295,4 +516,42 @@ def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
return False, error_message
# lutzapps - add kohya_ss venv support
-ensure_kohya_local_venv_is_symlinked()
\ No newline at end of file
+ensure_kohya_local_venv_is_symlinked()
+
+# some verification steps of the VENV setup of the "kohya_ss" app:
+# even if it "looks" like the "venv" is in a local sub-folder of the "kohya_ss" dir,
+# this location is only "aliased/symlinked" there from the globally downloaded
+# tarfile "bkohya.tar.gz" which was expanded separately into the folder "/workspace/bkohya".
+# So the VENV can be redownloaded separately from the github app at "/workspace/kohya_ss"
+ # root@9452ad7f4cd6:/workspace/kohya_ss# python --version
+ # Python 3.11.10
+ # root@fe889cc68f5a:/workspace/kohya_ss# pip --version
+ # pip 24.3.1 from /usr/local/lib/python3.11/dist-packages/pip (python 3.11)
+ #
+ # root@9452ad7f4cd6:/workspace/kohya_ss# python3 --version
+ # Python 3.11.10
+ # root@fe889cc68f5a:/workspace/kohya_ss# pip3 --version
+ # pip 24.3.1 from /usr/local/lib/python3.11/dist-packages/pip (python 3.11)
+ #
+ # root@9452ad7f4cd6:/workspace/kohya_ss# ls venv -la
+ # lrwxr-xr-x 1 root root 17 Nov 8 00:06 venv -> /workspace/bkohya
+ #
+ # root@9452ad7f4cd6:/workspace/kohya_ss# source venv/bin/activate
+ #
+ # (bkohya) root@9452ad7f4cd6:/workspace/kohya_ss# ls venv/bin/python* -la
+ # lrwxr-xr-x 1 root root 10 Nov 8 00:48 venv/bin/python -> python3.10
+ # lrwxr-xr-x 1 root root 10 Nov 8 00:48 venv/bin/python3 -> python3.10
+ # lrwxr-xr-x 1 root root 19 Nov 8 00:48 venv/bin/python3.10 -> /usr/bin/python3.10
+ #
+ # (bkohya) root@9452ad7f4cd6:/workspace/kohya_ss# python --version
+ # Python 3.10.12
+ # (bkohya) root@fe889cc68f5a:/workspace/kohya_ss# pip --version
+ # pip 22.0.2 from /workspace/venv/lib/python3.10/site-packages/pip (python 3.10)
+ #
+ # (bkohya) root@9452ad7f4cd6:/workspace/kohya_ss# python3 --version
+ # Python 3.10.12
+ # (bkohya) root@fe889cc68f5a:/workspace/kohya_ss# pip3 --version
+ # pip 22.0.2 from /workspace/venv/lib/python3.10/site-packages/pip (python 3.10)
+ #
+ # (bkohya) root@9452ad7f4cd6:/workspace/kohya_ss# deactivate
+ # root@9452ad7f4cd6:/workspace/kohya_ss#
diff --git a/official-templates/better-ai-launcher/app/utils/app_utils.py b/official-templates/better-ai-launcher/app/utils/app_utils.py
index b484bc4..3001754 100644
--- a/official-templates/better-ai-launcher/app/utils/app_utils.py
+++ b/official-templates/better-ai-launcher/app/utils/app_utils.py
@@ -13,6 +13,8 @@ import xml.etree.ElementTree as ET
import time
import datetime
import shutil
+from utils.app_configs import (DEBUG_SETTINGS, pretty_dict, init_app_configs, init_debug_settings, write_debug_setting, ensure_kohya_local_venv_is_symlinked)
+from utils.model_utils import (get_sha256_hash_from_file)
INSTALL_STATUS_FILE = '/tmp/install_status.json'
@@ -180,41 +182,45 @@ import time
# yield (out_line.rstrip(), err_line.rstrip())
-# this ist the v2 ("fast") version for "download_and_unpack_venv()" - can be (de-)/activated in DEBUG_SETTINGS dict
-def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message) -> tuple[bool, str]:
+# this is the v2 ("fast") version for "download_and_unpack_venv()" - can be (de-)/activated in DEBUG_SETTINGS dict
+def download_and_unpack_venv_v2(app_name:str, app_configs:dict, send_websocket_message) -> tuple[bool, str]:
+ # load the latest configured DEBUG_SETTINGS from the stored setting of the DEBUG_SETTINGS_FILE
+ init_debug_settings() # reload latest DEBUG_SETTINGS
+ # as this could overwrite the APP_CONFIGS_MANIFEST_URL, we reload the app_configs global dict
+ # from whatever Url is now defined
+ init_app_configs() # reload latest app_configs dict
+
app_config = app_configs.get(app_name)
if not app_config:
return False, f"App '{app_name}' not found in configurations."
venv_path = app_config['venv_path']
- app_path = app_config['app_path']
download_url = app_config['download_url']
- total_size = app_config['size']
+ archive_size = app_config['archive_size']
+
tar_filename = os.path.basename(download_url)
workspace_dir = '/workspace'
downloaded_file = os.path.join(workspace_dir, tar_filename)
- from utils.app_configs import (DEBUG_SETTINGS, pretty_dict, init_debug_settings, write_debug_setting, ensure_kohya_local_venv_is_symlinked)
- # load the latest configured DEBUG_SETTINGS from the stored setting of the DEBUG_SETTINGS_FILE
- init_debug_settings()
- # show currently using DEBUG_SETTINGS
- print(f"\nCurrently using 'DEBUG_SETTINGS':\n{pretty_dict(DEBUG_SETTINGS)}")
-
write_debug_setting('tar_filename', tar_filename)
write_debug_setting('download_url', download_url)
try:
+ if DEBUG_SETTINGS['skip_to_github_stage']:
+ success, message = clone_application(app_config,send_websocket_message)
+ return success, message
+
save_install_status(app_name, 'in_progress', 0, 'Downloading')
- send_websocket_message('install_log', {'app_name': app_name, 'log': f'Downloading {total_size / (1024 * 1024):.2f} MB ...'})
+ send_websocket_message('install_log', {'app_name': app_name, 'log': f'Downloading {archive_size / (1024 * 1024):.2f} MB ...'})
start_time_download = time.time()
# debug with existing local cached TAR file
if os.path.exists(downloaded_file):
- write_debug_setting('used_local_tar', "1") # indicate using cached TAR file
- send_websocket_message('install_log', {'app_name': app_name, 'log': f"Used cached local tarfile '{downloaded_file}'"})
+ write_debug_setting('used_local_tarfile', True) # indicate using cached TAR file
+ send_websocket_message('used_local_tarfile', {'app_name': app_name, 'log': f"Used cached local tarfile '{downloaded_file}'"})
else:
- write_debug_setting('used_local_tar', "0") # indicate no cached TAR file found
+ write_debug_setting('used_local_tarfile', False) # indicate no cached TAR file found
try: ### download with ARIA2C
@@ -275,8 +281,8 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
gid = match.group(1) # e.g., "cd57da"
downloaded_size_value = match.group(2) # e.g., "2.1"
downloaded_size_unit = match.group(3) # e.g., "GiB"
- total_size_value = match.group(4) # e.g., "4.0"
- total_size_unit = match.group(5) # e.g., "GiB"
+ total_size_value = match.group(4) # e.g., "4.0" (this could replace the 'archive_size' from the manifest)
+ total_size_unit = match.group(5) # e.g., "GiB" (with calculation to bytes, but not sure if its rounded)
percentage = int(match.group(6)) # e.g., "53"
connection_count = int(match.group(7)) # e.g., "16"
download_rate_value = match.group(8) # e.g., "1.9"
@@ -296,8 +302,8 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
### original code
#speed = downloaded_size / elapsed_time # bytes/sec
- #percentage = (downloaded_size / total_size) * 100
- #eta = (total_size - downloaded_size) / speed if speed > 0 else 0 # sec
+ #percentage = (downloaded_size / archive_size) * 100
+ #eta = (archive_size - downloaded_size) / speed if speed > 0 else 0 # sec
send_websocket_message('install_progress', {
'app_name': app_name,
@@ -329,7 +335,7 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
os.remove(f"{tar_filename}.aria2")
except Exception as e:
- error_msg = f"ERROR in download_and_unpack_venv_fastversion():download with ARIA2C\ncmdline: '{cmd_line}'\nException: {str(e)}"
+ error_msg = f"ERROR in download_and_unpack_venv_v2():download with ARIA2C\ncmdline: '{cmd_line}'\nException: {str(e)}"
print(error_msg)
error_message = f"Downloading VENV failed: {download_process.stderr.read() if download_process.stderr else 'Unknown error'}"
@@ -356,8 +362,8 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
# if elapsed_time > 0:
# speed = downloaded_size / elapsed_time
- # percentage = (downloaded_size / total_size) * 100
- # eta = (total_size - downloaded_size) / speed if speed > 0 else 0
+ # percentage = (downloaded_size / archive_size) * 100
+ # eta = (archive_size - downloaded_size) / speed if speed > 0 else 0
# send_websocket_message('install_progress', {
# 'app_name': app_name,
@@ -375,29 +381,72 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
return False, error_message
- send_websocket_message('install_log', {'app_name': app_name, 'log': 'Download completed. Starting unpacking...'})
- send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 100, 'stage': 'Download Complete'})
+ send_websocket_message('install_log', {'app_name': app_name, 'log': 'Download completed. Starting Verification ...'})
+ # we use a 99% progress and indicate 1% for Verification against the files SHA256 hash
+ send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 99, 'stage': 'Downloading'})
total_duration_download = f"{datetime.timedelta(seconds=int(time.time() - start_time_download))}"
write_debug_setting('total_duration_download', total_duration_download)
print(f"download did run {total_duration_download} for app '{app_name}'")
+ ### VERIFY stage
+ #
+ # Create TAR from the VENV current directory:
+ # IMPORTANT: cd INTO the folder you want to compress, as we use "." for source folder,
+ # to avoid having the foldername in the TAR file !!!
+ # PV piping is "nice-to-have" and is only used for showing "Progress Values" during compressing
+ #
+ # cd /workspace/bkohya
+ # #tar -czf | pv > /workspace/bkohya.tar.gz . (not the smallest TAR)#
+ # tar -cvf - . | gzip -9 - | pv > /workspace/bkohya.tar.gz
+ #
+ # afterwards create the SHA256 hash from this TAR with
+ # shasum -a 256 bkohya.tar.gz
+ #
+ # also report the uncompressed size from the current VENV directory,
+ # we need that as the 100% base for the progress indicators when uncompressing the TAR
+
+
+ # verify the downloaded TAR file against its SHA256 hash value from the manifest
+
+ download_sha256_hash = app_config["sha256_hash"].lower() # get the sha256_hash from the app manifest
+ file_verified = False
+
+ print(f"getting SHA256 Hash for '{downloaded_file}'")
+ successfull_HashGeneration, file_sha256_hash = get_sha256_hash_from_file(downloaded_file)
+
+ if successfull_HashGeneration and file_sha256_hash.lower() == download_sha256_hash.lower():
+ file_verified = True
+ message = f"Downloaded file '{os.path.basename(downloaded_file)}' was successfully (SHA256) verified."
+ print(message)
+
+ else:
+ if successfull_HashGeneration: # the generated SHA256 file hash did not match against the metadata hash
+ error_message = f"The downloaded file '{os.path.basename(downloaded_file)}' has DIFFERENT \nSHA256: {file_sha256_hash} as in the manifest\nFile is possibly corrupted and was DELETED!"
+ print(error_message)
+
+ os.remove(downloaded_file) # delete corrupted, downloaded file
+
+
+ else: # NOT successful, the hash contains the Exception
+ error_msg = file_sha256_hash
+ error_message = f"Exception occured while generating the SHA256 hash for '{downloaded_file}':\n{error_msg}"
+ print(error_message)
+
+ if not file_verified:
+ send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
+ save_install_status(app_name, 'failed', 0, 'Failed')
+
+ return False, error_message
+
+ send_websocket_message('install_log', {'app_name': app_name, 'log': 'Verification completed. Starting unpacking ...'})
+ send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 100, 'stage': 'Download Complete'})
+
+
### Decompression Stage (Unpacking the downloaded VENV)
start_time_unpack = time.time()
- # lutzapps - fix TAR packaging bug (compressed from the workspace root instead of bkohya VENV folder)
- # e.g. "bkohya/bin/activate", together with venv_path ("/workspace/bkohya") ends up as "/workspace/bkohya/bkohya/bin/activate"
- # TODO: need to repackage Kohya VENV correctly and then remove this fix!!!
-
- if app_name == "bkohya" and DEBUG_SETTINGS['use_bkohya_tar_folder_fix'] == "1":
- venv_path = "/workspace" # extracts then correctly to '/workspace/bkohya', instead of '/workspace/bkohya/bkohya'
-
- # Create TAR from the VENV current directory:
- # cd ~/Projects/Docker/madiator/workspace/bkohya
- # [tar -czf | pv > ~/Projects/Docker/madiator/workspace/bkohya.tar.gz . (not the smallest TAR)]
- # tar -cvf - . | gzip -9 - | pv > ~/Projects/Docker/madiator/workspace/bkohya.tar.gz
-
# Ensure the venv directory exists
os.makedirs(f"{venv_path}/", exist_ok=True) # append trailing "/" to make sure the last sub-folder is created
@@ -419,11 +468,8 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
# 'bforge': 7689838771
# 'bkohya': 12192767148
- uncompressed_size_bytes = DEBUG_SETTINGS["manifests"][app_name]["venv_uncompressed_size"]
+ uncompressed_size_bytes = app_config["venv_uncompressed_size"]
- #sha256_hash = DEBUG_SETTINGS["manifests"][app_name]["sha256_hash"]
- # TODO: create with 'shasum -a 256 xxx.tar.gz'
-
### NOTE: as it turns out GZIP has problems with files bigger than 2 or 4 GB due to internal field bit restrictions
# cmd_line = f"gzip -l {downloaded_file}" # e.g. for 'ba1111.tar.gz'
@@ -601,7 +647,7 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
# else any other line in stdout (which we not process)
except Exception as e:
- error_msg = f"ERROR in download_and_unpack_venv_fastversion():\ncmdline: '{cmd_line}'\nException: {str(e)}"
+ error_msg = f"ERROR in download_and_unpack_venv_v2():\ncmdline: '{cmd_line}'\nException: {str(e)}"
print(error_msg)
decompression_process.wait() # let the process finish
@@ -621,8 +667,11 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 100, 'stage': 'Unpacking Complete'})
- print(f"'DEBUG_SETTINGS' after this run:\n{pretty_dict(DEBUG_SETTINGS)}")
+ ### installing the App from GITHUB
+ # Clone the repository if it doesn't exist
+ success, message = clone_application(app_name)
+ print(f"'DEBUG_SETTINGS' after this run:\n{pretty_dict(DEBUG_SETTINGS)}")
### original "v1" code (very slow code because of STATISTICS glory
@@ -647,66 +696,29 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
# process.wait()
# rc = process.returncode
-
### installing the App from GITHUB
# Clone the repository if it doesn't exist
- if not os.path.exists(app_path):
- send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cloning repository...'})
-
- repo_url = ''
- if app_name == 'bcomfy':
- repo_url = 'https://github.com/comfyanonymous/ComfyUI.git'
- elif app_name == 'bforge':
- repo_url = 'https://github.com/lllyasviel/stable-diffusion-webui-forge.git'
- elif app_name == 'ba1111':
- repo_url = 'https://github.com/AUTOMATIC1111/stable-diffusion-webui.git'
- elif app_name == 'bkohya': # lutzapps - added new Kohya app
- repo_url = 'https://github.com/bmaltais/kohya_ss.git'
-
- try: # add a repo assignment for Kohya
- repo = git.Repo.clone_from(repo_url, app_path, progress=lambda op_code, cur_count, max_count, message: send_websocket_message('install_log', {
- 'app_name': app_name,
- 'log': f"Cloning: {cur_count}/{max_count} {message}"
- }))
- send_websocket_message('install_log', {'app_name': app_name, 'log': 'Repository cloned successfully.'})
-
- # lutzapps - make sure we use Kohya with FLUX support
- if app_name == 'bkohya':
- branch_name = "sd3-flux.1" # this branch also uses a "sd-scripts" branch "SD3" automatically
- repo.git.checkout(branch_name)
-
- # Clone ComfyUI-Manager for Better ComfyUI
- if app_name == 'bcomfy':
- custom_nodes_path = os.path.join(app_path, 'custom_nodes')
- os.makedirs(custom_nodes_path, exist_ok=True)
- comfyui_manager_path = os.path.join(custom_nodes_path, 'ComfyUI-Manager')
- if not os.path.exists(comfyui_manager_path):
- send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cloning ComfyUI-Manager...'})
- git.Repo.clone_from('https://github.com/ltdrdata/ComfyUI-Manager.git', comfyui_manager_path)
- send_websocket_message('install_log', {'app_name': app_name, 'log': 'ComfyUI-Manager cloned successfully.'})
-
- except git.exc.GitCommandError as e:
- send_websocket_message('install_log', {'app_name': app_name, 'log': f'Error cloning repository: {str(e)}'})
- return False, f"Error cloning repository: {str(e)}"
-
- if app_name == 'bkohya': # create a folder link for kohya_ss local "venv"
- ensure_kohya_local_venv_is_symlinked()
+ success, error_message = clone_application(app_name, send_websocket_message)
# Clean up the downloaded file
send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cleaning up...'})
# lutzapps - debug with local TAR
# do NOT delete the Kohya venv
- if DEBUG_SETTINGS["delete_tarfile_after_download"] == "1": # this is the default, but can be overwritten
+ if DEBUG_SETTINGS["delete_tar_file_after_download"]: # this is the default, but can be overwritten
os.remove(downloaded_file)
send_websocket_message('install_log', {'app_name': app_name, 'log': 'Installation complete. Refresh page to start app'})
- save_install_status(app_name, 'completed', 100, 'Completed')
- send_websocket_message('install_complete', {'app_name': app_name, 'status': 'success', 'message': "Virtual environment installed successfully."})
- return True, "Virtual environment installed successfully."
+ if success:
+ save_install_status(app_name, 'completed', 100, 'Completed')
+ send_websocket_message('install_complete', {'app_name': app_name, 'status': 'success', 'message': "Virtual environment installed successfully."})
+ return True, "Virtual environment installed successfully."
+ else:
+ return False, error_message
+
except requests.RequestException as e:
- error_message = f"Download failed: {str(e)}"
+ error_message = f"Download/Decompression failed: {str(e)}"
send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
save_install_status(app_name, 'failed', 0, 'Failed')
return False, error_message
@@ -716,8 +728,143 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
return False, error_message
+### installing the App from GITHUB
+# Clone the repository if it doesn't exist
+# Returns (success:bool, message:str); clones the app repo (optionally recursive),
+# checks out a non-master branch if configured, patches kohya's 'setup.sh',
+# and clones/installs the configured ComfyUI custom nodes for 'bcomfy'.
+# NOTE(review): signature takes an 'app_config' dict, but the call sites in this
+# same patch pass 'app_name' (a str) - and one of them omits 'send_websocket_message'
+# entirely; 'app_config['id']' would raise on a str. TODO reconcile signature and callers.
+def clone_application(app_config:dict, send_websocket_message) -> tuple[bool, str]:
+ try:
+ app_name = app_config['id']
+ app_path = app_config['app_path']
-def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
+ if not os.path.exists(app_path): # only install new apps
+ repo_url = app_config['repo_url']
+ branch_name = app_config['branch_name']
+ if branch_name == "": # use the default branch
+ branch_name = "master"
+ clone_recursive = app_config['clone_recursive']
+
+ send_websocket_message('install_log', {'app_name': app_name, 'log': f"Cloning repository '{repo_url}' branch '{branch_name}' recursive={clone_recursive} ..."})
+
+ repo = git.Repo.clone_from(repo_url, app_path, # first 2 params are fix, then use named params
+ #branch=branch_name, # if we provide a branch here, we ONLY get this branch downloaded
+ # we want ALL branches, so we can easily checkout different versions from kohya_ss later, without re-downloading
+ recursive=clone_recursive, # include cloning submodules recursively (if needed as with Kohya)
+ progress=lambda op_code, cur_count, max_count, message: send_websocket_message('install_log', {
+ 'app_name': app_name,
+ 'log': f"Cloning: {cur_count}/{max_count} {message}"
+ }))
+
+
+ send_websocket_message('install_log', {'app_name': app_name, 'log': 'Repository cloned successfully.'})
+
+ # lutzapps - make sure we use Kohya with FLUX support
+ if not branch_name == "master":
+ repo.git.checkout(branch_name) # checkout the "sd3-flux.1" branch, but could later switch back to "master" easy
+ # the setup can be easy verified with git, here e.g. for the "kohya_ss" app:
+ # root@fe889cc68f5a:~# cd /workspace/kohya_ss
+ # root@fe889cc68f5a:/workspace/kohya_ss# git branch
+ # master
+ # * sd3-flux.1
+ # root@fe889cc68f5a:/workspace/kohya_ss# cd sd-scripts
+ # root@fe889cc68f5a:/workspace/kohya_ss/sd-scripts# git branch
+ # * (HEAD detached at b8896aa)
+ # main
+ #
+ # in the case of kohya_ss we need to fix a bug in the 'setup.sh' file,
+ # where they forgot to adapt the branch name from "master" to "sd3-flux.1"
+ # in the "#variables" section for refreshing kohya via git with 'setup.sh'
+ if app_name == 'bkohya':
+ success, message = update_kohya_setup_sh(app_path) # patch the 'setup.sh' file
+ print(message) # shows, if the patch was needed, and applied successfully
+ else: # refresh app
+ # NOTE(review): 'app_path' is a str here - indexing it with ['refresh'] raises
+ # TypeError; presumably this should be app_config['refresh'] - TODO confirm
+ if app_path['refresh']: # app wants auto-refreshes
+ # TODO: implement app refreshes via git pull or, in the case of 'kohya_ss' via "setup.sh"
+ message = f"Refreshing of app '{app_name}' is NYI"
+ print(message)
+
+ # Clone ComfyUI-Manager and other defined custom_nodes for Better ComfyUI
+ if app_name == 'bcomfy':
+ # install all defined custom nodes
+ custom_nodes_path = os.path.join(app_path, 'custom_nodes')
+ os.makedirs(f"{custom_nodes_path}/", exist_ok=True) # append a trailing slash to be sure last dir is created
+ for custom_node in app_config['custom_nodes']:
+ name = custom_node['name']
+ path = custom_node['path']
+ repo_url = custom_node['repo_url']
+ custom_node_path = os.path.join(custom_nodes_path, path)
+
+ if not os.path.exists(custom_node_path): # only install new custom nodes
+ send_websocket_message('install_log', {'app_name': app_name, 'log': f"Cloning '{name}' ..."})
+ git.Repo.clone_from(repo_url, custom_node_path)
+ send_websocket_message('install_log', {'app_name': app_name, 'log': f"'{name}' cloned successfully."})
+
+ # install requirements
+ venv_path = app_config['venv_path']
+ #app_path = app_config['app_path'] # already defined
+
+ try:
+ # Activate the virtual environment and run the commands
+ activate_venv = f"source {venv_path}/bin/activate"
+ change_dir_command = f"cd {custom_node_path}"
+ pip_install_command = "pip install -r requirements.txt"
+
+ full_command = f"{activate_venv} && {change_dir_command} && {pip_install_command}"
+
+ # TODO: rewrite this without shell
+ process = subprocess.Popen(full_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, executable='/bin/bash')
+ output, _ = process.communicate()
+
+ # NOTE(review): these returns appear to sit inside the custom-nodes handling,
+ # so the function exits on the first pip run - remaining custom nodes, the
+ # bkohya venv-symlink step below, and the final success message are skipped
+ # for 'bcomfy'. TODO confirm intended loop/return structure.
+ if process.returncode == 0:
+ return True, f"Custom node requirements were successfully installed. Output: {output.decode('utf-8')}"
+ else:
+ return False, f"Error in custom node requirements installation. Output: {output.decode('utf-8')}"
+ except Exception as e:
+ return False, f"Error installing custom node requirements: {str(e)}"
+
+
+ except git.exc.GitCommandError as e:
+ send_websocket_message('install_log', {'app_name': app_name, 'log': f'Error cloning repository: {str(e)}'})
+ return False, f"Error cloning repository: {str(e)}"
+ except Exception as e:
+ send_websocket_message('install_log', {'app_name': app_name, 'log': f'Error cloning repository: {str(e)}'})
+ return False, f"Error cloning repository: {str(e)}"
+
+
+ if app_name == 'bkohya': # create a folder link for kohya_ss local "venv"
+ success, message = ensure_kohya_local_venv_is_symlinked()
+ if not success: # symlink not created, but still success=True and only a warning, can be fixed manually
+ message = f"{app_config['name']} was cloned and patched successfully, but the symlink to the local venv returned following problem:\n{message}"
+ else:
+ message = f"'{app_name}' was cloned successfully."
+
+ return True, message
+
+# Patch kohya_ss' 'setup.sh' so its BRANCH variable points at "sd3-flux.1"
+# instead of "master" (upstream forgot to adapt it on that branch).
+# Returns (success:bool, message:str); message says whether the patch was needed.
+# Idempotent: re-running on an already-patched file is a no-op.
+def update_kohya_setup_sh(app_path:str) -> tuple[bool, str]:
+ try:
+ # patch 'setup.sh' within the kohya_ss main folder for BRANCH="sd3-flux.1"
+ setup_sh_path = os.path.join(app_path, 'setup.sh')
+ if not os.path.exists(setup_sh_path):
+ return False, f"file '{setup_sh_path}' was not found"
+
+ with open(setup_sh_path, 'r') as file:
+ content = file.read()
+
+ # Use regex to search & replace wrong branch variable in the file
+ patched_content = re.sub(r'BRANCH="master"', 'BRANCH="sd3-flux.1"', content)
+
+ # unchanged content means the file was already on the right branch variable
+ if patched_content == content:
+ message = f"'{setup_sh_path}' already fine, patch not needed."
+ else:
+ with open(setup_sh_path, 'w') as file:
+ file.write(patched_content)
+
+ message = f"'{setup_sh_path}' needed patch, successfully patched."
+
+ return True, message
+
+ except Exception as e:
+ # on any I/O error return the exception text as the message
+ return False, str(e)
+
+def download_and_unpack_venv_v1(app_name, app_configs, send_websocket_message):
app_config = app_configs.get(app_name)
if not app_config:
return False, f"App '{app_name}' not found in configurations."
@@ -725,14 +872,14 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
venv_path = app_config['venv_path']
app_path = app_config['app_path']
download_url = app_config['download_url']
- total_size = app_config['size']
+ archive_size = app_config['size']
tar_filename = os.path.basename(download_url)
workspace_dir = '/workspace'
downloaded_file = os.path.join(workspace_dir, tar_filename)
try:
save_install_status(app_name, 'in_progress', 0, 'Downloading')
- send_websocket_message('install_log', {'app_name': app_name, 'log': f'Starting download of {total_size / (1024 * 1024):.2f} MB...'})
+ send_websocket_message('install_log', {'app_name': app_name, 'log': f'Starting download of {archive_size / (1024 * 1024):.2f} MB...'})
# lutzapps - debug with existing local TAR
if not os.path.exists(downloaded_file):
@@ -753,8 +900,8 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
if elapsed_time > 0:
speed = downloaded_size / elapsed_time
- percentage = (downloaded_size / total_size) * 100
- eta = (total_size - downloaded_size) / speed if speed > 0 else 0
+ percentage = (downloaded_size / archive_size) * 100
+ eta = (archive_size - downloaded_size) / speed if speed > 0 else 0
send_websocket_message('install_progress', {
'app_name': app_name,
@@ -869,31 +1016,34 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
return False, error_message
-### this is the function wgich switches between v0 and v1 debug setting for comparison
-def download_and_unpack_venv(app_name, app_configs, send_websocket_message) -> tuple[bool, str]:
- from app_configs import DEBUG_SETTINGS, write_debug_setting
+### this is the dispatcher function which switches between the "v1" and "v2"
+### installer code versions (DEBUG_SETTINGS['installer_codeversion']) for comparison
+def download_and_unpack_venv(app_name:str, app_configs:dict, send_websocket_message) -> tuple[bool, str]:
+ from utils.app_configs import DEBUG_SETTINGS, write_debug_setting
 installer_codeversion = DEBUG_SETTINGS['installer_codeversion'] # read from DEBUG_SETTINGS
- print(f"download_and_unpack_venv v{installer_codeversion} STARTING for '{app_name}'")
+ print(f"download_and_unpack_venv_{installer_codeversion} STARTING for '{app_name}'")
 import time
 start_time = time.time()
- if installer_codeversion == "1":
- download_and_unpack_venv(app_name, app_configs, send_websocket_message)
- elif installer_codeversion == "2":
- download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message)
+ if installer_codeversion == "v1":
+ success, message = download_and_unpack_venv_v1(app_name, app_configs, send_websocket_message)
+ elif installer_codeversion == "v2":
+ success, message = download_and_unpack_venv_v2(app_name, app_configs, send_websocket_message)
 else:
- print(f"unknown 'installer_codeversion' v{installer_codeversion} found, nothing run for app '{app_name}'")
+ error_msg = f"unknown 'installer_codeversion' {installer_codeversion} found, nothing run for app '{app_name}'"
+ print(error_msg)
+ success = False
+ message = error_msg
 total_duration = f"{datetime.timedelta(seconds=int(time.time() - start_time))}"
 write_debug_setting('app_name', app_name)
 write_debug_setting('total_duration', total_duration)
- print(f"download_and_unpack_venv v{installer_codeversion} did run {total_duration} for app '{app_name}'")
-
+ # installer_codeversion already carries the "v" prefix ("v1"/"v2"),
+ # so do NOT add another "v" here (would print "..._vv1")
+ print(f"download_and_unpack_venv_{installer_codeversion} did run {total_duration} for app '{app_name}'")
+ return success, message
def fix_custom_nodes(app_name, app_configs):
if app_name != 'bcomfy':
@@ -921,54 +1071,9 @@ def fix_custom_nodes(app_name, app_configs):
return False, f"Error fixing custom nodes: {str(e)}"
# Replace the existing install_app function with this updated version
-def install_app(app_name, app_configs, send_websocket_message):
+def install_app(app_name:str, app_configs:dict, send_websocket_message) -> tuple[bool, str]:
if app_name in app_configs:
- #return download_and_unpack_venv(app_name, app_configs, send_websocket_message)
- return download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message)
+ success, message = download_and_unpack_venv(app_name, app_configs, send_websocket_message)
+ return success, message
else:
return False, f"Unknown app: {app_name}"
-
-# unused function
-def onsolate_update_model_symlinks():
- # lutzapps - CHANGE #7 - use the new "shared_models" module for app model sharing
- # remove this whole now unused function
- return "replaced by utils.shared_models.update_model_symlinks()"
-
- shared_models_dir = '/workspace/shared_models'
- apps = {
- 'stable-diffusion-webui': '/workspace/stable-diffusion-webui/models',
- 'stable-diffusion-webui-forge': '/workspace/stable-diffusion-webui-forge/models',
- 'ComfyUI': '/workspace/ComfyUI/models'
- }
- model_types = ['Stable-diffusion', 'VAE', 'Lora', 'ESRGAN']
-
- for model_type in model_types:
- shared_model_path = os.path.join(shared_models_dir, model_type)
-
- if not os.path.exists(shared_model_path):
- continue
-
- for app, app_models_dir in apps.items():
- if app == 'ComfyUI':
- if model_type == 'Stable-diffusion':
- app_model_path = os.path.join(app_models_dir, 'checkpoints')
- elif model_type == 'Lora':
- app_model_path = os.path.join(app_models_dir, 'loras')
- elif model_type == 'ESRGAN':
- app_model_path = os.path.join(app_models_dir, 'upscale_models')
- else:
- app_model_path = os.path.join(app_models_dir, model_type.lower())
- else:
- app_model_path = os.path.join(app_models_dir, model_type)
-
- # Create the app model directory if it doesn't exist
- os.makedirs(app_model_path, exist_ok=True)
-
- # Create symlinks for each file in the shared model directory
- for filename in os.listdir(shared_model_path):
- src = os.path.join(shared_model_path, filename)
- dst = os.path.join(app_model_path, filename)
- if os.path.isfile(src) and not os.path.exists(dst):
- os.symlink(src, dst)
-
- print("Model symlinks updated.")
diff --git a/official-templates/better-ai-launcher/app/utils/model_utils.py b/official-templates/better-ai-launcher/app/utils/model_utils.py
index 69a35b1..d6ff370 100644
--- a/official-templates/better-ai-launcher/app/utils/model_utils.py
+++ b/official-templates/better-ai-launcher/app/utils/model_utils.py
@@ -71,7 +71,7 @@ def check_huggingface_url(url):
return True, repo_id, filename, folder_name, branch_name
-def download_model(url, model_name, model_type, civitai_token=None, hf_token=None, version_id=None, file_index=None):
+def download_model(url, model_name, model_type, civitai_token=None, hf_token=None, version_id=None, file_index=None) -> tuple[bool, str]:
ensure_shared_folder_exists()
is_civitai, is_civitai_api, model_id, _ = check_civitai_url(url)
is_huggingface, repo_id, hf_filename, hf_folder_name, hf_branch_name = check_huggingface_url(url) # TODO: double call
@@ -95,7 +95,7 @@ def download_model(url, model_name, model_type, civitai_token=None, hf_token=Non
return success, message
# lutzapps - added SHA256 checks for already existing ident and downloaded HuggingFace model
-def download_civitai_model(url, model_name, model_type, civitai_token, version_id=None, file_index=None):
+def download_civitai_model(url, model_name, model_type, civitai_token, version_id=None, file_index=None) -> tuple[bool, str]:
try:
is_civitai, is_civitai_api, model_id, url_version_id = check_civitai_url(url)
@@ -186,7 +186,7 @@ def get_sha256_hash_from_file(file_path:str) -> tuple[bool, str]:
for byte_block in iter(lambda: f.read(4096), b""):
sha256_hash.update(byte_block)
- return True, sha256_hash.hexdigest().upper()
+ return True, sha256_hash.hexdigest().lower()
except Exception as e:
return False, str(e)
@@ -247,7 +247,7 @@ def get_modelfile_hash_and_ident_existing_modelfile_exists(model_name:str, model
raise NotImplementedError("Copying a non-LFS file is not implemented.")
lfs = repo_file.lfs # BlobLfsInfo class instance
- download_sha256_hash = lfs.sha256.upper()
+ download_sha256_hash = lfs.sha256.lower()
print(f"Metadata from RepoFile LFS '{repo_file.rfilename}'")
print(f"SHA256: {download_sha256_hash}")
@@ -283,8 +283,8 @@ def get_modelfile_hash_and_ident_existing_modelfile_exists(model_name:str, model
# if NOT successful, the hash contains the Exception
print(f"SHA256 hash generated from local file: '{model_path}'\n{model_sha256_hash}")
- if successfull_HashGeneration and model_sha256_hash == download_sha256_hash:
- message = f"Existing and ident model aleady found for '{os.path.basename(model_path)}'"
+ if successfull_HashGeneration and model_sha256_hash.lower() == download_sha256_hash.lower():
+ message = f"Existing and ident model already found for '{os.path.basename(model_path)}'"
print(message)
send_websocket_message('model_download_progress', {
@@ -315,7 +315,7 @@ def get_modelfile_hash_and_ident_existing_modelfile_exists(model_name:str, model
# lutzapps - added SHA256 checks for already existing ident and downloaded HuggingFace model
-def download_huggingface_model(url, model_name, model_type, repo_id, hf_filename, hf_folder_name, hf_branch_name, hf_token=None):
+def download_huggingface_model(url, model_name, model_type, repo_id, hf_filename, hf_folder_name, hf_branch_name, hf_token=None) -> tuple[bool, str]:
try:
from huggingface_hub import hf_hub_download
@@ -372,7 +372,7 @@ def download_huggingface_model(url, model_name, model_type, repo_id, hf_filename
# lutzapps - added SHA256 check for downloaded CivitAI model
-def download_file(url, download_sha256_hash, file_path, headers=None):
+def download_file(url, download_sha256_hash, file_path, headers=None) -> tuple[bool, str]:
try:
response = requests.get(url, stream=True, headers=headers)
response.raise_for_status()
@@ -428,7 +428,7 @@ def check_downloaded_modelfile(model_path:str, download_sha256_hash:str, platfor
})
successfull_HashGeneration, model_sha256_hash = get_sha256_hash_from_file(model_path)
- if successfull_HashGeneration and model_sha256_hash == download_sha256_hash:
+ if successfull_HashGeneration and model_sha256_hash.lower() == download_sha256_hash.lower():
send_websocket_message('model_download_progress', {
'percentage': 100,
'stage': 'Complete',
diff --git a/official-templates/better-ai-launcher/app/utils/shared_models.py b/official-templates/better-ai-launcher/app/utils/shared_models.py
index c014c02..f29ec04 100644
--- a/official-templates/better-ai-launcher/app/utils/shared_models.py
+++ b/official-templates/better-ai-launcher/app/utils/shared_models.py
@@ -6,7 +6,7 @@ import time
from flask import jsonify
from utils.websocket_utils import (send_websocket_message, active_websockets)
-from utils.app_configs import (get_app_configs, init_global_dict_from_file, pretty_dict)
+from utils.app_configs import (get_app_configs, load_global_dict_from_file, pretty_dict)
### shared_models-v0.9.2 by lutzapps, Nov 5th 2024 ###
@@ -189,7 +189,9 @@ SHARED_MODEL_FOLDERS = {
# helper function called by "inline"-main() and ensure_shared_models_folders()
def init_shared_models_folders(send_SocketMessage:bool=True):
global SHARED_MODEL_FOLDERS
- init_global_dict_from_file(SHARED_MODEL_FOLDERS, SHARED_MODEL_FOLDERS_FILE, "SHARED_MODEL_FOLDERS", SHARED_MODELS_DIR)
+ success, dict = load_global_dict_from_file(SHARED_MODEL_FOLDERS, SHARED_MODEL_FOLDERS_FILE, "SHARED_MODEL_FOLDERS", SHARED_MODELS_DIR)
+ if success:
+ SHARED_MODEL_FOLDERS = dict
if os.path.exists(SHARED_MODEL_FOLDERS_FILE) and send_SocketMessage:
send_websocket_message('extend_ui_helper', {
@@ -341,7 +343,9 @@ def sync_with_app_configs_install_dirs():
# NOTE: this APP_INSTALL_DIRS_FILE is temporary synced with the app_configs dict
def init_app_install_dirs():
global APP_INSTALL_DIRS
- init_global_dict_from_file(APP_INSTALL_DIRS, APP_INSTALL_DIRS_FILE, "APP_INSTALL_DIRS", SHARED_MODELS_DIR)
+ success, dict = load_global_dict_from_file(APP_INSTALL_DIRS, APP_INSTALL_DIRS_FILE, "APP_INSTALL_DIRS", SHARED_MODELS_DIR)
+ if success:
+ APP_INSTALL_DIRS = dict
return
@@ -496,7 +500,9 @@ SHARED_MODEL_APP_MAP = {
# which does a default mapping from app code or (if exists) from external JSON 'SHARED_MODEL_APP_MAP_FILE' file
def init_shared_model_app_map():
global SHARED_MODEL_APP_MAP
- init_global_dict_from_file(SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, "SHARED_MODEL_APP_MAP", SHARED_MODELS_DIR)
+ success, dict = load_global_dict_from_file(SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, "SHARED_MODEL_APP_MAP", SHARED_MODELS_DIR)
+ if success:
+ SHARED_MODEL_APP_MAP = dict
return