Mirror of https://github.com/kodxana/madiator-docker-runpod.git (synced 2024-11-22 10:50:12 +01:00)

Merge pull request #6 from lutzapps/app-consolidations: App Consolidation, app_configs and app_utils, Dockerfile

Commit adf0c26857, 7 changed files with 683 additions and 440 deletions
@@ -7,22 +7,75 @@ FROM ${BASE_IMAGE:-madiator2011/better-base:cuda12.4} AS base
 ARG BASE_IMAGE
 ENV BASE_IMAGE=$BASE_IMAGE
 
+# lutzapps - replaced by above bake build-args
 #FROM madiator2011/better-base:cuda12.4 AS base
 
 # lutzapps - prepare for local development and debugging
 # needed to change the ORDER of "apt-get commands" and move the "update-alternatives" for python3
 # AFTER the "apt-get remove -y python3.10" cmd, OTHERWISE the symlink to python3
 # is broken in the image and the VSCode debugger could not exec "python3" as CMD overwrite
+# also fixed a boring "Blinker" blocking error
 
-# Install Python 3.11, set it as default, and remove Python 3.10
 RUN apt-get update && \
+    ### ---> needed tools for the Installer
     # removed: 2x git nginx ffmpeg (as they are already installed with the base image)
     # added: pigz (for parallel execution of TAR files); zip (for easier folder compression)
-    apt-get install -y python3.11 python3.11-venv python3.11-dev python3.11-distutils \
-    aria2 pigz zip pv rsync zstd libtcmalloc-minimal4 bc && \
-    apt-get remove -y python3.10 python3.10-minimal libpython3.10-minimal libpython3.10-stdlib && \
+    apt-get install -y aria2 pigz zip pv rsync zstd libtcmalloc-minimal4 bc \
+    # add Python 3.11 as the system Python version, serving the Python Flask app
+    python3.11 python3.11-venv python3.11-dev python3.11-distutils && \
+    # do NOT remove Python 3.10, as we need it for "official" app support (e.g. for the kohya_ss VENV)
+    ###apt-get remove -y python3.10 python3.10-minimal libpython3.10-minimal libpython3.10-stdlib && \
+    #
+    # set up an "alias" for "python", symlinked to Python 3.11
+    # (which is the default anyway after this installation of Python 3.11)
     update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1 && \
+    #
+    # set up the "python3" alias for Python 3.11, as this is what the debugger needs (it does not work with 3.10)
     update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1 && \
+    #
+    # VENVs have their own "preferred/supported/recommended" Python version, as e.g.
+    # the "kohya_ss" app only supports Python up to version 3.10 (but not 3.11)
+    #
+    # until here we have a broken "Blinker" installation from some base images before,
+    # and if we try to "update" "Blinker" for Python3.10, e.g. via "pip install --upgrade blinker"
+    # or "pip install blinker==x.y.z", this breaks, as it was installed in an APT bundle,
+    # which cannot be safely upgraded (= uninstall/reinstall)! It breaks as follows:
+    # we get a blinker 1.4 uninstall error, chained by trying to uninstall "distutils":
+    # 8.568 Found existing installation: blinker 1.4
+    # 8.782 error: uninstall-distutils-installed-package
+    # 8.782
+    # 8.782 × Cannot uninstall blinker 1.4
+    # 8.782 ╰─> It is a distutils installed project and thus we cannot accurately determine which files belong to it which would lead to only a partial uninstall.
+    # that not only blocks building the docker image, but later also breaks the "kohya_ss" app during setup,
+    # which tries to install Blinker and python3.10-venv from "setup-runpod.sh":
+    # # Install tk and python3.10-venv
+    # echo "Installing tk and python3.10-venv..."
+    # apt update -y && apt install -y python3-tk python3.10-venv
+    #
+    # Python 3.10 needs to run as Kohya's "official" requirement, and is used in the VENV
+    #
+    # first uninstall the APT bundle package for "Blinker"
+    apt-get remove -y python3-blinker && \
+    # then re-install the unbundled APT package of "Blinker" back,
+    # together with python3.10-venv, which we need to set up the kohya_ss VENV
+    apt-get install -y python3-tk python3.10-venv && \
+    # this re-captures the "python3" alias for Python3.10, but not the "python" alias (it stays on Python3.11)
+    # the "python3.11" and "python3.10" cmds work too
+    # global PIP is 3.11, VENV pip is 3.10
+    #
+    # ---> CUDA 12.4 Toolkit (is already in the 12.4 base-image)
+    wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/x86_64/cuda-keyring_1.1-1_all.deb && \
+    dpkg -i cuda-keyring_1.1-1_all.deb && \
+    ### need to refresh the repository metadata after downloading this NVIDIA package list!!!
+    apt-get update && \
+    apt-get -y install cuda-toolkit-12-4 && \
+    #
+    # ---> get the latest cuDNN 9.x version supporting CUDA 12.x
+    # remove the current dev package, which depends on libcudnn9-cuda-12 and breaks the upgrade otherwise
+    apt-get remove -y --allow-change-held-packages libcudnn9-cuda-12 libcudnn9-dev-cuda-12 && \
+    apt-get -y --allow-change-held-packages install cudnn-cuda-12 && \
+    #
+    # clean-up resources and caches
     apt-get autoremove -y && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/*
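
A quick way to confirm the alias wiring this RUN block sets up ("python"/"python3" on 3.11 system-wide, 3.10 kept alive for VENVs, pip split between both) is a small verification sketch to run inside the built image; it is not part of the commit:

    import subprocess

    def version_of(cmd: str) -> str:
        # tools print their version to stdout or stderr, depending on the tool
        run = subprocess.run([cmd, "--version"], capture_output=True, text=True)
        return (run.stdout or run.stderr).strip()

    # expected per the comments above: python/python3 -> 3.11, python3.10 still present,
    # global pip on 3.11, while an activated kohya VENV maps everything to its own 3.10
    for cmd in ("python", "python3", "python3.10", "python3.11", "pip"):
        print(f"{cmd:10} -> {version_of(cmd)}")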
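
The Blinker dead-end called out in the comments above can also be diagnosed from Python before pip trips over it. A minimal sketch, assuming a Python 3.10 image with the APT-bundled blinker 1.4 present (the helper name is hypothetical):

    # detect APT/distutils-installed packages that pip cannot safely uninstall
    import importlib.metadata

    def is_distutils_installed(package: str) -> bool:
        try:
            dist = importlib.metadata.distribution(package)
        except importlib.metadata.PackageNotFoundError:
            return False
        # such packages typically lack a RECORD file and were not installed by pip,
        # which is exactly what makes "pip uninstall" bail out
        installer = (dist.read_text("INSTALLER") or "").strip()
        return dist.read_text("RECORD") is None or installer != "pip"

    print("blinker needs APT removal first:", is_distutils_installed("blinker"))
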
@@ -28,8 +28,7 @@ from utils.shared_models import (
     SHARED_MODELS_DIR, SHARED_MODEL_FOLDERS, SHARED_MODEL_FOLDERS_FILE, ensure_shared_models_folders,
     APP_INSTALL_DIRS, APP_INSTALL_DIRS_FILE, init_app_install_dirs, # APP_INSTALL_DIRS dict/file/function
     MAP_APPS, sync_with_app_configs_install_dirs, # internal MAP_APPS dict and sync function
-    SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, init_shared_model_app_map, # SHARED_MODEL_APP_MAP dict/file/function
-    write_dict_to_jsonfile, read_dict_from_jsonfile, PrettyDICT # JSON helper functions
+    SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, init_shared_model_app_map # SHARED_MODEL_APP_MAP dict/file/function
 )
 # the "update_model_symlinks()" function replaces the app.py function with the same name
 # and redirects to the same function name "update_model_symlinks()" in the new "utils.shared_models" module
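
The redirect those comments describe is essentially a re-export plus a thin wrapper; a sketch of the shape, using the names imported above:

    from utils.shared_models import update_model_symlinks, SHARED_MODELS_DIR

    def setup_shared_models():
        # keep the old app.py entry point, but delegate the actual work
        jsonResult = update_model_symlinks()
        return SHARED_MODELS_DIR  # the path is now owned by the shared_models module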

@@ -340,92 +339,12 @@ def remove_existing_app_config(app_name):
         return jsonify({'status': 'success', 'message': f'App {app_name} removed successfully'})
     return jsonify({'status': 'error', 'message': f'App {app_name} not found'})
 
-# unused function
-def obsolate_update_model_symlinks():
-    # lutzapps - CHANGE #3 - use the new "shared_models" module for app model sharing
-    # remove this whole now unused function
-    return "replaced by utils.shared_models.update_model_symlinks()"
 
 # modified function
 def setup_shared_models():
     # lutzapps - CHANGE #4 - use the new "shared_models" module for app model sharing
     jsonResult = update_model_symlinks()
 
     return SHARED_MODELS_DIR # shared_models_dir is now owned and managed by the "shared_models" utils module
-    # remove below unused code
-
-    shared_models_dir = '/workspace/shared_models'
-    model_types = ['Stable-diffusion', 'VAE', 'Lora', 'ESRGAN']
-
-    # Create shared models directory if it doesn't exist
-    os.makedirs(shared_models_dir, exist_ok=True)
-
-    for model_type in model_types:
-        shared_model_path = os.path.join(shared_models_dir, model_type)
-        # Create shared model type directory if it doesn't exist
-        os.makedirs(shared_model_path, exist_ok=True)
-
-    # Create a README file in the shared models directory
-    readme_path = os.path.join(shared_models_dir, 'README.txt')
-    if not os.path.exists(readme_path):
-        with open(readme_path, 'w') as f:
-            f.write("Upload your models to the appropriate folders:\n\n")
-            f.write("- Stable-diffusion: for Stable Diffusion models\n")
-            f.write("- VAE: for VAE models\n")
-            f.write("- Lora: for LoRA models\n")
-            f.write("- ESRGAN: for ESRGAN upscaling models\n\n")
-            f.write("These models will be automatically linked to all supported apps.")
-
-    print(f"Shared models directory created at {shared_models_dir}")
-    print("Shared models setup completed.")
-
-    return shared_models_dir
-
-# unused function
-def obsolate_update_model_symlinks():
-    # lutzapps - CHANGE #5 - use the new "shared_models" module for app model sharing
-    # remove this whole now unused function
-    return "replaced by utils.shared_models.update_model_symlinks()"
-
-    shared_models_dir = '/workspace/shared_models'
-    apps = {
-        'stable-diffusion-webui': '/workspace/stable-diffusion-webui/models',
-        'stable-diffusion-webui-forge': '/workspace/stable-diffusion-webui-forge/models',
-        'ComfyUI': '/workspace/ComfyUI/models'
-    }
-    model_types = ['Stable-diffusion', 'VAE', 'Lora', 'ESRGAN']
-
-    for model_type in model_types:
-        shared_model_path = os.path.join(shared_models_dir, model_type)
-        if not os.path.exists(shared_model_path):
-            continue
-
-        for app, app_models_dir in apps.items():
-            if app == 'ComfyUI':
-                if model_type == 'Stable-diffusion':
-                    app_model_path = os.path.join(app_models_dir, 'checkpoints')
-                elif model_type == 'Lora':
-                    app_model_path = os.path.join(app_models_dir, 'loras')
-                elif model_type == 'ESRGAN':
-                    app_model_path = os.path.join(app_models_dir, 'upscale_models')
-                else:
-                    app_model_path = os.path.join(app_models_dir, model_type.lower())
-            else:
-                app_model_path = os.path.join(app_models_dir, model_type)
-
-            # Create the app model directory if it doesn't exist
-            os.makedirs(app_model_path, exist_ok=True)
-
-            # Create symlinks for each file in the shared model directory
-            for filename in os.listdir(shared_model_path):
-                src = os.path.join(shared_model_path, filename)
-                dst = os.path.join(app_model_path, filename)
-                if os.path.isfile(src) and not os.path.exists(dst):
-                    os.symlink(src, dst)
-
-    print("Model symlinks updated.")
 
 def update_symlinks_periodically():
     while True:
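
One detail worth keeping from the removed code (now handled centrally in utils.shared_models): ComfyUI names its model folders differently from the A1111-style apps. A data-driven sketch of that mapping; the dict literal is illustrative:

    # ComfyUI uses different folder names for the same shared model types
    COMFYUI_FOLDER_MAP = {
        'Stable-diffusion': 'checkpoints',
        'Lora': 'loras',
        'ESRGAN': 'upscale_models',
    }

    def app_folder_for(app: str, model_type: str) -> str:
        if app == 'ComfyUI':
            return COMFYUI_FOLDER_MAP.get(model_type, model_type.lower())
        return model_type  # A1111/Forge keep the shared folder name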

@@ -436,57 +355,6 @@ def start_symlink_update_thread():
     thread = threading.Thread(target=update_symlinks_periodically, daemon=True)
     thread.start()
 
-# unused function
-def obsolate_recreate_symlinks():
-    # lutzapps - CHANGE #6 - use the new "shared_models" module for app model sharing
-    # remove this whole now unused function
-    return "replaced by utils.shared_models.update_model_symlinks()"
-
-    shared_models_dir = '/workspace/shared_models'
-    apps = {
-        'stable-diffusion-webui': '/workspace/stable-diffusion-webui/models',
-        'stable-diffusion-webui-forge': '/workspace/stable-diffusion-webui-forge/models',
-        'ComfyUI': '/workspace/ComfyUI/models'
-    }
-    model_types = ['Stable-diffusion', 'VAE', 'Lora', 'ESRGAN']
-
-    for model_type in model_types:
-        shared_model_path = os.path.join(shared_models_dir, model_type)
-        if not os.path.exists(shared_model_path):
-            continue
-
-        for app, app_models_dir in apps.items():
-            if app == 'ComfyUI':
-                if model_type == 'Stable-diffusion':
-                    app_model_path = os.path.join(app_models_dir, 'checkpoints')
-                elif model_type == 'Lora':
-                    app_model_path = os.path.join(app_models_dir, 'loras')
-                elif model_type == 'ESRGAN':
-                    app_model_path = os.path.join(app_models_dir, 'upscale_models')
-                else:
-                    app_model_path = os.path.join(app_models_dir, model_type.lower())
-            else:
-                app_model_path = os.path.join(app_models_dir, model_type)
-
-            # Remove existing symlinks
-            if os.path.islink(app_model_path):
-                os.unlink(app_model_path)
-            elif os.path.isdir(app_model_path):
-                shutil.rmtree(app_model_path)
-
-            # Create the app model directory if it doesn't exist
-            os.makedirs(app_model_path, exist_ok=True)
-
-            # Create symlinks for each file in the shared model directory
-            for filename in os.listdir(shared_model_path):
-                src = os.path.join(shared_model_path, filename)
-                dst = os.path.join(app_model_path, filename)
-                if os.path.isfile(src) and not os.path.exists(dst):
-                    os.symlink(src, dst)
-
-    return "Symlinks recreated successfully."
 
 # modified function
 @app.route('/recreate_symlinks', methods=['POST'])
 def recreate_symlinks_route():

@@ -494,13 +362,6 @@ def recreate_symlinks_route():
     jsonResult = update_model_symlinks()
 
     return jsonResult
-    # remove below unused code
-
-    try:
-        message = recreate_symlinks()
-        return jsonify({'status': 'success', 'message': message})
-    except Exception as e:
-        return jsonify({'status': 'error', 'message': str(e)})
 
 # modified function
 @app.route('/create_shared_folders', methods=['POST'])

@@ -508,39 +369,6 @@ def create_shared_folders():
     # lutzapps - CHANGE #8 - use the new "shared_models" module for app model sharing
     jsonResult = ensure_shared_models_folders()
     return jsonResult
-    # remove below unused code
-
-    try:
-        shared_models_dir = '/workspace/shared_models'
-        model_types = ['Stable-diffusion', 'Lora', 'embeddings', 'VAE', 'hypernetworks', 'aesthetic_embeddings', 'controlnet', 'ESRGAN']
-
-        # Create shared models directory if it doesn't exist
-        os.makedirs(shared_models_dir, exist_ok=True)
-
-        for model_type in model_types:
-            shared_model_path = os.path.join(shared_models_dir, model_type)
-            # Create shared model type directory if it doesn't exist
-            os.makedirs(shared_model_path, exist_ok=True)
-
-        # Create a README file in the shared models directory
-        readme_path = os.path.join(shared_models_dir, 'README.txt')
-        if not os.path.exists(readme_path):
-            with open(readme_path, 'w') as f:
-                f.write("Upload your models to the appropriate folders:\n\n")
-                f.write("- Stable-diffusion: for Stable Diffusion checkpoints\n")
-                f.write("- Lora: for LoRA models\n")
-                f.write("- embeddings: for Textual Inversion embeddings\n")
-                f.write("- VAE: for VAE models\n")
-                f.write("- hypernetworks: for Hypernetwork models\n")
-                f.write("- aesthetic_embeddings: for Aesthetic Gradient embeddings\n")
-                f.write("- controlnet: for ControlNet models\n")
-                f.write("- ESRGAN: for ESRGAN upscaling models\n\n")
-                f.write("These models will be automatically linked to all supported apps.")
-
-        return jsonify({'status': 'success', 'message': 'Shared model folders created successfully.'})
-    except Exception as e:
-        return jsonify({'status': 'error', 'message': str(e)})
 
 def save_civitai_token(token):
     with open(CIVITAI_TOKEN_FILE, 'w') as f:

@@ -644,7 +472,7 @@ def get_model_types_route():
             'desc': model_type_description
         }
 
-        i = i + 1
+        i += 1
 
     return model_types_dict

@@ -2418,7 +2418,7 @@
     <div class="download-info">
         <span class="download-speed"></span>
         <span class="download-eta"></span>
-    </div>4
+    </div><!-- 4 lutzapps - remove a typo "4" -->
     <div class="install-stage"></div>
 </div>
 </div>

@@ -2838,6 +2838,7 @@
 function initializeUI() {
     updateStatus();
+    // TODO: need a way so these 2 functions don't pollute the logs every second / 5 seconds
     setInterval(updateStatus, 5000);
     setInterval(updateLogs, 1000);
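
One way to address that TODO without touching the 1-second/5-second poll intervals is to deduplicate on the server side; a sketch of the idea for the Flask app (the helper is hypothetical, not part of this commit):

    import hashlib

    _last_logs_digest = None

    def logs_if_changed(log_text: str):
        # answer a poll with None when nothing changed, so the endpoint can
        # reply with an empty body instead of re-sending (and re-logging) everything
        global _last_logs_digest
        digest = hashlib.sha256(log_text.encode()).hexdigest()
        if digest == _last_logs_digest:
            return None
        _last_logs_digest = digest
        return log_text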

@@ -3281,15 +3282,6 @@
     });
 }
 
-// lutzapps - obsolete function (can be deleted)
-function copyToClipboard(text) {
-    navigator.clipboard.writeText(text).then(() => {
-        alert('URL copied to clipboard!');
-    }, (err) => {
-        console.error('Could not copy text: ', err);
-    });
-}
-
 // Call this function when the Models tab is opened
 document.querySelector('.navbar-tabs a[onclick="openTab(event, \'models-tab\')"]').addEventListener('click', function() {
     //alert("querySelector");

@@ -3,71 +3,251 @@ import xml.etree.ElementTree as ET
 import requests
 import json
 
-def fetch_app_info():
-    url = "https://better.s3.madiator.com/"
-    response = requests.get(url)
-    root = ET.fromstring(response.content)
-
-    app_info = {}
-    for content in root.findall('{http://s3.amazonaws.com/doc/2006-03-01/}Contents'):
-        key = content.find('{http://s3.amazonaws.com/doc/2006-03-01/}Key').text
-        size = int(content.find('{http://s3.amazonaws.com/doc/2006-03-01/}Size').text)
-        app_name = key.split('/')[0]
-
-        # lutzapps - fix "bug" in key element of the S3 XML document
-        # all other three apps have a "key" element like "bcomfy/bcomfy.tar.gz" or "bforge/bforge.tar.gz",
-        # with their "app_name" prefix + "/" + tar_filename
-        # only kohya is missing this "app_name" prefix and has a key element of only its tar_filename "bkohya.tar.gz"
-        # this results in the app_name "bkohya.tar.gz", instead of only "bkohya"
-        # TODO for madiator - move the "bkohya.tar.gz" into a subfolder "bkohya" in your S3 bucket
-        app_name = app_name.replace(".tar.gz", "") # cut any extension postfixes resulting from the wrong key.split() command
-
-        if app_name in ['ba1111', 'bcomfy', 'bforge', 'bkohya']: # lutzapps - added new kohya app
-            app_info[app_name] = {
-                'download_url': f"https://better.s3.madiator.com/{key}",
-                'size': size
-            }
-
-    return app_info
+# this is the replacement for the XML manifest, and defines all app_configs in full detail
+APP_CONFIGS_MANIFEST_URL = "https://better.s3.madiator.com/app_configs.json"
+# if this JSON cannot be downloaded, the below code defaults apply
+# this app_configs dict can also be generated from code when at least one of the following
+# 2 ENV vars is found with the following values:
+# 1. LOCAL_DEBUG = 'True' # this ENV var should not be passed when in the RUNPOD environment, as it disables the CF proxy Urls of the App-Manager
+# and this ENV var also controls some other aspects of the app.
+#
+# 2. APP_CONFIGS_FILE = 'True' # only exists for this one purpose, to generate the below dict as the file
+# "/workspace/_app_configs.json", which then can be uploaded to the above defined APP_CONFIGS_MANIFEST_URL
+# NOTE:
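
A sketch of how such a JSON manifest could be fetched with a graceful fallback to the in-code defaults below (the URL is the one defined above; the helper name and error handling are illustrative):

    import requests

    def fetch_app_configs_manifest(url: str, code_defaults: dict) -> dict:
        try:
            response = requests.get(url, timeout=10)
            response.raise_for_status()
            return response.json()  # the manifest replaces the code defaults
        except (requests.RequestException, ValueError) as e:
            print(f"Manifest '{url}' not usable ({e}), using code defaults!")
            return code_defaults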

 app_configs = {
     'bcomfy': {
+        'id': 'bcomfy',
         'name': 'Better Comfy UI',
         'command': 'cd /workspace/bcomfy && . ./bin/activate && cd /workspace/ComfyUI && python main.py --listen --port 3000 --enable-cors-header',
         'venv_path': '/workspace/bcomfy',
         'app_path': '/workspace/ComfyUI',
         'port': 3000,
+        'download_url': 'https://better.s3.madiator.com/bcomfy/bcomfy.tar.gz', # (2024-11-08 18:50:00Z - lutzapps)
+        #'venv_uncompressed_size': 6452737952, # uncompressed size of the tar-file (in bytes) - lutzapps new version
+        'venv_uncompressed_size': 6155295493, # uncompressed size of the tar-file (in bytes) - original version
+        #'archive_size': 3389131462 # tar filesize (in bytes) - lutzapps new version
+        'archive_size': 3179595118, # tar filesize (in bytes) - original version
+        #'sha256_hash': '18e7d71b75656924f98d5b7fa583aa7c81425f666a703ef85f7dd0acf8f60886', # lutzapps new version
+        'sha256_hash': '7fd60808a120a1dd05287c2a9b3d38b3bdece84f085abc156e0a2ee8e6254b84', # original version
+        'repo_url': 'https://github.com/comfyanonymous/ComfyUI.git',
+        'branch_name': '', # empty branch_name means default = 'master'
+        'commit': '', # or commit hash (NYI)
+        'recursive': False,
+        'refresh': False,
+        'custom_nodes': [ # the following custom_nodes will be git cloned and installed with "pip install -r requirements.txt" (in Testing)
+            {
+                'name': 'ComfyUI-Manager (ltdrdata)', # this node is installed in the VENV
+                'path': 'ComfyUI-Manager',
+                'repo_url': 'https://github.com/ltdrdata/ComfyUI-Manager.git'
+            },
+            {
+                'name': 'ComfyUI-Essentials (cubic)', # this node is installed in the VENV
+                'path': 'ComfyUI_essentials',
+                'repo_url': 'https://github.com/cubiq/ComfyUI_essentials'
+            }
+            ### planned custom nodes - To Be Discussed
+            # {
+            #     'name': 'rgthree comfy',
+            #     'path': 'rgthree-comfy',
+            #     'repo_url': 'https://github.com/rgthree/rgthree-comfy'
+            # },
+            # {
+            #     'name': 'was node suite comfyui',
+            #     'path': 'was-node-suite-comfyui',
+            #     'repo_url': 'https://github.com/WASasquatch/was-node-suite-comfyui'
+            # },
+            # {
+            #     'name': 'comfyui controlnet aux',
+            #     'path': 'comfyui_controlnet_aux',
+            #     'repo_url': 'https://github.com/Fannovel16/comfyui_controlnet_aux'
+            # },
+            # {
+            #     'name': 'x-flux-comfyui (XLabs-AI)',
+            #     'path': 'x-flux-comfyui',
+            #     'repo_url': 'https://github.com/XLabs-AI/x-flux-comfyui'
+            # },
+            # {
+            #     'name': 'ComfyUI-GGUF (city96)',
+            #     'path': 'ComfyUI-GGUF',
+            #     'repo_url': 'https://github.com/city96/ComfyUI-GGUF'
+            # },
+            # {
+            #     'name': 'ComfyUI-Florence2 (kijai)',
+            #     'path': 'ComfyUI-Florence2F',
+            #     'repo_url': 'https://github.com/kijai/ComfyUI-Florence2'
+            # },
+            # {
+            #     'name': 'ComfyUI-KJNodes (kijai)',
+            #     'path': 'ComfyUI-KJNodes',
+            #     'repo_url': 'https://github.com/kijai/ComfyUI-KJNodes'
+            # },
+            # {
+            #     'name': 'ComfyUI_UltimateSDUpscale (ssitu)',
+            #     'path': 'ComfyUI_UltimateSDUpscale',
+            #     'repo_url': 'https://github.com/ssitu/ComfyUI_UltimateSDUpscale'
+            # },
+            # {
+            #     'name': 'ControlAltAI Nodes (gseth)',
+            #     'path': 'ControlAltAI-Nodes',
+            #     'repo_url': 'https://github.com/gseth/ControlAltAI-Nodes'
+            # },
+            # {
+            #     'name': 'ComfyUI Easy-Use (yolain)',
+            #     'path': 'ComfyUI-Easy-Use',
+            #     'repo_url': 'https://github.com/yolain/ComfyUI-Easy-Use'
+            # },
+            # {
+            #     'name': 'ComfyUI Impact-Pack (tdrdata)',
+            #     'path': 'ComfyUI-Impact-Pack',
+            #     'repo_url': 'https://github.com/ltdrdata/ComfyUI-Impact-Pack'
+            # }
+        ]
     },
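
A sketch of how such a 'custom_nodes' list could be consumed at install time: clone each repo into ComfyUI's custom_nodes folder and pip-install its requirements inside the app VENV (the helper below is illustrative; the real installer logic lives elsewhere in this PR):

    import os, subprocess

    def install_custom_nodes(app_config: dict) -> None:
        nodes_dir = os.path.join(app_config['app_path'], 'custom_nodes')
        for node in app_config.get('custom_nodes', []):
            node_path = os.path.join(nodes_dir, node['path'])
            if not os.path.exists(node_path):
                subprocess.run(['git', 'clone', node['repo_url'], node_path], check=True)
            requirements = os.path.join(node_path, 'requirements.txt')
            if os.path.exists(requirements):  # not every node ships requirements
                subprocess.run(['pip', 'install', '-r', requirements], check=True)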
     'bforge': {
+        'id': 'bforge', # app_name
         'name': 'Better Forge',
         'command': 'cd /workspace/bforge && . ./bin/activate && cd /workspace/stable-diffusion-webui-forge && ./webui.sh -f --listen --enable-insecure-extension-access --api --port 7862',
         'venv_path': '/workspace/bforge',
         'app_path': '/workspace/stable-diffusion-webui-forge',
         'port': 7862,
+        'download_url': 'https://better.s3.madiator.com/bforge/bforge.tar.gz',
+        'venv_uncompressed_size': 7689838771, # uncompressed size of the tar-file (in bytes)
+        'archive_size': 3691004078, # tar filesize (in bytes)
+        'sha256_hash': 'e87dae2324a065944c8d36d6ac4310af6d2ba6394f858ff04a34c51aa5f70bfb',
+        'repo_url': 'https://github.com/lllyasviel/stable-diffusion-webui-forge.git',
+        'branch_name': '', # empty branch_name means default = 'master'
+        'commit': '', # or commit hash (NYI)
+        'clone_recursive': False,
+        'refresh': False
     },
     'ba1111': {
+        'id': 'ba1111', # app_name
         'name': 'Better A1111',
         'command': 'cd /workspace/ba1111 && . ./bin/activate && cd /workspace/stable-diffusion-webui && ./webui.sh -f --listen --enable-insecure-extension-access --api --port 7863',
         'venv_path': '/workspace/ba1111',
         'app_path': '/workspace/stable-diffusion-webui',
         'port': 7863,
+        'download_url': 'https://better.s3.madiator.com/ba1111/ba1111.tar.gz',
+        'venv_uncompressed_size': 6794367826, # uncompressed size of the tar-file (in bytes)
+        'archive_size': 3383946179, # tar filesize (in bytes)
+        'sha256_hash': '1d70276bc93f5f992a2e722e76a469bf6a581488fa1723d6d40739f3d418ada9',
+        'repo_url': 'https://github.com/AUTOMATIC1111/stable-diffusion-webui.git',
+        'branch_name': '', # empty branch_name means default = 'master'
+        'commit': '', # or commit hash (NYI)
+        'clone_recursive': False,
+        'refresh': False
     },
     'bkohya': {
+        'id': 'bkohya', # app_name
         'name': 'Better Kohya',
-        'command': 'cd /workspace/bkohya && . ./bin/activate && cd /workspace/kohya_ss && ./gui.sh --listen --port 7860',
+        'command': 'cd /workspace/bkohya && . ./bin/activate && cd /workspace/kohya_ss && python ./kohya_gui.py --headless --share --server_port 7860', # TODO!! check ./kohya_gui.py
+        ### for Gradio supported reverse proxy:
+        # --share -> share the gradio UI
+        # --root_path ROOT_PATH -> 'root_path' for Gradio to enable reverse proxy support, e.g. /kohya_ss
+        # --listen LISTEN -> IP to listen on for connections to Gradio
+        #
+        # usage: kohya_gui.py [-h] [--config CONFIG] [--debug] [--listen LISTEN]
+        #                     [--username USERNAME] [--password PASSWORD]
+        #                     [--server_port SERVER_PORT] [--inbrowser] [--share]
+        #                     [--headless] [--language LANGUAGE] [--use-ipex]
+        #                     [--use-rocm] [--do_not_use_shell] [--do_not_share]
+        #                     [--requirements REQUIREMENTS] [--root_path ROOT_PATH]
+        #                     [--noverify]
+        #
+        # options:
+        #   -h, --help           show this help message and exit
+        #   --config CONFIG      Path to the toml config file for interface defaults
+        #   --debug              Debug on
+        #   --listen LISTEN      IP to listen on for connections to Gradio
+        #   --username USERNAME  Username for authentication
+        #   --password PASSWORD  Password for authentication
+        #   --server_port SERVER_PORT
+        #                        Port to run the server listener on
+        #   --inbrowser          Open in browser
+        #   --share              Share the gradio UI
+        #   --headless           Is the server headless
+        #   --language LANGUAGE  Set custom language
+        #   --use-ipex           Use IPEX environment
+        #   --use-rocm           Use ROCm environment
+        #   --do_not_use_shell   Enforce not to use shell=True when running external
+        #                        commands
+        #   --do_not_share       Do not share the gradio UI
+        #   --requirements REQUIREMENTS
+        #                        requirements file to use for validation
+        #   --root_path ROOT_PATH
+        #                        'root_path' for Gradio to enable reverse proxy
+        #                        support, e.g. /kohya_ss
+        #   --noverify           Disable requirements verification
         'venv_path': '/workspace/bkohya',
         'app_path': '/workspace/kohya_ss',
         'port': 7860,
+        'download_url': 'https://better.s3.madiator.com/kohya.tar.gz', # (2024-11-08 13:13:00Z) - lutzapps
+        'venv_uncompressed_size': 12128345264, # uncompressed size of the tar-file (in bytes)
+        'archive_size': 6314758227, # tar filesize (in bytes)
+        'sha256_hash': '9a0c0ed5925109e82973d55e28f4914fff6728cfb7f7f028a62e2ec1a9e4f60a',
+        'repo_url': 'https://github.com/bmaltais/kohya_ss.git',
+        'branch_name': 'sd3-flux.1', # make sure we use the Kohya branch with FLUX support
+        # this branch also uses a 'sd-scripts' HEAD branch of 'SD3', which gets automatically checked out too
+        'commit': '', # or commit hash (NYI)
+        'clone_recursive': True, # is a recursive clone
+        'refresh': False
     }
 }
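
Each entry above carries everything needed to start its app; a sketch of how a launcher could consume an entry (assumed shape, mirroring the 'command', 'name' and 'port' keys):

    import subprocess

    def launch_app(app_name: str) -> subprocess.Popen:
        app_config = app_configs[app_name]
        # the 'command' strings rely on shell features (cd, &&, . ./bin/activate),
        # so they have to run through a shell
        proc = subprocess.Popen(app_config['command'], shell=True)
        print(f"launched {app_config['name']} on port {app_config['port']}")
        return proc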

-def update_app_configs():
+# lutzapps - not used anymore TODO: remove later
+""" def fetch_app_info():
+    manifest_url = "https://better.s3.madiator.com/"
+    download_base_url = "https://better.s3.madiator.com/" # could be a different base than the manifest file
+
+    app_info = {}
+
+    try: # be graceful when the server is not reachable, be it S3 or anything else
+        response = requests.get(manifest_url)
+        root = ET.fromstring(response.content)
+
+        for content in root.findall('{http://s3.amazonaws.com/doc/2006-03-01/}Contents'):
+            app_name_and_url = content.find('{http://s3.amazonaws.com/doc/2006-03-01/}Key').text
+
+            app_name = app_name_and_url.split('/')[0] # e.g. "bkohya/bkohya.tar.gz" -> "bkohya"
+            download_url = os.path.join(download_base_url, app_name_and_url)
+
+            if not (app_name in ['ba1111', 'bcomfy', 'bforge', 'bkohya']):
+                continue # skip unsupported app
+
+            # load code defaults
+            archive_size = app_configs[app_name]["archive_size"]
+            venv_uncompressed_size = app_configs[app_name]["venv_uncompressed_size"]
+            sha256_hash = app_configs[app_name]["sha256_hash"]
+
+            try: # try to find overwrites of the code defaults
+                archive_size = int(content.find('archive_size').text)
+                venv_uncompressed_size = int(content.find('{http://s3.amazonaws.com/doc/2006-03-01/}venv_uncompressed_size').text)
+                sha256_hash = int(content.find('{http://s3.amazonaws.com/doc/2006-03-01/}sha256_hash').text)
+            except: # swallow any exception, mainly from not being defined (yet) in the XML manifest
+                print(f"App '{app_name}' Metadata could not be found in manifest '{manifest_url}', using code defaults!")
+
+            app_info[app_name] = {
+                'download_url': download_url,
+                'archive_size': archive_size,
+                'venv_uncompressed_size': venv_uncompressed_size, # TODO: provide in XML manifest
+                'sha256_hash': sha256_hash # TODO: provide in XML manifest
+            }
+
+    except requests.RequestException as e: # server not reachable, return empty dict
+        print(f"Manifest Url '{manifest_url}' not reachable, using code defaults!")
+
+    return app_info
+"""
+
+# lutzapps - not used anymore TODO: remove later
+""" def update_app_configs():
     app_info = fetch_app_info()
     for app_name, info in app_info.items():
         if app_name in app_configs:
-            app_configs[app_name].update(info)
+            app_configs[app_name].update(info) """
 
-def get_app_configs():
+def get_app_configs() -> dict:
     return app_configs
 
 def add_app_config(app_name, config):

@@ -78,7 +258,8 @@ def remove_app_config(app_name):
         del app_configs[app_name]
 
 # Update app_configs when this module is imported
-update_app_configs()
+# lutzapps - not used anymore TODO: remove later
+#update_app_configs()
 
 
 ### lutzapps section

@@ -108,7 +289,7 @@ def write_dict_to_jsonfile(dict:dict, json_filepath:str, overwrite:bool=False) -
     return True, "" # success
 
 # helper function called by init_app_install_dirs(), init_shared_model_app_map(), init_shared_models_folders() and init_debug_settings()
-def read_dict_from_jsonfile(json_filepath:str) -> dict:
+def read_dict_from_jsonfile(json_filepath:str) -> tuple[dict, str]:
     # Read JSON file from 'json_filepath' and return it as 'dict'
 
     try:
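
The new tuple return makes failures explicit instead of silent; a sketch of the contract (the body shown is illustrative, the real one continues in the hunk above):

    import json, os

    def read_dict_from_jsonfile(json_filepath: str) -> tuple[dict, str]:
        try:
            if not os.path.exists(json_filepath):
                return {}, f"JSON file '{json_filepath}' not found"
            with open(json_filepath, "r") as f:
                return json.load(f), ""  # success: data plus empty error message
        except Exception as e:
            return {}, f"ERROR reading '{json_filepath}': {e}"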

@@ -135,9 +316,9 @@ def pretty_dict(dict:dict) -> str:
 
     return dict_string
 
-# helper function for "init_app_install_dirs()", "init_shared_model_app_map()", "init_shared_models_folders()" and "inir_debug_settings()"
-def init_global_dict_from_file(dict:dict, dict_filepath:str, dict_description:str, SHARED_MODELS_DIR:str="") -> bool:
-    # load or initialize the 'dict' for 'dict_description' from 'dict_filepath'
+# helper function for "init_app_install_dirs()", "init_shared_model_app_map()", "init_shared_models_folders()" and "init_debug_settings()"
+def load_global_dict_from_file(dict:dict, dict_filepath:str, dict_description:str, SHARED_MODELS_DIR:str="", write_file:bool=True) -> tuple[bool, dict]:
+    # returns the 'dict' for 'dict_description' from 'dict_filepath'
 
     try:
         if not SHARED_MODELS_DIR == "" and not os.path.exists(SHARED_MODELS_DIR):

@@ -170,51 +351,40 @@ def init_global_dict_from_file(dict:dict, dict_filepath:str, dict_description:st
         print(f"\nUsing {'external' if dict_filepath_found else 'default'} '{dict_description}':\n{pretty_dict(dict)}")
 
     except Exception as e:
-        error_msg = f"ERROR in shared_models:init_global_dict_from_file() - initializing dict Map File '{dict_filepath}'\nException: {str(e)}"
-        print(error_msg)
+        print(f"ERROR in shared_models:load_global_dict_from_file() - initializing dict Map File '{dict_filepath}'\nException: {str(e)}")
 
-        return False, error_msg
+        return False, {}
 
-    return True, "" # success
+    return True, dict # success
 
 DEBUG_SETTINGS_FILE = "/workspace/_debug_settings.json"
 DEBUG_SETTINGS = {
     # these settings will be READ:
-    "manifests": { # uncompressed sizes of the tar-files
-        "bcomfy": {
-            "venv_uncompressed_size": 6155283197,
-            "sha256_hash": ""
-        },
-        "ba1111": {
-            "venv_uncompressed_size": 6794355530,
-            "sha256_hash": ""
-        },
-        "bforge": {
-            "venv_uncompressed_size": 7689838771,
-            "sha256_hash": ""
-        },
-        "bkohya": {
-            "venv_uncompressed_size": 12192767148,
-            "sha256_hash": ""
-        }
-    },
-    "installer_codeversion": "2", # can be "1" (original) or "2" (fast)
-    "delete_tarfile_after_download": "1", # can be set to "0" to test only local unpack time and github setup
-    "use_bkohya_tar_folder_fix": "1", # the fix unpacks to "/workspace" and not to "/workspace/bkohya"
-    "use_bkohya_local_venv_symlink": "1", # when active, creates a folder symlink "venv" in "/workspace/kohya_ss" -> "/workspace/bkohya" VENV
+    "APP_CONFIGS_MANIFEST_URL": "", # this setting, when not blank, overwrites the global APP_CONFIGS_MANIFEST_URL
+    "installer_codeversion": "v2", # can be "v1" (original) or "v2" (fast)
+    "delete_tar_file_after_download": True, # can be set to False to test only local unpack time and github setup
+    "create_bkohya_to_local_venv_symlink": True, # when True, creates a folder symlink "venv" in "/workspace/kohya_ss" -> the "/workspace/bkohya" VENV
+    "skip_to_github_stage": False, # when True, skip the download and decompression stages and go directly to the GH repo installation
     # these settings will be WRITTEN:
-    "used_local_tar": "0", # works together with the above TAR local caching
-    "app_name": "",
-    "tar_filename": "",
-    "download_url": "",
-    "total_duration_download": "0",
-    "total_duration_unpack": "0",
-    "total_duration": "0"
+    "app_name": "", # last app_name the code ran on
+    "used_local_tarfile": True, # works together with the above TAR local caching
+    "tar_filename": "", # last local tar_filename used
+    "download_url": "", # last used tar download_url
+    "total_duration_download": "00:00:00", # timespan-str "hh:mm:ss"
+    "total_duration_unpack": "00:00:00", # timespan-str "hh:mm:ss"
+    "total_duration": "00:00:00" # timespan-str "hh:mm:ss"
 }
 
 def init_debug_settings():
     global DEBUG_SETTINGS
-    init_global_dict_from_file(DEBUG_SETTINGS, DEBUG_SETTINGS_FILE, "DEBUG_SETTINGS")
+
+    local_debug = os.environ.get('LOCAL_DEBUG', 'False') # support local browsing for development/debugging
+    generate_debug_settings_file = os.environ.get('DEBUG_SETTINGS_FILE', 'False') # generate the DEBUG_SETTINGS_FILE, if it does not exist already
+    write_file_if_not_exist = local_debug == 'True' or generate_debug_settings_file == 'True'
+
+    success, dict = load_global_dict_from_file(DEBUG_SETTINGS, DEBUG_SETTINGS_FILE, "DEBUG_SETTINGS", write_file=write_file_if_not_exist)
+    if success:
+        DEBUG_SETTINGS = dict
 
     # read from DEBUG_SETTINGS
     # installer_codeversion = DEBUG_SETTINGS['installer_codeversion'] # read from DEBUG_SETTINGS
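
write_debug_setting(), referenced in the next hunk, plausibly pairs with these helpers as a write-through update; a sketch (the actual implementation is not shown in this diff):

    def write_debug_setting(setting_name: str, setting_value):
        # update the in-memory dict and persist it, so the last run's values
        # (tar_filename, durations, ...) survive for the next debugging session
        DEBUG_SETTINGS[setting_name] = setting_value
        write_dict_to_jsonfile(DEBUG_SETTINGS, DEBUG_SETTINGS_FILE, overwrite=True)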

@@ -233,14 +403,60 @@ def write_debug_setting(setting_name:str, setting_value:str):
 
 # lutzapps - init some settings from DEBUG_SETTINGS_FILE
 init_debug_settings()
 
-# lutzapps - add kohya_ss support and required local VENV
+APP_CONFIGS_FILE = APP_CONFIGS_MANIFEST_URL # default is the online manifest url defined as "master"
+# can be overwritten with DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL'], e.g. to point to "/workspace/_app_configs.json",
+# which is the file that is generated when the ENV var LOCAL_DEBUG='True' or the ENV var APP_CONFIGS_FILE='True'
+# NOTE: an existing serialized dict in the "/workspace" folder will never be overwritten again from the code defaults,
+# and "wins" against the code-defaults. So even changes in the source-code for these dicts will NOT be used
+# when a local file exists. The idea here is that it is possible to overwrite code-defaults.
+# BUT as long as the APP_CONFIGS_MANIFEST_URL is not overwritten, the global "app_configs" dict will always be loaded
+# from the central S3 server, or whatever else is defined.
+# the only way to overwrite this url is via the DEBUG_SETTINGS_FILE "/workspace/_debug_settings.json";
+# the default source-code setting for DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL'] is "" (an empty string),
+# which still makes the default APP_CONFIGS_MANIFEST_URL the central master.
+# only when this setting is not empty can it win against the central url, but also only when the Url is valid (local or remote);
+# should there be an invalid Url (central or local), or any other problem, then the code-defaults will be used.
+#
+# The DEBUG_SETTINGS_FILE is a dict which helps during debugging and testing of APP installations,
+# and generating ENV TAR files.
+# It will also NOT be generated as an external FILE, unless the same 2 ENV vars, which control the APP_CONFIGS_FILE generation, are set:
+# LOCAL_DEBUG='True' or APP_CONFIGS_FILE='True'
+#
+# SUMMARY: the DEBUG_SETTINGS and APP_CONFIGS (aka app_configs in code) will never be written to /workspace
+# when the IMAGE is used normally.
+
+def init_app_configs():
+    global APP_CONFIGS_FILE
+    global app_configs
+
+    # check for an overwrite of the APP_CONFIGS_MANIFEST_URL
+    if not DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL'] == "":
+        APP_CONFIGS_FILE = DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL']
+
+    local_debug = os.environ.get('LOCAL_DEBUG', 'False') # support local browsing for development/debugging
+    generate_app_configs_file = os.environ.get('APP_CONFIGS_FILE', 'False') # generate the APP_CONFIGS_FILE, if it does not exist already
+    write_file_if_not_exists = local_debug == 'True' or generate_app_configs_file == 'True'
+
+    success, dict = load_global_dict_from_file(app_configs, APP_CONFIGS_FILE, "APP_CONFIGS", write_file=write_file_if_not_exists)
+
+    if success:
+        app_configs = dict # overwrite code-defaults (from local or external settings)
+    #else app_configs = <code defaults already initialized>
+
+    return
+
+init_app_configs() # load from a JSON file (local or remote), with code-defaults otherwise
+
+# lutzapps - add kohya_ss support and handle the required local "venv" within the "kohya_ss" app folder
 def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
+    ### create a folder symlink for kohya's "local" 'venv' dir
     # as kohya_ss' "setup.sh" assumes a "local" VENV under "/workspace/kohya_ss/venv",
     # we will create a folder symlink "/workspace/kohya_ss/venv" -> "/workspace/bkohya"
-    # to our global VENV and rename the original "venv" folder to "venv(BAK)"
+    # to our global VENV and rename the original "venv" folder to "venv(BAK)", if any exists,
+    # which will not be the case normally.
 
-    if not DEBUG_SETTINGS['use_bkohya_local_venv_symlink'] == "1":
-        return True, "" # not fix the local KOHYA_SS VENV
+    if not DEBUG_SETTINGS['create_bkohya_to_local_venv_symlink']:
+        return True, "" # do not fix the local KOHYA_SS VENV requirement
 
     import shutil
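
The precedence those notes describe, from strongest to weakest, compresses into a tiny resolver; a sketch using the globals above:

    def resolve_app_configs_source(debug_settings: dict, manifest_url: str) -> str:
        # 1. a non-empty DEBUG_SETTINGS override wins (local file or any other URL)
        # 2. otherwise the central manifest URL stays the master
        # 3. if neither loads, the caller falls back to the in-code app_configs defaults
        override = debug_settings.get('APP_CONFIGS_MANIFEST_URL', '')
        return override if override != '' else manifest_url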

@@ -251,13 +467,19 @@ def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
     bapp_app_path = app_configs[bapp_name]["app_path"] # '/workspace/kohya_ss'
     bapp_app_path_venv = f"{bapp_app_path}/venv" # '/workspace/kohya_ss/venv'
 
+    name = app_configs[bapp_name]["name"]
+
     if not os.path.exists(bapp_app_path): # kohya is not installed
-        return True, "" # no need to fix the local KOHYA VENV
+        return True, f"{name} is not installed." # no need to fix the local KOHYA VENV
 
-    # kohya installed and has a local "venv" folder
-    if os.path.exists(bapp_app_path_venv) and os.path.isdir(bapp_app_path_venv):
+    # check that the src-folder of the downloaded 'bkohya' VENV exists
+    if not os.path.exists(bapp_venv_path): # src_path to the bkohya downloaded venv does NOT exist
+        return True, f"{name} VENV is not installed." # no need to fix the local KOHYA VENV, as the global KOHYA VENV does not exist
 
-        # check if this local VENV is a folderlink to target our bkohya global VENV to venv_path
+    # kohya_ss is installed
+    if os.path.isdir(bapp_app_path_venv): # and has a local "venv" folder
+
+        # check if this local VENV is a folder link targeting the bkohya global VENV at venv_path
         if os.path.islink(bapp_app_path_venv):
             success_message = f"kohya_ss local venv folder '{bapp_app_path_venv}' is already symlinked"

@@ -273,18 +495,17 @@ def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
                 i += 1
                 suffix = str(i)
 
-            bak_venv_path += suffix # free target bame for "rename"
+            bak_venv_path += suffix # free target name for the "rename" (move) operation of the folder
             shutil.move(bapp_app_path_venv, bak_venv_path) # move=rename
 
             print(f"local venv folder '{bapp_app_path_venv}' detected and renamed to '{bak_venv_path}'")
 
+    # now the path to the local "venv" is free; if it already existed, it is now renamed
     ### create a folder symlink for kohya's "local" venv dir
-    # check the src-folder to kohya downloaded venv exists
-    if os.path.exists(bapp_venv_path): # src_path to bkohya downloaded venv exists
-        # create a folder symlink for kohya local venv dir
-        os.symlink(bapp_venv_path, bapp_app_path_venv, target_is_directory=True)
-        success_message = f"created a symlink for kohya_ss local venv folder: '{bapp_venv_path}' -> '{bapp_app_path_venv}'"
-        print(success_message)
+    # create a folder symlink for kohya's local venv dir
+    os.symlink(bapp_venv_path, bapp_app_path_venv, target_is_directory=True)
+    success_message = f"created a symlink for kohya_ss local venv folder: '{bapp_app_path_venv}' -> '{bapp_venv_path}'"
+    print(success_message)
 
     return True, success_message
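
The "rename aside, then symlink" dance above generalizes to any app that insists on a local venv folder; a condensed sketch of the pattern (the function name is illustrative):

    import os, shutil

    def replace_dir_with_symlink(local_dir: str, target_dir: str) -> None:
        if os.path.islink(local_dir):
            return  # already symlinked, nothing to do
        if os.path.isdir(local_dir):
            bak_path = local_dir + "(BAK)"
            i = 0
            while os.path.exists(bak_path):  # find a free backup name
                i += 1
                bak_path = f"{local_dir}(BAK){i}"
            shutil.move(local_dir, bak_path)  # move = rename
        os.symlink(target_dir, local_dir, target_is_directory=True)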

@@ -296,3 +517,41 @@ def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
 
 # lutzapps - add kohya_ss venv support
 ensure_kohya_local_venv_is_symlinked()
+
+# some verification steps of the VENV setup of the "kohya_ss" app:
+# even if it "looks" like the "venv" is in a local sub-folder of the "kohya_ss" dir,
+# this location is only "aliased/symlinked" there from the globally downloaded
+# tarfile "bkohya.tar.gz", which was expanded separately into the folder "/workspace/bkohya".
+# So the VENV can be redownloaded separately from the github app at "/workspace/kohya_ss"
+# root@9452ad7f4cd6:/workspace/kohya_ss# python --version
+# Python 3.11.10
+# root@fe889cc68f5a:/workspace/kohya_ss# pip --version
+# pip 24.3.1 from /usr/local/lib/python3.11/dist-packages/pip (python 3.11)
+#
+# root@9452ad7f4cd6:/workspace/kohya_ss# python3 --version
+# Python 3.11.10
+# root@fe889cc68f5a:/workspace/kohya_ss# pip3 --version
+# pip 24.3.1 from /usr/local/lib/python3.11/dist-packages/pip (python 3.11)
+#
+# root@9452ad7f4cd6:/workspace/kohya_ss# ls venv -la
+# lrwxr-xr-x 1 root root 17 Nov 8 00:06 venv -> /workspace/bkohya
+#
+# root@9452ad7f4cd6:/workspace/kohya_ss# source venv/bin/activate
+#
+# (bkohya) root@9452ad7f4cd6:/workspace/kohya_ss# ls venv/bin/python* -la
+# lrwxr-xr-x 1 root root 10 Nov 8 00:48 venv/bin/python -> python3.10
+# lrwxr-xr-x 1 root root 10 Nov 8 00:48 venv/bin/python3 -> python3.10
+# lrwxr-xr-x 1 root root 19 Nov 8 00:48 venv/bin/python3.10 -> /usr/bin/python3.10
+#
+# (bkohya) root@9452ad7f4cd6:/workspace/kohya_ss# python --version
+# Python 3.10.12
+# (bkohya) root@fe889cc68f5a:/workspace/kohya_ss# pip --version
+# pip 22.0.2 from /workspace/venv/lib/python3.10/site-packages/pip (python 3.10)
+#
+# (bkohya) root@9452ad7f4cd6:/workspace/kohya_ss# python3 --version
+# Python 3.10.12
+# (bkohya) root@fe889cc68f5a:/workspace/kohya_ss# pip3 --version
+# pip 22.0.2 from /workspace/venv/lib/python3.10/site-packages/pip (python 3.10)
+#
+# (bkohya) root@9452ad7f4cd6:/workspace/kohya_ss# deactivate
+# root@9452ad7f4cd6:/workspace/kohya_ss#

@@ -13,6 +13,8 @@ import xml.etree.ElementTree as ET
 import time
 import datetime
 import shutil
+from utils.app_configs import (DEBUG_SETTINGS, pretty_dict, init_app_configs, init_debug_settings, write_debug_setting, ensure_kohya_local_venv_is_symlinked)
+from utils.model_utils import (get_sha256_hash_from_file)
 
 INSTALL_STATUS_FILE = '/tmp/install_status.json'
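
get_sha256_hash_from_file(), imported above, is presumably used to verify the downloaded tar against the 'sha256_hash' values in app_configs; a plausible chunked implementation (the real one lives in utils/model_utils.py and is not shown in this diff):

    import hashlib

    def get_sha256_hash_from_file(file_path: str) -> str:
        sha256 = hashlib.sha256()
        with open(file_path, "rb") as f:
            # hash multi-GB tar files in 1 MiB chunks so memory use stays flat
            for chunk in iter(lambda: f.read(1024 * 1024), b""):
                sha256.update(chunk)
        return sha256.hexdigest()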
@@ -180,41 +182,45 @@ import time
             # yield (out_line.rstrip(), err_line.rstrip())
 
-# this ist the v2 ("fast") version for "download_and_unpack_venv()" - can be (de-)/activated in DEBUG_SETTINGS dict
-def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message) -> tuple[bool, str]:
+# this is the v2 ("fast") version for "download_and_unpack_venv()" - can be (de-)/activated in DEBUG_SETTINGS dict
+def download_and_unpack_venv_v2(app_name:str, app_configs:dict, send_websocket_message) -> tuple[bool, str]:
+    # load the latest configured DEBUG_SETTINGS from the stored setting of the DEBUG_SETTINGS_FILE
+    init_debug_settings() # reload latest DEBUG_SETTINGS
+    # as this could overwrite the APP_CONFIGS_MANIFEST_URL, we reload the app_configs global dict
+    # from whatever Url is now defined
+    init_app_configs() # reload latest app_configs dict
 
     app_config = app_configs.get(app_name)
     if not app_config:
         return False, f"App '{app_name}' not found in configurations."
 
     venv_path = app_config['venv_path']
-    app_path = app_config['app_path']
     download_url = app_config['download_url']
-    total_size = app_config['size']
+    archive_size = app_config['archive_size']
 
     tar_filename = os.path.basename(download_url)
     workspace_dir = '/workspace'
     downloaded_file = os.path.join(workspace_dir, tar_filename)
 
-    from utils.app_configs import (DEBUG_SETTINGS, pretty_dict, init_debug_settings, write_debug_setting, ensure_kohya_local_venv_is_symlinked)
-    # load the latest configured DEBUG_SETTINGS from the stored setting of the DEBUG_SETTINGS_FILE
-    init_debug_settings()
-    # show currently using DEBUG_SETTINGS
-    print(f"\nCurrently using 'DEBUG_SETTINGS':\n{pretty_dict(DEBUG_SETTINGS)}")
 
     write_debug_setting('tar_filename', tar_filename)
     write_debug_setting('download_url', download_url)
 
     try:
+        if DEBUG_SETTINGS['skip_to_github_stage']:
+            success, message = clone_application(app_config, send_websocket_message)
+            return success, message
 
         save_install_status(app_name, 'in_progress', 0, 'Downloading')
-        send_websocket_message('install_log', {'app_name': app_name, 'log': f'Downloading {total_size / (1024 * 1024):.2f} MB ...'})
+        send_websocket_message('install_log', {'app_name': app_name, 'log': f'Downloading {archive_size / (1024 * 1024):.2f} MB ...'})
 
         start_time_download = time.time()
 
         # debug with existing local cached TAR file
         if os.path.exists(downloaded_file):
-            write_debug_setting('used_local_tar', "1") # indicate using cached TAR file
-            send_websocket_message('install_log', {'app_name': app_name, 'log': f"Used cached local tarfile '{downloaded_file}'"})
+            write_debug_setting('used_local_tarfile', True) # indicate using cached TAR file
+            send_websocket_message('used_local_tarfile', {'app_name': app_name, 'log': f"Used cached local tarfile '{downloaded_file}'"})
         else:
-            write_debug_setting('used_local_tar', "0") # indicate no cached TAR file found
+            write_debug_setting('used_local_tarfile', False) # indicate no cached TAR file found
 
             try: ### download with ARIA2C
 
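Note: init_debug_settings() / write_debug_setting() themselves are defined in utils/app_configs.py and are not part of this diff. A minimal sketch of how such a JSON-backed settings store can work (file location and defaults below are assumptions for illustration, not the actual implementation):

    import json, os

    DEBUG_SETTINGS_FILE = '/workspace/_debug_settings.json'  # assumed path, illustration only
    DEBUG_SETTINGS = {'installer_codeversion': 'v2', 'skip_to_github_stage': False}  # assumed defaults

    def init_debug_settings():
        # overlay persisted settings onto the in-code defaults
        if os.path.exists(DEBUG_SETTINGS_FILE):
            with open(DEBUG_SETTINGS_FILE) as f:
                DEBUG_SETTINGS.update(json.load(f))

    def write_debug_setting(setting_name, setting_value):
        # persist a single setting back to the JSON file
        DEBUG_SETTINGS[setting_name] = setting_value
        with open(DEBUG_SETTINGS_FILE, 'w') as f:
            json.dump(DEBUG_SETTINGS, f, indent=4)

Reloading on every install run is what lets a developer flip e.g. 'skip_to_github_stage' between runs without restarting the Flask app.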
@@ -275,8 +281,8 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
                     gid = match.group(1) # e.g., "cd57da"
                     downloaded_size_value = match.group(2) # e.g., "2.1"
                     downloaded_size_unit = match.group(3) # e.g., "GiB"
-                    total_size_value = match.group(4) # e.g., "4.0"
-                    total_size_unit = match.group(5) # e.g., "GiB"
+                    total_size_value = match.group(4) # e.g., "4.0" (this could replace the 'archive_size' from the manifest)
+                    total_size_unit = match.group(5) # e.g., "GiB" (with calculation to bytes, but not sure if its rounded)
                     percentage = int(match.group(6)) # e.g., "53"
                     connection_count = int(match.group(7)) # e.g., "16"
                     download_rate_value = match.group(8) # e.g., "1.9"
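Note: the match.group() indices above consume aria2c's periodic console summary, which looks like "[#cd57da 2.1GiB/4.0GiB(53%) CN:16 DL:1.9MiB ETA:17m12s]". A standalone sketch of parsing such a line (this regex is an illustration of the format, not necessarily the exact pattern the app compiles):

    import re

    line = "[#cd57da 2.1GiB/4.0GiB(53%) CN:16 DL:1.9MiB ETA:17m12s]"

    pattern = re.compile(
        r"\[#(\w+)\s+([\d.]+)(\w+)/([\d.]+)(\w+)\((\d+)%\)\s+CN:(\d+)\s+DL:([\d.]+)(\w+)")
    match = pattern.search(line)
    if match:
        gid = match.group(1)                    # "cd57da"
        percentage = int(match.group(6))        # 53
        connection_count = int(match.group(7))  # 16
        download_rate_value = match.group(8)    # "1.9"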
@@ -296,8 +302,8 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
 
                     ### original code
                     #speed = downloaded_size / elapsed_time # bytes/sec
-                    #percentage = (downloaded_size / total_size) * 100
-                    #eta = (total_size - downloaded_size) / speed if speed > 0 else 0 # sec
+                    #percentage = (downloaded_size / archive_size) * 100
+                    #eta = (archive_size - downloaded_size) / speed if speed > 0 else 0 # sec
 
                     send_websocket_message('install_progress', {
                         'app_name': app_name,
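Note: the commented-out "original code" is plain rate arithmetic; with the renamed archive_size the numbers work out like this (illustrative values):

    archive_size = 4_294_967_296       # total bytes, from the manifest
    downloaded_size = 2_254_857_830    # bytes on disk so far
    elapsed_time = 1132.0              # seconds since the download started

    speed = downloaded_size / elapsed_time               # ~1.99e6 bytes/sec
    percentage = (downloaded_size / archive_size) * 100  # ~52.5 %
    eta = (archive_size - downloaded_size) / speed       # ~1024 sec remaining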
@@ -329,7 +335,7 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
                     os.remove(f"{tar_filename}.aria2")
 
             except Exception as e:
-                error_msg = f"ERROR in download_and_unpack_venv_fastversion():download with ARIA2C\ncmdline: '{cmd_line}'\nException: {str(e)}"
+                error_msg = f"ERROR in download_and_unpack_venv_v2():download with ARIA2C\ncmdline: '{cmd_line}'\nException: {str(e)}"
                 print(error_msg)
 
                 error_message = f"Downloading VENV failed: {download_process.stderr.read() if download_process.stderr else 'Unknown error'}"
@@ -356,8 +362,8 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
 
             # if elapsed_time > 0:
             #     speed = downloaded_size / elapsed_time
-            #     percentage = (downloaded_size / total_size) * 100
-            #     eta = (total_size - downloaded_size) / speed if speed > 0 else 0
+            #     percentage = (downloaded_size / archive_size) * 100
+            #     eta = (archive_size - downloaded_size) / speed if speed > 0 else 0
 
             # send_websocket_message('install_progress', {
             #     'app_name': app_name,
@@ -375,29 +381,72 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
 
             return False, error_message
 
-        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Download completed. Starting unpacking...'})
-        send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 100, 'stage': 'Download Complete'})
+        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Download completed. Starting Verification ...'})
+        # we use a 99% progress and indicate 1% for Verification against the files SHA256 hash
+        send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 99, 'stage': 'Downloading'})
 
         total_duration_download = f"{datetime.timedelta(seconds=int(time.time() - start_time_download))}"
         write_debug_setting('total_duration_download', total_duration_download)
         print(f"download did run {total_duration_download} for app '{app_name}'")
 
 
+        ### VERIFY stage
+        #
+        # Create TAR from the VENV current directory:
+        # IMPORTANT: cd INTO the folder you want to compress, as we use "." for source folder,
+        # to avoid having the foldername in the TAR file !!!
+        # PV piping is "nice-to-have" and is only used for showing "Progress Values" during compressing
+        #
+        # cd /workspace/bkohya
+        # #tar -czf | pv > /workspace/bkohya.tar.gz . (not the smallest TAR)#
+        # tar -cvf - . | gzip -9 - | pv > /workspace/bkohya.tar.gz
+        #
+        # afterwards create the SHA256 hash from this TAR with
+        # shasum -a 256 bkohya.tar.gz
+        #
+        # also report the uncompressed size from the current VENV directory,
+        # we need that as the 100% base for the progress indicators when uncompressing the TAR
+
+
+        # verify the downloaded TAR file against its SHA256 hash value from the manifest
+
+        download_sha256_hash = app_config["sha256_hash"].lower() # get the sha256_hash from the app manifest
+        file_verified = False
+
+        print(f"getting SHA256 Hash for '{downloaded_file}'")
+        successfull_HashGeneration, file_sha256_hash = get_sha256_hash_from_file(downloaded_file)
+
+        if successfull_HashGeneration and file_sha256_hash.lower() == download_sha256_hash.lower():
+            file_verified = True
+            message = f"Downloaded file '{os.path.basename(downloaded_file)}' was successfully (SHA256) verified."
+            print(message)
+
+        else:
+            if successfull_HashGeneration: # the generated SHA256 file hash did not match against the metadata hash
+                error_message = f"The downloaded file '{os.path.basename(downloaded_file)}' has DIFFERENT \nSHA256: {file_sha256_hash} as in the manifest\nFile is possibly corrupted and was DELETED!"
+                print(error_message)
+
+                os.remove(downloaded_file) # delete corrupted, downloaded file
+
+            else: # NOT successful, the hash contains the Exception
+                error_msg = file_sha256_hash
+                error_message = f"Exception occurred while generating the SHA256 hash for '{downloaded_file}':\n{error_msg}"
+                print(error_message)
+
+        if not file_verified:
+            send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
+            save_install_status(app_name, 'failed', 0, 'Failed')
+
+            return False, error_message
+
+        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Verification completed. Starting unpacking ...'})
+        send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 100, 'stage': 'Download Complete'})
+
 
         ### Decompression Stage (Unpacking the downloaded VENV)
         start_time_unpack = time.time()
 
-        # lutzapps - fix TAR packaging bug (compressed from the workspace root instead of bkohya VENV folder)
-        # e.g. "bkohya/bin/activate", together with venv_path ("/workspace/bkohya") ends up as "/workspace/bkohya/bkohya/bin/activate"
-        # TODO: need to repackage Kohya VENV correctly and then remove this fix!!!
-
-        if app_name == "bkohya" and DEBUG_SETTINGS['use_bkohya_tar_folder_fix'] == "1":
-            venv_path = "/workspace" # extracts then correctly to '/workspace/bkohya', instead of '/workspace/bkohya/bkohya'
-
-        # Create TAR from the VENV current directory:
-        # cd ~/Projects/Docker/madiator/workspace/bkohya
-        # [tar -czf | pv > ~/Projects/Docker/madiator/workspace/bkohya.tar.gz . (not the smallest TAR)]
-        # tar -cvf - . | gzip -9 - | pv > ~/Projects/Docker/madiator/workspace/bkohya.tar.gz
-
         # Ensure the venv directory exists
         os.makedirs(f"{venv_path}/", exist_ok=True) # append trailing "/" to make sure the last sub-folder is created
 
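Note: the VERIFY stage relies on get_sha256_hash_from_file() from utils/model_utils.py (changed further down in this commit) plus a case-insensitive compare. The same check, as a self-contained sketch:

    import hashlib, os

    def sha256_of_file(path: str) -> str:
        # stream in blocks so multi-GB TAR files never load fully into RAM
        h = hashlib.sha256()
        with open(path, 'rb') as f:
            for block in iter(lambda: f.read(1 << 20), b""):
                h.update(block)
        return h.hexdigest()

    def verify_download(path: str, manifest_sha256: str) -> bool:
        # compare case-insensitively; manifests may store upper- or lower-case hex
        if sha256_of_file(path).lower() == manifest_sha256.lower():
            return True
        os.remove(path)  # drop the possibly corrupted archive, as the code above does
        return False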
@@ -419,10 +468,7 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
         # 'bforge': 7689838771
         # 'bkohya': 12192767148
 
-        uncompressed_size_bytes = DEBUG_SETTINGS["manifests"][app_name]["venv_uncompressed_size"]
+        uncompressed_size_bytes = app_config["venv_uncompressed_size"]
 
-        #sha256_hash = DEBUG_SETTINGS["manifests"][app_name]["sha256_hash"]
-        # TODO: create with 'shasum -a 256 xxx.tar.gz'
 
         ### NOTE: as it turns out GZIP has problems with files bigger than 2 or 4 GB due to internal field bit restrictions
 
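Note on the GZIP limitation mentioned above: gzip's trailing ISIZE field is only 32 bits, so the uncompressed size stored in the archive wraps modulo 2^32 for anything over 4 GB; that is why the manifest ships an explicit venv_uncompressed_size to serve as the 100% base for unpack progress. The decompression cmd_line itself is outside this hunk; a hedged sketch of the shape such a call can take (assuming pigz is installed):

    import subprocess

    downloaded_file = '/workspace/bkohya.tar.gz'  # illustrative paths
    venv_path = '/workspace/bkohya'

    # pigz decompresses on all cores; tar unpacks the stream into venv_path
    cmd_line = f"pigz -dc {downloaded_file} | tar -xf - -C {venv_path}"
    decompression_process = subprocess.Popen(
        cmd_line, shell=True, executable='/bin/bash',
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
    decompression_process.wait()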
@@ -601,7 +647,7 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
                 # else any other line in stdout (which we not process)
 
         except Exception as e:
-            error_msg = f"ERROR in download_and_unpack_venv_fastversion():\ncmdline: '{cmd_line}'\nException: {str(e)}"
+            error_msg = f"ERROR in download_and_unpack_venv_v2():\ncmdline: '{cmd_line}'\nException: {str(e)}"
             print(error_msg)
 
         decompression_process.wait() # let the process finish
@@ -621,8 +667,11 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
 
         send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 100, 'stage': 'Unpacking Complete'})
 
-        print(f"'DEBUG_SETTINGS' after this run:\n{pretty_dict(DEBUG_SETTINGS)}")
+        ### installing the App from GITHUB
+        # Clone the repository if it doesn't exist
+        success, message = clone_application(app_name)
+
+        print(f"'DEBUG_SETTINGS' after this run:\n{pretty_dict(DEBUG_SETTINGS)}")
 
         ### original "v1" code (very slow code because of STATISTICS glory
 
@@ -647,66 +696,29 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
         # process.wait()
         # rc = process.returncode
 
 
         ### installing the App from GITHUB
         # Clone the repository if it doesn't exist
-        if not os.path.exists(app_path):
-            send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cloning repository...'})
-
-            repo_url = ''
-            if app_name == 'bcomfy':
-                repo_url = 'https://github.com/comfyanonymous/ComfyUI.git'
-            elif app_name == 'bforge':
-                repo_url = 'https://github.com/lllyasviel/stable-diffusion-webui-forge.git'
-            elif app_name == 'ba1111':
-                repo_url = 'https://github.com/AUTOMATIC1111/stable-diffusion-webui.git'
-            elif app_name == 'bkohya': # lutzapps - added new Kohya app
-                repo_url = 'https://github.com/bmaltais/kohya_ss.git'
-
-            try: # add a repo assignment for Kohya
-                repo = git.Repo.clone_from(repo_url, app_path, progress=lambda op_code, cur_count, max_count, message: send_websocket_message('install_log', {
-                    'app_name': app_name,
-                    'log': f"Cloning: {cur_count}/{max_count} {message}"
-                }))
-                send_websocket_message('install_log', {'app_name': app_name, 'log': 'Repository cloned successfully.'})
-
-                # lutzapps - make sure we use Kohya with FLUX support
-                if app_name == 'bkohya':
-                    branch_name = "sd3-flux.1" # this branch also uses a "sd-scripts" branch "SD3" automatically
-                    repo.git.checkout(branch_name)
-
-                # Clone ComfyUI-Manager for Better ComfyUI
-                if app_name == 'bcomfy':
-                    custom_nodes_path = os.path.join(app_path, 'custom_nodes')
-                    os.makedirs(custom_nodes_path, exist_ok=True)
-                    comfyui_manager_path = os.path.join(custom_nodes_path, 'ComfyUI-Manager')
-                    if not os.path.exists(comfyui_manager_path):
-                        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cloning ComfyUI-Manager...'})
-                        git.Repo.clone_from('https://github.com/ltdrdata/ComfyUI-Manager.git', comfyui_manager_path)
-                        send_websocket_message('install_log', {'app_name': app_name, 'log': 'ComfyUI-Manager cloned successfully.'})
-
-            except git.exc.GitCommandError as e:
-                send_websocket_message('install_log', {'app_name': app_name, 'log': f'Error cloning repository: {str(e)}'})
-                return False, f"Error cloning repository: {str(e)}"
-
-        if app_name == 'bkohya': # create a folder link for kohya_ss local "venv"
-            ensure_kohya_local_venv_is_symlinked()
+        success, error_message = clone_application(app_name, send_websocket_message)
 
         # Clean up the downloaded file
         send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cleaning up...'})
 
         # lutzapps - debug with local TAR
         # do NOT delete the Kohya venv
-        if DEBUG_SETTINGS["delete_tarfile_after_download"] == "1": # this is the default, but can be overwritten
+        if DEBUG_SETTINGS["delete_tar_file_after_download"]: # this is the default, but can be overwritten
             os.remove(downloaded_file)
 
         send_websocket_message('install_log', {'app_name': app_name, 'log': 'Installation complete. Refresh page to start app'})
 
-        save_install_status(app_name, 'completed', 100, 'Completed')
-        send_websocket_message('install_complete', {'app_name': app_name, 'status': 'success', 'message': "Virtual environment installed successfully."})
-        return True, "Virtual environment installed successfully."
+        if success:
+            save_install_status(app_name, 'completed', 100, 'Completed')
+            send_websocket_message('install_complete', {'app_name': app_name, 'status': 'success', 'message': "Virtual environment installed successfully."})
+            return True, "Virtual environment installed successfully."
+        else:
+            return False, error_message
 
     except requests.RequestException as e:
-        error_message = f"Download failed: {str(e)}"
+        error_message = f"Download/Decompression failed: {str(e)}"
         send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
         save_install_status(app_name, 'failed', 0, 'Failed')
         return False, error_message
@@ -716,8 +728,143 @@ def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_m
         send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
         return False, error_message
 
+### installing the App from GITHUB
+# Clone the repository if it doesn't exist
+def clone_application(app_config:dict, send_websocket_message) -> tuple[bool, str]:
+    try:
+        app_name = app_config['id']
+        app_path = app_config['app_path']
+
-def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
+        if not os.path.exists(app_path): # only install new apps
+            repo_url = app_config['repo_url']
+            branch_name = app_config['branch_name']
+            if branch_name == "": # use the default branch
+                branch_name = "master"
+            clone_recursive = app_config['clone_recursive']
+
+            send_websocket_message('install_log', {'app_name': app_name, 'log': f"Cloning repository '{repo_url}' branch '{branch_name}' recursive={clone_recursive} ..."})
+
+            repo = git.Repo.clone_from(repo_url, app_path, # first 2 params are fixed, then use named params
+                #branch=branch_name, # if we provide a branch here, we ONLY get this branch downloaded
+                # we want ALL branches, so we can easily checkout different versions from kohya_ss later, without re-downloading
+                recursive=clone_recursive, # include cloning submodules recursively (if needed as with Kohya)
+                progress=lambda op_code, cur_count, max_count, message: send_websocket_message('install_log', {
+                    'app_name': app_name,
+                    'log': f"Cloning: {cur_count}/{max_count} {message}"
+                }))
+
+            send_websocket_message('install_log', {'app_name': app_name, 'log': 'Repository cloned successfully.'})
+
+            # lutzapps - make sure we use Kohya with FLUX support
+            if not branch_name == "master":
+                repo.git.checkout(branch_name) # checkout the "sd3-flux.1" branch, but could later switch back to "master" easily
+            # the setup can be easily verified with git, here e.g. for the "kohya_ss" app:
+            # root@fe889cc68f5a:~# cd /workspace/kohya_ss
+            # root@fe889cc68f5a:/workspace/kohya_ss# git branch
+            #   master
+            # * sd3-flux.1
+            # root@fe889cc68f5a:/workspace/kohya_ss# cd sd-scripts
+            # root@fe889cc68f5a:/workspace/kohya_ss/sd-scripts# git branch
+            # * (HEAD detached at b8896aa)
+            #   main
+            #
+            # in the case of kohya_ss we need to fix a bug in the 'setup.sh' file,
+            # where they forgot to adapt the branch name from "master" to "sd3-flux.1"
+            # in the "#variables" section for refreshing kohya via git with 'setup.sh'
+            if app_name == 'bkohya':
+                success, message = update_kohya_setup_sh(app_path) # patch the 'setup.sh' file
+                print(message) # shows if the patch was needed, and applied successfully
+
+        else: # refresh app
+            if app_path['refresh']: # app wants auto-refreshes
+                # TODO: implement app refreshes via git pull or, in the case of 'kohya_ss' via "setup.sh"
+                message = f"Refreshing of app '{app_name}' is NYI"
+                print(message)
+
+        # Clone ComfyUI-Manager and other defined custom_nodes for Better ComfyUI
+        if app_name == 'bcomfy':
+            # install all defined custom nodes
+            custom_nodes_path = os.path.join(app_path, 'custom_nodes')
+            os.makedirs(f"{custom_nodes_path}/", exist_ok=True) # append a trailing slash to be sure last dir is created
+            for custom_node in app_config['custom_nodes']:
+                name = custom_node['name']
+                path = custom_node['path']
+                repo_url = custom_node['repo_url']
+                custom_node_path = os.path.join(custom_nodes_path, path)
+
+                if not os.path.exists(custom_node_path): # only install new custom nodes
+                    send_websocket_message('install_log', {'app_name': app_name, 'log': f"Cloning '{name}' ..."})
+                    git.Repo.clone_from(repo_url, custom_node_path)
+                    send_websocket_message('install_log', {'app_name': app_name, 'log': f"'{name}' cloned successfully."})
+
+                    # install requirements
+                    venv_path = app_config['venv_path']
+                    #app_path = app_config['app_path'] # already defined
+
+                    try:
+                        # Activate the virtual environment and run the commands
+                        activate_venv = f"source {venv_path}/bin/activate"
+                        change_dir_command = f"cd {custom_node_path}"
+                        pip_install_command = "pip install -r requirements.txt"
+
+                        full_command = f"{activate_venv} && {change_dir_command} && {pip_install_command}"
+
+                        # TODO: rewrite this without shell
+                        process = subprocess.Popen(full_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, executable='/bin/bash')
+                        output, _ = process.communicate()
+
+                        if process.returncode == 0:
+                            return True, f"Custom node requirements were successfully installed. Output: {output.decode('utf-8')}"
+                        else:
+                            return False, f"Error in custom node requirements installation. Output: {output.decode('utf-8')}"
+                    except Exception as e:
+                        return False, f"Error installing custom node requirements: {str(e)}"
+
+    except git.exc.GitCommandError as e:
+        send_websocket_message('install_log', {'app_name': app_name, 'log': f'Error cloning repository: {str(e)}'})
+        return False, f"Error cloning repository: {str(e)}"
+    except Exception as e:
+        send_websocket_message('install_log', {'app_name': app_name, 'log': f'Error cloning repository: {str(e)}'})
+        return False, f"Error cloning repository: {str(e)}"
+
+    if app_name == 'bkohya': # create a folder link for kohya_ss local "venv"
+        success, message = ensure_kohya_local_venv_is_symlinked()
+        if not success: # symlink not created, but still success=True and only a warning, can be fixed manually
+            message = f"{app_config['name']} was cloned and patched successfully, but the symlink to the local venv returned the following problem:\n{message}"
+    else:
+        message = f"'{app_name}' was cloned successfully."
+
+    return True, message
+
+def update_kohya_setup_sh(app_path:str) -> tuple[bool, str]:
+    try:
+        # patch 'setup.sh' within the kohya_ss main folder for BRANCH="sd3-flux.1"
+        setup_sh_path = os.path.join(app_path, 'setup.sh')
+        if not os.path.exists(setup_sh_path):
+            return False, f"file '{setup_sh_path}' was not found"
+
+        with open(setup_sh_path, 'r') as file:
+            content = file.read()
+
+        # Use regex to search & replace wrong branch variable in the file
+        patched_content = re.sub(r'BRANCH="master"', 'BRANCH="sd3-flux.1"', content)
+
+        if patched_content == content:
+            message = f"'{setup_sh_path}' already fine, patch not needed."
+        else:
+            with open(setup_sh_path, 'w') as file:
+                file.write(patched_content)
+
+            message = f"'{setup_sh_path}' needed patch, successfully patched."
+
+        return True, message
+
+    except Exception as e:
+        return False, str(e)
+
+def download_and_unpack_venv_v1(app_name, app_configs, send_websocket_message):
     app_config = app_configs.get(app_name)
     if not app_config:
         return False, f"App '{app_name}' not found in configurations."
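Note: clone_application() takes everything from the app_config manifest entry. A hypothetical entry covering exactly the keys read above (values are illustrative, not the shipped manifest):

    app_config = {
        'id': 'bkohya',                # the app_name used in log messages
        'name': 'Better Kohya',        # display name (illustrative)
        'app_path': '/workspace/kohya_ss',
        'venv_path': '/workspace/bkohya',
        'repo_url': 'https://github.com/bmaltais/kohya_ss.git',
        'branch_name': 'sd3-flux.1',   # "" falls back to "master"
        'clone_recursive': True,       # kohya_ss pulls sd-scripts as a submodule
        'refresh': False,              # auto-refresh is still NYI above
        'custom_nodes': []             # only consumed for the 'bcomfy' app
    }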
@@ -725,14 +872,14 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
     venv_path = app_config['venv_path']
     app_path = app_config['app_path']
     download_url = app_config['download_url']
-    total_size = app_config['size']
+    archive_size = app_config['size']
     tar_filename = os.path.basename(download_url)
     workspace_dir = '/workspace'
     downloaded_file = os.path.join(workspace_dir, tar_filename)
 
     try:
         save_install_status(app_name, 'in_progress', 0, 'Downloading')
-        send_websocket_message('install_log', {'app_name': app_name, 'log': f'Starting download of {total_size / (1024 * 1024):.2f} MB...'})
+        send_websocket_message('install_log', {'app_name': app_name, 'log': f'Starting download of {archive_size / (1024 * 1024):.2f} MB...'})
 
         # lutzapps - debug with existing local TAR
         if not os.path.exists(downloaded_file):
@@ -753,8 +900,8 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
 
                     if elapsed_time > 0:
                         speed = downloaded_size / elapsed_time
-                        percentage = (downloaded_size / total_size) * 100
-                        eta = (total_size - downloaded_size) / speed if speed > 0 else 0
+                        percentage = (downloaded_size / archive_size) * 100
+                        eta = (archive_size - downloaded_size) / speed if speed > 0 else 0
 
                         send_websocket_message('install_progress', {
                             'app_name': app_name,
@@ -869,31 +1016,34 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
         send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
         return False, error_message
 
-### this is the function wgich switches between v0 and v1 debug setting for comparison
-def download_and_unpack_venv(app_name, app_configs, send_websocket_message) -> tuple[bool, str]:
-    from app_configs import DEBUG_SETTINGS, write_debug_setting
+### this is the function which switches between v0 and v1 debug setting for comparison
+def download_and_unpack_venv(app_name:str, app_configs:dict, send_websocket_message) -> tuple[bool, str]:
+    from utils.app_configs import DEBUG_SETTINGS, write_debug_setting
 
     installer_codeversion = DEBUG_SETTINGS['installer_codeversion'] # read from DEBUG_SETTINGS
-    print(f"download_and_unpack_venv v{installer_codeversion} STARTING for '{app_name}'")
+    print(f"download_and_unpack_venv_{installer_codeversion} STARTING for '{app_name}'")
 
     import time
 
     start_time = time.time()
 
-    if installer_codeversion == "1":
-        download_and_unpack_venv(app_name, app_configs, send_websocket_message)
-    elif installer_codeversion == "2":
-        download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message)
+    if installer_codeversion == "v1":
+        success, message = download_and_unpack_venv_v1(app_name, app_configs, send_websocket_message)
+    elif installer_codeversion == "v2":
+        success, message = download_and_unpack_venv_v2(app_name, app_configs, send_websocket_message)
     else:
-        print(f"unknown 'installer_codeversion' v{installer_codeversion} found, nothing run for app '{app_name}'")
+        error_msg = f"unknown 'installer_codeversion' {installer_codeversion} found, nothing run for app '{app_name}'"
+        print(error_msg)
+        success = False
+        message = error_msg
 
     total_duration = f"{datetime.timedelta(seconds=int(time.time() - start_time))}"
 
     write_debug_setting('app_name', app_name)
     write_debug_setting('total_duration', total_duration)
 
-    print(f"download_and_unpack_venv v{installer_codeversion} did run {total_duration} for app '{app_name}'")
+    print(f"download_and_unpack_venv_v{installer_codeversion} did run {total_duration} for app '{app_name}'")
+    return success, message
 
 def fix_custom_nodes(app_name, app_configs):
     if app_name != 'bcomfy':
@@ -921,54 +1071,9 @@ def fix_custom_nodes(app_name, app_configs):
         return False, f"Error fixing custom nodes: {str(e)}"
 
 # Replace the existing install_app function with this updated version
-def install_app(app_name, app_configs, send_websocket_message):
+def install_app(app_name:str, app_configs:dict, send_websocket_message) -> tuple[bool, str]:
     if app_name in app_configs:
-        #return download_and_unpack_venv(app_name, app_configs, send_websocket_message)
-        return download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message)
+        success, message = download_and_unpack_venv(app_name, app_configs, send_websocket_message)
+        return success, message
     else:
         return False, f"Unknown app: {app_name}"
 
-# unused function
-def onsolate_update_model_symlinks():
-    # lutzapps - CHANGE #7 - use the new "shared_models" module for app model sharing
-    # remove this whole now unused function
-    return "replaced by utils.shared_models.update_model_symlinks()"
-
-    shared_models_dir = '/workspace/shared_models'
-    apps = {
-        'stable-diffusion-webui': '/workspace/stable-diffusion-webui/models',
-        'stable-diffusion-webui-forge': '/workspace/stable-diffusion-webui-forge/models',
-        'ComfyUI': '/workspace/ComfyUI/models'
-    }
-    model_types = ['Stable-diffusion', 'VAE', 'Lora', 'ESRGAN']
-
-    for model_type in model_types:
-        shared_model_path = os.path.join(shared_models_dir, model_type)
-
-        if not os.path.exists(shared_model_path):
-            continue
-
-        for app, app_models_dir in apps.items():
-            if app == 'ComfyUI':
-                if model_type == 'Stable-diffusion':
-                    app_model_path = os.path.join(app_models_dir, 'checkpoints')
-                elif model_type == 'Lora':
-                    app_model_path = os.path.join(app_models_dir, 'loras')
-                elif model_type == 'ESRGAN':
-                    app_model_path = os.path.join(app_models_dir, 'upscale_models')
-                else:
-                    app_model_path = os.path.join(app_models_dir, model_type.lower())
-            else:
-                app_model_path = os.path.join(app_models_dir, model_type)
-
-            # Create the app model directory if it doesn't exist
-            os.makedirs(app_model_path, exist_ok=True)
-
-            # Create symlinks for each file in the shared model directory
-            for filename in os.listdir(shared_model_path):
-                src = os.path.join(shared_model_path, filename)
-                dst = os.path.join(app_model_path, filename)
-                if os.path.isfile(src) and not os.path.exists(dst):
-                    os.symlink(src, dst)
-
-    print("Model symlinks updated.")
 
@@ -71,7 +71,7 @@ def check_huggingface_url(url):
 
     return True, repo_id, filename, folder_name, branch_name
 
-def download_model(url, model_name, model_type, civitai_token=None, hf_token=None, version_id=None, file_index=None):
+def download_model(url, model_name, model_type, civitai_token=None, hf_token=None, version_id=None, file_index=None) -> tuple[bool, str]:
     ensure_shared_folder_exists()
     is_civitai, is_civitai_api, model_id, _ = check_civitai_url(url)
     is_huggingface, repo_id, hf_filename, hf_folder_name, hf_branch_name = check_huggingface_url(url) # TODO: double call
@@ -95,7 +95,7 @@ def download_model(url, model_name, model_type, civitai_token=None, hf_token=Non
     return success, message
 
 # lutzapps - added SHA256 checks for already existing ident and downloaded HuggingFace model
-def download_civitai_model(url, model_name, model_type, civitai_token, version_id=None, file_index=None):
+def download_civitai_model(url, model_name, model_type, civitai_token, version_id=None, file_index=None) -> tuple[bool, str]:
     try:
         is_civitai, is_civitai_api, model_id, url_version_id = check_civitai_url(url)
 
@@ -186,7 +186,7 @@ def get_sha256_hash_from_file(file_path:str) -> tuple[bool, str]:
             for byte_block in iter(lambda: f.read(4096), b""):
                 sha256_hash.update(byte_block)
 
-        return True, sha256_hash.hexdigest().upper()
+        return True, sha256_hash.hexdigest().lower()
 
     except Exception as e:
         return False, str(e)
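Note: switching hexdigest().upper() to .lower() pairs with the .lower() comparisons added below; normalizing both sides makes the equality test independent of how a platform cases its hex digests:

    manifest_hash = "9F86D081884C7D65"  # e.g. metadata delivered upper-case
    local_hash = "9f86d081884c7d65"     # hashlib.hexdigest() returns lower-case
    assert local_hash.lower() == manifest_hash.lower()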
@@ -247,7 +247,7 @@ def get_modelfile_hash_and_ident_existing_modelfile_exists(model_name:str, model
             raise NotImplementedError("Copying a non-LFS file is not implemented.")
 
         lfs = repo_file.lfs # BlobLfsInfo class instance
-        download_sha256_hash = lfs.sha256.upper()
+        download_sha256_hash = lfs.sha256.lower()
 
         print(f"Metadata from RepoFile LFS '{repo_file.rfilename}'")
         print(f"SHA256: {download_sha256_hash}")
@@ -283,8 +283,8 @@ def get_modelfile_hash_and_ident_existing_modelfile_exists(model_name:str, model
         # if NOT successful, the hash contains the Exception
         print(f"SHA256 hash generated from local file: '{model_path}'\n{model_sha256_hash}")
 
-        if successfull_HashGeneration and model_sha256_hash == download_sha256_hash:
-            message = f"Existing and ident model aleady found for '{os.path.basename(model_path)}'"
+        if successfull_HashGeneration and model_sha256_hash.lower() == download_sha256_hash.lower():
+            message = f"Existing and ident model already found for '{os.path.basename(model_path)}'"
             print(message)
 
             send_websocket_message('model_download_progress', {
@@ -315,7 +315,7 @@ def get_modelfile_hash_and_ident_existing_modelfile_exists(model_name:str, model
 
 
 # lutzapps - added SHA256 checks for already existing ident and downloaded HuggingFace model
-def download_huggingface_model(url, model_name, model_type, repo_id, hf_filename, hf_folder_name, hf_branch_name, hf_token=None):
+def download_huggingface_model(url, model_name, model_type, repo_id, hf_filename, hf_folder_name, hf_branch_name, hf_token=None) -> tuple[bool, str]:
     try:
         from huggingface_hub import hf_hub_download
 
@@ -372,7 +372,7 @@ def download_huggingface_model(url, model_name, model_type, repo_id, hf_filename
 
 
 # lutzapps - added SHA256 check for downloaded CivitAI model
-def download_file(url, download_sha256_hash, file_path, headers=None):
+def download_file(url, download_sha256_hash, file_path, headers=None) -> tuple[bool, str]:
     try:
         response = requests.get(url, stream=True, headers=headers)
         response.raise_for_status()
@@ -428,7 +428,7 @@ def check_downloaded_modelfile(model_path:str, download_sha256_hash:str, platfor
         })
 
     successfull_HashGeneration, model_sha256_hash = get_sha256_hash_from_file(model_path)
-    if successfull_HashGeneration and model_sha256_hash == download_sha256_hash:
+    if successfull_HashGeneration and model_sha256_hash.lower() == download_sha256_hash.lower():
         send_websocket_message('model_download_progress', {
             'percentage': 100,
             'stage': 'Complete',
@@ -6,7 +6,7 @@ import time
 
 from flask import jsonify
 from utils.websocket_utils import (send_websocket_message, active_websockets)
-from utils.app_configs import (get_app_configs, init_global_dict_from_file, pretty_dict)
+from utils.app_configs import (get_app_configs, load_global_dict_from_file, pretty_dict)
 
 ### shared_models-v0.9.2 by lutzapps, Nov 5th 2024 ###
 
@@ -189,7 +189,9 @@ SHARED_MODEL_FOLDERS = {
 # helper function called by "inline"-main() and ensure_shared_models_folders()
 def init_shared_models_folders(send_SocketMessage:bool=True):
     global SHARED_MODEL_FOLDERS
-    init_global_dict_from_file(SHARED_MODEL_FOLDERS, SHARED_MODEL_FOLDERS_FILE, "SHARED_MODEL_FOLDERS", SHARED_MODELS_DIR)
+    success, dict = load_global_dict_from_file(SHARED_MODEL_FOLDERS, SHARED_MODEL_FOLDERS_FILE, "SHARED_MODEL_FOLDERS", SHARED_MODELS_DIR)
+    if success:
+        SHARED_MODEL_FOLDERS = dict
 
     if os.path.exists(SHARED_MODEL_FOLDERS_FILE) and send_SocketMessage:
         send_websocket_message('extend_ui_helper', {
@@ -341,7 +343,9 @@ def sync_with_app_configs_install_dirs():
 # NOTE: this APP_INSTALL_DIRS_FILE is temporary synced with the app_configs dict
 def init_app_install_dirs():
     global APP_INSTALL_DIRS
-    init_global_dict_from_file(APP_INSTALL_DIRS, APP_INSTALL_DIRS_FILE, "APP_INSTALL_DIRS", SHARED_MODELS_DIR)
+    success, dict = load_global_dict_from_file(APP_INSTALL_DIRS, APP_INSTALL_DIRS_FILE, "APP_INSTALL_DIRS", SHARED_MODELS_DIR)
+    if success:
+        APP_INSTALL_DIRS = dict
 
     return
 
@@ -496,7 +500,9 @@ SHARED_MODEL_APP_MAP = {
 # which does a default mapping from app code or (if exists) from external JSON 'SHARED_MODEL_APP_MAP_FILE' file
 def init_shared_model_app_map():
     global SHARED_MODEL_APP_MAP
-    init_global_dict_from_file(SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, "SHARED_MODEL_APP_MAP", SHARED_MODELS_DIR)
+    success, dict = load_global_dict_from_file(SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, "SHARED_MODEL_APP_MAP", SHARED_MODELS_DIR)
+    if success:
+        SHARED_MODEL_APP_MAP = dict
 
     return
 
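Note: load_global_dict_from_file() lives in utils/app_configs.py and is not shown in this diff; from the three call sites above its contract is evidently "return (success, dict) and let the caller assign the global" instead of mutating it in place. A minimal sketch consistent with those call sites (signature inferred, internals assumed):

    import json, os

    def load_global_dict_from_file(default_dict: dict, file_path: str,
                                   dict_name: str, base_dir: str) -> tuple[bool, dict]:
        # base_dir mirrors the call sites above (unused in this sketch)
        try:
            if not os.path.exists(file_path):
                return True, default_dict  # no JSON override yet, keep the code defaults
            with open(file_path) as f:
                loaded_dict = json.load(f)
            print(f"'{dict_name}' loaded from '{file_path}'")
            return True, loaded_dict
        except Exception as e:
            print(f"ERROR loading '{dict_name}' from '{file_path}': {e}")
            return False, default_dict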