Mirror of https://github.com/kodxana/madiator-docker-runpod.git (synced 2024-11-25 04:10:12 +01:00)

Commit 6c492fb3b5 (parent 45b3e1f247)
faster venv downloader and unpacker, new kohya (with Flux.1) app

9 changed files with 1030 additions and 176 deletions
@@ -49,18 +49,43 @@
         }
     ],
     "ports": [
+        // NOTE: during debugging, "start.sh" does *not* run, and the following apps are not available right now:
+        // {
+        //     "containerPort": 22, // SSH
+        //     "hostPort": 22
+        // },
         {
            "containerPort": 7222, // main Flask app port "App-Manager"
            "hostPort": 7222
        },
        {
            "containerPort": 8181, // File-Browser
            "hostPort": 8181
        },
        {
            "containerPort": 7777, // VSCode-Server
            "hostPort": 7777
-        }
+        },
+        {
+            "containerPort": 3000, // ComfyUI
+            "hostPort": 3000
+        },
+        {
+            "containerPort": 6006, // Tensorboard (needed by kohya_ss)
+            "hostPort": 6006
+        },
+        {
+            "containerPort": 7860, // Kohya-ss (lutzapps - added new Kohya app with FLUX support)
+            "hostPort": 7860
+        },
+        {
+            "containerPort": 7862, // Forge (aka Stable-Diffusion-WebUI-Forge)
+            "hostPort": 7862
+        },
+        {
+            "containerPort": 7863, // A1111 (aka Stable-Diffusion-WebUI)
+            "hostPort": 7863
+        }
     ]
 },
 "python": {
@@ -1,6 +1,12 @@
 # lutzapps - use the specified CUDA version
 ARG BASE_IMAGE
 FROM ${BASE_IMAGE:-madiator2011/better-base:cuda12.4} AS base
+
+# lutzapps - pass the build-arg into the docker ENV as reference
+# as BASE_IMAGE is in the "global scope" of the Dockerfile, it needs to be "consumed" to be available in this stage
+ARG BASE_IMAGE
+ENV BASE_IMAGE=$BASE_IMAGE
+
 #FROM madiator2011/better-base:cuda12.4 AS base
 
 # lutzapps - prepare for local development and debugging
@@ -10,16 +16,16 @@ FROM ${BASE_IMAGE:-madiator2011/better-base:cuda12.4} AS base
 
 # Install Python 3.11, set it as default, and remove Python 3.10
 RUN apt-get update && \
     # removed: 2x git nginx ffmpeg (as they are already installed with the base image)
-    # added: zip (for easier folder compression)
-    apt-get install -y python3.11 python3.11-venv python3.11-dev python3.11-distutils aria2 zip \
-    pv rsync zstd libtcmalloc-minimal4 bc && \
+    # added: pigz (for parallel execution of TAR files); zip (for easier folder compression)
+    apt-get install -y python3.11 python3.11-venv python3.11-dev python3.11-distutils \
+    aria2 pigz zip pv rsync zstd libtcmalloc-minimal4 bc && \
     apt-get remove -y python3.10 python3.10-minimal libpython3.10-minimal libpython3.10-stdlib && \
     update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1 && \
     update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1 && \
     apt-get autoremove -y && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/*
 
 # Install pip for Python 3.11
 RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
@@ -43,13 +43,14 @@ Part of the `madiator-docker-runpod` family of **RunPod Docker Containers for R
 https://github.com/kodxana/madiator-docker-runpod<br>
 found under the directory `official-templates/better-ai-launcher`
 
-### Build Vars ###
-IMAGE_BASE=madiator2011/better-launcher<br>
-IMAGE_TAG=dev
-
 ## Build Options
 To build with default options, run `docker buildx bake`; to build a specific target, run `docker buildx bake <target>`.
 
+### Build Vars (based on bake selection)
+BASE_IMAGE=`$BASE_IMAGE`, e.g.<br>
+BASE_IMAGE=madiator2011/better-base:cuda12.4
+
+
 ## Ports (System)
 
 - 22/tcp (SSH)
@@ -60,13 +61,11 @@ To build with default options, run `docker buildx bake`; to build a specific tar
 ## Ports (Apps)
 
 - 3000/http (ComfyUI)
+- 6006/http (Tensorboard [needed by kohya_ss])
+- 7860/http (Kohya-ss) with FLUX.1 support
 - 7862/http (Forge) aka Stable-Diffusion-WebUI-Forge
 - 7863/http (A1111) aka Stable-Diffusion-WebUI
 
-*coming soon*
-- 7864/http (Kohya-ss)
-- 6006/http (Tensorboard)
-
 ## ENV Vars (System)
 
 These ENV vars go into the docker container to support local debugging:<br>
@@ -1,6 +1,7 @@
 import os
 import xml.etree.ElementTree as ET
 import requests
+import json
 
 def fetch_app_info():
     url = "https://better.s3.madiator.com/"
@@ -12,8 +13,16 @@ def fetch_app_info():
         key = content.find('{http://s3.amazonaws.com/doc/2006-03-01/}Key').text
         size = int(content.find('{http://s3.amazonaws.com/doc/2006-03-01/}Size').text)
         app_name = key.split('/')[0]
+
+        # lutzapps - fix "bug" in the key element of the S3 XML document
+        # the other three apps have a "key" element like "bcomfy/bcomfy.tar.gz" or "bforge/bforge.tar.gz",
+        # with their "app_name" prefix + "/" + tar_filename
+        # only kohya is missing this "app_name" prefix and has a key element of only its tar_filename "bkohya.tar.gz"
+        # this results in the app_name "bkohya.tar.gz", instead of only "bkohya"
+        # TODO for madiator - move the "bkohya.tar.gz" into a subfolder "bkohya" in your S3 bucket
+        app_name = app_name.replace(".tar.gz", "") # cut any extension postfix resulting from the wrong key.split() command
 
-        if app_name in ['ba1111', 'bcomfy', 'bforge']:
+        if app_name in ['ba1111', 'bcomfy', 'bforge', 'bkohya']: # lutzapps - added new kohya app
             app_info[app_name] = {
                 'download_url': f"https://better.s3.madiator.com/{key}",
                 'size': size
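
For illustration, a minimal standalone sketch (not part of the commit) of what the key-parsing fix above corrects:

```python
# sketch: why key.split('/')[0] misbehaves for the prefix-less bkohya S3 key
keys = ["bcomfy/bcomfy.tar.gz", "bforge/bforge.tar.gz", "bkohya.tar.gz"]

for key in keys:
    app_name = key.split('/')[0]                 # "bcomfy", "bforge", but also "bkohya.tar.gz"
    app_name = app_name.replace(".tar.gz", "")   # normalizes the kohya case to "bkohya"
    print(key, "->", app_name)

# bcomfy/bcomfy.tar.gz -> bcomfy
# bforge/bforge.tar.gz -> bforge
# bkohya.tar.gz -> bkohya
```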
@@ -42,6 +51,13 @@ app_configs = {
         'venv_path': '/workspace/ba1111',
         'app_path': '/workspace/stable-diffusion-webui',
         'port': 7863,
+    },
+    'bkohya': {
+        'name': 'Better Kohya',
+        'command': 'cd /workspace/bkohya && . ./bin/activate && cd /workspace/kohya_ss && ./gui.sh --listen --port 7860',
+        'venv_path': '/workspace/bkohya',
+        'app_path': '/workspace/kohya_ss',
+        'port': 7860,
     }
 }
 
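
The new `command` string chains the venv activation and the Kohya GUI start. A hedged sketch of how such a command string could be launched; the actual launcher code is not part of this diff, so the `subprocess` call below is an assumption, not the app manager's real implementation:

```python
import subprocess

# hypothetical launcher sketch - the real app-manager launch code is not shown in this commit
command = app_configs['bkohya']['command']  # 'cd /workspace/bkohya && . ./bin/activate && ...'

# '&&' chaining and '. ./bin/activate' require a POSIX shell, hence shell=True
process = subprocess.Popen(command, shell=True, executable='/bin/bash')
```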
@@ -63,3 +79,220 @@ def remove_app_config(app_name):
 
 # Update app_configs when this module is imported
 update_app_configs()
+
+
+### lutzapps section
+# helper function called by init_app_install_dirs(), init_shared_model_app_map(), init_shared_models_folders() and init_debug_settings()
+def write_dict_to_jsonfile(dict:dict, json_filepath:str, overwrite:bool=False) -> bool:
+    # Convert the 'dict' to JSON, and write the JSON object to file 'json_filepath'
+
+    #json_string = json.dumps(dict, indent=4, ensure_ascii=False, sort_keys=True)
+
+    try:
+        if os.path.exists(json_filepath) and not overwrite:
+            error_msg = f"dictionary file '{json_filepath}' already exists (and overwrite={overwrite})"
+            #print(error_msg)
+
+            return False, error_msg # failure
+
+        # Write the JSON data to a file
+        with open(json_filepath, 'w', encoding='utf-8') as output_file:
+            json.dump(dict, output_file, ensure_ascii=False, indent=4, separators=(',', ': '))
+
+    except Exception as e:
+        error_msg = f"ERROR in shared_models:write_dict_to_jsonfile() - loading JSON Map File '{json_filepath}'\nException: {str(e)}"
+        print(error_msg)
+
+        return False, error_msg # failure
+
+    return True, "" # success
+
+# helper function called by init_app_install_dirs(), init_shared_model_app_map(), init_shared_models_folders() and init_debug_settings()
+def read_dict_from_jsonfile(json_filepath:str) -> dict:
+    # Read JSON file from 'json_filepath' and return it as 'dict'
+
+    try:
+        if os.path.exists(json_filepath):
+            with open(json_filepath, 'r') as input_file:
+                dict = json.load(input_file)
+        else:
+            error_msg = f"dictionary file '{json_filepath}' does not exist"
+            #print(error_msg)
+
+            return {}, error_msg # failure
+
+    except Exception as e:
+        error_msg = f"ERROR in shared_models:read_dict_from_jsonfile() - loading JSON Map File '{json_filepath}'\nException: {str(e)}"
+        print(error_msg)
+
+        return {}, error_msg # failure
+
+    return dict, "" # success
+
+# helper function to return a pretty formatted DICT string for human consumption (Logs, JSON)
+def pretty_dict(dict:dict) -> str:
+    dict_string = json.dumps(dict, ensure_ascii=False, indent=4, separators=(',', ': '))
+
+    return dict_string
+
+# helper function for init_app_install_dirs(), init_shared_model_app_map(), init_shared_models_folders() and init_debug_settings()
+def init_global_dict_from_file(dict:dict, dict_filepath:str, dict_description:str, SHARED_MODELS_DIR:str="") -> bool:
+    # load or initialize the 'dict' for 'dict_description' from 'dict_filepath'
+
+    try:
+        if not SHARED_MODELS_DIR == "" and not os.path.exists(SHARED_MODELS_DIR):
+            print(f"\nThe SHARED_MODELS_DIR '{SHARED_MODELS_DIR}' is not found!\nCreate it by clicking the 'Create Shared Folders' button from the WebUI 'Settings' Tab\n")
+
+            return
+
+        if os.path.isfile(dict_filepath) and os.path.exists(dict_filepath):
+            dict_filepath_found = True
+            # read the dict_description from JSON file
+            print(f"\nExisting '{dict_description}' found and read from file '{dict_filepath}'\nThe file overwrites the code defaults!")
+
+            dict, error_msg = read_dict_from_jsonfile(dict_filepath)
+            if not error_msg == "":
+                print(error_msg)
+
+        else: # init the dict_description from app code
+            dict_filepath_found = False
+            print(f"No {dict_description}_FILE found, initializing default '{dict_description}' from code ...")
+            # use the already defined dict from app code
+            # write the dict to JSON file
+            success, ErrorMsg = write_dict_to_jsonfile(dict, dict_filepath)
+
+            if success:
+                print(f"'{dict_description}' is initialized and written to file '{dict_filepath}'")
+            else:
+                print(ErrorMsg)
+
+        # Convert the 'dict_description' dictionary to formatted JSON
+        print(f"\nUsing {'external' if dict_filepath_found else 'default'} '{dict_description}':\n{pretty_dict(dict)}")
+
+    except Exception as e:
+        error_msg = f"ERROR in shared_models:init_global_dict_from_file() - initializing dict Map File '{dict_filepath}'\nException: {str(e)}"
+        print(error_msg)
+
+        return False, error_msg
+
+    return True, "" # success
+
+DEBUG_SETTINGS_FILE = "/workspace/_debug_settings.json"
+DEBUG_SETTINGS = {
+    # these settings will be READ:
+    "manifests": { # uncompressed sizes of the tar-files
+        "bcomfy": {
+            "venv_uncompressed_size": 6155283197,
+            "sha256_hash": ""
+        },
+        "ba1111": {
+            "venv_uncompressed_size": 6794355530,
+            "sha256_hash": ""
+        },
+        "bforge": {
+            "venv_uncompressed_size": 7689838771,
+            "sha256_hash": ""
+        },
+        "bkohya": {
+            "venv_uncompressed_size": 12192767148,
+            "sha256_hash": ""
+        }
+    },
+    "installer_codeversion": "2", # can be "1" (original) or "2" (fast)
+    "delete_tarfile_after_download": "1", # can be set to "0" to test only local unpack time and github setup
+    "use_bkohya_tar_folder_fix": "1", # the fix unpacks to "/workspace" and not to "/workspace/bkohya"
+    "use_bkohya_local_venv_symlink": "1", # when active, creates a folder symlink "venv" in "/workspace/kohya_ss" -> "/workspace/bkohya" VENV
+    # these settings will be WRITTEN:
+    "used_local_tar": "0", # works together with the above TAR local caching
+    "app_name": "",
+    "tar_filename": "",
+    "download_url": "",
+    "total_duration_download": "0",
+    "total_duration_unpack": "0",
+    "total_duration": "0"
+}
+
+def init_debug_settings():
+    global DEBUG_SETTINGS
+    init_global_dict_from_file(DEBUG_SETTINGS, DEBUG_SETTINGS_FILE, "DEBUG_SETTINGS")
+
+    # read from DEBUG_SETTINGS
+    # installer_codeversion = DEBUG_SETTINGS['installer_codeversion'] # read from DEBUG_SETTINGS
+
+    # write to DEBUG_SETTINGS
+    # write_debug_setting('app_name', "test") # write to DEBUG_SETTINGS
+    return
+
+def write_debug_setting(setting_name:str, setting_value:str):
+    global DEBUG_SETTINGS
+    #DEBUG_SETTINGS = read_dict_from_jsonfile(DEBUG_SETTINGS_FILE)
+    DEBUG_SETTINGS[setting_name] = setting_value
+    write_dict_to_jsonfile(DEBUG_SETTINGS, DEBUG_SETTINGS_FILE, overwrite=True)
+
+
+# lutzapps - init some settings from the DEBUG_SETTINGS_FILE
+init_debug_settings()
+
+# lutzapps - add kohya_ss support and the required local VENV
+def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
+    # as kohya_ss' "setup.sh" assumes a "local" VENV under "/workspace/kohya_ss/venv",
+    # we will create a folder symlink "/workspace/kohya_ss/venv" -> "/workspace/bkohya"
+    # to our global VENV and rename the original "venv" folder to "venv(BAK)"
+
+    if not DEBUG_SETTINGS['use_bkohya_local_venv_symlink'] == "1":
+        return True, "" # do not fix the local KOHYA_SS VENV
+
+    import shutil
+
+    try:
+        app_configs = get_app_configs()
+        bapp_name = "bkohya"
+        bapp_venv_path = app_configs[bapp_name]["venv_path"] # '/workspace/bkohya'
+        bapp_app_path = app_configs[bapp_name]["app_path"] # '/workspace/kohya_ss'
+        bapp_app_path_venv = f"{bapp_app_path}/venv" # '/workspace/kohya_ss/venv'
+
+        if not os.path.exists(bapp_app_path): # kohya is not installed
+            return True, "" # no need to fix the local KOHYA VENV
+
+        # kohya is installed and has a local "venv" folder
+        if os.path.exists(bapp_app_path_venv) and os.path.isdir(bapp_app_path_venv):
+
+            # check if this local VENV is a folder symlink targeting our bkohya global VENV at venv_path
+            if os.path.islink(bapp_app_path_venv):
+                success_message = f"kohya_ss local venv folder '{bapp_app_path_venv}' is already symlinked"
+
+                print(success_message)
+                return True, success_message
+
+            # not a folder symlink, but a physical folder,
+            ### rename the existing venv folder to BAK (collision-free)
+            bak_venv_path = f"{bapp_app_path_venv}(BAK)"
+            i = 0
+            suffix = ""
+            while os.path.exists(f"{bak_venv_path}{suffix}"):
+                i += 1
+                suffix = str(i)
+
+            bak_venv_path += suffix # free target name for "rename"
+            shutil.move(bapp_app_path_venv, bak_venv_path) # move=rename
+
+            print(f"local venv folder '{bapp_app_path_venv}' detected and renamed to '{bak_venv_path}'")
+
+        ### create a folder symlink for kohya's "local" venv dir
+        # check that the src-folder of the downloaded kohya venv exists
+        if os.path.exists(bapp_venv_path): # src_path to the downloaded bkohya venv exists
+            # create a folder symlink for the kohya local venv dir
+            os.symlink(bapp_venv_path, bapp_app_path_venv, target_is_directory=True)
+            success_message = f"created a symlink for kohya_ss local venv folder: '{bapp_venv_path}' -> '{bapp_app_path_venv}'"
+            print(success_message)
+
+            return True, success_message
+
+    except Exception as e:
+        error_message = f"ensure_kohya_local_venv_is_symlinked() failed: {str(e)}"
+        print(error_message)
+
+        return False, error_message
+
+# lutzapps - add kohya_ss venv support
+ensure_kohya_local_venv_is_symlinked()
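
All of the JSON helpers above report errors as `(value, error_msg)` tuples instead of raising. A minimal usage sketch (the file path is illustrative, not from the commit):

```python
# sketch: round-tripping a settings dict through the helpers added above
settings = {"installer_codeversion": "2"}

ok, err = write_dict_to_jsonfile(settings, "/tmp/_example_settings.json", overwrite=True)
if not ok:
    print(err)

loaded, err = read_dict_from_jsonfile("/tmp/_example_settings.json")
if err == "":
    print(pretty_dict(loaded))  # pretty-printed JSON of the loaded dict
```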
@@ -9,7 +9,9 @@ import requests
 import traceback
 from tqdm import tqdm
 import xml.etree.ElementTree as ET
+
 import time
+import datetime
 import shutil
 
 INSTALL_STATUS_FILE = '/tmp/install_status.json'
@@ -135,6 +137,586 @@ def get_install_status(app_name):
     except (FileNotFoundError, json.JSONDecodeError):
         return {'status': 'not_started', 'progress': 0, 'stage': ''}
+
+
+# lutzapps - fast version of download_and_unpack_venv()
+import subprocess
+
+# currently not used
+# import threading
+# from queue import Queue, Empty
+# from concurrent.futures import ThreadPoolExecutor
+
+import time
+
+### lutzapps
+# helper function for threaded STDOUT/STDERR streaming into QUEUES for later consumption
+# currently not used
+
+# this is called by the thread_output_reader
+# def enqueue_output(file, queue):
+#     for line in iter(file.readline, b''): # read the streaming file until the end (byte '')
+#         queue.put(line.decode('utf-8')) # and put it in the queue as UTF-8 string (and not as byte array)
+
+# def read_open_pipes(process):
+#     with ThreadPoolExecutor(2) as pool:
+#         queue_stdout, queue_stderr = Queue(), Queue()
+
+#         pool.submit(enqueue_output, process.stdout, queue_stdout)
+#         pool.submit(enqueue_output, process.stderr, queue_stderr)
+
+#         while True:
+#             if process.poll() is not None and queue_stdout.empty() and queue_stderr.empty():
+#                 break # exit the loop when the process has exited and no output is left in STDOUT or STDERR
+
+#             out_line = err_line = ''
+
+#             try:
+#                 out_line = queue_stdout.get_nowait()
+#             except Empty:
+#                 pass
+#             try:
+#                 err_line = queue_stderr.get_nowait()
+#             except Empty:
+#                 pass
+
+#             yield (out_line.rstrip(), err_line.rstrip())
+
+# this is the v2 ("fast") version of "download_and_unpack_venv()" - can be (de-)activated in the DEBUG_SETTINGS dict
+def download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message) -> tuple[bool, str]:
+    app_config = app_configs.get(app_name)
+    if not app_config:
+        return False, f"App '{app_name}' not found in configurations."
+
+    venv_path = app_config['venv_path']
+    app_path = app_config['app_path']
+    download_url = app_config['download_url']
+    total_size = app_config['size']
+    tar_filename = os.path.basename(download_url)
+    workspace_dir = '/workspace'
+    downloaded_file = os.path.join(workspace_dir, tar_filename)
+
+    from utils.app_configs import (DEBUG_SETTINGS, pretty_dict, init_debug_settings, write_debug_setting, ensure_kohya_local_venv_is_symlinked)
+    # load the latest configured DEBUG_SETTINGS from the stored settings of the DEBUG_SETTINGS_FILE
+    init_debug_settings()
+    # show the currently used DEBUG_SETTINGS
+    print(f"\nCurrently using 'DEBUG_SETTINGS':\n{pretty_dict(DEBUG_SETTINGS)}")
+
+    write_debug_setting('tar_filename', tar_filename)
+    write_debug_setting('download_url', download_url)
+
+    try:
+        save_install_status(app_name, 'in_progress', 0, 'Downloading')
+        send_websocket_message('install_log', {'app_name': app_name, 'log': f'Downloading {total_size / (1024 * 1024):.2f} MB ...'})
+
+        start_time_download = time.time()
+
+        # debug with an existing local cached TAR file
+        if os.path.exists(downloaded_file):
+            write_debug_setting('used_local_tar', "1") # indicate using the cached TAR file
+            send_websocket_message('install_log', {'app_name': app_name, 'log': f"Used cached local tarfile '{downloaded_file}'"})
+        else:
+            write_debug_setting('used_local_tar', "0") # indicate no cached TAR file found
+
+            try: ### download with ARIA2C
+
+                # -x (--max-connection-per-server=) 16
+                ### bash version with progress file
+                ### aria2c --max-connection-per-server=16 --max-concurrent-downloads=16 --split=16 --summary-interval=1 https://better.s3.madiator.com/bkohya.tar.gz --dir=/workspace > /tmp/download-progress.txt &
+                ### view the file with "tail --follow /tmp/download-progress.txt" or "tail -n 2 /tmp/download-progress.txt"
+
+                ### python version with stdout
+                ### aria2c -x 16 -j 16 -s 16 --summary-interval=1 https://better.s3.madiator.com/bkohya.tar.gz --dir=/workspace
+
+                # start aria2c with 16 download threads and write a summary every 1s to stdout for the progress indicator
+                cmd_line = f"aria2c --max-connection-per-server=16 --max-concurrent-downloads=16 --split=16 --summary-interval=1 {download_url} --dir={workspace_dir}"
+                print(f"start DOWNLOAD with cmd '{cmd_line}'")
+
+                cmd = cmd_line.split(" ") # the cmdline args need to be a list of strings
+
+                # start the download
+                # download_process = subprocess.Popen(cmd_line, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+                # download_isrunning = (download_process and download_process.poll())
+
+                print(f"stage: 'Downloading', launching cmd: '{cmd_line}'")
+                with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True) as download_process:
+                    # this is the main loop during download
+                    for line in download_process.stdout:
+                        download_line = line.rstrip()
+
+                        # capture download progress
+                        # tail -n 6 /tmp/download-progress.txt
+                        # During Download (last 6 lines):
+                        # --->
+                        # *** Download Progress Summary as of Sat Nov 2 17:23:16 2024 ***
+                        # ===============================================================================
+                        # [#cd57da 2.1GiB/4.0GiB(53%) CN:16 DL:1.9MiB ETA:16m19s]
+                        # FILE: /workspace/bkohya.tar.gz
+                        # -------------------------------------------------------------------------------
+                        # <---
+
+                        # When ready (last 6 lines):
+                        # Download Results:
+                        # --->
+                        # gid   |stat|avg speed  |path/URI
+                        # ======+====+===========+=======================================================
+                        # cd57da|OK  |   1.6MiB/s|/workspace/bkohya.tar.gz
+
+                        # Status Legend:
+                        # (OK):download completed.
+                        # <---
+
+                        download_running_line_pattern = r"\[#(\w+)\s+(\d+\.?\d*)\s*([GMK]iB)/(\d+\.?\d*)\s*([GMK]iB)\((\d+)%\)\s+CN:(\d+)\s+DL:(\d+\.?\d*)\s*([GMK]iB)\s+ETA:(?:(\d+)h)?(?:(\d+)m)?(?:(\d+)s)?\]"
+
+                        # download_finished_line = "cd57da|OK  |   1.6MiB/s|/workspace/bkohya.tar.gz"
+                        download_finished_line_pattern = r"([a-fA-F0-9]{6})\|(\w+)\s*\|\s*([\d.]+[GMK]iB/s)\|(.+)"
+
+                        # try to find the download_running_line_pattern
+                        match = re.match(download_running_line_pattern, download_line)
+                        if match: # download_running_line_pattern
+                            gid = match.group(1) # e.g., "cd57da"
+                            downloaded_size_value = match.group(2) # e.g., "2.1"
+                            downloaded_size_unit = match.group(3) # e.g., "GiB"
+                            total_size_value = match.group(4) # e.g., "4.0"
+                            total_size_unit = match.group(5) # e.g., "GiB"
+                            percentage = int(match.group(6)) # e.g., "53"
+                            connection_count = int(match.group(7)) # e.g., "16"
+                            download_rate_value = match.group(8) # e.g., "1.9"
+                            download_rate_unit = match.group(9) # e.g., "MiB"
+                            eta_hours = int(match.group(10) or 0) # e.g., None if no hours present, or the hour value if present
+                            eta_minutes = int(match.group(11) or 0) # e.g., "16" or None
+                            eta_seconds = int(match.group(12) or 0) # e.g., "19" or None
+
+                            # format "2.1GiB" as "2.1 GiB"
+                            downloaded_size_formatted = f"{downloaded_size_value} {downloaded_size_unit}"
+
+                            # format "1.9MiB" as "1.9 MiB/s"
+                            download_rate_formatted = f"{download_rate_value} {download_rate_unit}/s"
+
+                            # calculate the eta in seconds
+                            eta = eta_hours * 3600 + eta_minutes * 60 + eta_seconds
+
+                            ### original code
+                            #speed = downloaded_size / elapsed_time # bytes/sec
+                            #percentage = (downloaded_size / total_size) * 100
+                            #eta = (total_size - downloaded_size) / speed if speed > 0 else 0 # sec
+
+                            send_websocket_message('install_progress', {
+                                'app_name': app_name,
+                                'percentage': percentage,
+                                'speed': download_rate_formatted, # f"{speed / (1024 * 1024):.2f} MiB/s",
+                                'eta': f"{eta:.0f}",
+                                'stage': 'Downloading',
+                                'downloaded': downloaded_size_formatted # f"{downloaded_size / (1024 * 1024):.2f} MB"
+                            })
+
+                        else: # then try to find the download_finished_line_pattern
+                            match = re.match(download_finished_line_pattern, download_line)
+                            if match: # download_finished_line_pattern
+                                finish_gid = match.group(1) # cd57da
+                                status = match.group(2) # OK
+                                speed = match.group(3) # 1.6MiB/s (GiB/s, MiB/s, or KiB/s)
+                                finish_downloaded_file = match.group(4) # /workspace/bkohya.tar.gz
+
+                                if finish_gid == gid and finish_downloaded_file == download_url:
+                                    download_isrunning = False # exit the download_isrunning loop
+
+                        # else any other line in stdout (which we do not process)
+
+                    download_process.wait() # let the process finish
+                    rc = download_process.returncode # and get the return code
+
+                # delete the temporary ".aria2" file
+                if os.path.exists(f"{tar_filename}.aria2"):
+                    os.remove(f"{tar_filename}.aria2")
+
+            except Exception as e:
+                error_msg = f"ERROR in download_and_unpack_venv_fastversion():download with ARIA2C\ncmdline: '{cmd_line}'\nException: {str(e)}"
+                print(error_msg)
+
+                error_message = f"Downloading VENV failed: {download_process.stderr.read() if download_process.stderr else 'Unknown error'}"
+                send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
+                save_install_status(app_name, 'failed', 0, 'Failed')
+
+                return False, error_message
+
+        ### original (slow) download code
+        # response = requests.get(download_url, stream=True)
+        # response.raise_for_status()
+
+        # block_size = 8192
+        # downloaded_size = 0
+        # start_time = time.time()
+
+        # with open(downloaded_file, 'wb') as file:
+        #     for chunk in response.iter_content(chunk_size=block_size):
+        #         if chunk:
+        #             file.write(chunk)
+        #             downloaded_size += len(chunk)
+        #             current_time = time.time()
+        #             elapsed_time = current_time - start_time
+
+        #             if elapsed_time > 0:
+        #                 speed = downloaded_size / elapsed_time
+        #                 percentage = (downloaded_size / total_size) * 100
+        #                 eta = (total_size - downloaded_size) / speed if speed > 0 else 0
+
+        #                 send_websocket_message('install_progress', {
+        #                     'app_name': app_name,
+        #                     'percentage': round(percentage, 2),
+        #                     'speed': f"{speed / (1024 * 1024):.2f} MB/s",
+        #                     'eta': f"{eta:.0f}",
+        #                     'stage': 'Downloading',
+        #                     'downloaded': f"{downloaded_size / (1024 * 1024):.2f} MB"
+        #                 })
+
+        if not os.path.exists(downloaded_file):
+            error_message = f"Downloading VENV failed, file '{downloaded_file}' does not exist, skipping 'Decompression' stage"
+            send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
+            save_install_status(app_name, 'failed', 0, 'Failed')
+
+            return False, error_message
+
+        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Download completed. Starting unpacking...'})
+        send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 100, 'stage': 'Download Complete'})
+
+        total_duration_download = f"{datetime.timedelta(seconds=int(time.time() - start_time_download))}"
+        write_debug_setting('total_duration_download', total_duration_download)
+        print(f"download ran {total_duration_download} for app '{app_name}'")
+
+
+        ### Decompression Stage (Unpacking the downloaded VENV)
+        start_time_unpack = time.time()
+
+        # lutzapps - fix TAR packaging bug (compressed from the workspace root instead of the bkohya VENV folder)
+        # e.g. "bkohya/bin/activate", together with venv_path ("/workspace/bkohya"), ends up as "/workspace/bkohya/bkohya/bin/activate"
+        # TODO: need to repackage the Kohya VENV correctly and then remove this fix!!!
+
+        if app_name == "bkohya" and DEBUG_SETTINGS['use_bkohya_tar_folder_fix'] == "1":
+            venv_path = "/workspace" # then extracts correctly to '/workspace/bkohya', instead of '/workspace/bkohya/bkohya'
+
+        # Create the TAR from the VENV's current directory:
+        # cd ~/Projects/Docker/madiator/workspace/bkohya
+        # [tar -czf | pv > ~/Projects/Docker/madiator/workspace/bkohya.tar.gz . (not the smallest TAR)]
+        # tar -cvf - . | gzip -9 - | pv > ~/Projects/Docker/madiator/workspace/bkohya.tar.gz
+
+        # Ensure the venv directory exists
+        os.makedirs(f"{venv_path}/", exist_ok=True) # append a trailing "/" to make sure the last sub-folder is created
+
+        # Unpack the tar.gz file
+        send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 0, 'stage': 'Unpacking'})
+
+
+        ### getting STATISTICS stage
+        # first we need the statistics of the tar.GZ file (statically or with the help of "gzip")
+        #
+        # NOTE: if we put this info already in the XML manifest, we could even skip the STATISTICS stage
+        # but it is very fast anyway
+        # we could also add a SHA256 hash to the XML manifest and verify the downloaded tar against this hash
+        # same as we already do for model file downloads
+
+        # VENV uncompressed sizes (in bytes) of the TAR GZ files:
+        # 'bcomfy': 6155283197
+        # 'ba1111': 6794355530
+        # 'bforge': 7689838771
+        # 'bkohya': 12192767148
+
+        uncompressed_size_bytes = DEBUG_SETTINGS["manifests"][app_name]["venv_uncompressed_size"]
+
+        #sha256_hash = DEBUG_SETTINGS["manifests"][app_name]["sha256_hash"]
+        # TODO: create with 'shasum -a 256 xxx.tar.gz'
+
+        ### NOTE: as it turns out, GZIP has problems with files bigger than 2 or 4 GB due to internal field bit restrictions
+
+        # cmd_line = f"gzip -l {downloaded_file}" # e.g. for 'ba1111.tar.gz'
+        # cmd = cmd_line.split(" ") # the cmdline args need to be a list of strings
+
+        # compressed_size_bytes = 0
+        # uncompressed_size_bytes = 0
+
+        # line_number = 0
+        # unexpected_line_results = ""
+        # compression_header_line = "  compressed uncompressed  ratio uncompressed_name" # header line#0
+        # compression_info_line = "  3383946179   2543929344 -33.1% /workspace/ba1111.tar" # info line#1
+        # # or can be also "  6295309068   3707578368 -69.8% /workspace/bkohya.tar"
+
+        # compression_info_line_pattern = r"^\s*(\d+)\s+(\d+)\s+([+-]?\d+\.?\d*%)\s+(.+)"
+
+        # print(f"stage: 'Statistics', launching cmd: '{cmd_line}'")
+        # with subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True) as statistics_process:
+        #     #   compressed uncompressed  ratio uncompressed_name
+        #     #   3383946179   2543929344 -33.1% /workspace/ba1111.tar
+        #     for line in statistics_process.stdout:
+        #         if line_number == 0: # compression_header line
+        #             compression_header_line = line.rstrip()
+        #             print(compression_header_line)
+
+        #             if "uncompressed" in compression_header_line: # verify the header line
+        #                 line_number += 1
+        #                 continue # skip this header line
+        #             else: # unexpected result
+        #                 unexpected_line_results = compression_header_line
+        #         elif line_number == 1 and unexpected_line_results == "": # compression_info line
+        #             compression_info_line = line.rstrip()
+        #             print(compression_info_line)
+
+        #             # decode and extract the compression info for statistics in the main Decompression stage later
+        #             match = re.match(compression_info_line_pattern, compression_info_line)
+        #             if match:
+        #                 compressed_size_bytes = int(match.group(1)) # 3383946179
+        #                 uncompressed_size_bytes = int(match.group(2)) # 2543929344 or 0
+        #                 if uncompressed_size_bytes == 0: # the TAR file has no compression at all
+        #                     uncompressed_size_bytes = compressed_size_bytes # use the compressed_size_bytes also as uncompressed_size_bytes
+        #                 compression_ratio = match.group(3) # -33.1% or 0.0%
+        #                 uncompressed_name = match.group(4) # ba1111.tar (note: the name here is without .gz)
+
+        #         else: # more unexpected lines
+        #             unexpected_line_results += f"\n{line.rstrip()}"
+
+        #         line_number += 1
+
+        #     statistics_process.wait() # let the process finish
+        #     rc = statistics_process.returncode # and get the return code
+
+        # if (rc != 0) or (not unexpected_line_results == ""):
+        #     error_message = f"GetCompressionInfo failed: {statistics_process.stderr.read() if statistics_process.stderr else 'Unknown error'}\n{unexpected_line_results}"
+        #     send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
+        #     save_install_status(app_name, 'failed', 0, 'Failed')
+
+        #     return False, error_message
+
+
+        ### Stage Unpacking
+        try: # unpack with PIGZ, a parallel version of gzip. Although it only uses a single thread for decompression,
+             # it starts 3 additional threads for reading, writing, and check calculation
+
+            ### python version with stdout
+            ### pigz -dc /workspace/bkohya.tar.gz | pv | tar xf -
+
+            # PIGZ always outputs/extracts to the CURRENT directory!
+            # So make sure to change to the app's venv directory first!!!
+
+            # start PIGZ and use PV for progress data (could play with many more PV options)
+            cmd_line = f"cd {venv_path} && pigz -dc {downloaded_file} | pv | tar xf -" # note the last "-", which makes TAR read the archive from STDIN
+
+            # to SEE the PV values from code, we set up the 3 commands and pipes manually with subprocess.Popen
+
+            pigz_cmd_line = f"pigz -dc {downloaded_file}" # the decompressed TAR stream
+            pigz_cmd = pigz_cmd_line.split(" ")
+            pigz_process = subprocess.Popen(pigz_cmd, stdout=subprocess.PIPE, text=True) # and is passed to PV
+
+            # --force output (even if the process has no terminal); the progress-info is always passed thru STDERR, which we also pipe as text
+            pv_process = subprocess.Popen(["pv", "--force"], stdin=pigz_process.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
+
+            # capture decompression progress
+            # tail -n 2 /tmp/decompression-progress.txt
+            # During Decompression (last 2 lines):
+            # --->
+            # 11.5GiB 0:03:02 [64.2MiB/s] [              <=>                 ]
+            # 11.5GiB 0:03:06 [63.0MiB/s] [              <=>                 ]
+            # <---
+
+            # When ready (last 2 lines):
+            # --->
+            # ???
+            # ???
+            # <---
+
+            # decompression_running_line = "11.5GiB 0:03:02 [64.2MiB/s] [              <=>                 ]"
+            decompression_running_line_pattern = r"(\d+\.?\d*)\s*([GMK]iB)\s+(\d+:\d{2}:\d{2})\s+\[(\d+\.?\d*)\s*([GMK]iB/s)\]\s+\[([<=>\s]+)\]"
+
+            print(f"stage: 'Unpacking', launching cmd: '{cmd_line}'")
+            # When you pass shell=True, Popen expects a single string argument, not a cmd and arg string-list.
+            # with subprocess.Popen(cmd_line, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True, text=True) as decompression_process:
+            # the "-" makes TAR read the archive from STDIN, which it gets thru the pipe from PV; STDERR has the piped stderr from PV; CWD is the path to the VENV folder the TAR should extract to
+            with subprocess.Popen(["tar", "xf", "-"], stdin=pv_process.stdout, stdout=subprocess.PIPE, stderr=pv_process.stderr, text=True, cwd=venv_path) as decompression_process:
+                #stdout_piped, stderr_piped = decompression_process.communicate()
+
+                # close the piped stdouts
+                # pigz_process.stdout.close()
+                # pv_process.stdout.close()
+
+                for line in pv_process.stderr:
+                    decompression_line = line.rstrip()
+
+                    print(decompression_line) # show the progress in the log
+
+                    # try to find the decompression_running_line_pattern
+                    match = re.match(decompression_running_line_pattern, decompression_line)
+                    if match: # decompression_running_line_pattern
+                        decompression_size_bytes = float(match.group(1)) # e.g. "11.5"
+                        decompression_size_unit = match.group(2) # e.g. "GiB"
+                        decompression_time_string = match.group(3) # e.g. "0:03:02"
+                        decompression_speed = match.group(4) # e.g. "64.2"
+                        decompression_speed_unit = match.group(5) # e.g. "MiB/s"
+                        progress_bar = match.group(6) # e.g. "<=>"
+
+                        # calculate the decompression_size in bytes
+                        if decompression_size_unit == "KiB":
+                            decompression_size_bytes *= 1024
+                        elif decompression_size_unit == "MiB":
+                            decompression_size_bytes *= 1024 * 1024
+                        elif decompression_size_unit == "GiB":
+                            decompression_size_bytes *= 1024 * 1024 * 1024
+
+                        # calculate the percentage based on the total "uncompressed_size_bytes" we got from the 'STATISTICS' stage above
+                        percentage = min(int((decompression_size_bytes / uncompressed_size_bytes) * 100), 100)
+
+                        # format "64.2" + "MiB/s" as "64.2 MiB/s"
+                        decompression_speed_formatted = f"{decompression_speed} {decompression_speed_unit}"
+
+                        # no ETA in the 'Unpacking' stage (currently)
+                        # but we could "calculate" the ETA from the info we have, or add some "PV" options to get it easier
+
+                        ### original code
+                        #files_processed += 1
+                        #percentage = min(int((files_processed / total_files) * 100), 100)
+
+                        send_websocket_message('install_progress', {
+                            'app_name': app_name,
+                            'percentage': percentage,
+                            'stage': 'Unpacking',
+                            'processed': decompression_speed_formatted, # files_processed, # TODO: remove this later, as we do not have/need this info
+                            'total': "multiple" # total_files # TODO: remove this later, as we do not have/need this info
+                        })
+
+                        # another summary line (every 1s) for the install_log
+                        decompression_progress_details = f"{decompression_time_string} {percentage}% {int(decompression_size_bytes / (1024 * 1024))} / {int(uncompressed_size_bytes / (1024 * 1024))} MiB @ {decompression_speed} {decompression_speed_unit}"
+
+                        send_websocket_message('install_log', {'app_name': app_name, 'log': f"Unpacking: {decompression_progress_details}"})
+                        # index.html: div id="install-logs-bkohya" added line-per-line
+                        # index.html: speedDisplay.textContent = `Processed: ${data.processed} / ${data.total} files`;
+
+                    # else: # then try to find the decompression_finish_line_pattern
+                    #     match = re.match(decompression_finish_line_pattern, decompression_line)
+                    #     if match: # decompression_finish_line_pattern
+                    #         finish_gid = match.group(1) # cd57da
+                    #         status = match.group(2) # OK
+                    #         speed = match.group(3) # 1.6MiB/s (GiB/s, MiB/s, or KiB/s)
+                    #         finish_downloaded_file = match.group(4) # /workspace/bkohya.tar.gz
+
+                    #         if finish_gid == gid and finish_downloaded_file == download_url:
+                    #             decompression_isrunning = False # exit the decompression_isrunning loop
+
+                    # else any other line in stdout (which we do not process)
+
+        except Exception as e:
+            error_msg = f"ERROR in download_and_unpack_venv_fastversion():\ncmdline: '{cmd_line}'\nException: {str(e)}"
+            print(error_msg)
+
+        decompression_process.wait() # let the process finish
+        rc = decompression_process.returncode # and get the return code
+
+        total_duration_unpack = f"{datetime.timedelta(seconds=int(time.time() - start_time_unpack))}"
+        write_debug_setting('total_duration_unpack', total_duration_unpack)
+        print(f"unpack ran {total_duration_unpack} for app '{app_name}'")
+
+
+        if rc != 0:
+            error_message = f"Unpacking failed: {decompression_process.stderr.read() if decompression_process.stderr else 'Unknown error'}"
+            send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
+            save_install_status(app_name, 'failed', 0, 'Failed')
+
+            return False, error_message
+
+        send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 100, 'stage': 'Unpacking Complete'})
+
+        print(f"'DEBUG_SETTINGS' after this run:\n{pretty_dict(DEBUG_SETTINGS)}")
+
+
+        ### original "v1" code (very slow code because of the STATISTICS glory)
+
+        # unpack_command = f"tar -xzvf {downloaded_file} -C {venv_path}"
+        # process = subprocess.Popen(unpack_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
+
+        # total_files = sum(1 for _ in subprocess.Popen(f"tar -tvf {downloaded_file}", shell=True, stdout=subprocess.PIPE).stdout)
+        # files_processed = 0
+
+        # for line in process.stdout:
+        #     files_processed += 1
+        #     percentage = min(int((files_processed / total_files) * 100), 100)
+        #     send_websocket_message('install_progress', {
+        #         'app_name': app_name,
+        #         'percentage': percentage,
+        #         'stage': 'Unpacking',
+        #         'processed': files_processed,
+        #         'total': total_files
+        #     })
+        #     send_websocket_message('install_log', {'app_name': app_name, 'log': f"Unpacking: {line.strip()}"})
+
+        # process.wait()
+        # rc = process.returncode
+
+
+        ### installing the App from GITHUB
+        # Clone the repository if it doesn't exist
+        if not os.path.exists(app_path):
+            send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cloning repository...'})
+
+            repo_url = ''
+            if app_name == 'bcomfy':
+                repo_url = 'https://github.com/comfyanonymous/ComfyUI.git'
+            elif app_name == 'bforge':
+                repo_url = 'https://github.com/lllyasviel/stable-diffusion-webui-forge.git'
+            elif app_name == 'ba1111':
+                repo_url = 'https://github.com/AUTOMATIC1111/stable-diffusion-webui.git'
+            elif app_name == 'bkohya': # lutzapps - added new Kohya app
+                repo_url = 'https://github.com/bmaltais/kohya_ss.git'
+
+            try: # add a repo assignment for Kohya
+                repo = git.Repo.clone_from(repo_url, app_path, progress=lambda op_code, cur_count, max_count, message: send_websocket_message('install_log', {
+                    'app_name': app_name,
+                    'log': f"Cloning: {cur_count}/{max_count} {message}"
+                }))
+                send_websocket_message('install_log', {'app_name': app_name, 'log': 'Repository cloned successfully.'})
+
+                # lutzapps - make sure we use Kohya with FLUX support
+                if app_name == 'bkohya':
+                    branch_name = "sd3-flux.1" # this branch also uses a "sd-scripts" branch "SD3" automatically
+                    repo.git.checkout(branch_name)
+
+                # Clone ComfyUI-Manager for Better ComfyUI
+                if app_name == 'bcomfy':
+                    custom_nodes_path = os.path.join(app_path, 'custom_nodes')
+                    os.makedirs(custom_nodes_path, exist_ok=True)
+                    comfyui_manager_path = os.path.join(custom_nodes_path, 'ComfyUI-Manager')
+                    if not os.path.exists(comfyui_manager_path):
+                        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cloning ComfyUI-Manager...'})
+                        git.Repo.clone_from('https://github.com/ltdrdata/ComfyUI-Manager.git', comfyui_manager_path)
+                        send_websocket_message('install_log', {'app_name': app_name, 'log': 'ComfyUI-Manager cloned successfully.'})
+
+            except git.exc.GitCommandError as e:
+                send_websocket_message('install_log', {'app_name': app_name, 'log': f'Error cloning repository: {str(e)}'})
+                return False, f"Error cloning repository: {str(e)}"
+
+        if app_name == 'bkohya': # create a folder link for the kohya_ss local "venv"
+            ensure_kohya_local_venv_is_symlinked()
+
+        # Clean up the downloaded file
+        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cleaning up...'})
+
+        # lutzapps - debug with local TAR
+        # do NOT delete the Kohya venv
+        if DEBUG_SETTINGS["delete_tarfile_after_download"] == "1": # this is the default, but can be overwritten
+            os.remove(downloaded_file)
+
+        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Installation complete. Refresh page to start app'})
+
+        save_install_status(app_name, 'completed', 100, 'Completed')
+        send_websocket_message('install_complete', {'app_name': app_name, 'status': 'success', 'message': "Virtual environment installed successfully."})
+        return True, "Virtual environment installed successfully."
+
+    except requests.RequestException as e:
+        error_message = f"Download failed: {str(e)}"
+        send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
+        save_install_status(app_name, 'failed', 0, 'Failed')
+        return False, error_message
+    except Exception as e:
+        error_message = f"Installation failed: {str(e)}\n{traceback.format_exc()}"
+        save_install_status(app_name, 'failed', 0, 'Failed')
+        send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
+        return False, error_message
+
+
 def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
     app_config = app_configs.get(app_name)
     if not app_config:
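
The two aria2c line patterns above can be checked against the sample lines the commit itself quotes in its comments. A self-contained sketch (the sample strings are taken from those comments):

```python
import re

download_running_line_pattern = r"\[#(\w+)\s+(\d+\.?\d*)\s*([GMK]iB)/(\d+\.?\d*)\s*([GMK]iB)\((\d+)%\)\s+CN:(\d+)\s+DL:(\d+\.?\d*)\s*([GMK]iB)\s+ETA:(?:(\d+)h)?(?:(\d+)m)?(?:(\d+)s)?\]"
download_finished_line_pattern = r"([a-fA-F0-9]{6})\|(\w+)\s*\|\s*([\d.]+[GMK]iB/s)\|(.+)"

# sample summary line printed by aria2c every --summary-interval
running = "[#cd57da 2.1GiB/4.0GiB(53%) CN:16 DL:1.9MiB ETA:16m19s]"
m = re.match(download_running_line_pattern, running)
print(m.group(1), m.group(6), m.group(11), m.group(12))  # cd57da 53 16 19

# sample "Download Results" line printed when the download finishes
finished = "cd57da|OK  |   1.6MiB/s|/workspace/bkohya.tar.gz"
m = re.match(download_finished_line_pattern, finished)
print(m.group(1), m.group(2), m.group(4))  # cd57da OK /workspace/bkohya.tar.gz
```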
@@ -152,34 +734,36 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
         save_install_status(app_name, 'in_progress', 0, 'Downloading')
         send_websocket_message('install_log', {'app_name': app_name, 'log': f'Starting download of {total_size / (1024 * 1024):.2f} MB...'})
 
-        response = requests.get(download_url, stream=True)
-        response.raise_for_status()
-
-        block_size = 8192
-        downloaded_size = 0
-        start_time = time.time()
-
-        with open(downloaded_file, 'wb') as file:
-            for chunk in response.iter_content(chunk_size=block_size):
-                if chunk:
-                    file.write(chunk)
-                    downloaded_size += len(chunk)
-                    current_time = time.time()
-                    elapsed_time = current_time - start_time
-
-                    if elapsed_time > 0:
-                        speed = downloaded_size / elapsed_time
-                        percentage = (downloaded_size / total_size) * 100
-                        eta = (total_size - downloaded_size) / speed if speed > 0 else 0
-
-                        send_websocket_message('install_progress', {
-                            'app_name': app_name,
-                            'percentage': round(percentage, 2),
-                            'speed': f"{speed / (1024 * 1024):.2f} MB/s",
-                            'eta': f"{eta:.0f}",
-                            'stage': 'Downloading',
-                            'downloaded': f"{downloaded_size / (1024 * 1024):.2f} MB"
-                        })
+        # lutzapps - debug with existing local TAR
+        if not os.path.exists(downloaded_file):
+            response = requests.get(download_url, stream=True)
+            response.raise_for_status()
+
+            block_size = 8192
+            downloaded_size = 0
+            start_time = time.time()
+
+            with open(downloaded_file, 'wb') as file:
+                for chunk in response.iter_content(chunk_size=block_size):
+                    if chunk:
+                        file.write(chunk)
+                        downloaded_size += len(chunk)
+                        current_time = time.time()
+                        elapsed_time = current_time - start_time
+
+                        if elapsed_time > 0:
+                            speed = downloaded_size / elapsed_time
+                            percentage = (downloaded_size / total_size) * 100
+                            eta = (total_size - downloaded_size) / speed if speed > 0 else 0
+
+                            send_websocket_message('install_progress', {
+                                'app_name': app_name,
+                                'percentage': round(percentage, 2),
+                                'speed': f"{speed / (1024 * 1024):.2f} MB/s",
+                                'eta': f"{eta:.0f}",
+                                'stage': 'Downloading',
+                                'downloaded': f"{downloaded_size / (1024 * 1024):.2f} MB"
+                            })
 
         send_websocket_message('install_log', {'app_name': app_name, 'log': 'Download completed. Starting unpacking...'})
         send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 100, 'stage': 'Download Complete'})
@@ -189,6 +773,12 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
 
         # Unpack the tar.gz file
         send_websocket_message('install_progress', {'app_name': app_name, 'percentage': 0, 'stage': 'Unpacking'})
+
+        # lutzapps - fix TAR bug (compressed from the workspace root instead of bkohya)
+        # e.g. "bkohya/bin/activate", together with venv_path ("/workspace/bkohya"), ends up as "/workspace/bkohya/bkohya/bin/activate"
+        if app_name == "bkohya":
+            venv_path = "/workspace"
+
         unpack_command = f"tar -xzvf {downloaded_file} -C {venv_path}"
         process = subprocess.Popen(unpack_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True)
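
The workspace-root fix above exists because the bkohya archive's entries already carry a "bkohya/" prefix, so extracting with `-C /workspace/bkohya` doubles the path. A standalone sketch of how the member prefix decides the extraction target (illustrative paths, not the commit's code):

```python
import tarfile

# sketch: entries like "bkohya/bin/activate" extracted into /workspace/bkohya would
# land at /workspace/bkohya/bkohya/bin/activate - hence extract to /workspace instead
with tarfile.open("/workspace/bkohya.tar.gz", "r:gz") as tar:
    first = tar.next()  # first archive member, e.g. "bkohya/bin/activate"
    target = "/workspace" if first.name.startswith("bkohya/") else "/workspace/bkohya"
    tar.extractall(path=target)  # avoids the doubled "/workspace/bkohya/bkohya/..." path
```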
@@ -227,14 +817,21 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
            repo_url = 'https://github.com/lllyasviel/stable-diffusion-webui-forge.git'
        elif app_name == 'ba1111':
            repo_url = 'https://github.com/AUTOMATIC1111/stable-diffusion-webui.git'
+       elif app_name == 'bkohya': # lutzapps - added new Kohya app
+           repo_url = 'https://github.com/bmaltais/kohya_ss.git'
+
-       try:
-           git.Repo.clone_from(repo_url, app_path, progress=lambda op_code, cur_count, max_count, message: send_websocket_message('install_log', {
+       try: # add a repo assignment for Kohya
+           repo = git.Repo.clone_from(repo_url, app_path, progress=lambda op_code, cur_count, max_count, message: send_websocket_message('install_log', {
                'app_name': app_name,
                'log': f"Cloning: {cur_count}/{max_count} {message}"
            }))
            send_websocket_message('install_log', {'app_name': app_name, 'log': 'Repository cloned successfully.'})

+           # lutzapps - make sure we use Kohya with FLUX support
+           if app_name == 'bkohya':
+               branch_name = "sd3-flux.1" # this branch also uses a "sd-scripts" branch "SD3" automatically
+               repo.git.checkout(branch_name)
+
        # Clone ComfyUI-Manager for Better ComfyUI
        if app_name == 'bcomfy':
            custom_nodes_path = os.path.join(app_path, 'custom_nodes')
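Capturing the `Repo` object returned by `clone_from` is what makes the follow-up branch switch possible; GitPython's `clone_from` also accepts a `branch` argument that would clone and check out in one step. A sketch of the same clone-then-checkout flow:

    # Sketch: clone kohya_ss and switch to the FLUX-capable branch with GitPython.
    import git

    def clone_kohya(app_path: str) -> git.Repo:
        repo = git.Repo.clone_from('https://github.com/bmaltais/kohya_ss.git', app_path)
        # per the comment above, this branch also brings in the "SD3" branch of sd-scripts
        repo.git.checkout('sd3-flux.1')
        return repo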
@@ -251,7 +848,11 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):

        # Clean up the downloaded file
        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Cleaning up...'})
-       os.remove(downloaded_file)
+       # lutzapps - debug with local TAR
+       # do NOT delete the Kohya venv
+       #os.remove(downloaded_file)

        send_websocket_message('install_log', {'app_name': app_name, 'log': 'Installation complete. Refresh page to start app'})

        save_install_status(app_name, 'completed', 100, 'Completed')
@@ -268,6 +869,32 @@ def download_and_unpack_venv(app_name, app_configs, send_websocket_message):
        send_websocket_message('install_complete', {'app_name': app_name, 'status': 'error', 'message': error_message})
        return False, error_message

+### this is the function which switches between the "1" and "2" installer code versions (DEBUG_SETTINGS) for comparison
+def download_and_unpack_venv(app_name, app_configs, send_websocket_message) -> tuple[bool, str]:
+    from app_configs import DEBUG_SETTINGS, write_debug_setting
+
+    installer_codeversion = DEBUG_SETTINGS['installer_codeversion'] # read from DEBUG_SETTINGS
+    print(f"download_and_unpack_venv v{installer_codeversion} STARTING for '{app_name}'")
+
+    import time
+
+    start_time = time.time()
+
+    if installer_codeversion == "1":
+        download_and_unpack_venv(app_name, app_configs, send_websocket_message)
+    elif installer_codeversion == "2":
+        download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message)
+    else:
+        print(f"unknown 'installer_codeversion' v{installer_codeversion} found, nothing run for app '{app_name}'")
+
+    total_duration = f"{datetime.timedelta(seconds=int(time.time() - start_time))}"
+
+    write_debug_setting('app_name', app_name)
+    write_debug_setting('total_duration', total_duration)
+
+    print(f"download_and_unpack_venv v{installer_codeversion} did run {total_duration} for app '{app_name}'")
+
+
 def fix_custom_nodes(app_name, app_configs):
     if app_name != 'bcomfy':
         return False, "This operation is only available for Better ComfyUI."
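Note that, as committed, the `"1"` branch of this wrapper calls `download_and_unpack_venv` again, i.e. itself; the intended shape is a dispatcher over two distinct implementations that also times the run. A sketch, with a hypothetical `download_and_unpack_venv_v1` standing in for the original code path:

    # Sketch of the switch-and-time wrapper: pick an installer version from
    # DEBUG_SETTINGS, run it, and persist the wall-clock duration for comparison.
    import datetime
    import time

    def download_and_unpack_venv(app_name, app_configs, send_websocket_message) -> tuple[bool, str]:
        from app_configs import DEBUG_SETTINGS, write_debug_setting

        version = DEBUG_SETTINGS['installer_codeversion']
        start_time = time.time()

        if version == "1":
            result = download_and_unpack_venv_v1(app_name, app_configs, send_websocket_message)  # hypothetical name
        elif version == "2":
            result = download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message)
        else:
            result = (False, f"unknown 'installer_codeversion' v{version}")

        total_duration = str(datetime.timedelta(seconds=int(time.time() - start_time)))
        write_debug_setting('app_name', app_name)
        write_debug_setting('total_duration', total_duration)
        return result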
@@ -296,7 +923,8 @@ def fix_custom_nodes(app_name, app_configs):
 # Replace the existing install_app function with this updated version
 def install_app(app_name, app_configs, send_websocket_message):
     if app_name in app_configs:
-        return download_and_unpack_venv(app_name, app_configs, send_websocket_message)
+        #return download_and_unpack_venv(app_name, app_configs, send_websocket_message)
+        return download_and_unpack_venv_fastversion(app_name, app_configs, send_websocket_message)
     else:
         return False, f"Unknown app: {app_name}"

@@ -3,14 +3,12 @@ import shutil
 import datetime
 import threading
 import time
-import json

 from flask import jsonify
-from utils.websocket_utils import send_websocket_message, active_websockets
-from utils.app_configs import (get_app_configs)
+from utils.websocket_utils import (send_websocket_message, active_websockets)
+from utils.app_configs import (get_app_configs, init_global_dict_from_file, pretty_dict)

-### shared_models-v0.9.1 by lutzapps, Oct 30th 2024 ###
-### dev-my-v0.6
+### shared_models-v0.9.2 by lutzapps, Nov 5th 2024 ###

 # to run (and optionally DEBUG) this docker image "better-ai-launcher" in a local container on your own machine
 # you need to define the ENV var "LOCAL_DEBUG" in the "VSCode Docker Extension"
|
||||||
print(f"MAKE_MAPPING_FILES_HIDDEN='{MAKE_MAPPING_FILES_HIDDEN}'\n")
|
print(f"MAKE_MAPPING_FILES_HIDDEN='{MAKE_MAPPING_FILES_HIDDEN}'\n")
|
||||||
|
|
||||||
|
|
||||||
# helper function to return a pretty formatted DICT string for human consumption (Logs, JSON)
|
|
||||||
def PrettyDICT(dict:dict) -> str:
|
|
||||||
dict_string = json.dumps(dict, ensure_ascii=False, indent=4, separators=(',', ': '))
|
|
||||||
|
|
||||||
return dict_string
|
|
||||||
|
|
||||||
# helper function called by init_shared_model_app_map() and init_shared_models_folders()
|
|
||||||
def write_dict_to_jsonfile(dict:dict, json_filepath:str, overwrite:bool=False) -> bool:
|
|
||||||
# Convert the 'dict' to JSON, and write the JSON object to file 'json_filepath'
|
|
||||||
|
|
||||||
#json_string = json.dumps(dict, indent=4, ensure_ascii=False, sort_keys=True)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if os.path.exists(json_filepath) and not overwrite:
|
|
||||||
error_msg = f"dictionary file '{json_filepath}' already exists (and overwrite={overwrite})"
|
|
||||||
#print(error_msg)
|
|
||||||
|
|
||||||
return False, error_msg # failure
|
|
||||||
|
|
||||||
# Write the JSON data to a file
|
|
||||||
with open(json_filepath, 'w', encoding='utf-8') as output_file:
|
|
||||||
json.dump(dict, output_file, ensure_ascii=False, indent=4, separators=(',', ': '))
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
error_msg = f"ERROR in shared_models:write_dict_to_jsonfile() - loading JSON Map File '{json_filepath}'\nException: {str(e)}"
|
|
||||||
print(error_msg)
|
|
||||||
|
|
||||||
return False, error_msg # failure
|
|
||||||
|
|
||||||
return True, "" # success
|
|
||||||
|
|
||||||
# helper function called by init_shared_model_app_map() and init_shared_models_folders()
|
|
||||||
def read_dict_from_jsonfile(json_filepath:str) -> dict:
|
|
||||||
# Read JSON file from 'json_filepath' and return it as 'dict'
|
|
||||||
|
|
||||||
try:
|
|
||||||
if os.path.exists(json_filepath):
|
|
||||||
with open(json_filepath, 'r') as input_file:
|
|
||||||
dict = json.load(input_file)
|
|
||||||
else:
|
|
||||||
error_msg = f"dictionary file '{json_filepath}' does not exist"
|
|
||||||
#print(error_msg)
|
|
||||||
|
|
||||||
return {}, error_msg # failure
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
error_msg = f"ERROR in shared_models:read_dict_from_jsonfile() - loading JSON Map File '{json_filepath}'\nException: {str(e)}"
|
|
||||||
print(error_msg)
|
|
||||||
|
|
||||||
return {}, error_msg # failure
|
|
||||||
|
|
||||||
return dict, "" # success
|
|
||||||
|
|
||||||
# helper function for "init_app_install_dirs(), "init_shared_model_app_map()" and "init_shared_models_folders()"
|
|
||||||
def init_global_dict_from_file(dict:dict, dict_filepath:str, dict_description:str) -> bool:
|
|
||||||
# load or initialize the 'dict' for 'dict_description' from 'dict_filepath'
|
|
||||||
|
|
||||||
try:
|
|
||||||
if not os.path.exists(SHARED_MODELS_DIR):
|
|
||||||
print(f"\nThe SHARED_MODELS_DIR '{SHARED_MODELS_DIR}' is not found!\nCreate it by clicking the 'Create Shared Folders' button from the WebUI 'Settings' Tab\n")
|
|
||||||
|
|
||||||
return
|
|
||||||
|
|
||||||
if os.path.isfile(dict_filepath) and os.path.exists(dict_filepath):
|
|
||||||
dict_filepath_found = True
|
|
||||||
# read the dict_description from JSON file
|
|
||||||
print(f"\nExisting '{dict_description}' found and read from file '{dict_filepath}'\nThe file overwrites the code defaults!")
|
|
||||||
|
|
||||||
dict, error_msg = read_dict_from_jsonfile(dict_filepath)
|
|
||||||
if not error_msg == "":
|
|
||||||
print(error_msg)
|
|
||||||
|
|
||||||
else: # init the dict_description from app code
|
|
||||||
dict_filepath_found = False
|
|
||||||
print(f"No '{dict_description}'_FILE found, initializing default '{dict_description}' from code ...")
|
|
||||||
# use already defined dict from app code
|
|
||||||
# write the dict to JSON file
|
|
||||||
success, ErrorMsg = write_dict_to_jsonfile(dict, dict_filepath)
|
|
||||||
|
|
||||||
if success:
|
|
||||||
print(f"'{dict_description}' is initialized and written to file '{dict_filepath}'")
|
|
||||||
else:
|
|
||||||
print(ErrorMsg)
|
|
||||||
|
|
||||||
# Convert 'dict_description' dictionary to formatted JSON
|
|
||||||
print(f"\nUsing {'external' if dict_filepath_found else 'default'} '{dict_description}':\n{PrettyDICT(dict)}")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
error_msg = f"ERROR in shared_models:init_global_dict_from_file() - initializing dict Map File '{dict_filepath}'\nException: {str(e)}"
|
|
||||||
print(error_msg)
|
|
||||||
|
|
||||||
return False, error_msg
|
|
||||||
|
|
||||||
return True, "" # success
|
|
||||||
|
|
||||||
# the below SHARED_MODEL_FOLDERS_FILE will be read and used (if exists),
|
# the below SHARED_MODEL_FOLDERS_FILE will be read and used (if exists),
|
||||||
# otherwise this file will be generated with the content of the below default SHARED_MODEL_FOLDERS dict
|
# otherwise this file will be generated with the content of the below default SHARED_MODEL_FOLDERS dict
|
||||||
SHARED_MODEL_FOLDERS_FILE = f"{SHARED_MODELS_DIR}/{HIDDEN_FILE_PREFIX}_shared_model_folders.json"
|
SHARED_MODEL_FOLDERS_FILE = f"{SHARED_MODELS_DIR}/{HIDDEN_FILE_PREFIX}_shared_model_folders.json"
|
||||||
|
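The removed helpers now live in `utils.app_configs` (with `PrettyDICT` renamed to `pretty_dict`), as the new imports at the top of this file show. Their contract is unchanged: each call returns a `(value, error_msg)` pair, with `""` meaning success. A condensed sketch of that contract:

    # Condensed sketch of the (value, error_msg) contract of the JSON dict
    # helpers that moved to utils.app_configs.
    import json
    import os

    def write_dict_to_jsonfile(d: dict, json_filepath: str, overwrite: bool = False):
        if os.path.exists(json_filepath) and not overwrite:
            return False, f"'{json_filepath}' already exists (and overwrite={overwrite})"
        with open(json_filepath, 'w', encoding='utf-8') as f:
            json.dump(d, f, ensure_ascii=False, indent=4, separators=(',', ': '))
        return True, ""

    def read_dict_from_jsonfile(json_filepath: str):
        if not os.path.exists(json_filepath):
            return {}, f"'{json_filepath}' does not exist"
        with open(json_filepath, 'r') as f:
            return json.load(f), ""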
@@ -286,7 +189,7 @@ SHARED_MODEL_FOLDERS = {
 # helper function called by "inline"-main() and ensure_shared_models_folders()
 def init_shared_models_folders(send_SocketMessage:bool=True):
     global SHARED_MODEL_FOLDERS
-    init_global_dict_from_file(SHARED_MODEL_FOLDERS, SHARED_MODEL_FOLDERS_FILE, "SHARED_MODEL_FOLDERS")
+    init_global_dict_from_file(SHARED_MODEL_FOLDERS, SHARED_MODEL_FOLDERS_FILE, "SHARED_MODEL_FOLDERS", SHARED_MODELS_DIR)

     if os.path.exists(SHARED_MODEL_FOLDERS_FILE) and send_SocketMessage:
         send_websocket_message('extend_ui_helper', {
@@ -368,7 +271,7 @@ APP_INSTALL_DIRS = {
     "A1111": "/workspace/stable-diffusion-webui",
     "Forge": "/workspace/stable-diffusion-webui-forge",
     "ComfyUI": "/workspace/ComfyUI",
-    "Kohya_ss": "/workspace/Kohya_ss",
+    "kohya_ss": "/workspace/kohya_ss",
     "CUSTOM1": "/workspace/joy-caption-batch"
 }

@@ -397,6 +300,13 @@ APP_INSTALL_DIRS = {
 #        'venv_path': '/workspace/ba1111',
 #        'app_path': '/workspace/stable-diffusion-webui',
 #        'port': 7863,
+#    },
+#    'bkohya': {
+#        'name': 'Better Kohya',
+#        'command': 'cd /workspace/bkohya && . ./bin/activate && cd /workspace/kohya_ss && ./gui.sh --listen --port 7860',
+#        'venv_path': '/workspace/bkohya',
+#        'app_path': '/workspace/kohya_ss',
+#        'port': 7860,
 #    }
 # }

|
||||||
MAP_APPS = {
|
MAP_APPS = {
|
||||||
"bcomfy": "ComfyUI",
|
"bcomfy": "ComfyUI",
|
||||||
"bforge": "Forge",
|
"bforge": "Forge",
|
||||||
"ba1111": "A1111"
|
"ba1111": "A1111",
|
||||||
|
"bkohya": "kohya_ss" # lutzapps - added new kohya_ss app
|
||||||
}
|
}
|
||||||
|
|
||||||
# helper function called by main(), uses above "MAP_APPS" dict
|
# helper function called by main(), uses above "MAP_APPS" dict
|
||||||
|
@ -422,7 +333,7 @@ def sync_with_app_configs_install_dirs():
|
||||||
APP_INSTALL_DIRS[MAP_APPS[bapp_name]] = bapp_path # update path in APP_INSTALL_DIRS
|
APP_INSTALL_DIRS[MAP_APPS[bapp_name]] = bapp_path # update path in APP_INSTALL_DIRS
|
||||||
|
|
||||||
# show final synced APP_INSTALL_DIRS
|
# show final synced APP_INSTALL_DIRS
|
||||||
print(f"\nUsing synched 'APP_INSTALL_DIRS':\n{PrettyDICT(APP_INSTALL_DIRS)}")
|
print(f"\nUsing synched 'APP_INSTALL_DIRS':\n{pretty_dict(APP_INSTALL_DIRS)}")
|
||||||
|
|
||||||
|
|
||||||
# init global module 'APP_INSTALL_DIRS' dict: { 'app_name': 'app_installdir' }
|
# init global module 'APP_INSTALL_DIRS' dict: { 'app_name': 'app_installdir' }
|
||||||
|
@ -430,7 +341,7 @@ def sync_with_app_configs_install_dirs():
|
||||||
# NOTE: this APP_INSTALL_DIRS_FILE is temporary synced with the app_configs dict
|
# NOTE: this APP_INSTALL_DIRS_FILE is temporary synced with the app_configs dict
|
||||||
def init_app_install_dirs():
|
def init_app_install_dirs():
|
||||||
global APP_INSTALL_DIRS
|
global APP_INSTALL_DIRS
|
||||||
init_global_dict_from_file(APP_INSTALL_DIRS, APP_INSTALL_DIRS_FILE, "APP_INSTALL_DIRS")
|
init_global_dict_from_file(APP_INSTALL_DIRS, APP_INSTALL_DIRS_FILE, "APP_INSTALL_DIRS", SHARED_MODELS_DIR)
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -449,7 +360,7 @@ SHARED_MODEL_APP_MAP_FILE = f"{SHARED_MODELS_DIR}/{HIDDEN_FILE_PREFIX}_shared_mo
|
||||||
# here is a list of all "known" model type dirs, and if they are used here (mapped) or
|
# here is a list of all "known" model type dirs, and if they are used here (mapped) or
|
||||||
# if they are currently "unmapped":
|
# if they are currently "unmapped":
|
||||||
#
|
#
|
||||||
# "Kohya_ss" (mapped): "/models"
|
# "kohya_ss" (mapped): "/models"
|
||||||
|
|
||||||
# "ComfyUI" (mapped): "/models/checkpoints", "/models/clip", "/models/controlnet", "/models/embeddings", "/models/hypernetworks", "/models/ipadapter/"(???), "/models/loras", "/models/reactor"(???), "/models/unet", "/models/upscale_models", "/models/vae", "/models/vae_approx"
|
# "ComfyUI" (mapped): "/models/checkpoints", "/models/clip", "/models/controlnet", "/models/embeddings", "/models/hypernetworks", "/models/ipadapter/"(???), "/models/loras", "/models/reactor"(???), "/models/unet", "/models/upscale_models", "/models/vae", "/models/vae_approx"
|
||||||
# "ComfyUI" (unmapped): "/models/clip_vision", "/models/diffusers", "/models/diffusion_models", "/models/gligen", "/models/photomaker", "/moedls/style_models",
|
# "ComfyUI" (unmapped): "/models/clip_vision", "/models/diffusers", "/models/diffusion_models", "/models/gligen", "/models/photomaker", "/moedls/style_models",
|
||||||
|
@ -463,7 +374,7 @@ SHARED_MODEL_APP_MAP = {
|
||||||
"ComfyUI": "/models/checkpoints",
|
"ComfyUI": "/models/checkpoints",
|
||||||
"A1111": "/models/Stable-diffusion",
|
"A1111": "/models/Stable-diffusion",
|
||||||
"Forge": "/models/Stable-diffusion",
|
"Forge": "/models/Stable-diffusion",
|
||||||
"Kohya_ss": "/models" # flatten all "ckpt" / "unet" models here
|
"kohya_ss": "/models" # flatten all "ckpt" / "unet" models here
|
||||||
},
|
},
|
||||||
|
|
||||||
"clip": {
|
"clip": {
|
||||||
|
@ -550,7 +461,7 @@ SHARED_MODEL_APP_MAP = {
|
||||||
"ComfyUI": "/models/unet",
|
"ComfyUI": "/models/unet",
|
||||||
"A1111": "/models/Stable-diffusion", # flatten all "ckpts" / "unet" models here
|
"A1111": "/models/Stable-diffusion", # flatten all "ckpts" / "unet" models here
|
||||||
"Forge": "/models/Stable-diffusion", # flatten all "ckpts" / "unet" models here
|
"Forge": "/models/Stable-diffusion", # flatten all "ckpts" / "unet" models here
|
||||||
"Kohya_ss": "/models" # flatten all "ckpt" / "unet" models here
|
"kohya_ss": "/models" # flatten all "ckpt" / "unet" models here
|
||||||
},
|
},
|
||||||
|
|
||||||
"upscale_models": {
|
"upscale_models": {
|
||||||
|
@ -585,7 +496,7 @@ SHARED_MODEL_APP_MAP = {
|
||||||
# which does a default mapping from app code or (if exists) from external JSON 'SHARED_MODEL_APP_MAP_FILE' file
|
# which does a default mapping from app code or (if exists) from external JSON 'SHARED_MODEL_APP_MAP_FILE' file
|
||||||
def init_shared_model_app_map():
|
def init_shared_model_app_map():
|
||||||
global SHARED_MODEL_APP_MAP
|
global SHARED_MODEL_APP_MAP
|
||||||
init_global_dict_from_file(SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, "SHARED_MODEL_APP_MAP")
|
init_global_dict_from_file(SHARED_MODEL_APP_MAP, SHARED_MODEL_APP_MAP_FILE, "SHARED_MODEL_APP_MAP", SHARED_MODELS_DIR)
|
||||||
|
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@ -919,3 +830,39 @@ print('\t- "populate_testdata.sh" (bash script to un-tar and expand all testdata
|
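Each `SHARED_MODEL_APP_MAP` entry maps one shared model type to an app-relative folder, so resolving a concrete target directory is just a join against `APP_INSTALL_DIRS`. A sketch of that lookup (assuming the app-relative paths keep their leading slash, as in the dict above):

    # Sketch: resolve the per-app target folder for a shared model type.
    import os

    def resolve_target_dir(model_type: str, app: str,
                           shared_model_app_map: dict, app_install_dirs: dict):
        app_rel_path = shared_model_app_map.get(model_type, {}).get(app)
        if app_rel_path is None:
            return None  # this model type is not mapped for that app
        return os.path.join(app_install_dirs[app], app_rel_path.lstrip('/'))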
||||||
print('\t- "testdata_shared_models_link.tar.gz" (Testcase #1, read below)')
|
print('\t- "testdata_shared_models_link.tar.gz" (Testcase #1, read below)')
|
||||||
print('\t- "testdata_stable-diffusion-webui_pull.tar.gz" (Testcase #2, read below)')
|
print('\t- "testdata_stable-diffusion-webui_pull.tar.gz" (Testcase #2, read below)')
|
||||||
print('\t- "testdata_installed_apps_pull.tar.gz" (Testcase #3, read below)\n')
|
print('\t- "testdata_installed_apps_pull.tar.gz" (Testcase #3, read below)\n')
|
||||||
|
|
||||||
|
if LOCAL_DEBUG:
|
||||||
|
# simulate a RUNPOD environment (e.g. for "/workspace/kohya_ss/setup.sh" or "setup-runpod.sh")
|
||||||
|
RUNPOD_POD_ID = "0ce86d9cc8dd"
|
||||||
|
|
||||||
|
### setup.sh::
|
||||||
|
# Check if RUNPOD variable should be set
|
||||||
|
# RUNPOD=false
|
||||||
|
# if env_var_exists RUNPOD_POD_ID || env_var_exists RUNPOD_API_KEY; then
|
||||||
|
# RUNPOD=true
|
||||||
|
# fi
|
||||||
|
#
|
||||||
|
# # Check if the venv folder doesn't exist
|
||||||
|
# if [ ! -d "$SCRIPT_DIR/venv" ]; then
|
||||||
|
# echo "Creating venv..."
|
||||||
|
# python3 -m venv "$SCRIPT_DIR/venv"
|
||||||
|
# fi
|
||||||
|
#
|
||||||
|
# # Activate the virtual environment
|
||||||
|
# echo "Activating venv..."
|
||||||
|
# source "$SCRIPT_DIR/venv/bin/activate" || exit 1
|
||||||
|
|
||||||
|
# if [[ "$OSTYPE" == "lin"* ]]; then
|
||||||
|
# if [ "$RUNPOD" = true ]; then
|
||||||
|
# DIR="/workspace/kohya_ss"
|
||||||
|
#######
|
||||||
|
|
||||||
|
### app_configs.py::
|
||||||
|
# 'bkohya': {
|
||||||
|
# 'name': 'Better Kohya',
|
||||||
|
# 'command': 'cd /workspace/bkohya && . ./bin/activate && cd /workspace/kohya_ss && ./gui.sh --listen --port 7860',
|
||||||
|
# 'venv_path': '/workspace/bkohya',
|
||||||
|
# 'app_path': '/workspace/kohya_ss',
|
||||||
|
# 'port': 7860,
|
||||||
|
# }
|
||||||
|
#######
|
|
@ -4,14 +4,27 @@ services:
|
||||||
build:
|
build:
|
||||||
context: .
|
context: .
|
||||||
dockerfile: ./Dockerfile
|
dockerfile: ./Dockerfile
|
||||||
|
|
||||||
command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --listen 0.0.0.0:5678 -m flask run --host 0.0.0.0 --port 7222"]
|
command: ["sh", "-c", "pip install debugpy -t /tmp && python /tmp/debugpy --listen 0.0.0.0:5678 -m flask run --host 0.0.0.0 --port 7222"]
|
||||||
|
|
||||||
ports:
|
ports:
|
||||||
- 5678:5678 # (random) port for debuggy (adjust together in above "command")
|
- 5678:5678 # (random) port for debuggy (adjust together in above "command")
|
||||||
- 7222:7222 # main Flask app port better-launcher "App-Manager"
|
- 7222:7222 # main Flask app port better-launcher "App-Manager"
|
||||||
- 8181:8181 # File-Browser
|
### NOTE: during debugging, "start.sh" does *not* run, and following apps are not available right now:
|
||||||
- 7777:7777 # VSCode-Server
|
# - 22:22 # SSH
|
||||||
|
|
||||||
|
# - 8181:8181 # File-Browser
|
||||||
|
# - 7777:7777 # VSCode-Server
|
||||||
|
|
||||||
|
# - 3000:3000 # ComfyUI
|
||||||
|
# - 6006:6006 # Tensorboard (needed by kohya_ss)
|
||||||
|
# - 7860:7860 # Kohya-ss (lutzapps - added new Kohya app with FLUX support)
|
||||||
|
# - 7862:7862 # Forge (aka Stable-Diffiusion-WebUI-Forge)
|
||||||
|
# - 7863:7863 # A1111 (aka Stable-Diffiusion-WebUI)
|
||||||
|
|
||||||
env_file:
|
env_file:
|
||||||
- .env # pass additional env-vars (hf_token, civitai token, ssh public-key) from ".env" file to container
|
- .env # pass additional env-vars (hf_token, civitai token, ssh public-key) from ".env" file to container
|
||||||
|
|
||||||
environment:
|
environment:
|
||||||
- LOCAL_DEBUG=True # change app to localhost Urls and local Websockets (unsecured)
|
- LOCAL_DEBUG=True # change app to localhost Urls and local Websockets (unsecured)
|
||||||
# if you NOT want need this behaviour, then set `LOCAL_DEBUG=False` [default],
|
# if you NOT want need this behaviour, then set `LOCAL_DEBUG=False` [default],
|
||||||
|
|
|
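The compose `command` installs debugpy into `/tmp` and wraps the Flask run so an IDE can attach on 5678. An alternative sketch (not what the compose file does) is to let the app itself open the debug port via debugpy's public API:

    # Sketch: have the Flask app listen for a debugger itself, instead of the
    # compose command that injects debugpy into /tmp.
    import debugpy

    debugpy.listen(("0.0.0.0", 5678))  # same port as the 5678:5678 mapping above
    # debugpy.wait_for_client()        # optionally block until the IDE attaches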
@@ -11,6 +11,14 @@ services:
     volumes:
       - ${HOME}/Projects/Docker/madiator:/workspace:rw # TODO: create this host folder before you run!
     ports:
+      - 22:22 # SSH
+
       - 7222:7222 # main Flask app port better-launcher "App-Manager"
       - 8181:8181 # File-Browser
       - 7777:7777 # VSCode-Server
+
+      - 3000:3000 # ComfyUI
+      - 6006:6006 # Tensorboard (needed by kohya_ss)
+      - 7860:7860 # Kohya-ss (lutzapps - added new Kohya app with FLUX support)
+      - 7862:7862 # Forge (aka Stable-Diffusion-WebUI-Forge)
+      - 7863:7863 # A1111 (aka Stable-Diffusion-WebUI)
@@ -9,11 +9,6 @@
 # "dockerRun": {
 #     "envFiles": ["${workspaceFolder}/.env"], // pass additional env-vars from ".env" file to container

-
-### Build Vars ###
-IMAGE_BASE=madiator2011/better-launcher
-IMAGE_TAG=dev
-
 ### APP specific Vars ###
 DISABLE_PULLBACK_MODELS=False
 # the default is, that app model files, which are found locally (in only one app),