Mirror of https://github.com/kodxana/madiator-docker-runpod.git (synced 2024-12-03 23:20:12 +01:00)
dev-beta-v0.9.0 App Enhancements
This commit is contained in:
parent 63a67b2eb6
commit 763d32abf3
20 changed files with 3761 additions and 2252 deletions
@@ -27,5 +27,4 @@
**/obj
**/secrets.dev.yaml
**/values.dev.yaml
LICENSE
README.md
LICENSE

@@ -26,15 +26,22 @@
"LOCAL_DEBUG": "True", // change app to localhost Urls and local Websockets (unsecured)
// if you do NOT want this behaviour, then set `LOCAL_DEBUG=False` [default],
// which is the same as NOT setting this ENV var at all.

"PYTHONDONTWRITEBYTECODE": 1,
// keep Python from generating .pyc files in the container
// this should however be removed for production as it disables bytecode caching

"FLASK_APP": "app/app.py",

"FLASK_ENV": "development", // changed from "production" [default],
// only needed when "LOCAL_DEBUG=True", otherwise this ENV var can be omitted

"GEVENT_SUPPORT": "True", // gevent monkey-patching is being used, enable gevent support in the debugger,
// only needed when "LOCAL_DEBUG=True", otherwise this ENV var can be omitted

"GIT_PYTHON_TRACE": "full" // enables full logging for the GitPython code, used for cloning the apps,
// bcomfy custom_nodes, and refreshing the apps via git fetch/merge = git pull

// "FLASK_DEBUG": "0" // "1" allows debugging in Chrome, but then the VSCode debugger does not work; "0" is the [default], which is the same as NOT setting this ENV var at all
},
"volumes": [

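For local debugging outside of VSCode, the same `env` block can be reproduced in a plain shell - a minimal sketch mirroring the values above (not part of the repo's config files):

```bash
# mirror the launch-config env for a local debug session
export LOCAL_DEBUG=True
export PYTHONDONTWRITEBYTECODE=1
export FLASK_APP=app/app.py
export FLASK_ENV=development
export GEVENT_SUPPORT=True
export GIT_PYTHON_TRACE=full
```
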
@@ -20,7 +20,7 @@ RUN apt-get update && \
### ---> needed Tools for Installer
# removed: 2x git nginx ffmpeg (as they are already installed with the base image)
# added: pigz (for parallel (de)compression of TAR archives); zip (for easier folder compression)
apt-get install -y aria2 pigz zip pv rsync zstd tree libtcmalloc-minimal4 bc \
# add Python3.11 as system Python version, serving the Python Flask App
python3.11 python3.11-venv python3.11-dev python3.11-distutils && \
# do not remove Python3.10, as we need it for "official" app support (e.g. for the kohya_ss VENV)

@@ -85,6 +85,9 @@ RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
python3.11 get-pip.py && \
rm get-pip.py

# set the default python_cmd (and version) for the default launch script 'webui.sh' of the apps 'ba1111' and 'bforge'
ENV python_cmd=python3.11

# Set the working directory
WORKDIR /app

@@ -102,15 +105,33 @@ COPY app .
RUN curl -fsSL https://raw.githubusercontent.com/filebrowser/get/master/get.sh | bash

# Set environment variables for development/production
# overwrite this ENV in "tasks.json" docker run "env" section or overwrite in your ".env" file to "development"
# ENV vars alternatively could also stay in two different Dockerfiles: 'Dockerfile-Prod' and 'Dockerfile-Dev'

### PROD START:
#
# ENV APP_CONFIGS_MANIFEST_URL=https://better.s3.madiator.com/app_configs.json
#
# overwrite this ENV in "tasks.json" docker run "env" section, docker-compose-debug.yml, or overwrite in your ".env" file to "development"
ENV FLASK_ENV=production
#
### PROD END

### DEV START:
#
ENV APP_CONFIGS_MANIFEST_URL=https://better-dev.s3.madiator.com/app_configs.json
#
# alternatively add the following ENV vars in the "tasks.json" docker run "env" section, docker-compose-debug.yml,
# or populate them in your ".env" file, instead of setting them here in the Dockerfile
#
# gevent monkey-patching is being used, enable gevent support in the debugger with GEVENT_SUPPORT=True
# add this ENV in the "tasks.json" docker run "env" section or populate it in your ".env" file
# ENV GEVENT_SUPPORT=True
#
# keep Python from generating .pyc files in the container
# this should however be removed for production as it disables bytecode caching
# ENV PYTHONDONTWRITEBYTECODE=1
#
### DEV END

# lutzapps - keep Python from generating .pyc files in the container
ENV PYTHONDONTWRITEBYTECODE=1

# Turns off buffering for easier container logging
ENV PYTHONUNBUFFERED=1

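Since the manifest is a plain JSON file, you can inspect what the image will load - a small sketch (`curl` and Python are both installed by this Dockerfile):

```bash
# fetch and pretty-print the (development) app_configs manifest the image points at
curl -fsSL https://better-dev.s3.madiator.com/app_configs.json | python3 -m json.tool | head
```
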
@@ -128,14 +149,20 @@ EXPOSE 7222
COPY nginx/nginx.conf /etc/nginx/nginx.conf
COPY nginx/readme.html /usr/share/nginx/html/readme.html

# HELP configuration ('/static/' is configured to host '/app/static/' content in nginx.conf)
COPY README.md /app/static/README.md
COPY README-Development.txt /app/static/README-Development.txt
COPY app/tests/README-SHARED_MODELS.txt /app/static/README-SHARED_MODELS.txt

# Copy all necessary scripts
#COPY --from=scripts start.sh /
# --from=scripts is defined as a "shared" location in "docker-bake.hcl" in the "contexts" dict:
#   scripts = "../../container-template"
# the local "start.sh" is (intentionally) empty
# to build all from *one* location, copy "start.sh" into the project workspace folder first:
#   cp ../../container-template/scripts/start.sh start.sh
COPY start.sh /

COPY pre_start.sh /
# lutzapps - add execution flags to the added "/app/tests/populate_testdata.sh"

@@ -8,17 +8,19 @@ This image allows you to easily manage and run various AI applications on your RunPod instance.

### Features
- Easy installation of pre-configured AI applications.
- Start, Stop, Delete, Check, Refresh and monitor running applications.
- Multiple Versions per App can be selected.
- View application logs in real-time.
- Force kill applications if needed.
- Download Manager for **HuggingFace** and **CivitAI** with `token` support for private and gated models.
- Shared Models Management for **downloading and sharing all models of all types to all installed AI applications**!

### Supported Applications
- Better ComfyUI
- Better Forge
- Better A1111
- Better Kohya
- more Apps are planned (the AI Trainer `ai-toolkit` and the `joy-caption-batch` captioner)

### Getting Started
- Access the Better App Manager interface through your RunPod instance URL.

@@ -36,7 +38,6 @@ If you encounter any issues:
For more detailed information and guides, please visit the <a href="https://docs.runpod.io/">RunPod Documentation</a>.

Part of the `madiator-docker-runpod` family of **RunPod Docker Containers for RunPod**

## Github

@@ -60,11 +61,17 @@ BASE_IMAGE=madiator2011/better-base:cuda12.4

## Ports (Apps)

- 3000/http (ComfyUI)<br>
- 7862/http (Forge) aka Stable-Diffusion-WebUI-Forge<br>
- 7863/http (A1111) aka Stable-Diffusion-WebUI<br>

**New**:
- [6006/http] tensorboard (supporting kohya_ss) - provided at the '/tensorboard/' sub-url
- [7864/http] kohya-ss with FLUX.1 support - provided as a gradio link<br>

`Kohya` is currently configured to run via a `public gradio link` (*.gradio.live domain)<br>
`Tensorboard` is currently pre-started with bkohya and available at `http://localhost:6006/tensorboard/`<br>
**Note**: Both URLs will be opened automatically when you click the `Open Application` button.<br>
Make sure to disable the popup-blocker settings in your browser for these 2 additional domains!

## ENV Vars (System)

@@ -89,12 +96,17 @@ see also the explanation in `".vscode/tasks.json"` or `"docker-compose.debug.yml"`
gevent monkey-patching is being used, enable gevent support in the debugger.<br>
only needed when `LOCAL_DEBUG=True`, otherwise this ENV var can be omitted.

- GIT_PYTHON_TRACE=full

enables full logging for the GitPython code, used for cloning the apps,<br>
bcomfy custom_nodes, and refreshing the apps via git fetch/merge = git pull.

- FLASK_DEBUG=0

"1" allows debugging in Chrome, but then the VSCode debugger will not work.<br>
"0" is the [default], which is the same as NOT setting this ENV var at all.

## APP specific Vars
- DISABLE_PULLBACK_MODELS=False

The default is that app model files which are found locally (in only one app) get automatically `pulled-back` into the `"/workspace/shared_models"` folder.<br>

@@ -104,6 +116,141 @@ see also the explanation in `".vscode/tasks.json"` or `"docker-compose.debug.yml"`
If you do **NOT** want this behaviour, then set `DISABLE_PULLBACK_MODELS=True`,<br>
otherwise set `DISABLE_PULLBACK_MODELS=False` [default], which is the same as NOT setting this ENV var at all.

## APP specific USER Vars
All apps can be provisioned in at least 2 Virtual Environment versions:<br>
- 'official' - This setup is "to the point", as defined and recommended by the app owners on GitHub.<br>
- 'latest' - This setup extends the 'official' setup with the latest PyTorch and Cuda libraries, or -<br>
  in the case of ComfyUI - also provides an additional set of pre-installed Custom-Nodes.

The user can choose from all available versions during Setup, or pre-select the VENV_VERSION to be installed via ENV vars in the format `VENV_VERSION_<app_id>`.

If these ENV vars are not set/passed into the container, the App-Manager will provide a UI for selecting them during Setup:

BCOMFY 'official' 5.43 GB (APP 75.3 MB):<br>
Python 3.11 && Cuda 12.4 && Recommended torch-2.5.1+cu124-cp311-cp311-linux_x86_64 && ComfyUI-Manager && comfy CLI

BCOMFY 'latest' 6.59 GB (APP 400.22 MB):<br>
Python 3.11 && Cuda 12.4 && Recommended torch-2.5.1+cu124-cp311-cp311-linux_x86_64 && ComfyUI-Manager && comfy CLI && 12x Custom Nodes (see below)

BFORGE 'official' 6.41 GB (APP 106.31 MB):<br>
Python 3.11 && Cuda 12.1 && Recommended torch-2.3.1+cu121-cp311-cp311-linux_x86_64

BFORGE 'latest' 6.62 GB (APP 105.58 MB):<br>
Python 3.11 && Cuda 12.4 && Upgraded to torch-2.5.1+cu124-cp311-cp311-linux_x86_64 && xformers

BA1111 'official' 4.85 GB (APP 41.78 MB):<br>
Python 3.11 && Cuda 12.1 && Recommended torch-2.1.2+cu121-cp311-cp311-linux_x86_64

BA1111 'latest' 5.88 GB (APP 40.65 MB):<br>
Python 3.11 && Cuda 12.4 && Upgraded to torch-2.5.1+cu124-cp311-cp311-linux_x86_64 && xformers

BKOHYA 'official':<br>
This does **NOT** exist.

BKOHYA 'latest' 11.61 GB (APP 58.57 MB):<br>
Python 3.10 && FLUX.1 version with torch-2.5.0+cu124 (setup-runpod.sh with requirements_runpod.txt)
(kohya_ss 'sd3-flux.1' branch and sd-scripts 'SD3' branch)

Example ENV vars to 'pre-select' a specific APP version (a container-start sketch follows below):<br>

VENV_VERSION_BCOMFY=latest<br>
VENV_VERSION_BFORGE=latest<br>
VENV_VERSION_BA1111=latest<br>
VENV_VERSION_BKOHYA=latest<br>

**NOTE**: Kohya currently is only available as the 'latest' (FLUX-)version, and has **NO** 'official' version!<br><br>
**NOTE**: The selected VENV also controls the setup of the App,<br>
e.g. for BCOMFY 'latest', it also installs and activates 12 additional popular workflow 'custom nodes':<br>
- https://github.com/cubiq/ComfyUI_essentials
- https://github.com/rgthree/rgthree-comfy
- https://github.com/WASasquatch/was-node-suite-comfyui
- https://github.com/Fannovel16/comfyui_controlnet_aux
- https://github.com/XLabs-AI/x-flux-comfyui
- https://github.com/city96/ComfyUI-GGUF
- https://github.com/kijai/ComfyUI-Florence2
- https://github.com/kijai/ComfyUI-KJNodes
- https://github.com/ssitu/ComfyUI_UltimateSDUpscale
- https://github.com/gseth/ControlAltAI-Nodes
- https://github.com/yolain/ComfyUI-Easy-Use
- https://github.com/ltdrdata/ComfyUI-Impact-Pack

**NOTE**: If it makes sense, additional app versions will be added to the MANIFEST later, e.g. 'experimental' versions ;-)<br>

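As a minimal sketch of passing these pre-selections into the container (the image tag and port mapping here are assumptions, not from this README):

```bash
# hypothetical local run; image name/tag and port mapping are assumptions
docker run -d \
  -e VENV_VERSION_BCOMFY=latest \
  -e VENV_VERSION_BKOHYA=latest \
  -e DISABLE_PULLBACK_MODELS=False \
  -p 7222:7222 \
  madiator2011/better-launcher:dev
```
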
## Future plans

**We also plan for additional Apps**:<br>
- @ostris `ai-toolkit` - another popular Trainer app, see https://github.com/ostris/ai-toolkit
- @MNeMoNiCuZ `joy-caption-batch` - a popular captioning app, see https://github.com/MNeMoNiCuZ/joy-caption-batch
  for captioning with https://www.aimodels.fyi/models/huggingFace/llama-joycaption-alpha-two-hf-llava-fancyfeast

Such a captioning app complements Trainers like `kohya_ss` and `ai-toolkit` very nicely, as they need good captions, specifically when training custom LoRA `Flux.1` models.

## Better AI-Launcher Features

All Apps can now also be `refreshed` at any time - with their `'Refresh Application'` button - to the latest GitHub state of their corresponding Repos. This includes refreshing repo sub-modules (as in the case of 'kohya_ss'), and also refreshes 'custom_nodes' (in the case of ComfyUI's 12 installed custom nodes). For 'ComfyUI', all custom module requirements will also be updated to their latest definitions.<br>
Note however, that refreshing an app needs to `reset` its status to the state it had when it was last installed/cloned!<br>
That means that any changes in the `app_path` (existing files edited or new files added) get lost, including local model downloads into the various `models` sub-folders of the app!<br>

Before refreshing, the `Refresh Symlinks` code will be called to `pull-back` any locally downloaded model files,
and save them into the `'shared_models'` workspace folder, before the actual `reset` is done.<br>
So this operation is not 'light' and you should plan for it accordingly!<br>

Every App can also be `deleted` and installed as another version with its `'Delete Application'` button.<br>
When `deleting` an app, the same logic applies as when `refreshing` an app, and the same `Refresh Symlinks` code will be called to `pull-back` any locally downloaded model files, and save them into the `'shared_models'` workspace folder, before the actual deletion of the app is done.<br>
This should make it easier to switch between app versions, if needed ;-)<br>

`Downloading` and `Unpacking` of app versions runs with the fastest available options (a combined command sketch follows below):
- Downloads:
  At the moment we use `aria2c --max-connection-per-server=16 --max-concurrent-downloads=16 --split=16` for downloading app version `TAR.gz` archives from a central `S3 bucket location`.<br>
- Unpacking:
  The `TAR.gz` archives are compressed with the `'gzip -9'` option to achieve the lowest possible archive file size during download, which at the same time still provides fast unpacking rates.<br>
  Unpacking the archives is also done as fast as possible with `PIGZ`, a parallel version of gzip. Although it only uses a single thread for decompression, it starts 3 additional threads for reading, writing, and check calculation.
- Verification:
  All downloaded TAR archives are `SHA256 hash` checked for possible download corruptions.<br>
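
Taken together, the download/unpack/verify pipeline roughly looks like this sketch (the archive name and hash here are placeholders; the real URLs and hashes come from the app_configs manifest):

```bash
# hypothetical archive path under the S3 bucket; real download_urls come from the manifest
aria2c --max-connection-per-server=16 --max-concurrent-downloads=16 --split=16 \
  "https://better.s3.madiator.com/bcomfy/bcomfy-latest.tar.gz"

# verify the download against its published SHA256 hash (hash value is a placeholder)
echo "<expected_sha256>  bcomfy-latest.tar.gz" | sha256sum -c -

# unpack with pigz (parallel gzip) instead of plain gzip
tar -I pigz -xf bcomfy-latest.tar.gz -C /workspace
```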
After unpacking, and after cloning/installing the app, both the `app_path` and the `venv_path` of the app are checked for correct and expected folder sizes. That should help to detect corrupted installations which - for whatever reason - did not finish their corresponding stage.<br>

This last verification part can also be done later at any time with the `'Check Application'` button of the app.<br>

If the check code detects wrong sizes for the APP or VENV folders - UNDER an expected minimum size of the app_path and venv_path - it offers to `delete` the app. `'Check Application'` shows a verification summary of the expected and actual APP and VENV folder sizes, and it also shows which version is currently installed and when it was last refreshed. It even shows you when an updated app version exists online.

### Shared Models
`'Shared Models'` provides a very powerful and completely configurable `'mapping'` for all kinds of model files, be it Checkpoints, LoRAs, Embeddings and many more, between a `'shared_models'` workspace folder and **all** installed applications, be it the currently supported applications or **any custom app**.
The mapping is completely transparent, and can be configured with 3 different kinds of `mapping JSON files`:
one map for the kinds of model types to share, another map for the installed app-path locations, and a third map `connecting` these two other maps. This allows **any** mapping to **any** app ;-)<br>

`'Shared Models'` supports file-symlinks for single-file models, but also folder-symlinks for folder-based models (e.g. most LLM models are provided as folders). The mapping supports both types of symlinks (see the sketch at the end of this section).

To further 'get started' with `'Shared Models'`, make sure to read the separate `README-SHARED_MODELS.txt`, which also provides 3 sample scenarios in the form of 3 installable small TAR archives with 'test-dummy' models, and a bash-script to install these test data files into your `'/workspace'` folder.
This readme file, bash-script and the 3 archives can be found in the `'/app/tests'` folder within the container (or source-code):

$ tree /app/tests
/app/tests
├── README-SHARED_MODELS.txt
├── populate_testdata.sh
├── testdata_installed_apps_pull.tar.gz
├── testdata_shared_models_link.tar.gz
└── testdata_stable-diffusion-webui_pull.tar.gz

1 directory, 5 files

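For illustration, the kind of symlinks the mapping produces (paths follow the testdata examples; the actual links are created by the `Refresh Symlinks` code, not manually):

```bash
# file-symlink: a single-file model shared into an app's local model folder
ln -s /workspace/shared_models/loras/lora-model1.txt \
      /workspace/ComfyUI/models/loras/lora-model1.txt

# folder-symlink: a folder-based model (e.g. an LLM) shared as a whole directory
ln -s /workspace/shared_models/LLM/Meta-Llama-3.1-8B \
      /workspace/ComfyUI/models/LLM/Meta-Llama-3.1-8B
```
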
### Model Downloader
We also provide an intelligent `Model Downloader` to download all types of models directly into the `'shared_models'` workspace, from where these models will be automatically shared across all installed applications, and mapped intelligently into their corresponding (differently named) local app model folders.<br>
This `Model Downloader` currently supports `HuggingFace` and `CivitAI` download Urls and - in the case of CivitAI - has a very smart `CivitAI Model and Version Picker Dialog` to choose between all available 'Versions' of a given CivitAI Model Id Url, and from any selected Version between all its available 'Files'.<br>

On the `'Models'` tab of the App-Manager, some `Example URLs` for popular Models are provided, both for `HuggingFace` and for `CivitAI`.

The `Model Downloader` also supports the use of `HuggingFace` and/or `CivitAI` `security tokens`, which can be provided as `ENV vars` (see below), stored in hidden files in the workspace, or used as one-time security tokens only available in memory in the web-form during model download.<br>
This allows downloading `private models` and also `gated models` from both `HuggingFace` and `CivitAI` (a direct request sketch follows below).

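A hedged sketch of calling the launcher's `/download_model` endpoint directly (the route and payload keys exist in `app.py`; the URL, names and token value here are illustrative, and the port follows the Dockerfile's `EXPOSE 7222`):

```bash
# model URL/name are illustrative; tokens may also come from ENV vars or token files
curl -X POST http://localhost:7222/download_model \
  -H "Content-Type: application/json" \
  -d '{
        "url": "https://huggingface.co/some-org/some-model/resolve/main/model.safetensors",
        "model_name": "model.safetensors",
        "model_type": "ckpt",
        "hf_token": "hf_xxx"
      }'
```
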
## ENV Vars (User and Secret Tokens)

**TODO: rename the file `"env.txt"` to `".env"` and adjust the ENV vars for your personal settings**

@@ -116,7 +263,7 @@

Your `HuggingFace` token.<br><br>
Can be a `READ` scoped token for downloading your `private` models, or `gated models` such as `Flux.1 Dev` or META's `Llama LLM models`.<br>
The HF_TOKEN needs to be a `READ/WRITE` scoped token if you also plan to **UPLOAD** models to `HuggingFace` later, when we support direct uploads of your trained models from Trainer Apps like `kohya_ss` or later from `ai-toolkit`.

- CIVITAI_API_TOKEN=xxx...xxx

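For illustration, what a `READ` scoped token enables outside the launcher (a generic Hugging Face download sketch; the gated model URL is an example, not a requirement):

```bash
# download a gated model file with a READ-scoped HF token
curl -L -H "Authorization: Bearer $HF_TOKEN" \
  -o flux1-dev.safetensors \
  "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors"
```
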
@@ -140,4 +287,4 @@ That means, for this template/image, you should use these formats to pass the above tokens:

- `{{ RUNPOD_SECRET_CIVITAI_API_TOKEN }}`

(c) 2024 RunPod Better App Manager. Created by Madiator2011 & lutzapps.

@@ -6,22 +6,28 @@ import threading
import time
from flask import Flask, render_template, jsonify, request
from flask_sock import Sock
import re
import json
import signal
import shutil
import subprocess
import traceback
import logging

from utils.ssh_utils import setup_ssh, save_ssh_password, get_ssh_password, check_ssh_config, SSH_CONFIG_FILE
from utils.filebrowser_utils import configure_filebrowser, start_filebrowser, stop_filebrowser, get_filebrowser_status, FILEBROWSER_PORT
from utils.app_utils import (
    run_app, run_bash_cmd, update_process_status, check_app_directories, get_app_status,
    force_kill_process_by_name, find_and_kill_process_by_port, update_webui_user_sh,
    fix_custom_nodes, is_process_running, install_app,
    get_available_venvs, get_bkohya_launch_url, init_app_status, # lutzapps - support dynamically generated gradio url and venv_size checks
    delete_app_installation, check_app_installation, refresh_app_installation # lutzapps - new app features for check and refresh app
)
from utils.websocket_utils import send_websocket_message, active_websockets
from utils.app_configs import get_app_configs, add_app_config, remove_app_config, app_configs, DEBUG_SETTINGS, APP_CONFIGS_MANIFEST_URL
from utils.model_utils import download_model, check_civitai_url, check_huggingface_url, format_size #, SHARED_MODELS_DIR # lutzapps - SHARED_MODELS_DIR is owned by the shared_models module now

# lutzapps
LOCAL_DEBUG = os.environ.get('LOCAL_DEBUG', 'False') # support local browsing for development/debugging

# use the new "utils.shared_models" module for app model sharing

@@ -55,10 +61,18 @@ from utils.shared_models import (
# global module 'SHARED_MODEL_APP_MAP' dict: 'model_type' -> 'app_name:app_model_dir' (relative path)
# which does a default mapping from app code or (if it exists) from the external JSON 'SHARED_MODEL_APP_MAP_FILE' file

"""
from flask import Flask
import logging

from utils.websocket_utils import send_websocket_message, active_websockets
from utils.app_configs import get_app_configs, add_app_config, remove_app_config, app_configs
from utils.model_utils import download_model, check_civitai_url, check_huggingface_url, format_size #, SHARED_MODELS_DIR # lutzapps - SHARED_MODELS_DIR is owned by the shared_models module now

logging.basicConfig(filename='record.log', level=logging.DEBUG)
app = Flask(__name__)

if __name__ == '__main__':
    app.run(debug=True)
"""

#logging.basicConfig(filename='better-ai-launcher.log', level=logging.INFO) # CRITICAL, ERROR, WARNING, INFO, DEBUG

app = Flask(__name__)
sock = Sock(app)

@@ -69,8 +83,6 @@ running_processes = {}

app_configs = get_app_configs()

#S3_BASE_URL = "https://better.s3.madiator.com/" # unused now

SETTINGS_FILE = '/workspace/.app_settings.json'

CIVITAI_TOKEN_FILE = '/workspace/.civitai_token'

@@ -108,35 +120,26 @@ def index():
    ssh_password = get_ssh_password()
    ssh_password_status = 'set' if ssh_password else 'not_set'

    filebrowser_status = get_filebrowser_status()
    app_status = init_app_status(running_processes)

    return render_template('index.html',
        apps=app_configs,
        app_status=app_status,
        pod_id=RUNPOD_POD_ID,
        RUNPOD_PUBLIC_IP=os.environ.get('RUNPOD_PUBLIC_IP'),
        RUNPOD_TCP_PORT_22=os.environ.get('RUNPOD_TCP_PORT_22'),

        # lutzapps - allow a localhost Url for the unsecure "http" and "ws" WebSockets protocol,
        # according to the 'LOCAL_DEBUG' ENV var
        enable_unsecure_localhost=os.environ.get('LOCAL_DEBUG'),
        app_configs_manifest_url=APP_CONFIGS_MANIFEST_URL,

        settings=settings,
        current_auth_method=current_auth_method,
        ssh_password=ssh_password,
        ssh_password_status=ssh_password_status,
        filebrowser_status=filebrowser_status)

@app.route('/start/<app_name>')
def start_app(app_name):

@@ -150,11 +153,38 @@ def start_app(app_name):
        update_webui_user_sh(app_name, app_configs)

        command = app_configs[app_name]['command']

        # bkohya enhancements
        if app_name == 'bkohya':
            # the --noverify flag is currently NOT supported anymore, need to check; in the meantime disable it
            # if DEBUG_SETTINGS['bkohya_noverify']:
            #     # Use regex to search & replace the command variable to launch bkohya
            #     #command = re.sub(r'kohya_gui.py', 'kohya_gui.py --noverify', command)
            #     print(f"launch bkohya with patched command '{command}'")

            if DEBUG_SETTINGS['bkohya_run_tensorboard']: # default == True
                # auto-launch tensorboard together with the bkohya app
                app_config = app_configs.get(app_name) # get the bkohya app_config
                app_path = app_config['app_path']
                cmd_key = 'run-tensorboard' # read the tensorboard launch command from the 'run-tensorboard' cmd_key

                ### run_app() variant, but tensorboard would need to be defined as an app
                # tensorboard_command = app_config['bash_cmds'][cmd_key] # get the bash_cmd value from app_config
                # message = f"Launch Tensorboard together with kohya_ss: cmd_key='{cmd_key}' ..."
                # print(message)
                # app_name = 'tensorboard'
                # threading.Thread(target=run_app, args=(app_name, tensorboard_command, running_processes)).start()

                ### run_bash_cmd() variant
                #run_bash_cmd(app_config, app_path, cmd_key=cmd_key)
                threading.Thread(target=run_bash_cmd, args=(app_config, app_path, cmd_key)).start()

        threading.Thread(target=run_app, args=(app_name, command, running_processes)).start()
        return jsonify({'status': 'started'})
    return jsonify({'status': 'already_running'})

@app.route('/stop/<app_name>', methods=['GET'])
def stop_app(app_name):
    if app_name in running_processes and get_app_status(app_name, running_processes) == 'running':
        try:

@@ -206,10 +236,11 @@ def kill_all():
            if get_app_status(app_key, running_processes) == 'running':
                stop_app(app_key)
        return jsonify({'status': 'success'})

    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})

@app.route('/force_kill/<app_name>', methods=['GET'])
def force_kill_app(app_name):
    try:
        success, message = force_kill_process_by_name(app_name, app_configs)

@@ -217,9 +248,91 @@ def force_kill_app(app_name):
            return jsonify({'status': 'killed', 'message': message})
        else:
            return jsonify({'status': 'error', 'message': message})

    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})

@app.route('/force_kill_by_port/<port>', methods=['GET'])
def force_kill_by_port_route(port:int):
    try:
        success = find_and_kill_process_by_port(port)
        message = ''
        if success:
            return jsonify({'status': 'killed', 'message': message})
        else:
            return jsonify({'status': 'error', 'message': message})

    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})


# lutzapps - added check app feature

@app.route('/delete_app/<app_name>', methods=['GET'])
def delete_app_installation_route(app_name:str):
    try:
        def progress_callback(message_type:str, message_data:str):
            try:
                send_websocket_message(message_type, message_data)
                print(message_data) # additionally print to output
            except Exception as e:
                print(f"Error sending progress update: {str(e)}")
                # Continue even if websocket fails
                pass

        success, message = delete_app_installation(app_name, app_configs, progress_callback)
        if success:
            return jsonify({'status': 'deleted', 'message': message})
        else:
            return jsonify({'status': 'error', 'message': message})

    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})

@app.route('/check_installation/<app_name>', methods=['GET'])
def check_app_installation_route(app_name:str):
    try:
        def progress_callback(message_type, message_data):
            try:
                send_websocket_message(message_type, message_data)
                print(message_data) # additionally print to output
            except Exception as e:
                print(f"Error sending progress update: {str(e)}")
                # Continue even if websocket fails
                pass

        success, message = check_app_installation(app_name, app_configs, progress_callback)
        if success:
            return jsonify({'status': 'checked', 'message': message})
        else:
            return jsonify({'status': 'error', 'message': message})

    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})

# lutzapps - added refresh app feature
@app.route('/refresh_installation/<app_name>', methods=['GET'])
def refresh_app_installation_route(app_name:str):
    try:
        def progress_callback(message_type, message_data):
            try:
                send_websocket_message(message_type, message_data)
                print(message_data) # additionally print to output
            except Exception as e:
                print(f"Error sending progress update: {str(e)}")
                # Continue even if websocket fails
                pass

        success, message = refresh_app_installation(app_name, app_configs, progress_callback)
        if success:
            return jsonify({'status': 'refreshed', 'message': message})
        else:
            return jsonify({'status': 'error', 'message': message})

    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)})


from gevent.lock import RLock
websocket_lock = RLock()

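A usage sketch for these new GET routes (assuming the launcher is reachable on port 7222, per the Dockerfile's `EXPOSE 7222`; 'bcomfy' is one of the defined app ids):

```bash
# check an installed app's APP/VENV folder sizes and version state
curl http://localhost:7222/check_installation/bcomfy

# refresh an app to the latest GitHub state of its repo (resets local changes!)
curl http://localhost:7222/refresh_installation/bcomfy

# delete an app installation (models are pulled back into shared_models first)
curl http://localhost:7222/delete_app/bcomfy
```
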
@@ -272,8 +385,24 @@ def send_heartbeat():
# Start heartbeat thread
threading.Thread(target=send_heartbeat, daemon=True).start()

@app.route('/available_venvs/<app_name>', methods=['GET'])
def available_venvs_route(app_name):
    try:
        success, venvs = get_available_venvs(app_name)
        if success:
            return jsonify({'status': 'success', 'available_venvs': venvs})
        else:
            return jsonify({'status': 'error', 'error': venvs})

    except Exception as e:
        error_message = f"Error for {app_name}: {str(e)}\n{traceback.format_exc()}"
        app.logger.error(error_message)
        return jsonify({'status': 'error', 'message': error_message}), 500

# lutzapps - added venv_version
@app.route('/install/<app_name>/<venv_version>', methods=['GET'])
def install_app_route(app_name, venv_version):
    try:
        def progress_callback(message_type, message_data):
            try:

@@ -283,19 +412,20 @@ def install_app_route(app_name):
                # Continue even if websocket fails
                pass

        success, message = install_app(app_name, venv_version, progress_callback)
        if success:
            return jsonify({'status': 'success', 'message': message})
        else:
            return jsonify({'status': 'error', 'message': message})

    except Exception as e:
        error_message = f"Installation error for {app_name}: {str(e)}\n{traceback.format_exc()}"
        app.logger.error(error_message)
        return jsonify({'status': 'error', 'message': error_message}), 500

@app.route('/fix_custom_nodes/<app_name>', methods=['GET'])
def fix_custom_nodes_route(app_name):
    success, message = fix_custom_nodes(app_name, app_configs)
    if success:
        return jsonify({'status': 'success', 'message': message})
    else:

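Correspondingly, a sketch of querying the selectable VENVs and installing one (same port assumption as above):

```bash
# list selectable VENV versions for an app, then install a specific one
curl http://localhost:7222/available_venvs/bcomfy
curl http://localhost:7222/install/bcomfy/latest
```
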
@@ -395,17 +525,17 @@ def start_symlink_update_thread():
    thread.start()

# modified function
@app.route('/recreate_symlinks', methods=['GET'])
def recreate_symlinks_route():
    # lutzapps - use the new "shared_models" module for app model sharing
    jsonResult = update_model_symlinks()

    return jsonResult

# modified function
@app.route('/create_shared_folders', methods=['GET'])
def create_shared_folders():
    # lutzapps - use the new "shared_models" module for app model sharing
    jsonResult = ensure_shared_models_folders()
    return jsonResult

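And for the shared-models helpers (same port assumption):

```bash
# trigger model sharing, or create the shared_models folder tree manually
curl http://localhost:7222/recreate_symlinks
curl http://localhost:7222/create_shared_folders
```
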
@@ -414,7 +544,7 @@ def save_civitai_token(token):
        json.dump({'token': token}, f)

# lutzapps - added function - 'HF_TOKEN' ENV var
def load_huggingface_token()->str:
    # look FIRST for the Huggingface token passed in as the 'HF_TOKEN' ENV var
    HF_TOKEN = os.environ.get('HF_TOKEN', '')

@@ -441,7 +571,7 @@ def load_huggingface_token():
    return None

# lutzapps - modified function - support the 'CIVITAI_API_TOKEN' ENV var
def load_civitai_token()->str:
    # look FIRST for the CivitAI token passed in as the 'CIVITAI_API_TOKEN' ENV var
    CIVITAI_API_TOKEN = os.environ.get('CIVITAI_API_TOKEN', '')

@@ -517,32 +647,33 @@ def get_model_types_route():

@app.route('/download_model', methods=['POST'])
def download_model_route():
    # this function will be called first from the model downloader, which only passes the url,
    # but did not parse for an already existing version_id or file_index.
    # if we ignore the already wanted version_id, the user will end up in the model-picker dialog
    # just to select the wanted version_id again, and then the model-picker also calls into this function,
    # but now with a non-blank version_id

    try:
        data = request.json
        url = data.get('url')
        model_name = data.get('model_name')
        model_type = data.get('model_type')
        civitai_token = data.get('civitai_token') or load_civitai_token() # if no token is provided in the request, try to read it from ENV and last from file
        hf_token = data.get('hf_token') or load_huggingface_token() # if no token is provided in the request, try to read it from ENV and last from file
        version_id = data.get('version_id')
        file_index = data.get('file_index')

        is_civitai, _, url_model_id, url_version_id = check_civitai_url(url)
        if version_id == None: # model-picker dialog not used already
            version_id = url_version_id # get a possible version_id from the copy-pasted url

        is_huggingface, _, _, _, _ = check_huggingface_url(url)

        # only CivitAI or Huggingface model downloads are supported for now
        if not (is_civitai or is_huggingface):
            return jsonify({'status': 'error', 'message': 'Unsupported URL. Please use Civitai or Hugging Face URLs.'}), 400

        # CivitAI downloads require an API Token (e.g. for model variant downloads and private models)
        if is_civitai and not civitai_token:
            return jsonify({'status': 'error', 'message': 'Civitai token is required for downloading from Civitai.'}), 400

File diff suppressed because it is too large
@@ -2,6 +2,16 @@ TESTDATA AND EXPLANATION OF MAPPING EVERYTHING YOU WANT

In the folder "/app/tests" you find the following files:

$ tree /app/tests
/app/tests
|-- README-SHARED_MODELS.txt
|-- populate_testdata.sh
|-- testdata_installed_apps_pull.tar.gz
|-- testdata_shared_models_link.tar.gz
`-- testdata_stable-diffusion-webui_pull.tar.gz

1 directory, 5 files

- "README-SHARED_MODELS.txt" (this file)

@@ -15,16 +25,15 @@ In the folder "/app/tests" you find the following files:

CREATE TESTDATA (once done already):

cd /workspace

# For Testcase #1 - create testdata in the "shared_models" folder with dummy models for most model_types:
$ tar -czf /app/tests/testdata_shared_models_link.tar.gz /workspace/shared_models

# For Testcase #2 - create testdata with SD-Models for A1111 to be pulled back into "shared_models" and linked back:
$ tar -czf /app/tests/testdata_stable-diffusion-webui_pull.tar.gz /workspace/stable-diffusion-webui

# For Testcase #3 - create testdata with all possible "Apps" installed into your "/workspace"
$ cd /workspace
$ tar -cvf - . | gzip -9 - | pv > /app/tests/testdata_installed_apps_pull.tar.gz

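To sanity-check an archive before extracting it (a small addition, not part of the original script):

$ tar -tzf /app/tests/testdata_shared_models_link.tar.gz | head
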
USE TESTDATA:
@@ -34,85 +43,86 @@ USE TESTDATA:

/app/tests/populate_testdata.sh:

# use these 3 test cases and extract/merge them accordingly into your workspace, but READ before you mess yourself up too much!!
tar -xzf /app/tests/testdata_shared_models_link.tar.gz -C /workspace
tar -xzf /app/tests/testdata_stable-diffusion-webui_pull.tar.gz -C /workspace
tar -xzf /app/tests/testdata_installed_apps_pull.tar.gz -C /workspace

Testcase #1:

When you extract "./testdata_shared_models_link.tar.gz" into the "/workspace" folder, you get the folder "shared_models" installed. This is our "shared models" central location to share all types of models across apps.

$ tree shared_models

shared_models
|-- LLM
|   `-- Meta-Llama-3.1-8B
|       |-- llm-Llama-modelfile1.txt
|       |-- llm-Llama-modelfile2.txt
|       `-- llm-Llama-modelfile3.txt
|-- ckpt
|   |-- ckpt-model1.txt
|   `-- ckpt-model2.txt
|-- clip
|   `-- clip-model1.txt
|-- controlnet
|   `-- controlnet-model1.txt
|-- embeddings
|   |-- embedding-model1.txt
|   `-- embedding-model2.txt
|-- hypernetworks
|   `-- hypernetworks-model1.txt
|-- insightface
|   `-- insightface-model1.txt
|-- ipadapters
|   |-- xlabs
|   |   `-- xlabs-ipadapter-model1.txt
|   `-- ipadapter-model1.txt
|-- loras
|   |-- flux
|   |   `-- flux-lora-model1.txt
|   |-- xlabs
|   |   `-- xlabs-lora-model1.txt
|   |-- lora-SD-model1.txt
|   |-- lora-SD-model2.txt
|   |-- lora-SD-model3.txt
|   |-- lora-SD-model4.txt
|   |-- lora-SD-model5.txt
|   |-- lora-model1.txt
|   `-- lora-model2.txt
|-- reactor
|   |-- faces
|   |   `-- reactor-faces-model1.txt
|   `-- reactor-model1.txt
|-- unet
|   |-- unet-model1.txt
|   `-- unet-model2.txt
|-- upscale_models
|   `-- esrgan-model1.txt
|-- vae
|   `-- vae-model1.txt
`-- vae-approx
    `-- vae-apporox-model1.txt

20 directories, 29 files

All these "*.txt" files "simulate" model files of a specific category (model type).
When you have this test data and you click the "Recreate Symlinks" button on the "Settings" Tab, all these models will be shared with all "installed" apps, like:

A1111:    /workspace/stable-diffusion-webui
Forge:    /workspace/stable-diffusion-webui-forge
ComfyUI:  /workspace/ComfyUI
kohya_ss: /workspace/kohya_ss
CUSTOM1:  /workspace/joy-caption-batch

To "simulate" an installed app, you just need to create one or all of these folders manually, as empty folders. Maybe try it one-by-one, like you would do "in-real-life".

After at least ONE app is installed, you can test the model sharing with the above mentioned "Recreate Symlinks" button.

All of these 29 models should be shared into all "installed" apps.

When you "add" a second app, this new app will also get all these models shared into its local model folders, which can be named differently from the shared_models folders.

Some model types (e.g. UNET) have a model folder separate from Checkpoints in ComfyUI, but in A1111/Forge, these 2 model types are merged ("flattened") into one "Stable-Diffusion" model folder. See the third MAP shown later in this document.

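A minimal way to "simulate" an installed app for this test, per the paragraph above:

$ mkdir -p /workspace/stable-diffusion-webui
# then click the "Recreate Symlinks" button on the "Settings" Tab
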
@@ -130,19 +140,19 @@ You can also test to delete a "shared model" in the "shared_models" directory, and
Testcase #2:

In the second testdata TAR archive, you have some SD-models which simulate the installation of the model files once for ONE app, in this test case only for A1111.
The "./testdata_stable-diffusion-webui_pull.tar.gz" is easier to handle than the one for Testcase #3, as it installs directly into the "original" App install location of A1111, which is "/workspace/stable-diffusion-webui".

$ tree stable-diffusion-webui

stable-diffusion-webui
|-- _add
|   |-- lora-SD-model2.txt
|   |-- lora-SD-model3.txt
|   |-- lora-SD-model4.txt
|   `-- lora-SD-model5.txt
`-- models
    `-- Lora
        `-- lora-SD-model1.txt

4 directories, 5 files

@@ -151,25 +161,25 @@ Testcase #3:

In this test case you also have other apps installed already, but the principle is the same, just with a little bit more careful folder management.

The "./testdata_installed_apps_pull.tar.gz" extracts directly into the "/workspace" folder.
It simulates an installed A1111, Forge, ComfyUI and kohya_ss, and joy-caption-batch.
Copy/move them into "/workspace" and replace/merge existing folders.

$ tree /workspace

|-- ComfyUI
|-- _add
|   |-- lora-SD-model2.txt
|   |-- lora-SD-model3.txt
|   |-- lora-SD-model4.txt
|   `-- lora-SD-model5.txt
|-- joy-caption-batch
|-- kohya_ss
|-- stable-diffusion-webui
|   `-- models
|       `-- Lora
|           `-- lora-SD-model1.txt
`-- stable-diffusion-webui-forge

9 directories, 5 files

@@ -182,12 +192,14 @@ But it will then also be shared to all other "installed" apps, like ComfyUI, Forge

The only "mapping rule" defined for Kohya is to get all "ckpt" (Checkpoint) model files and all UNET model files shared from the corresponding "shared_models" subfolders into its /models folder (see later in the 3rd MAP below).

In the testdata "Apps" folder you also find an "_add" folder, with 4 more SD-Models to play around with the App Sharing/Syncing framework. Put them in any local app model folder and watch what happens to them, and where they can be seen/used from other apps. You either wait a few Minutes to let this happen automatically (every 5 Minutes), or you press the "Recreate Symlinks" button at any time to kick this off.

You can also test to see what happens when you DELETE a model file from the shared_models sub-folders: all its symlinks shared to all apps will also automatically be removed, so no broken links will be left behind.

When you delete a symlink in an app model folder, only the local app "loses" the model (it is only a link to the original shared model), so no worries here. Such locally removed symlinks however will be re-created again automatically.

You can merge this with Testcase #1.

BUT THAT IS ONLY THE BEGINNING.
All this logic described here is controlled via 3 (three) "MAP" dictionary JSON files, which can also be found in the "/workspace/shared_models" folder, after you click the "Create Shared Folders" button on the "Settings" Tab. They will be auto-generated if missing, or otherwise "used-as-is":

@@ -238,13 +250,13 @@ IMPORTANT: Also be aware that when you add or change/rename folder names here, you
    "A1111": "/workspace/stable-diffusion-webui",
    "Forge": "/workspace/stable-diffusion-webui-forge",
    "ComfyUI": "/workspace/ComfyUI",
    "kohya_ss": "/workspace/kohya_ss",
    "CUSTOM1": "/workspace/joy-caption-batch"
}

This is the "APP_INSTALL_DIRS" map for the app install dirs within "/workspace", and as you see, it also supports "CUSTOM" apps to be installed and participate in the model sharing.

This dictionary is "synced" with the main app's "app_configs" dictionary, so the installation folders are the same, and this should NOT be changed. What you can change in this MAP is to add "CUSTOM" apps, like "CUSTOM1" here, e.g. to re-use the Llama LLM model which is centrally installed in "shared_models" under the LLM folder, to be "shared" between ComfyUI and the "Joy Caption Batch" tool, which is nice for generating your "Caption" files for your LoRA Training files with "kohya_ss", for example.

3.) The "SHARED_MODEL_APP_MAP" map found as "SHARED_MODEL_APP_MAP_FILE"

@@ -255,7 +267,7 @@ This dictionary is "synced" with the main apps "app_configs" dictionary, so the
    "ComfyUI": "/models/checkpoints",
    "A1111": "/models/Stable-diffusion",
    "Forge": "/models/Stable-diffusion",
    "kohya_ss": "/models" # flatten all "ckpt" / "unet" models here
},

"clip": {

@@ -342,7 +354,7 @@ This dictionary is "synced" with the main apps "app_configs" dictionary, so the
    "ComfyUI": "/models/unet",
    "A1111": "/models/Stable-diffusion", # flatten all "ckpt" / "unet" models here
    "Forge": "/models/Stable-diffusion", # flatten all "ckpt" / "unet" models here
    "kohya_ss": "/models" # flatten all "ckpt" / "unet" models here
},

"upscale_models": {

@@ -395,7 +407,7 @@ FOLDER SHARING, e.g. LLM folder-based models:
_app_install.dirs.json:
{
    ...
    "kohya_ss": "/workspace/kohya_ss",
    "CUSTOM1": "/workspace/joy-caption-batch"
}

@@ -3,10 +3,10 @@
# please use the "./README-SHARED_MODELS.txt" before you extract these TARs!!!

# Testcase #1
tar -xzf /app/tests/testdata_shared_models_link.tar.gz -C /workspace

# Testcase #2
tar -xzf /app/tests/testdata_stable-diffusion-webui_pull.tar.gz -C /workspace

# Testcase #3
tar -xzf /app/tests/testdata_installed_apps_pull.tar.gz -C /workspace
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -5,150 +5,300 @@ import urllib.request
|
|||
import json
|
||||
|
||||
# this is the replacement for the XML manifest, and defines all app_configs in full detail
|
||||
APP_CONFIGS_MANIFEST_URL = "https://better.s3.madiator.com/app_configs.json"
|
||||
# if this JSON can not be downloaded, the below code defaults apply
|
||||
# this app_configs dict can also be generated from code when at least one of following
|
||||
# 2 ENV vars are found with following values:
|
||||
# 1. LOCAL_DEBUG = 'True' # this ENV var should not be passed when in the RUNPOD environment, as it disabled the CF proxy Urls of the App-Manager
|
||||
# and this ENV var also controls some other aspects of the app.
|
||||
#
|
||||
# 2. APP_CONFIGS_FILE = 'True' # only exists for this one purpose, to generate the below Dict as file
|
||||
# "/workspace/_app_configs.json", which then can be uploaded to the above defined APP_CONFIGS_MANIFEST_URL
|
||||
# NOTE:
|
||||
|
||||
# this will now be passed in from an ENV var 'APP_CONFIGS_MANIFEST_URL=xxx' set in the 'Dockerfile'
|
||||
# so the Dockerfile controls between 'production' and 'development' MANIFEST_URLs and S3 download locations for the VENVS
|
||||
#APP_CONFIGS_MANIFEST_URL = "https://better.s3.madiator.com/app_configs.json" # production MANIFEST_URL
|
||||
#APP_CONFIGS_MANIFEST_URL = "https://better-dev.s3.madiator.com/app_configs.json" # development MANIFEST_URL
|
||||
APP_CONFIGS_MANIFEST_URL = ""
|
||||
|
||||
# If this JSON file can not be downloaded from the MANIFEST_URL, the below code defaults apply
|
||||
# This 'app_configs' dict can also be generated from code when at least one of the following
|
||||
# 2 ENV vars is found with the following values:
|
||||
#
|
||||
# 1. LOCAL_DEBUG = 'True' # this ENV var should not be passed when in the RUNPOD environment,
|
||||
# as it disables the CF proxy Urls of the App-Manager
|
||||
# This ENV var also controls some other (debugging) aspects of the app.
|
||||
#
|
||||
# 2. APP_CONFIGS_FILE = 'True' # only exists for this one purpose, to generate the below 'app_configs' dict
|
||||
# as file "/workspace/app_configs.json", which then can be uploaded to the above defined APP_CONFIGS_MANIFEST_URL
|
||||
|
||||
|
||||
COMMON_SETTINGS = {} # global dict for 'common_settings', loaded from the app_configs 'common_settings' below during init_app_configs()
|
||||
|
||||
# the 'common_settings' sub dict of app_configs will be stored in the above COMMON_SETTINGS GLOBAL module dict var, and then removed from app_configs, as this is not an app
|
||||
app_configs = {
|
||||
'common_settings': { # this 'common_settings' dictionary is transferred to the COMMON_SETTINGS global dict
|
||||
# during init_app_configs() and can be overwritten individually with DEBUG_SETTINGS['common_settings'], if non-None / non-empty values are found
|
||||
'base_download_url': '', # str: base-url used to resolve the RELATIVE download_urls of the individual VENVS
|
||||
# if app_configs['common_settings']['base_download_url'] is not explicitly set,
|
||||
# the default is to generate the base download url from the APP_CONFIGS_MANIFEST_URL domain
|
||||
# (production or development S3 bucket URL), so switching URL also switches VENV download locations
|
||||
# this again could be overwritten via DEBUG_SETTINGS['common_settings']['base_download_url']
|
||||
# this also can be a different base url than the APP_CONFIGS_MANIFEST_URL, so the JSON file can be on a different
|
||||
# location than the TAR VENV files. All below 'download_url' are either RELATIVE to this 'base_download_url',
|
||||
# which is the case right now, but each VENV can also define an ABSOLUTE (and different) 'download_url' for itself
|
||||
# this 'base_download_url' can also be overwritten with DEBUG_SETTINGS['common_settings']['base_download_url']
|
||||
'verify_app_size': True, # bool: check APP folder sizes during Setup (can be overwritten with DEBUG_SETTINGS['common_settings']['verify_app_size'])
|
||||
'delete_unverified_app_path': False, # bool: if set to True, delete unverified APP_PATH folder from /workspace during Setup or check_app_installation() UI function,
|
||||
# if the result of the 'du -sk' command against the installed app_path does not reach the app's defined 'minimum_app_size_kb'
|
||||
# can be overwritten with DEBUG_SETTINGS['common_settings']['delete_unverified_app_path']
|
||||
'verify_venv_size': True, # bool: check VENV folder sizes during Setup (can be overwritten with DEBUG_SETTINGS['common_settings']['verify_venv_size'])
|
||||
'delete_unverified_venv_path': False, # bool: if set to True, delete unverified VENV folder from /workspace during Setup or check_app_installation() UI function,
|
||||
# if the result of the 'du -sk' command against the installed venv_path does not reach the 'venv_uncompressed_size_kb' defined in the app's selected 'venv_info'
|
||||
# can be overwritten with DEBUG_SETTINGS['common_settings']['delete_unverified_venv_path']
|
||||
'verify_tolerance_percent': 5 # percentage (int) by which the verified sizes of app_path and venv_path are allowed to vary
|
||||
# can be overwritten with DEBUG_SETTINGS['common_settings']['verify_tolerance_percent']
|
||||
},
|
||||
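As a rough illustration of the size checks described above ('verify_app_size' / 'verify_venv_size' with 'verify_tolerance_percent'), here is a minimal sketch using the same 'du -sk' approach the comments reference; the function name and the example call are assumptions:

import subprocess

def verify_folder_size(path: str, minimum_size_kb: int, tolerance_percent: int = 5) -> bool:
    # "du -sk <path>" reports the folder size in KBytes, as referenced in the comments above
    result = subprocess.run(["du", "-sk", path], capture_output=True, text=True)
    actual_size_kb = int(result.stdout.split()[0])
    # allow the actual size to undershoot the expected minimum by the tolerance factor
    return actual_size_kb >= minimum_size_kb * (100 - tolerance_percent) / 100

# e.g. verify_folder_size("/workspace/bcomfy", 6913704, tolerance_percent=5)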
'bcomfy': {
|
||||
'id': 'bcomfy',
|
||||
'name': 'Better Comfy UI',
|
||||
'command': 'cd /workspace/bcomfy && . ./bin/activate && cd /workspace/ComfyUI && python main.py --listen --port 3000 --enable-cors-header',
|
||||
'venv_path': '/workspace/bcomfy',
|
||||
'app_path': '/workspace/ComfyUI',
|
||||
'port': 3000,
|
||||
'download_url': 'https://better.s3.madiator.com/bcomfy/bcomfy.tar.gz', # (2024-11-08 18:50:00Z - lutzapps)
|
||||
#'venv_uncompressed_size': 6452737952, # uncompressed size of the tar-file (in bytes) - lutzapps new version
|
||||
'venv_uncompressed_size': 6155295493, # uncompressed size of the tar-file (in bytes) - original version
|
||||
#'archive_size': 3389131462 # tar filesize (in bytes) - lutzapps new version
|
||||
'archive_size': 3179595118, # tar filesize (in bytes) - original version
|
||||
#'sha256_hash': '18e7d71b75656924f98d5b7fa583aa7c81425f666a703ef85f7dd0acf8f60886', # lutzapps new version
|
||||
'sha256_hash': '7fd60808a120a1dd05287c2a9b3d38b3bdece84f085abc156e0a2ee8e6254b84', # original version
|
||||
'app_path': '/workspace/ComfyUI',
|
||||
'repo_url': 'https://github.com/comfyanonymous/ComfyUI.git',
|
||||
'branch_name': '', # empty branch_name means default = 'master'
|
||||
'commit': '', # or commit hash (NYI)
|
||||
'clone_recursive': False,
|
||||
'refresh': False,
|
||||
'allow_refresh': True, # allow to refresh the app
|
||||
'venv_path': '/workspace/bcomfy',
|
||||
'venv_version_default': 'latest', # use the 'latest' VENV version by default,
|
||||
# can be overwritten with 'VENV_VERSION_<app_id>' ENV var or via DEBUG_SETTINGS['select_venv_version']
|
||||
'available_venvs': [
|
||||
{ # venv SETUP: pip install torch torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/cu124 && pip install -r requirements.txt
|
||||
# install ComfyUI-Manager and Requirements
|
||||
'version': 'official',
|
||||
'build_info': 'v1.0 - Nov 16, 2024, 18:28 GMT by lutzapps',
|
||||
'notes': 'Python 3.11 && Cuda 12.4 && Recommended torch-2.5.1+cu124-cp311-cp311-linux_x86_64 && ComfyUI-Manager && comfy CLI',
|
||||
'branch_name': '', # empty branch_name means default = 'master'
|
||||
'commit_id': '', # if set, it wins over branch_name
|
||||
'clone_recursive': False,
|
||||
'minimum_app_size_kb': 77108, # du /workspace/ComfyUI -sk (without custom_nodes)
|
||||
'download_url': 'bcomfy/bcomfy-official.tar.gz',
|
||||
'venv_uncompressed_size_kb': 5696320, # uncompressed size of "bcomfy-official.tar.gz" (in KBytes), "du /workspace/bcomfy -sk"
|
||||
'archive_size_bytes': 3099730702, # tar filesize (in Bytes), "ls bcomfy-official.tar.gz -la"
|
||||
'sha256_hash': 'dc22367fba5829eda316858f3ff148659901f26ef8079cd76676ab1025923d19' # shasum -a 256 bcomfy-official.tar.gz
|
||||
},
|
||||
{ # venv SETUP: 'official' && installed all custom nodes from below list with their requirements
|
||||
'version': 'latest',
|
||||
'build_info': 'v1.0 - Nov 16, 2024, 20:48 GMT by lutzapps',
|
||||
'notes': 'Python 3.11 && Cuda 12.4 && Recommended torch-2.5.1+cu124-cp311-cp311-linux_x86_64 && ComfyUI-Manager && comfy CLI && 12x Custom Nodes',
|
||||
'branch_name': '', # empty branch_name means default = 'master'
|
||||
'commit_id': '', # if set, it wins over branch_name
|
||||
'clone_recursive': False,
|
||||
'minimum_app_size_kb': 409828, # "du /workspace/ComfyUI -sk" (with custom_nodes)
|
||||
'download_url': 'bcomfy/bcomfy-latest.tar.gz',
|
||||
'venv_uncompressed_size_kb': 6913704, # uncompressed size of "bcomfy-latest.tar.gz" (in KBytes), "du /workspace/bcomfy -sk"
|
||||
'archive_size_bytes': 3551621652, # tar filesize (in bytes), "ls bcomfy-latest.tar.gz -la"
|
||||
'sha256_hash': 'c621884e2d016d89a2806cb9371330493f7232168afb93e2fc1440d87da0b896' # shasum -a 256 bcomfy-latest.tar.gz
|
||||
}
|
||||
],
|
||||
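The 'venv_version_default' resolution described above could look roughly like this sketch (precedence order assumed: DEBUG_SETTINGS['select_venv_version'], then the 'VENV_VERSION_<app_id>' ENV var, then the app default):

import os

def select_venv_info(app_id: str, app_config: dict, debug_settings: dict) -> dict:
    # ENV var name pattern 'VENV_VERSION_<app_id>' per the comments above
    version = (debug_settings.get("select_venv_version")
               or os.environ.get(f"VENV_VERSION_{app_id}")
               or app_config["venv_version_default"])
    for venv_info in app_config["available_venvs"]:
        if venv_info["version"] == version:
            return venv_info
    raise ValueError(f"no VENV '{version}' defined for app '{app_id}'")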
'custom_nodes': [ # following custom_nodes will be git cloned and installed with "pip install -r requirements.txt" (in Testing)
|
||||
{
|
||||
'name': 'ComfyUI-Manager (ltdrdata)', # this node is installed in the VENV
|
||||
'venv_version': '*', # install this node into all (*) VENV versions
|
||||
'name': 'ComfyUI-Manager (ltdrdata)', # this node is installed into ALL (*) VENV versions
|
||||
'path': 'ComfyUI-Manager',
|
||||
'repo_url': 'https://github.com/ltdrdata/ComfyUI-Manager.git'
|
||||
'repo_url': 'https://github.com/ltdrdata/ComfyUI-Manager.git',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'name': 'ComfyUI-Essentials (cubic)', # this node is installed in the VENV
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'ComfyUI-Essentials (cubic)', # this node is installed only into the 'latest' VENV version
|
||||
'path': 'ComfyUI_essentials',
|
||||
'repo_url': 'https://github.com/cubiq/ComfyUI_essentials'
|
||||
'repo_url': 'https://github.com/cubiq/ComfyUI_essentials',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'rgthree comfy',
|
||||
'path': 'rgthree-comfy',
|
||||
'repo_url': 'https://github.com/rgthree/rgthree-comfy',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'was node-suite comfyui (WASasquatch)',
|
||||
'path': 'was-node-suite-comfyui',
|
||||
'repo_url': 'https://github.com/WASasquatch/was-node-suite-comfyui',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'comfyui controlnet-aux (Fannovel16)',
|
||||
'path': 'comfyui_controlnet_aux',
|
||||
'repo_url': 'https://github.com/Fannovel16/comfyui_controlnet_aux',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'x-flux-comfyui (XLabs-AI)',
|
||||
'path': 'x-flux-comfyui',
|
||||
'repo_url': 'https://github.com/XLabs-AI/x-flux-comfyui',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'ComfyUI-GGUF (city96)',
|
||||
'path': 'ComfyUI-GGUF',
|
||||
'repo_url': 'https://github.com/city96/ComfyUI-GGUF',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'ComfyUI-Florence2 (kijai)',
|
||||
'path': 'ComfyUI-Florence2F',
|
||||
'repo_url': 'https://github.com/kijai/ComfyUI-Florence2',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'ComfyUI KJNodes (kijai)',
|
||||
'path': 'ComfyUI-KJNodes',
|
||||
'repo_url': 'https://github.com/kijai/ComfyUI-KJNodes',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'ComfyUI UltimateSDUpscale (ssitu)',
|
||||
'path': 'ComfyUI_UltimateSDUpscale',
|
||||
'repo_url': 'https://github.com/ssitu/ComfyUI_UltimateSDUpscale',
|
||||
'install_requirements_txt': False, # NO requirements.txt file for PIP INSTALL
|
||||
'clone_recursive': True # clone this node --recursive according to README.md
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'ControlAltAI Nodes (gseth)',
|
||||
'path': 'ControlAltAI-Nodes',
|
||||
'repo_url': 'https://github.com/gseth/ControlAltAI-Nodes',
|
||||
'install_requirements_txt': False, # NO requirements.txt file for PIP INSTALL
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'ComfyUI Easy-Use (yolain)',
|
||||
'path': 'ComfyUI-Easy-Use',
|
||||
'repo_url': 'https://github.com/yolain/ComfyUI-Easy-Use',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
},
|
||||
{
|
||||
'venv_version': 'latest', # install this node only in the 'latest' VENV, but not in the 'official' VENV
|
||||
'name': 'ComfyUI Impact-Pack (tdrdata)',
|
||||
'path': 'ComfyUI-Impact-Pack',
|
||||
'repo_url': 'https://github.com/ltdrdata/ComfyUI-Impact-Pack',
|
||||
'install_requirements_txt': True,
|
||||
'clone_recursive': False
|
||||
}
|
||||
### planned custom nodes - To Be Discussed
|
||||
# {
|
||||
# 'name': 'rgthree comfy',
|
||||
# 'path': 'rgthree-comfy',
|
||||
# 'repo_url': 'https://github.com/rgthree/rgthree-comfy'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'was node suite comfyui',
|
||||
# 'path': 'was-node-suite-comfyui',
|
||||
# 'repo_url': 'https://github.com/WASasquatch/was-node-suite-comfyui'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'comfyui controlnet aux',
|
||||
# 'path': 'comfyui_controlnet_aux',
|
||||
# 'repo_url': 'https://github.com/Fannovel16/comfyui_controlnet_aux'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'x-flux-comfyui (XLabs-AI)',
|
||||
# 'path': 'x-flux-comfyui',
|
||||
# 'repo_url': 'https://github.com/XLabs-AI/x-flux-comfyui'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'ComfyUI-GGUF (city96)',
|
||||
# 'path': 'ComfyUI-GGUF',
|
||||
# 'repo_url': 'https://github.com/city96/ComfyUI-GGUF'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'ComfyUI-Florence2 (kijai)',
|
||||
# 'path': 'ComfyUI-Florence2F',
|
||||
# 'repo_url': 'https://github.com/kijai/ComfyUI-Florence2'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'ComfyUI-KJNodes (kijai)',
|
||||
# 'path': 'ComfyUI-KJNodes',
|
||||
# 'repo_url': 'https://github.com/kijai/ComfyUI-KJNodes'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'ComfyUI_UltimateSDUpscale (ssitu)',
|
||||
# 'path': 'ComfyUI_UltimateSDUpscale',
|
||||
# 'repo_url': 'https://github.com/ssitu/ComfyUI_UltimateSDUpscale'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'ControlAltAI Nodes (gseth)',
|
||||
# 'path': 'ControlAltAI-Nodes',
|
||||
# 'repo_url': 'https://github.com/gseth/ControlAltAI-Nodes'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'ComfyUI Easy-Use (yolain)',
|
||||
# 'path': 'ComfyUI-Easy-Use',
|
||||
# 'repo_url': 'https://github.com/yolain/ComfyUI-Easy-Use'
|
||||
# },
|
||||
# {
|
||||
# 'name': 'ComfyUI Impact-Pack (tdrdata)',
|
||||
# 'path': 'ComfyUI-Impact-Pack',
|
||||
# 'repo_url': 'https://github.com/ltdrdata/ComfyUI-Impact-Pack'
|
||||
# }
|
||||
]
|
||||
],
|
||||
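A simplified sketch of the clone-and-install loop implied by the 'custom_nodes' entries above, filtering on 'venv_version' and honoring 'clone_recursive' and 'install_requirements_txt' (paths and function name are illustrative):

import os
import subprocess

def install_custom_nodes(custom_nodes: list, selected_venv_version: str,
                         app_path: str = "/workspace/ComfyUI"):
    for node in custom_nodes:
        if node["venv_version"] not in ("*", selected_venv_version):
            continue  # this node is not meant for the selected VENV version
        node_path = os.path.join(app_path, "custom_nodes", node["path"])
        clone_cmd = ["git", "clone"]
        if node["clone_recursive"]:
            clone_cmd.append("--recursive")
        subprocess.run(clone_cmd + [node["repo_url"], node_path], check=True)
        if node["install_requirements_txt"]:
            subprocess.run(["pip", "install", "-r", "requirements.txt"],
                           cwd=node_path, check=True)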
'bash_cmds': { # bcomfy helper cmds (all commands run in the activated VENV and can pass a cwd; {app_path} macro support)
|
||||
'install-requirements': 'pip install -r requirements.txt', # for installing/refreshing custom_nodes
|
||||
'install-comfy-CLI': 'pip install comfy-cli', # install comfy CLI used in 'fix-custom_nodes'
|
||||
'fix-custom_nodes': 'comfy --skip-prompt --no-enable-telemetry set-default {app_path} && comfy node restore-dependencies',
|
||||
'pip-clean-up': 'pip cache purge && py3clean {app_path}' # clean-up pip install caches and python runtime caches
|
||||
},
|
||||
},
|
||||
'bforge': {
|
||||
'id': 'bforge', # app_name
|
||||
'name': 'Better Forge',
|
||||
'command': 'cd /workspace/bforge && . ./bin/activate && cd /workspace/stable-diffusion-webui-forge && ./webui.sh -f --listen --enable-insecure-extension-access --api --port 7862',
|
||||
'venv_path': '/workspace/bforge',
|
||||
'app_path': '/workspace/stable-diffusion-webui-forge',
|
||||
'port': 7862,
|
||||
'download_url': 'https://better.s3.madiator.com/bforge/bforge.tar.gz',
|
||||
'venv_uncompressed_size': 7689838771, # uncompressed size of the tar-file (in bytes),
|
||||
'archive_size': 3691004078, # tar filesize (in bytes)
|
||||
'sha256_hash': 'e87dae2324a065944c8d36d6ac4310af6d2ba6394f858ff04a34c51aa5f70bfb',
|
||||
'app_path': '/workspace/stable-diffusion-webui-forge',
|
||||
'repo_url': 'https://github.com/lllyasviel/stable-diffusion-webui-forge.git',
|
||||
'branch_name': '', # empty branch_name means default = 'master'
|
||||
'commit': '', # or commit hash (NYI)
|
||||
'clone_recursive': False,
|
||||
'refresh': False
|
||||
'allow_refresh': True, # allow to refresh the app
|
||||
'venv_path': '/workspace/bforge',
|
||||
'venv_version_default': 'latest', # use the 'latest' VENV version by default,
|
||||
# can be overwritten with 'VENV_VERSION_<app_id>' ENV var or via DEBUG_SETTINGS['select_venv_version']
|
||||
'available_venvs': [
|
||||
{ # venv SETUP: 'webui.sh -f can_run_as_root=1'
|
||||
'version': 'official',
|
||||
'build_info': 'v1.0 - Nov 16, 2024, 14:32 GMT by lutzapps',
|
||||
'notes': 'Python 3.11 && Cuda 12.1 && Recommended torch-2.3.1+cu121-cp311-cp311-linux_x86_64',
|
||||
'branch_name': '', # empty branch_name means default = 'master'
|
||||
'commit_id': '', # if set, it wins over branch_name
|
||||
'clone_recursive': False,
|
||||
'minimum_app_size_kb': 108860, # "du /workspace/stable-diffusion-webui-forge -sk"
|
||||
'download_url': 'bforge/bforge-official.tar.gz',
|
||||
'venv_uncompressed_size_kb': 6719164, # uncompressed size of "bforge-official.tar.gz" (in KBytes), "du /workspace/bforge -sk"
|
||||
'archive_size_bytes': 3338464965, # tar filesize (in Bytes), "ls bforge-official.tar.gz -la"
|
||||
'sha256_hash': 'e8e4d1cedd54be30c188d2a78570634b29bb7e9bb6cfa421f608c9b9813cdf7f' # shasum -a 256 bforge-official.tar.gz
|
||||
},
|
||||
{ # venv SETUP: 'official' && pip install --upgrade torch torchvision xformers --index-url "https://download.pytorch.org/whl/cu124"
|
||||
'version': 'latest',
|
||||
'build_info': 'v1.0 - Nov 16, 2024, 15:22 GMT by lutzapps',
|
||||
'notes': 'Python 3.11 && Cuda 12.4 && Upgraded to torch-2.5.1+cu124-cp311-cp311-linux_x86_64 && xformers',
|
||||
'branch_name': '', # empty branch_name means default = 'master'
|
||||
'commit_id': '', # if set, it wins over branch_name
|
||||
'clone_recursive': False,
|
||||
'minimum_app_size_kb': 108116, # "du /workspace/stable-diffusion-webui-forge -sk"
|
||||
'download_url': 'bforge/bforge-latest.tar.gz',
|
||||
'venv_uncompressed_size_kb': 6941664, # uncompressed size of "bforge-latest.tar.gz" (in KBytes), "du /workspace/bforge -sk"
|
||||
'archive_size_bytes': 3567217032, # tar filesize (in bytes), "ls bforge-latest.tar.gz -la"
|
||||
'sha256_hash': '65aeae1e5ff05d16647f8ab860694845d5d2aece5683fb2a96f6af6b4bdc05cd' # shasum -a 256 bforge-latest.tar.gz
|
||||
}
|
||||
]
|
||||
},
|
||||
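Given the 'archive_size_bytes' and 'sha256_hash' fields of each 'venv_info', a downloaded TAR could be verified with a sketch like this (function name assumed; the real installer may stream the download and report progress):

import hashlib
import os

def verify_venv_archive(tar_path: str, venv_info: dict) -> bool:
    # compare the tar filesize, equivalent to "ls <tar> -la"
    if os.path.getsize(tar_path) != venv_info["archive_size_bytes"]:
        return False
    # compare the SHA256 checksum, equivalent to "shasum -a 256 <tar>"
    sha256 = hashlib.sha256()
    with open(tar_path, "rb") as f:
        for chunk in iter(lambda: f.read(1024 * 1024), b""):
            sha256.update(chunk)
    return sha256.hexdigest() == venv_info["sha256_hash"]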
'ba1111': {
|
||||
'id': 'ba1111', # app_name
|
||||
'name': 'Better A1111',
|
||||
'command': 'cd /workspace/ba1111 && . ./bin/activate && cd /workspace/stable-diffusion-webui && ./webui.sh -f --listen --enable-insecure-extension-access --api --port 7863',
|
||||
'venv_path': '/workspace/ba1111',
|
||||
'app_path': '/workspace/stable-diffusion-webui',
|
||||
'port': 7863,
|
||||
'download_url': 'https://better.s3.madiator.com/ba1111/ba1111.tar.gz',
|
||||
'venv_uncompressed_size': 6794367826, # uncompressed size of the tar-file (in bytes),
|
||||
'archive_size': 3383946179, # tar filesize (in bytes)
|
||||
'sha256_hash': '1d70276bc93f5f992a2e722e76a469bf6a581488fa1723d6d40739f3d418ada9',
|
||||
'app_path': '/workspace/stable-diffusion-webui',
|
||||
'repo_url': 'https://github.com/AUTOMATIC1111/stable-diffusion-webui.git',
|
||||
'branch_name': '', # empty branch_name means default = 'master'
|
||||
'commit': '', # or commit hash (NYI)
|
||||
'clone_recursive': False,
|
||||
'refresh': False
|
||||
'allow_refresh': True, # allow to refresh the app
|
||||
'venv_path': '/workspace/ba1111',
|
||||
'venv_version_default': 'latest', # use the 'latest' VENV version by default,
|
||||
# can be overwritten with 'VENV_VERSION_<app_id>' ENV var or via DEBUG_SETTINGS['select_venv_version']
|
||||
'available_venvs': [
|
||||
{ # venv SETUP: 'webui.sh -f can_run_as_root=1'
|
||||
'version': 'official',
|
||||
'build_info': 'v1.0 - Nov 16, 2024, 16:30 GMT by lutzapps',
|
||||
'notes': 'Python 3.11 && Cuda 12.1 && Recommended torch-2.1.2+cu121-cp311-cp311-linux_x86_64',
|
||||
'branch_name': '', # empty branch_name means default = 'master'
|
||||
'commit_id': '', # if set, it wins over branch_name
|
||||
'clone_recursive': False,
|
||||
'minimum_app_size_kb': 42448, # "du /workspace/stable-diffusion-webui -sk"
|
||||
'download_url': 'ba1111/ba1111-official.tar.gz',
|
||||
'venv_uncompressed_size_kb': 5090008, # uncompressed size of "ba1111-official.tar.gz" (in KBytes), "du /workspace/ba1111 -sk"
|
||||
'archive_size_bytes': 2577843561, # tar filesize (in Bytes), "ls ba1111-official.tar.gz -la"
|
||||
'sha256_hash': '4e81b2ed0704e44edfb8d48fd9b3649668619b014bd9127d8a337aca01f57b53' # shasum -a 256 ba1111-official.tar.gz
|
||||
},
|
||||
{ # venv SETUP: 'official' && pip install --upgrade torch torchvision xformers --index-url "https://download.pytorch.org/whl/cu124"
|
||||
'version': 'latest',
|
||||
'build_info': 'v1.0 - Nov 16, 2024, 17:03 GMT by lutzapps',
|
||||
'notes': 'Python 3.11 && Cuda 12.4 && Upgraded to torch-2.5.1+cu124-cp311-cp311-linux_x86_64 && xformers',
|
||||
'branch_name': '', # empty branch_name means default = 'master'
|
||||
'commit_id': '', # if set, it wins over branch_name
|
||||
'clone_recursive': False,
|
||||
'minimum_app_size_kb': 41628, # "du /workspace/stable-diffusion-webui -sk"
|
||||
'download_url': 'ba1111/ba1111-latest.tar.gz',
|
||||
'venv_uncompressed_size_kb': 6160684, # uncompressed size of "ba1111-latest.tar.gz" (in KBytes), "du /workspace/ba1111 -sk"
|
||||
'archive_size_bytes': 3306240911, # tar filesize (in bytes), "ls ba1111-latest.tar.gz -la"
|
||||
'sha256_hash': '759be4096bf836c6925496099cfb342e97287e7dc9a3bf92f3a38d57d30b1d7d' # shasum -a 256 ba1111-latest.tar.gz
|
||||
}
|
||||
]
|
||||
},
|
||||
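For completeness, the 'command' one-liners above could be started by the app manager roughly like this (a sketch; the real manager additionally tracks the process and its 'port'):

import subprocess

def launch_app(app_config: dict) -> subprocess.Popen:
    # the 'command' field is a full bash one-liner which activates the VENV and starts the app
    return subprocess.Popen(app_config["command"], shell=True, executable="/bin/bash")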
'bkohya': {
|
||||
'id': 'bkohya', # app_name
|
||||
'name': 'Better Kohya',
|
||||
'command': 'cd /workspace/bkohya && . ./bin/activate && cd /workspace/kohya_ss && python ./kohya_gui.py --headless --share --server_port 7864', # TODO!! check other "./kohya_gui.py" cmdline options
|
||||
# need to check:
|
||||
# python ./kohya_gui.py --inbrowser --server_port 7864
|
||||
# works for now:
|
||||
# python ./kohya_gui.py --headless --share --server_port 7864
|
||||
'port': 7864,
|
||||
### need to check further command settings:
|
||||
# python ./kohya_gui.py --inbrowser --server_port 7864
|
||||
#
|
||||
# what works for now is the gradio setup (which is also currently activated):
|
||||
# python ./kohya_gui.py --headless --share --server_port 7864
|
||||
# creates a gradio link for 72h like e.g. https://b6365c256c395e755b.gradio.live
|
||||
#
|
||||
# the --noverify flag is currently NOT supported anymore; this needs checking, so it stays disabled in the meantime
|
||||
# NOTE: the --noverify switch can be inserted dynamically at runtime via DEBUG_SETTINGS['bkohya_noverify']=true
|
||||
# the idea is to make this a "run-once" option later when we have app-settings
|
||||
#
|
||||
### for Gradio supported reverse proxy:
|
||||
# --share -> Share the gradio UI
|
||||
# --root_path ROOT_PATH -> 'root_path' for Gradio to enable reverse proxy support, e.g. /kohya_ss
|
||||
|
@ -160,7 +310,7 @@ app_configs = {
|
|||
# [--headless] [--language LANGUAGE] [--use-ipex]
|
||||
# [--use-rocm] [--do_not_use_shell] [--do_not_share]
|
||||
# [--requirements REQUIREMENTS] [--root_path ROOT_PATH]
|
||||
# [--noverify]
|
||||
# [--noverify] ???
|
||||
#
|
||||
# options:
|
||||
# -h, --help show this help message and exit
|
||||
|
@ -186,87 +336,64 @@ app_configs = {
|
|||
# `root_path` for Gradio to enable reverse proxy
|
||||
# support. e.g. /kohya_ss
|
||||
# --noverify Disable requirements verification
|
||||
|
||||
'venv_path': '/workspace/bkohya',
|
||||
#
|
||||
'app_path': '/workspace/kohya_ss',
|
||||
'port': 7864,
|
||||
'download_url': 'https://better.s3.madiator.com/bkohya/kohya.tar.gz', # (2024-11-08 13:13:00Z) - lutzapps
|
||||
'venv_uncompressed_size': 12128345264, # uncompressed size of the tar-file (in bytes)
|
||||
'archive_size': 6314758227, # tar filesize (in bytes)
|
||||
'sha256_hash': '9a0c0ed5925109e82973d55e28f4914fff6728cfb7f7f028a62e2ec1a9e4f60a',
|
||||
'repo_url': 'https://github.com/bmaltais/kohya_ss.git',
|
||||
'branch_name': 'sd3-flux.1', # make sure we use Kohya with FLUX support branch
|
||||
# this branch also uses a 'sd-scripts' HEAD branch of 'SD3', which gets automatically checked-out too
|
||||
'commit': '', # or commit hash (NYI)
|
||||
'clone_recursive': True, # is recursive clone
|
||||
'refresh': False
|
||||
'allow_refresh': True, # allow to refresh the app
|
||||
'venv_path': '/workspace/bkohya',
|
||||
'venv_version_default': 'latest', # use the 'latest' VENV version by default,
|
||||
# can be overwritten with 'VENV_VERSION_<app_id>' ENV var or via DEBUG_SETTINGS['select_venv_version']
|
||||
'available_venvs': [
|
||||
{ # venv SETUP: 'setup-runpod.sh' -> kohya_gui.sh -> requirements_runpod.txt
|
||||
'version': 'latest',
|
||||
'build_info': 'v1.0 - Nov 8, 2024, 13:13 GMT by lutzapps',
|
||||
'notes': 'Python 3.10 && FLUX.1 version with torch-2.5.0+cu124 (setup-runpod.sh with requirements_runpod.txt)',
|
||||
'branch_name': 'sd3-flux.1', # make sure we use Kohya with FLUX support branch
|
||||
# this branch also uses a 'sd-scripts' HEAD branch of 'SD3', which gets automatically checked-out too
|
||||
'commit_id': '', # if set, it wins over branch_name
|
||||
'clone_recursive': True, # is recursive clone
|
||||
'minimum_app_size_kb': 59980, # "du /workspace/kohya_ss -sk"
|
||||
'download_url': 'bkohya/bkohya-latest.tar.gz',
|
||||
'venv_uncompressed_size_kb': 12175900, # uncompressed size of "bkohya-latest.tar.gz" (in KBytes), "du /workspace/bkohya -sk"
|
||||
'archive_size_bytes': 6314758227, # tar filesize (in Bytes), "ls bkohya-latest.tar.gz -la"
|
||||
'sha256_hash': '9a0c0ed5925109e82973d55e28f4914fff6728cfb7f7f028a62e2ec1a9e4f60a' # shasum -a 256 bkohya-latest.tar.gz
|
||||
}#,
|
||||
# { # there is currently no 'official' VENV for bkohya, as this is not needed
|
||||
# 'version': 'official',
|
||||
# 'build_info': '',
|
||||
# 'notes': '',
|
||||
# 'branch_name': '',
|
||||
# 'commit_id': '',
|
||||
# 'clone_recursive': '',
|
||||
# 'minimum_app_size_kb': '',
|
||||
# 'download_url': '',
|
||||
# 'venv_uncompressed_size_kb': 0, # uncompressed size of "bkohya-official.tar.gz" (in KBytes), "du /workspace/bkohya -sk"
|
||||
# 'archive_size_bytes': 0, # tar filesize (in bytes), "ls bkohya-official.tar.gz -la"
|
||||
# 'sha256_hash': '' # shasum -a 256 bkohya-official.tar.gz
|
||||
# }
|
||||
],
|
||||
'bash_cmds': { # bkohya helper cmds (all commands run in the activated VENV and can pass a cwd; {app_path} macro support)
|
||||
'run-tensorboard': 'tensorboard --logdir logs --bind_all --path_prefix=/tensorboard' # for running tensorboard
|
||||
# #pip install tensorboard # 2.14.1 is pre-installed, "/tensorboard/" path location is defined via 'nginx.conf'
|
||||
# TENSORBOARD_PORT ENV var (default port=6006)
|
||||
# above cmd starts like this:
|
||||
# TensorBoard 2.14.1 at http://e5c0c7143716:6006/tensorboard/ (Press CTRL+C to quit)
|
||||
# => available at http://localhost:6006/tensorboard/
|
||||
},
|
||||
}
|
||||
}
|
||||
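The '{app_path}' macro support mentioned for the 'bash_cmds' helpers could be implemented along these lines (a sketch; the VENV activation pattern mirrors the 'command' fields above, and the runner name is an assumption):

import subprocess

def run_bash_cmd(app_config: dict, cmd_key: str, cwd: str = None):
    # substitute the {app_path} macro supported by the 'bash_cmds' entries above
    cmd = app_config["bash_cmds"][cmd_key].format(app_path=app_config["app_path"])
    # activate the app's VENV first, then run the command
    bash_line = f". {app_config['venv_path']}/bin/activate && {cmd}"
    return subprocess.run(bash_line, shell=True, cwd=cwd, executable="/bin/bash")

# e.g. run_bash_cmd(app_configs["bkohya"], "run-tensorboard")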
|
||||
# lutzapps - not used anymore TODO: remove later
|
||||
""" def fetch_app_info():
|
||||
manifest_url = "https://better.s3.madiator.com/"
|
||||
download_base_url = "https://better.s3.madiator.com/" # could be different base as the manifest file
|
||||
|
||||
app_info = {}
|
||||
|
||||
try: # be graceful when the server is not reachable, be it S3 or anything else
|
||||
response = requests.get(manifest_url)
|
||||
root = ET.fromstring(response.content)
|
||||
|
||||
for content in root.findall('{http://s3.amazonaws.com/doc/2006-03-01/}Contents'):
|
||||
app_name_and_url = content.find('{http://s3.amazonaws.com/doc/2006-03-01/}Key').text
|
||||
|
||||
app_name = app_name_and_url.split('/')[0] # e.g. "bkohya/bkohya.tar.gz" -> "bkohya"
|
||||
download_url = os.path.join(download_base_url, app_name_and_url)
|
||||
|
||||
if not (app_name in ['ba1111', 'bcomfy', 'bforge', 'bkohya']):
|
||||
continue # skip unsupported app
|
||||
|
||||
# load code defaults
|
||||
archive_size = app_configs[app_name]["archive_size"]
|
||||
venv_uncompressed_size = app_configs[app_name]["venv_uncompressed_size"]
|
||||
sha256_hash = app_configs[app_name]["sha256_hash"]
|
||||
|
||||
try: # try to find overwrites from code defaults
|
||||
archive_size = int(content.find('archive_size').text)
|
||||
venv_uncompressed_size = int(content.find('{http://s3.amazonaws.com/doc/2006-03-01/}venv_uncompressed_size').text)
|
||||
sha256_hash = int(content.find('{http://s3.amazonaws.com/doc/2006-03-01/}sha256_hash').text)
|
||||
except: # swallow any exception, mainly from not being defined (yet) in the XML manifest
|
||||
print(f"App '{app_name}' Metadata could not be found in manifest '{manifest_url}', using code defaults!")
|
||||
|
||||
app_info[app_name] = {
|
||||
'download_url': download_url,
|
||||
'archive_size': archive_size,
|
||||
'venv_uncompressed_size': venv_uncompressed_size, # TODO: provide in XML manifest
|
||||
'sha256_hash': sha256_hash # TODO: provide in XML manifest
|
||||
}
|
||||
|
||||
except requests.RequestException as e: # server not reachable, return empty dict
|
||||
print(f"Manifest Url '{manifest_url}' not reachable, using code defaults!")
|
||||
|
||||
return app_info
|
||||
"""
|
||||
# lutzapps - not used anymore TODO: remove later
|
||||
""" def update_app_configs():
|
||||
app_info = fetch_app_info()
|
||||
for app_name, info in app_info.items():
|
||||
if app_name in app_configs:
|
||||
app_configs[app_name].update(info) """
|
||||
|
||||
def get_app_configs() -> dict:
|
||||
return app_configs
|
||||
|
||||
def add_app_config(app_name, config):
|
||||
def add_app_config(app_name:str, config:dict) -> dict:
|
||||
app_configs[app_name] = config
|
||||
return app_configs # return the modified app_configs
|
||||
|
||||
def remove_app_config(app_name):
|
||||
def remove_app_config(app_name:str) -> dict:
|
||||
if app_name in app_configs:
|
||||
del app_configs[app_name]
|
||||
|
||||
# Update app_configs when this module is imported
|
||||
# lutzapps - not used anymore TODO: remove later
|
||||
#update_app_configs()
|
||||
return app_configs # return the modified app_configs
|
||||
|
||||
|
||||
### lutzapps section
|
||||
|
@ -300,7 +427,7 @@ def read_dict_from_jsonfile(json_filepath:str) -> tuple [dict, str]:
|
|||
# Read JSON file from 'json_filepath' and return it as 'dict'
|
||||
|
||||
try:
|
||||
if ":" in json_filepath: # filepath is online Url containing ":" like http:/https:/ftp:
|
||||
if "://" in json_filepath: # filepath is online Url containing ":" like http:// https:// ftp://
|
||||
with urllib.request.urlopen(json_filepath) as url:
|
||||
dict = json.load(url)
|
||||
elif os.path.exists(json_filepath): # local file path, e.g. "/workspace/...""
|
||||
|
@ -326,7 +453,7 @@ def pretty_dict(dict:dict) -> str:
|
|||
|
||||
return dict_string
|
||||
|
||||
# helper function for "init_app_install_dirs(), "init_shared_model_app_map()", "init_shared_models_folders()" and "inir_DEBUG_SETTINGS()"
|
||||
# helper function for init_app_install_dirs(), init_shared_model_app_map(), init_shared_models_folders() and init_debug_settings()
|
||||
def load_global_dict_from_file(default_dict:dict, dict_filepath:str, dict_description:str, SHARED_MODELS_DIR:str="", write_file:bool=True) -> tuple[bool, dict]:
|
||||
# returns the 'dict' for 'dict_description' from 'dict_filepath'
|
||||
|
||||
|
@ -339,8 +466,8 @@ def load_global_dict_from_file(default_dict:dict, dict_filepath:str, dict_descri
|
|||
|
||||
return False, return_dict
|
||||
|
||||
# read from file, if filepath is online url (http:/https:/ftp:) or local filepath exists
|
||||
if ":" in dict_filepath or \
|
||||
# read from file, if filepath is online url (http:// https:// ftp://) or local filepath exists
|
||||
if "://" in dict_filepath or \
|
||||
os.path.isfile(dict_filepath) and os.path.exists(dict_filepath):
|
||||
dict_filepath_found = True
|
||||
# read the dict_description from JSON file
|
||||
|
@ -358,13 +485,18 @@ def load_global_dict_from_file(default_dict:dict, dict_filepath:str, dict_descri
|
|||
dict_filepath_found = False
|
||||
print(f"No {dict_description}_FILE found, initializing default '{dict_description}' from code ...")
|
||||
# use already defined dict from app code
|
||||
# write the dict to JSON file
|
||||
success, ErrorMsg = write_dict_to_jsonfile(default_dict, dict_filepath)
|
||||
if write_file:
|
||||
# write the dict to JSON file
|
||||
success, error_msg = write_dict_to_jsonfile(default_dict, dict_filepath)
|
||||
else:
|
||||
error_msg = f"Writing to file '{dict_filepath}' was skipped."
|
||||
|
||||
return_dict = default_dict # use the code-defaults dict passed in
|
||||
|
||||
if success:
|
||||
print(f"'{dict_description}' is initialized and written to file '{dict_filepath}'")
|
||||
else:
|
||||
print(ErrorMsg)
|
||||
print(error_msg)
|
||||
|
||||
# Convert 'dict_description' dictionary to formatted JSON
|
||||
print(f"\nUsing {'external' if dict_filepath_found else 'default'} '{dict_description}':\n{pretty_dict(return_dict)}")
|
||||
|
@ -377,22 +509,48 @@ def load_global_dict_from_file(default_dict:dict, dict_filepath:str, dict_descri
|
|||
return success, return_dict
|
||||
|
||||
|
||||
DEBUG_SETTINGS_FILE = "/workspace/_debug_settings.json"
|
||||
DEBUG_SETTINGS_FILE = "/workspace/.debug_settings.json"
|
||||
DEBUG_SETTINGS = {
|
||||
# these setting will be READ:
|
||||
"APP_CONFIGS_MANIFEST_URL": "", # this setting, when not blank, overwrites the global APP_CONFIGS_MANIFEST_URL
|
||||
"installer_codeversion": "v2", # can be "v1" (original) or "v2" (fast)
|
||||
"delete_tar_file_after_download": True, # can be set to True to test only local unpack time and github setup
|
||||
"create_bkohya_to_local_venv_symlink": True, # when True, creates a folder symlink "venv" in "/workspace/kohya_ss" -> "/workspace/bkohya" VENV
|
||||
"skip_to_github_stage": False, # when True, skip download and decompression stage and go directly to GH repo installation
|
||||
# these settings will be WRITTEN:
|
||||
"app_name": "", # last app_name the code run on
|
||||
"used_local_tarfile": True, # works together with the above TAR local caching
|
||||
"tar_filename": "", # last local tar_filename used
|
||||
"download_url": "", # last used tar download_url
|
||||
"total_duration_download": "00:00:00", # timespan-str "hh:mm:ss"
|
||||
"total_duration_unpack": "00:00:00", # timespan-str "hh:mm:ss"
|
||||
"total_duration": "00:00:00" # timespan-str "hh:mm:ss"
|
||||
### these setting will be READ:
|
||||
'APP_CONFIGS_MANIFEST_URL': None, # this setting, when not blank, overwrites the global APP_CONFIGS_MANIFEST_URL
|
||||
# it also defines the location where the code-defaults will be written to the first time
|
||||
'common_settings': {
|
||||
'base_download_url': None, # String: this setting, when not blank, overwrites the 'base_download_url' from the 'common_settings' of the APP_CONFIGS_MANIFEST
|
||||
'verify_app_size': None, # True/False: check the actual app_size against the app_configs "minimum_app_size_kb",
|
||||
# when set, it overwrites the 'verify_app_size' from the 'common_settings' of the APP_CONFIGS_MANIFEST
|
||||
'delete_unverified_app_path': None, # True/False: delete the APP_PATH from the /workspace folder,
|
||||
# if the result of the 'du -sk' command against the installed app_path does not reach the app's defined 'minimum_app_size_kb'
|
||||
# when set, it overwrites the 'delete_unverified_app_path' from the 'common_settings' of the APP_CONFIGS_MANIFEST
|
||||
'verify_venv_size': None, # True/False: check the actual venv_size against the app_configs "venv_uncompressed_size_kb",
|
||||
# when set, it overwrites the 'verify_venv_size' from the 'common_settings' of the APP_CONFIGS_MANIFEST
|
||||
'delete_unverified_venv_path': None, # True/False: delete the VENV from the /workspace folder,
|
||||
# if the result of the 'du -sk' command against the installed venv_path does not reach the 'venv_uncompressed_size_kb' defined in the app's selected 'venv_info'
|
||||
# when set, it overwrites the 'delete_unverified_venv_path' from the 'common_settings' of the APP_CONFIGS_MANIFEST
|
||||
'verify_tolerance_percent': None # percentage (int) by which the verified sizes of app_path and venv_path are allowed to vary
|
||||
# when set, it overwrites the 'verify_tolerance_percent' from the 'common_settings' of the APP_CONFIGS_MANIFEST
|
||||
},
|
||||
'select_venv_version': None, # when set it overwrites the selected version 'official' or 'latest'
|
||||
# can even be any other version like 'experimental', if you provide a 'bXXXX-experimental.tar.gz' VENV archive
|
||||
# file, with the correct venv_info checksums/hashes
|
||||
'delete_tar_file_after_download': True, # can be set to False to test only local unpack time and github setup and avoid download time
|
||||
'skip_to_application_setup': False, # when True, skip the download and decompression stage and go directly to the GitHub repo clone/installation
|
||||
### KOHYA specific debug settings
|
||||
'create_bkohya_to_local_venv_symlink': True, # when True, creates a folder symlink "venv" in "/workspace/kohya_ss" -> "/workspace/bkohya" VENV
|
||||
'bkohya_run_tensorboard': True, # autostart tensorboard together with bkohya via cmd_key="run-tensorboard", available at http://localhost:6006/tensorboard/
|
||||
# the --noverify flag is currently NOT supported anymore; this needs checking, so it stays disabled in the meantime
|
||||
#'bkohya_noverify': False, # when True, the '--noverify' will be inserted into the cmdline and disable requirements verification
|
||||
# the default is to check the requirements verification every time when the app starts
|
||||
# the idea is to make this a "run-once" option later when we have app-settings
|
||||
#
|
||||
### these settings will be WRITTEN for informational purposes:
|
||||
'last_app_name': "", # last app_name the code ran for
|
||||
'used_venv_version': "", # last venv_version the code used
|
||||
'used_local_tarfile': True, # works together with the above TAR local caching setting
|
||||
'used_tar_filename': "", # last used local/downloaded tar_filename
|
||||
'used_download_url': "", # last used tar download_url
|
||||
'total_duration_download': "00:00:00", # timespan-str "hh:mm:ss"
|
||||
'total_duration_unpack': "00:00:00", # timespan-str "hh:mm:ss"
|
||||
'total_duration': "00:00:00" # timespan-str "hh:mm:ss"
|
||||
}
|
||||
|
||||
def init_debug_settings():
|
||||
|
@ -400,7 +558,8 @@ def init_debug_settings():
|
|||
|
||||
local_debug = os.environ.get('LOCAL_DEBUG', 'False') # support local browsing for development/debugging
|
||||
generate_debug_settings_file = os.environ.get('DEBUG_SETTINGS_FILE', 'False') # generate the DEBUG_SETTINGS_FILE, if not exist already
|
||||
write_file_if_not_exists = (local_debug == 'True' or local_debug == 'true' or generate_debug_settings_file == 'True' or generate_debug_settings_file == 'true')
|
||||
write_file_if_not_exists = (local_debug == 'True' or local_debug == 'true' or
|
||||
generate_debug_settings_file == 'True' or generate_debug_settings_file == 'true')
|
||||
|
||||
success, dict = load_global_dict_from_file(DEBUG_SETTINGS, DEBUG_SETTINGS_FILE, "DEBUG_SETTINGS", write_file=write_file_if_not_exists)
|
||||
if success:
|
||||
|
@ -424,23 +583,22 @@ def write_debug_setting(setting_name:str, setting_value:str):
|
|||
init_debug_settings()
|
||||
|
||||
APP_CONFIGS_FILE = APP_CONFIGS_MANIFEST_URL # default is the online manifest url defined as "master"
|
||||
# can be overwritten with DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL'], e.g. point to "/workspace/_app_configs.json"
|
||||
# # which is the file, that is generated when the ENV var LOCAL_DEBUG='True' or the ENV var APP_CONFIGS_FILE='True'
|
||||
# NOTE: an existing serialized dict in the "/workspace" folder will never be overwritten agin from the code defaults,
|
||||
# can be overwritten with DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL'], e.g. point to "/workspace/app_configs.json"
|
||||
# which is the file, that is generated when the ENV var LOCAL_DEBUG='True' or the ENV var APP_CONFIGS_FILE='True'
|
||||
# NOTE: an existing serialized dict in the "/workspace" folder will never be overwritten again from the code defaults,
|
||||
# and "wins" against the code-defaults. So even changes in the source-code for this dicts will NOT be used,
|
||||
# when a local file exists. The idea here is that it is possible to overwrite code-defaults.
|
||||
# BUT as long as the APP_CONFIGS_MANIFEST_URL does not get overwritten, the global "app_configs" dict will always be loaded
|
||||
# from the central S3 server, or whatever is defined.
|
||||
# the only way to overwrite this url, is via the DEBUG_SETTINGS_FILE "/workspace/_debug_settings.json"
|
||||
# the only way to overwrite this url, is via the hidden DEBUG_SETTINGS_FILE "/workspace/.debug_settings.json"
|
||||
# the default source-code setting for DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL']: "" (is an empty string),
|
||||
# which still makes the default APP_CONFIGS_MANIFEST_URL the central master.
|
||||
# only when this setting is not empty, it can win against the central url, but also only when the Url is valid (locally or remote)
|
||||
# should there be an invalid Url (central or local), or any other problem, then the code-defaults will be used.
|
||||
#
|
||||
# The DEBUG_SETTINGS_FILE is a dict which helps during debugging, testing of APP Installations,
|
||||
# and generating ENV TAR files.
|
||||
# Is will also NOT be generated as external FILE, as long the same 2 ENV vars, which control the APP_CONFIGS_FILE generation are set:
|
||||
# LOCAL_DEBUG='True' or APP_CONFIGS_FILE='True'
|
||||
# The DEBUG_SETTINGS_FILE is a dict which helps during debugging and testing of APP Installations, and generating ENV TAR files.
|
||||
# It will also NOT be generated as an external FILE, as long as the same 2 ENV vars, which control the APP_CONFIGS_FILE generation, are set:
|
||||
# LOCAL_DEBUG='True' or APP_CONFIGS_FILE='True'
|
||||
#
|
||||
# SUMMARY: The DEBUG_SETTINGS and APP_CONFIGS (aka app_configs in code) will never be written to the /workspace,
|
||||
# when the IMAGE is used normally.
|
||||
|
@ -449,38 +607,83 @@ def init_app_configs():
|
|||
global APP_CONFIGS_MANIFEST_URL
|
||||
global APP_CONFIGS_FILE
|
||||
global app_configs
|
||||
# store 'common_settings' in global module var
|
||||
global COMMON_SETTINGS
|
||||
|
||||
# read APP_CONFIGS_MANIFEST_URL from ENV var
|
||||
env_app_configs_manifest_url = os.environ.get('APP_CONFIGS_MANIFEST_URL', '')
|
||||
if not (env_app_configs_manifest_url == None or env_app_configs_manifest_url == ''):
|
||||
print(f"using APP_CONFIGS_MANIFEST_URL from ENV_SETTINGS: {env_app_configs_manifest_url}")
|
||||
APP_CONFIGS_MANIFEST_URL = env_app_configs_manifest_url
|
||||
APP_CONFIGS_FILE = APP_CONFIGS_MANIFEST_URL
|
||||
|
||||
# check for overwrite of APP_CONFIGS_MANIFEST_URL
|
||||
debug_app_configs_manifest_url = DEBUG_SETTINGS['APP_CONFIGS_MANIFEST_URL']
|
||||
if not debug_app_configs_manifest_url == "":
|
||||
if not (debug_app_configs_manifest_url == None or debug_app_configs_manifest_url == ''):
|
||||
print(f"using APP_CONFIGS_MANIFEST_URL from DEBUG_SETTINGS: {debug_app_configs_manifest_url}")
|
||||
APP_CONFIGS_MANIFEST_URL = debug_app_configs_manifest_url
|
||||
APP_CONFIGS_FILE = APP_CONFIGS_MANIFEST_URL
|
||||
|
||||
|
||||
print(f"\nUsing APP_CONFIGS_MANIFEST_URL={APP_CONFIGS_MANIFEST_URL}")
|
||||
|
||||
local_debug = os.environ.get('LOCAL_DEBUG', 'False') # support local browsing for development/debugging
|
||||
generate_app_configs_file = os.environ.get('APP_CONFIGS_FILE', 'False') # generate the APP_CONFIGS_FILE, if not exist already
|
||||
write_file_if_not_exists = (local_debug == 'True' or local_debug == 'true' or generate_app_configs_file == 'True' or generate_app_configs_file == 'true')
|
||||
|
||||
generate_default_app_configs_file = \
|
||||
(generate_app_configs_file == 'True' or generate_app_configs_file == 'true' or \
|
||||
local_debug == 'True' or local_debug == 'true')
|
||||
|
||||
write_file_if_not_exists = (("://" not in APP_CONFIGS_FILE) and generate_default_app_configs_file)
|
||||
|
||||
success, dict = load_global_dict_from_file(app_configs, APP_CONFIGS_FILE, "APP_CONFIGS", write_file=write_file_if_not_exists)
|
||||
|
||||
if success:
|
||||
app_configs = dict # overwrite code-defaults (from local or external/online JSON settings file)
|
||||
#else app_configs = <code defaults already initialized>
|
||||
if success: # if the passed-in APP_CONFIGS_MANIFEST_URL was valid and loaded successfully
|
||||
app_configs = dict # it overwrites the code-defaults (from local or external/online MANIFEST JSON settings file)
|
||||
else: # 404 not found MANIFEST_URL, fall-back to app_configs = <code-defaults already initialized>
|
||||
APP_CONFIGS_MANIFEST_URL += "#not_found_using_code_defaults" # mark the code-default fall-back
|
||||
if generate_default_app_configs_file: # LOCAL_DEBUG='True' or APP_CONFIGS_FILE='True'
|
||||
default_app_configs_filepath = "/workspace/app_configs(default).json"
|
||||
# write the default dict from code to JSON default file and overwrite existing (old) files
|
||||
success, error_msg = write_dict_to_jsonfile(app_configs, default_app_configs_filepath, overwrite=True)
|
||||
|
||||
# if initialized a second/third/... time from code-defaults (404 not found of MANIFEST_URL),
|
||||
# 'common_settings' is already removed from the app_configs dict in RAM, so check for this edge case
|
||||
if 'common_settings' in app_configs: # first init from code or first/second load from existing MANIFEST URL
|
||||
# transfer this 'common_settings' sub dictionary into the global module dict COMMON_SETTINGS
|
||||
COMMON_SETTINGS = app_configs['common_settings']
|
||||
# before we return the app_configs dict,
|
||||
app_configs = remove_app_config('common_settings') # remove 'common_settings', as it is not an "app"
|
||||
|
||||
# process DEBUG_SETTINGS['common_settings'] overwrites
|
||||
if not 'common_settings' in DEBUG_SETTINGS:
|
||||
return # no 'common_settings' overwrite section found
|
||||
|
||||
# loop thru all 'common_settings' from the DEBUG_SETTINGS_FILE and look for overwrites
|
||||
for key, value in DEBUG_SETTINGS['common_settings'].items():
|
||||
if not (value == None or value == ''): # if the setting is defined (not None and not a blank string)
|
||||
COMMON_SETTINGS[key] = value # overwrite the corresponding app_configs COMMON_SETTING from DEBUG_SETTINGS['common_settings']
|
||||
|
||||
# if app_configs['common_settings']['base_download_url'] is not explicitly set,
|
||||
# the default is to dynamically generate the 'base download url' from the APP_CONFIGS_MANIFEST_URL domain
|
||||
# ('production' or 'development' S3 bucket MANIFEST URL), so switching the MANIFEST URL
|
||||
# also switches the VENV base download locations, as VENV urls are defined as "RELATIVE" urls in the app_configs by default
|
||||
# this 'base_download_url' again could be overwritten via DEBUG_SETTINGS['common_settings']['base_download_url']
|
||||
if COMMON_SETTINGS['base_download_url'] == None or COMMON_SETTINGS['base_download_url'] == '':
|
||||
# if there is no 'base_download_url' already defined until here, generate it based from the MANIFEST_URL domain
|
||||
COMMON_SETTINGS['base_download_url'] = f"{os.path.dirname(APP_CONFIGS_MANIFEST_URL)}/" # append a final '/' for clarifying it is a base folder url
|
||||
|
||||
return
|
||||
|
||||
init_app_configs() # load from JSON file (local or remote), falling back to code-defaults otherwise
|
||||
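A worked example of the URL derivation done at the end of init_app_configs(): with APP_CONFIGS_MANIFEST_URL = "https://better.s3.madiator.com/app_configs.json", os.path.dirname() yields "https://better.s3.madiator.com", so 'base_download_url' becomes "https://better.s3.madiator.com/", and a RELATIVE 'download_url' like "bcomfy/bcomfy-latest.tar.gz" resolves to "https://better.s3.madiator.com/bcomfy/bcomfy-latest.tar.gz". A sketch of such a resolver (name assumed):

def resolve_download_url(venv_info: dict) -> str:
    download_url = venv_info["download_url"]
    if "://" in download_url:  # ABSOLUTE url, use as-is
        return download_url
    # RELATIVE url, prefix it with the 'base_download_url' derived above
    return COMMON_SETTINGS["base_download_url"] + download_url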
|
||||
|
||||
# lutzapps - add kohya_ss support and handle the required local "venv" within the "kohya_ss" app folder
|
||||
def ensure_kohya_local_venv_is_symlinked() -> tuple[bool, str]:
|
||||
### create a folder symlink for kohya's "local" 'venv' dir
|
||||
# as kohya_ss' "setup.sh" assumes a "local" VENV under "/workspace/kohya_ss/venv",
|
||||
# we will create a folder symlink "/workspace/kohya_ss/venv" -> "/workspace/bkohya"
|
||||
# to our global VENV and rename the original "venv" folder to "venv(BAK)", if any exists,
|
||||
# will we not the case normally.
|
||||
# which will normally not be the case.
|
||||
|
||||
if not DEBUG_SETTINGS['create_bkohya_to_local_venv_symlink']:
|
||||
return True, "" # not fix the local KOHYA_SS VENV requirement
|
||||
|
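Per the comments above, the symlink handling amounts to roughly this sketch (a simplification; the real function returns a (success, error_message) tuple and handles more edge cases):

import os

app_path = "/workspace/kohya_ss"
local_venv = os.path.join(app_path, "venv")

# keep any real local venv out of the way, as described above
if os.path.isdir(local_venv) and not os.path.islink(local_venv):
    os.rename(local_venv, os.path.join(app_path, "venv(BAK)"))
# create the folder symlink "/workspace/kohya_ss/venv" -> "/workspace/bkohya"
if not os.path.lexists(local_venv):
    os.symlink("/workspace/bkohya", local_venv, target_is_directory=True)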
|
File diff suppressed because it is too large
|
@ -33,7 +33,53 @@ def ensure_shared_folder_exists():
|
|||
# os.makedirs(os.path.join(SHARED_MODELS_DIR, folder), exist_ok=True)
|
||||
ensure_shared_models_folders()
|
||||
|
||||
def check_civitai_url(url):
|
||||
def check_civitai_url(url:str) -> tuple[bool, bool, str, str]:
|
||||
# https://education.civitai.com/civitais-guide-to-downloading-via-api/
|
||||
|
||||
civitai_domain = "civitai.com"
|
||||
|
||||
try:
|
||||
url = url.lower() # convert to lcase
|
||||
|
||||
### sample url for normal civitai_url
|
||||
# url = "https://civitai.com/models/618692?modelVersionId=691639"
|
||||
url_pattern_models = r"https://civitai\.com/models/(\d+)(?:\?modelversionid=(\d+))?"
|
||||
|
||||
### sample url for civitai_api_url
|
||||
# api_url = "https://civitai.com/api/download/models/12345?type=Model&format=SafeTensor&size=pruned&fp=fp16&token=YOUR_TOKEN_HERE"
|
||||
url_pattern_api_models = r"https://civitai\.com/api/download/models/(\d+)"
|
||||
|
||||
is_civitai = (civitai_domain in url) # any civitai url
|
||||
is_civitai_api = (is_civitai and ("/api/" in url)) # only civitai_api_url
|
||||
# refine is_civitai to be only True if NOT already a civitai_api_url
|
||||
is_civitai = (is_civitai and not is_civitai_api)
|
||||
|
||||
model_id = None
|
||||
version_id = None
|
||||
|
||||
if not is_civitai:
|
||||
return is_civitai, is_civitai_api, model_id, version_id # False, False, None, None
|
||||
|
||||
if is_civitai_api:
|
||||
match = re.match(url_pattern_api_models, url)
|
||||
if match:
|
||||
model_id = match.group(1) # e.g., "619777"
|
||||
return is_civitai, is_civitai_api, model_id, version_id # False, True, model_id, None
|
||||
|
||||
# only case left (is_civitai == True)
|
||||
match = re.match(url_pattern_models, url)
|
||||
if match:
|
||||
model_id = match.group(1) # e.g., "618692"
|
||||
version_id = match.group(2) # e.g., "691639" or None if not present
|
||||
return is_civitai, is_civitai_api, model_id, version_id # True, False, model_id, version_id
|
||||
|
||||
except Exception as e:
|
||||
print(f"ERROR in Url parsing for CivitAI url: {url}: {str(e)}")
|
||||
|
||||
return False, False, None, None
|
||||
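Usage, with the sample URLs from the comments above:

# model page url -> (is_civitai, is_civitai_api, model_id, version_id)
print(check_civitai_url("https://civitai.com/models/618692?modelVersionId=691639"))
# -> (True, False, '618692', '691639')
print(check_civitai_url("https://civitai.com/api/download/models/12345?type=Model&format=SafeTensor"))
# -> (False, True, '12345', None)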
|
||||
# old version - disabled
|
||||
def check_civitai_url_v0(url):
|
||||
prefix = "civitai.com"
|
||||
try:
|
||||
if prefix in url:
|
||||
|
@ -54,7 +100,42 @@ def check_civitai_url(url):
|
|||
print("Error parsing Civitai model URL")
|
||||
return False, False, None, None
|
||||
|
||||
def check_huggingface_url(url):
|
||||
|
||||
def check_huggingface_url(url:str) -> tuple[bool, str, str, str, str]:
|
||||
huggingface_domain = "huggingface.co" # matches both ".com" and ".co"
|
||||
|
||||
try:
|
||||
url = url.lower() # convert to lcase
|
||||
|
||||
### sample url for normal huggingface_url
|
||||
# url1 = "https://huggingface.com/stabilityai/sd-vae-ft-mse-original/blob/main/vae-ft-mse-840000-ema-pruned.safetensors"
|
||||
# url = "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/subfolder1/subfolder2/vae-ft-mse-840000-ema-pruned.safetensors"
|
||||
url_pattern = r"https://huggingface\.(?:co|com)/([\w.-]+/[\w.-]+)/(resolve|blob)/(main|tree)(?:/([\w./-]+))?/([\w.-]+)$"
|
||||
|
||||
is_huggingface = (huggingface_domain in url)
|
||||
|
||||
repo_id = None
|
||||
filename = None
|
||||
folder_name = None
|
||||
branch_name = None
|
||||
|
||||
match = re.match(url_pattern, url)
|
||||
if match:
|
||||
repo_id = match.group(1) # e.g., "stabilityai/sd-vae-ft-mse-original"
|
||||
folder_name = match.group(2) # e.g., "resolve" or "blob" (normally only the "resolve" folder is the download link of the model)
|
||||
branch_name = match.group(3) # e.g., "main" or "tree"
|
||||
folders = match.group(4) # e.g., "subfolder1/subfolder2" or None if not present
|
||||
filename = match.group(5) # e.g., "vae-ft-mse-840000-ema-pruned.safetensors"
|
||||
|
||||
except Exception as e:
|
||||
error_msg = f"ERROR in Url parsing for HuggingFace url: {url}: {str(e)}"
|
||||
print(error_msg)
|
||||
|
||||
return is_huggingface, repo_id, filename, folder_name, branch_name
|
||||
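Usage, with the sample URL from the comments above:

# -> (is_huggingface, repo_id, filename, folder_name, branch_name)
print(check_huggingface_url(
    "https://huggingface.co/stabilityai/sd-vae-ft-mse-original/resolve/main/vae-ft-mse-840000-ema-pruned.safetensors"))
# -> (True, 'stabilityai/sd-vae-ft-mse-original', 'vae-ft-mse-840000-ema-pruned.safetensors', 'resolve', 'main')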
|
||||
|
||||
# old version - disabled
|
||||
def check_huggingface_url_v0(url):
|
||||
parsed_url = urlparse(url)
|
||||
if parsed_url.netloc not in ["huggingface.co", "huggingface.com"]:
|
||||
return False, None, None, None, None
|
||||
|
@ -74,7 +155,7 @@ def check_huggingface_url(url):
|
|||
def download_model(url, model_name, model_type, civitai_token=None, hf_token=None, version_id=None, file_index=None) -> tuple[bool, str]:
|
||||
ensure_shared_folder_exists()
|
||||
is_civitai, is_civitai_api, model_id, _ = check_civitai_url(url)
|
||||
is_huggingface, repo_id, hf_filename, hf_folder_name, hf_branch_name = check_huggingface_url(url) # TODO: double call
|
||||
is_huggingface, repo_id, hf_filename, hf_folder_name, hf_branch_name = check_huggingface_url(url) # TODO: double calls (need to be avoided)
|
||||
|
||||
if is_civitai or is_civitai_api:
|
||||
if not civitai_token:
|
||||
|
@ -97,42 +178,80 @@ def download_model(url, model_name, model_type, civitai_token=None, hf_token=Non
|
|||
# lutzapps - added SHA256 checks for already existing identical and downloaded HuggingFace models
|
||||
def download_civitai_model(url, model_name, model_type, civitai_token, version_id=None, file_index=None) -> tuple[bool, str]:
|
||||
try:
|
||||
|
||||
# Error: Exception downloading from CivitAI:
|
||||
# cannot access local variable 'civitai_file' where it is not associated with a value
|
||||
#
|
||||
# example of model Flux Dev Model: https://civitai.com/models/618692?modelVersionId=691639
|
||||
|
||||
# examine the url, and extract model_id and url_version_id
|
||||
is_civitai, is_civitai_api, model_id, url_version_id = check_civitai_url(url)
|
||||
|
||||
headers = {'Authorization': f'Bearer {civitai_token}'}
|
||||
from app import (load_civitai_token)
|
||||
# use provided token or try to read the token from ENV var or stored file
|
||||
if not civitai_token:
|
||||
civitai_token = load_civitai_token()
|
||||
|
||||
# use the civitai token for Authorization
|
||||
headers = {"Authorization": f"Bearer {civitai_token}"} if civitai_token else {}
|
||||
|
||||
if is_civitai_api:
|
||||
api_url = f"https://civitai.com/api/v1/model-versions/{url_version_id}"
|
||||
else:
|
||||
api_url = f"https://civitai.com/api/v1/models/{model_id}"
|
||||
|
||||
# get the model data from the civitai repository
|
||||
response = requests.get(api_url, headers=headers)
|
||||
response.raise_for_status()
|
||||
model_data = response.json()
|
||||
#response.raise_for_status()
|
||||
if response.status_code != 200:
|
||||
raise Exception(f"Failed to get model info: {response.text}")
|
||||
|
||||
model_data = response.json()
|
||||
|
||||
civitai_model_type = model_data['type']
|
||||
# map the civitai model type to our internal SHARED_MODEL_FOLDERS model_type
|
||||
model_type = MODEL_TYPE_MAPPING.get(civitai_model_type, 'Stable-diffusion') # default is 'ckpt'
|
||||
|
||||
if is_civitai_api:
|
||||
version_data = model_data
|
||||
model_data = version_data['model']
|
||||
else:
|
||||
if version_id:
|
||||
version_data = next((v for v in model_data['modelVersions'] if v['id'] == version_id), None)
|
||||
elif url_version_id:
|
||||
version_data = next((v for v in model_data['modelVersions'] if v['id'] == url_version_id), None)
|
||||
else:
|
||||
version_data = model_data['modelVersions'][0]
|
||||
|
||||
versions = model_data.get('modelVersions', [])
|
||||
|
||||
if not version_id:
|
||||
version_id = url_version_id
|
||||
|
||||
if version_id == None: # no version_id specified, let the user pick from available versions
|
||||
|
||||
return True, {
|
||||
'choice_required': {
|
||||
'type': 'version',
|
||||
'model_id': model_id,
|
||||
'versions': versions
|
||||
}
|
||||
}
|
||||
|
||||
# Get the selected version
|
||||
version_data = next(
|
||||
(v for v in versions if str(v['id']) == str(version_id)), None)
|
||||
|
||||
# if version_id:
|
||||
# version_data = next((v for v in versions if v['id'] == version_id), None)
|
||||
# elif url_version_id:
|
||||
# version_data = next((v for v in versions if v['id'] == url_version_id), None)
|
||||
# else:
|
||||
# version_data = versions[0]
|
||||
|
||||
if not version_data:
|
||||
return False, f"Version ID {version_id or url_version_id} not found for this model."
|
||||
|
||||
civitai_model_type = model_data['type']
|
||||
model_type = MODEL_TYPE_MAPPING.get(civitai_model_type, 'Stable-diffusion')
|
||||
|
||||
|
||||
files = version_data['files']
|
||||
if file_index is not None and 0 <= file_index < len(files):
|
||||
file_to_download = files[file_index]
|
||||
civitai_file = files[file_index]
|
||||
elif len(files) > 1:
|
||||
# If there are multiple files and no specific file was chosen, ask the user to choose
|
||||
file_options = [{'name': f['name'], 'size': f['sizeKB'], 'type': f['type']} for f in files]
|
||||
# extended for more info needed - 'metadata': {'format': 'SafeTensor', 'size': 'full', 'fp': 'fp32'},
|
||||
file_options = [{'name': f['name'], 'sizeKB': f['sizeKB'], 'type': f['type'], 'format': f['metadata']['format'], 'size': f['metadata']['size'], 'fp': f['metadata']['fp']} for f in files]
|
||||
return True, {
|
||||
'choice_required': {
|
||||
'type': 'file',
|
||||
|
|
|
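A hedged usage sketch of the two-step selection flow above (the 'choice_required' shapes match the returns shown in this hunk; model_name and the caller logic are illustrative only):

# first call without a version id: for a multi-version model the function
# returns a 'choice_required' dict instead of downloading anything
url = "https://civitai.com/models/618692"  # the example model from the comment above, without ?modelVersionId
success, result = download_civitai_model(url, model_name="flux-dev", model_type="ckpt", civitai_token=None)

if success and isinstance(result, dict) and 'choice_required' in result:
    choice = result['choice_required']
    if choice['type'] == 'version':
        # present choice['versions'] to the user; here we simply take the first one
        picked_version_id = choice['versions'][0]['id']
        success, result = download_civitai_model(url, model_name="flux-dev", model_type="ckpt",
                                                 civitai_token=None, version_id=picked_version_id)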
@@ -8,120 +8,6 @@ from flask import jsonify
from utils.websocket_utils import (send_websocket_message, active_websockets)
from utils.app_configs import (get_app_configs, load_global_dict_from_file, pretty_dict)

### shared_models-v0.9.2 by lutzapps, Nov 5th 2024 ###

# to run (and optionally DEBUG) this docker image "better-ai-launcher" in a local container on your own machine
# you need to define the ENV var "LOCAL_DEBUG" in the "VSCode Docker Extension"
# file ".vscode/tasks.json" in the ENV settings of the "dockerRun" section (or any other way),
# and pass it into the docker container:
# tasks.json:
# ...
#   "dockerRun": {
#       "containerName": "madiator2011-better-launcher", // no "/" allowed here for container name
#       "image": "madiator2011/better-launcher:dev",
#       "envFiles": ["${workspaceFolder}/.env"], // pass additional env-vars (hf_token, civitai token, ssh public-key) from ".env" file to container
#       "env": { // these ENV vars go into the docker container to support local debugging
#           "LOCAL_DEBUG": "True", // change app to localhost Urls and local Websockets (unsecured)
#           "FLASK_APP": "app/app.py",
#           "FLASK_ENV": "development", // changed from "production"
#           "GEVENT_SUPPORT": "True" // gevent monkey-patching is being used, enable gevent support in the debugger
#           // "FLASK_DEBUG": "0" // "1" allows debugging in Chrome, but then the VSCode debugger does not work
#       },
#       "volumes": [
#           {
#               "containerPath": "/app",
#               "localPath": "${workspaceFolder}" // the "/app" folder (and sub-folders) will be mapped locally for debugging and hot-reload
#           },
#           {
#               "containerPath": "/workspace",
#               // TODO: create this folder before you run!
#               "localPath": "${userHome}/Projects/Docker/Madiator/workspace"
#           }
#       ],
#       "ports": [
#           {
#               "containerPort": 7222, // main Flask app port "AppManager"
#               "hostPort": 7222
#           },
# ...
#
#
# NOTE: to use the "LOCAL_DEBUG" ENV var just for local consumption of this image, you can run it like
#
# docker run -it -d --name madiator-better-launcher -p 22:22 -p 7777:7777 -p 7222:7222 -p 3000:3000 -p 7862:7862 -p 7863:7863
#   -e LOCAL_DEBUG="True" -e RUNPOD_PUBLIC_IP="127.0.0.1" -e RUNPOD_TCP_PORT_22="22"
#   -e PUBLIC_KEY="ssh-ed25519 XXXXXXX...XXXXX user@machine-DNS.local"
#   --mount type=bind,source=/Users/test/Projects/Docker/madiator/workspace,target=/workspace
#   madiator2011/better-launcher:dev
#
# To run the full 'app.py' / 'index.html' webserver locally, it was needed to "patch" the
# '/app/app.py' main application file and the
# '/app/templates/index.html' file according to the "LOCAL_DEBUG" ENV var,
# to switch the CF "proxy.runpod.net" Url for DEBUG:
#
### '/app/app.py' CHANGES ###:
# # lutzapps - CHANGE #1
# LOCAL_DEBUG = os.environ.get('LOCAL_DEBUG', 'False') # support local browsing for development/debugging
# ...
# filebrowser_status = get_filebrowser_status()
# return render_template('index.html',
#     apps=app_configs,
#     app_status=app_status,
#     pod_id=RUNPOD_POD_ID,
#     RUNPOD_PUBLIC_IP=os.environ.get('RUNPOD_PUBLIC_IP'),
#     RUNPOD_TCP_PORT_22=os.environ.get('RUNPOD_TCP_PORT_22'),
#     # lutzapps - CHANGE #2 - allow localhost Url for unsecure "http" and "ws" WebSockets protocol,
#     # according to LOCAL_DEBUG ENV var (used 3x in "index.html" changes)
#     enable_unsecure_localhost=os.environ.get('LOCAL_DEBUG'),
# ...
# other (non-related) app.py changes omitted here
#
### '/app/templates/index.html' CHANGES ###:
# <script>
# ...
# // *** lutzapps - Change #2 - support to run locally at http://localhost:${WS_PORT} (3 locations in "index.html")
# const enable_unsecure_localhost = '{{ enable_unsecure_localhost }}';
#
# // default is to use the "production" WebSockets CloudFlare URL
# // NOTE: ` (back-ticks) are used here for template literals
# var WS_URL = `wss://${podId}-${WS_PORT}.proxy.runpod.net/ws`; // need to be declared as var
#
# if (`${enable_unsecure_localhost}` === 'True') { // value of LOCAL_DEBUG ENV var
#     // make sure to use "ws" Protocol (insecure) instead of "wss" (WebSockets Secure) for localhost,
#     // otherwise the 'loadingOverlay' stays on screen with the ERROR:
#     // "WebSocket disconnected. Attempting to reconnect..." blocking the webpage http://localhost:7222
#     WS_URL = `ws://localhost:${WS_PORT}/ws`; // localhost WS (unsecured)
#     //alert(`Running locally with WS_URL=${WS_URL}`);
# }
# ...
# function openApp(appKey, port) {
#     // *** lutzapps - Change #3 - support to run locally
#     // NOTE: ` (back-ticks) are used here for template literals
#     var url = `https://${podId}-${port}.proxy.runpod.net/`; // need to be declared as var
#     if (`${enable_unsecure_localhost}` === 'True') {
#         url = `http://localhost:${port}/`; // remove runpod.net proxy
#         //alert(`openApp URL=${url}`);
#     }
#
#     window.open(url, '_blank');
# }
# ...
# function openFileBrowser() {
#     const podId = '{{ pod_id }}';
#
#     // *** lutzapps - Change #5 - support to run locally
#     // NOTE: ` (back-ticks) are used here for template literals
#     var url = `https://${podId}-7222.proxy.runpod.net/fileapp/`; // need to be declared as var
#     if (`${enable_unsecure_localhost}` === 'True') {
#         url = `http://localhost:7222/fileapp/`; // remove runpod.net proxy
#         //alert(`FileBrowser URL=${url}`);
#     }
#
#     window.open(url, '_blank');
# }
# other (non-related) index.html changes omitted here
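The same switch, expressed compactly on the Python side (a hedged sketch, not part of this commit; it only normalizes the LOCAL_DEBUG string once, instead of comparing against 'True' in several places):

import os

# os.environ.get() returns a string, so normalize it to a real bool once at startup
LOCAL_DEBUG = os.environ.get('LOCAL_DEBUG', 'False').lower() == 'true'

def websocket_url(pod_id: str, ws_port: int) -> str:
    # unsecured local WebSocket while debugging, CloudFlare proxy URL otherwise
    if LOCAL_DEBUG:
        return f"ws://localhost:{ws_port}/ws"
    return f"wss://{pod_id}-{ws_port}.proxy.runpod.net/ws"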

README_FILE_PREFIX = "_readme-" # prefix for all dynamically generated README file names

### support local docker container runs with locally BOUND Workspace, needed also during local debugging
@@ -313,7 +199,6 @@ APP_INSTALL_DIRS = {
# }

# MAP between Madiator's "app_configs" dict and the "APP_INSTALL_DIRS" dict used in this module
# TODO: this is temporary and should be merged/integrated better later
MAP_APPS = {
    "bcomfy": "ComfyUI",
    "bforge": "Forge",
@@ -322,7 +207,6 @@ MAP_APPS = {
}

# helper function called by main(), uses above "MAP_APPS" dict
# TODO: this is temporary and should be merged/integrated better later
def sync_with_app_configs_install_dirs():
    print("Syncing 'app_configs' dict 'app_path' into the 'APP_INSTALL_DIRS' dict ...")
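The rest of the function body is elided in this hunk; a hedged sketch of what the sync could look like (the 'app_path' key is taken from the print statement above, everything else is an assumption):

def sync_with_app_configs_install_dirs():
    print("Syncing 'app_configs' dict 'app_path' into the 'APP_INSTALL_DIRS' dict ...")
    app_configs = get_app_configs()  # imported from utils.app_configs above
    for app_id, install_dir_name in MAP_APPS.items():
        if app_id in app_configs:
            # adopt the 'app_path' configured in 'app_configs' as the install dir
            APP_INSTALL_DIRS[install_dir_name] = app_configs[app_id]['app_path']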
@@ -528,7 +412,7 @@ def remove_broken_model_symlinks(shared_model_folderpath:str, app_model_folderpa
        if os.path.islink(app_model_filepath) and not os.path.exists(app_model_filepath):
            # Remove existing stale/broken symlink
            broken_modellinks_count = broken_modellinks_count + 1
            dateInfo = "{:%B %d, %Y, %H:%M:%S GMT}".format(datetime.datetime.now())
            dateInfo = "{:%b %d, %Y, %H:%M:%S GMT}".format(datetime.datetime.now())
            broken_modellinks_info += f"\t{app_model_filename}\t[@ {dateInfo}]\n"
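Both dateInfo lines appear because the hunk shows the old ('%B') and the new ('%b') version of the same line; the change only shortens the month name in the generated timestamp, e.g.:

import datetime
now = datetime.datetime(2024, 11, 5, 14, 30, 0)
"{:%B %d, %Y, %H:%M:%S GMT}".format(now)  # -> 'November 05, 2024, 14:30:00 GMT'
"{:%b %d, %Y, %H:%M:%S GMT}".format(now)  # -> 'Nov 05, 2024, 14:30:00 GMT'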
            os.unlink(app_model_filepath) # try to unlink the file/folder symlink
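The enclosing loop is elided in this hunk; a minimal self-contained sketch of the same stale-symlink sweep (folder layout assumed):

import os

def remove_broken_symlinks(app_model_folderpath: str) -> int:
    removed = 0
    for name in os.listdir(app_model_folderpath):
        path = os.path.join(app_model_folderpath, name)
        # islink() is still True for a symlink whose target is gone,
        # while exists() follows the link - together they detect stale links
        if os.path.islink(path) and not os.path.exists(path):
            os.unlink(path)
            removed += 1
    return removed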
@@ -575,8 +459,8 @@ def pull_unlinked_models_back_as_shared_models(shared_model_folderpath:str, app_
            continue # skip hidden filenames like ".DS_Store" (on macOS), ".keep" (on GitHub) and all "{README_FILE_PREFIX}*.txt" files

        app_model_filepath = os.path.join(app_model_folderpath, app_model_filename)
        if os.path.islink(app_model_filepath) or os.path.isdir(app_model_filepath):
            continue # skip all already symlinked model files and sub-folders
        if os.path.islink(app_model_filepath) or os.path.isdir(app_model_filepath) or os.path.getsize(app_model_filepath) == 0:
            continue # skip all already symlinked model files, sub-folders, and ZERO-size "put your model here" files

        # real file, potentially a model file which can be pulled back "home"
        pulled_model_files_count = pulled_model_files_count + 1
@@ -587,7 +471,7 @@ def pull_unlinked_models_back_as_shared_models(shared_model_folderpath:str, app_
        print(f"\tpulled-back local model '{app_model_filepath}'")

        dateInfo = "{:%B %d, %Y, %H:%M:%S GMT}".format(datetime.datetime.now())
        dateInfo = "{:%b %d, %Y, %H:%M:%S GMT}".format(datetime.datetime.now())
        pulled_model_files_info += f"\t{app_model_filename}\t[@ {dateInfo}]\n"

        ### and re-link it back to this folder where it got just pulled back
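A minimal sketch of the pull-back-and-relink step described above (move + symlink are the assumed mechanics; the DISABLE_PULLBACK_MODELS gating and README bookkeeping are omitted):

import os
import shutil

def pull_back_model(app_model_filepath: str, shared_model_folderpath: str) -> None:
    filename = os.path.basename(app_model_filepath)
    shared_model_filepath = os.path.join(shared_model_folderpath, filename)
    shutil.move(app_model_filepath, shared_model_filepath)  # move the real file back "home"
    os.symlink(shared_model_filepath, app_model_filepath)   # re-link it into the app folder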
@@ -626,9 +510,18 @@ def create_model_symlinks(shared_model_folderpath:str, app_model_folderpath:str,
    file_symlinks_created_count = 0

    for shared_model_filename in os.listdir(shared_model_folderpath):
        # delete hidden huggingface ".cache" directories in each model directory, as they can exist
        # from possible prior huggingface model downloads.
        # this is a fragment from the huggingface_hub, and can be safely deleted
        shared_model_filepath = os.path.join(shared_model_folderpath, shared_model_filename)
        if shared_model_filename.startswith(".cache") and not os.path.isfile(shared_model_filepath): # hidden ".cache" folder
            shutil.rmtree(shared_model_filepath) # remove the hidden ".cache" huggingface folder
            print(f"Deleted hidden huggingface .cache folder '{shared_model_filepath}'")
            continue

        if shared_model_filename.startswith("."):
            continue # skip hidden filenames like ".DS_Store" (on macOS), ".keep" (on GitHub)

        # change the "readme-*.txt" files for the symlinked app folder models
        if shared_model_filename.startswith(README_FILE_PREFIX):
            # create a new readme file for the app_model_folderpath target folder
@@ -640,11 +533,16 @@ def create_model_symlinks(shared_model_folderpath:str, app_model_folderpath:str,
            continue # skip the original "{README_FILE_PREFIX}*.txt" file

        print(f"\tprocessing shared '{model_type}' model '{shared_model_filename}' ...")
        # get the full path from the shared model filename
        src_filepath = os.path.join(shared_model_folderpath, shared_model_filename)
        dst_filepath = os.path.join(app_model_folderpath, shared_model_filename) # the dst_filepath always has the SAME filename as the src_filepath

        # skip the small dummy files and do not treat them as models
        if os.path.getsize(src_filepath) < 100:
            continue # ZERO (or up to 99 Bytes) sized "put your model here" files

        print(f"\tprocessing shared '{model_type}' model '{shared_model_filename}' ...")

        if not os.path.isfile(src_filepath): # srcFile is a sub-folder (e.g. "xlabs", or "flux")
            # skip sub-folders, as these require a separate mapping rule to support "flattening" such models
            # for apps which don't find their model_type models in sub-folders
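The actual per-file linking step follows below this hunk; a minimal sketch of it (variable names follow the code above; the skip conditions are an assumption):

import os

def link_shared_model(src_filepath: str, dst_filepath: str) -> bool:
    if os.path.islink(dst_filepath) or os.path.exists(dst_filepath):
        return False  # already linked, or a real local file is in the way
    os.symlink(src_filepath, dst_filepath)  # the app model file points at the shared model
    return True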
@@ -698,7 +596,7 @@ def create_model_symlinks(shared_model_folderpath:str, app_model_folderpath:str,
#
# SHARED_MODEL_APP_MAP_FILE (str): "_shared_models_map.json" (based in SHARED_MODELS_DIR)
# SHARED_MODEL_APP_MAP (dict) <- init from code, then write/read from path SHARED_MODEL_FOLDERS_FILE
def update_model_symlinks() -> dict:
def update_model_symlinks(): # -> dict:
    try:
        print(f"Processing the master SHARED_MODELS_DIR: {SHARED_MODELS_DIR}")
        if not os.path.exists(SHARED_MODELS_DIR):
@@ -30,6 +30,10 @@ services:
      # if you do NOT want this behaviour, then set `LOCAL_DEBUG=False` [default],
      # which is the same as NOT setting this ENV var at all.

      - PYTHONDONTWRITEBYTECODE=1
      # keep Python from generating .pyc files in the container;
      # this should however be removed for production, as it disables bytecode caching

      - FLASK_APP=app/app.py

      - FLASK_ENV=development # changed from "production" [default],
@@ -38,6 +42,9 @@ services:
      - GEVENT_SUPPORT=True # gevent monkey-patching is being used, enable gevent support in the debugger,
      # only needed when "LOCAL_DEBUG=True", otherwise this ENV var can be omitted

      - GIT_PYTHON_TRACE=full # enables full logging for the GitPython code, used for cloning the apps,
      # bcomfy custom_nodes, and refreshing the apps via git fetch/merge = git pull

      #- FLASK_DEBUG=0 # "1" allows debugging in Chrome, but then the VSCode debugger does not work; "0" is the [default], which is the same as NOT setting this ENV var at all
    volumes:
      - ./app:/app:rw
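A hedged example of what the GIT_PYTHON_TRACE=full setting above enables: GitPython then logs every git command it executes (the repo URL and target path below are placeholders):

import logging
import os

os.environ['GIT_PYTHON_TRACE'] = 'full'  # must be set before GitPython runs its first git command
logging.basicConfig(level=logging.DEBUG)  # the trace is emitted via the 'git.cmd' logger

import git  # GitPython, used for cloning/refreshing the apps

repo = git.Repo.clone_from('https://github.com/user/repo.git', '/workspace/repo')
repo.remotes.origin.fetch()  # with the trace enabled, the underlying 'git fetch' call is logged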
@@ -9,7 +9,8 @@
# "dockerRun": {
|
||||
# "envFiles": ["${workspaceFolder}/.env"], // pass additional env-vars from ".env" file to container
|
||||
|
||||
### APP specific Vars ###
|
||||
|
||||
## APP specific Vars
|
||||
DISABLE_PULLBACK_MODELS=False
|
||||
# the default is, that app model files, which are found locally (in only one app),
|
||||
# get automatically "pulled-back" into the '/workspace/shared_models' folder.
|
||||
|
@@ -20,7 +21,10 @@ DISABLE_PULLBACK_MODELS=False
# if you do NOT want this behaviour, then set DISABLE_PULLBACK_MODELS=True,
# otherwise set DISABLE_PULLBACK_MODELS=False [default], which is the same as NOT setting this ENV var at all.

### USER specific Vars and Secrets (Tokens) - TODO: adjust this for your personal settings ###

## USER specific Vars and Secrets (Tokens)
# TODO: adjust this for your personal settings
#
PUBLIC_KEY=ssh-ed25519 XXX...XXX usermail@domain.com
# Note: make sure to use the **full line content** from your `"*.pub"` key file!
@@ -35,6 +39,27 @@ CIVITAI_API_TOKEN=XXX.XXX
# Note: CivitAI currently only provides a `FULL` user token, acting as `you`,
# so be careful with how you set up this token and with whom you share it!

### RUNPOD specific Vars ###

## APP specific USER Vars
# All apps can be provisioned in at least 2 Virtual Environment versions:
# - 'official' - This setup is 'to the point', as defined and recommended by the app owners on GitHub.
# - 'latest'   - This setup extends the 'official' setup with the latest PyTorch and CUDA libraries, or
#                - in the case of ComfyUI - also provides an additional set of pre-installed Custom-Nodes.
#
# The user can choose from all available versions during Setup, or pre-select the VENV_VERSION
# which should be installed, via the following ENV vars in the format `VENV_VERSION_<app_id>`.
#
# If these ENV vars are not set/passed into the container,
# the App-Manager will provide a UI for selecting them during Setup:

#VENV_VERSION_BCOMFY=latest
#VENV_VERSION_BFORGE=latest
#VENV_VERSION_BA1111=latest
#VENV_VERSION_BKOHYA=latest

# NOTE: Kohya currently is only available as the 'latest' (FLUX-)version, and has **NO** 'official' version!

## RUNPOD specific Vars
RUNPOD_PUBLIC_IP=127.0.0.1
RUNPOD_TCP_PORT_22=22
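A hedged sketch of how such per-app vars can be resolved at startup (the `VENV_VERSION_<app_id>` format comes from the comment above; the helper name is hypothetical):

import os

def get_venv_version(app_id: str) -> str | None:
    # e.g. app_id 'bcomfy' -> ENV var 'VENV_VERSION_BCOMFY' -> 'latest' / 'official' / None
    return os.environ.get(f"VENV_VERSION_{app_id.upper()}")

# None means: let the App-Manager UI ask the user during Setup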
@@ -56,6 +56,19 @@ http {
        proxy_set_header X-Forwarded-Proto $scheme;
    }

    # Tensorboard configuration (needed by kohya_ss)
    location /tensorboard/ {
        proxy_pass http://localhost:6006/;
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_redirect off;
        proxy_http_version 1.1;
        proxy_set_header Upgrade $http_upgrade;
        proxy_set_header Connection "upgrade";
    }

    # File Browser configuration
    location /fileapp/ {
        proxy_pass http://localhost:8181/;
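A quick smoke test for the two proxied locations above (hedged: it assumes this nginx server listens on the main App-Manager port 7222 used elsewhere in this commit):

import requests

for path in ('/tensorboard/', '/fileapp/'):
    response = requests.get(f"http://localhost:7222{path}", timeout=5)
    print(path, response.status_code)  # expect 200 once Tensorboard / File Browser are running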
@@ -64,7 +64,7 @@
<p>For more detailed information and guides, please visit the <a href="https://docs.runpod.io/">RunPod Documentation</a>.</p>
|
||||
|
||||
<footer>
|
||||
<p>© 2024 RunPod Better App Manager. Created by Madiator2011.</p>
|
||||
<p>© 2024 RunPod Better App Manager. Created by Madiator2011 & lutzapps.</p>
|
||||
</footer>
|
||||
</body>
|
||||
</html>
|
official-templates/better-ai-launcher/tasks.json (new file, 112 lines)
@@ -0,0 +1,112 @@
{
    "version": "2.0.0",
    "tasks": [
        {
            "type": "docker-build",
            "label": "docker-build",
            "platform": "python",
            "dockerBuild": {
                "tag": "madiator2011/better-launcher:dev",
                "dockerfile": "${workspaceFolder}/Dockerfile",
                "context": "${workspaceFolder}",
                "pull": true
            }
        },
        {
            "type": "docker-run",
            "label": "docker-run: debug",
            "dependsOn": [
                "docker-build"
            ],
            "dockerRun": {
                "containerName": "madiator2011-better-launcher", // no "/" allowed here for container name
                "image": "madiator2011/better-launcher:dev",
                "envFiles": ["${workspaceFolder}/.env"], // pass additional env-vars (hf_token, civitai token, ssh public-key) from ".env" file to container
                "env": { // these ENV vars go into the docker container to support local debugging
                    "LOCAL_DEBUG": "True", // change app to localhost Urls and local Websockets (unsecured)
                    // if you do NOT want this behaviour, then set `LOCAL_DEBUG=False` [default],
                    // which is the same as NOT setting this ENV var at all.

                    "PYTHONDONTWRITEBYTECODE": "1",
                    // keep Python from generating .pyc files in the container;
                    // this should however be removed for production, as it disables bytecode caching

                    "FLASK_APP": "app/app.py",

                    "FLASK_ENV": "development", // changed from "production" [default],
                    // only needed when "LOCAL_DEBUG=True", otherwise this ENV var can be omitted

                    "GEVENT_SUPPORT": "True", // gevent monkey-patching is being used, enable gevent support in the debugger,
                    // only needed when "LOCAL_DEBUG=True", otherwise this ENV var can be omitted

                    "GIT_PYTHON_TRACE": "full" // enables full logging for the GitPython code, used for cloning the apps,
                    // bcomfy custom_nodes, and refreshing the apps via git fetch/merge = git pull

                    // "FLASK_DEBUG": "0" // "1" allows debugging in Chrome, but then the VSCode debugger does not work; "0" is the [default], which is the same as NOT setting this ENV var at all
                },
                "volumes": [
                    {
                        "containerPath": "/app",
                        "localPath": "${workspaceFolder}" // the "/app" folder (and sub-folders) will be mapped locally for debugging and hot-reload
                    },
                    {
                        "containerPath": "/workspace",
                        // TODO: create the below folder before you run!
                        "localPath": "${userHome}/Projects/Docker/Madiator/workspace"
                    }
                ],
                "ports": [
                    // NOTE: during debugging, "start.sh" does *not* run, and the following apps are not available right now:
                    // {
                    //     "containerPort": 22, // SSH
                    //     "hostPort": 22
                    // },
                    {
                        "containerPort": 7222, // main Flask app port "App-Manager"
                        "hostPort": 7222
                    },
                    {
                        "containerPort": 8181, // File-Browser
                        "hostPort": 8181
                    },
                    {
                        "containerPort": 7777, // VSCode-Server
                        "hostPort": 7777
                    },
                    {
                        "containerPort": 3000, // ComfyUI
                        "hostPort": 3000
                    },
                    {
                        "containerPort": 6006, // Tensorboard (needed by kohya_ss)
                        "hostPort": 6006
                    },
                    {
                        "containerPort": 7862, // Forge (aka Stable-Diffusion-WebUI-Forge)
                        "hostPort": 7862
                    },
                    {
                        "containerPort": 7863, // A1111 (aka Stable-Diffusion-WebUI)
                        "hostPort": 7863
                    },
                    {
                        "containerPort": 7864, // Kohya-ss (lutzapps - added new Kohya app with FLUX support)
                        "hostPort": 7864
                    }
                ]
            },
            "python": {
                "args": [
                    "run",
                    // "--no-debugger", // disabled to support VSCode debugger
                    // "--no-reload", // disabled to support hot-reload
                    "--host",
                    "0.0.0.0",
                    "--port",
                    "7222"
                ],
                "module": "flask"
            }
        }
    ]
}