diff --git a/official-templates/better-ai-launcher/.vscode/tasks.json b/official-templates/better-ai-launcher/.vscode/tasks.json
index f07dd81..f9ccac1 100644
--- a/official-templates/better-ai-launcher/.vscode/tasks.json
+++ b/official-templates/better-ai-launcher/.vscode/tasks.json
@@ -24,10 +24,18 @@
"envFiles": ["${workspaceFolder}/.env"], // pass additional env-vars (hf_token, civitai token, ssh public-key) from ".env" file to container
"env": { // this ENV vars go into the docker container to support local debugging
"LOCAL_DEBUG": "True", // change app to localhost Urls and local Websockets (unsecured)
+ // if you do NOT want this behaviour, then set `LOCAL_DEBUG=False` [default],
+ // which is the same as NOT setting this ENV var at all.
+
"FLASK_APP": "app/app.py",
- "FLASK_ENV": "development", // changed from "production"
- "GEVENT_SUPPORT": "True" // gevent monkey-patching is being used, enable gevent support in the debugger
- // "FLASK_DEBUG": "0" // "1" allows debugging in Chrome, but then VSCode debugger not works
+
+ "FLASK_ENV": "development", // changed from "production" [default],
+ // only needed when "LOCAL_DEBUG=True", otherwise this ENV var can be omitted
+
+ "GEVENT_SUPPORT": "True" // gevent monkey-patching is being used, enable gevent support in the debugger,
+ // only needed when "LOCAL_DEBUG=True", otherwise this ENV var can be omitted
+
+ // "FLASK_DEBUG": "0" // "1" allows debugging in Chrome, but then VSCode debugger not works, "0" is the [default], which is the same as NOT setting this ENV var at all
},
"volumes": [
{
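For orientation, the debug environment above expressed as plain shell exports, e.g. for a quick manual check outside of VS Code (a sketch only; the VS Code task sets these for you inside the container):

```bash
# the same debug environment as in tasks.json, written as shell exports
export LOCAL_DEBUG=True        # switch the app to localhost URLs and unsecured local websockets
export FLASK_APP=app/app.py
export FLASK_ENV=development   # only meaningful together with LOCAL_DEBUG=True
export GEVENT_SUPPORT=True     # the app uses gevent monkey-patching; the debugger needs this hint
# FLASK_DEBUG is left unset, which is the same as FLASK_DEBUG=0
```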
diff --git a/official-templates/better-ai-launcher/Dockerfile b/official-templates/better-ai-launcher/Dockerfile
index 6b5f09d..dbe0bd5 100644
--- a/official-templates/better-ai-launcher/Dockerfile
+++ b/official-templates/better-ai-launcher/Dockerfile
@@ -1,7 +1,7 @@
-# Use the specified base image
-
- # lutzapps - use uppercase "AS"
- FROM madiator2011/better-base:cuda12.4 AS base
+# lutzapps - use the specified CUDA version
+ARG BASE_IMAGE
+FROM ${BASE_IMAGE:-madiator2011/better-base:cuda12.4} AS base
+#FROM madiator2011/better-base:cuda12.4 AS base
# lutzapps - prepare for local developement and debugging
# needed to change the ORDER of "apt-get commands" and move the "update-alternatives" for python3
@@ -10,14 +10,16 @@
# Install Python 3.11, set it as default, and remove Python 3.10
RUN apt-get update && \
- apt-get install -y python3.11 python3.11-venv python3.11-dev python3.11-distutils aria2 git \
- pv git rsync zstd libtcmalloc-minimal4 bc nginx ffmpeg && \
- apt-get remove -y python3.10 python3.10-minimal libpython3.10-minimal libpython3.10-stdlib && \
- update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1 && \
- update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1 && \
- apt-get autoremove -y && \
- apt-get clean && \
- rm -rf /var/lib/apt/lists/*
+# removed: git (was listed twice), nginx and ffmpeg (as they are already installed with the base image)
+# added: zip (for easier folder compression)
+ apt-get install -y python3.11 python3.11-venv python3.11-dev python3.11-distutils aria2 zip \
+ pv rsync zstd libtcmalloc-minimal4 bc && \
+ apt-get remove -y python3.10 python3.10-minimal libpython3.10-minimal libpython3.10-stdlib && \
+ update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1 && \
+ update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1 && \
+ apt-get autoremove -y && \
+ apt-get clean && \
+ rm -rf /var/lib/apt/lists/*
# Install pip for Python 3.11
RUN curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py && \
@@ -63,9 +65,6 @@ EXPOSE 7222
# lutzapps - added a "app/tests" folder with script and testdata and readme file
# lutzapps - grouped NGINX files in a sub-folder for cleaner view
-# Copy the README.md
-COPY nginx/README.md /usr/share/nginx/html/README.md
-
# NGINX configuration
COPY nginx/nginx.conf /etc/nginx/nginx.conf
COPY nginx/readme.html /usr/share/nginx/html/readme.html
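A minimal sketch of exercising the new `BASE_IMAGE` build-arg directly with `docker buildx build` (the `--build-context scripts=...` mapping mirrors the `contexts` entry in `docker-bake.hcl`; tag and paths are illustrative, `docker buildx bake` remains the supported path):

```bash
# sketch only: build the CUDA 12.4 variant by hand from the template directory
cd official-templates/better-ai-launcher
docker buildx build \
  --build-arg BASE_IMAGE=madiator2011/better-base:cuda12.4 \
  --build-context scripts=../../container-template \
  -t madiator2011/better-launcher:dev \
  .
```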
diff --git a/official-templates/better-ai-launcher/README.md b/official-templates/better-ai-launcher/README.md
index 4a3d6ab..d2aa590 100644
--- a/official-templates/better-ai-launcher/README.md
+++ b/official-templates/better-ai-launcher/README.md
@@ -1,99 +1,144 @@
-# madiator-docker-runpod
-RunPod Docker Containers for RunPod
+# better-ai-launcher
+Better AI Launcher Image for RunPod and local development.
-**Better AI Launcher Container for RunPod and local develoment**
+## RunPod Better App Manager
+
+Welcome to the RunPod Better App Manager!
+This image allows you to easily manage and run various AI applications on your RunPod instance.
+
+### Features
+- Easy installation of pre-configured AI applications.
+- Start, stop, and monitor running applications.
+- View application logs in real-time.
+- Force kill applications if needed.
+- Download Manager for **HuggingFace** and **CivitAI** with `token` support for private and gated models.
+- Shared Models Management for **downloading and sharing models of all types across all installed AI applications**!
+
+### Supported Applications
+- Better Comfy UI
+- Better Forge
+- Better A1111
+- More apps coming soon (AI trainers such as `Kohya` and `ai-toolkit` are planned)
+
+### Getting Started
+- Access the Better App Manager interface through your RunPod instance URL.
+- Install the desired application by clicking the **Install** button.
+- Once installed, use the **Start** button to launch the application.
+- Access the running application using the **Open App** button.
+
+### Troubleshooting
+If you encounter any issues:
+- Check the application logs for error messages.
+- Try stopping and restarting the application.
+- Use the `Force Kill` option if an application becomes unresponsive.
+- Refer to the RunPod documentation or contact support for further assistance.
+
+For more detailed information and guides, please visit the RunPod Documentation.
+
+
+
+Part of the `madiator-docker-runpod` family of **RunPod Docker Containers for RunPod**
+
+## Github
+https://github.com/kodxana/madiator-docker-runpod
+found under the directory `official-templates/better-ai-launcher`
### Build Vars ###
-IMAGE_BASE=madiator2011/better-launcher
-
+IMAGE_BASE=madiator2011/better-launcher
IMAGE_TAG=dev
-### Github: ###
-https://github.com/kodxana/madiator-docker-runpod
+## Build Options
+To build with default options, run `docker buildx bake`; to build a specific target, run `docker buildx bake <target>`.
-### ENV Vars ###
+## Ports (System)
-These ENV vars go into the docker container to support local debugging:
-see also explanantion in ".vscode/tasks.json" or "docker-compose.debug.yml"
+- 22/tcp (SSH)
+- 7222/http (App-Manager)
+- 7777/http (VSCode-Server)
+- 8181/http (File-Browser)
-LOCAL_DEBUG=True
+## Ports (Apps)
- change app to localhost Urls and local Websockets (unsecured)
+- 3000/http (ComfyUI)
+- 7862/http (Forge) aka Stable-Diffusion-WebUI-Forge
+- 7863/http (A1111) aka Stable-Diffusion-WebUI
-FLASK_ENV=development
+*coming soon*
+- 7864/http (Kohya-ss)
+- 6006/http (Tensorboard)
- changed from "production" (default)
+## ENV Vars (System)
-GEVENT_SUPPORT=True
+These ENV vars go into the docker container to support local debugging:
+see also the explanation in `".vscode/tasks.json"` or `"docker-compose.debug.yml"`
- gevent monkey-patching is being used, enable gevent support in the debugger
-FLASK_DEBUG=0
+- LOCAL_DEBUG=True
- "1" allows debugging in Chrome, but then VSCode debugger not works
+ changes the app to localhost URLs and local WebSockets (unsecured) for local debugging.
+ **TODO**: also set up a `bind workspace` in `".vscode/tasks.json"` or `"docker-compose.debug.yml"`
+ if you do **NOT** want this behaviour, then set `LOCAL_DEBUG=False` [default],
+ which is the same as NOT setting this ENV var at all.
-*User ENV Vars for Production:*
+- FLASK_ENV=development
-### APP specific Vars ###
-DISABLE_PULLBACK_MODELS=False
+ changed from "`production`" [default].
+ only needed when `LOCAL_DEBUG=True`, otherwise this ENV var can be omitted.
-the default is, that app model files, which are found locally (in only one app),
-get automatically "pulled-back" into the '/workspace/shared_models' folder.
-From there they will be re-linked back not only to their own "pulled-back" model-type folder,
-but also will be linked back into all other corresponding app model-type folders.
-So the "pulled-back" model is automatically shared to all installed apps.
-If you NOT want this behaviour, then set DISABLE_PULLBACK_MODELS=True
+- GEVENT_SUPPORT=True
-### USER specific Vars and Secrets (Tokens) - TODO: adjust this for your personal settings ###
-PUBLIC_KEY=ssh-ed25519 xxx...xxx usermail@domain.com
+ gevent monkey-patching is being used; enable gevent support in the debugger.
+ only needed when `LOCAL_DEBUG=True`, otherwise this ENV var can be omitted.
-HF_TOKEN=hf_xxx...xxx
+- FLASK_DEBUG=0
-CIVITAI_API_TOKEN=xxx.xxx
+ "1" allows debugging in Chrome, but then the VSCode debugger will not works.
+ "0" is the [default], which is the same as NOT setting this ENV var at all.
+
+### APP specific Vars
+- DISABLE_PULLBACK_MODELS=False
+
+ By default, app model files which are found locally (in only one app) get automatically `pulled-back` into the `"/workspace/shared_models"` folder.
+ From there they are not only re-linked back into their own `pulled-back` model-type folder, but also linked into all other corresponding app model-type folders.
+ So the `pulled-back` model is automatically shared to all installed apps.
+
+ If you do **NOT** want this behaviour, then set `DISABLE_PULLBACK_MODELS=True`,
+ otherwise set `DISABLE_PULLBACK_MODELS=False` [default], which is the same as NOT setting this ENV var at all.
+
+## ENV Vars (User and Secret Tokens)
+
+**TODO: rename the file `"env.txt"` to `".env"` and adjust the ENV vars for your personal settings**
+- PUBLIC_KEY=ssh-ed25519 xxx...xxx usermail@domain.com
+
+ Your `PUBLIC ssh-key`.
+ **Note**: make sure to use the **full line content** from your `"*.pub"` key file!
+
+- HF_TOKEN=hf_xxx...xxx
+
+ Your `HuggingFace` token.
+ Can be a `READ` scoped token for downloading your `private` models, or `gated models` such as `Flux.1 Dev` or META's `Llama LLM models`.
+ The HF_TOKEN needs to be a `READ/WRITE` scoped token if you also plan to **UPLOAD** models to `HuggingFace` later, when we have trainer apps like `Kohya` or `ai-toolkit`.
+
+- CIVITAI_API_TOKEN=xxx...xxx
+
+ Your `CivitAI` API token.
+ **Note**: CivitAI currently only provides a `FULL` user token, acting as `you`, so be careful how you set up this token and with whom you share it!
**SECURITY TIP:**
-These 3 security sensitive environment vars should be stored as RUNPOD **SECRETS** and referenced directly in your POD Template in the format {{ RUNPOD_SECRET_MYENVVAR }}
+These three user-specific and **security-sensitive environment vars** should be stored as RunPod **`SECRETS`** and referenced directly in your POD Template in the format `{{ RUNPOD_SECRET_MYENVVAR }}`.
-From https://docs.runpod.io/pods/templates/secrets
+From https://docs.runpod.io/pods/templates/secrets
-You can reference your Secret directly in the Environment Variables section of your Pod template. To reference your Secret, reference it's key appended to the "RUNPOD_SECRET_" prefix.
+You can reference your Secret directly in the Environment Variables section of your Pod template. To reference your Secret, reference its key appended to the `RUNPOD_SECRET_` prefix.
-That mean, for this template/image, you should use these formats:
+That means, for this template/image, you should use these formats to pass the above ENV vars into the docker container:
-{{ RUNPOD_SECRET_PUBLIC_KEY}}
+- `{{ RUNPOD_SECRET_PUBLIC_KEY }}`
-{{ RUNPOD_SECRET_HF_TOKEN }}
+- `{{ RUNPOD_SECRET_HF_TOKEN }}`
-{{ RUNPOD_SECRET_CIVITAI_API_TOKEN }}
+- `{{ RUNPOD_SECRET_CIVITAI_API_TOKEN }}`
-
-### Ports: ###
- SSH-Port
-22:22/tcp
-
- App-Manager
-7222:7222/http
-
- VSCode-Server
-7777:7777/http
-
- File-Browser
-8181:8181/http
-
-
-### Apps: ###
- ComfyUI
-3000:3000/http
-
- Forge (Stable-Diffiusion-WebUI-Forge)
-7862:7862/http
-
- A1111 (Stable-Diffiusion-WebUI)
-7863:7863/http
-
-*coming soon*
-
- Kohya-ss
-7864:7864/http
\ No newline at end of file
+(c) 2024 RunPod Better App Manager. Created by Madiator2011.
\ No newline at end of file
diff --git a/official-templates/better-ai-launcher/app/tests/populate_testdata.sh b/official-templates/better-ai-launcher/app/tests/populate_testdata.sh
index f93ef0d..26442b6 100755
--- a/official-templates/better-ai-launcher/app/tests/populate_testdata.sh
+++ b/official-templates/better-ai-launcher/app/tests/populate_testdata.sh
@@ -1,6 +1,6 @@
#!/bin/bash
-# please use the "./readme-testdata.txt" before you extract these TARs!!!
+# please read "./README-SHARED_MODELS.txt" before you extract these TARs!!!
# Testcase #1
tar -xzf /app/tests/testdata_shared_models_link.tar.gz /workspace
diff --git a/official-templates/better-ai-launcher/docker-bake.hcl b/official-templates/better-ai-launcher/docker-bake.hcl
index 5dfbfd9..2fc927e 100644
--- a/official-templates/better-ai-launcher/docker-bake.hcl
+++ b/official-templates/better-ai-launcher/docker-bake.hcl
@@ -1,16 +1,28 @@
group "default" {
- targets = ["better-ai-launcher"]
+ targets = [
+ "better-ai-launcher-cuda124",
+ "better-ai-launcher-cuda121"
+ ]
}
-target "better-ai-launcher" {
+target "better-ai-launcher-cuda124" {
+ dockerfile = "Dockerfile"
+ args = {
+ BASE_IMAGE = "madiator2011/better-base:cuda12.4",
+ }
+ contexts = {
+ scripts = "../../container-template"
+ }
+ tags = ["madiator2011/better-launcher:dev"]
+}
+
+target "better-ai-launcher-cuda121" {
dockerfile = "Dockerfile"
args = {
BASE_IMAGE = "madiator2011/better-base:cuda12.1",
}
contexts = {
scripts = "../../container-template"
- proxy = "../../container-template/proxy"
- logo = "../../container-template"
}
- tags = ["madiator2011/better-launcher:dev"]
+ tags = ["madiator2011/better-launcher:dev-cuda121"]
}
\ No newline at end of file
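With the two targets defined above, the bake commands referenced in the README look like this:

```bash
# build both CUDA variants (the "default" group)
docker buildx bake

# build only one variant by naming its target
docker buildx bake better-ai-launcher-cuda121
```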
diff --git a/official-templates/better-ai-launcher/docker-compose.debug.yml b/official-templates/better-ai-launcher/docker-compose.debug.yml
index c4bc6de..2ba58ba 100644
--- a/official-templates/better-ai-launcher/docker-compose.debug.yml
+++ b/official-templates/better-ai-launcher/docker-compose.debug.yml
@@ -14,10 +14,18 @@ services:
- .env # pass additional env-vars (hf_token, civitai token, ssh public-key) from ".env" file to container
environment:
- LOCAL_DEBUG=True # change app to localhost Urls and local Websockets (unsecured)
+ # if you do NOT want this behaviour, then set `LOCAL_DEBUG=False` [default],
+ # which is the same as NOT setting this ENV var at all.
+
- FLASK_APP=app/app.py
- - FLASK_ENV=development # changed from "production"
- - GEVENT_SUPPORT=True # gevent monkey-patching is being used, enable gevent support in the debugger
- #- "FLASK_DEBUG": "0" # "1" allows debugging in Chrome, but then VSCode debugger not works
+
+ - FLASK_ENV=development # changed from "production" [default],
+ # only needed when "LOCAL_DEBUG=True", otherwise this ENV var can be omitted
+
+ - GEVENT_SUPPORT=True # gevent monkey-patching is being used, enable gevent support in the debugger,
+ # only needed when "LOCAL_DEBUG=True", otherwise this ENV var can be omitted
+
+ #- "FLASK_DEBUG": "0" # "1" allows debugging in Chrome, but then VSCode debugger not works, "0" is the [default], which is the same as NOT setting this ENV var at all
volumes:
- ./app:/app:rw
- ${HOME}/Projects/Docker/madiator:/workspace:rw # TODO: create the below folder before you run!
\ No newline at end of file
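A minimal sketch of launching the local debug stack with this compose file (assuming Docker Compose v2, a prepared `.env` file, and that the bind-mounted workspace folder from the TODO above exists):

```bash
cd official-templates/better-ai-launcher
mkdir -p "${HOME}/Projects/Docker/madiator"   # the bind-mounted /workspace folder (see the TODO above)
docker compose -f docker-compose.debug.yml up --build
```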
diff --git a/official-templates/better-ai-launcher/env.txt b/official-templates/better-ai-launcher/env.txt
index 3231f58..f4537fd 100644
--- a/official-templates/better-ai-launcher/env.txt
+++ b/official-templates/better-ai-launcher/env.txt
@@ -22,12 +22,23 @@ DISABLE_PULLBACK_MODELS=False
# but also will be linked back into all other corresponding app model-type folders.
# So the "pulled-back" model is automatically shared to all installed apps.
#
-# if you NOT want this behaviour, then set DISABLE_PULLBACK_MODELS=True
+# if you do NOT want this behaviour, then set DISABLE_PULLBACK_MODELS=True,
+# otherwise set DISABLE_PULLBACK_MODELS=False [default], which is the same as NOT setting this ENV var at all.
### USER specific Vars and Secrets (Tokens) - TODO: adjust this for your personal settings ###
PUBLIC_KEY=ssh-ed25519 XXX...XXX usermail@domain.com
+# Note: make sure to use the **full line content** from your `"*.pub"` key file!
+
HF_TOKEN=hf_XXX...XXX
+# Your `HuggingFace` token.
+# Can be a `READ` scoped token for downloading your "private" models, or "gated models" such as `Flux.1 Dev` or META's `Llama LLM models`.
+# The HF_TOKEN needs to be a `READ/WRITE` scoped token if you also plan to UPLOAD models to "HuggingFace" later,
+# when we have trainer apps like "Kohya" or "ai-toolkit".
+
CIVITAI_API_TOKEN=XXX.XXX
+# Your `CivitAI` API token.
+# Note: CivitAI currently only provides a `FULL` user token, acting as `you`,
+# so be careful how you set up this token and with whom you share it!
### RUNPOD specific Vars ###
RUNPOD_PUBLIC_IP=127.0.0.1
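As the README's TODO notes, `env.txt` is only a template; a hedged sketch of turning it into a usable `.env` (the placeholder values must be replaced with your own keys/tokens):

```bash
cd official-templates/better-ai-launcher
cp env.txt .env
# edit .env and replace the XXX...XXX placeholders with your real
# PUBLIC_KEY, HF_TOKEN and CIVITAI_API_TOKEN before starting a debug container
```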
diff --git a/official-templates/better-ai-launcher/nginx/README.md b/official-templates/better-ai-launcher/nginx/README.md
deleted file mode 100644
index 3fb5ea4..0000000
--- a/official-templates/better-ai-launcher/nginx/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-## Build Options
-
-To build with default options, run `docker buildx bake`, to build a specific target, run `docker buildx bake `.
-
-## Ports
-
-- 22/tcp (SSH)