mirror of https://github.com/kodxana/madiator-docker-runpod.git
synced 2024-11-22 02:40:12 +01:00
Upgrade Model Downloader (GUI State, SHA256 Checks, HF and CIVITAI token ENV vars)
This commit is contained in:
parent 0b4b82dec5
commit 6f02f95993
4 changed files with 588 additions and 138 deletions
@@ -73,6 +73,8 @@ S3_BASE_URL = "https://better.s3.madiator.com/"
SETTINGS_FILE = '/workspace/.app_settings.json'

CIVITAI_TOKEN_FILE = '/workspace/.civitai_token'
HF_TOKEN_FILE = '/workspace/.hf_token' # lutzapps - added support for HF_TOKEN_FILE

def load_settings():
    if os.path.exists(SETTINGS_FILE):
@@ -544,11 +546,58 @@ def save_civitai_token(token):
    with open(CIVITAI_TOKEN_FILE, 'w') as f:
        json.dump({'token': token}, f)

# lutzapps - added function - 'HF_TOKEN' ENV var
def load_huggingface_token():
    # look FIRST for Huggingface token passed in as 'HF_TOKEN' ENV var
    HF_TOKEN = os.environ.get('HF_TOKEN', '')

    if not HF_TOKEN == "":
        print("'HF_TOKEN' ENV var found")
        ## send the found token to the WebUI "Models Downloader" 'hfToken' Password field to use
        # send_websocket_message('extend_ui_helper', {
        #     'cmd': 'hfToken', # 'hfToken' must match the DOM Id of the WebUI Password field in "index.html"
        #     'message': "Put the HF_TOKEN in the WebUI Password field 'hfToken'"
        # } )

        return HF_TOKEN

    # only if the 'HF_API_TOKEN' ENV var was not found, then handle it via local hidden HF_TOKEN_FILE
    try:
        if os.path.exists(HF_TOKEN_FILE):
            with open(HF_TOKEN_FILE, 'r') as f:
                data = json.load(f)
                return data.get('token')
    except:
        return None

    return None

# lutzapps - modified function - support 'CIVITAI_API_TOKEN' ENV var
def load_civitai_token():
    # look FIRST for CivitAI token passed in as 'CIVITAI_API_TOKEN' ENV var
    CIVITAI_API_TOKEN = os.environ.get('CIVITAI_API_TOKEN', '')

    if not CIVITAI_API_TOKEN == "":
        print("'CIVITAI_API_TOKEN' ENV var found")
        ## send the found token to the WebUI "Models Downloader" 'hfToken' Password field to use
        # send_websocket_message('extend_ui_helper', {
        #     'cmd': 'civitaiToken', # 'civitaiToken' must match the DOM Id of the WebUI Password field in "index.html"
        #     'message': 'Put the CIVITAI_API_TOKEN in the WebUI Password field "civitaiToken"'
        # } )

        return CIVITAI_API_TOKEN

    # only if the 'CIVITAI_API_TOKEN' ENV var is not found, then handle it via local hidden CIVITAI_TOKEN_FILE
    try:
        if os.path.exists(CIVITAI_TOKEN_FILE):
            with open(CIVITAI_TOKEN_FILE, 'r') as f:
                data = json.load(f)
                return data.get('token')
    except:
        return None

    return None

@app.route('/save_civitai_token', methods=['POST'])
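Both loaders above use the same lookup order: the ENV var wins, otherwise the hidden token file under /workspace is read. A condensed sketch of that pattern (the load_token helper name is illustrative, not part of the commit):

import json, os

def load_token(env_var: str, token_file: str):
    # ENV var takes precedence over the hidden token file
    token = os.environ.get(env_var, '')
    if token != "":
        return token
    try:
        if os.path.exists(token_file):
            with open(token_file, 'r') as f:
                return json.load(f).get('token')
    except Exception:
        return None
    return None

# load_token('HF_TOKEN', HF_TOKEN_FILE)
# load_token('CIVITAI_API_TOKEN', CIVITAI_TOKEN_FILE)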
@@ -564,6 +613,12 @@ def get_civitai_token_route():
    token = load_civitai_token()
    return jsonify({'token': token})

# lutzapps - add support for passed in "HF_TOKEN" ENV var
@app.route('/get_huggingface_token', methods=['GET'])
def get_hugginface_token_route():
    token = load_huggingface_token()
    return jsonify({'token': token})

# lutzapps - CHANGE #9 - return model_types to populate the Download manager Select Option
# new function to support the "Model Downloader" with the 'SHARED_MODEL_FOLDERS' dictionary
@app.route('/get_model_types', methods=['GET'])
@@ -599,12 +654,12 @@ def download_model_route():
    model_name = request.json.get('model_name')
    model_type = request.json.get('model_type')
    civitai_token = request.json.get('civitai_token') or load_civitai_token()
    hf_token = request.json.get('hf_token')
    hf_token = request.json.get('hf_token') or load_huggingface_token() # lutzapps - added HF_TOKEN ENV var support
    version_id = request.json.get('version_id')
    file_index = request.json.get('file_index')

    is_civitai, _, _, _ = check_civitai_url(url)
    is_huggingface, _, _, _, _ = check_huggingface_url(url)
    is_huggingface, _, _, _, _ = check_huggingface_url(url) # TODO: double call

    if not (is_civitai or is_huggingface):
        return jsonify({'status': 'error', 'message': 'Unsupported URL. Please use Civitai or Hugging Face URLs.'}), 400
@@ -613,7 +668,7 @@ def download_model_route():
        return jsonify({'status': 'error', 'message': 'Civitai token is required for downloading from Civitai.'}), 400

    try:
        success, message = download_model(url, model_name, model_type, send_websocket_message, civitai_token, hf_token, version_id, file_index)
        success, message = download_model(url, model_name, model_type, civitai_token, hf_token, version_id, file_index)
        if success:
            if isinstance(message, dict) and 'choice_required' in message:
                return jsonify({'status': 'choice_required', 'data': message['choice_required']})
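For manual testing of the route above, a client call could look like the following sketch. The '/download_model' path and the localhost:7222 base URL are assumptions for illustration; the payload keys are exactly the fields read via request.json in this hunk:

import requests

payload = {
    'url': 'https://civitai.com/models/90352/dreamshaper',  # example URL used elsewhere in this commit
    'model_name': None,        # optional, derived from platform metadata when omitted
    'model_type': 'ckpt',
    'civitai_token': None,     # optional, falls back to CIVITAI_API_TOKEN ENV var / token file
    'hf_token': None,          # optional, falls back to HF_TOKEN ENV var / token file
    'version_id': None,
    'file_index': None,
}
response = requests.post('http://localhost:7222/download_model', json=payload)  # assumed endpoint path
print(response.json())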
@@ -908,7 +908,8 @@
    font-size: 12px;
}

#model-download-status,
/* lutzapps - allow word-wrap for model-download-status to see downloaded file at status: 'Complete'
/*#model-download-status,*/
#model-download-speed,
#model-download-eta {
    margin-top: 5px;

@@ -918,6 +919,12 @@
    overflow: hidden;
    text-overflow: ellipsis;
}
#model-download-status {
    margin-top: 5px;
    height: 20px;
    line-height: 20px;
    word-wrap: break-word;
}

/* Update the CSS for the token saving textbox */
#civitaiTokenSave {

@@ -981,11 +988,12 @@
    margin-top: 20px;
}

/* lutzapps - double definition
#model-download-status,
#model-download-speed,
#model-download-eta {
    margin-top: 10px;
}
} */

#recreate-symlinks-container {
    margin-top: 20px;
@@ -1218,7 +1226,7 @@
<!-- <option value="ESRGAN">Upscaler</option> -->
</select>
<input type="password" id="civitaiToken" placeholder="Civitai API Token">
<input type="text" id="hfToken" placeholder="Hugging Face API Token (optional)">
<input type="password" id="hfToken" placeholder="Hugging Face API Token (optional)">
<button onclick="downloadModel()" class="settings-button">Download Model</button>
</div>
<div id="model-download-progress" style="display: none;">

@@ -1245,27 +1253,27 @@
<div class="example-urls">
    <div class="example-url">
        <span class="example-label">Stable Diffusion:</span>
        <a href="#" class="example-link" id="example-sd" onclick="copyToClipboard(this.textContent); return false;"></a>
        <a href="#" class="example-link" id="example-sd" onclick="useInModelDownloader('ckpt', this.textContent); return false;"></a>
    </div>
    <div class="example-url">
        <span class="example-label">LoRA:</span>
        <a href="#" class="example-link" id="example-lora" onclick="copyToClipboard(this.textContent); return false;"></a>
        <a href="#" class="example-link" id="example-lora" onclick="useInModelDownloader('loras', this.textContent); return false;"></a>
    </div>
    <div class="example-url">
        <span class="example-label">VAE:</span>
        <a href="#" class="example-link" id="example-vae" onclick="copyToClipboard(this.textContent); return false;"></a>
        <a href="#" class="example-link" id="example-vae" onclick="useInModelDownloader('vae', this.textContent); return false;"></a>
    </div>
    <div class="example-url">
        <span class="example-label">Upscaler:</span>
        <a href="#" class="example-link" id="example-upscaler" onclick="copyToClipboard(this.textContent); return false;"></a>
        <a href="#" class="example-link" id="example-upscaler" onclick="useInModelDownloader('upscale_models', this.textContent); return false;"></a>
    </div>
    <div class="example-url">
        <span class="example-label">Flux Dev:</span>
        <a href="#" class="example-link" id="example-flux-dev" onclick="copyToClipboard(this.textContent); return false;"></a>
        <a href="#" class="example-link" id="example-flux-dev" onclick="useInModelDownloader('unet', this.textContent); return false;"></a>
    </div>
    <div class="example-url">
        <span class="example-label">Flux Schnell:</span>
        <a href="#" class="example-link" id="example-flux-schnell" onclick="copyToClipboard(this.textContent); return false;"></a>
        <a href="#" class="example-link" id="example-flux-schnell" onclick="useInModelDownloader('unet', this.textContent); return false;"></a>
    </div>
</div>
</div>
@@ -1303,6 +1311,17 @@
const podId = '{{ pod_id }}';
const WS_PORT = 7222; // This is the Nginx port

// lutzapps - remember the last know MODELTYPE_SELECTED of the WebUI Dom Id 'modelType' "select dropdown" model list
// so the selection can be restored between "Tab Switches", and during refreshes/reloads of the modelType list
// this is handled by the extendUIHelper() function, which is called directly from JavaScript events,
// but is also called indirectly from Python code via WS message type='extend_ui_helper'
// e.g. from the model_utils:download_civitai_model() function, to preserve "state" of the selected modelType

let MODELTYPE_SELECTED = "";
modelType.onchange = function() {
    MODELTYPE_SELECTED = this.value;
    alert("onchange=" + MODELTYPE_SELECTED);
}

// *** lutzapps - Change #2 - support to run locally at http://localhost:${WS_PORT} (3 locations in "index.html")
const enable_unsecure_localhost = '{{ enable_unsecure_localhost }}';

@@ -1423,6 +1442,9 @@
}

if (data.stage === 'Complete') {
    // lutzapps - clear 'Speed: N/A' and 'ETA: Calculating...' Div textContent, when no eta/speed data, but 'Complete'
    etaDiv.textContent = '';
    speedDiv.textContent = '';
    loadModelFolders(); // Refresh the Existing Models section when download is complete
}
}
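The comment block above describes the Python-to-JavaScript bridge: the backend emits a WebSocket message of type 'extend_ui_helper', and the message handler further down forwards its data payload to extendUIHelper(). The sending side, mirroring the call that appears later in this diff in model_utils, looks like this sketch:

# Python side of the extendUIHelper() bridge (send_websocket_message comes from utils.websocket_utils)
send_websocket_message('extend_ui_helper', {
    'cmd': 'selectModelType',
    'model_type': 'loras',   # e.g. "loras" or "vae"
    'message': "Select the ModelType 'loras' to download"
})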
@@ -1534,20 +1556,74 @@
    setInterval(updateStatus, 5000);
    setInterval(updateLogs, 1000);

    initModelTypes(); // lutzapps - Change #4a - initialize the ModelTypes for the "Model Downloader"
    var data = {};
    data.cmd = 'refreshModelTypes';
    extendUIHelper(data); // lutzapps - initialize the available SHARED_MODEL_FOLDERS for the "Model Downloader" modelType select list
}

// lutzapps - Change #4b - populate modeltype select options from shared_models
async function initModelTypes() {
    const modelTypeSelect = document.getElementById('modelType');
// lutzapps - populate modeltype select options from shared_models
async function extendUIHelper(data) {
    // check the data
    // if no data is passed, the default cmd is "selectModelType" with no specific "model_type",
    // which means to re-select the last selected modelType option from the global var MODELTYPE_SELECTED
    var cmd = "selectModelType"; // default cmd, when called with empty data or empty cmd
    var model_type = MODELTYPE_SELECTED; // the 'modelType' option value (=foldername) for the select dropdown list
    var token = ""; // the token value of HF_TOKEN or CIVITAI_API_TOKEN to pass to the corresponding WebUI Password fields on the "Models" tab

    var response;
    var result;

    if (data !== undefined && data.cmd !== undefined) {
        cmd = data.cmd;
        if (cmd === "selectModelType" && data.model_type !== undefined) {
            model_type = data.model_type; // the model_type which is passed in to select
        }
        if ((cmd === "hfToken" || cmd === "civitaiToken") // cmd need to match the DOM id of the Password field on the "Models" tab
            && data.token !== undefined) {
            token = data.token; // if token = undefined or empty "", then the corresponding token get fetched from the server
        }
    }

    //alert("extendUIHelper(): cmd=" + cmd +", model_type=" + model_type + ", token=" + token); // debug-info (DISABLED)

    switch (cmd) {
        case "civitaiToken":
            if (token === "") { // get the data from the Server
                response = await fetch('/get_civitai_token');
                result = await response.json();
                token = result['token'];
            }

            alert(cmd + "=" + token);
            // pass tokens from HF or CIVITAI ENV vars into their Password fields
            document.getElementById(cmd).value = token; //'********'; // indicate a found token, but require to call back

            break;

        case "hfToken":
            if (token === "") { // get the data from the Server
                response = await fetch('/get_huggingface_token');
                result = await response.json();
                token = result['token'];
            }

            alert(cmd + "=" + token);
            // pass tokens from HF or CIVITAI ENV vars into their Password fields
            document.getElementById(cmd).value = token; //'********'; // indicate a found token, but require to call back

            break;

        case "refreshModelTypes":
            // refresh and optionally select the 'modelType' list for "Model Downloader"
            var modelTypeSelect = document.getElementById('modelType');

            // get the data from the Server
            const response = await fetch('/get_model_types');
            const result = await response.json();
            response = await fetch('/get_model_types');
            result = await response.json();

            //alert(JSON.stringify(result)); // show the JSON-String
            var model_types = result; // get the JSON-Object
            var count = Object.keys(model_types).length; // #18 when using the default SHARED_MODEL_FOLDERS dict
            var count = Object.keys(model_types).length; // count=18, when using the default SHARED_MODEL_FOLDERS dict

            // the "/get_model_types" app.get_model_types_route() function checks
            // if the SHARED_MODELS_DIR shared files already exists at the "/workspace" location.
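The loop in the next hunk expects '/get_model_types' to return a JSON object keyed by string indices, each entry carrying a 'modelfolder' value and a 'desc' label. A sketch of a route body that would produce that shape from the SHARED_MODEL_FOLDERS dict; the exact server implementation is not part of this excerpt:

from flask import jsonify

@app.route('/get_model_types', methods=['GET'])
def get_model_types_route():
    model_types = {}
    for i, (model_folder, desc) in enumerate(SHARED_MODEL_FOLDERS.items()):
        model_types[str(i)] = {'modelfolder': model_folder, 'desc': desc}
    return jsonify(model_types)  # e.g. {"0": {"modelfolder": "ckpt", "desc": "..."}, ...}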
@@ -1560,26 +1636,73 @@
            // when SHARED_MODELS_DIR exists (or updates), this function will be called via a Socket Message
            // to "refresh" its content automatically

            var modelTypeSelected = modelTypeSelect.value; // remember the current selected modelType.option value
            modelTypeSelect.options.length = 0; // clear all current modelTypeSelect options

            for (i = 0 ; i < count; i += 1) {
                modelTypeOption = document.createElement('option');

                modelTypeOption.setAttribute('value', model_types[String(i)]['modelfolder']);
                modelType = model_types[String(i)]['modelfolder'];
                modelTypeOption.setAttribute('value', modelType);
                modelTypeOption.appendChild(document.createTextNode(model_types[String(i)]['desc']));
                //if (modelFolder === modelTypeSelected) {
                //    modelTypeOption.selected = true; // reselect it
                //}

                modelTypeSelect.appendChild(modelTypeOption);
            }

            //modelTypeSelect.selectedIndex = modelfolder_index; // set the selected index
            //modelTypeSelect.options[mmodelfolder_index].selected = true; // and mark it as "selected" option
            if (modelTypeSelected === "") { // initial refresh, called by initializeUI() function
                modelTypeSelect.selectedIndex = 0; // use the first modelType option, usually "ckpt"
                MODELTYPE_SELECTED = modelTypeSelect.options[0].value; // NOT handled by the onchange() event handler
            }
            else {
                modelTypeSelect.value = modelTypeSelected; // (re-)apply the selected modelType option
                MODELTYPE_SELECTED = modelTypeSelected; // NOT handled by the onchange() event handler
            }

            break;

        case "selectModelType":
            // this is called by model_utils:download_civitai_model() which passed the downloading 'model_type'
            // to select for clarity, which can be different from the currently selected modelType option
            // if called without a 'model_type', the last MODELTYPE_SELECTED will be selected (after a "Tab Switch")

            // refresh and optionally select the 'modelType' list for "Model Downloader"
            var modelTypeSelect = document.getElementById('modelType');
            modelTypeSelect.value = model_type;
            MODELTYPE_SELECTED = model_type; // NOT handled by the onchange() event handler

            break;

        default: // no cmd passed is same as "selectModelType" without a 'model_type'
            // this is already handled by "selectModelType" defaults, there is no "default" case needed here
    }
}

async function downloadModel() {
    const url = document.getElementById('modelUrl').value;
    const modelName = document.getElementById('modelName').value;
    const modelType = document.getElementById('modelType').value;
    const hfToken = document.getElementById('hfToken').value;

    //const hfToken = document.getElementById('hfToken').value;
    let hfToken = null;
    let civitaiToken = null;

    // lutzapps - support HF_TOKEN ENV var
    // Check if the URL is from Huggingface
    if (url.toLowerCase().includes('huggingface.co')) { // be case-insensitive with this url
        hfToken = document.getElementById('hfToken').value;
        if (hfToken === '********') {
            // if the token is masked, fetch it from the server
            const response = await fetch('/get_huggingface_token');
            const result = await response.json();
            hfToken = result.token;
        }
    }
    // Check if the URL is from Civitai
    if (url.includes('civitai.com')) {
    if (url.toLowerCase().includes('civitai.com')) { // lutzapps - be case-insensitive with this url
        civitaiToken = document.getElementById('civitaiToken').value;
        if (civitaiToken === '********') {
            // If the token is masked, fetch it from the server
@@ -1595,7 +1718,7 @@
        }

        await startModelDownload(url, modelName, modelType, civitaiToken, hfToken);
    }
}

async function startModelDownload(url, modelName, modelType, civitaiToken, hfToken, versionId = null, fileIndex = null) {

@@ -1797,6 +1920,19 @@
    }
}

// lutzapps - added HF_TOKEN support
async function loadHFToken() {
    try {
        const response = await fetch('/get_huggingface_token');
        const result = await response.json();
        if (result.token) {
            document.getElementById('hfToken').value = '********';
        }
    } catch (error) {
        console.error('Error loading Huggingface token:', error);
    }
}

function formatSize(sizeInBytes) {
    const units = ['B', 'KB', 'MB', 'GB', 'TB'];
    let size = sizeInBytes;
@@ -1823,7 +1959,7 @@
    }
}

// Example URLs
// Example URLs - lutzapps - TODO: enrich data
const exampleUrls = {
    'Stable-diffusion': 'https://civitai.com/models/90352/dreamshaper',
    'Lora': 'https://civitai.com/models/58390?modelVersionId=62833',

@@ -1842,6 +1978,25 @@
    document.getElementById('example-flux-schnell').textContent = exampleUrls['Flux-Schnell'];
}

// lutzapps - replace function copyToClipboard() with function useInModelDownloader()
function useInModelDownloader(modelType, modelUrl) {
    // copy the downloadUrl in the Model Downloader "Url" textbox
    document.getElementById('modelUrl').value = modelUrl;

    // select the modelType in the modelType select list, we are just about to download
    var data = {};
    data.cmd = 'selectModelType';
    data.model_type = modelType;
    extendUIHelper(data);

    navigator.clipboard.writeText(modelUrl).then(() => {
        alert('URL copied to Downloader and into the Clipboard!');
    }, (err) => {
        console.error('Could not copy text: ', err);
    });
}

// lutzapps - obsolete function (can be deleted)
function copyToClipboard(text) {
    navigator.clipboard.writeText(text).then(() => {
        alert('URL copied to clipboard!');
@@ -1852,8 +2007,12 @@

// Call this function when the Models tab is opened
document.querySelector('.navbar-tabs a[onclick="openTab(event, \'models-tab\')"]').addEventListener('click', function() {
    loadModelFolders();
    //alert("querySelector");
    loadModelFolders(); // lutzapps - this ModelFolders is NOT for the 'modelType' "select dropdown" model list
    extendUIHelper(); // lutzapps - select the last know MODELTYPE_SELECTED in the WebUI Dom Id 'modelType' "select dropdown" model list
    loadCivitaiToken();
    loadHFToken(); // lutzapps - added HF_TOKEN ENV var Support

    updateExampleUrls();
});

@@ -1878,10 +2037,13 @@
// Additional actions when switching tabs
if (tabName === 'apps-tab') {
    document.querySelector('.logs-section').style.display = 'flex';
    updateLogs(); // Refresh logs when switching back to Apps tab
    updateLogs(); // Refresh logs when switching back to the "Apps" tab
} else if (tabName === 'models-tab') {
    loadModelFolders();
    loadCivitaiToken();
    // lutzapps - the following event handler functions already fired
    // in the queryselector eventhandler (esspecially for the "models-tab" tab)
    //loadModelFolders();
    //loadCivitaiToken();
    //loadHFToken(); // lutzapps - added HF_TOKEN Support
} else if (tabName === 'settings-tab') {
    loadSshDetails();
    updateFileBrowserStatus();
@@ -2081,9 +2243,9 @@
    updateModelDownloadProgress(data.data);
} else if (data.type === 'status_update') {
    updateAppStatus(data.data);
// lutzapps - Change #6 - int the "Model Downloader's" ModelTypes select dropdown list
} else if (data.type === 'init_model_downloader_model_types') {
    initModelTypes(data.data);
// lutzapps - use the extendUIHelper to "bridge" certain extensions between Python and JavaScript
} else if (data.type === 'extend_ui_helper') {
    extendUIHelper(data.data);
}
// Handle other message types as needed
} catch (error) {
@@ -6,10 +6,12 @@ import json
import re
import time
import math
### model_utils-v0.2 by lutzapps, Oct 30th 2024 ###
# lutzapps - modify for new shared_models module and overwrite for this module
from utils.shared_models import (ensure_shared_models_folders, SHARED_MODELS_DIR)
from utils.shared_models import (ensure_shared_models_folders, update_model_symlinks, SHARED_MODELS_DIR)
from utils.websocket_utils import send_websocket_message, active_websockets

#SHARED_MODELS_DIR = '/workspace/shared_models'
#SHARED_MODELS_DIR = '/workspace/shared_models' # this global var is now owned by the 'shared_models' module

# lutzapps - modify this CivitAI model_type mapping to the new SHARED_MODEL_FOLDERS map
MODEL_TYPE_MAPPING = {

@@ -69,17 +71,17 @@ def check_huggingface_url(url):

    return True, repo_id, filename, folder_name, branch_name

def download_model(url, model_name, model_type, send_websocket_message, civitai_token=None, hf_token=None, version_id=None, file_index=None):
def download_model(url, model_name, model_type, civitai_token=None, hf_token=None, version_id=None, file_index=None):
    ensure_shared_folder_exists()
    is_civitai, is_civitai_api, model_id, _ = check_civitai_url(url)
    is_huggingface, repo_id, hf_filename, hf_folder_name, hf_branch_name = check_huggingface_url(url)
    is_huggingface, repo_id, hf_filename, hf_folder_name, hf_branch_name = check_huggingface_url(url) # TODO: double call

    if is_civitai or is_civitai_api:
        if not civitai_token:
            return False, "Civitai token is required for downloading from Civitai"
        success, message = download_civitai_model(url, model_name, model_type, send_websocket_message, civitai_token, version_id, file_index)
        success, message = download_civitai_model(url, model_name, model_type, civitai_token, version_id, file_index)
    elif is_huggingface:
        success, message = download_huggingface_model(url, model_name, model_type, send_websocket_message, repo_id, hf_filename, hf_folder_name, hf_branch_name, hf_token)
        success, message = download_huggingface_model(url, model_name, model_type, repo_id, hf_filename, hf_folder_name, hf_branch_name, hf_token)
    else:
        return False, "Unsupported URL"
@@ -92,7 +94,8 @@ def download_model(url, model_name, model_type, send_websocket_message, civitai_

    return success, message

def download_civitai_model(url, model_name, model_type, send_websocket_message, civitai_token, version_id=None, file_index=None):
# lutzapps - added SHA256 checks for already existing ident and downloaded HuggingFace model
def download_civitai_model(url, model_name, model_type, civitai_token, version_id=None, file_index=None):
    try:
        is_civitai, is_civitai_api, model_id, url_version_id = check_civitai_url(url)

@@ -139,21 +142,180 @@ def download_civitai_model(url, model_name, model_type, send_websocket_message,
                }
            }
        else:
            file_to_download = files[0]
            civitai_file = files[0] # that is the metadata civitai_file

        download_url = file_to_download['downloadUrl']
        download_url = civitai_file['downloadUrl']
        if not model_name:
            model_name = file_to_download['name']
            model_name = civitai_file['name']

        model_path = os.path.join(SHARED_MODELS_DIR, model_type, model_name)

        platformInfo = {
            "platform_name": 'civitai',
            "civitai_file": civitai_file # civitai_file metadata dictionary
        }
        # call shared function for "huggingface" and "civitai" for SHA256 support and "Model Downloader UI" extended support
        download_sha256_hash, found_ident_local_model, message = get_modelfile_hash_and_ident_existing_modelfile_exists(
            model_name, model_type, model_path, # pass local workspace vars, then platform specific vars as dictionary
            platformInfo) # [str, bool, str]

        if found_ident_local_model:
            return True, message

        # model_path does NOT exist - run with original code

        os.makedirs(os.path.dirname(model_path), exist_ok=True)

        return download_file(download_url, model_path, send_websocket_message, headers)
        # lutzapps - add SHA256 check for download_sha256_hash is handled after download finished in download_file()
        return download_file(download_url, download_sha256_hash, model_path, headers) # [bool, str]

    except requests.RequestException as e:
        return False, f"Error downloading from Civitai: {str(e)}"
    except Exception as e: # requests.RequestException as e:

def download_huggingface_model(url, model_name, model_type, send_websocket_message, repo_id, hf_filename, hf_folder_name, hf_branch_name, hf_token=None):
        return False, f"Exception downloading from CivitAI: {str(e)}"
# lutzapps - calculate the SHA256 hash string of a file
def get_sha256_hash_from_file(file_path:str) -> tuple[bool, str]:
    import hashlib # support SHA256 checks

    try:
        sha256_hash = hashlib.sha256()

        with open(file_path, "rb") as f:
            # read and update hash string value in blocks of 4K
            for byte_block in iter(lambda: f.read(4096), b""):
                sha256_hash.update(byte_block)

        return True, sha256_hash.hexdigest().upper()

    except Exception as e:
        return False, str(e)
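A usage sketch for the helper above, comparing a local file against an expected hash (the path and the expected_sha256_hash value are placeholders); on failure the second tuple element carries the exception text instead of a hash:

ok, local_hash = get_sha256_hash_from_file('/workspace/shared_models/ckpt/model.safetensors')
if ok and local_hash == expected_sha256_hash.upper():
    print("identical file already present - skip the download")
elif ok:
    print("SHA256 mismatch - a (re-)download is needed")
else:
    print(f"hashing failed: {local_hash}")  # on failure the second value is the error text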
# lutzapps - support SHA256 Hash check of already locally existing modelfile against its metadata hash before downloading is needed
# shared function for "huggingface" and "civitai" called by download_huggingface_model() and download_civitai_model()
def get_modelfile_hash_and_ident_existing_modelfile_exists(model_name:str, model_type:str, model_path:str, platformInfo:dict) -> tuple[bool, str, str]:
    try:
        # update (and remember) the selected index of the modelType select list of the "Model Downloader"
        message = f"Select the ModelType '{model_type}' to download"
        print(message)

        send_websocket_message('extend_ui_helper', {
            'cmd': 'selectModelType',
            'model_type': f'{model_type}', # e.g. "loras" or "vae"
            'message': message
        } )

        # get the SHA256 hash - used for compare against existing or downloaded model
        platform_name = platformInfo['platform_name'].lower() # currently "civitai" or "huggingface", but could be extendend
        print(f"\nPlatform: {platform_name}")

        match platform_name:
            case "huggingface":
                # get the platform-specific passed variables for "huggingface"
                hf_token = platformInfo['hf_token']
                repo_id = platformInfo['repo_id']
                hf_filename = platformInfo['hf_filename']

                #from huggingface_hub import hf_hub_download
                # lutzapps - to get SHA256 hash from model
                from huggingface_hub import (
                    # HfApi, # optional when not calling globally
                    get_paths_info #list_files_info #DEPRECATED/MISSING: list_files_info => get_paths_info
                )
                from huggingface_hub.hf_api import (
                    RepoFile, RepoFolder, BlobLfsInfo
                )

                ## optionally configure a HfApi client instead of calling globally
                # hf_api = HfApi(
                #     endpoint = "https://huggingface.co", # can be a Private Hub endpoint
                #     token = hf_token, # token is not persisted on the machine
                # )

                print(f"getting SHA256 Hash for '{model_name}' from repo {repo_id}/{hf_filename}")
                # HfApi.list_files_info deprecated -> HfApi.get_paths_info (runs into exception, as connot be imported as missing)
                #files_info = hf_api.list_files_info(repo_id, hf_filename, expand=True)
                #paths_info = hf_api.get_paths_info(repo_id, hf_filename, expand=True) # use via HfApi
                paths_info = get_paths_info(repo_id, hf_filename, expand=True) # use global (works fine)

                repo_file = paths_info[0] # RepoFile or RepoFolder class instance
                # check for RepoFolder or NON-LFS
                if isinstance(repo_file, RepoFolder):
                    raise NotImplementedError("Downloading a folder is not implemented.")
                if not repo_file.lfs:
                    raise NotImplementedError("Copying a non-LFS file is not implemented.")

                lfs = repo_file.lfs # BlobLfsInfo class instance
                download_sha256_hash = lfs.sha256.upper()

                print(f"Metadata from RepoFile LFS '{repo_file.rfilename}'")
                print(f"SHA256: {download_sha256_hash}")

            case "civitai":
                # get the platform-specific passed variables for "civitai"
                civitai_file = platformInfo['civitai_file'] # civitai_file metadata dictionary

                # get the SHA256 hash - used for compare against existing or downloaded model
                download_sha256_hash = civitai_file['hashes']['SHA256'] # civitai_file = passed file

        ### END platform specific code

        # check if model file already exists
        if not os.path.exists(model_path):
            message = f"No local model '{os.path.basename(model_path)}' installed"
            print(message)

            return download_sha256_hash, False, message

        message = f"Model already exists: {os.path.basename(model_path)}, SHA256 check..."
        print(message)

        send_websocket_message('model_download_progress', {
            'percentage': 0, # ugly
            'stage': 'Downloading',
            'message': message
        })

        # check if existing model is ident with model to download
        # this can *take a while* for big models, but even better than to unnecessarily redownload the model
        successfull_HashGeneration, model_sha256_hash = get_sha256_hash_from_file(model_path)
        # if NOT successful, the hash contains the Exception
        print(f"SHA256 hash generated from local file: '{model_path}'\n{model_sha256_hash}")

        if successfull_HashGeneration and model_sha256_hash == download_sha256_hash:
            message = f"Existing and ident model aleady found for '{os.path.basename(model_path)}'"
            print(message)

            send_websocket_message('model_download_progress', {
                'percentage': 100,
                'stage': 'Complete',
                'message': message
            })

            return download_sha256_hash, successfull_HashGeneration, message

        else:
            if successfull_HashGeneration: # the generated SHA256 file model Hash did not match against the metadata hash
                message = f"Local installed model '{os.path.basename(model_path)}' has DIFFERENT \nSHA256: {model_sha256_hash}"
                print(message)

                return download_sha256_hash, False, message

            else: # NOT successful, the hash contains the Exception
                error_msg = model_sha256_hash
                error_msg = f"Exception occured while generating the SHA256 hash for '{model_path}':\n{error_msg}"
                print(error_msg)

    except Exception as e:
        error_msg = f"Exception when downloading from {platform_name}: {str(e)}"

    return "", False, error_msg # hash, identfile, message
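For the 'civitai' branch above, the civitai_file dictionary (including its 'hashes'['SHA256'] entry) comes from the CivitAI REST API. A sketch of fetching it directly; the endpoint and field names follow the public CivitAI API and are assumptions of this example, not code from the commit:

import requests

def get_civitai_version_file(version_id, civitai_token):
    # fetch the file metadata (including hashes) for one model version (assumed endpoint)
    headers = {'Authorization': f'Bearer {civitai_token}'}
    response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}", headers=headers)
    response.raise_for_status()
    files = response.json().get('files', [])
    return files[0] if files else None  # same shape as 'civitai_file' above, e.g. file['hashes']['SHA256']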
# lutzapps - added SHA256 checks for already existing ident and downloaded HuggingFace model
def download_huggingface_model(url, model_name, model_type, repo_id, hf_filename, hf_folder_name, hf_branch_name, hf_token=None):
    try:
        from huggingface_hub import hf_hub_download

@@ -161,6 +323,23 @@ def download_huggingface_model(url, model_name, model_type, send_websocket_messa
            model_name = hf_filename

        model_path = os.path.join(SHARED_MODELS_DIR, model_type, model_name)

        platformInfo = {
            "platform_name": 'huggingface',
            "hf_token": hf_token,
            "repo_id": repo_id,
            "hf_filename": hf_filename
        }
        # call shared function for "huggingface" and "civitai" for SHA256 support and "Model Downloader UI" extended support
        download_sha256_hash, found_ident_local_model, message = get_modelfile_hash_and_ident_existing_modelfile_exists(
            model_name, model_type, model_path, # pass local workspace vars, then platform specific vars as dictionary
            platformInfo) # [str, bool, str]

        if found_ident_local_model:
            return True, message

        # model_path does NOT exist - run with original code

        os.makedirs(os.path.dirname(model_path), exist_ok=True)

        send_websocket_message('model_download_progress', {
@@ -174,26 +353,26 @@ def download_huggingface_model(url, model_name, model_type, send_websocket_messa
            'filename': hf_filename,
            'subfolder': hf_folder_name,
            'revision': hf_branch_name,
            'local_dir': os.path.dirname(model_path),
            'local_dir_use_symlinks': False
            'local_dir': os.path.dirname(model_path)
            #'local_dir_use_symlinks': False # deprecated, should be removed
        }
        if hf_token:
            kwargs['token'] = hf_token

        local_file = hf_hub_download(**kwargs)
        file_path = hf_hub_download(**kwargs) ### HF_DOWNLOAD_START
        ### HF_DOWNLOAD COMPLETE

        send_websocket_message('model_download_progress', {
            'percentage': 100,
            'stage': 'Complete',
            'message': f'Download complete: {model_name}'
        })

        return True, f"Successfully downloaded {model_name} from Hugging Face"
        # SHA256 Hash checks of downloaded modelfile against its metadata hash
        # call shared function for "huggingface" and "civitai" for SHA256 support and "Model Downloader UI" extended support
        return check_downloaded_modelfile(file_path, download_sha256_hash, "huggingface") # [bool, str]

    except Exception as e:
        return False, f"Error downloading from Hugging Face: {str(e)}"

def download_file(url, filepath, send_websocket_message, headers=None):
        return False, f"Exception when downloading from 'HuggingFace': {str(e)}"


# lutzapps - added SHA256 check for downloaded CivitAI model
def download_file(url, download_sha256_hash, file_path, headers=None):
    try:
        response = requests.get(url, stream=True, headers=headers)
        response.raise_for_status()
@@ -202,7 +381,7 @@ def download_file(url, filepath, send_websocket_message, headers=None):
        downloaded_size = 0
        start_time = time.time()

        with open(filepath, 'wb') as file:
        with open(file_path, 'wb') as file: ### CIVITAI_DOWNLOAD
            for data in response.iter_content(block_size):
                size = file.write(data)
                downloaded_size += size
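The progress updates emitted from the download loop above (percentage, speed, ETA) boil down to simple arithmetic over the counters visible in this hunk; a sketch using the same variable names:

import time

def progress_stats(downloaded_size: int, total_size: int, start_time: float) -> tuple[float, float, float]:
    # returns (percentage, bytes_per_second, eta_seconds) for the websocket progress message
    elapsed = time.time() - start_time
    percentage = (downloaded_size / total_size * 100) if total_size else 0.0
    speed = downloaded_size / elapsed if elapsed > 0 else 0.0
    eta = (total_size - downloaded_size) / speed if speed > 0 else 0.0
    return percentage, speed, eta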
@@ -222,17 +401,65 @@ def download_file(url, filepath, send_websocket_message, headers=None):
                    'message': f'Downloaded {format_size(downloaded_size)} / {format_size(total_size)}'
                })

        ### CIVITAI_DOWNLOAD COMPLETE

        # SHA256 Hash checks of downloaded modelfile against its metadata hash
        # call shared function for "huggingface" and "civitai" for SHA256 support and "Model Downloader UI" extended support
        return check_downloaded_modelfile(file_path, download_sha256_hash, "civitai") # [bool, str]

    except Exception as e:
        return False, f"Exception when downloading from CivitAI: {str(e)}"
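download_file() above writes the stream to disk and then hands the finished file to check_downloaded_modelfile() (next), which re-reads it to compute the hash. An alternative sketch, not part of the commit, that computes the SHA256 incrementally while streaming so large models do not have to be read twice:

import hashlib
import requests

def download_with_hash(url, file_path, expected_sha256, headers=None, block_size=1024 * 1024):
    sha256 = hashlib.sha256()
    with requests.get(url, stream=True, headers=headers, timeout=60) as response:
        response.raise_for_status()
        with open(file_path, 'wb') as f:
            for chunk in response.iter_content(block_size):
                f.write(chunk)
                sha256.update(chunk)  # hash while writing, no second pass over the file
    return sha256.hexdigest().upper() == expected_sha256.upper()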
# lutzapps - SHA256 Hash checks of downloaded modelfile against its metadata hash
# shared function for "huggingface" and "civitai" for SHA256 support and "Model Downloader UI" extended support
def check_downloaded_modelfile(model_path:str, download_sha256_hash:str, platform_name:str) -> tuple[bool, str]:
    try:
        # lutzapps - SHA256 check for download_sha256_hash
        if download_sha256_hash == "":

            return False, f"Downloaded model could not be verified with Metadata, no SHA256 hash found on '{platform_name}'"

        # check if downloaded local model file is ident with HF model download_sha256_hash metadata
        # this can take a while for big models, but even better than to have a corrupted model
        send_websocket_message('model_download_progress', {
            'percentage': 90, # change back from 100 to 90 (ugly)
            'stage': 'Complete', # leave it as 'Complete' as this "clears" SPEED/ETA Divs
            'message': f'SHA256 Check for Model: {os.path.basename(model_path)}'
        })

        successfull_HashGeneration, model_sha256_hash = get_sha256_hash_from_file(model_path)
        if successfull_HashGeneration and model_sha256_hash == download_sha256_hash:
            send_websocket_message('model_download_progress', {
                'percentage': 100,
                'stage': 'Complete',
                'message': f'Download complete: {os.path.basename(filepath)}'
                'message': f'Download complete: {os.path.basename(model_path)}'
            })

            return True, f"Successfully downloaded {os.path.basename(filepath)}"
            update_model_symlinks() # create symlinks for this new downloaded model for all installed apps

    except requests.RequestException as e:
        return False, f"Error downloading file: {str(e)}"
            return True, f"Successfully downloaded (SHA256 checked, and symlinked) '{os.path.basename(model_path)}' from {platform_name}"

        else:
            if successfull_HashGeneration: # the generated SHA256 file model Hash did not match against the metadata hash
                message = f"The downloaded model '{os.path.basename(model_path)}' has DIFFERENT \nSHA256: {model_sha256_hash} as stored on {platform_name}\nFile is possibly corrupted and was DELETED!"
                print(message)

                os.remove(model_path) # delete corrupted, downloaded file

                return download_sha256_hash, False, message

            else: # NOT successful, the hash contains the Exception
                error_msg = model_sha256_hash
                error_msg = f"Exception occured while generating the SHA256 hash for '{model_path}':\n{error_msg}"
                print(error_msg)

    except Exception as e:
        error_msg = f"Exception when downloading from {platform_name}: {str(e)}"

    return False, error_msg


# smaller helper functions
def get_civitai_file_size(url, token):
    headers = {'Authorization': f'Bearer {token}'}
    try:
@@ -9,8 +9,8 @@ from flask import jsonify
from utils.websocket_utils import send_websocket_message, active_websockets
from utils.app_configs import (get_app_configs)

### shared_models-v0.7 by ViennaFlying, Oct 25th 2024 ###
### dev-my-v0.3
### shared_models-v0.9.1 by lutzapps, Oct 30th 2024 ###
### dev-my-v0.6

# to run (and optionally DEBUG) this docker image "better-ai-launcher" in a local container on your own machine
# you need to define the ENV var "LOCAL_DEBUG" in the "VSCode Docker Extension"
@@ -289,31 +289,31 @@ def init_shared_models_folders(send_SocketMessage:bool=True):
    init_global_dict_from_file(SHARED_MODEL_FOLDERS, SHARED_MODEL_FOLDERS_FILE, "SHARED_MODEL_FOLDERS")

    if os.path.exists(SHARED_MODEL_FOLDERS_FILE) and send_SocketMessage:
        send_websocket_message('init_model_downloader_model_types', {
            'stage': 'Refresh',
        send_websocket_message('extend_ui_helper', {
            'cmd': 'refreshModelTypes',
            'message': 'New ModelTypes are available'
        })
        } )

    return

### "inline"-main() ###
# init the SHARED_MODEL_FOLDERS
init_shared_models_folders(False) # dont send a WS-Message for "Model Downloader" at module init to init/refresh the model_type list
init_shared_models_folders(False) # dont send a WS-Message for "Model Downloader" at module init, to init/refresh its modelType list

# ----------

# helper function called from "app.py" via WebUI
# helper function called from "app.py" via WebUI "Create Shared Folders" button on "Settings" tab
# ensures 'model_type' sub-folders for Model Mapping and the "Model Downloader" exists
# in the SHARED_MODELS_DIR (uses above initialized 'SHARED_MODEL_FOLDERS' dict)
def ensure_shared_models_folders():
    try:
        # init global module 'SHARED_MODEL_FOLDERS' dict: { 'model_type' (=subdir_names): 'app_model_dir'
        # from app code or from external JSON 'SHARED_MODEL_FOLDERS_FILE' file
        init_shared_models_folders()
        init_shared_models_folders(False) # (re-)read the SHARED_MODEL_FOLDERS_FILE again, if changed, but don't refresh modelTypes in "Model Downloader" yet

        print(f"(re-)creating 'shared_models' model type sub-folders for Apps and the 'Model Downloader' in folder '{SHARED_MODELS_DIR}':")

        # create the shared_models directory, if it doesn't exist
        # create the shared_models directory, if it doesn't exist yet
        os.makedirs(f"{SHARED_MODELS_DIR}/", exist_ok=True) # append slash to make sure folder is created

        # create a "__README.txt" file in the shared_models directory
@@ -324,8 +324,8 @@ def ensure_shared_models_folders():

        for model_type, model_type_description in SHARED_MODEL_FOLDERS.items():
            shared_model_folderpath = os.path.join(SHARED_MODELS_DIR, model_type)
            os.makedirs(os.path.dirname(f"{shared_model_folderpath}/"), exist_ok=True) # append trailing "/" to make sure the last sub-folder is created

            os.makedirs(os.path.dirname(f"{shared_model_folderpath}/"), exist_ok=True) # append trailing "/" to make sure the last sub-folder is created
            print(f"'{model_type}' Folder created for '{model_type_description}'")

            model_type_name = model_type
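Why the trailing "/" matters in the makedirs calls above: os.path.dirname() strips the last path component, so without the appended slash the deepest folder itself would not be created. A short sketch:

import os

path = "/workspace/shared_models/ckpt"
print(os.path.dirname(path))         # -> /workspace/shared_models        ('ckpt' is stripped)
print(os.path.dirname(f"{path}/"))   # -> /workspace/shared_models/ckpt   ('ckpt' is kept)
os.makedirs(os.path.dirname(f"{path}/"), exist_ok=True)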
@@ -343,6 +343,12 @@ def ensure_shared_models_folders():
            readme_file.write("Models directly downloaded into an app model folder will be\n")
            readme_file.write("automatically pulled back into the corresponding shared folder and relinked back!\n")

        # send a message for the "Model Downloader" to "refresh" its 'modelType' list
        send_websocket_message('extend_ui_helper', {
            'cmd': 'refreshModelTypes',
            'message': 'New ModelTypes are available'
        } )

        return jsonify({'status': 'success', 'message': 'Shared model folders created successfully.'})

    except Exception as e:
@@ -775,7 +781,7 @@ def create_model_symlinks(shared_model_folderpath:str, app_model_folderpath:str,
#
# SHARED_MODEL_APP_MAP_FILE (str): "_shared_models_map.json" (based in SHARED_MODELS_DIR)
# SHARED_MODEL_APP_MAP (dict) <- init from code, then write/read from path SHARED_MODEL_FOLDERS_FILE
def update_model_symlinks():
def update_model_symlinks() -> dict:
    try:
        print(f"Processing the master SHARED_MODELS_DIR: {SHARED_MODELS_DIR}")
        if not os.path.exists(SHARED_MODELS_DIR):
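update_model_symlinks() walks SHARED_MODELS_DIR and (re-)creates per-app symlinks, which is also what check_downloaded_modelfile() now triggers after a verified download. A minimal sketch of the per-file linking step; the folder names are illustrative and the real folder mapping comes from SHARED_MODEL_APP_MAP:

import os

def link_shared_model(shared_file: str, app_model_dir: str):
    # place (or refresh) a symlink to one shared model file inside an app's model folder
    os.makedirs(app_model_dir, exist_ok=True)
    link_path = os.path.join(app_model_dir, os.path.basename(shared_file))
    if os.path.islink(link_path):
        os.remove(link_path)               # drop a stale link before recreating it
    if not os.path.exists(link_path):
        os.symlink(shared_file, link_path)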