WhiteAiZ committed
Commit 25c9f87 · verified · 1 Parent(s): 2714bfd

Update modules/launch_utils.py

Files changed (1)
  1. modules/launch_utils.py +578 -578
modules/launch_utils.py CHANGED
@@ -1,578 +1,578 @@
# this scripts installs necessary requirements and launches main program in webui.py
import logging
import re
import subprocess
import os
import shutil
import sys
import importlib.util
import importlib.metadata
import platform
import json
import shlex
from functools import lru_cache
from typing import NamedTuple
from pathlib import Path
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
from modules import cmd_args, errors
from modules.paths_internal import script_path, extensions_dir, extensions_builtin_dir
from modules.timer import startup_timer
from modules import logging_config
from modules_forge import forge_version
from modules_forge.config import always_disabled_extensions


args, _ = cmd_args.parser.parse_known_args()
logging_config.setup_logging(args.loglevel)

python = sys.executable
git = os.environ.get('GIT', "git")
index_url = os.environ.get('INDEX_URL', "")
dir_repos = "repositories"

# Whether to default to printing command output
default_command_live = (os.environ.get('WEBUI_LAUNCH_LIVE_OUTPUT') == "1")

os.environ.setdefault('GRADIO_ANALYTICS_ENABLED', 'False')


def check_python_version():
    is_windows = platform.system() == "Windows"
    major = sys.version_info.major
    minor = sys.version_info.minor
    micro = sys.version_info.micro

    # Only show warning if Python version is < 3.7 or >= 3.14
    if not (major == 3 and 7 <= minor <= 13):
        errors.print_error_explanation(f"""
INCOMPATIBLE PYTHON VERSION

This program is tested with 3.10.6 Python, but you have {major}.{minor}.{micro}.
If you encounter an error with "RuntimeError: Couldn't install torch." message,
or any other error regarding unsuccessful package (library) installation,
please downgrade (or upgrade) to the latest version of 3.10 Python
and delete current Python and "venv" folder in WebUI's directory.

You can download 3.10 Python from here: https://www.python.org/downloads/release/python-3106/

{"Alternatively, use a binary release of WebUI: https://github.com/AUTOMATIC1111/stable-diffusion-webui/releases/tag/v1.0.0-pre" if is_windows else ""}

Use --skip-python-version-check to suppress this warning.
""")


@lru_cache()
def commit_hash():
    try:
        return subprocess.check_output([git, "-C", script_path, "rev-parse", "HEAD"], shell=False, encoding='utf8').strip()
    except Exception:
        return "<none>"


@lru_cache()
def git_tag_a1111():
    try:
        return subprocess.check_output([git, "-C", script_path, "describe", "--tags"], shell=False, encoding='utf8').strip()
    except Exception:
        try:

            changelog_md = os.path.join(script_path, "CHANGELOG.md")
            with open(changelog_md, "r", encoding="utf-8") as file:
                line = next((line.strip() for line in file if line.strip()), "<none>")
                line = line.replace("## ", "")
                return line
        except Exception:
            return "<none>"


def git_tag():
    return 'f' + forge_version.version + '-' + git_tag_a1111()


def run(command, desc=None, errdesc=None, custom_env=None, live: bool = default_command_live) -> str:
    if desc is not None:
        print(desc)

    run_kwargs = {
        "args": command,
        "shell": True,
        "env": os.environ if custom_env is None else custom_env,
        "encoding": 'utf8',
        "errors": 'ignore',
    }

    if not live:
        run_kwargs["stdout"] = run_kwargs["stderr"] = subprocess.PIPE

    result = subprocess.run(**run_kwargs)

    if result.returncode != 0:
        error_bits = [
            f"{errdesc or 'Error running command'}.",
            f"Command: {command}",
            f"Error code: {result.returncode}",
        ]
        if result.stdout:
            error_bits.append(f"stdout: {result.stdout}")
        if result.stderr:
            error_bits.append(f"stderr: {result.stderr}")
        raise RuntimeError("\n".join(error_bits))

    return (result.stdout or "")


def is_installed(package):
    try:
        dist = importlib.metadata.distribution(package)
    except importlib.metadata.PackageNotFoundError:
        try:
            spec = importlib.util.find_spec(package)
        except ModuleNotFoundError:
            return False

        return spec is not None

    return dist is not None


def repo_dir(name):
    return os.path.join(script_path, dir_repos, name)


def run_pip(command, desc=None, live=default_command_live):
    if args.skip_install:
        return

    index_url_line = f' --index-url {index_url}' if index_url != '' else ''
    return run(f'"{python}" -m pip {command} --prefer-binary{index_url_line}', desc=f"Installing {desc}", errdesc=f"Couldn't install {desc}", live=live)


def check_run_python(code: str) -> bool:
    result = subprocess.run([python, "-c", code], capture_output=True, shell=False)
    return result.returncode == 0


def git_fix_workspace(dir, name):
    run(f'"{git}" -C "{dir}" fetch --refetch --no-auto-gc', f"Fetching all contents for {name}", f"Couldn't fetch {name}", live=True)
    run(f'"{git}" -C "{dir}" gc --aggressive --prune=now', f"Pruning {name}", f"Couldn't prune {name}", live=True)
    return


def run_git(dir, name, command, desc=None, errdesc=None, custom_env=None, live: bool = default_command_live, autofix=True):
    try:
        return run(f'"{git}" -C "{dir}" {command}', desc=desc, errdesc=errdesc, custom_env=custom_env, live=live)
    except RuntimeError:
        if not autofix:
            raise

    print(f"{errdesc}, attempting autofix...")
    git_fix_workspace(dir, name)

    return run(f'"{git}" -C "{dir}" {command}', desc=desc, errdesc=errdesc, custom_env=custom_env, live=live)


def git_clone(url, dir, name, commithash=None):
    # TODO clone into temporary dir and move if successful

    if os.path.exists(dir):
        if commithash is None:
            return

        current_hash = run_git(dir, name, 'rev-parse HEAD', None, f"Couldn't determine {name}'s hash: {commithash}", live=False).strip()
        if current_hash == commithash:
            return

        if run_git(dir, name, 'config --get remote.origin.url', None, f"Couldn't determine {name}'s origin URL", live=False).strip() != url:
            run_git(dir, name, f'remote set-url origin "{url}"', None, f"Failed to set {name}'s origin URL", live=False)

        run_git(dir, name, 'fetch', f"Fetching updates for {name}...", f"Couldn't fetch {name}", autofix=False)

        run_git(dir, name, f'checkout {commithash}', f"Checking out commit for {name} with hash: {commithash}...", f"Couldn't checkout commit {commithash} for {name}", live=True)

        return

    try:
        run(f'"{git}" clone --config core.filemode=false "{url}" "{dir}"', f"Cloning {name} into {dir}...", f"Couldn't clone {name}", live=True)
    except RuntimeError:
        shutil.rmtree(dir, ignore_errors=True)
        raise

    if commithash is not None:
        run(f'"{git}" -C "{dir}" checkout {commithash}', None, "Couldn't checkout {name}'s hash: {commithash}")


def git_pull_recursive(dir):
    for subdir, _, _ in os.walk(dir):
        if os.path.exists(os.path.join(subdir, '.git')):
            try:
                output = subprocess.check_output([git, '-C', subdir, 'pull', '--autostash'])
                print(f"Pulled changes for repository in '{subdir}':\n{output.decode('utf-8').strip()}\n")
            except subprocess.CalledProcessError as e:
                print(f"Couldn't perform 'git pull' on repository in '{subdir}':\n{e.output.decode('utf-8').strip()}\n")


def version_check(commit):
    try:
        import requests
        commits = requests.get('https://api.github.com/repos/AUTOMATIC1111/stable-diffusion-webui/branches/master').json()
        if commit != "<none>" and commits['commit']['sha'] != commit:
            print("--------------------------------------------------------")
            print("| You are not up to date with the most recent release. |")
            print("| Consider running `git pull` to update. |")
            print("--------------------------------------------------------")
        elif commits['commit']['sha'] == commit:
            print("You are up to date with the most recent release.")
        else:
            print("Not a git clone, can't perform version check.")
    except Exception as e:
        print("version check failed", e)


def run_extension_installer(extension_dir):
    path_installer = os.path.join(extension_dir, "install.py")
    if not os.path.isfile(path_installer):
        return

    try:
        env = os.environ.copy()
        env['PYTHONPATH'] = f"{script_path}{os.pathsep}{env.get('PYTHONPATH', '')}"

        stdout = run(f'"{python}" "{path_installer}"', errdesc=f"Error running install.py for extension {extension_dir}", custom_env=env).strip()
        if stdout:
            print(stdout)
    except Exception as e:
        errors.report(str(e))


def list_extensions(settings_file):
    settings = {}

    try:
        with open(settings_file, "r", encoding="utf8") as file:
            settings = json.load(file)
    except FileNotFoundError:
        pass
    except Exception:
        errors.report(f'\nCould not load settings\nThe config file "{settings_file}" is likely corrupted\nIt has been moved to the "tmp/config.json"\nReverting config to default\n\n''', exc_info=True)
        os.replace(settings_file, os.path.join(script_path, "tmp", "config.json"))

    disabled_extensions = set(settings.get('disabled_extensions', []) + always_disabled_extensions)
    disable_all_extensions = settings.get('disable_all_extensions', 'none')

    if disable_all_extensions != 'none' or args.disable_extra_extensions or args.disable_all_extensions or not os.path.isdir(extensions_dir):
        return []

    return [x for x in os.listdir(extensions_dir) if x not in disabled_extensions]


def list_extensions_builtin(settings_file):
    settings = {}

    try:
        with open(settings_file, "r", encoding="utf8") as file:
            settings = json.load(file)
    except FileNotFoundError:
        pass
    except Exception:
        errors.report(f'\nCould not load settings\nThe config file "{settings_file}" is likely corrupted\nIt has been moved to the "tmp/config.json"\nReverting config to default\n\n''', exc_info=True)
        os.replace(settings_file, os.path.join(script_path, "tmp", "config.json"))

    disabled_extensions = set(settings.get('disabled_extensions', []))
    disable_all_extensions = settings.get('disable_all_extensions', 'none')

    if disable_all_extensions != 'none' or args.disable_extra_extensions or args.disable_all_extensions or not os.path.isdir(extensions_builtin_dir):
        return []

    return [x for x in os.listdir(extensions_builtin_dir) if x not in disabled_extensions]


def run_extensions_installers(settings_file):
    if not os.path.isdir(extensions_dir):
        return

    with startup_timer.subcategory("run extensions installers"):
        for dirname_extension in list_extensions(settings_file):
            logging.debug(f"Installing {dirname_extension}")

            path = os.path.join(extensions_dir, dirname_extension)

            if os.path.isdir(path):
                run_extension_installer(path)
                startup_timer.record(dirname_extension)

    if not os.path.isdir(extensions_builtin_dir):
        return

    with startup_timer.subcategory("run extensions_builtin installers"):
        for dirname_extension in list_extensions_builtin(settings_file):
            logging.debug(f"Installing {dirname_extension}")

            path = os.path.join(extensions_builtin_dir, dirname_extension)

            if os.path.isdir(path):
                run_extension_installer(path)
                startup_timer.record(dirname_extension)

    return


re_requirement = re.compile(r"\s*([-_a-zA-Z0-9]+)\s*(?:==\s*([-+_.a-zA-Z0-9]+))?\s*")


def requirements_met(requirements_file):
    """
    Does a simple parse of a requirements.txt file to determine if all rerqirements in it
    are already installed. Returns True if so, False if not installed or parsing fails.
    """

    import importlib.metadata
    import packaging.version

    with open(requirements_file, "r", encoding="utf8") as file:
        for line in file:
            if line.strip() == "":
                continue

            m = re.match(re_requirement, line)
            if m is None:
                return False

            package = m.group(1).strip()
            version_required = (m.group(2) or "").strip()

            if version_required == "":
                continue

            try:
                version_installed = importlib.metadata.version(package)
            except Exception:
                return False

            if packaging.version.parse(version_required) != packaging.version.parse(version_installed):
                return False

    return True

def get_cuda_comp_cap():
    """
    Returns float of CUDA Compute Capability using nvidia-smi
    Returns 0.0 on error
    CUDA Compute Capability
    ref https://developer.nvidia.com/cuda-gpus
    ref https://en.wikipedia.org/wiki/CUDA
    Blackwell consumer GPUs should return 12.0 data-center GPUs should return 10.0
    """
    try:
        return max(map(float, subprocess.check_output(['nvidia-smi', '--query-gpu=compute_cap', '--format=noheader,csv'], text=True).splitlines()))
    except Exception as _:
        return 0.0

def prepare_environment():
    torch_index_url = os.environ.get('TORCH_INDEX_URL', "https://download.pytorch.org/whl/cu128")
    torch_command = os.environ.get('TORCH_COMMAND', f"pip install torch==2.7.1 torchvision --extra-index-url {torch_index_url}")
    if args.use_ipex:
        if platform.system() == "Windows":
            # The "Nuullll/intel-extension-for-pytorch" wheels were built from IPEX source for Intel Arc GPU: https://github.com/intel/intel-extension-for-pytorch/tree/xpu-main
            # This is NOT an Intel official release so please use it at your own risk!!
            # See https://github.com/Nuullll/intel-extension-for-pytorch/releases/tag/v2.0.110%2Bxpu-master%2Bdll-bundle for details.
            #
            # Strengths (over official IPEX 2.0.110 windows release):
            #   - AOT build (for Arc GPU only) to eliminate JIT compilation overhead: https://github.com/intel/intel-extension-for-pytorch/issues/399
            #   - Bundles minimal oneAPI 2023.2 dependencies into the python wheels, so users don't need to install oneAPI for the whole system.
            #   - Provides a compatible torchvision wheel: https://github.com/intel/intel-extension-for-pytorch/issues/465
            # Limitation:
            #   - Only works for python 3.10
            url_prefix = "https://github.com/Nuullll/intel-extension-for-pytorch/releases/download/v2.0.110%2Bxpu-master%2Bdll-bundle"
            torch_command = os.environ.get('TORCH_COMMAND', f"pip install {url_prefix}/torch-2.0.0a0+gite9ebda2-cp310-cp310-win_amd64.whl {url_prefix}/torchvision-0.15.2a0+fa99a53-cp310-cp310-win_amd64.whl {url_prefix}/intel_extension_for_pytorch-2.0.110+gitc6ea20b-cp310-cp310-win_amd64.whl")
        else:
            # Using official IPEX release for linux since it's already an AOT build.
            # However, users still have to install oneAPI toolkit and activate oneAPI environment manually.
            # See https://intel.github.io/intel-extension-for-pytorch/index.html#installation for details.
            torch_index_url = os.environ.get('TORCH_INDEX_URL', "https://pytorch-extension.intel.com/release-whl/stable/xpu/us/")
            torch_command = os.environ.get('TORCH_COMMAND', f"pip install torch==2.0.0a0 intel-extension-for-pytorch==2.0.110+gitba7f6c1 --extra-index-url {torch_index_url}")
    requirements_file = os.environ.get('REQS_FILE', "requirements_versions.txt")
    requirements_file_for_npu = os.environ.get('REQS_FILE_FOR_NPU', "requirements_npu.txt")

    xformers_package = os.environ.get('XFORMERS_PACKAGE', '--index-url https://download.pytorch.org/whl/cu128 xformers')
    clip_package = os.environ.get('CLIP_PACKAGE', "https://github.com/openai/CLIP/archive/d50d76daa670286dd6cacf3bcd80b5e4823fc8e1.zip")
    openclip_package = os.environ.get('OPENCLIP_PACKAGE', "https://github.com/mlfoundations/open_clip/archive/bb6e834e9c70d9c27d0dc3ecedeebeaeb1ffad6b.zip")

    if sys.version_info.major == 3 and sys.version_info.minor == 13: #for some reason python 3.13 needs this library
        try:
            if not is_installed("audioop-lts"):
                run_pip("install audioop-lts", "audioop-lts")
        except Exception as e:
            print(f"Failed to install audioop-lts: {e}")

    assets_repo = os.environ.get('ASSETS_REPO', "https://github.com/AUTOMATIC1111/stable-diffusion-webui-assets.git")
-    stable_diffusion_repo = os.environ.get('STABLE_DIFFUSION_REPO', "hhttps://github.com/nlile/stablediffusion.git")
+    stable_diffusion_repo = os.environ.get('STABLE_DIFFUSION_REPO', "https://github.com/nlile/stablediffusion.git")
    stable_diffusion_xl_repo = os.environ.get('STABLE_DIFFUSION_XL_REPO', "https://github.com/Stability-AI/generative-models.git")
    k_diffusion_repo = os.environ.get('K_DIFFUSION_REPO', 'https://github.com/crowsonkb/k-diffusion.git')
    blip_repo = os.environ.get('BLIP_REPO', 'https://github.com/salesforce/BLIP.git')

    assets_commit_hash = os.environ.get('ASSETS_COMMIT_HASH', "6f7db241d2f8ba7457bac5ca9753331f0c266917")
    stable_diffusion_commit_hash = os.environ.get('STABLE_DIFFUSION_COMMIT_HASH', "47b6b607fdd31875c9279cd2f4f16b92e4ea958e")
    stable_diffusion_xl_commit_hash = os.environ.get('STABLE_DIFFUSION_XL_COMMIT_HASH', "45c443b316737a4ab6e40413d7794a7f5657c19f")
    k_diffusion_commit_hash = os.environ.get('K_DIFFUSION_COMMIT_HASH', "ab527a9a6d347f364e3d185ba6d714e22d80cb3c")
    blip_commit_hash = os.environ.get('BLIP_COMMIT_HASH', "48211a1594f1321b00f14c9f7a5b4813144b2fb9")

    try:
        # the existence of this file is a signal to webui.sh/bat that webui needs to be restarted when it stops execution
        os.remove(os.path.join(script_path, "tmp", "restart"))
        os.environ.setdefault('SD_WEBUI_RESTARTING', '1')
    except OSError:
        pass

    if not args.skip_python_version_check:
        check_python_version()

    startup_timer.record("checks")

    commit = commit_hash()
    tag = git_tag()
    startup_timer.record("git version info")

    print(f"Python {sys.version}")
    print(f"Version: {tag}")
    print(f"Commit hash: {commit}")

    if args.reinstall_torch or not is_installed("torch") or not is_installed("torchvision"):
        run(f'"{python}" -m {torch_command}', "Installing torch and torchvision", "Couldn't install torch", live=True)
        startup_timer.record("install torch")

    if args.use_ipex:
        args.skip_torch_cuda_test = True
    if not args.skip_torch_cuda_test and not check_run_python("import torch; assert torch.cuda.is_available()"):
        raise RuntimeError(
            'Torch is not able to use GPU; '
            'add --skip-torch-cuda-test to COMMANDLINE_ARGS variable to disable this check'
        )
    startup_timer.record("torch GPU test")

    if not is_installed("clip"):
        run_pip(f"install {clip_package}", "clip")
        startup_timer.record("install clip")

    if not is_installed("open_clip"):
        run_pip(f"install {openclip_package}", "open_clip")
        startup_timer.record("install open_clip")

    if (not is_installed("xformers") or args.reinstall_xformers) and args.xformers:
        run_pip(f"install -U -I --no-deps {xformers_package}", "xformers")
        startup_timer.record("install xformers")

    if not is_installed("ngrok") and args.ngrok:
        run_pip("install ngrok", "ngrok")
        startup_timer.record("install ngrok")

    os.makedirs(os.path.join(script_path, dir_repos), exist_ok=True)

    git_clone(assets_repo, repo_dir('stable-diffusion-webui-assets'), "assets", assets_commit_hash)
    git_clone(stable_diffusion_repo, repo_dir('stable-diffusion-stability-ai'), "Stable Diffusion", stable_diffusion_commit_hash)
    git_clone(stable_diffusion_xl_repo, repo_dir('generative-models'), "Stable Diffusion XL", stable_diffusion_xl_commit_hash)
    git_clone(k_diffusion_repo, repo_dir('k-diffusion'), "K-diffusion", k_diffusion_commit_hash)
    git_clone(blip_repo, repo_dir('BLIP'), "BLIP", blip_commit_hash)

    startup_timer.record("clone repositores")

    if not os.path.isfile(requirements_file):
        requirements_file = os.path.join(script_path, requirements_file)

    if not requirements_met(requirements_file):
        run_pip(f"install -r \"{requirements_file}\"", "requirements")
        startup_timer.record("install requirements")

    if not os.path.isfile(requirements_file_for_npu):
        requirements_file_for_npu = os.path.join(script_path, requirements_file_for_npu)

    if "torch_npu" in torch_command and not requirements_met(requirements_file_for_npu):
        run_pip(f"install -r \"{requirements_file_for_npu}\"", "requirements_for_npu")
        startup_timer.record("install requirements_for_npu")

    if not args.skip_install:
        run_extensions_installers(settings_file=args.ui_settings_file)

    if args.update_check:
        version_check(commit)
        startup_timer.record("check version")

    if args.update_all_extensions:
        git_pull_recursive(extensions_dir)
        startup_timer.record("update extensions")

    if "--exit" in sys.argv:
        print("Exiting because of --exit argument")
        exit(0)



def configure_for_tests():
    if "--api" not in sys.argv:
        sys.argv.append("--api")
    if "--ckpt" not in sys.argv:
        sys.argv.append("--ckpt")
        sys.argv.append(os.path.join(script_path, "test/test_files/empty.pt"))
    if "--skip-torch-cuda-test" not in sys.argv:
        sys.argv.append("--skip-torch-cuda-test")
    if "--disable-nan-check" not in sys.argv:
        sys.argv.append("--disable-nan-check")

    os.environ['COMMANDLINE_ARGS'] = ""


def configure_forge_reference_checkout(a1111_home: Path):
    """Set model paths based on an existing A1111 checkout."""
    class ModelRef(NamedTuple):
        arg_name: str
        relative_path: str

    refs = [
        ModelRef(arg_name="--ckpt-dir", relative_path="models/Stable-diffusion"),
        ModelRef(arg_name="--vae-dir", relative_path="models/VAE"),
        ModelRef(arg_name="--hypernetwork-dir", relative_path="models/hypernetworks"),
        ModelRef(arg_name="--embeddings-dir", relative_path="embeddings"),
        ModelRef(arg_name="--lora-dir", relative_path="models/Lora"),
        # Ref A1111 need to have sd-webui-controlnet installed.
        ModelRef(arg_name="--controlnet-dir", relative_path="models/ControlNet"),
        ModelRef(arg_name="--controlnet-preprocessor-models-dir", relative_path="extensions/sd-webui-controlnet/annotator/downloads"),
    ]

    for ref in refs:
        target_path = a1111_home / ref.relative_path
        if not target_path.exists():
            print(f"Path {target_path} does not exist. Skip setting {ref.arg_name}")
            continue

        if ref.arg_name in sys.argv:
            # Do not override existing dir setting.
            continue

        sys.argv.append(ref.arg_name)
        sys.argv.append(str(target_path))


def start():
    print(f"Launching {'API server' if '--nowebui' in sys.argv else 'Web UI'} with arguments: {shlex.join(sys.argv[1:])}")
    import webui
    if '--nowebui' in sys.argv:
        webui.api_only()
    else:
        webui.webui()

    from modules_forge import main_thread

    main_thread.loop()
    return


def dump_sysinfo():
    from modules import sysinfo
    import datetime

    text = sysinfo.get()
    filename = f"sysinfo-{datetime.datetime.utcnow().strftime('%Y-%m-%d-%H-%M')}.json"

    with open(filename, "w", encoding="utf8") as file:
        file.write(text)

    return filename