import configparser
import mimetypes
import shutil
import traceback
import folder_paths
import os
import sys
import threading
import locale
import subprocess  # don't remove this
import concurrent
import re
import nodes
import hashlib
import platform  # required for the platform.system() checks below

from tqdm.auto import tqdm
from urllib.parse import urlparse
import http.client
from datetime import datetime
from distutils.version import StrictVersion

try:
    import cm_global
except:
    glob_path = os.path.join(os.path.dirname(__file__), "glob")
    sys.path.append(glob_path)
    import cm_global

    print(f"[WARN] ComfyUI-Manager: Your ComfyUI version is outdated. Please update to the latest version.")


version = [2, 13, 1]
version_str = f"V{version[0]}.{version[1]}" + (f'.{version[2]}' if len(version) > 2 else '')
print(f"### Loading: ComfyUI-Manager ({version_str})")

comfy_ui_hash = "-"

cache_lock = threading.Lock()

pip_map = None


def get_installed_packages():
    global pip_map

    if pip_map is None:
        try:
            result = subprocess.check_output([sys.executable, '-m', 'pip', 'list'], universal_newlines=True)

            pip_map = {}
            for line in result.split('\n'):
                x = line.strip()
                if x:
                    y = line.split()
                    if y[0] == 'Package' or y[0].startswith('-'):
                        continue

                    pip_map[y[0]] = y[1]
        except subprocess.CalledProcessError as e:
            print(f"[ComfyUI-Manager] Failed to retrieve the information of installed pip packages.")
            return set()

    return pip_map


def clear_pip_cache():
    global pip_map
    pip_map = None


def is_blacklisted(name):
    name = name.strip()

    pattern = r'([^<>!=]+)([<>!=]=?)(.*)'
    match = re.search(pattern, name)

    if match:
        name = match.group(1)

    if name in cm_global.pip_downgrade_blacklist:
        pips = get_installed_packages()

        if match is None:
            if name in pips:
                return True
        elif match.group(2) in ['<=', '==', '<']:
            if name in pips:
                if StrictVersion(pips[name]) >= StrictVersion(match.group(3)):
                    return True

    return False


def handle_stream(stream, prefix):
    stream.reconfigure(encoding=locale.getpreferredencoding(), errors='replace')
    for msg in stream:
        if prefix == '[!]' and ('it/s]' in msg or 's/it]' in msg) and ('%|' in msg or 'it [' in msg):
            if msg.startswith('100%'):
                print('\r' + msg, end="", file=sys.stderr)
            else:
                print('\r' + msg[:-1], end="", file=sys.stderr)
        else:
            if prefix == '[!]':
                print(prefix, msg, end="", file=sys.stderr)
            else:
                print(prefix, msg, end="")


def run_script(cmd, cwd='.'):
    if len(cmd) > 0 and cmd[0].startswith("#"):
        print(f"[ComfyUI-Manager] Unexpected behavior: `{cmd}`")
        return 0

    process = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, bufsize=1)

    stdout_thread = threading.Thread(target=handle_stream, args=(process.stdout, ""))
    stderr_thread = threading.Thread(target=handle_stream, args=(process.stderr, "[!]"))

    stdout_thread.start()
    stderr_thread.start()

    stdout_thread.join()
    stderr_thread.join()

    return process.wait()


try:
    import git
except:
    my_path = os.path.dirname(__file__)
    requirements_path = os.path.join(my_path, "requirements.txt")

    print(f"## ComfyUI-Manager: installing dependencies")

    run_script([sys.executable, '-s', '-m', 'pip', 'install', '-r', requirements_path])

    try:
        import git
    except:
        print(f"## [ERROR] ComfyUI-Manager: Attempting to reinstall dependencies using an alternative method.")
        run_script([sys.executable, '-s', '-m', 'pip', 'install', '--user', '-r', requirements_path])

        try:
            import git
        except:
            print(f"## [ERROR] ComfyUI-Manager: Failed to install the GitPython package in the correct Python environment. Please install it manually in the appropriate environment. (You can seek help at https://app.element.io/#/room/%23comfyui_space%3Amatrix.org)")
    print(f"## ComfyUI-Manager: installing dependencies done.")

from git.remote import RemoteProgress

sys.path.append('../..')
from torchvision.datasets.utils import download_url

comfy_ui_required_revision = 1930
comfy_ui_required_commit_datetime = datetime(2024, 1, 24, 0, 0, 0)

comfy_ui_revision = "Unknown"
comfy_ui_commit_datetime = datetime(1900, 1, 1, 0, 0, 0)

comfy_path = os.path.dirname(folder_paths.__file__)
custom_nodes_path = os.path.join(comfy_path, 'custom_nodes')
js_path = os.path.join(comfy_path, "web", "extensions")

comfyui_manager_path = os.path.dirname(__file__)
cache_dir = os.path.join(comfyui_manager_path, '.cache')
local_db_model = os.path.join(comfyui_manager_path, "model-list.json")
local_db_alter = os.path.join(comfyui_manager_path, "alter-list.json")
local_db_custom_node_list = os.path.join(comfyui_manager_path, "custom-node-list.json")
local_db_extension_node_mappings = os.path.join(comfyui_manager_path, "extension-node-map.json")
git_script_path = os.path.join(os.path.dirname(__file__), "git_helper.py")

components_path = os.path.join(comfyui_manager_path, 'components')
startup_script_path = os.path.join(comfyui_manager_path, "startup-scripts")
config_path = os.path.join(os.path.dirname(__file__), "config.ini")
cached_config = None

channel_list_path = os.path.join(comfyui_manager_path, 'channels.list')
channel_dict = None
channel_list = None

from comfy.cli_args import args
import latent_preview


def get_channel_dict():
    global channel_dict

    if channel_dict is None:
        channel_dict = {}

        if not os.path.exists(channel_list_path):
            shutil.copy(channel_list_path + '.template', channel_list_path)

        with open(os.path.join(comfyui_manager_path, 'channels.list'), 'r') as file:
            channels = file.read()
            for x in channels.split('\n'):
                channel_info = x.split("::")
                if len(channel_info) == 2:
                    channel_dict[channel_info[0]] = channel_info[1]

    return channel_dict


def get_channel_list():
    global channel_list

    if channel_list is None:
        channel_list = []
        for k, v in get_channel_dict().items():
            channel_list.append(f"{k}::{v}")

    return channel_list


def write_config():
    config = configparser.ConfigParser()
    config['default'] = {
        'preview_method': get_current_preview_method(),
        'badge_mode': get_config()['badge_mode'],
        'git_exe': get_config()['git_exe'],
        'channel_url': get_config()['channel_url'],
        'share_option': get_config()['share_option'],
        'bypass_ssl': get_config()['bypass_ssl'],
        "file_logging": get_config()['file_logging'],
        'default_ui': get_config()['default_ui'],
        'component_policy': get_config()['component_policy'],
        'double_click_policy': get_config()['double_click_policy'],
        'windows_selector_event_loop_policy': get_config()['windows_selector_event_loop_policy'],
        'model_download_by_agent': get_config()['model_download_by_agent'],
        'downgrade_blacklist': get_config()['downgrade_blacklist']
    }
    with open(config_path, 'w') as configfile:
        config.write(configfile)


def read_config():
    try:
        config = configparser.ConfigParser()
        config.read(config_path)
        default_conf = config['default']

        return {
            'preview_method': default_conf['preview_method'] if 'preview_method' in default_conf else get_current_preview_method(),
            'badge_mode': default_conf['badge_mode'] if 'badge_mode' in default_conf else 'none',
            'git_exe': default_conf['git_exe'] if 'git_exe' in default_conf else '',
            'channel_url': default_conf['channel_url'] if 'channel_url' in default_conf else 'https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main',
            'share_option': default_conf['share_option'] if 'share_option' in default_conf else 'all',
            'bypass_ssl': default_conf['bypass_ssl'] if 'bypass_ssl' in default_conf else False,
            'file_logging': default_conf['file_logging'] if 'file_logging' in default_conf else True,
            'default_ui': default_conf['default_ui'] if 'default_ui' in default_conf else 'none',
            'component_policy': default_conf['component_policy'] if 'component_policy' in default_conf else 'workflow',
            'double_click_policy': default_conf['double_click_policy'] if 'double_click_policy' in default_conf else 'copy-all',
            'windows_selector_event_loop_policy': default_conf['windows_selector_event_loop_policy'] if 'windows_selector_event_loop_policy' in default_conf else False,
            'model_download_by_agent': default_conf['model_download_by_agent'] if 'model_download_by_agent' in default_conf else False,
            'downgrade_blacklist': default_conf['downgrade_blacklist'] if 'downgrade_blacklist' in default_conf else '',
        }
    except Exception:
        return {
            'preview_method': get_current_preview_method(),
            'badge_mode': 'none',
            'git_exe': '',
            'channel_url': 'https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main',
            'share_option': 'all',
            'bypass_ssl': False,
            'file_logging': True,
            'default_ui': 'none',
            'component_policy': 'workflow',
            'double_click_policy': 'copy-all',
            'windows_selector_event_loop_policy': False,
            'model_download_by_agent': False,
            'downgrade_blacklist': ''
        }


def get_config():
    global cached_config

    if cached_config is None:
        cached_config = read_config()

    return cached_config


def get_current_preview_method():
    if args.preview_method == latent_preview.LatentPreviewMethod.Auto:
        return "auto"
    elif args.preview_method == latent_preview.LatentPreviewMethod.Latent2RGB:
        return "latent2rgb"
    elif args.preview_method == latent_preview.LatentPreviewMethod.TAESD:
        return "taesd"
    else:
        return "none"


def set_preview_method(method):
    if method == 'auto':
        args.preview_method = latent_preview.LatentPreviewMethod.Auto
    elif method == 'latent2rgb':
        args.preview_method = latent_preview.LatentPreviewMethod.Latent2RGB
    elif method == 'taesd':
        args.preview_method = latent_preview.LatentPreviewMethod.TAESD
    else:
        args.preview_method = latent_preview.LatentPreviewMethod.NoPreviews

    get_config()['preview_method'] = args.preview_method


set_preview_method(get_config()['preview_method'])


def set_badge_mode(mode):
    get_config()['badge_mode'] = mode


def set_default_ui_mode(mode):
    get_config()['default_ui'] = mode


def set_component_policy(mode):
    get_config()['component_policy'] = mode


def set_double_click_policy(mode):
    get_config()['double_click_policy'] = mode


def try_install_script(url, repo_path, install_cmd):
    if (len(install_cmd) > 0 and install_cmd[0].startswith('#')) or (platform.system() == "Windows" and comfy_ui_commit_datetime.date() >= comfy_ui_required_commit_datetime.date()):
        if not os.path.exists(startup_script_path):
            os.makedirs(startup_script_path)

        script_path = os.path.join(startup_script_path, "install-scripts.txt")
        with open(script_path, "a") as file:
            obj = [repo_path] + install_cmd
            file.write(f"{obj}\n")

        return True
    else:
        if len(install_cmd) == 5 and install_cmd[2:4] == ['pip', 'install']:
            if is_blacklisted(install_cmd[4]):
                print(f"[ComfyUI-Manager] skip black listed pip installation: '{install_cmd[4]}'")
                return True

        print(f"\n## ComfyUI-Manager: EXECUTE => {install_cmd}")
        code = run_script(install_cmd, cwd=repo_path)

        if platform.system() == "Windows":
            try:
                if comfy_ui_commit_datetime.date() < comfy_ui_required_commit_datetime.date():
print("\n\n###################################################################") print(f"[WARN] ComfyUI-Manager: Your ComfyUI version ({comfy_ui_revision})[{comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version.") print(f"[WARN] The extension installation feature may not work properly in the current installed ComfyUI version on Windows environment.") print("###################################################################\n\n") except: pass if code != 0: if url is None: url = os.path.dirname(repo_path) print(f"install script failed: {url}") return False def print_comfyui_version(): global comfy_ui_revision global comfy_ui_commit_datetime global comfy_ui_hash is_detached = False try: repo = git.Repo(os.path.dirname(folder_paths.__file__)) comfy_ui_revision = len(list(repo.iter_commits('HEAD'))) comfy_ui_hash = repo.head.commit.hexsha cm_global.variables['comfyui.revision'] = comfy_ui_revision comfy_ui_commit_datetime = repo.head.commit.committed_datetime cm_global.variables['comfyui.commit_datetime'] = comfy_ui_commit_datetime is_detached = repo.head.is_detached current_branch = repo.active_branch.name try: if comfy_ui_commit_datetime.date() < comfy_ui_required_commit_datetime.date(): print(f"\n\n## [WARN] ComfyUI-Manager: Your ComfyUI version ({comfy_ui_revision})[{comfy_ui_commit_datetime.date()}] is too old. Please update to the latest version. ##\n\n") except: pass # process on_revision_detected --> if 'cm.on_revision_detected_handler' in cm_global.variables: for k, f in cm_global.variables['cm.on_revision_detected_handler']: try: f(comfy_ui_revision) except Exception: print(f"[ERROR] '{k}' on_revision_detected_handler") traceback.print_exc() del cm_global.variables['cm.on_revision_detected_handler'] else: print(f"[ComfyUI-Manager] Some features are restricted due to your ComfyUI being outdated.") # <-- if current_branch == "master": print(f"### ComfyUI Revision: {comfy_ui_revision} [{comfy_ui_hash[:8]}] | Released on '{comfy_ui_commit_datetime.date()}'") else: print(f"### ComfyUI Revision: {comfy_ui_revision} on '{current_branch}' [{comfy_ui_hash[:8]}] | Released on '{comfy_ui_commit_datetime.date()}'") except: if is_detached: print(f"### ComfyUI Revision: {comfy_ui_revision} [{comfy_ui_hash[:8]}] *DETACHED | Released on '{comfy_ui_commit_datetime.date()}'") else: print("### ComfyUI Revision: UNKNOWN (The currently installed ComfyUI is not a Git repository)") print_comfyui_version() # use subprocess to avoid file system lock by git (Windows) def __win_check_git_update(path, do_fetch=False, do_update=False): if do_fetch: command = [sys.executable, git_script_path, "--fetch", path] elif do_update: command = [sys.executable, git_script_path, "--pull", path] else: command = [sys.executable, git_script_path, "--check", path] process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output, _ = process.communicate() output = output.decode('utf-8').strip() if 'detected dubious' in output: # fix and try again safedir_path = path.replace('\\', '/') try: print(f"[ComfyUI-Manager] Try fixing 'dubious repository' error on '{safedir_path}' repo") process = subprocess.Popen(['git', 'config', '--global', '--add', 'safe.directory', safedir_path], stdout=subprocess.PIPE, stderr=subprocess.PIPE) output, _ = process.communicate() process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE) output, _ = process.communicate() output = output.decode('utf-8').strip() except Exception: print(f'[ComfyUI-Manager] failed to fixing') if 
        if 'detected dubious' in output:
            print(f'\n[ComfyUI-Manager] Failed to fix the repository setup. Please execute this command on cmd: \n'
                  f'-----------------------------------------------------------------------------------------\n'
                  f'git config --global --add safe.directory "{safedir_path}"\n'
                  f'-----------------------------------------------------------------------------------------\n')

    if do_update:
        if "CUSTOM NODE PULL: Success" in output:
            process.wait()
            print(f"\rUpdated: {path}")
            return True, True    # updated
        elif "CUSTOM NODE PULL: None" in output:
            process.wait()
            return False, True   # there is no update
        else:
            print(f"\rUpdate error: {path}")
            process.wait()
            return False, False  # update failed
    else:
        if "CUSTOM NODE CHECK: True" in output:
            process.wait()
            return True, True
        elif "CUSTOM NODE CHECK: False" in output:
            process.wait()
            return False, True
        else:
            print(f"\rFetch error: {path}")
            print(f"\n{output}\n")
            process.wait()
            return False, True


def __win_check_git_pull(path):
    command = [sys.executable, git_script_path, "--pull", path]
    process = subprocess.Popen(command)
    process.wait()


def switch_to_default_branch(repo):
    show_result = repo.git.remote("show", "origin")
    matches = re.search(r"\s*HEAD branch:\s*(.*)", show_result)
    if matches:
        default_branch = matches.group(1)
        repo.git.checkout(default_branch)


def git_repo_has_updates(path, do_fetch=False, do_update=False):
    if do_fetch:
        print(f"\x1b[2K\rFetching: {path}", end='')
    elif do_update:
        print(f"\x1b[2K\rUpdating: {path}", end='')

    # Check if the path is a git repository
    if not os.path.exists(os.path.join(path, '.git')):
        raise ValueError('Not a git repository')

    if platform.system() == "Windows":
        updated, success = __win_check_git_update(path, do_fetch, do_update)
        if updated and success:
            execute_install_script(None, path, lazy_mode=True)
        return updated, success
    else:
        # Fetch the latest commits from the remote repository
        repo = git.Repo(path)

        current_branch = repo.active_branch
        branch_name = current_branch.name

        remote_name = 'origin'
        remote = repo.remote(name=remote_name)

        # Get the current commit hash
        commit_hash = repo.head.commit.hexsha

        if do_fetch or do_update:
            remote.fetch()

        if do_update:
            if repo.head.is_detached:
                switch_to_default_branch(repo)

            remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha

            if commit_hash == remote_commit_hash:
                repo.close()
                return False, True

            try:
                remote.pull()
                repo.git.submodule('update', '--init', '--recursive')
                new_commit_hash = repo.head.commit.hexsha

                if commit_hash != new_commit_hash:
                    execute_install_script(None, path)
                    print(f"\x1b[2K\rUpdated: {path}")
                    return True, True
                else:
                    return False, False
            except Exception as e:
                print(f"\nUpdating failed: {path}\n{e}", file=sys.stderr)
                return False, False

        if repo.head.is_detached:
            repo.close()
            return True, True

        # Get commit hash of the remote branch
        current_branch = repo.active_branch
        branch_name = current_branch.name

        remote_commit_hash = repo.refs[f'{remote_name}/{branch_name}'].object.hexsha

        # Compare the commit hashes to determine if the local repository is behind the remote repository
        if commit_hash != remote_commit_hash:
            # Get the commit dates
            commit_date = repo.head.commit.committed_datetime
            remote_commit_date = repo.refs[f'{remote_name}/{branch_name}'].object.committed_datetime

            # Compare the commit dates to determine if the local repository is behind the remote repository
            if commit_date < remote_commit_date:
                repo.close()
                return True, True

        repo.close()

    return False, True


def git_pull(path):
    # Check if the path is a git repository
    if not os.path.exists(os.path.join(path, '.git')):
        raise ValueError('Not a git repository')

    # Pull the latest changes from the remote repository
    if platform.system() == "Windows":
        return __win_check_git_pull(path)
    else:
        repo = git.Repo(path)

        if repo.is_dirty():
            repo.git.stash()

        if repo.head.is_detached:
            switch_to_default_branch(repo)

        current_branch = repo.active_branch
        remote_name = current_branch.tracking_branch().remote_name
        remote = repo.remote(name=remote_name)

        remote.pull()
        repo.git.submodule('update', '--init', '--recursive')

        repo.close()

    return True


async def get_data(uri, silent=False):
    if not silent:
        print(f"FETCH DATA from: {uri}")

    if uri.startswith("http"):
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.get(uri) as resp:
                json_text = await resp.text()
    else:
        with cache_lock:
            with open(uri, "r", encoding="utf-8") as f:
                json_text = f.read()

    json_obj = json.loads(json_text)
    return json_obj


async def populate_github_stats(json_obj, filename, silent=False):
    uri = os.path.join(comfyui_manager_path, filename)
    with open(uri, "r", encoding='utf-8') as f:
        github_stats = json.load(f)
        if 'custom_nodes' in json_obj:
            for i, node in enumerate(json_obj['custom_nodes']):
                url = node['reference']
                if url in github_stats:
                    json_obj['custom_nodes'][i]['stars'] = github_stats[url]['stars']
                    json_obj['custom_nodes'][i]['last_update'] = github_stats[url]['last_update']
                else:
                    json_obj['custom_nodes'][i]['stars'] = -1
                    json_obj['custom_nodes'][i]['last_update'] = -1
        return json_obj


def setup_js():
    import nodes
    js_dest_path = os.path.join(js_path, "comfyui-manager")

    if hasattr(nodes, "EXTENSION_WEB_DIRS"):
        if os.path.exists(js_dest_path):
            shutil.rmtree(js_dest_path)
    else:
        print(f"[WARN] ComfyUI-Manager: Your ComfyUI version is outdated. Please update to the latest version.")
        # setup js
        if not os.path.exists(js_dest_path):
            os.makedirs(js_dest_path)
        js_src_path = os.path.join(comfyui_manager_path, "js", "comfyui-manager.js")

        print(f"### ComfyUI-Manager: Copy .js from '{js_src_path}' to '{js_dest_path}'")
        shutil.copy(js_src_path, js_dest_path)


setup_js()


def setup_environment():
    git_exe = get_config()['git_exe']

    if git_exe != '':
        git.Git().update_environment(GIT_PYTHON_GIT_EXECUTABLE=git_exe)


setup_environment()

# Expand Server api

import server
from aiohttp import web
import aiohttp
import json
import zipfile
import urllib.request


def simple_hash(input_string):
    hash_value = 0
    for char in input_string:
        hash_value = (hash_value * 31 + ord(char)) % (2**32)

    return hash_value


def is_file_created_within_one_day(file_path):
    if not os.path.exists(file_path):
        return False

    file_creation_time = os.path.getctime(file_path)
    current_time = datetime.now().timestamp()
    time_difference = current_time - file_creation_time

    return time_difference <= 86400


async def get_data_by_mode(mode, filename):
    try:
        if mode == "local":
            uri = os.path.join(comfyui_manager_path, filename)
            json_obj = await get_data(uri)
        else:
            uri = get_config()['channel_url'] + '/' + filename
            cache_uri = str(simple_hash(uri)) + '_' + filename
            cache_uri = os.path.join(cache_dir, cache_uri)

            if mode == "cache":
                if is_file_created_within_one_day(cache_uri):
                    json_obj = await get_data(cache_uri)
                else:
                    json_obj = await get_data(uri)
                    with cache_lock:
                        with open(cache_uri, "w", encoding='utf-8') as file:
                            json.dump(json_obj, file, indent=4, sort_keys=True)
            else:
                uri = get_config()['channel_url'] + '/' + filename
                json_obj = await get_data(uri)
                with cache_lock:
                    with open(cache_uri, "w", encoding='utf-8') as file:
                        json.dump(json_obj, file, indent=4, sort_keys=True)
    except Exception as e:
        print(f"[ComfyUI-Manager] Due to a network error, switching to local mode.\n=> {filename}\n=> {e}")
        uri = os.path.join(comfyui_manager_path, filename)
        json_obj = await get_data(uri)

    return json_obj


def get_model_dir(data):
    if data['save_path'] != 'default':
        if '..' in data['save_path'] or data['save_path'].startswith('/'):
            print(f"[WARN] '{data['save_path']}' is not an allowed path. So it will be saved into 'models/etc'.")
            base_model = "etc"
        else:
            if data['save_path'].startswith("custom_nodes"):
                base_model = os.path.join(comfy_path, data['save_path'])
            else:
                base_model = os.path.join(folder_paths.models_dir, data['save_path'])
    else:
        model_type = data['type']
        if model_type == "checkpoints":
            base_model = folder_paths.folder_names_and_paths["checkpoints"][0][0]
        elif model_type == "unclip":
            base_model = folder_paths.folder_names_and_paths["checkpoints"][0][0]
        elif model_type == "VAE":
            base_model = folder_paths.folder_names_and_paths["vae"][0][0]
        elif model_type == "lora":
            base_model = folder_paths.folder_names_and_paths["loras"][0][0]
        elif model_type == "T2I-Adapter":
            base_model = folder_paths.folder_names_and_paths["controlnet"][0][0]
        elif model_type == "T2I-Style":
            base_model = folder_paths.folder_names_and_paths["controlnet"][0][0]
        elif model_type == "controlnet":
            base_model = folder_paths.folder_names_and_paths["controlnet"][0][0]
        elif model_type == "clip_vision":
            base_model = folder_paths.folder_names_and_paths["clip_vision"][0][0]
        elif model_type == "gligen":
            base_model = folder_paths.folder_names_and_paths["gligen"][0][0]
        elif model_type == "upscale":
            base_model = folder_paths.folder_names_and_paths["upscale_models"][0][0]
        elif model_type == "embeddings":
            base_model = folder_paths.folder_names_and_paths["embeddings"][0][0]
        else:
            base_model = "etc"

    return base_model


def get_model_path(data):
    base_model = get_model_dir(data)
    return os.path.join(base_model, data['filename'])


def check_a_custom_node_installed(item, do_fetch=False, do_update_check=True, do_update=False):
    item['installed'] = 'None'

    if item['install_type'] == 'git-clone' and len(item['files']) == 1:
        url = item['files'][0]

        if url.endswith("/"):
            url = url[:-1]

        dir_name = os.path.splitext(os.path.basename(url))[0].replace(".git", "")
        dir_path = os.path.join(custom_nodes_path, dir_name)
        if os.path.exists(dir_path):
            try:
                item['installed'] = 'True'  # default

                if cm_global.try_call(api="cm.is_import_failed_extension", name=dir_name):
                    item['installed'] = 'Fail'

                if do_update_check:
                    update_state, success = git_repo_has_updates(dir_path, do_fetch, do_update)
                    if (do_update_check or do_update) and update_state:
                        item['installed'] = 'Update'
                    elif do_update and not success:
                        item['installed'] = 'Fail'
            except:
                if cm_global.try_call(api="cm.is_import_failed_extension", name=dir_name):
                    item['installed'] = 'Fail'
                else:
                    item['installed'] = 'True'

        elif os.path.exists(dir_path + ".disabled"):
            item['installed'] = 'Disabled'
        else:
            item['installed'] = 'False'

    elif item['install_type'] == 'copy' and len(item['files']) == 1:
        dir_name = os.path.basename(item['files'][0])

        if item['files'][0].endswith('.py'):
            base_path = custom_nodes_path
        elif 'js_path' in item:
            base_path = os.path.join(js_path, item['js_path'])
        else:
            base_path = js_path

        file_path = os.path.join(base_path, dir_name)
        if os.path.exists(file_path):
            if cm_global.try_call(api="cm.is_import_failed_extension", name=dir_name):
                item['installed'] = 'Fail'
            else:
                item['installed'] = 'True'
        elif os.path.exists(file_path + ".disabled"):
            item['installed'] = 'Disabled'
        else:
            item['installed'] = 'False'


def check_custom_nodes_installed(json_obj, do_fetch=False, do_update_check=True, do_update=False):
    if do_fetch:
        print("Start fetching...", end="")
    elif do_update:
        print("Start updating...", end="")
    elif do_update_check:
        print("Start update check...", end="")

    def process_custom_node(item):
        check_a_custom_node_installed(item, do_fetch, do_update_check, do_update)
    with concurrent.futures.ThreadPoolExecutor(4) as executor:
        for item in json_obj['custom_nodes']:
            executor.submit(process_custom_node, item)

    if do_fetch:
        print(f"\x1b[2K\rFetching done.")
    elif do_update:
        update_exists = any(item['installed'] == 'Update' for item in json_obj['custom_nodes'])
        if update_exists:
            print(f"\x1b[2K\rUpdate done.")
        else:
            print(f"\x1b[2K\rAll extensions are already up-to-date.")
    elif do_update_check:
        print(f"\x1b[2K\rUpdate check done.")


def nickname_filter(json_obj):
    preemptions_map = {}

    for k, x in json_obj.items():
        if 'preemptions' in x[1]:
            for y in x[1]['preemptions']:
                preemptions_map[y] = k
        elif k.endswith("/ComfyUI"):
            for y in x[0]:
                preemptions_map[y] = k

    updates = {}

    for k, x in json_obj.items():
        removes = set()
        for y in x[0]:
            k2 = preemptions_map.get(y)
            if k2 is not None and k != k2:
                removes.add(y)

        if len(removes) > 0:
            updates[k] = [y for y in x[0] if y not in removes]

    for k, v in updates.items():
        json_obj[k][0] = v

    return json_obj


@server.PromptServer.instance.routes.get("/customnode/getmappings")
async def fetch_customnode_mappings(request):
    mode = request.rel_url.query["mode"]

    nickname_mode = False
    if mode == "nickname":
        mode = "local"
        nickname_mode = True

    json_obj = await get_data_by_mode(mode, 'extension-node-map.json')

    if nickname_mode:
        json_obj = nickname_filter(json_obj)

    all_nodes = set()
    patterns = []
    for k, x in json_obj.items():
        all_nodes.update(set(x[0]))

        if 'nodename_pattern' in x[1]:
            patterns.append((x[1]['nodename_pattern'], x[0]))

    missing_nodes = set(nodes.NODE_CLASS_MAPPINGS.keys()) - all_nodes

    for x in missing_nodes:
        for pat, item in patterns:
            if re.match(pat, x):
                item.append(x)

    return web.json_response(json_obj, content_type='application/json')


@server.PromptServer.instance.routes.get("/customnode/fetch_updates")
async def fetch_updates(request):
    try:
        json_obj = await get_data_by_mode(request.rel_url.query["mode"], 'custom-node-list.json')

        check_custom_nodes_installed(json_obj, True)

        update_exists = any('custom_nodes' in json_obj and 'installed' in node and node['installed'] == 'Update' for node in json_obj['custom_nodes'])

        if update_exists:
            return web.Response(status=201)

        return web.Response(status=200)
    except:
        return web.Response(status=400)


@server.PromptServer.instance.routes.get("/customnode/update_all")
async def update_all(request):
    try:
        save_snapshot_with_postfix('autosave')

        json_obj = await get_data_by_mode(request.rel_url.query["mode"], 'custom-node-list.json')

        check_custom_nodes_installed(json_obj, do_update=True)

        updated = [item['title'] for item in json_obj['custom_nodes'] if item['installed'] == 'Update']
        failed = [item['title'] for item in json_obj['custom_nodes'] if item['installed'] == 'Fail']

        res = {'updated': updated, 'failed': failed}

        if len(updated) == 0 and len(failed) == 0:
            status = 200
        else:
            status = 201

        return web.json_response(res, status=status, content_type='application/json')
    except:
        return web.Response(status=400)
    finally:
        clear_pip_cache()


def convert_markdown_to_html(input_text):
    pattern_a = re.compile(r'\[a/([^]]+)\]\(([^)]+)\)')
    pattern_w = re.compile(r'\[w/([^]]+)\]')
    pattern_i = re.compile(r'\[i/([^]]+)\]')
    pattern_bold = re.compile(r'\*\*([^*]+)\*\*')
    pattern_white = re.compile(r'%%([^*]+)%%')
    def replace_a(match):
        return f"<a href='{match.group(2)}' target='blank'>{match.group(1)}</a>"

    def replace_w(match):
        return f"<p class='cm-warn-note'>{match.group(1)}</p>"

    def replace_i(match):
        return f"<p class='cm-info-note'>{match.group(1)}</p>"

    def replace_bold(match):
        return f"<b>{match.group(1)}</b>"

    def replace_white(match):
        return f"<font color='white'>{match.group(1)}</font>"

    input_text = input_text.replace('\\[', '&#91;').replace('\\]', '&#93;').replace('<', '&lt;').replace('>', '&gt;')

    result_text = re.sub(pattern_a, replace_a, input_text)
    result_text = re.sub(pattern_w, replace_w, result_text)
    result_text = re.sub(pattern_i, replace_i, result_text)
    result_text = re.sub(pattern_bold, replace_bold, result_text)
    result_text = re.sub(pattern_white, replace_white, result_text)

    return result_text.replace("\n", "<BR>")

@server.PromptServer.instance.routes.get("/manager/notice")
async def get_notice(request):
    url = "github.com"
    path = "/ltdrdata/ltdrdata.github.io/wiki/News"

    async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
        async with session.get(f"https://{url}{path}") as resp:
            if resp.status == 200:
                html_content = await resp.text()

                pattern = re.compile(r'<div class="markdown-body">([\s\S]*?)</div>')
                match = pattern.search(html_content)

                if match:
                    markdown_content = match.group(1)
                    markdown_content += f"<HR>ComfyUI: {comfy_ui_revision}[{comfy_ui_hash[:6]}]({comfy_ui_commit_datetime.date()})"
                    markdown_content += f"<BR>Manager: {version_str}"

                    try:
                        if comfy_ui_required_commit_datetime.date() > comfy_ui_commit_datetime.date():
                            markdown_content = f'<P style="text-align: center; color:red; background-color:white; font-weight:bold">Your ComfyUI is too OUTDATED!!!</P>' + markdown_content
                    except:
                        pass

                    return web.Response(text=markdown_content, status=200)
                else:
                    return web.Response(text="Unable to retrieve Notice", status=200)
            else:
                return web.Response(text="Unable to retrieve Notice", status=200)


@server.PromptServer.instance.routes.get("/manager/reboot")
def restart(self):
    try:
        sys.stdout.close_log()
    except Exception as e:
        pass

    return os.execv(sys.executable, [sys.executable] + sys.argv)


def sanitize_filename(input_string):
    # Replace any character other than letters, digits, and underscores with an underscore.
    result_string = re.sub(r'[^a-zA-Z0-9_]', '_', input_string)

    return result_string


@server.PromptServer.instance.routes.post("/manager/component/save")
async def save_component(request):
    try:
        data = await request.json()
        name = data['name']
        workflow = data['workflow']

        if not os.path.exists(components_path):
            os.mkdir(components_path)

        if 'packname' in workflow and workflow['packname'] != '':
            sanitized_name = sanitize_filename(workflow['packname']) + '.pack'
        else:
            sanitized_name = sanitize_filename(name) + '.json'

        filepath = os.path.join(components_path, sanitized_name)

        components = {}
        if os.path.exists(filepath):
            with open(filepath) as f:
                components = json.load(f)

        components[name] = workflow
        with open(filepath, 'w') as f:
            json.dump(components, f, indent=4, sort_keys=True)
        return web.Response(text=filepath, status=200)
    except:
        return web.Response(status=400)


@server.PromptServer.instance.routes.post("/manager/component/loads")
async def load_components(request):
    try:
        json_files = [f for f in os.listdir(components_path) if f.endswith('.json')]
        pack_files = [f for f in os.listdir(components_path) if f.endswith('.pack')]

        components = {}
        for json_file in json_files + pack_files:
            file_path = os.path.join(components_path, json_file)
            with open(file_path, 'r') as file:
                try:
                    # When there is a conflict between the .pack and the .json, the pack takes precedence and overrides.
                    components.update(json.load(file))
                except json.JSONDecodeError as e:
                    print(f"[ComfyUI-Manager] Error decoding component file in file {json_file}: {e}")

        return web.json_response(components)
    except Exception as e:
        print(f"[ComfyUI-Manager] failed to load components\n{e}")
        return web.Response(status=400)


@server.PromptServer.instance.routes.get("/manager/share_option")
async def share_option(request):
    if "value" in request.rel_url.query:
        get_config()['share_option'] = request.rel_url.query['value']
        write_config()
    else:
        return web.Response(text=get_config()['share_option'], status=200)

    return web.Response(status=200)


def get_openart_auth():
    if not os.path.exists(os.path.join(comfyui_manager_path, ".openart_key")):
        return None
    try:
        with open(os.path.join(comfyui_manager_path, ".openart_key"), "r") as f:
            openart_key = f.read().strip()
        return openart_key if openart_key else None
    except:
        return None


def get_matrix_auth():
    if not os.path.exists(os.path.join(comfyui_manager_path, "matrix_auth")):
        return None
    try:
        with open(os.path.join(comfyui_manager_path, "matrix_auth"), "r") as f:
            matrix_auth = f.read()
            homeserver, username, password = matrix_auth.strip().split("\n")
            if not homeserver or not username or not password:
                return None
        return {
            "homeserver": homeserver,
            "username": username,
            "password": password,
        }
    except:
        return None


def get_comfyworkflows_auth():
    if not os.path.exists(os.path.join(comfyui_manager_path, "comfyworkflows_sharekey")):
        return None
    try:
        with open(os.path.join(comfyui_manager_path, "comfyworkflows_sharekey"), "r") as f:
            share_key = f.read()
            if not share_key.strip():
                return None
        return share_key
    except:
        return None


def get_youml_settings():
    if not os.path.exists(os.path.join(comfyui_manager_path, ".youml")):
        return None
    try:
        with open(os.path.join(comfyui_manager_path, ".youml"), "r") as f:
            youml_settings = f.read().strip()
        return youml_settings if youml_settings else None
    except:
        return None


def set_youml_settings(settings):
    with open(os.path.join(comfyui_manager_path, ".youml"), "w") as f:
        f.write(settings)


@server.PromptServer.instance.routes.get("/manager/get_openart_auth")
async def api_get_openart_auth(request):
    # print("Getting stored OpenArt key...")
    openart_key = get_openart_auth()
    if not openart_key:
        return web.Response(status=404)
    return web.json_response({"openart_key": openart_key})


@server.PromptServer.instance.routes.post("/manager/set_openart_auth")
async def api_set_openart_auth(request):
    json_data = await request.json()
    openart_key = json_data['openart_key']
    with open(os.path.join(comfyui_manager_path, ".openart_key"), "w") as f:
        f.write(openart_key)
    return web.Response(status=200)


@server.PromptServer.instance.routes.get("/manager/get_matrix_auth")
async def api_get_matrix_auth(request):
    # print("Getting stored Matrix credentials...")
    matrix_auth = get_matrix_auth()
    if not matrix_auth:
        return web.Response(status=404)
    return web.json_response(matrix_auth)


@server.PromptServer.instance.routes.get("/manager/youml/settings")
async def api_get_youml_settings(request):
    youml_settings = get_youml_settings()
    if not youml_settings:
        return web.Response(status=404)
    return web.json_response(json.loads(youml_settings))


@server.PromptServer.instance.routes.post("/manager/youml/settings")
async def api_set_youml_settings(request):
    json_data = await request.json()
    set_youml_settings(json.dumps(json_data))
    return web.Response(status=200)


@server.PromptServer.instance.routes.get("/manager/get_comfyworkflows_auth")
async def api_get_comfyworkflows_auth(request):
    # Check if the user has provided Matrix credentials in a file called 'matrix_accesstoken'
    # in the same directory as the ComfyUI base folder
    # print("Getting stored Comfyworkflows.com auth...")
    comfyworkflows_auth = get_comfyworkflows_auth()
    if not comfyworkflows_auth:
        return web.Response(status=404)
    return web.json_response({"comfyworkflows_sharekey": comfyworkflows_auth})


args.enable_cors_header = "*"
if hasattr(server.PromptServer.instance, "app"):
    app = server.PromptServer.instance.app
    cors_middleware = server.create_cors_middleware(args.enable_cors_header)
    app.middlewares.append(cors_middleware)


@server.PromptServer.instance.routes.post("/manager/set_esheep_workflow_and_images")
async def set_esheep_workflow_and_images(request):
    json_data = await request.json()
    current_workflow = json_data['workflow']
    images = json_data['images']
    with open(os.path.join(comfyui_manager_path, "esheep_share_message.json"), "w", encoding='utf-8') as file:
        json.dump(json_data, file, indent=4)
    return web.Response(status=200)


@server.PromptServer.instance.routes.get("/manager/get_esheep_workflow_and_images")
async def get_esheep_workflow_and_images(request):
    with open(os.path.join(comfyui_manager_path, "esheep_share_message.json"), 'r', encoding='utf-8') as file:
        data = json.load(file)
    return web.Response(status=200, text=json.dumps(data))


def set_matrix_auth(json_data):
    homeserver = json_data['homeserver']
    username = json_data['username']
    password = json_data['password']
    with open(os.path.join(comfyui_manager_path, "matrix_auth"), "w") as f:
        f.write("\n".join([homeserver, username, password]))


def set_comfyworkflows_auth(comfyworkflows_sharekey):
    with open(os.path.join(comfyui_manager_path, "comfyworkflows_sharekey"), "w") as f:
        f.write(comfyworkflows_sharekey)


def has_provided_matrix_auth(matrix_auth):
    return matrix_auth['homeserver'].strip() and matrix_auth['username'].strip() and matrix_auth['password'].strip()


def has_provided_comfyworkflows_auth(comfyworkflows_sharekey):
    return comfyworkflows_sharekey.strip()


def extract_model_file_names(json_data):
    """Extract unique file names from the input JSON data."""
    file_names = set()
    model_filename_extensions = {'.safetensors', '.ckpt', '.pt', '.pth', '.bin'}

    # Recursively search for file names in the JSON data
    def recursive_search(data):
        if isinstance(data, dict):
            for value in data.values():
                recursive_search(value)
        elif isinstance(data, list):
            for item in data:
                recursive_search(item)
        elif isinstance(data, str) and '.' in data:
            file_names.add(os.path.basename(data))  # file_names.add(data)

    recursive_search(json_data)

    return [f for f in list(file_names) if os.path.splitext(f)[1] in model_filename_extensions]


def find_file_paths(base_dir, file_names):
    """Find the paths of the files in the base directory."""
    file_paths = {}

    for root, dirs, files in os.walk(base_dir):
        # Exclude certain directories
        dirs[:] = [d for d in dirs if d not in ['.git']]

        for file in files:
            if file in file_names:
                file_paths[file] = os.path.join(root, file)
    return file_paths


def compute_sha256_checksum(filepath):
    """Compute the SHA256 checksum of a file, in chunks"""
    sha256 = hashlib.sha256()
    with open(filepath, 'rb') as f:
        for chunk in iter(lambda: f.read(4096), b''):
            sha256.update(chunk)
    return sha256.hexdigest()


@server.PromptServer.instance.routes.post("/manager/share")
async def share_art(request):
    # get json data
    json_data = await request.json()

    matrix_auth = json_data['matrix_auth']
    comfyworkflows_sharekey = json_data['cw_auth']['cw_sharekey']

    set_matrix_auth(matrix_auth)
    set_comfyworkflows_auth(comfyworkflows_sharekey)

    share_destinations = json_data['share_destinations']

    credits = json_data['credits']
    title = json_data['title']
    description = json_data['description']
    is_nsfw = json_data['is_nsfw']
    prompt = json_data['prompt']
    potential_outputs = json_data['potential_outputs']
    selected_output_index = json_data['selected_output_index']

    try:
        output_to_share = potential_outputs[int(selected_output_index)]
    except:
        # for now, pick the first output
        output_to_share = potential_outputs[0]

    assert output_to_share['type'] in ('image', 'output')
    output_dir = folder_paths.get_output_directory()

    if output_to_share['type'] == 'image':
        asset_filename = output_to_share['image']['filename']
        asset_subfolder = output_to_share['image']['subfolder']

        if output_to_share['image']['type'] == 'temp':
            output_dir = folder_paths.get_temp_directory()
    else:
        asset_filename = output_to_share['output']['filename']
        asset_subfolder = output_to_share['output']['subfolder']

    if asset_subfolder:
        asset_filepath = os.path.join(output_dir, asset_subfolder, asset_filename)
    else:
        asset_filepath = os.path.join(output_dir, asset_filename)

    # get the mime type of the asset
    assetFileType = mimetypes.guess_type(asset_filepath)[0]

    share_website_host = "UNKNOWN"
    if "comfyworkflows" in share_destinations:
        share_website_host = "https://comfyworkflows.com"
        share_endpoint = f"{share_website_host}/api"

        # get presigned urls
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.post(
                f"{share_endpoint}/get_presigned_urls",
                json={
                    "assetFileName": asset_filename,
                    "assetFileType": assetFileType,
                    "workflowJsonFileName": 'workflow.json',
                    "workflowJsonFileType": 'application/json',
                },
            ) as resp:
                assert resp.status == 200
                presigned_urls_json = await resp.json()
                assetFilePresignedUrl = presigned_urls_json["assetFilePresignedUrl"]
                assetFileKey = presigned_urls_json["assetFileKey"]
                workflowJsonFilePresignedUrl = presigned_urls_json["workflowJsonFilePresignedUrl"]
                workflowJsonFileKey = presigned_urls_json["workflowJsonFileKey"]

        # upload asset
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(assetFilePresignedUrl, data=open(asset_filepath, "rb")) as resp:
                assert resp.status == 200

        # upload workflow json
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            async with session.put(workflowJsonFilePresignedUrl, data=json.dumps(prompt['workflow']).encode('utf-8')) as resp:
                assert resp.status == 200

        model_filenames = extract_model_file_names(prompt['workflow'])
        model_file_paths = find_file_paths(folder_paths.base_path, model_filenames)

        models_info = {}
        for filename, filepath in model_file_paths.items():
            models_info[filename] = {
                "filename": filename,
                "sha256_checksum": compute_sha256_checksum(filepath),
                "relative_path": os.path.relpath(filepath, folder_paths.base_path),
            }

        # make a POST request to /api/upload_workflow with form data key values
        async with aiohttp.ClientSession(trust_env=True, connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
            form = aiohttp.FormData()
            if comfyworkflows_sharekey:
                form.add_field("shareKey", comfyworkflows_sharekey)
            form.add_field("source", "comfyui_manager")
            form.add_field("assetFileKey", assetFileKey)
            form.add_field("assetFileType", assetFileType)
            form.add_field("workflowJsonFileKey", workflowJsonFileKey)
            form.add_field("sharedWorkflowWorkflowJsonString", json.dumps(prompt['workflow']))
            form.add_field("sharedWorkflowPromptJsonString", json.dumps(prompt['output']))
            form.add_field("shareWorkflowCredits", credits)
            form.add_field("shareWorkflowTitle", title)
            form.add_field("shareWorkflowDescription", description)
            form.add_field("shareWorkflowIsNSFW", str(is_nsfw).lower())
            form.add_field("currentSnapshot", json.dumps(get_current_snapshot()))
            form.add_field("modelsInfo", json.dumps(models_info))

            async with session.post(
                f"{share_endpoint}/upload_workflow",
                data=form,
            ) as resp:
                assert resp.status == 200
                upload_workflow_json = await resp.json()
                workflowId = upload_workflow_json["workflowId"]

    # check if the user has provided Matrix credentials
    if "matrix" in share_destinations:
        comfyui_share_room_id = '!LGYSoacpJPhIfBqVfb:matrix.org'
        filename = os.path.basename(asset_filepath)
        content_type = assetFileType

        try:
            from matrix_client.api import MatrixHttpApi
            from matrix_client.client import MatrixClient

            homeserver = 'matrix.org'
            if matrix_auth:
                homeserver = matrix_auth.get('homeserver', 'matrix.org')
            homeserver = homeserver.replace("http://", "https://")
            if not homeserver.startswith("https://"):
                homeserver = "https://" + homeserver

            client = MatrixClient(homeserver)
            try:
                token = client.login(username=matrix_auth['username'], password=matrix_auth['password'])
                if not token:
                    return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)
            except:
                return web.json_response({"error": "Invalid Matrix credentials."}, content_type='application/json', status=400)

            matrix = MatrixHttpApi(homeserver, token=token)
            with open(asset_filepath, 'rb') as f:
                mxc_url = matrix.media_upload(f.read(), content_type, filename=filename)['content_uri']

            workflow_json_mxc_url = matrix.media_upload(prompt['workflow'], 'application/json', filename='workflow.json')['content_uri']

            text_content = ""
            if title:
                text_content += f"{title}\n"
            if description:
                text_content += f"{description}\n"
            if credits:
                text_content += f"\ncredits: {credits}\n"
            response = matrix.send_message(comfyui_share_room_id, text_content)
            response = matrix.send_content(comfyui_share_room_id, mxc_url, filename, 'm.image')
            response = matrix.send_content(comfyui_share_room_id, workflow_json_mxc_url, 'workflow.json', 'm.file')
        except:
            import traceback
            traceback.print_exc()
            return web.json_response({"error": "An error occurred when sharing your art to Matrix."}, content_type='application/json', status=500)
"url": None if "comfyworkflows" not in share_destinations else f"{share_website_host}/workflows/{workflowId}", }, "matrix": { "success": None if "matrix" not in share_destinations else True } }, content_type='application/json', status=200) def sanitize(data): return data.replace("<", "<").replace(">", ">") def lookup_customnode_by_url(data, target): for x in data['custom_nodes']: if target in x['files']: dir_name = os.path.splitext(os.path.basename(target))[0].replace(".git", "") dir_path = os.path.join(custom_nodes_path, dir_name) if os.path.exists(dir_path): x['installed'] = 'True' elif os.path.exists(dir_path + ".disabled"): x['installed'] = 'Disabled' return x return None async def _confirm_try_install(sender, custom_node_url, msg): json_obj = await get_data_by_mode('default', 'custom-node-list.json') sender = sanitize(sender) msg = sanitize(msg) target = lookup_customnode_by_url(json_obj, custom_node_url) if target is not None: server.PromptServer.instance.send_sync("cm-api-try-install-customnode", {"sender": sender, "target": target, "msg": msg}) else: print(f"[ComfyUI Manager API] Failed to try install - Unknown custom node url '{custom_node_url}'") def confirm_try_install(sender, custom_node_url, msg): asyncio.run(_confirm_try_install(sender, custom_node_url, msg)) cm_global.register_api('cm.try-install-custom-node', confirm_try_install) import asyncio async def default_cache_update(): async def get_cache(filename): uri = 'https://raw.githubusercontent.com/ltdrdata/ComfyUI-Manager/main/' + filename cache_uri = str(simple_hash(uri)) + '_' + filename cache_uri = os.path.join(cache_dir, cache_uri) json_obj = await get_data(uri, True) with cache_lock: with open(cache_uri, "w", encoding='utf-8') as file: json.dump(json_obj, file, indent=4, sort_keys=True) print(f"[ComfyUI-Manager] default cache updated: {uri}") a = get_cache("custom-node-list.json") b = get_cache("extension-node-map.json") c = get_cache("model-list.json") d = get_cache("alter-list.json") await asyncio.gather(a, b, c, d) threading.Thread(target=lambda: asyncio.run(default_cache_update())).start() if not os.path.exists(config_path): get_config() write_config() WEB_DIRECTORY = "js" NODE_CLASS_MAPPINGS = {} __all__ = ['NODE_CLASS_MAPPINGS'] cm_global.register_extension('ComfyUI-Manager', {'version': version, 'name': 'ComfyUI Manager', 'nodes': {'Terminal Log //CM'}, 'description': 'It provides the ability to manage custom nodes in ComfyUI.', })