| | |
| |
|
| |
|
| | import os |
| | import time |
| |
|
# File extensions treated as loadable model/checkpoint files.
supported_pt_extensions = {".ckpt", ".pt", ".bin", ".pth", ".safetensors"}

# Registry: logical folder name -> ([search paths], {allowed extensions}).
folder_names_and_paths = {}

# Configured by the application at startup; None until then.
# NOTE(review): the assignments are not visible in this chunk — confirm who sets these.
base_path = None
models_dir = None

# Per-folder-name cache of (sorted file list, {dir: mtime}, timestamp),
# written by get_filename_list() and validated by cached_filename_list_().
filename_list_cache = {}
| |
|
| |
|
def add_model_folder_path(folder_name, full_folder_path):
    """Register full_folder_path as a search location for folder_name.

    First registration of a folder_name creates a new entry with a
    single-path list and an empty extension set; later registrations
    append to the existing path list.
    """
    global folder_names_and_paths
    entry = folder_names_and_paths.get(folder_name)
    if entry is None:
        folder_names_and_paths[folder_name] = ([full_folder_path], set())
    else:
        entry[0].append(full_folder_path)
| |
|
| |
|
def get_folder_paths(folder_name):
    """Return a shallow copy of the search paths registered for folder_name.

    Raises KeyError if folder_name was never registered.
    """
    return list(folder_names_and_paths[folder_name][0])
| |
|
| |
|
def recursive_search(directory, excluded_dir_names=None):
    """Walk `directory` (following symlinks) and collect files and dir mtimes.

    Args:
        directory: root path to scan; a non-directory yields ([], {}).
        excluded_dir_names: directory basenames to prune from the walk
            (defaults to no exclusions).

    Returns:
        (files, dirs) where `files` is a list of file paths relative to
        `directory`, and `dirs` maps `directory` and every visited
        subdirectory to its mtime (used by callers to detect changes).
    """
    if not os.path.isdir(directory):
        return [], {}

    if excluded_dir_names is None:
        excluded_dir_names = []

    result = []
    dirs = {}

    # The root may vanish between the isdir() check above and this call;
    # warn and fall through (the walk below will simply yield nothing).
    try:
        dirs[directory] = os.path.getmtime(directory)
    except FileNotFoundError:
        print(f"Warning: Unable to access {directory}. Skipping this path.")

    for dirpath, subdirs, filenames in os.walk(directory, followlinks=True, topdown=True):
        # In-place edit of subdirs prunes excluded names (works because topdown=True).
        subdirs[:] = [d for d in subdirs if d not in excluded_dir_names]
        for file_name in filenames:
            relative_path = os.path.relpath(os.path.join(dirpath, file_name), directory)
            result.append(relative_path)

        for d in subdirs:
            path = os.path.join(dirpath, d)
            try:
                dirs[path] = os.path.getmtime(path)
            except FileNotFoundError:
                # Directory disappeared mid-walk: skip its mtime entry.
                # (A trailing `continue` here was dead code and has been removed.)
                print(f"Warning: Unable to access {path}. Skipping this path.")
    return result, dirs
| |
|
| |
|
def filter_files_extensions(files, extensions):
    """Sort `files`, keeping only names whose lowercased extension appears
    in `extensions`; an empty `extensions` collection keeps everything."""
    if len(extensions) == 0:
        return sorted(files)
    return sorted(name for name in files if os.path.splitext(name)[-1].lower() in extensions)
| |
|
| |
|
def get_full_path(folder_name, filename):
    """Resolve `filename` against folder_name's registered search paths.

    Returns the first existing file's full path, or None when the folder
    name is unknown or no candidate file exists on disk.
    """
    global folder_names_and_paths
    folders = folder_names_and_paths.get(folder_name)
    if folders is None:
        return None
    # Re-anchor at "/" then take the relative path: strips leading slashes
    # and "../" components so the lookup cannot escape the search roots.
    safe_name = os.path.relpath(os.path.join("/", filename), "/")
    for base in folders[0]:
        candidate = os.path.join(base, safe_name)
        if os.path.isfile(candidate):
            return candidate
    return None
| |
|
| |
|
def get_filename_list_(folder_name):
    """Scan disk for folder_name and build a fresh cache entry.

    Returns (sorted unique filenames, {dir: mtime} for every scanned
    directory, time.perf_counter() stamp taken after the scan).
    """
    global folder_names_and_paths
    folders = folder_names_and_paths[folder_name]
    names = set()
    scanned_dirs = {}
    for search_path in folders[0]:
        files, dir_mtimes = recursive_search(search_path, excluded_dir_names=[".git"])
        names.update(filter_files_extensions(files, folders[1]))
        scanned_dirs.update(dir_mtimes)
    return (sorted(names), scanned_dirs, time.perf_counter())
| |
|
| |
|
def cached_filename_list_(folder_name):
    """Return the cached (files, dirs, timestamp) tuple for folder_name,
    or None when there is no entry or it is stale.

    The cache is stale when:
      * any recorded directory's mtime changed, or the directory was
        deleted since it was recorded;
      * a registered search path exists on disk that the cached scan
        never visited.
    """
    global filename_list_cache
    global folder_names_and_paths
    if folder_name not in filename_list_cache:
        return None
    out = filename_list_cache[folder_name]

    for folder, time_modified in out[1].items():
        try:
            # A changed mtime means files may have been added or removed.
            if os.path.getmtime(folder) != time_modified:
                return None
        except FileNotFoundError:
            # Recorded directory was deleted since the scan -> stale.
            # (Previously this exception escaped and crashed the caller.)
            return None

    folders = folder_names_and_paths[folder_name]
    for x in folders[0]:
        if os.path.isdir(x):
            # Search path exists on disk but was never scanned -> stale.
            if x not in out[1]:
                return None

    return out
| |
|
| |
|
def get_filename_list(folder_name):
    """Return the list of filenames for folder_name, refreshing the
    module-level cache when it is missing or stale."""
    global filename_list_cache
    cached = cached_filename_list_(folder_name)
    if cached is None:
        cached = get_filename_list_(folder_name)
        filename_list_cache[folder_name] = cached
    return list(cached[0])
| |
|