Spaces:
Sleeping
Sleeping
| import os | |
| import gemmi | |
| import shutil | |
| import gradio as gr | |
| import subprocess | |
| import json | |
| import yaml | |
| from pathlib import Path | |
def mcif_gz_to_pdb(file_path: str) -> str:
    """
    Convert an mmCIF file (optionally gzipped) to PDB format.

    The PDB file is written to the same directory as the input, with the
    mmCIF suffix replaced by ``.pdb``.

    Parameters
    ----------
    file_path: str
        Path to the input file (e.g. ``.cif``, ``.cif.gz``, ``.mmcif``,
        ``.mmcif.gz``). gemmi transparently handles gzipped input.

    Returns
    -------
    str: path to the generated PDB file.
    """
    st = gemmi.read_structure(file_path)
    st.setup_entities()  # recommended by gemmi for consistent entity handling

    # Strip a known mmCIF suffix from the *end* of the path only. The old
    # str.replace(".cif.gz", ".pdb") matched anywhere in the path, and when
    # the suffix was absent it left pdb_path == file_path, silently
    # overwriting the input file with the PDB output.
    for suffix in (".cif.gz", ".mmcif.gz", ".cif", ".mmcif"):
        if file_path.endswith(suffix):
            pdb_path = file_path[: -len(suffix)] + ".pdb"
            break
    else:
        # Unrecognised extension: append rather than clobber the input.
        pdb_path = file_path + ".pdb"

    st.write_minimal_pdb(pdb_path)
    return pdb_path
def download_results_as_zip(directory):
    """
    Package a results directory as a zip archive for download.

    If no output directory has been produced yet (generation not run),
    leave the download component unchanged.

    Parameters
    ----------
    directory: gr.State or str
        Path to the directory containing generated results, or None when
        generation has not been run yet.

    Returns
    -------
    str: path to the created zip file, suitable for a gr.File download.
    """
    if directory is None:
        # Nothing to package yet; tell Gradio to leave the component as-is.
        return gr.update()
    archive_base = f"{directory}"
    # make_archive appends ".zip" to the base name itself.
    shutil.make_archive(archive_base, 'zip', directory)
    return archive_base + ".zip"
| def load_config(file_path: str | Path) -> dict | list: | |
| """ | |
| Load YAML or JSON file into a Python object. | |
| Args: | |
| file_path: Path to the YAML or JSON file. | |
| Returns: | |
| Parsed Python object (dict, list, etc.). | |
| Raises: | |
| ValueError: If extension is not .yaml, .yml, or .json. | |
| Exception: On parse errors. | |
| """ | |
| path = Path(file_path) | |
| if not path.exists(): | |
| raise FileNotFoundError(f"File not found: {file_path}") | |
| ext = path.suffix.lower() | |
| if ext in {'.yaml', '.yml'}: | |
| with open(path, 'r', encoding='utf-8') as f: | |
| return yaml.safe_load(f) # Secure loader [web:1][web:4] | |
| elif ext == '.json': | |
| with open(path, 'r', encoding='utf-8') as f: | |
| return json.load(f) # Built-in JSON loader [web:12] | |
| else: | |
| raise ValueError(f"Unsupported extension: {ext}. Use .yaml, .yml, or .json.") | |
def move_all_files_to_shared_directory(job_files: "list[gr.File]", support_files: "list[gr.File]", shared_dir: str):
    """
    Copy uploaded files into one shared directory and return their new paths.

    Needed because gr.File stores each upload in its own temp directory on
    the server, while downstream tools expect all inputs side by side.

    Parameters
    ----------
    job_files: list of str, or gr.File object with file_count="multiple"
        Paths/objects for the user-uploaded job files (.json, .pdb, or .cif).
    support_files: list of str, or gr.File object with file_count="multiple"
        Paths/objects for the user-uploaded support files (.pdb or .cif).
        May be None when nothing was uploaded.
    shared_dir: str
        Destination directory; created if it does not exist.

    Returns
    -------
    new_job_paths:
        New file paths corresponding to the input job files.
    new_support_paths:
        New file paths corresponding to the input support files
        ([] when support_files is None).
    """
    os.makedirs(shared_dir, exist_ok=True)

    def _copy_into_shared(files):
        # Entries may be plain path strings or file objects exposing the
        # temp path via ".name". The old code used both assumptions at once
        # (basename(file) + file.name) and crashed on plain strings.
        paths = []
        for f in files:
            src = getattr(f, "name", f)
            dst = os.path.join(shared_dir, os.path.basename(str(src)))
            shutil.copy2(src, dst)  # copy, not move, to preserve originals
            paths.append(dst)
        return paths

    new_job_paths = _copy_into_shared(job_files)
    # An empty support upload yields None rather than an empty list.
    new_support_paths = _copy_into_shared(support_files) if support_files is not None else []
    return new_job_paths, new_support_paths