File size: 2,355 Bytes
b327bba
 
 
 
 
d136fb1
b327bba
4cc69fa
b327bba
 
 
 
 
 
 
 
 
1f14419
 
 
 
 
 
 
81c8a55
b327bba
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
d136fb1
81c8a55
 
b327bba
81c8a55
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
from pathlib import Path
from typing import Optional
from uuid import uuid4
import hashlib
import json

import gradio as gr
from modal_app import app, chai1_inference, download_inference_dependencies, here
  
def compute_Chai1(
    force_redownload: bool = False,
    fasta_file: Optional[str] = None,
    inference_config_file: Optional[str] = None,
    output_dir: Optional[str] = None,
    run_id: Optional[str] = None,
):
    """Run a Chai-1 inference job on Modal and save the results locally.

    Args:
        force_redownload: If True, re-download the remote inference
            dependencies even if they are already cached.
        fasta_file: Path to the input FASTA file. Defaults to the bundled
            ``inputs/chai1_default_input.fasta``.
        inference_config_file: Path to a JSON inference-config file.
            Defaults to the bundled ``inputs/chai1_quick_inference.json``.
        output_dir: Directory where result files are written. Created if
            missing. Defaults to ``./results``.
        run_id: Identifier used to name the output files. Defaults to a
            random 8-character hex id.

    Returns:
        None. For each predicted model ``i``, writes
        ``{run_id}-scores.model_idx_{i}.npz`` (bytes) and
        ``{run_id}-preds.model_idx_{i}.cif`` (text) into ``output_dir``.
    """
    with app.run():

        print("🧬 checking inference dependencies")
        download_inference_dependencies.remote(force=force_redownload)

        if fasta_file is None:
            fasta_file = here / "inputs" / "chai1_default_input.fasta"
        print(f"🧬 running Chai inference on {fasta_file}")
        fasta_content = Path(fasta_file).read_text()

        if inference_config_file is None:
            inference_config_file = here / "inputs" / "chai1_quick_inference.json"
        print(f"🧬 loading Chai inference config from {inference_config_file}")
        inference_config = json.loads(Path(inference_config_file).read_text())

        if run_id is None:
            run_id = hashlib.sha256(uuid4().bytes).hexdigest()[:8]  # short id
        print(f"🧬 running inference with {run_id=}")

        results = chai1_inference.remote(fasta_content, inference_config, run_id)

        if output_dir is None:
            output_dir = Path("./results")
        # Normalize once (callers may pass a str) and ensure the directory
        # exists even when it was supplied by the caller — previously mkdir
        # only ran for the default, so a missing user path failed the writes.
        output_dir = Path(output_dir)
        output_dir.mkdir(parents=True, exist_ok=True)

        print(f"🧬 saving results to disk locally in {output_dir}")
        for ii, (scores, cif) in enumerate(results):
            (output_dir / f"{run_id}-scores.model_idx_{ii}.npz").write_bytes(scores)
            (output_dir / f"{run_id}-preds.model_idx_{ii}.cif").write_text(cif)


# Gradio interface exposing compute_Chai1. The previous inputs/outputs were
# leftover template code from a "compute a square" demo and did not match the
# function's signature at all (Gradio would have passed a float as
# `force_redownload`). The components below map positionally onto the first
# three parameters; an un-uploaded gr.File yields None, which triggers the
# function's bundled defaults. The remaining parameters keep their defaults.
demo = gr.Interface(
    fn=compute_Chai1,
    inputs=[
        gr.Checkbox(label="Force re-download of inference dependencies", value=False),
        gr.File(label="FASTA input file (optional)", type="filepath"),
        gr.File(label="Inference config JSON (optional)", type="filepath"),
    ],
    # compute_Chai1 returns None (results are written to ./results on disk),
    # so the output component simply stays empty on success.
    outputs=gr.Textbox(label="Status"),
    title="Chai-1 Inference via Modal",
    description=(
        "Run Chai-1 structure-prediction inference on Modal. "
        "Results are saved locally under ./results."
    ),
)

# Launch both the Gradio web interface and the MCP server
if __name__ == "__main__":
    demo.launch(mcp_server=True)