PhDFlo commited on
Commit
4b4da04
·
1 Parent(s): 3cd15af

Add create json file and correct creation of fasta file

Browse files
Files changed (1) hide show
  1. app.py +39 -1
app.py CHANGED
@@ -11,6 +11,44 @@ from modal_app import app, chai1_inference, download_inference_dependencies, her
11
 
12
  # Definition of the tools for the MCP server
13
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
14
  # Function to compute Chai1 inference
15
  def compute_Chai1(
16
  fasta_file: Optional[str] = None,
@@ -37,7 +75,7 @@ def compute_Chai1(
37
  fasta_file = here / "inputs" / "chai1_default_input.fasta"
38
  else:
39
  name_file = here / "inputs" / "chai1_custom_input.fasta"
40
- with open(fasta_file, "w") as f:
41
  f.write(fasta_file)
42
  fasta_file = name_file
43
 
 
11
 
12
  # Definition of the tools for the MCP server
13
 
14
+ # Function to create a custom JSON config file
15
+ def create_custom_config(
16
+ num_trunk_recycles: int = 3,
17
+ num_diffn_timesteps: int = 200,
18
+ seed: int = 42,
19
+ use_esm_embeddings: bool = True,
20
+ use_msa_server: bool = True,
21
+ output_file: Optional[str] = None
22
+ ) -> str:
23
+ """Create a custom JSON configuration file for Chai1 inference.
24
+
25
+ Args:
26
+ num_trunk_recycles (int, optional): Number of trunk recycles. Defaults to 3.
27
+ num_diffn_timesteps (int, optional): Number of diffusion timesteps. Defaults to 200.
28
+ seed (int, optional): Random seed. Defaults to 42.
29
+ use_esm_embeddings (bool, optional): Whether to use ESM embeddings. Defaults to True.
30
+ use_msa_server (bool, optional): Whether to use MSA server. Defaults to True.
31
+ output_file (str, optional): Path to save the config file. If None, saves to default location.
32
+
33
+ Returns:
34
+ str: Path to the created config file
35
+ """
36
+ config = {
37
+ "num_trunk_recycles": num_trunk_recycles,
38
+ "num_diffn_timesteps": num_diffn_timesteps,
39
+ "seed": seed,
40
+ "use_esm_embeddings": use_esm_embeddings,
41
+ "use_msa_server": use_msa_server
42
+ }
43
+
44
+ if output_file is None:
45
+ output_file = here / "inputs" / "chai1_custom_inference.json"
46
+
47
+ with open(output_file, "w") as f:
48
+ json.dump(config, f, indent=4)
49
+
50
+ return str(output_file)
51
+
52
  # Function to compute Chai1 inference
53
  def compute_Chai1(
54
  fasta_file: Optional[str] = None,
 
75
  fasta_file = here / "inputs" / "chai1_default_input.fasta"
76
  else:
77
  name_file = here / "inputs" / "chai1_custom_input.fasta"
78
+ with open(fasta_file, "x") as f:
79
  f.write(fasta_file)
80
  fasta_file = name_file
81