pswap committed on
Commit
c5ca742
·
1 Parent(s): de94248
.gitignore CHANGED
@@ -1 +1,2 @@
1
- __pycache__/*
 
 
1
+ __pycache__
2
+ *.pyc
__pycache__/esm_utils.cpython-310.pyc DELETED
Binary file (3.58 kB)
 
__pycache__/prodigal.cpython-310.pyc DELETED
Binary file (1.96 kB)
 
app.py CHANGED
@@ -242,22 +242,11 @@ with gr.Blocks(theme='NoCrypt/miku', css=custom_css) as demo:
242
  run_button = gr.Button("🔍 Search")
243
 
244
  with gr.Column(elem_classes=["output-container"]):
245
- # output_table = gr.DataFrame(
246
- # headers=node_attributes+['euclidean','cosine','dot_product'],
247
- # label="Nearest Phages in INPHARED"
248
- # )
249
- # output_table.download_button = True
250
  output_table = gr.DataFrame(
251
  headers=node_attributes+['euclidean','cosine','dot_product'],
252
- label="Nearest Phages in INPHARED",
253
- interactive=False,
254
- wrap=True,
255
- type="pandas",
256
- elem_id="output_table",
257
- value=None,
258
- datatype="str",
259
- download_button=True
260
  )
 
261
 
262
  #gr.Markdown("https://www.biorxiv.org/content/10.1101/2024.12.17.627486v1")
263
 
 
242
  run_button = gr.Button("🔍 Search")
243
 
244
  with gr.Column(elem_classes=["output-container"]):
 
 
 
 
 
245
  output_table = gr.DataFrame(
246
  headers=node_attributes+['euclidean','cosine','dot_product'],
247
+ label="Nearest Phages in INPHARED"
 
 
 
 
 
 
 
248
  )
249
+ output_table.download_button = True
250
 
251
  #gr.Markdown("https://www.biorxiv.org/content/10.1101/2024.12.17.627486v1")
252
 
esm_utils.py CHANGED
@@ -28,7 +28,7 @@ class EsmEmbedding:
28
 
29
  with torch.no_grad():
30
  outputs = self.model(**tokens, output_hidden_states=True)
31
- hidden = outputs.hidden_states[-1][0].detach().to(torch.float64) # shape: [seq_len, hidden_dim]
32
 
33
  mean_embedding = hidden[1:-1].mean(dim=0) # mean over non-[CLS]/[EOS]
34
  cls_embedding = hidden[0] # CLS token
 
28
 
29
  with torch.no_grad():
30
  outputs = self.model(**tokens, output_hidden_states=True)
31
+ hidden = outputs.hidden_states[-1][0].detach() # shape: [seq_len, hidden_dim]
32
 
33
  mean_embedding = hidden[1:-1].mean(dim=0) # mean over non-[CLS]/[EOS]
34
  cls_embedding = hidden[0] # CLS token