Royrotem100 committed on
Commit
1882a34
·
1 Parent(s): 28acd69

Add DictaLM 2.0 instruct model

Browse files
Files changed (1) hide show
  1. app.py +4 -1
app.py CHANGED
@@ -2,6 +2,7 @@ import os
2
  import gradio as gr
3
  from http import HTTPStatus
4
  from typing import Generator, List, Optional, Tuple, Dict
 
5
  from urllib.error import HTTPError
6
  from flask import Flask, request, jsonify
7
  from transformers import AutoTokenizer, AutoModelForCausalLM
@@ -57,13 +58,15 @@ def predict():
57
  outputs = model.generate(inputs['input_ids'], max_length=1024, temperature=0.7, top_p=0.9)
58
 
59
  # Decode the output
60
- prediction = tokenizer.decode(outputs[0], skip_special_tokens=True)
61
 
62
  return jsonify({"prediction": prediction})
63
 
64
def run_flask():
    # Start the Flask API server, listening on all interfaces on port 5000.
    # NOTE(review): 0.0.0.0 exposes the dev server externally — confirm this
    # is intended for the deployment environment.
    app.run(host='0.0.0.0', port=5000)
66
 
 
 
67
 
68
# Run Flask in a separate thread so the server does not block the rest of
# the script (presumably the Gradio UI) from starting.
# NOTE(review): thread is non-daemon — the process will not exit while the
# server is running; confirm that is intended.
threading.Thread(target=run_flask).start()
 
2
  import gradio as gr
3
  from http import HTTPStatus
4
  from typing import Generator, List, Optional, Tuple, Dict
5
+ import re
6
  from urllib.error import HTTPError
7
  from flask import Flask, request, jsonify
8
  from transformers import AutoTokenizer, AutoModelForCausalLM
 
58
  outputs = model.generate(inputs['input_ids'], max_length=1024, temperature=0.7, top_p=0.9)
59
 
60
  # Decode the output
61
+ prediction = tokenizer.decode(outputs[0], skip_special_tokens=True).replace(formatted_text, '').strip()
62
 
63
  return jsonify({"prediction": prediction})
64
 
65
def run_flask():
    # Start the Flask API server, listening on all interfaces on port 5000.
    # NOTE(review): 0.0.0.0 exposes the dev server externally — confirm this
    # is intended for the deployment environment.
    app.run(host='0.0.0.0', port=5000)
67
 
68
def is_hebrew(text: str) -> bool:
    """Return True if *text* contains at least one character from the
    Hebrew Unicode block (U+0590 through U+05FF)."""
    return any('\u0590' <= ch <= '\u05FF' for ch in text)
70
 
71
# Run Flask in a separate thread so the server does not block the rest of
# the script (presumably the Gradio UI) from starting.
# NOTE(review): thread is non-daemon — the process will not exit while the
# server is running; confirm that is intended.
threading.Thread(target=run_flask).start()