JDhruv14 commited on
Commit
34bd51b
·
verified ·
1 Parent(s): 06e919e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -0
app.py CHANGED
@@ -63,6 +63,27 @@ def chat_fn(message, history, system_text, temperature, top_p, max_new, min_new)
63
  if eos:
64
  gen_cfg_kwargs["eos_token_id"] = eos
65
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
66
  gen_cfg = GenerationConfig(**gen_cfg_kwargs)
67
 
68
  with torch.no_grad():
 
63
  if eos:
64
  gen_cfg_kwargs["eos_token_id"] = eos
65
 
66
def infer_text(history, system_text=""):
    """Generate a reply for the latest user turn in *history*.

    Reply in the user's language with 2-3 concise points (200-400 words);
    cite Gita verses when relevant.

    Args:
        history: Chat history as a list of (user_msg, bot_msg) pairs;
            the newest user message is in the last entry.
            NOTE(review): assumes Gradio-style tuple pairs — confirm
            against the UI wiring.
        system_text: Optional system prompt forwarded to ``chat_fn``.

    Returns:
        The generated reply string, or "" when there is no history.
    """
    if not history:
        return ""  # nothing to answer

    # Split out the newest user message and the prior turns
    last_user_msg, _ = history[-1]
    prior_history = history[:-1]

    # Call the existing generator with sane defaults.
    # BUG FIX: chat_fn's signature requires min_new (it has no default),
    # so omitting it raised TypeError on every call. Pass a minimal floor.
    return chat_fn(
        message=last_user_msg,
        history=prior_history,
        system_text=system_text,
        temperature=0.7,
        top_p=0.9,
        max_new=256,
        min_new=1,
    )
86
+
87
  gen_cfg = GenerationConfig(**gen_cfg_kwargs)
88
 
89
  with torch.no_grad():