Fawad97 committed on
Commit
f90b60c
·
verified ·
1 Parent(s): a6eac87

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -11
app.py CHANGED
@@ -5,7 +5,6 @@ os.system("pip install transformers gradio torch")
5
 
6
  import gradio as gr
7
  from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
8
-
9
  # Use a smaller model to reduce memory usage
10
  model_name = "distilgpt2"
11
  tokenizer = AutoTokenizer.from_pretrained(model_name)
@@ -14,20 +13,52 @@ model = AutoModelForCausalLM.from_pretrained(model_name)
14
  # Initialize the pipeline with the model and tokenizer
15
  generator = pipeline("text-generation", model=model, tokenizer=tokenizer, device=-1) # Use device=-1 for CPU
16
 
17
- def process_input(text):
 
 
 
18
  try:
19
- output = generator(text, max_length=50, do_sample=True)
20
- return output[0]['generated_text']
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  except Exception as e:
22
  return f"An error occurred: {e}"
23
 
 
 
 
 
 
 
 
24
  # Set up the Gradio interface
25
- iface = gr.Interface(
26
- fn=process_input,
27
- inputs=gr.Textbox(label="Enter your query"),
28
- outputs=gr.Textbox(label="Response"),
29
- title="Interactive Bank Management App"
30
- )
 
 
 
 
 
 
 
 
 
31
 
32
  # Launch the Gradio app
33
- iface.launch()
 
5
 
6
  import gradio as gr
7
  from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
 
8
# Use a smaller model to reduce memory usage
# distilgpt2 is a distilled GPT-2 checkpoint; small enough for CPU-only Spaces.
model_name = "distilgpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)

# Initialize the pipeline with the model and tokenizer
# NOTE(review): `model` is created from the same checkpoint on lines not shown
# in this diff hunk (new-file lines 11-12) — confirm against the full file.
generator = pipeline("text-generation", model=model, tokenizer=tokenizer, device=-1) # Use device=-1 for CPU

# In-memory storage for generated data
# One dict per processed query (see process_input); process-local only, so the
# history is lost whenever the app restarts.
generated_data = []
18
+
19
def process_input(name, account_number, balance, query):
    """Generate a model reply for a banking query and record the exchange.

    The user's details are folded into the prompt, passed through the
    module-level text-generation pipeline, and the resulting text (which
    includes the prompt, since pipelines echo it) is both stored in
    ``generated_data`` for later viewing and returned to the caller.

    Any failure is reported as a readable string rather than raised, so the
    Gradio UI always has something to display.
    """
    try:
        # Fold the user's details into the prompt the model sees.
        formatted_query = f"Name: {name}\nAccount Number: {account_number}\nBalance: {balance}\nQuery: {query}"
        generated = generator(formatted_query, max_length=150, do_sample=True)
        reply = generated[0]['generated_text']

        # Remember this exchange so view_history() can replay it later.
        record = {
            'name': name,
            'account_number': account_number,
            'balance': balance,
            'query': query,
            'response': reply,
        }
        generated_data.append(record)

        return reply
    except Exception as e:
        # UI boundary: surface the problem as text instead of crashing the app.
        return f"An error occurred: {e}"
38
 
39
def view_history():
    """Return every stored query/response record as one readable string.

    Records come from the module-level ``generated_data`` list that
    process_input() appends to; each record is rendered on its own
    paragraph, separated by a blank line.
    """
    # Guard clause: nothing recorded yet.
    if not generated_data:
        return "No history available."
    entries = []
    for data in generated_data:
        entries.append(
            f"Name: {data['name']}\nAccount Number: {data['account_number']}\nBalance: {data['balance']}\nQuery: {data['query']}\nResponse: {data['response']}"
        )
    return "\n\n".join(entries)
45
+
46
# Set up the Gradio interface
with gr.Blocks() as demo:
    # Input widgets laid out side by side.
    # NOTE(review): exact nesting of the submit button inside the Row is
    # ambiguous in the source paste — confirm against the rendered layout.
    with gr.Row():
        name = gr.Textbox(label="Enter your name")
        account_number = gr.Textbox(label="Enter your account number")
        balance = gr.Textbox(label="Enter your account balance")
        query = gr.Textbox(label="Enter your query")
        submit_button = gr.Button("Submit")

    # Output widgets and the history viewer.
    response_output = gr.Textbox(label="Response")
    history_button = gr.Button("View History")
    history_output = gr.Textbox(label="Query History")

    # Define the interactions
    submit_button.click(
        process_input,
        inputs=[name, account_number, balance, query],
        outputs=response_output,
    )
    history_button.click(view_history, outputs=history_output)

# Launch the Gradio app
demo.launch()