Deva1211 committed on
Commit
dd852f5
·
1 Parent(s): f0d1216

Working but using google/gemma-2b-it model

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -4,8 +4,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
4
 
5
  # Load model and tokenizer
6
  print("Loading DialoGPT-medium...")
7
- tokenizer = AutoTokenizer.from_pretrained("microsoft/DialoGPT-medium")
8
- model = AutoModelForCausalLM.from_pretrained("microsoft/DialoGPT-medium")
9
 
10
  # Add pad token if it doesn't exist
11
  if tokenizer.pad_token is None:
 
4
 
5
  # Load model and tokenizer
6
  print("Loading DialoGPT-medium...")
7
+ tokenizer = AutoTokenizer.from_pretrained("google/gemma-2b-it")
8
+ model = AutoModelForCausalLM.from_pretrained("google/gemma-2b-it")
9
 
10
  # Add pad token if it doesn't exist
11
  if tokenizer.pad_token is None: