File size: 244 Bytes
6f00e2b
 
 
 
 
1
2
3
4
5
6
# Example: loading the MedGemma model with the Hugging Face Transformers library.
from transformers import AutoTokenizer, AutoModelForCausalLM

# Hugging Face Hub identifier of the checkpoint to load.
MODEL_ID = "google/medgemma-4b-it"

# Fetch (or read from the local cache) the tokenizer and the model weights.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID)