Spaces: Runtime error
import streamlit as st
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

access_token = st.secrets["HF_ACCESS_TOKEN"]
model_name = "meta-llama/Meta-Llama-3-8B"


@st.cache_resource
def load_model(name: str, token: str):
    """Load the tokenizer and model exactly once per server process.

    Streamlit re-executes this entire script on every widget interaction;
    without caching, the 8B-parameter model would be re-downloaded and
    re-instantiated on each rerun, exhausting memory/time (the likely cause
    of the Space's "Runtime error"). `st.cache_resource` memoizes the pair.
    """
    tokenizer = AutoTokenizer.from_pretrained(name, token=token)
    model = AutoModelForCausalLM.from_pretrained(name, token=token)
    model.eval()  # inference mode; pairs with torch.no_grad() below
    return tokenizer, model


tokenizer, model = load_model(model_name, access_token)

text_input = st.text_area("Enter text:")
if text_input:
    inputs = tokenizer(text_input, return_tensors="pt")
    input_ids = inputs.input_ids
    # Forward pass without building the autograd graph (inference only).
    with torch.no_grad():
        output = model(input_ids)
    # Drop the batch dimension: shape becomes (seq_len, vocab_size).
    logits = output.logits.squeeze(0)
    tokens = tokenizer.convert_ids_to_tokens(input_ids[0])
    # NOTE(review): for a causal LM, logits[idx] is the distribution for
    # predicting the token AFTER position idx, not the token at idx itself —
    # confirm this pairing is the intended semantics. Also note each entry is
    # a full vocab-sized vector, so the JSON payload is very large.
    token_logit_pairs = [
        (token, logits[idx].tolist()) for idx, token in enumerate(tokens)
    ]
    st.json(token_logit_pairs)