Text Generation
Transformers
Safetensors
English
pulselm
ppg
multimodal
health
qwen
physiology
biosignal
conversational
custom_code
# Use a pipeline as a high-level helper
from transformers import pipeline

# trust_remote_code=True is required: PulseLM ships custom model code on the Hub.
pipe = pipeline("text-generation", model="Manhph2211/PulseLM", trust_remote_code=True)
messages = [
    {"role": "user", "content": "Who are you?"},
]
pipe(messages)

# Load model directly
from transformers import AutoModel
model = AutoModel.from_pretrained("Manhph2211/PulseLM", trust_remote_code=True, dtype="auto")Quick Links
PulseLM: A Foundation Dataset and Benchmark for PPG-Text Learning
Quick Start
# transformers>=4.46.0 accelerate>=1.0.1 peft>=0.13.2 safetensors
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Tokenizer and model both need trust_remote_code=True (custom Hub code).
tokenizer = AutoTokenizer.from_pretrained("Manhph2211/PulseLM", trust_remote_code=True)

# bfloat16 weights; device_map="auto" lets accelerate place layers across
# available devices.
model = AutoModelForCausalLM.from_pretrained(
    "Manhph2211/PulseLM",
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)
- Downloads last month
- 172
# Gated model: log in with a Hugging Face token that has gated-access permission: `hf auth login`