# mytest-model/main.py
import pandas as pd
from huggingface_hub import HfApi, login
from transformers import BertTokenizer, BertForSequenceClassification, Trainer, TrainingArguments
from datasets import Dataset
# Log in to Hugging Face
login()  # make sure you are logged in to your Hugging Face account
# Load dataset
data = {
    'text': ["I love programming!", "I hate bugs.", "Python is great.", "I dislike syntax errors."],
    'label': [1, 0, 1, 0]  # 1 for positive sentiment, 0 for negative sentiment
}
df = pd.DataFrame(data)
# Convert to Hugging Face dataset
dataset = Dataset.from_pandas(df)
# Split the dataset into training and evaluation sets
train_test_split = dataset.train_test_split(test_size=0.2) # 80% train, 20% eval
train_dataset = train_test_split['train']
eval_dataset = train_test_split['test']
# Tokenize the text
tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
def tokenize_function(examples):
    return tokenizer(examples["text"], padding="max_length", truncation=True)
# Tokenize the training and evaluation data
tokenized_train = train_dataset.map(tokenize_function, batched=True)
tokenized_eval = eval_dataset.map(tokenize_function, batched=True)
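# For illustration only: map() keeps the original columns and adds the
# tokenizer outputs alongside them ('input_ids', 'token_type_ids',
# 'attention_mask' for a BERT tokenizer), which the Trainer consumes below.
print(tokenized_train.column_names)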
# Define model
model = BertForSequenceClassification.from_pretrained('bert-base-uncased', num_labels=2)
# Set training arguments
training_args = TrainingArguments(
    output_dir='./results',
    evaluation_strategy="epoch",  # note: renamed to eval_strategy in newer transformers releases
    learning_rate=2e-5,
    per_device_train_batch_size=2,
    num_train_epochs=3,
    weight_decay=0.01,
)
# Train the model
trainer = Trainer(
    model=model,
    args=training_args,
    train_dataset=tokenized_train,
    eval_dataset=tokenized_eval,  # provide the evaluation dataset here
)
trainer.train()
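# Optional sanity check (a minimal sketch): trainer.evaluate() runs the model
# over eval_dataset and returns a metrics dict. Since no compute_metrics
# function was passed to the Trainer, this reports only the evaluation loss.
metrics = trainer.evaluate()
print(f"Eval metrics: {metrics}")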
# Save the model
model.save_pretrained('./mytest-model')
tokenizer.save_pretrained('./mytest-model')
# Define model ID
model_id = "aslan-asilon3/mytest-model"
api = HfApi()
# Create a new repo on Hugging Face if it doesn't already exist
try:
    api.create_repo(repo_id=model_id)  # alternatively, pass exist_ok=True and drop the try/except
except Exception as e:
    print(f"Repo may already exist: {e}")
# Upload model dan tokenizer
model.push_to_hub(model_id)
tokenizer.push_to_hub(model_id)
print(f"Model berhasil diunggah ke Hugging Face: {model_id}")