{
"_name_or_path": "distilbert-base-uncased",
"activation": "gelu",
"architectures": [
"DistilBertForSequenceClassification"
],
"attention_dropout": 0.1,
"dim": 768,
"dropout": 0.1,
"hidden_dim": 3072,
"id2label": {
"0": "Beauty & Care",
"1": "Books & Stationery",
"2": "Business Expense",
"3": "Car Loan",
"4": "Cash Withdrawal",
"5": "Credit Card",
"6": "Dentist",
"7": "Donation & Social",
"8": "Education",
"9": "Electricity & Water Bills",
"10": "Entertainment",
"11": "Eye Care",
"12": "Family",
"13": "Fashion",
"14": "Fees & Charges",
"15": "Food & Drink",
"16": "Gadget & Electronics",
"17": "Gas & Fuel",
"18": "Gifts",
"19": "Groceries & General",
"20": "Health & Medical",
"21": "Home / Rent",
"22": "Home Loan",
"23": "Household Tools",
"24": "Insurance",
"25": "Investment",
"26": "Kids",
"27": "Laundry",
"28": "Loan",
"29": "Pets",
"30": "Printshop",
"31": "Public Transport",
"32": "Service & Auto Parts",
"33": "Services Needs",
"34": "Shipping",
"35": "Shopping",
"36": "Sports",
"37": "Subscription",
"38": "Taxes",
"39": "Telephone & Internet",
"40": "Top Up",
"41": "Transport",
"42": "Travel"
},
"initializer_range": 0.02,
"label2id": {
"Beauty & Care": 0,
"Books & Stationery": 1,
"Business Expense": 2,
"Car Loan": 3,
"Cash Withdrawal": 4,
"Credit Card": 5,
"Dentist": 6,
"Donation & Social": 7,
"Education": 8,
"Electricity & Water Bills": 9,
"Entertainment": 10,
"Eye Care": 11,
"Family": 12,
"Fashion": 13,
"Fees & Charges": 14,
"Food & Drink": 15,
"Gadget & Electronics": 16,
"Gas & Fuel": 17,
"Gifts": 18,
"Groceries & General": 19,
"Health & Medical": 20,
"Home / Rent": 21,
"Home Loan": 22,
"Household Tools": 23,
"Insurance": 24,
"Investment": 25,
"Kids": 26,
"Laundry": 27,
"Loan": 28,
"Pets": 29,
"Printshop": 30,
"Public Transport": 31,
"Service & Auto Parts": 32,
"Services Needs": 33,
"Shipping": 34,
"Shopping": 35,
"Sports": 36,
"Subscription": 37,
"Taxes": 38,
"Telephone & Internet": 39,
"Top Up": 40,
"Transport": 41,
"Travel": 42
},
"max_position_embeddings": 512,
"model_type": "distilbert",
"n_heads": 12,
"n_layers": 6,
"pad_token_id": 0,
"problem_type": "single_label_classification",
"qa_dropout": 0.1,
"seq_classif_dropout": 0.2,
"sinusoidal_pos_embds": false,
"tie_weights_": true,
"torch_dtype": "float32",
"transformers_version": "4.27.4",
"vocab_size": 30522
}