{
  "architectures": [
    "VisionEncoderDecoderModel"
  ],
  "decoder": {
    "_name_or_path": "google-bert/bert-base-multilingual-cased",
    "add_cross_attention": true,
    "architectures": [
      "BertForMaskedLM"
    ],
    "attention_probs_dropout_prob": 0.1,
    "bad_words_ids": null,
    "begin_suppress_tokens": null,
    "bos_token_id": null,
    "chunk_size_feed_forward": 0,
    "classifier_dropout": null,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "directionality": "bidi",
    "diversity_penalty": 0.0,
    "do_sample": false,
    "early_stopping": false,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": null,
    "exponential_decay_length_penalty": null,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.1,
    "hidden_size": 768,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "is_decoder": true,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "layer_norm_eps": 1e-12,
    "length_penalty": 1.0,
    "max_length": 20,
    "max_position_embeddings": 512,
    "min_length": 0,
    "model_type": "bert",
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 12,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_hidden_layers": 12,
    "num_return_sequences": 1,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": 0,
    "pooler_fc_size": 768,
    "pooler_num_attention_heads": 12,
    "pooler_num_fc_layers": 3,
    "pooler_size_per_head": 128,
    "pooler_type": "first_token_transform",
    "position_embedding_type": "absolute",
    "prefix": null,
    "problem_type": null,
    "pruned_heads": {},
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "return_dict": true,
    "return_dict_in_generate": false,
    "sep_token_id": null,
    "suppress_tokens": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "tf_legacy_loss": false,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": true,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1.0,
    "torch_dtype": null,
    "torchscript": false,
    "type_vocab_size": 2,
    "typical_p": 1.0,
    "use_bfloat16": false,
    "use_cache": true,
    "vocab_size": 119547
  },
  "decoder_start_token_id": 0,
  "early_stopping": null,
  "encoder": {
    "_name_or_path": "Zannatul/google-vit-base-patch16-384-in21k-batch_16_epoch_4_classes_24_final_withAug_12th_May",
    "add_cross_attention": false,
    "architectures": [
      "ViTForImageClassification"
    ],
    "attention_probs_dropout_prob": 0.0,
    "bad_words_ids": null,
    "begin_suppress_tokens": null,
    "bos_token_id": null,
    "chunk_size_feed_forward": 0,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "diversity_penalty": 0.0,
    "do_sample": false,
    "early_stopping": false,
    "encoder_no_repeat_ngram_size": 0,
    "encoder_stride": 16,
    "eos_token_id": null,
    "exponential_decay_length_penalty": null,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.0,
    "hidden_size": 768,
    "id2label": {
      "0": "Bhapa Pitha(\u09ad\u09be\u09aa\u09be \u09aa\u09bf\u09a0\u09be)",
      "1": "Biriyani(\u09ac\u09bf\u09b0\u09bf\u09df\u09be\u09a8\u09bf)",
      "2": "Chicken Pulao(\u09ae\u09cb\u09b0\u0997 \u09aa\u09cb\u09b2\u09be\u0993)",
      "3": "Chickpease Bhuna(\u099b\u09cb\u09b2\u09be\u09ad\u09c1\u09a8\u09be)",
      "4": "Egg Curry(\u09a1\u09bf\u09ae\u09ad\u09c1\u09a8\u09be)",
      "5": "Falooda(\u09ab\u09be\u09b2\u09c1\u09a6\u09be)",
      "6": "Fuchka(\u09ab\u09c1\u099a\u0995\u09be)",
      "7": "Haleem(\u09b9\u09be\u09b2\u09bf\u09ae)",
      "8": "Jalebi(\u099c\u09bf\u09b2\u09be\u09aa\u09c0)",
      "9": "Kala Bhuna(\u0995\u09be\u09b2\u09be \u09ad\u09c1\u09a8\u09be)",
      "10": "Khichuri(\u0996\u09bf\u099a\u09c1\u09a1\u09bc\u09bf)",
      "11": "Malpua Pitha(\u09ae\u09be\u09b2\u09aa\u09c1\u09df\u09be \u09aa\u09bf\u09a0\u09be)",
      "12": "Mustard Hilsa(\u09b8\u09b0\u09b7\u09c7 \u0987\u09b2\u09bf\u09b6)",
      "13": "Nakshi Pitha(\u09a8\u0995\u09b6\u09bf \u09aa\u09bf\u09a0\u09be)",
      "14": "Panta Ilish(\u09aa\u09be\u09a8\u09cd\u09a4\u09be \u0987\u09b2\u09bf\u09b6)",
      "15": "Patishapta Pitha(\u09aa\u09be\u099f\u09bf\u09b8\u09be\u09aa\u099f\u09be)",
      "16": "Prawn Malai Curry(\u099a\u09bf\u0982\u09dc\u09bf \u09ae\u09be\u09b2\u09be\u0987\u0995\u09be\u09b0\u09c0)",
      "17": "Rasgulla(\u09b0\u09b8\u0997\u09cb\u09b2\u09cd\u09b2\u09be)",
      "18": "Rose Cookies(\u09ab\u09c1\u09b2\u099d\u09c1\u09b0\u09bf \u09aa\u09bf\u09a0\u09be)",
      "19": "Roshmalai(\u09b0\u09b8\u09ae\u09be\u09b2\u09be\u0987)",
      "20": "Shahi Tukra(\u09b6\u09be\u09b9\u09bf \u099f\u09c1\u0995\u09b0\u09be)",
      "21": "Shingara(\u09b8\u09bf\u0999\u09cd\u0997\u09be\u09b0\u09be)",
      "22": "Sweet Yogurt(\u09ae\u09bf\u09b7\u09cd\u099f\u09bf \u09a6\u0987)",
      "23": "Tehari(\u09a4\u09c7\u09b9\u09be\u09b0\u09bf)"
    },
    "image_size": 384,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "label2id": {
      "Bhapa Pitha(\u09ad\u09be\u09aa\u09be \u09aa\u09bf\u09a0\u09be)": 0,
      "Biriyani(\u09ac\u09bf\u09b0\u09bf\u09df\u09be\u09a8\u09bf)": 1,
      "Chicken Pulao(\u09ae\u09cb\u09b0\u0997 \u09aa\u09cb\u09b2\u09be\u0993)": 2,
      "Chickpease Bhuna(\u099b\u09cb\u09b2\u09be\u09ad\u09c1\u09a8\u09be)": 3,
      "Egg Curry(\u09a1\u09bf\u09ae\u09ad\u09c1\u09a8\u09be)": 4,
      "Falooda(\u09ab\u09be\u09b2\u09c1\u09a6\u09be)": 5,
      "Fuchka(\u09ab\u09c1\u099a\u0995\u09be)": 6,
      "Haleem(\u09b9\u09be\u09b2\u09bf\u09ae)": 7,
      "Jalebi(\u099c\u09bf\u09b2\u09be\u09aa\u09c0)": 8,
      "Kala Bhuna(\u0995\u09be\u09b2\u09be \u09ad\u09c1\u09a8\u09be)": 9,
      "Khichuri(\u0996\u09bf\u099a\u09c1\u09a1\u09bc\u09bf)": 10,
      "Malpua Pitha(\u09ae\u09be\u09b2\u09aa\u09c1\u09df\u09be \u09aa\u09bf\u09a0\u09be)": 11,
      "Mustard Hilsa(\u09b8\u09b0\u09b7\u09c7 \u0987\u09b2\u09bf\u09b6)": 12,
      "Nakshi Pitha(\u09a8\u0995\u09b6\u09bf \u09aa\u09bf\u09a0\u09be)": 13,
      "Panta Ilish(\u09aa\u09be\u09a8\u09cd\u09a4\u09be \u0987\u09b2\u09bf\u09b6)": 14,
      "Patishapta Pitha(\u09aa\u09be\u099f\u09bf\u09b8\u09be\u09aa\u099f\u09be)": 15,
      "Prawn Malai Curry(\u099a\u09bf\u0982\u09dc\u09bf \u09ae\u09be\u09b2\u09be\u0987\u0995\u09be\u09b0\u09c0)": 16,
      "Rasgulla(\u09b0\u09b8\u0997\u09cb\u09b2\u09cd\u09b2\u09be)": 17,
      "Rose Cookies(\u09ab\u09c1\u09b2\u099d\u09c1\u09b0\u09bf \u09aa\u09bf\u09a0\u09be)": 18,
      "Roshmalai(\u09b0\u09b8\u09ae\u09be\u09b2\u09be\u0987)": 19,
      "Shahi Tukra(\u09b6\u09be\u09b9\u09bf \u099f\u09c1\u0995\u09b0\u09be)": 20,
      "Shingara(\u09b8\u09bf\u0999\u09cd\u0997\u09be\u09b0\u09be)": 21,
      "Sweet Yogurt(\u09ae\u09bf\u09b7\u09cd\u099f\u09bf \u09a6\u0987)": 22,
      "Tehari(\u09a4\u09c7\u09b9\u09be\u09b0\u09bf)": 23
    },
    "layer_norm_eps": 1e-12,
    "length_penalty": 1.0,
    "max_length": 20,
    "min_length": 0,
    "model_type": "vit",
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 12,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_channels": 3,
    "num_hidden_layers": 12,
    "num_return_sequences": 1,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": null,
    "patch_size": 16,
    "prefix": null,
    "problem_type": "single_label_classification",
    "pruned_heads": {},
    "qkv_bias": true,
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "return_dict": true,
    "return_dict_in_generate": false,
    "sep_token_id": null,
    "suppress_tokens": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "tf_legacy_loss": false,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": true,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1.0,
    "torch_dtype": "float32",
    "torchscript": false,
    "typical_p": 1.0,
    "use_bfloat16": false
  },
  "eos_token_id": 2,
  "is_encoder_decoder": true,
  "length_penalty": null,
  "max_length": null,
  "model_type": "vision-encoder-decoder",
  "no_repeat_ngram_size": null,
  "num_beams": null,
  "pad_token_id": 1,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.45.2",
  "vocab_size": 50265
}