{
  "_name_or_path": "google/mobilenet_v2_1.0_224",
  "architectures": [
    "MobileNetV2ForImageClassification"
  ],
  "classifier_dropout_prob": 0.2,
  "depth_divisible_by": 8,
  "depth_multiplier": 1.0,
  "expand_ratio": 6,
  "finegrained_output": true,
  "first_layer_is_expansion": true,
  "hidden_act": "relu6",
  "id2label": {
    "0": "aicchdii",
    "1": "aihlhlng",
    "2": "aikhaik",
    "3": "aikhpaa",
    "4": "aisaicch",
    "5": "aithyrath",
    "6": "araephs",
    "7": "binbn",
    "8": "borm",
    "9": "bupheph",
    "10": "cchanghan",
    "11": "chaaanaay",
    "12": "chataa",
    "13": "chingchang",
    "14": "dkbaw",
    "15": "edkelk",
    "16": "ekbehd",
    "17": "fanhan",
    "18": "khmaicch",
    "19": "khwaamsukh",
    "20": "nakeriiyn",
    "21": "nanglng",
    "22": "ngaanbaan",
    "23": "ochkhdii",
    "24": "omaanaacch",
    "25": "siththi",
    "26": "smkhwr"
  },
  "image_size": 224,
  "initializer_range": 0.02,
  "label2id": {
    "aicchdii": 0,
    "aihlhlng": 1,
    "aikhaik": 2,
    "aikhpaa": 3,
    "aisaicch": 4,
    "aithyrath": 5,
    "araephs": 6,
    "binbn": 7,
    "borm": 8,
    "bupheph": 9,
    "cchanghan": 10,
    "chaaanaay": 11,
    "chataa": 12,
    "chingchang": 13,
    "dkbaw": 14,
    "edkelk": 15,
    "ekbehd": 16,
    "fanhan": 17,
    "khmaicch": 18,
    "khwaamsukh": 19,
    "nakeriiyn": 20,
    "nanglng": 21,
    "ngaanbaan": 22,
    "ochkhdii": 23,
    "omaanaacch": 24,
    "siththi": 25,
    "smkhwr": 26
  },
  "layer_norm_eps": 0.001,
  "min_depth": 8,
  "model_type": "mobilenet_v2",
  "num_channels": 3,
  "output_stride": 32,
  "problem_type": "single_label_classification",
  "semantic_loss_ignore_index": 255,
  "tf_padding": true,
  "torch_dtype": "float32",
  "transformers_version": "4.48.3"
}