Upload folder using huggingface_hub
- README.md +11 -0
- added_tokens.json +1060 -0
- config.json +145 -0
- merges.txt +0 -0
- model-00001-of-00002.safetensors +3 -0
- model-00002-of-00002.safetensors +3 -0
- model.safetensors.index.json +967 -0
- norm_stats.json +418 -0
- special_tokens_map.json +47 -0
- tokenizer_config.json +0 -0
- vocab.json +0 -0
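
A commit like this one is typically produced with the `huggingface_hub` client, matching the commit message above. A minimal sketch, where the repo id and local folder path are placeholders (they are not recorded in this commit):

```python
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default

# Upload every file in the local checkpoint directory as a single commit.
# repo_id and folder_path below are hypothetical placeholders.
api.upload_folder(
    folder_path="./dm0_table30_scan_qr_code",
    repo_id="your-org/DM0-table30_scan_QR_code",
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```

Large binaries such as the `.safetensors` shards are stored via Git LFS automatically, which is why they appear below as LFS pointer files.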
README.md
ADDED
@@ -0,0 +1,11 @@
---
license: cc
---

This model is a DM0 supervised fine-tuned checkpoint for the RoboChallenge scan_QR_code task.

| Model | Description | Input Images | Action Dim | Model Size |
| - | - | - | - | - |
| DM0-table30_scan_QR_code | Vision-language action model | Triple Views | 32 | 2.4B |
added_tokens.json
ADDED
@@ -0,0 +1,1060 @@
{
  "</think>": 151668,
  "</tool_call>": 151658,
  "</tool_response>": 151666,
  "<action>0</action>": 151973,
  "<action>100</action>": 152073,
  "<action>101</action>": 152074,
  "<action>102</action>": 152075,
  "<action>103</action>": 152076,
  "<action>104</action>": 152077,
  "<action>105</action>": 152078,
  "<action>106</action>": 152079,
  "<action>107</action>": 152080,
  "<action>108</action>": 152081,
  "<action>109</action>": 152082,
  "<action>10</action>": 151983,
  "<action>110</action>": 152083,
  "<action>111</action>": 152084,
  "<action>112</action>": 152085,
  "<action>113</action>": 152086,
  "<action>114</action>": 152087,
  "<action>115</action>": 152088,
  "<action>116</action>": 152089,
  "<action>117</action>": 152090,
  "<action>118</action>": 152091,
  "<action>119</action>": 152092,
  "<action>11</action>": 151984,
  "<action>120</action>": 152093,
  "<action>121</action>": 152094,
  "<action>122</action>": 152095,
  "<action>123</action>": 152096,
  "<action>124</action>": 152097,
  "<action>125</action>": 152098,
  "<action>126</action>": 152099,
  "<action>127</action>": 152100,
  "<action>128</action>": 152101,
  "<action>129</action>": 152102,
  "<action>12</action>": 151985,
  "<action>130</action>": 152103,
  "<action>131</action>": 152104,
  "<action>132</action>": 152105,
  "<action>133</action>": 152106,
  "<action>134</action>": 152107,
  "<action>135</action>": 152108,
  "<action>136</action>": 152109,
  "<action>137</action>": 152110,
  "<action>138</action>": 152111,
  "<action>139</action>": 152112,
  "<action>13</action>": 151986,
  "<action>140</action>": 152113,
  "<action>141</action>": 152114,
  "<action>142</action>": 152115,
  "<action>143</action>": 152116,
  "<action>144</action>": 152117,
  "<action>145</action>": 152118,
  "<action>146</action>": 152119,
  "<action>147</action>": 152120,
  "<action>148</action>": 152121,
  "<action>149</action>": 152122,
  "<action>14</action>": 151987,
  "<action>150</action>": 152123,
  "<action>151</action>": 152124,
  "<action>152</action>": 152125,
  "<action>153</action>": 152126,
  "<action>154</action>": 152127,
  "<action>155</action>": 152128,
  "<action>156</action>": 152129,
  "<action>157</action>": 152130,
  "<action>158</action>": 152131,
  "<action>159</action>": 152132,
  "<action>15</action>": 151988,
  "<action>160</action>": 152133,
  "<action>161</action>": 152134,
  "<action>162</action>": 152135,
  "<action>163</action>": 152136,
  "<action>164</action>": 152137,
  "<action>165</action>": 152138,
  "<action>166</action>": 152139,
  "<action>167</action>": 152140,
  "<action>168</action>": 152141,
  "<action>169</action>": 152142,
  "<action>16</action>": 151989,
  "<action>170</action>": 152143,
  "<action>171</action>": 152144,
  "<action>172</action>": 152145,
  "<action>173</action>": 152146,
  "<action>174</action>": 152147,
  "<action>175</action>": 152148,
  "<action>176</action>": 152149,
  "<action>177</action>": 152150,
  "<action>178</action>": 152151,
  "<action>179</action>": 152152,
  "<action>17</action>": 151990,
  "<action>180</action>": 152153,
  "<action>181</action>": 152154,
  "<action>182</action>": 152155,
  "<action>183</action>": 152156,
  "<action>184</action>": 152157,
  "<action>185</action>": 152158,
  "<action>186</action>": 152159,
  "<action>187</action>": 152160,
  "<action>188</action>": 152161,
  "<action>189</action>": 152162,
  "<action>18</action>": 151991,
  "<action>190</action>": 152163,
  "<action>191</action>": 152164,
  "<action>192</action>": 152165,
  "<action>193</action>": 152166,
  "<action>194</action>": 152167,
  "<action>195</action>": 152168,
  "<action>196</action>": 152169,
  "<action>197</action>": 152170,
  "<action>198</action>": 152171,
  "<action>199</action>": 152172,
  "<action>19</action>": 151992,
  "<action>1</action>": 151974,
  "<action>200</action>": 152173,
  "<action>201</action>": 152174,
  "<action>202</action>": 152175,
  "<action>203</action>": 152176,
  "<action>204</action>": 152177,
  "<action>205</action>": 152178,
  "<action>206</action>": 152179,
  "<action>207</action>": 152180,
  "<action>208</action>": 152181,
  "<action>209</action>": 152182,
  "<action>20</action>": 151993,
  "<action>210</action>": 152183,
  "<action>211</action>": 152184,
  "<action>212</action>": 152185,
  "<action>213</action>": 152186,
  "<action>214</action>": 152187,
  "<action>215</action>": 152188,
  "<action>216</action>": 152189,
  "<action>217</action>": 152190,
  "<action>218</action>": 152191,
  "<action>219</action>": 152192,
  "<action>21</action>": 151994,
  "<action>220</action>": 152193,
  "<action>221</action>": 152194,
  "<action>222</action>": 152195,
  "<action>223</action>": 152196,
  "<action>224</action>": 152197,
  "<action>225</action>": 152198,
  "<action>226</action>": 152199,
  "<action>227</action>": 152200,
  "<action>228</action>": 152201,
  "<action>229</action>": 152202,
  "<action>22</action>": 151995,
  "<action>230</action>": 152203,
  "<action>231</action>": 152204,
  "<action>232</action>": 152205,
  "<action>233</action>": 152206,
  "<action>234</action>": 152207,
  "<action>235</action>": 152208,
  "<action>236</action>": 152209,
  "<action>237</action>": 152210,
  "<action>238</action>": 152211,
  "<action>239</action>": 152212,
  "<action>23</action>": 151996,
  "<action>240</action>": 152213,
  "<action>241</action>": 152214,
  "<action>242</action>": 152215,
  "<action>243</action>": 152216,
  "<action>244</action>": 152217,
  "<action>245</action>": 152218,
  "<action>246</action>": 152219,
  "<action>247</action>": 152220,
  "<action>248</action>": 152221,
  "<action>249</action>": 152222,
  "<action>24</action>": 151997,
  "<action>250</action>": 152223,
  "<action>251</action>": 152224,
  "<action>252</action>": 152225,
  "<action>253</action>": 152226,
  "<action>254</action>": 152227,
  "<action>255</action>": 152228,
  "<action>256</action>": 152229,
  "<action>257</action>": 152230,
  "<action>258</action>": 152231,
  "<action>259</action>": 152232,
  "<action>25</action>": 151998,
  "<action>260</action>": 152233,
  "<action>261</action>": 152234,
  "<action>262</action>": 152235,
  "<action>263</action>": 152236,
  "<action>264</action>": 152237,
  "<action>265</action>": 152238,
  "<action>266</action>": 152239,
  "<action>267</action>": 152240,
  "<action>268</action>": 152241,
  "<action>269</action>": 152242,
  "<action>26</action>": 151999,
  "<action>270</action>": 152243,
  "<action>271</action>": 152244,
  "<action>272</action>": 152245,
  "<action>273</action>": 152246,
  "<action>274</action>": 152247,
  "<action>275</action>": 152248,
  "<action>276</action>": 152249,
  "<action>277</action>": 152250,
  "<action>278</action>": 152251,
  "<action>279</action>": 152252,
  "<action>27</action>": 152000,
  "<action>280</action>": 152253,
  "<action>281</action>": 152254,
  "<action>282</action>": 152255,
  "<action>283</action>": 152256,
  "<action>284</action>": 152257,
  "<action>285</action>": 152258,
  "<action>286</action>": 152259,
  "<action>287</action>": 152260,
  "<action>288</action>": 152261,
  "<action>289</action>": 152262,
  "<action>28</action>": 152001,
  "<action>290</action>": 152263,
  "<action>291</action>": 152264,
  "<action>292</action>": 152265,
  "<action>293</action>": 152266,
  "<action>294</action>": 152267,
  "<action>295</action>": 152268,
  "<action>296</action>": 152269,
  "<action>297</action>": 152270,
  "<action>298</action>": 152271,
  "<action>299</action>": 152272,
  "<action>29</action>": 152002,
  "<action>2</action>": 151975,
  "<action>300</action>": 152273,
  "<action>301</action>": 152274,
  "<action>302</action>": 152275,
  "<action>303</action>": 152276,
  "<action>304</action>": 152277,
  "<action>305</action>": 152278,
  "<action>306</action>": 152279,
  "<action>307</action>": 152280,
  "<action>308</action>": 152281,
  "<action>309</action>": 152282,
  "<action>30</action>": 152003,
  "<action>310</action>": 152283,
  "<action>311</action>": 152284,
  "<action>312</action>": 152285,
  "<action>313</action>": 152286,
  "<action>314</action>": 152287,
  "<action>315</action>": 152288,
  "<action>316</action>": 152289,
  "<action>317</action>": 152290,
  "<action>318</action>": 152291,
  "<action>319</action>": 152292,
  "<action>31</action>": 152004,
  "<action>320</action>": 152293,
  "<action>321</action>": 152294,
  "<action>322</action>": 152295,
  "<action>323</action>": 152296,
  "<action>324</action>": 152297,
  "<action>325</action>": 152298,
  "<action>326</action>": 152299,
  "<action>327</action>": 152300,
  "<action>328</action>": 152301,
  "<action>329</action>": 152302,
  "<action>32</action>": 152005,
  "<action>330</action>": 152303,
  "<action>331</action>": 152304,
  "<action>332</action>": 152305,
  "<action>333</action>": 152306,
  "<action>334</action>": 152307,
  "<action>335</action>": 152308,
  "<action>336</action>": 152309,
  "<action>337</action>": 152310,
  "<action>338</action>": 152311,
  "<action>339</action>": 152312,
  "<action>33</action>": 152006,
  "<action>340</action>": 152313,
  "<action>341</action>": 152314,
  "<action>342</action>": 152315,
  "<action>343</action>": 152316,
  "<action>344</action>": 152317,
  "<action>345</action>": 152318,
  "<action>346</action>": 152319,
  "<action>347</action>": 152320,
  "<action>348</action>": 152321,
  "<action>349</action>": 152322,
  "<action>34</action>": 152007,
  "<action>350</action>": 152323,
  "<action>351</action>": 152324,
  "<action>352</action>": 152325,
  "<action>353</action>": 152326,
  "<action>354</action>": 152327,
  "<action>355</action>": 152328,
  "<action>356</action>": 152329,
  "<action>357</action>": 152330,
  "<action>358</action>": 152331,
  "<action>359</action>": 152332,
  "<action>35</action>": 152008,
  "<action>360</action>": 152333,
  "<action>361</action>": 152334,
  "<action>362</action>": 152335,
  "<action>363</action>": 152336,
  "<action>364</action>": 152337,
  "<action>365</action>": 152338,
  "<action>366</action>": 152339,
  "<action>367</action>": 152340,
  "<action>368</action>": 152341,
  "<action>369</action>": 152342,
  "<action>36</action>": 152009,
  "<action>370</action>": 152343,
  "<action>371</action>": 152344,
  "<action>372</action>": 152345,
  "<action>373</action>": 152346,
  "<action>374</action>": 152347,
  "<action>375</action>": 152348,
  "<action>376</action>": 152349,
  "<action>377</action>": 152350,
  "<action>378</action>": 152351,
  "<action>379</action>": 152352,
  "<action>37</action>": 152010,
  "<action>380</action>": 152353,
  "<action>381</action>": 152354,
  "<action>382</action>": 152355,
  "<action>383</action>": 152356,
  "<action>384</action>": 152357,
  "<action>385</action>": 152358,
  "<action>386</action>": 152359,
  "<action>387</action>": 152360,
  "<action>388</action>": 152361,
  "<action>389</action>": 152362,
  "<action>38</action>": 152011,
  "<action>390</action>": 152363,
  "<action>391</action>": 152364,
  "<action>392</action>": 152365,
  "<action>393</action>": 152366,
  "<action>394</action>": 152367,
  "<action>395</action>": 152368,
  "<action>396</action>": 152369,
  "<action>397</action>": 152370,
  "<action>398</action>": 152371,
  "<action>399</action>": 152372,
  "<action>39</action>": 152012,
  "<action>3</action>": 151976,
  "<action>400</action>": 152373,
  "<action>401</action>": 152374,
  "<action>402</action>": 152375,
  "<action>403</action>": 152376,
  "<action>404</action>": 152377,
  "<action>405</action>": 152378,
  "<action>406</action>": 152379,
  "<action>407</action>": 152380,
  "<action>408</action>": 152381,
  "<action>409</action>": 152382,
  "<action>40</action>": 152013,
  "<action>410</action>": 152383,
  "<action>411</action>": 152384,
  "<action>412</action>": 152385,
  "<action>413</action>": 152386,
  "<action>414</action>": 152387,
  "<action>415</action>": 152388,
  "<action>416</action>": 152389,
  "<action>417</action>": 152390,
  "<action>418</action>": 152391,
  "<action>419</action>": 152392,
  "<action>41</action>": 152014,
  "<action>420</action>": 152393,
  "<action>421</action>": 152394,
  "<action>422</action>": 152395,
  "<action>423</action>": 152396,
  "<action>424</action>": 152397,
  "<action>425</action>": 152398,
  "<action>426</action>": 152399,
  "<action>427</action>": 152400,
  "<action>428</action>": 152401,
  "<action>429</action>": 152402,
  "<action>42</action>": 152015,
  "<action>430</action>": 152403,
  "<action>431</action>": 152404,
  "<action>432</action>": 152405,
  "<action>433</action>": 152406,
  "<action>434</action>": 152407,
  "<action>435</action>": 152408,
  "<action>436</action>": 152409,
  "<action>437</action>": 152410,
  "<action>438</action>": 152411,
  "<action>439</action>": 152412,
  "<action>43</action>": 152016,
  "<action>440</action>": 152413,
  "<action>441</action>": 152414,
  "<action>442</action>": 152415,
  "<action>443</action>": 152416,
  "<action>444</action>": 152417,
  "<action>445</action>": 152418,
  "<action>446</action>": 152419,
  "<action>447</action>": 152420,
  "<action>448</action>": 152421,
  "<action>449</action>": 152422,
  "<action>44</action>": 152017,
  "<action>450</action>": 152423,
  "<action>451</action>": 152424,
  "<action>452</action>": 152425,
  "<action>453</action>": 152426,
  "<action>454</action>": 152427,
  "<action>455</action>": 152428,
  "<action>456</action>": 152429,
  "<action>457</action>": 152430,
  "<action>458</action>": 152431,
  "<action>459</action>": 152432,
  "<action>45</action>": 152018,
  "<action>460</action>": 152433,
  "<action>461</action>": 152434,
  "<action>462</action>": 152435,
  "<action>463</action>": 152436,
  "<action>464</action>": 152437,
  "<action>465</action>": 152438,
  "<action>466</action>": 152439,
  "<action>467</action>": 152440,
  "<action>468</action>": 152441,
  "<action>469</action>": 152442,
  "<action>46</action>": 152019,
  "<action>470</action>": 152443,
  "<action>471</action>": 152444,
  "<action>472</action>": 152445,
  "<action>473</action>": 152446,
  "<action>474</action>": 152447,
  "<action>475</action>": 152448,
  "<action>476</action>": 152449,
  "<action>477</action>": 152450,
  "<action>478</action>": 152451,
  "<action>479</action>": 152452,
  "<action>47</action>": 152020,
  "<action>480</action>": 152453,
  "<action>481</action>": 152454,
  "<action>482</action>": 152455,
  "<action>483</action>": 152456,
  "<action>484</action>": 152457,
  "<action>485</action>": 152458,
  "<action>486</action>": 152459,
  "<action>487</action>": 152460,
  "<action>488</action>": 152461,
  "<action>489</action>": 152462,
  "<action>48</action>": 152021,
  "<action>490</action>": 152463,
  "<action>491</action>": 152464,
  "<action>492</action>": 152465,
  "<action>493</action>": 152466,
  "<action>494</action>": 152467,
  "<action>495</action>": 152468,
  "<action>496</action>": 152469,
  "<action>497</action>": 152470,
  "<action>498</action>": 152471,
  "<action>499</action>": 152472,
  "<action>49</action>": 152022,
  "<action>4</action>": 151977,
  "<action>500</action>": 152473,
  "<action>501</action>": 152474,
  "<action>502</action>": 152475,
  "<action>503</action>": 152476,
  "<action>504</action>": 152477,
  "<action>505</action>": 152478,
  "<action>506</action>": 152479,
  "<action>507</action>": 152480,
  "<action>508</action>": 152481,
  "<action>509</action>": 152482,
  "<action>50</action>": 152023,
  "<action>510</action>": 152483,
  "<action>511</action>": 152484,
  "<action>512</action>": 152485,
  "<action>513</action>": 152486,
  "<action>514</action>": 152487,
  "<action>515</action>": 152488,
  "<action>516</action>": 152489,
  "<action>517</action>": 152490,
  "<action>518</action>": 152491,
  "<action>519</action>": 152492,
  "<action>51</action>": 152024,
  "<action>520</action>": 152493,
  "<action>521</action>": 152494,
  "<action>522</action>": 152495,
  "<action>523</action>": 152496,
  "<action>524</action>": 152497,
  "<action>525</action>": 152498,
  "<action>526</action>": 152499,
  "<action>527</action>": 152500,
  "<action>528</action>": 152501,
  "<action>529</action>": 152502,
  "<action>52</action>": 152025,
  "<action>530</action>": 152503,
  "<action>531</action>": 152504,
  "<action>532</action>": 152505,
  "<action>533</action>": 152506,
  "<action>534</action>": 152507,
  "<action>535</action>": 152508,
  "<action>536</action>": 152509,
  "<action>537</action>": 152510,
  "<action>538</action>": 152511,
  "<action>539</action>": 152512,
  "<action>53</action>": 152026,
  "<action>540</action>": 152513,
  "<action>541</action>": 152514,
  "<action>542</action>": 152515,
  "<action>543</action>": 152516,
  "<action>544</action>": 152517,
  "<action>545</action>": 152518,
  "<action>546</action>": 152519,
  "<action>547</action>": 152520,
  "<action>548</action>": 152521,
  "<action>549</action>": 152522,
  "<action>54</action>": 152027,
  "<action>550</action>": 152523,
  "<action>551</action>": 152524,
  "<action>552</action>": 152525,
  "<action>553</action>": 152526,
  "<action>554</action>": 152527,
  "<action>555</action>": 152528,
  "<action>556</action>": 152529,
  "<action>557</action>": 152530,
  "<action>558</action>": 152531,
  "<action>559</action>": 152532,
  "<action>55</action>": 152028,
  "<action>560</action>": 152533,
  "<action>561</action>": 152534,
  "<action>562</action>": 152535,
  "<action>563</action>": 152536,
  "<action>564</action>": 152537,
  "<action>565</action>": 152538,
  "<action>566</action>": 152539,
  "<action>567</action>": 152540,
  "<action>568</action>": 152541,
  "<action>569</action>": 152542,
  "<action>56</action>": 152029,
  "<action>570</action>": 152543,
  "<action>571</action>": 152544,
  "<action>572</action>": 152545,
  "<action>573</action>": 152546,
  "<action>574</action>": 152547,
  "<action>575</action>": 152548,
  "<action>576</action>": 152549,
  "<action>577</action>": 152550,
  "<action>578</action>": 152551,
  "<action>579</action>": 152552,
  "<action>57</action>": 152030,
  "<action>580</action>": 152553,
  "<action>581</action>": 152554,
  "<action>582</action>": 152555,
  "<action>583</action>": 152556,
  "<action>584</action>": 152557,
  "<action>585</action>": 152558,
  "<action>586</action>": 152559,
  "<action>587</action>": 152560,
  "<action>588</action>": 152561,
  "<action>589</action>": 152562,
  "<action>58</action>": 152031,
  "<action>590</action>": 152563,
  "<action>591</action>": 152564,
  "<action>592</action>": 152565,
  "<action>593</action>": 152566,
  "<action>594</action>": 152567,
  "<action>595</action>": 152568,
  "<action>596</action>": 152569,
  "<action>597</action>": 152570,
  "<action>598</action>": 152571,
  "<action>599</action>": 152572,
  "<action>59</action>": 152032,
  "<action>5</action>": 151978,
  "<action>600</action>": 152573,
  "<action>601</action>": 152574,
  "<action>602</action>": 152575,
  "<action>603</action>": 152576,
  "<action>604</action>": 152577,
  "<action>605</action>": 152578,
  "<action>606</action>": 152579,
  "<action>607</action>": 152580,
  "<action>608</action>": 152581,
  "<action>609</action>": 152582,
  "<action>60</action>": 152033,
  "<action>610</action>": 152583,
  "<action>611</action>": 152584,
  "<action>612</action>": 152585,
  "<action>613</action>": 152586,
  "<action>614</action>": 152587,
  "<action>615</action>": 152588,
  "<action>616</action>": 152589,
  "<action>617</action>": 152590,
  "<action>618</action>": 152591,
  "<action>619</action>": 152592,
  "<action>61</action>": 152034,
  "<action>620</action>": 152593,
  "<action>621</action>": 152594,
  "<action>622</action>": 152595,
  "<action>623</action>": 152596,
  "<action>624</action>": 152597,
  "<action>625</action>": 152598,
  "<action>626</action>": 152599,
  "<action>627</action>": 152600,
  "<action>628</action>": 152601,
  "<action>629</action>": 152602,
  "<action>62</action>": 152035,
  "<action>630</action>": 152603,
  "<action>631</action>": 152604,
  "<action>632</action>": 152605,
  "<action>633</action>": 152606,
  "<action>634</action>": 152607,
  "<action>635</action>": 152608,
  "<action>636</action>": 152609,
  "<action>637</action>": 152610,
  "<action>638</action>": 152611,
  "<action>639</action>": 152612,
  "<action>63</action>": 152036,
  "<action>640</action>": 152613,
  "<action>641</action>": 152614,
  "<action>642</action>": 152615,
  "<action>643</action>": 152616,
  "<action>644</action>": 152617,
  "<action>645</action>": 152618,
  "<action>646</action>": 152619,
  "<action>647</action>": 152620,
  "<action>648</action>": 152621,
  "<action>649</action>": 152622,
  "<action>64</action>": 152037,
  "<action>650</action>": 152623,
  "<action>651</action>": 152624,
  "<action>652</action>": 152625,
  "<action>653</action>": 152626,
  "<action>654</action>": 152627,
  "<action>655</action>": 152628,
  "<action>656</action>": 152629,
  "<action>657</action>": 152630,
  "<action>658</action>": 152631,
  "<action>659</action>": 152632,
  "<action>65</action>": 152038,
  "<action>660</action>": 152633,
  "<action>661</action>": 152634,
  "<action>662</action>": 152635,
  "<action>663</action>": 152636,
  "<action>664</action>": 152637,
  "<action>665</action>": 152638,
  "<action>666</action>": 152639,
  "<action>667</action>": 152640,
  "<action>668</action>": 152641,
  "<action>669</action>": 152642,
  "<action>66</action>": 152039,
  "<action>670</action>": 152643,
  "<action>671</action>": 152644,
  "<action>672</action>": 152645,
  "<action>673</action>": 152646,
  "<action>674</action>": 152647,
  "<action>675</action>": 152648,
  "<action>676</action>": 152649,
  "<action>677</action>": 152650,
  "<action>678</action>": 152651,
  "<action>679</action>": 152652,
  "<action>67</action>": 152040,
  "<action>680</action>": 152653,
  "<action>681</action>": 152654,
  "<action>682</action>": 152655,
  "<action>683</action>": 152656,
  "<action>684</action>": 152657,
  "<action>685</action>": 152658,
  "<action>686</action>": 152659,
  "<action>687</action>": 152660,
  "<action>688</action>": 152661,
  "<action>689</action>": 152662,
  "<action>68</action>": 152041,
  "<action>690</action>": 152663,
  "<action>691</action>": 152664,
  "<action>692</action>": 152665,
  "<action>693</action>": 152666,
  "<action>694</action>": 152667,
  "<action>695</action>": 152668,
  "<action>696</action>": 152669,
  "<action>697</action>": 152670,
  "<action>698</action>": 152671,
  "<action>699</action>": 152672,
  "<action>69</action>": 152042,
  "<action>6</action>": 151979,
  "<action>700</action>": 152673,
  "<action>701</action>": 152674,
  "<action>702</action>": 152675,
  "<action>703</action>": 152676,
  "<action>704</action>": 152677,
  "<action>705</action>": 152678,
  "<action>706</action>": 152679,
  "<action>707</action>": 152680,
  "<action>708</action>": 152681,
  "<action>709</action>": 152682,
  "<action>70</action>": 152043,
  "<action>710</action>": 152683,
  "<action>711</action>": 152684,
  "<action>712</action>": 152685,
  "<action>713</action>": 152686,
  "<action>714</action>": 152687,
  "<action>715</action>": 152688,
  "<action>716</action>": 152689,
  "<action>717</action>": 152690,
  "<action>718</action>": 152691,
  "<action>719</action>": 152692,
  "<action>71</action>": 152044,
  "<action>720</action>": 152693,
  "<action>721</action>": 152694,
  "<action>722</action>": 152695,
  "<action>723</action>": 152696,
  "<action>724</action>": 152697,
  "<action>725</action>": 152698,
  "<action>726</action>": 152699,
  "<action>727</action>": 152700,
  "<action>72</action>": 152045,
  "<action>73</action>": 152046,
  "<action>74</action>": 152047,
  "<action>75</action>": 152048,
  "<action>76</action>": 152049,
  "<action>77</action>": 152050,
  "<action>78</action>": 152051,
  "<action>79</action>": 152052,
  "<action>7</action>": 151980,
  "<action>80</action>": 152053,
  "<action>81</action>": 152054,
  "<action>82</action>": 152055,
  "<action>83</action>": 152056,
  "<action>84</action>": 152057,
  "<action>85</action>": 152058,
  "<action>86</action>": 152059,
  "<action>87</action>": 152060,
  "<action>88</action>": 152061,
  "<action>89</action>": 152062,
  "<action>8</action>": 151981,
  "<action>90</action>": 152063,
  "<action>91</action>": 152064,
  "<action>92</action>": 152065,
  "<action>93</action>": 152066,
  "<action>94</action>": 152067,
  "<action>95</action>": 152068,
  "<action>96</action>": 152069,
  "<action>97</action>": 152070,
  "<action>98</action>": 152071,
  "<action>99</action>": 152072,
  "<action>9</action>": 151982,
  "<action_0>": 151698,
  "<action_100>": 151798,
  "<action_101>": 151799,
  "<action_102>": 151800,
  "<action_103>": 151801,
  "<action_104>": 151802,
  "<action_105>": 151803,
  "<action_106>": 151804,
  "<action_107>": 151805,
  "<action_108>": 151806,
  "<action_109>": 151807,
  "<action_10>": 151708,
  "<action_110>": 151808,
  "<action_111>": 151809,
  "<action_112>": 151810,
  "<action_113>": 151811,
  "<action_114>": 151812,
  "<action_115>": 151813,
  "<action_116>": 151814,
  "<action_117>": 151815,
  "<action_118>": 151816,
  "<action_119>": 151817,
  "<action_11>": 151709,
  "<action_120>": 151818,
  "<action_121>": 151819,
  "<action_122>": 151820,
  "<action_123>": 151821,
  "<action_124>": 151822,
  "<action_125>": 151823,
  "<action_126>": 151824,
  "<action_127>": 151825,
  "<action_128>": 151826,
  "<action_129>": 151827,
  "<action_12>": 151710,
  "<action_130>": 151828,
  "<action_131>": 151829,
  "<action_132>": 151830,
  "<action_133>": 151831,
  "<action_134>": 151832,
  "<action_135>": 151833,
  "<action_136>": 151834,
  "<action_137>": 151835,
  "<action_138>": 151836,
  "<action_139>": 151837,
  "<action_13>": 151711,
  "<action_140>": 151838,
  "<action_141>": 151839,
  "<action_142>": 151840,
  "<action_143>": 151841,
  "<action_144>": 151842,
  "<action_145>": 151843,
  "<action_146>": 151844,
  "<action_147>": 151845,
  "<action_148>": 151846,
  "<action_149>": 151847,
  "<action_14>": 151712,
  "<action_150>": 151848,
  "<action_151>": 151849,
  "<action_152>": 151850,
  "<action_153>": 151851,
  "<action_154>": 151852,
  "<action_155>": 151853,
  "<action_156>": 151854,
  "<action_157>": 151855,
  "<action_158>": 151856,
  "<action_159>": 151857,
  "<action_15>": 151713,
  "<action_160>": 151858,
  "<action_161>": 151859,
  "<action_162>": 151860,
  "<action_163>": 151861,
  "<action_164>": 151862,
  "<action_165>": 151863,
  "<action_166>": 151864,
  "<action_167>": 151865,
  "<action_168>": 151866,
  "<action_169>": 151867,
  "<action_16>": 151714,
  "<action_170>": 151868,
  "<action_171>": 151869,
  "<action_172>": 151870,
  "<action_173>": 151871,
  "<action_174>": 151872,
  "<action_175>": 151873,
  "<action_176>": 151874,
  "<action_177>": 151875,
  "<action_178>": 151876,
  "<action_179>": 151877,
  "<action_17>": 151715,
  "<action_180>": 151878,
  "<action_181>": 151879,
  "<action_182>": 151880,
  "<action_183>": 151881,
  "<action_184>": 151882,
  "<action_185>": 151883,
  "<action_186>": 151884,
  "<action_187>": 151885,
  "<action_188>": 151886,
  "<action_189>": 151887,
  "<action_18>": 151716,
  "<action_190>": 151888,
  "<action_191>": 151889,
  "<action_192>": 151890,
  "<action_193>": 151891,
  "<action_194>": 151892,
  "<action_195>": 151893,
  "<action_196>": 151894,
  "<action_197>": 151895,
  "<action_198>": 151896,
  "<action_199>": 151897,
  "<action_19>": 151717,
  "<action_1>": 151699,
  "<action_200>": 151898,
  "<action_201>": 151899,
  "<action_202>": 151900,
  "<action_203>": 151901,
  "<action_204>": 151902,
  "<action_205>": 151903,
  "<action_206>": 151904,
  "<action_207>": 151905,
  "<action_208>": 151906,
  "<action_209>": 151907,
  "<action_20>": 151718,
  "<action_210>": 151908,
  "<action_211>": 151909,
  "<action_212>": 151910,
  "<action_213>": 151911,
  "<action_214>": 151912,
  "<action_215>": 151913,
  "<action_216>": 151914,
  "<action_217>": 151915,
  "<action_218>": 151916,
  "<action_219>": 151917,
  "<action_21>": 151719,
  "<action_220>": 151918,
  "<action_221>": 151919,
  "<action_222>": 151920,
  "<action_223>": 151921,
  "<action_224>": 151922,
  "<action_225>": 151923,
  "<action_226>": 151924,
  "<action_227>": 151925,
  "<action_228>": 151926,
  "<action_229>": 151927,
  "<action_22>": 151720,
  "<action_230>": 151928,
  "<action_231>": 151929,
  "<action_232>": 151930,
  "<action_233>": 151931,
  "<action_234>": 151932,
  "<action_235>": 151933,
  "<action_236>": 151934,
  "<action_237>": 151935,
  "<action_238>": 151936,
  "<action_239>": 151937,
  "<action_23>": 151721,
  "<action_240>": 151938,
  "<action_241>": 151939,
  "<action_242>": 151940,
  "<action_243>": 151941,
  "<action_244>": 151942,
  "<action_245>": 151943,
  "<action_246>": 151944,
  "<action_247>": 151945,
  "<action_248>": 151946,
  "<action_249>": 151947,
  "<action_24>": 151722,
  "<action_250>": 151948,
  "<action_251>": 151949,
  "<action_252>": 151950,
  "<action_253>": 151951,
  "<action_254>": 151952,
  "<action_25>": 151723,
  "<action_26>": 151724,
  "<action_27>": 151725,
  "<action_28>": 151726,
  "<action_29>": 151727,
  "<action_2>": 151700,
  "<action_30>": 151728,
  "<action_31>": 151729,
  "<action_32>": 151730,
  "<action_33>": 151731,
  "<action_34>": 151732,
  "<action_35>": 151733,
  "<action_36>": 151734,
  "<action_37>": 151735,
  "<action_38>": 151736,
  "<action_39>": 151737,
  "<action_3>": 151701,
  "<action_40>": 151738,
  "<action_41>": 151739,
  "<action_42>": 151740,
  "<action_43>": 151741,
  "<action_44>": 151742,
  "<action_45>": 151743,
  "<action_46>": 151744,
  "<action_47>": 151745,
  "<action_48>": 151746,
  "<action_49>": 151747,
  "<action_4>": 151702,
  "<action_50>": 151748,
  "<action_51>": 151749,
  "<action_52>": 151750,
  "<action_53>": 151751,
  "<action_54>": 151752,
  "<action_55>": 151753,
  "<action_56>": 151754,
  "<action_57>": 151755,
  "<action_58>": 151756,
  "<action_59>": 151757,
  "<action_5>": 151703,
  "<action_60>": 151758,
  "<action_61>": 151759,
  "<action_62>": 151760,
  "<action_63>": 151761,
  "<action_64>": 151762,
  "<action_65>": 151763,
  "<action_66>": 151764,
  "<action_67>": 151765,
  "<action_68>": 151766,
  "<action_69>": 151767,
  "<action_6>": 151704,
  "<action_70>": 151768,
  "<action_71>": 151769,
  "<action_72>": 151770,
  "<action_73>": 151771,
  "<action_74>": 151772,
  "<action_75>": 151773,
  "<action_76>": 151774,
  "<action_77>": 151775,
  "<action_78>": 151776,
  "<action_79>": 151777,
  "<action_7>": 151705,
  "<action_80>": 151778,
  "<action_81>": 151779,
  "<action_82>": 151780,
  "<action_83>": 151781,
  "<action_84>": 151782,
  "<action_85>": 151783,
  "<action_86>": 151784,
  "<action_87>": 151785,
  "<action_88>": 151786,
  "<action_89>": 151787,
  "<action_8>": 151706,
  "<action_90>": 151788,
  "<action_91>": 151789,
  "<action_92>": 151790,
  "<action_93>": 151791,
  "<action_94>": 151792,
  "<action_95>": 151793,
  "<action_96>": 151794,
  "<action_97>": 151795,
  "<action_98>": 151796,
  "<action_99>": 151797,
  "<action_9>": 151707,
  "<dream>": 151682,
  "<dream_end>": 151684,
  "<dream_start>": 151683,
  "<eef_control>": 151971,
  "<im_end>": 151681,
  "<im_patch>": 151679,
  "<im_start>": 151680,
  "<joint_control>": 151972,
  "<patch_end>": 151690,
  "<patch_newline>": 151691,
  "<patch_start>": 151689,
  "<robot_aloha>": 151955,
  "<robot_arx5>": 151957,
  "<robot_dlr_edan>": 151970,
  "<robot_fanuc_mate>": 151969,
  "<robot_franka>": 151954,
  "<robot_g1>": 151960,
  "<robot_google_robot>": 151965,
  "<robot_jaco2>": 151968,
  "<robot_kuka>": 151963,
  "<robot_r1_lite>": 151956,
  "<robot_realman_756f>": 151961,
  "<robot_sawyer>": 151967,
  "<robot_stretch>": 151966,
  "<robot_umi>": 151958,
  "<robot_ur5>": 151953,
  "<robot_widowx>": 151962,
  "<robot_xarm>": 151964,
  "<robot_z1>": 151959,
  "<think>": 151667,
  "<tool_call>": 151657,
  "<tool_response>": 151665,
  "<video_end>": 151688,
  "<video_start>": 151687,
  "<|BOT|>": 151670,
  "<|CALL_END|>": 151672,
  "<|CALL_START|>": 151671,
  "<|EOT|>": 151669,
  "<|IMG_END|>": 151676,
  "<|IMG_START|>": 151675,
  "<|MASK_1e69f|>": 151685,
  "<|META_END|>": 151678,
  "<|META_START|>": 151677,
  "<|THINK_END|>": 151674,
  "<|THINK_START|>": 151673,
  "<|UNMASK_1e69f|>": 151686,
  "<|box_end|>": 151649,
  "<|box_start|>": 151648,
  "<|endoftext|>": 151643,
  "<|file_sep|>": 151664,
  "<|fim_middle|>": 151660,
  "<|fim_pad|>": 151662,
  "<|fim_prefix|>": 151659,
  "<|fim_suffix|>": 151661,
  "<|im_end|>": 151645,
  "<|im_start|>": 151644,
  "<|image_pad|>": 151655,
  "<|object_ref_end|>": 151647,
  "<|object_ref_start|>": 151646,
  "<|quad_end|>": 151651,
  "<|quad_start|>": 151650,
  "<|repo_name|>": 151663,
  "<|video_pad|>": 151656,
  "<|vision_end|>": 151653,
  "<|vision_pad|>": 151654,
  "<|vision_start|>": 151652,
  "<|begin▁of▁mask|>": 151693,
  "<|begin▁of▁sentence|>": 151692,
  "<|end▁of▁mask|>": 151694,
  "<|end▁of▁sentence|>": 151697,
  "<|fim▁begin|>": 151695,
  "<|fim▁hole|>": 151696
}
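
Two regular patterns are visible in this token map and are convenient when working with the action vocabulary: the discretized action-bin tokens `<action>N</action>` occupy ids 151973 + N for N in 0..727, and the `<action_N>` slot tokens occupy ids 151698 + N for N in 0..254. A small sketch that rebuilds both contiguous ranges and checks them against entries above (the offsets are read off this file, not taken from any DM0 documentation):

```python
# Rebuild the two contiguous action-token id ranges observed in added_tokens.json.
ACTION_VALUE_BASE = 151973  # "<action>0</action>" -> 151973
ACTION_SLOT_BASE = 151698   # "<action_0>"        -> 151698

added_tokens = {f"<action>{n}</action>": ACTION_VALUE_BASE + n for n in range(728)}
added_tokens.update({f"<action_{n}>": ACTION_SLOT_BASE + n for n in range(255)})

# Spot-check against entries listed in the file above.
assert added_tokens["<action>100</action>"] == 152073
assert added_tokens["<action>727</action>"] == 152700
assert added_tokens["<action_254>"] == 151952
```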
config.json
ADDED
|
@@ -0,0 +1,145 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"action_config": {
|
| 3 |
+
"attention_bias": false,
|
| 4 |
+
"attention_dropout": 0.0,
|
| 5 |
+
"bos_token_id": 151643,
|
| 6 |
+
"eos_token_id": 151645,
|
| 7 |
+
"head_dim": 128,
|
| 8 |
+
"hidden_act": "silu",
|
| 9 |
+
"hidden_size": 1024,
|
| 10 |
+
"initializer_range": 0.02,
|
| 11 |
+
"intermediate_size": 1536,
|
| 12 |
+
"max_position_embeddings": 40960,
|
| 13 |
+
"max_window_layers": 28,
|
| 14 |
+
"model_type": "qwen3",
|
| 15 |
+
"num_attention_heads": 16,
|
| 16 |
+
"num_hidden_layers": 28,
|
| 17 |
+
"num_key_value_heads": 8,
|
| 18 |
+
"rms_norm_eps": 1e-06,
|
| 19 |
+
"rope_scaling": null,
|
| 20 |
+
"rope_theta": 1000000,
|
| 21 |
+
"sliding_window": 4096,
|
| 22 |
+
"use_cache": true,
|
| 23 |
+
"use_sliding_window": false,
|
| 24 |
+
"vocab_size": 151936
|
| 25 |
+
},
|
| 26 |
+
"action_dim": 32,
|
| 27 |
+
"ar_loss": true,
|
| 28 |
+
"ar_loss_weight": 1.0,
|
| 29 |
+
"architectures": [
|
| 30 |
+
"DM0ForCausalLM"
|
| 31 |
+
],
|
| 32 |
+
"attention_bias": false,
|
| 33 |
+
"attention_dropout": 0.0,
|
| 34 |
+
"chunk_size": 50,
|
| 35 |
+
"fm_loss": true,
|
| 36 |
+
"head_dim": 128,
|
| 37 |
+
"hidden_act": "silu",
|
| 38 |
+
"hidden_size": 2048,
|
| 39 |
+
"image_aspect_ratio": "pad",
|
| 40 |
+
"initializer_range": 0.02,
|
| 41 |
+
"intermediate_size": 6144,
|
| 42 |
+
"llm_config": {
|
| 43 |
+
"_name_or_path": "",
|
+    "add_cross_attention": false,
+    "architectures": null,
+    "attention_bias": false,
+    "attention_dropout": 0.0,
+    "bad_words_ids": null,
+    "begin_suppress_tokens": null,
+    "bos_token_id": 151643,
+    "chunk_size_feed_forward": 0,
+    "cross_attention_hidden_size": null,
+    "decoder_start_token_id": null,
+    "diversity_penalty": 0.0,
+    "do_sample": false,
+    "early_stopping": false,
+    "encoder_no_repeat_ngram_size": 0,
+    "eos_token_id": 151645,
+    "exponential_decay_length_penalty": null,
+    "finetuning_task": null,
+    "forced_bos_token_id": null,
+    "forced_eos_token_id": null,
+    "head_dim": 128,
+    "hidden_act": "silu",
+    "hidden_size": 2048,
+    "id2label": {
+      "0": "LABEL_0",
+      "1": "LABEL_1"
+    },
+    "initializer_range": 0.02,
+    "intermediate_size": 6144,
+    "is_decoder": false,
+    "is_encoder_decoder": false,
+    "label2id": {
+      "LABEL_0": 0,
+      "LABEL_1": 1
+    },
+    "length_penalty": 1.0,
+    "max_length": 20,
+    "max_position_embeddings": 40960,
+    "max_window_layers": 28,
+    "min_length": 0,
+    "model_type": "qwen3",
+    "no_repeat_ngram_size": 0,
+    "num_attention_heads": 16,
+    "num_beam_groups": 1,
+    "num_beams": 1,
+    "num_hidden_layers": 28,
+    "num_key_value_heads": 8,
+    "num_return_sequences": 1,
+    "output_attentions": false,
+    "output_hidden_states": false,
+    "output_scores": false,
+    "pad_token_id": null,
+    "prefix": null,
+    "problem_type": null,
+    "pruned_heads": {},
+    "remove_invalid_values": false,
+    "repetition_penalty": 1.0,
+    "return_dict": true,
+    "return_dict_in_generate": false,
+    "rms_norm_eps": 1e-06,
+    "rope_scaling": null,
+    "rope_theta": 1000000,
+    "sep_token_id": null,
+    "sliding_window": 4096,
+    "suppress_tokens": null,
+    "task_specific_params": null,
+    "temperature": 1.0,
+    "tf_legacy_loss": false,
+    "tie_encoder_decoder": false,
+    "tie_word_embeddings": false,
+    "tokenizer_class": null,
+    "top_k": 50,
+    "top_p": 1.0,
+    "torch_dtype": null,
+    "torchscript": false,
+    "typical_p": 1.0,
+    "use_bfloat16": false,
+    "use_cache": true,
+    "use_sliding_window": false,
+    "vocab_size": 152701
+  },
+  "max_position_embeddings": 40960,
+  "max_window_layers": 28,
+  "mm_hidden_size": 4096,
+  "mm_projector_type": "linear4x",
+  "mm_vision_tower": "pe_lang_l14_728",
+  "model_type": "dexbotic_dm0",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 8,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000,
+  "sliding_window": 4096,
+  "tokenizer_model_max_length": 2048,
+  "tokenizer_padding_side": "right",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.51.0",
+  "use_cache": false,
+  "use_mm_proj": true,
+  "use_sliding_window": false,
+  "vocab_size": 152701
+}
merges.txt
ADDED
The diff for this file is too large to render. See raw diff.
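merges.txt holds the byte-pair-encoding merge rules that, together with vocab.json and added_tokens.json, define the tokenizer. A minimal sketch of loading it, assuming the standard AutoTokenizer layout (the path below is a placeholder):

```python
# Minimal sketch, not part of the upload.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/to/this/checkpoint")  # placeholder path
# len(tok) counts base vocab plus added tokens; it should land close to the
# config's vocab_size of 152701 (an expectation, not verified here).
print(len(tok))
```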
model-00001-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:77c988d81b57e921829aaabfee4a208bdb452ca3409401233dce1ae1d260aec0
+size 4852908280
model-00002-of-00002.safetensors
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:db595a4663b60b3f0a9b78409b73bff93cff791a5d3a5fdb11c4b011c4531f58
+size 317594632
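Both .safetensors entries are Git LFS pointer files (version, oid, size), not the tensors themselves; the actual shards are fetched via LFS. A minimal sketch (not part of the upload) for checking a downloaded shard against its pointer by recomputing the SHA-256 and byte size:

```python
import hashlib
from pathlib import Path

def verify_shard(path: str, expected_oid: str, expected_size: int) -> bool:
    """Return True iff the local file matches the LFS pointer's oid and size."""
    p = Path(path)
    h = hashlib.sha256()
    with p.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == expected_oid and p.stat().st_size == expected_size

# Values copied from the pointer files above.
print(verify_shard(
    "model-00001-of-00002.safetensors",
    "77c988d81b57e921829aaabfee4a208bdb452ca3409401233dce1ae1d260aec0",
    4852908280,
))
```

The model.safetensors.index.json that follows maps every tensor name to one of these two shards.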
model.safetensors.index.json
ADDED
@@ -0,0 +1,967 @@
+{
+  "metadata": {
+    "total_size": 5170374720
+  },
+  "weight_map": {
+    "model.action_expert.lm_head.weight": "model-00002-of-00002.safetensors",
+    "model.action_expert.model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.26.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.27.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.action_expert.model.norm.weight": "model-00001-of-00002.safetensors",
+    "model.action_in_proj.bias": "model-00002-of-00002.safetensors",
+    "model.action_in_proj.weight": "model-00002-of-00002.safetensors",
+    "model.action_out_proj.bias": "model-00002-of-00002.safetensors",
+    "model.action_out_proj.weight": "model-00002-of-00002.safetensors",
+    "model.action_time_mlp_in.bias": "model-00002-of-00002.safetensors",
+    "model.action_time_mlp_in.weight": "model-00002-of-00002.safetensors",
+    "model.action_time_mlp_out.bias": "model-00002-of-00002.safetensors",
+    "model.action_time_mlp_out.weight": "model-00002-of-00002.safetensors",
+    "model.llm.embed_tokens.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.25.input_layernorm.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.25.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.25.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.25.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
+    "model.llm.layers.25.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 528 |
+
"model.llm.layers.25.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 529 |
+
"model.llm.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 530 |
+
"model.llm.layers.25.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 531 |
+
"model.llm.layers.25.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 532 |
+
"model.llm.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 533 |
+
"model.llm.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 534 |
+
"model.llm.layers.26.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 535 |
+
"model.llm.layers.26.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 536 |
+
"model.llm.layers.26.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 537 |
+
"model.llm.layers.26.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 538 |
+
"model.llm.layers.26.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 539 |
+
"model.llm.layers.26.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 540 |
+
"model.llm.layers.26.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 541 |
+
"model.llm.layers.26.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 542 |
+
"model.llm.layers.26.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 543 |
+
"model.llm.layers.26.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 544 |
+
"model.llm.layers.26.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 545 |
+
"model.llm.layers.27.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 546 |
+
"model.llm.layers.27.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 547 |
+
"model.llm.layers.27.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 548 |
+
"model.llm.layers.27.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 549 |
+
"model.llm.layers.27.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 550 |
+
"model.llm.layers.27.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 551 |
+
"model.llm.layers.27.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 552 |
+
"model.llm.layers.27.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 553 |
+
"model.llm.layers.27.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 554 |
+
"model.llm.layers.27.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 555 |
+
"model.llm.layers.27.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 556 |
+
"model.llm.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 557 |
+
"model.llm.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 558 |
+
"model.llm.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 559 |
+
"model.llm.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 560 |
+
"model.llm.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 561 |
+
"model.llm.layers.3.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 562 |
+
"model.llm.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 563 |
+
"model.llm.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 564 |
+
"model.llm.layers.3.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 565 |
+
"model.llm.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 566 |
+
"model.llm.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 567 |
+
"model.llm.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 568 |
+
"model.llm.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 569 |
+
"model.llm.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 570 |
+
"model.llm.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 571 |
+
"model.llm.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 572 |
+
"model.llm.layers.4.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 573 |
+
"model.llm.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 574 |
+
"model.llm.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 575 |
+
"model.llm.layers.4.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 576 |
+
"model.llm.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 577 |
+
"model.llm.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 578 |
+
"model.llm.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 579 |
+
"model.llm.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 580 |
+
"model.llm.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 581 |
+
"model.llm.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 582 |
+
"model.llm.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 583 |
+
"model.llm.layers.5.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 584 |
+
"model.llm.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 585 |
+
"model.llm.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 586 |
+
"model.llm.layers.5.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 587 |
+
"model.llm.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 588 |
+
"model.llm.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 589 |
+
"model.llm.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 590 |
+
"model.llm.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 591 |
+
"model.llm.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 592 |
+
"model.llm.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 593 |
+
"model.llm.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 594 |
+
"model.llm.layers.6.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 595 |
+
"model.llm.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 596 |
+
"model.llm.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 597 |
+
"model.llm.layers.6.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 598 |
+
"model.llm.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 599 |
+
"model.llm.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 600 |
+
"model.llm.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 601 |
+
"model.llm.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 602 |
+
"model.llm.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 603 |
+
"model.llm.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 604 |
+
"model.llm.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 605 |
+
"model.llm.layers.7.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 606 |
+
"model.llm.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 607 |
+
"model.llm.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 608 |
+
"model.llm.layers.7.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 609 |
+
"model.llm.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 610 |
+
"model.llm.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 611 |
+
"model.llm.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 612 |
+
"model.llm.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 613 |
+
"model.llm.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 614 |
+
"model.llm.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 615 |
+
"model.llm.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 616 |
+
"model.llm.layers.8.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 617 |
+
"model.llm.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 618 |
+
"model.llm.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 619 |
+
"model.llm.layers.8.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 620 |
+
"model.llm.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 621 |
+
"model.llm.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 622 |
+
"model.llm.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 623 |
+
"model.llm.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
|
| 624 |
+
"model.llm.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
|
| 625 |
+
"model.llm.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
|
| 626 |
+
"model.llm.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
|
| 627 |
+
"model.llm.layers.9.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
|
| 628 |
+
"model.llm.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
|
| 629 |
+
"model.llm.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
|
| 630 |
+
"model.llm.layers.9.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
|
| 631 |
+
"model.llm.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
|
| 632 |
+
"model.llm.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
|
| 633 |
+
"model.llm.norm.weight": "model-00001-of-00002.safetensors",
|
| 634 |
+
"model.mm_projector.weight": "model-00001-of-00002.safetensors",
|
| 635 |
+
"model.mm_vision_tower.vision_tower.class_embedding": "model-00001-of-00002.safetensors",
|
| 636 |
+
"model.mm_vision_tower.vision_tower.conv1.weight": "model-00001-of-00002.safetensors",
|
| 637 |
+
"model.mm_vision_tower.vision_tower.ln_pre.bias": "model-00001-of-00002.safetensors",
|
| 638 |
+
"model.mm_vision_tower.vision_tower.ln_pre.weight": "model-00001-of-00002.safetensors",
|
| 639 |
+
"model.mm_vision_tower.vision_tower.positional_embedding": "model-00001-of-00002.safetensors",
|
| 640 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 641 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 642 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 643 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 644 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 645 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 646 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 647 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 648 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 649 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 650 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 651 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 652 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 653 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.0.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 654 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 655 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 656 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 657 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 658 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 659 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 660 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 661 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 662 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 663 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 664 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 665 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 666 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 667 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.1.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 668 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 669 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 670 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 671 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 672 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 673 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 674 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 675 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 676 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 677 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 678 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 679 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 680 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 681 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.10.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 682 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 683 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 684 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 685 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 686 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 687 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 688 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 689 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 690 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 691 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 692 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 693 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 694 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 695 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.11.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 696 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 697 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 698 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 699 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 700 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 701 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 702 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 703 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 704 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 705 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 706 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 707 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 708 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 709 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.12.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 710 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 711 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 712 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 713 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 714 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 715 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 716 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 717 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 718 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 719 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 720 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 721 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 722 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 723 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.13.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 724 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 725 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 726 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 727 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 728 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 729 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 730 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 731 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 732 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 733 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 734 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 735 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 736 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 737 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.14.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 738 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 739 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 740 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 741 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 742 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 743 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 744 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 745 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 746 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 747 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 748 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 749 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 750 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 751 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.15.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 752 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 753 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 754 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 755 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 756 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 757 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 758 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 759 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 760 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 761 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 762 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 763 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 764 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 765 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.16.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 766 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 767 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 768 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 769 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 770 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 771 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 772 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 773 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 774 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 775 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 776 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 777 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 778 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 779 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.17.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 780 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 781 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 782 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 783 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 784 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 785 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 786 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 787 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 788 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 789 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 790 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 791 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 792 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 793 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.18.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 794 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 795 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 796 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 797 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 798 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 799 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 800 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 801 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 802 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 803 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 804 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 805 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 806 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 807 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.19.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 808 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 809 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 810 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 811 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 812 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 813 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 814 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 815 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 816 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 817 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 818 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 819 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 820 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 821 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.2.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 822 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 823 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 824 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 825 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 826 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 827 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 828 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 829 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 830 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 831 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 832 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 833 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 834 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 835 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.20.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 836 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 837 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 838 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 839 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 840 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 841 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 842 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 843 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 844 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 845 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 846 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 847 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 848 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 849 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.21.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 850 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 851 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 852 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 853 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 854 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 855 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 856 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 857 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 858 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 859 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 860 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 861 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 862 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 863 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.22.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 864 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 865 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 866 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 867 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 868 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 869 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 870 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 871 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 872 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 873 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 874 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 875 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 876 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 877 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.3.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 878 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 879 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 880 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 881 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 882 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 883 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 884 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 885 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 886 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 887 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 888 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 889 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 890 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 891 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.4.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 892 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 893 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 894 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 895 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 896 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 897 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 898 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 899 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 900 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 901 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 902 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 903 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 904 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 905 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.5.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 906 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 907 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 908 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 909 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 910 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 911 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 912 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 913 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 914 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 915 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 916 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 917 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 918 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 919 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.6.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 920 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 921 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 922 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 923 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 924 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 925 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 926 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 927 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 928 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 929 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 930 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 931 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 932 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 933 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.7.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 934 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 935 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 936 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 937 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 938 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 939 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 940 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 941 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 942 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 943 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 944 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 945 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 946 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 947 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.8.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 948 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.attn.in_proj_bias": "model-00001-of-00002.safetensors",
|
| 949 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.attn.in_proj_weight": "model-00001-of-00002.safetensors",
|
| 950 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.attn.out_proj.bias": "model-00001-of-00002.safetensors",
|
| 951 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.attn.out_proj.weight": "model-00001-of-00002.safetensors",
|
| 952 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.ln_1.bias": "model-00001-of-00002.safetensors",
|
| 953 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.ln_1.weight": "model-00001-of-00002.safetensors",
|
| 954 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.ln_2.bias": "model-00001-of-00002.safetensors",
|
| 955 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.ln_2.weight": "model-00001-of-00002.safetensors",
|
| 956 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.ls_1.gamma": "model-00001-of-00002.safetensors",
|
| 957 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.ls_2.gamma": "model-00001-of-00002.safetensors",
|
| 958 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
|
| 959 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
|
| 960 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
|
| 961 |
+
"model.mm_vision_tower.vision_tower.transformer.resblocks.9.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
|
| 962 |
+
"model.mm_vision_tower.vision_tower.vit_downsampler1.bias": "model-00001-of-00002.safetensors",
|
| 963 |
+
"model.mm_vision_tower.vision_tower.vit_downsampler1.weight": "model-00001-of-00002.safetensors",
|
| 964 |
+
"model.mm_vision_tower.vision_tower.vit_downsampler2.bias": "model-00001-of-00002.safetensors",
|
| 965 |
+
"model.mm_vision_tower.vision_tower.vit_downsampler2.weight": "model-00001-of-00002.safetensors"
|
| 966 |
+
}
|
| 967 |
+
}
|
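The `weight_map` above is the standard sharded-safetensors index: each tensor name points at the shard file that stores it. A minimal sketch of resolving one tensor through the index, assuming the usual `json` and `safetensors` APIs (the tensor name here is just one taken from the map above):

```python
import json
from safetensors import safe_open

# Load the shard index that accompanies the sharded checkpoint.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

# weight_map maps each tensor name to the shard file holding it.
name = "model.llm.layers.19.self_attn.k_proj.weight"
shard = index["weight_map"][name]  # e.g. "model-00001-of-00002.safetensors"

# Open only that shard and read the single tensor lazily.
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)

print(name, tuple(tensor.shape))
```

Loaders such as `transformers.from_pretrained` do the same resolution internally, which is why only the shards a tensor actually lives in need to be read.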
norm_stats.json
ADDED
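`norm_stats.json` below records per-dimension normalization statistics for the 32-dim action space: `min`/`max` and 1st/99th-percentile (`q01`/`q99`) bounds, with unused trailing dimensions pinned to zero, plus a `default` range of [-1, 1]. A hedged sketch of how such percentile bounds are commonly applied to scale raw actions into the model's [-1, 1] range; the function names are illustrative, not this repository's API:

```python
import json

import numpy as np

with open("norm_stats.json") as f:
    stats = json.load(f)["action"]

q01 = np.asarray(stats["q01"])
q99 = np.asarray(stats["q99"])
span = q99 - q01

def normalize(action: np.ndarray) -> np.ndarray:
    """Map raw actions into [-1, 1] via the q01/q99 bounds.

    Dimensions with zero span (the zero-padded tail) pass through
    unchanged to avoid division by zero. Illustrative only.
    """
    scaled = np.where(span > 0, 2.0 * (action - q01) / np.where(span > 0, span, 1.0) - 1.0, action)
    return np.clip(scaled, -1.0, 1.0)

def denormalize(action: np.ndarray) -> np.ndarray:
    """Inverse map from [-1, 1] back to raw action units."""
    return np.where(span > 0, (action + 1.0) / 2.0 * span + q01, action)
```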
@@ -0,0 +1,418 @@
+{
+    "default": {
+        "min": -1,
+        "max": 1
+    },
+    "action": {
+        "min": [-0.29664285260066386, -1.0064209161698818, -0.9700927966460585, -0.7063018346458674, -0.8859181877970694, -1.2271162762641907, 5.093999952077865e-05, -0.36686077966094016, -1.3326892539063468, -1.0772549609303474, -0.4578383115693927, -0.6990939566493034, -1.0249067378878594, 0.001097599977394566, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "q01": [-0.29664285260066386, -1.0064209161698818, -0.9700927966460585, -0.7063018346458674, -0.8859181877970694, -1.2271162762641907, 5.093999952077865e-05, -0.36686077966094016, -1.3326892539063468, -1.0772549609303474, -0.4578383115693927, -0.6990939566493034, -1.0249067378878594, 0.001097599977394566, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "max": [0.31680365440770986, 1.55446404479444, 0.8437494644150139, 0.8567714843034744, 0.9194872800707818, 1.115740490913391, 0.064897559389472, 0.34572673292458056, 1.5243960717454552, 0.9029977995753287, 0.4493604528889059, 0.8104894745349887, 0.8790016122460362, 0.09958143641608767, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "q99": [0.31680365440770986, 1.55446404479444, 0.8437494644150139, 0.8567714843034744, 0.9194872800707818, 1.115740490913391, 0.064897559389472, 0.34572673292458056, 1.5243960717454552, 0.9029977995753287, 0.4493604528889059, 0.8104894745349887, 0.8790016122460362, 0.09958143641608767, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "mean": [0.0003966177615766301, -0.001015278850267402, 0.0011589070075299658, -0.00030084021439238517, -0.0006273830688070092, -0.00038493126583616653, 0.020112000550920602, -0.0071700415321164, -0.032652913413482094, 0.026740316402796796, -0.006595703345263428, -0.014728355854055951, 0.003061622476766039, 0.043490193388128415, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "std": [0.08520092985551739, 0.40391918787554953, 0.2600454646154714, 0.24051768341568502, 0.29104021572897126, 0.36971027984458243, 0.02167139818247563, 0.09872109554056098, 0.39762897406109704, 0.2685085507656408, 0.143449967154996, 0.21974380023596493, 0.2935887069532265, 0.035147037677825795, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
+    },
+    "state": {
+        "min": [-0.48777978316545484, -0.00044117931202054036, -1.413148864141479, -1.1187900252342224, -0.5297525460720062, -1.5704759739398955, 6.79199993610382e-05, -0.06590842202305794, 0.002297797024145257, -1.577302715587616, -0.4193348071575165, -0.38875938205718996, -1.5892602016448976, 0.001097599977394566, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "q01": [-0.48777978316545484, -0.00044117931202054036, -1.413148864141479, -1.1187900252342224, -0.5297525460720062, -1.5704759739398955, 6.79199993610382e-05, -0.06590842202305794, 0.002297797024145257, -1.577302715587616, -0.4193348071575165, -0.38875938205718996, -1.5892602016448976, 0.001097599977394566, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "max": [0.12846337782144557, 1.974319732106477, 0.005549456139840148, 1.7441358847618105, 1.2150322723388673, 1.7416288507938384, 0.06576353938132525, 0.6493011023700237, 2.088287352469284, 0.007228130289167245, 0.7374681864738464, 1.1344388277053832, 0.422852683258057, 0.09958143641608767, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "q99": [0.12846337782144557, 1.974319732106477, 0.005549456139840148, 1.7441358847618105, 1.2150322723388673, 1.7416288507938384, 0.06576353938132525, 0.6493011023700237, 2.088287352469284, 0.007228130289167245, 0.7374681864738464, 1.1344388277053832, 0.422852683258057, 0.09958143641608767, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "mean": [-0.1354223477616232, 0.748403122283525, -0.5159049054048651, -0.1711923571877502, 0.364577625836545, 0.29703506164399085, 0.02017463243228523, 0.3006864438950653, 1.1462652393698405, -0.7940032384471981, 0.1025005142460481, 0.46216051787791945, -0.33569846724711794, 0.04569733551901795, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
+        "std": [0.15234792966982214, 0.7464528376594433, 0.49087351472607793, 0.40467612653395385, 0.45911776899290985, 0.5456512144583491, 0.0216803011322912, 0.18163186458356742, 0.7252287039062986, 0.5040179423451102, 0.2280523076654934, 0.4050891587812491, 0.45898793127109044, 0.03489119633732977, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
+    }
+}
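`norm_stats.json` records per-dimension statistics (min/max, 1st/99th percentiles, mean, std) for the 32-dimensional action and state vectors; only the first 14 dimensions carry data, and the trailing 18 are zero padding. Below is a minimal sketch of how such stats are typically consumed, assuming the common q01/q99 scaling into [-1, 1] used by many VLA pipelines; the checkpoint's actual training-time convention may differ:

```python
import json

import numpy as np

with open("norm_stats.json") as f:
    action_stats = json.load(f)["action"]

q01 = np.array(action_stats["q01"])
q99 = np.array(action_stats["q99"])
# Padded dimensions have q01 == q99 == 0; give them unit span to avoid 0/0.
span = np.where(q99 - q01 > 1e-8, q99 - q01, 1.0)

def normalize(action: np.ndarray) -> np.ndarray:
    """Map a raw 32-dim action into [-1, 1] via the 1st/99th percentiles."""
    return np.clip(2.0 * (action - q01) / span - 1.0, -1.0, 1.0)

def unnormalize(action: np.ndarray) -> np.ndarray:
    """Invert normalize(): map a model output in [-1, 1] back to raw units."""
    return (action + 1.0) / 2.0 * span + q01

raw = np.zeros(32)
# Inside the percentile range the roundtrip is the identity (up to clipping).
assert np.allclose(unnormalize(normalize(raw)), np.clip(raw, q01, q99))
```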
special_tokens_map.json
ADDED
@@ -0,0 +1,47 @@
+{
+    "additional_special_tokens": [
+        "<|EOT|>",
+        "<|BOT|>",
+        "<|CALL_START|>",
+        "<|CALL_END|>",
+        "<|THINK_START|>",
+        "<|THINK_END|>",
+        "<|IMG_START|>",
+        "<|IMG_END|>",
+        "<|META_START|>",
+        "<|META_END|>",
+        "<im_patch>",
+        "<im_start>",
+        "<im_end>",
+        "<dream>",
+        "<dream_start>",
+        "<dream_end>",
+        "<|MASK_1e69f|>",
+        "<|UNMASK_1e69f|>",
+        "<video_start>",
+        "<video_end>",
+        "<patch_start>",
+        "<patch_end>",
+        "<patch_newline>",
+        "<|begin▁of▁sentence|>",
+        "<|begin▁of▁mask|>",
+        "<|end▁of▁mask|>",
+        "<|fim▁begin|>",
+        "<|fim▁hole|>",
+        "<|end▁of▁sentence|>"
+    ],
+    "eos_token": {
+        "content": "<|im_end|>",
+        "lstrip": false,
+        "normalized": false,
+        "rstrip": false,
+        "single_word": false
+    },
+    "pad_token": {
+        "content": "<|endoftext|>",
+        "lstrip": false,
+        "normalized": false,
+        "rstrip": false,
+        "single_word": false
+    }
+}
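Because the extra control tokens above only take effect if the tokenizer files load cleanly, a quick sanity check is worthwhile. A sketch using `AutoTokenizer` (the checkpoint path is a placeholder, and `trust_remote_code` may or may not be required depending on the tokenizer class):

```python
from transformers import AutoTokenizer

# Placeholder path: point this at the downloaded checkpoint directory.
tok = AutoTokenizer.from_pretrained("path/to/DM0-table30_scan_QR_code", trust_remote_code=True)

print(tok.eos_token)  # expected: "<|im_end|>"
print(tok.pad_token)  # expected: "<|endoftext|>"
print(len(tok.additional_special_tokens))  # expected: 29, e.g. "<|IMG_START|>", "<dream>"
```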
tokenizer_config.json
ADDED
The diff for this file is too large to render. See raw diff.

vocab.json
ADDED
The diff for this file is too large to render. See raw diff.