tlhoang committed on
Commit da0f61f · 1 Parent(s): 24f05f6
Example.ipynb DELETED
@@ -1,291 +0,0 @@
- {
-  "cells": [
-   {
-    "cell_type": "code",
-    "execution_count": 1,
-    "id": "274b013f-9f96-4973-875f-f3910e2789c4",
-    "metadata": {},
-    "outputs": [],
-    "source": [
-     "from transformers import AutoModelForSeq2SeqLM\n",
-     "from model.utils import MorganFingerprint, morgan_fingerprint_to_text, clean_output, smiles_to_3d\n",
-     "from model.tokenizer import SmilesTokenizer"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": 2,
-    "id": "b8c70b09-8c7d-439b-bd42-4cfbb41715ca",
-    "metadata": {},
-    "outputs": [],
-    "source": [
-     "checkpoint_path=\"lamthuy/MorganGen\"\n",
-     "model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint_path)\n",
-     "tokenizer = SmilesTokenizer(vocab_file=\"data/vocab_morgan.txt\")"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": 3,
-    "id": "9c49a040-13fd-4a63-aef1-6112a42f3eed",
-    "metadata": {},
-    "outputs": [
-     {
-      "name": "stderr",
-      "output_type": "stream",
-      "text": [
-       "[22:30:12] DEPRECATION WARNING: please use MorganGenerator\n"
-      ]
-     }
-    ],
-    "source": [
-     "smiles=\"CC(=O)OC1=CC=CC=C1C(=O)O\"\n",
-     "m = MorganFingerprint()\n",
-     "mf = m.smiles_to_morgan(smiles)"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": 4,
-    "id": "a7be100a-8cdd-48cf-a6cb-5444d793e9c2",
-    "metadata": {},
-    "outputs": [
-     {
-      "name": "stdout",
-      "output_type": "stream",
-      "text": [
-       "[389][456][650][695][807][909][1017][1035][1047][1057][1088][1199][1380][1410][1447][1468][1616][1729][1750][1775][1873][1917][1970][1991]\n"
-      ]
-     }
-    ],
-    "source": [
-     "s = morgan_fingerprint_to_text(mf)\n",
-     "print(s)"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": 5,
-    "id": "6740dcbc-86a4-4b32-a439-91312dd4a9fc",
-    "metadata": {},
-    "outputs": [],
-    "source": [
-     "input_ids = tokenizer.encode(s, return_tensors=\"pt\")\n",
-     "# Generate output sequence\n",
-     "output_ids = model.generate(input_ids, max_length=64, num_beams=5)"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": 6,
-    "id": "f423bbe8-5717-4757-84f1-ab6dd5531ea7",
-    "metadata": {},
-    "outputs": [],
-    "source": [
-     "clean_output_ids = clean_output(output_ids[0])\n",
-     "# Decode the generated output\n",
-     "output_text = tokenizer.decode(clean_output_ids)\n",
-     "output_text = output_text.replace(\" \", \"\")"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": 7,
-    "id": "e2678826-caa9-4cb2-bc4d-0ab5e54f76d0",
-    "metadata": {},
-    "outputs": [
-     {
-      "name": "stdout",
-      "output_type": "stream",
-      "text": [
-       "CC(=O)Oc1ccc(OC(O)=C(C)O)cc1OC1CC1\n"
-      ]
-     }
-    ],
-    "source": [
-     "print(output_text)"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": 8,
-    "id": "c4dcff84-4552-48ca-8b2c-8f2d4911ec8a",
-    "metadata": {},
-    "outputs": [
-     {
-      "data": {
-       "text/html": "(interactive 3Dmol.js viewer of the input molecule; embedded HTML/JS/PDB payload omitted)"
-      },
-      "metadata": {},
-      "output_type": "display_data"
-     },
-     {
-      "data": {
-       "text/plain": [
-        "<py3Dmol.view at 0x16c184d40>"
-       ]
-      },
-      "execution_count": 8,
-      "metadata": {},
-      "output_type": "execute_result"
-     }
-    ],
-    "source": [
-     "smiles_to_3d([smiles])"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": 9,
-    "id": "e3f26dd2-4ef5-44cd-b52c-103feb68aea8",
-    "metadata": {},
-    "outputs": [
-     {
-      "data": {
-       "text/html": "(interactive 3Dmol.js viewer of the generated molecule; embedded HTML/JS/PDB payload omitted)"
-      },
-      "metadata": {},
-      "output_type": "display_data"
-     },
-     {
-      "data": {
-       "text/plain": [
-        "<py3Dmol.view at 0x17615aed0>"
-       ]
-      },
-      "execution_count": 9,
-      "metadata": {},
-      "output_type": "execute_result"
-     }
-    ],
-    "source": [
-     "smiles_to_3d([output_text])"
-    ]
-   },
-   {
-    "cell_type": "code",
-    "execution_count": null,
-    "id": "492c8624-cfb4-414e-a07b-a083ba74b108",
-    "metadata": {},
-    "outputs": [],
-    "source": []
-   }
-  ],
-  "metadata": {
-   "kernelspec": {
-    "display_name": "Python 3 (ipykernel)",
-    "language": "python",
-    "name": "python3"
-   },
-   "language_info": {
-    "codemirror_mode": {
-     "name": "ipython",
-     "version": 3
-    },
-    "file_extension": ".py",
-    "mimetype": "text/x-python",
-    "name": "python",
-    "nbconvert_exporter": "python",
-    "pygments_lexer": "ipython3",
-    "version": "3.12.10"
-   }
-  },
-  "nbformat": 4,
-  "nbformat_minor": 5
- }
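
The `clean_output` helper used in cell 6 is imported from `model.utils`, whose source is not part of this diff. A plausible minimal stand-in, assuming it simply drops the BERT-style special tokens that head `data/vocab_morgan.txt` ([PAD]=0, [UNK]=11, [CLS]=12, [SEP]=13, [MASK]=14), would be:

```python
def clean_output(ids, special_ids=frozenset({0, 11, 12, 13, 14})):
    """Hypothetical sketch of model.utils.clean_output: drop special-token ids
    before decoding. The repository's actual implementation may differ."""
    return [i for i in ids.tolist() if i not in special_ids]
```
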
README.md DELETED
@@ -1,51 +0,0 @@
- ---
- license: mit
- ---
- ## MorganGen
- A generative model trained on 120 million SMILES strings from the ZINC database. The model takes as input a sequence of indices representing the active bits of a 2048-bit Morgan fingerprint; each listed index corresponds to a bit set to 1, and every other bit is 0. For example,
- ```
- s = [12][184][1200]
- ```
- represents a fingerprint in which only bits 12, 184, and 1200 are set to 1 and all remaining bits are 0.
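- The helper `morgan_fingerprint_to_text` produces this bracketed format. As a rough sketch of the conversion (not the repository's actual implementation; the fingerprint radius is an assumption), RDKit's `MorganGenerator` API can be used directly, which also avoids the deprecation warning visible in the example notebook:
- ```python
- from rdkit import Chem
- from rdkit.Chem import rdFingerprintGenerator
- 
- def fingerprint_to_text(smiles: str, n_bits: int = 2048, radius: int = 2) -> str:
-     """Illustrative re-implementation: one bracketed token per active bit."""
-     mol = Chem.MolFromSmiles(smiles)
-     generator = rdFingerprintGenerator.GetMorganGenerator(radius=radius, fpSize=n_bits)
-     fp = generator.GetFingerprint(mol)  # ExplicitBitVect of length n_bits
-     return "".join(f"[{bit}]" for bit in fp.GetOnBits())
- 
- print(fingerprint_to_text("CC(=O)OC1=CC=CC=C1C(=O)O"))  # e.g. "[389][456][650]..."
- ```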
- ## Running example
- The following snippet, mirrored from the example notebook, shows how to load the model from a checkpoint and generate a new SMILES string conditioned on the Morgan fingerprint of a given input SMILES.
- ```python
- from transformers import AutoModelForSeq2SeqLM
- from model.utils import MorganFingerprint, morgan_fingerprint_to_text, clean_output, smiles_to_3d
- from model.tokenizer import SmilesTokenizer
- 
- # Load the checkpoint and the tokenizer
- checkpoint_path = "lamthuy/MorganGen"
- model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint_path)
- tokenizer = SmilesTokenizer(vocab_file="data/vocab_morgan.txt")
- 
- # Given a SMILES string, compute its Morgan fingerprint
- smiles = "CC(=O)OC1=CC=CC=C1C(=O)O"
- m = MorganFingerprint()
- mf = m.smiles_to_morgan(smiles)
- 
- # Convert the fingerprint to the bit-index text format
- s = morgan_fingerprint_to_text(mf)
- 
- # Encode the text and generate the output sequence with beam search
- input_ids = tokenizer.encode(s, return_tensors="pt")
- output_ids = model.generate(input_ids, max_length=64, num_beams=5)
- 
- # Decode the generated output and strip whitespace between tokens
- clean_output_ids = clean_output(output_ids[0])
- output_text = tokenizer.decode(clean_output_ids)
- output_text = output_text.replace(" ", "")
- ```
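- 
- Since generation is conditioned on a fingerprint rather than on the SMILES itself, a useful sanity check is to parse `output_text` and compare its fingerprint with the input's. A minimal sketch with RDKit (the radius and fingerprint size are assumptions matching the 2048-bit format above):
- ```python
- from rdkit import Chem, DataStructs
- from rdkit.Chem import rdFingerprintGenerator
- 
- gen = rdFingerprintGenerator.GetMorganGenerator(radius=2, fpSize=2048)
- mol_in = Chem.MolFromSmiles(smiles)
- mol_out = Chem.MolFromSmiles(output_text)  # None if the generated SMILES is invalid
- if mol_out is not None:
-     sim = DataStructs.TanimotoSimilarity(gen.GetFingerprint(mol_in),
-                                          gen.GetFingerprint(mol_out))
-     print(f"Tanimoto similarity to the input fingerprint: {sim:.3f}")
- ```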
-
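- The imported `smiles_to_3d` helper renders molecules in an interactive 3Dmol.js viewer, as the example notebook does for both the input and the generated molecule. A minimal sketch of that kind of rendering with RDKit and py3Dmol (the exact signature of the repository's helper may differ):
- ```python
- from rdkit import Chem
- from rdkit.Chem import AllChem
- import py3Dmol
- 
- def show_3d(smiles: str) -> py3Dmol.view:
-     # Embed and optimize a 3D conformer, then hand the PDB block to py3Dmol
-     mol = Chem.AddHs(Chem.MolFromSmiles(smiles))
-     AllChem.EmbedMolecule(mol)
-     AllChem.MMFFOptimizeMolecule(mol)
-     view = py3Dmol.view(width=400, height=300)
-     view.addModel(Chem.MolToPDBBlock(mol), "pdb")
-     view.setStyle({"stick": {}})
-     view.zoomTo()
-     return view
- 
- show_3d("CC(=O)OC1=CC=CC=C1C(=O)O")
- ```
-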
- ## Reference
- ```
- @inproceedings{hoang2024morgangen,
-   title={MorganGen: Generative Modeling of SMILES Using Morgan Fingerprint Features},
-   author={Hoang, Lam Thanh and D{\'\i}az, Ra{\'u}l Fern{\'a}ndez and Lopez, Vanessa},
-   booktitle={American Chemical Society (ACS) Fall Meeting},
-   year={2024}
- }
- ```
config.json DELETED
@@ -1,75 +0,0 @@
- {
-   "_name_or_path": "facebook/bart-base",
-   "activation_dropout": 0.1,
-   "activation_function": "gelu",
-   "add_bias_logits": false,
-   "add_final_layer_norm": false,
-   "architectures": [
-     "BartForConditionalGeneration"
-   ],
-   "attention_dropout": 0.1,
-   "bos_token_id": 0,
-   "classif_dropout": 0.1,
-   "classifier_dropout": 0.0,
-   "d_model": 768,
-   "decoder_attention_heads": 12,
-   "decoder_ffn_dim": 3072,
-   "decoder_layerdrop": 0.0,
-   "decoder_layers": 6,
-   "decoder_start_token_id": 2,
-   "dropout": 0.1,
-   "early_stopping": true,
-   "encoder_attention_heads": 12,
-   "encoder_ffn_dim": 3072,
-   "encoder_layerdrop": 0.0,
-   "encoder_layers": 6,
-   "eos_token_id": 2,
-   "forced_bos_token_id": 0,
-   "forced_eos_token_id": 2,
-   "gradient_checkpointing": false,
-   "id2label": {
-     "0": "LABEL_0",
-     "1": "LABEL_1",
-     "2": "LABEL_2"
-   },
-   "init_std": 0.02,
-   "is_encoder_decoder": true,
-   "label2id": {
-     "LABEL_0": 0,
-     "LABEL_1": 1,
-     "LABEL_2": 2
-   },
-   "max_position_embeddings": 1024,
-   "model_type": "bart",
-   "no_repeat_ngram_size": 3,
-   "normalize_before": false,
-   "normalize_embedding": true,
-   "num_beams": 4,
-   "num_hidden_layers": 6,
-   "pad_token_id": 1,
-   "scale_embedding": false,
-   "task_specific_params": {
-     "summarization": {
-       "length_penalty": 1.0,
-       "max_length": 128,
-       "min_length": 12,
-       "num_beams": 4
-     },
-     "summarization_cnn": {
-       "length_penalty": 2.0,
-       "max_length": 142,
-       "min_length": 56,
-       "num_beams": 4
-     },
-     "summarization_xsum": {
-       "length_penalty": 1.0,
-       "max_length": 62,
-       "min_length": 11,
-       "num_beams": 6
-     }
-   },
-   "torch_dtype": "float16",
-   "transformers_version": "4.37.0",
-   "use_cache": true,
-   "vocab_size": 50265
- }
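
The configuration above keeps several `facebook/bart-base` defaults, including `vocab_size: 50265` and the summarization `task_specific_params`, even though `data/vocab_morgan.txt` below defines 2,670 tokens. A quick way to inspect the shipped configuration, as a sketch:

```python
from transformers import AutoConfig

config = AutoConfig.from_pretrained("lamthuy/MorganGen")
print(config.model_type, config.d_model, config.encoder_layers, config.vocab_size)
# -> bart 768 6 50265
```
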
data/vocab_morgan.txt DELETED
@@ -1,2670 +0,0 @@
- [PAD]
- [unused1]
- [unused2]
- [unused3]
- [unused4]
- [unused5]
- [unused6]
- [unused7]
- [unused8]
- [unused9]
- [unused10]
- [UNK]
- [CLS]
- [SEP]
- [MASK]
- c
- C
- (
- )
- O
- 1
- 2
- =
- N
- .
- n
- 3
- F
- Cl
- >>
- ~
- -
- 4
- [C@H]
- S
- [C@@H]
- [O-]
- Br
- #
- /
- [nH]
- [N+]
- s
- 5
- o
- P
- [Na+]
- [Si]
- I
- [Na]
- [Pd]
- [K+]
- [K]
- [P]
- B
- [C@]
- [C@@]
- [Cl-]
- 6
- [OH-]
- \
- [N-]
- [Li]
- [H]
- [2H]
- [NH4+]
- [c-]
- [P-]
- [Cs+]
- [Li+]
- [Cs]
- [NaH]
- [H-]
- [O+]
- [BH4-]
- [Cu]
- 7
- [Mg]
- [Fe+2]
- [n+]
- [Sn]
- [BH-]
- [Pd+2]
- [CH]
- [I-]
- [Br-]
- [C-]
- [Zn]
- [B-]
- [F-]
- [Al]
- [P+]
- [BH3-]
- [Fe]
- [C]
- [AlH4]
- [Ni]
- [SiH]
- 8
- [Cu+2]
- [Mn]
- [AlH]
- [nH+]
- [AlH4-]
- [O-2]
- [Cr]
- [Mg+2]
- [NH3+]
- [S@]
- [Pt]
- [Al+3]
- [S@@]
- [S-]
- [Ti]
- [Zn+2]
- [PH]
- [NH2+]
- [Ru]
- [Ag+]
- [S+]
- [I+3]
- [NH+]
- [Ca+2]
- [Ag]
- 9
- [Os]
- [Se]
- [SiH2]
- [Ca]
- [Ti+4]
- [Ac]
- [Cu+]
- [S]
- [Rh]
- [Cl+3]
- [cH-]
- [Zn+]
- [O]
- [Cl+]
- [SH]
- [H+]
- [Pd+]
- [se]
- [PH+]
- [I]
- [Pt+2]
- [C+]
- [Mg+]
- [Hg]
- [W]
- [SnH]
- [SiH3]
- [Fe+3]
- [NH]
- [Mo]
- [CH2+]
- %10
- [CH2-]
- [CH2]
- [n-]
- [Ce+4]
- [NH-]
- [Co]
- [I+]
- [PH2]
- [Pt+4]
- [Ce]
- [B]
- [Sn+2]
- [Ba+2]
- %11
- [Fe-3]
- [18F]
- [SH-]
- [Pb+2]
- [Os-2]
- [Zr+4]
- [N]
- [Ir]
- [Bi]
- [Ni+2]
- [P@]
- [Co+2]
- [s+]
- [As]
- [P+3]
- [Hg+2]
- [Yb+3]
- [CH-]
- [Zr+2]
- [Mn+2]
- [CH+]
- [In]
- [KH]
- [Ce+3]
- [Zr]
- [AlH2-]
- [OH2+]
- [Ti+3]
- [Rh+2]
- [Sb]
- [S-2]
- %12
- [P@@]
- [Si@H]
- [Mn+4]
- p
- [Ba]
- [NH2-]
- [Ge]
- [Pb+4]
- [Cr+3]
- [Au]
- [LiH]
- [Sc+3]
- [o+]
- [Rh-3]
- %13
- [Br]
- [Sb-]
- [S@+]
- [I+2]
- [Ar]
- [V]
- [Cu-]
- [Al-]
- [Te]
- [13c]
- [13C]
- [Cl]
- [PH4+]
- [SiH4]
- [te]
- [CH3-]
- [S@@+]
- [Rh+3]
- [SH+]
- [Bi+3]
- [Br+2]
- [La]
- [La+3]
- [Pt-2]
- [N@@]
- [PH3+]
- [N@]
- [Si+4]
- [Sr+2]
- [Al+]
- [Pb]
- [SeH]
- [Si-]
- [V+5]
- [Y+3]
- [Re]
- [Ru+]
- [Sm]
- *
- [3H]
- [NH2]
- [Ag-]
- [13CH3]
- [OH+]
- [Ru+3]
- [OH]
- [Gd+3]
- [13CH2]
- [In+3]
- [Si@@]
- [Si@]
- [Ti+2]
- [Sn+]
- [Cl+2]
- [AlH-]
- [Pd-2]
- [SnH3]
- [B+3]
- [Cu-2]
- [Nd+3]
- [Pb+3]
- [13cH]
- [Fe-4]
- [Ga]
- [Sn+4]
- [Hg+]
- [11CH3]
- [Hf]
- [Pr]
- [Y]
- [S+2]
- [Cd]
- [Cr+6]
- [Zr+3]
- [Rh+]
- [CH3]
- [N-3]
- [Hf+2]
- [Th]
- [Sb+3]
- %14
- [Cr+2]
- [Ru+2]
- [Hf+4]
- [14C]
- [Ta]
- [Tl+]
- [B+]
- [Os+4]
- [PdH2]
- [Pd-]
- [Cd+2]
- [Co+3]
- [S+4]
- [Nb+5]
- [123I]
- [c+]
- [Rb+]
- [V+2]
- [CH3+]
- [Ag+2]
- [cH+]
- [Mn+3]
- [Se-]
- [As-]
- [Eu+3]
- [SH2]
- [Sm+3]
- [IH+]
- %15
- [OH3+]
- [PH3]
- [IH2+]
- [SH2+]
- [Ir+3]
- [AlH3]
- [Sc]
- [Yb]
- [15NH2]
- [Lu]
- [sH+]
- [Gd]
- [18F-]
- [SH3+]
- [SnH4]
- [TeH]
- [Si@@H]
- [Ga+3]
- [CaH2]
- [Tl]
- [Ta+5]
- [GeH]
- [Br+]
- [Sr]
- [Tl+3]
- [Sm+2]
- [PH5]
- %16
- [N@@+]
- [Au+3]
- [C-4]
- [Nd]
- [Ti+]
- [IH]
- [N@+]
- [125I]
- [Eu]
- [Sn+3]
- [Nb]
- [Er+3]
- [123I-]
- [14c]
- %17
- [SnH2]
- [YH]
- [Sb+5]
- [Pr+3]
- [Ir+]
- [N+3]
- [AlH2]
- [19F]
- %18
- [Tb]
- [14CH]
- [Mo+4]
- [Si+]
- [BH]
- [Be]
- [Rb]
- [pH]
- %19
- %20
- [Xe]
- [Ir-]
- [Be+2]
- [C+4]
- [RuH2]
- [15NH]
- [U+2]
- [Au-]
- %21
- %22
- [Au+]
- [15n]
- [Al+2]
- [Tb+3]
- [15N]
- [V+3]
- [W+6]
- [14CH3]
- [Cr+4]
- [ClH+]
- b
- [Ti+6]
- [Nd+]
- [Zr+]
- [PH2+]
- [Fm]
- [N@H+]
- [RuH]
- [Dy+3]
- %23
- [Hf+3]
- [W+4]
- [11C]
- [13CH]
- [Er]
- [124I]
- [LaH]
- [F]
- [siH]
- [Ga+]
- [Cm]
- [GeH3]
- [IH-]
- [U+6]
- [SeH+]
- [32P]
- [SeH-]
- [Pt-]
- [Ir+2]
- [se+]
- [U]
- [F+]
- [BH2]
- [As+]
- [Cf]
- [ClH2+]
- [Ni+]
- [TeH3]
- [SbH2]
- [Ag+3]
- %24
- [18O]
- [PH4]
- [Os+2]
- [Na-]
- [Sb+2]
- [V+4]
- [Ho+3]
- [68Ga]
- [PH-]
- [Bi+2]
- [Ce+2]
- [Pd+3]
- [99Tc]
- [13C@@H]
- [Fe+6]
- [c]
- [GeH2]
- [10B]
- [Cu+3]
- [Mo+2]
- [Cr+]
- [Pd+4]
- [Dy]
- [AsH]
- [Ba+]
- [SeH2]
- [In+]
- [TeH2]
- [BrH+]
- [14cH]
- [W+]
- [13C@H]
- [AsH2]
- [In+2]
- [N+2]
- [N@@H+]
- [SbH]
- [60Co]
- [AsH4+]
- [AsH3]
- [18OH]
- [Ru-2]
- [Na-2]
- [CuH2]
- [31P]
- [Ti+5]
- [35S]
- [P@@H]
- [ArH]
- [Co+]
- [Zr-2]
- [BH2-]
- [131I]
- [SH5]
- [VH]
- [B+2]
- [Yb+2]
- [14C@H]
- [211At]
- [NH3+2]
- [IrH]
- [IrH2]
- [Rh-]
- [Cr-]
- [Sb+]
- [Ni+3]
- [TaH3]
- [Tl+2]
- [64Cu]
- [Tc]
- [Cd+]
- [1H]
- [15nH]
- [AlH2+]
- [FH+2]
- [BiH3]
- [Ru-]
- [Mo+6]
- [AsH+]
- [BaH2]
- [BaH]
- [Fe+4]
- [229Th]
- [Th+4]
- [As+3]
- [NH+3]
- [P@H]
- [Li-]
- [7NaH]
- [Bi+]
- [PtH+2]
- [p-]
- [Re+5]
- [NiH]
- [Ni-]
- [Xe+]
- [Ca+]
- [11c]
- [Rh+4]
- [AcH]
- [HeH]
- [Sc+2]
- [Mn+]
- [UH]
- [14CH2]
- [SiH4+]
- [18OH2]
- [Ac-]
- [Re+4]
- [118Sn]
- [153Sm]
- [P+2]
- [9CH]
- [9CH3]
- [Y-]
- [NiH2]
- [Si+2]
- [Mn+6]
- [ZrH2]
- [C-2]
- [Bi+5]
- [24NaH]
- [Fr]
- [15CH]
- [Se+]
- [At]
- [P-3]
- [124I-]
- [CuH2-]
- [Nb+4]
- [Nb+3]
- [MgH]
- [Ir+4]
- [67Ga+3]
- [67Ga]
- [13N]
- [15OH2]
- [2NH]
- [Ho]
- [Cn]
- [He]
- [Ne]
- [Kr]
- [Pm]
- [Tm]
- [Po]
- [Rn]
- [Ra]
- [Pa]
- [Np]
- [Pu]
- [Am]
- [Bk]
- [Es]
- [Md]
- [No]
- [Lr]
- [Rf]
- [Db]
- [Sg]
- [Bh]
- [Hs]
- [Mt]
- [Ds]
- [Rg]
- [Nh]
- [Fl]
- [Mc]
- [Lv]
- [Ts]
- [Og]
623
- [0]
624
- [1]
625
- [2]
626
- [3]
627
- [4]
628
- [5]
629
- [6]
630
- [7]
631
- [8]
632
- [9]
633
- [10]
634
- [11]
635
- [12]
636
- [13]
637
- [14]
638
- [15]
639
- [16]
640
- [17]
641
- [18]
642
- [19]
643
- [20]
644
- [21]
645
- [22]
646
- [23]
647
- [24]
648
- [25]
649
- [26]
650
- [27]
651
- [28]
652
- [29]
653
- [30]
654
- [31]
655
- [32]
656
- [33]
657
- [34]
658
- [35]
659
- [36]
660
- [37]
661
- [38]
662
- [39]
663
- [40]
664
- [41]
665
- [42]
666
- [43]
667
- [44]
668
- [45]
669
- [46]
670
- [47]
671
- [48]
672
- [49]
673
- [50]
674
- [51]
675
- [52]
676
- [53]
677
- [54]
678
- [55]
679
- [56]
680
- [57]
681
- [58]
682
- [59]
683
- [60]
684
- [61]
685
- [62]
686
- [63]
687
- [64]
688
- [65]
689
- [66]
690
- [67]
691
- [68]
692
- [69]
693
- [70]
694
- [71]
695
- [72]
696
- [73]
697
- [74]
698
- [75]
699
- [76]
700
- [77]
701
- [78]
702
- [79]
703
- [80]
704
- [81]
705
- [82]
706
- [83]
707
- [84]
708
- [85]
709
- [86]
710
- [87]
711
- [88]
712
- [89]
713
- [90]
714
- [91]
715
- [92]
716
- [93]
717
- [94]
718
- [95]
719
- [96]
720
- [97]
721
- [98]
722
- [99]
723
- [100]
724
- [101]
725
- [102]
726
- [103]
727
- [104]
728
- [105]
729
- [106]
730
- [107]
731
- [108]
732
- [109]
733
- [110]
734
- [111]
735
- [112]
736
- [113]
737
- [114]
738
- [115]
739
- [116]
740
- [117]
741
- [118]
742
- [119]
743
- [120]
744
- [121]
745
- [122]
746
- [123]
747
- [124]
748
- [125]
749
- [126]
750
- [127]
751
- [128]
752
- [129]
753
- [130]
754
- [131]
755
- [132]
756
- [133]
757
- [134]
758
- [135]
759
- [136]
760
- [137]
761
- [138]
762
- [139]
763
- [140]
764
- [141]
765
- [142]
766
- [143]
767
- [144]
768
- [145]
769
- [146]
770
- [147]
771
- [148]
772
- [149]
773
- [150]
774
- [151]
775
- [152]
776
- [153]
777
- [154]
778
- [155]
779
- [156]
780
- [157]
781
- [158]
782
- [159]
783
- [160]
784
- [161]
785
- [162]
786
- [163]
787
- [164]
788
- [165]
789
- [166]
790
- [167]
791
- [168]
792
- [169]
793
- [170]
794
- [171]
795
- [172]
796
- [173]
797
- [174]
798
- [175]
799
- [176]
800
- [177]
801
- [178]
802
- [179]
803
- [180]
804
- [181]
805
- [182]
806
- [183]
807
- [184]
808
- [185]
809
- [186]
810
- [187]
811
- [188]
812
- [189]
813
- [190]
814
- [191]
815
- [192]
816
- [193]
817
- [194]
818
- [195]
819
- [196]
820
- [197]
821
- [198]
822
- [199]
823
- [200]
824
- [201]
825
- [202]
826
- [203]
827
- [204]
828
- [205]
829
- [206]
830
- [207]
831
- [208]
832
- [209]
833
- [210]
834
- [211]
835
- [212]
836
- [213]
837
- [214]
838
- [215]
839
- [216]
840
- [217]
841
- [218]
842
- [219]
843
- [220]
844
- [221]
845
- [222]
846
- [223]
847
- [224]
848
- [225]
849
- [226]
850
- [227]
851
- [228]
852
- [229]
853
- [230]
854
- [231]
855
- [232]
856
- [233]
857
- [234]
858
- [235]
859
- [236]
860
- [237]
861
- [238]
862
- [239]
863
- [240]
864
- [241]
865
- [242]
866
- [243]
867
- [244]
868
- [245]
869
- [246]
870
- [247]
871
- [248]
872
- [249]
873
- [250]
874
- [251]
875
- [252]
876
- [253]
877
- [254]
878
- [255]
879
- [256]
880
- [257]
881
- [258]
882
- [259]
883
- [260]
884
- [261]
885
- [262]
886
- [263]
887
- [264]
888
- [265]
889
- [266]
890
- [267]
891
- [268]
892
- [269]
893
- [270]
894
- [271]
895
- [272]
896
- [273]
897
- [274]
898
- [275]
899
- [276]
900
- [277]
901
- [278]
902
- [279]
903
- [280]
904
- [281]
905
- [282]
906
- [283]
907
- [284]
908
- [285]
909
- [286]
910
- [287]
911
- [288]
912
- [289]
913
- [290]
914
- [291]
915
- [292]
916
- [293]
917
- [294]
918
- [295]
919
- [296]
920
- [297]
921
- [298]
922
- [299]
923
- [300]
924
- [301]
925
- [302]
926
- [303]
927
- [304]
928
- [305]
929
- [306]
930
- [307]
931
- [308]
932
- [309]
933
- [310]
934
- [311]
935
- [312]
936
- [313]
937
- [314]
938
- [315]
939
- [316]
940
- [317]
941
- [318]
942
- [319]
943
- [320]
944
- [321]
945
- [322]
946
- [323]
947
- [324]
948
- [325]
949
- [326]
950
- [327]
951
- [328]
952
- [329]
953
- [330]
954
- [331]
955
- [332]
956
- [333]
957
- [334]
958
- [335]
959
- [336]
960
- [337]
961
- [338]
962
- [339]
963
- [340]
964
- [341]
965
- [342]
966
- [343]
967
- [344]
968
- [345]
969
- [346]
970
- [347]
971
- [348]
972
- [349]
973
- [350]
974
- [351]
975
- [352]
976
- [353]
977
- [354]
978
- [355]
979
- [356]
980
- [357]
981
- [358]
982
- [359]
983
- [360]
984
- [361]
985
- [362]
986
- [363]
987
- [364]
988
- [365]
989
- [366]
990
- [367]
991
- [368]
992
- [369]
993
- [370]
994
- [371]
995
- [372]
996
- [373]
997
- [374]
998
- [375]
999
- [376]
1000
- [377]
1001
- [378]
1002
- [379]
1003
- [380]
1004
- [381]
1005
- [382]
1006
- [383]
1007
- [384]
1008
- [385]
1009
- [386]
1010
- [387]
1011
- [388]
1012
- [389]
1013
- [390]
1014
- [391]
1015
- [392]
1016
- [393]
1017
- [394]
1018
- [395]
1019
- [396]
1020
- [397]
1021
- [398]
1022
- [399]
1023
- [400]
1024
- [401]
1025
- [402]
1026
- [403]
1027
- [404]
1028
- [405]
1029
- [406]
1030
- [407]
1031
- [408]
1032
- [409]
1033
- [410]
1034
- [411]
1035
- [412]
1036
- [413]
1037
- [414]
1038
- [415]
1039
- [416]
1040
- [417]
1041
- [418]
1042
- [419]
1043
- [420]
1044
- [421]
1045
- [422]
1046
- [423]
1047
- [424]
1048
- [425]
1049
- [426]
1050
- [427]
1051
- [428]
1052
- [429]
1053
- [430]
1054
- [431]
1055
- [432]
1056
- [433]
1057
- [434]
1058
- [435]
1059
- [436]
1060
- [437]
1061
- [438]
1062
- [439]
1063
- [440]
1064
- [441]
1065
- [442]
1066
- [443]
1067
- [444]
1068
- [445]
1069
- [446]
1070
- [447]
1071
- [448]
1072
- [449]
1073
- [450]
1074
- [451]
1075
- [452]
1076
- [453]
1077
- [454]
1078
- [455]
1079
- [456]
1080
- [457]
1081
- [458]
1082
- [459]
1083
- [460]
1084
- [461]
1085
- [462]
1086
- [463]
1087
- [464]
1088
- [465]
1089
- [466]
1090
- [467]
1091
- [468]
1092
- [469]
1093
- [470]
1094
- [471]
1095
- [472]
1096
- [473]
1097
- [474]
1098
- [475]
1099
- [476]
1100
- [477]
1101
- [478]
1102
- [479]
1103
- [480]
1104
- [481]
1105
- [482]
1106
- [483]
1107
- [484]
1108
- [485]
1109
- [486]
1110
- [487]
1111
- [488]
1112
- [489]
1113
- [490]
1114
- [491]
1115
- [492]
1116
- [493]
1117
- [494]
1118
- [495]
1119
- [496]
1120
- [497]
1121
- [498]
1122
- [499]
1123
- [500]
1124
- [501]
1125
- [502]
1126
- [503]
1127
- [504]
1128
- [505]
1129
- [506]
1130
- [507]
1131
- [508]
1132
- [509]
1133
- [510]
1134
- [511]
1135
- [512]
1136
- [513]
1137
- [514]
1138
- [515]
1139
- [516]
1140
- [517]
1141
- [518]
1142
- [519]
1143
- [520]
1144
- [521]
1145
- [522]
1146
- [523]
1147
- [524]
1148
- [525]
1149
- [526]
1150
- [527]
1151
- [528]
1152
- [529]
1153
- [530]
1154
- [531]
1155
- [532]
1156
- [533]
1157
- [534]
1158
- [535]
1159
- [536]
1160
- [537]
1161
- [538]
1162
- [539]
1163
- [540]
1164
- [541]
1165
- [542]
1166
- [543]
1167
- [544]
1168
- [545]
1169
- [546]
1170
- [547]
1171
- [548]
1172
- [549]
1173
- [550]
1174
- [551]
1175
- [552]
1176
- [553]
1177
- [554]
1178
- [555]
1179
- [556]
1180
- [557]
1181
- [558]
1182
- [559]
1183
- [560]
1184
- [561]
1185
- [562]
1186
- [563]
1187
- [564]
1188
- [565]
1189
- [566]
1190
- [567]
1191
- [568]
1192
- [569]
1193
- [570]
1194
- [571]
1195
- [572]
1196
- [573]
1197
- [574]
1198
- [575]
1199
- [576]
1200
- [577]
1201
- [578]
1202
- [579]
1203
- [580]
1204
- [581]
1205
- [582]
1206
- [583]
1207
- [584]
1208
- [585]
1209
- [586]
1210
- [587]
1211
- [588]
1212
- [589]
1213
- [590]
1214
- [591]
1215
- [592]
1216
- [593]
1217
- [594]
1218
- [595]
1219
- [596]
1220
- [597]
1221
- [598]
1222
- [599]
1223
- [600]
1224
- [601]
1225
- [602]
1226
- [603]
1227
- [604]
1228
- [605]
1229
- [606]
1230
- [607]
1231
- [608]
1232
- [609]
1233
- [610]
1234
- [611]
1235
- [612]
1236
- [613]
1237
- [614]
1238
- [615]
1239
- [616]
1240
- [617]
1241
- [618]
1242
- [619]
1243
- [620]
1244
- [621]
1245
- [622]
1246
- [623]
1247
- [624]
1248
- [625]
1249
- [626]
1250
- [627]
1251
- [628]
1252
- [629]
1253
- [630]
1254
- [631]
1255
- [632]
1256
- [633]
1257
- [634]
1258
- [635]
1259
- [636]
1260
- [637]
1261
- [638]
1262
- [639]
1263
- [640]
1264
- [641]
1265
- [642]
1266
- [643]
1267
- [644]
1268
- [645]
1269
- [646]
1270
- [647]
1271
- [648]
1272
- [649]
1273
- [650]
1274
- [651]
1275
- [652]
1276
- [653]
1277
- [654]
1278
- [655]
1279
- [656]
1280
- [657]
1281
- [658]
1282
- [659]
1283
- [660]
1284
- [661]
1285
- [662]
1286
- [663]
1287
- [664]
1288
- [665]
1289
- [666]
1290
- [667]
1291
- [668]
1292
- [669]
1293
- [670]
1294
- [671]
1295
- [672]
1296
- [673]
1297
- [674]
1298
- [675]
1299
- [676]
1300
- [677]
1301
- [678]
1302
- [679]
1303
- [680]
1304
- [681]
1305
- [682]
1306
- [683]
1307
- [684]
1308
- [685]
1309
- [686]
1310
- [687]
1311
- [688]
1312
- [689]
1313
- [690]
1314
- [691]
1315
- [692]
1316
- [693]
1317
- [694]
1318
- [695]
1319
- [696]
1320
- [697]
1321
- [698]
1322
- [699]
1323
- [700]
1324
- [701]
1325
- [702]
1326
- [703]
1327
- [704]
1328
- [705]
1329
- [706]
1330
- [707]
1331
- [708]
1332
- [709]
1333
- [710]
1334
- [711]
1335
- [712]
1336
- [713]
1337
- [714]
1338
- [715]
1339
- [716]
1340
- [717]
1341
- [718]
1342
- [719]
1343
- [720]
1344
- [721]
1345
- [722]
1346
- [723]
1347
- [724]
1348
- [725]
1349
- [726]
1350
- [727]
1351
- [728]
1352
- [729]
1353
- [730]
1354
- [731]
1355
- [732]
1356
- [733]
1357
- [734]
1358
- [735]
1359
- [736]
1360
- [737]
1361
- [738]
1362
- [739]
1363
- [740]
1364
- [741]
1365
- [742]
1366
- [743]
1367
- [744]
1368
- [745]
1369
- [746]
1370
- [747]
1371
- [748]
1372
- [749]
1373
- [750]
1374
- [751]
1375
- [752]
1376
- [753]
1377
- [754]
1378
- [755]
1379
- [756]
1380
- [757]
1381
- [758]
1382
- [759]
1383
- [760]
1384
- [761]
1385
- [762]
1386
- [763]
1387
- [764]
1388
- [765]
1389
- [766]
1390
- [767]
1391
- [768]
1392
- [769]
1393
- [770]
1394
- [771]
1395
- [772]
1396
- [773]
1397
- [774]
1398
- [775]
1399
- [776]
1400
- [777]
1401
- [778]
1402
- [779]
1403
- [780]
1404
- [781]
1405
- [782]
1406
- [783]
1407
- [784]
1408
- [785]
1409
- [786]
1410
- [787]
1411
- [788]
1412
- [789]
1413
- [790]
1414
- [791]
1415
- [792]
1416
- [793]
1417
- [794]
1418
- [795]
1419
- [796]
1420
- [797]
1421
- [798]
1422
- [799]
1423
- [800]
1424
- [801]
1425
- [802]
1426
- [803]
1427
- [804]
1428
- [805]
1429
- [806]
1430
- [807]
1431
- [808]
1432
- [809]
1433
- [810]
1434
- [811]
1435
- [812]
1436
- [813]
1437
- [814]
1438
- [815]
1439
- [816]
1440
- [817]
1441
- [818]
1442
- [819]
1443
- [820]
1444
- [821]
1445
- [822]
1446
- [823]
1447
- [824]
1448
- [825]
1449
- [826]
1450
- [827]
1451
- [828]
1452
- [829]
1453
- [830]
1454
- [831]
1455
- [832]
1456
- [833]
1457
- [834]
1458
- [835]
1459
- [836]
1460
- [837]
1461
- [838]
1462
- [839]
1463
- [840]
1464
- [841]
1465
- [842]
1466
- [843]
1467
- [844]
1468
- [845]
1469
- [846]
1470
- [847]
1471
- [848]
1472
- [849]
1473
- [850]
1474
- [851]
1475
- [852]
1476
- [853]
1477
- [854]
1478
- [855]
1479
- [856]
1480
- [857]
1481
- [858]
1482
- [859]
1483
- [860]
1484
- [861]
1485
- [862]
1486
- [863]
1487
- [864]
1488
- [865]
1489
- [866]
1490
- [867]
1491
- [868]
1492
- [869]
1493
- [870]
1494
- [871]
1495
- [872]
1496
- [873]
1497
- [874]
1498
- [875]
1499
- [876]
1500
- [877]
1501
- [878]
1502
- [879]
1503
- [880]
1504
- [881]
1505
- [882]
1506
- [883]
1507
- [884]
1508
- [885]
1509
- [886]
1510
- [887]
1511
- [888]
1512
- [889]
1513
- [890]
1514
- [891]
1515
- [892]
1516
- [893]
1517
- [894]
1518
- [895]
1519
- [896]
1520
- [897]
1521
- [898]
1522
- [899]
1523
- [900]
1524
- [901]
1525
- [902]
1526
- [903]
1527
- [904]
1528
- [905]
1529
- [906]
1530
- [907]
1531
- [908]
1532
- [909]
1533
- [910]
1534
- [911]
1535
- [912]
1536
- [913]
1537
- [914]
1538
- [915]
1539
- [916]
1540
- [917]
1541
- [918]
1542
- [919]
1543
- [920]
1544
- [921]
1545
- [922]
1546
- [923]
1547
- [924]
1548
- [925]
1549
- [926]
1550
- [927]
1551
- [928]
1552
- [929]
1553
- [930]
1554
- [931]
1555
- [932]
1556
- [933]
1557
- [934]
1558
- [935]
1559
- [936]
1560
- [937]
1561
- [938]
1562
- [939]
1563
- [940]
1564
- [941]
1565
- [942]
1566
- [943]
1567
- [944]
1568
- [945]
1569
- [946]
1570
- [947]
1571
- [948]
1572
- [949]
1573
- [950]
1574
- [951]
1575
- [952]
1576
- [953]
1577
- [954]
1578
- [955]
1579
- [956]
1580
- [957]
1581
- [958]
1582
- [959]
1583
- [960]
1584
- [961]
1585
- [962]
1586
- [963]
1587
- [964]
1588
- [965]
1589
- [966]
1590
- [967]
1591
- [968]
1592
- [969]
1593
- [970]
1594
- [971]
1595
- [972]
1596
- [973]
1597
- [974]
1598
- [975]
1599
- [976]
1600
- [977]
1601
- [978]
1602
- [979]
1603
- [980]
1604
- [981]
1605
- [982]
1606
- [983]
1607
- [984]
1608
- [985]
1609
- [986]
1610
- [987]
1611
- [988]
1612
- [989]
1613
- [990]
1614
- [991]
1615
- [992]
1616
- [993]
1617
- [994]
1618
- [995]
1619
- [996]
1620
- [997]
1621
- [998]
1622
- [999]
1623
- [1000]
1624
- [1001]
1625
- [1002]
1626
- [1003]
1627
- [1004]
1628
- [1005]
1629
- [1006]
1630
- [1007]
1631
- [1008]
1632
- [1009]
1633
- [1010]
1634
- [1011]
1635
- [1012]
1636
- [1013]
1637
- [1014]
1638
- [1015]
1639
- [1016]
1640
- [1017]
1641
- [1018]
1642
- [1019]
1643
- [1020]
1644
- [1021]
1645
- [1022]
1646
- [1023]
1647
- [1024]
1648
- [1025]
1649
- [1026]
1650
- [1027]
1651
- [1028]
1652
- [1029]
1653
- [1030]
1654
- [1031]
1655
- [1032]
1656
- [1033]
1657
- [1034]
1658
- [1035]
1659
- [1036]
1660
- [1037]
1661
- [1038]
1662
- [1039]
1663
- [1040]
1664
- [1041]
1665
- [1042]
1666
- [1043]
1667
- [1044]
1668
- [1045]
1669
- [1046]
1670
- [1047]
1671
- [1048]
1672
- [1049]
1673
- [1050]
1674
- [1051]
1675
- [1052]
1676
- [1053]
1677
- [1054]
1678
- [1055]
1679
- [1056]
1680
- [1057]
1681
- [1058]
1682
- [1059]
1683
- [1060]
1684
- [1061]
1685
- [1062]
1686
- [1063]
1687
- [1064]
1688
- [1065]
1689
- [1066]
1690
- [1067]
1691
- [1068]
1692
- [1069]
1693
- [1070]
1694
- [1071]
1695
- [1072]
1696
- [1073]
1697
- [1074]
1698
- [1075]
1699
- [1076]
1700
- [1077]
1701
- [1078]
1702
- [1079]
1703
- [1080]
1704
- [1081]
1705
- [1082]
1706
- [1083]
1707
- [1084]
1708
- [1085]
1709
- [1086]
1710
- [1087]
1711
- [1088]
1712
- [1089]
1713
- [1090]
1714
- [1091]
1715
- [1092]
1716
- [1093]
1717
- [1094]
1718
- [1095]
1719
- [1096]
1720
- [1097]
1721
- [1098]
1722
- [1099]
1723
- [1100]
1724
- [1101]
1725
- [1102]
1726
- [1103]
1727
- [1104]
1728
- [1105]
1729
- [1106]
1730
- [1107]
1731
- [1108]
1732
- [1109]
1733
- [1110]
1734
- [1111]
1735
- [1112]
1736
- [1113]
1737
- [1114]
1738
- [1115]
1739
- [1116]
1740
- [1117]
1741
- [1118]
1742
- [1119]
1743
- [1120]
1744
- [1121]
1745
- [1122]
1746
- [1123]
1747
- [1124]
1748
- [1125]
1749
- [1126]
1750
- [1127]
1751
- [1128]
1752
- [1129]
1753
- [1130]
1754
- [1131]
1755
- [1132]
1756
- [1133]
1757
- [1134]
1758
- [1135]
1759
- [1136]
1760
- [1137]
1761
- [1138]
1762
- [1139]
1763
- [1140]
1764
- [1141]
1765
- [1142]
1766
- [1143]
1767
- [1144]
1768
- [1145]
1769
- [1146]
1770
- [1147]
1771
- [1148]
1772
- [1149]
1773
- [1150]
1774
- [1151]
1775
- [1152]
1776
- [1153]
1777
- [1154]
1778
- [1155]
1779
- [1156]
1780
- [1157]
1781
- [1158]
1782
- [1159]
1783
- [1160]
1784
- [1161]
1785
- [1162]
1786
- [1163]
1787
- [1164]
1788
- [1165]
1789
- [1166]
1790
- [1167]
1791
- [1168]
1792
- [1169]
1793
- [1170]
1794
- [1171]
1795
- [1172]
1796
- [1173]
1797
- [1174]
1798
- [1175]
1799
- [1176]
1800
- [1177]
1801
- [1178]
1802
- [1179]
1803
- [1180]
1804
- [1181]
1805
- [1182]
1806
- [1183]
1807
- [1184]
1808
- [1185]
1809
- [1186]
1810
- [1187]
1811
- [1188]
1812
- [1189]
1813
- [1190]
1814
- [1191]
1815
- [1192]
1816
- [1193]
1817
- [1194]
1818
- [1195]
1819
- [1196]
1820
- [1197]
1821
- [1198]
1822
- [1199]
1823
- [1200]
1824
- [1201]
1825
- [1202]
1826
- [1203]
1827
- [1204]
1828
- [1205]
1829
- [1206]
1830
- [1207]
1831
- [1208]
1832
- [1209]
1833
- [1210]
1834
- [1211]
1835
- [1212]
1836
- [1213]
1837
- [1214]
1838
- [1215]
1839
- [1216]
1840
- [1217]
1841
- [1218]
1842
- [1219]
1843
- [1220]
1844
- [1221]
1845
- [1222]
1846
- [1223]
1847
- [1224]
1848
- [1225]
1849
- [1226]
1850
- [1227]
1851
- [1228]
1852
- [1229]
1853
- [1230]
1854
- [1231]
1855
- [1232]
1856
- [1233]
1857
- [1234]
1858
- [1235]
1859
- [1236]
1860
- [1237]
1861
- [1238]
1862
- [1239]
1863
- [1240]
1864
- [1241]
1865
- [1242]
1866
- [1243]
1867
- [1244]
1868
- [1245]
1869
- [1246]
1870
- [1247]
1871
- [1248]
1872
- [1249]
1873
- [1250]
1874
- [1251]
1875
- [1252]
1876
- [1253]
1877
- [1254]
1878
- [1255]
1879
- [1256]
1880
- [1257]
1881
- [1258]
1882
- [1259]
1883
- [1260]
1884
- [1261]
1885
- [1262]
1886
- [1263]
1887
- [1264]
1888
- [1265]
1889
- [1266]
1890
- [1267]
1891
- [1268]
1892
- [1269]
1893
- [1270]
1894
- [1271]
1895
- [1272]
1896
- [1273]
1897
- [1274]
1898
- [1275]
1899
- [1276]
1900
- [1277]
1901
- [1278]
1902
- [1279]
1903
- [1280]
1904
- [1281]
1905
- [1282]
1906
- [1283]
1907
- [1284]
1908
- [1285]
1909
- [1286]
1910
- [1287]
1911
- [1288]
1912
- [1289]
1913
- [1290]
1914
- [1291]
1915
- [1292]
1916
- [1293]
1917
- [1294]
1918
- [1295]
1919
- [1296]
1920
- [1297]
1921
- [1298]
1922
- [1299]
1923
- [1300]
1924
- [1301]
1925
- [1302]
1926
- [1303]
1927
- [1304]
1928
- [1305]
1929
- [1306]
1930
- [1307]
1931
- [1308]
1932
- [1309]
1933
- [1310]
1934
- [1311]
1935
- [1312]
1936
- [1313]
1937
- [1314]
1938
- [1315]
1939
- [1316]
1940
- [1317]
1941
- [1318]
1942
- [1319]
1943
- [1320]
1944
- [1321]
1945
- [1322]
1946
- [1323]
1947
- [1324]
1948
- [1325]
1949
- [1326]
1950
- [1327]
1951
- [1328]
1952
- [1329]
1953
- [1330]
1954
- [1331]
1955
- [1332]
1956
- [1333]
1957
- [1334]
1958
- [1335]
1959
- [1336]
1960
- [1337]
1961
- [1338]
1962
- [1339]
1963
- [1340]
1964
- [1341]
1965
- [1342]
1966
- [1343]
1967
- [1344]
1968
- [1345]
1969
- [1346]
1970
- [1347]
1971
- [1348]
1972
- [1349]
1973
- [1350]
1974
- [1351]
1975
- [1352]
1976
- [1353]
1977
- [1354]
1978
- [1355]
1979
- [1356]
1980
- [1357]
1981
- [1358]
1982
- [1359]
1983
- [1360]
1984
- [1361]
1985
- [1362]
1986
- [1363]
1987
- [1364]
1988
- [1365]
1989
- [1366]
1990
- [1367]
1991
- [1368]
1992
- [1369]
1993
- [1370]
1994
- [1371]
1995
- [1372]
1996
- [1373]
1997
- [1374]
1998
- [1375]
1999
- [1376]
2000
- [1377]
2001
- [1378]
2002
- [1379]
2003
- [1380]
2004
- [1381]
2005
- [1382]
2006
- [1383]
2007
- [1384]
2008
- [1385]
2009
- [1386]
2010
- [1387]
2011
- [1388]
2012
- [1389]
2013
- [1390]
2014
- [1391]
2015
- [1392]
2016
- [1393]
2017
- [1394]
2018
- [1395]
2019
- [1396]
2020
- [1397]
2021
- [1398]
2022
- [1399]
2023
- [1400]
2024
- [1401]
2025
- [1402]
2026
- [1403]
2027
- [1404]
2028
- [1405]
2029
- [1406]
2030
- [1407]
2031
- [1408]
2032
- [1409]
2033
- [1410]
2034
- [1411]
2035
- [1412]
2036
- [1413]
2037
- [1414]
2038
- [1415]
2039
- [1416]
2040
- [1417]
2041
- [1418]
2042
- [1419]
2043
- [1420]
2044
- [1421]
2045
- [1422]
2046
- [1423]
2047
- [1424]
2048
- [1425]
2049
- [1426]
2050
- [1427]
2051
- [1428]
2052
- [1429]
2053
- [1430]
2054
- [1431]
2055
- [1432]
2056
- [1433]
2057
- [1434]
2058
- [1435]
2059
- [1436]
2060
- [1437]
2061
- [1438]
2062
- [1439]
2063
- [1440]
2064
- [1441]
2065
- [1442]
2066
- [1443]
2067
- [1444]
2068
- [1445]
2069
- [1446]
2070
- [1447]
2071
- [1448]
2072
- [1449]
2073
- [1450]
2074
- [1451]
2075
- [1452]
2076
- [1453]
2077
- [1454]
2078
- [1455]
2079
- [1456]
2080
- [1457]
2081
- [1458]
2082
- [1459]
2083
- [1460]
2084
- [1461]
2085
- [1462]
2086
- [1463]
2087
- [1464]
2088
- [1465]
2089
- [1466]
2090
- [1467]
2091
- [1468]
2092
- [1469]
2093
- [1470]
2094
- [1471]
2095
- [1472]
2096
- [1473]
2097
- [1474]
2098
- [1475]
2099
- [1476]
2100
- [1477]
2101
- [1478]
2102
- [1479]
2103
- [1480]
2104
- [1481]
2105
- [1482]
2106
- [1483]
2107
- [1484]
2108
- [1485]
2109
- [1486]
2110
- [1487]
2111
- [1488]
2112
- [1489]
2113
- [1490]
2114
- [1491]
2115
- [1492]
2116
- [1493]
2117
- [1494]
2118
- [1495]
2119
- [1496]
2120
- [1497]
2121
- [1498]
2122
- [1499]
2123
- [1500]
2124
- [1501]
2125
- [1502]
2126
- [1503]
2127
- [1504]
2128
- [1505]
2129
- [1506]
2130
- [1507]
2131
- [1508]
2132
- [1509]
2133
- [1510]
2134
- [1511]
2135
- [1512]
2136
- [1513]
2137
- [1514]
2138
- [1515]
2139
- [1516]
2140
- [1517]
2141
- [1518]
2142
- [1519]
2143
- [1520]
2144
- [1521]
2145
- [1522]
2146
- [1523]
2147
- [1524]
2148
- [1525]
2149
- [1526]
2150
- [1527]
2151
- [1528]
2152
- [1529]
2153
- [1530]
2154
- [1531]
2155
- [1532]
2156
- [1533]
2157
- [1534]
2158
- [1535]
2159
- [1536]
2160
- [1537]
2161
- [1538]
2162
- [1539]
2163
- [1540]
2164
- [1541]
2165
- [1542]
2166
- [1543]
2167
- [1544]
2168
- [1545]
2169
- [1546]
2170
- [1547]
2171
- [1548]
2172
- [1549]
2173
- [1550]
2174
- [1551]
2175
- [1552]
2176
- [1553]
2177
- [1554]
2178
- [1555]
2179
- [1556]
2180
- [1557]
2181
- [1558]
2182
- [1559]
2183
- [1560]
2184
- [1561]
2185
- [1562]
2186
- [1563]
2187
- [1564]
2188
- [1565]
2189
- [1566]
2190
- [1567]
2191
- [1568]
2192
- [1569]
2193
- [1570]
2194
- [1571]
2195
- [1572]
2196
- [1573]
2197
- [1574]
2198
- [1575]
2199
- [1576]
2200
- [1577]
2201
- [1578]
2202
- [1579]
2203
- [1580]
2204
- [1581]
2205
- [1582]
2206
- [1583]
2207
- [1584]
2208
- [1585]
2209
- [1586]
2210
- [1587]
2211
- [1588]
2212
- [1589]
2213
- [1590]
2214
- [1591]
2215
- [1592]
2216
- [1593]
2217
- [1594]
2218
- [1595]
2219
- [1596]
2220
- [1597]
2221
- [1598]
2222
- [1599]
2223
- [1600]
2224
- [1601]
2225
- [1602]
2226
- [1603]
2227
- [1604]
2228
- [1605]
2229
- [1606]
2230
- [1607]
2231
- [1608]
2232
- [1609]
2233
- [1610]
2234
- [1611]
2235
- [1612]
2236
- [1613]
2237
- [1614]
2238
- [1615]
2239
- [1616]
2240
- [1617]
2241
- [1618]
2242
- [1619]
2243
- [1620]
2244
- [1621]
2245
- [1622]
2246
- [1623]
2247
- [1624]
2248
- [1625]
2249
- [1626]
2250
- [1627]
2251
- [1628]
2252
- [1629]
2253
- [1630]
2254
- [1631]
2255
- [1632]
2256
- [1633]
2257
- [1634]
2258
- [1635]
2259
- [1636]
2260
- [1637]
2261
- [1638]
2262
- [1639]
2263
- [1640]
2264
- [1641]
2265
- [1642]
2266
- [1643]
2267
- [1644]
2268
- [1645]
2269
- [1646]
2270
- [1647]
2271
- [1648]
2272
- [1649]
2273
- [1650]
2274
- [1651]
2275
- [1652]
2276
- [1653]
2277
- [1654]
2278
- [1655]
2279
- [1656]
2280
- [1657]
2281
- [1658]
2282
- [1659]
2283
- [1660]
2284
- [1661]
2285
- [1662]
2286
- [1663]
2287
- [1664]
2288
- [1665]
2289
- [1666]
2290
- [1667]
2291
- [1668]
2292
- [1669]
2293
- [1670]
2294
- [1671]
2295
- [1672]
2296
- [1673]
2297
- [1674]
2298
- [1675]
2299
- [1676]
2300
- [1677]
2301
- [1678]
2302
- [1679]
2303
- [1680]
2304
- [1681]
2305
- [1682]
2306
- [1683]
2307
- [1684]
2308
- [1685]
2309
- [1686]
2310
- [1687]
2311
- [1688]
2312
- [1689]
2313
- [1690]
2314
- [1691]
2315
- [1692]
2316
- [1693]
2317
- [1694]
2318
- [1695]
2319
- [1696]
2320
- [1697]
2321
- [1698]
2322
- [1699]
2323
- [1700]
2324
- [1701]
2325
- [1702]
2326
- [1703]
2327
- [1704]
2328
- [1705]
2329
- [1706]
2330
- [1707]
2331
- [1708]
2332
- [1709]
2333
- [1710]
2334
- [1711]
2335
- [1712]
2336
- [1713]
2337
- [1714]
2338
- [1715]
2339
- [1716]
2340
- [1717]
2341
- [1718]
2342
- [1719]
2343
- [1720]
2344
- [1721]
2345
- [1722]
2346
- [1723]
2347
- [1724]
2348
- [1725]
2349
- [1726]
2350
- [1727]
2351
- [1728]
2352
- [1729]
2353
- [1730]
2354
- [1731]
2355
- [1732]
2356
- [1733]
2357
- [1734]
2358
- [1735]
2359
- [1736]
2360
- [1737]
2361
- [1738]
2362
- [1739]
2363
- [1740]
2364
- [1741]
2365
- [1742]
2366
- [1743]
2367
- [1744]
2368
- [1745]
2369
- [1746]
2370
- [1747]
2371
- [1748]
2372
- [1749]
2373
- [1750]
2374
- [1751]
2375
- [1752]
2376
- [1753]
2377
- [1754]
2378
- [1755]
2379
- [1756]
2380
- [1757]
2381
- [1758]
2382
- [1759]
2383
- [1760]
2384
- [1761]
2385
- [1762]
2386
- [1763]
2387
- [1764]
2388
- [1765]
2389
- [1766]
2390
- [1767]
2391
- [1768]
2392
- [1769]
2393
- [1770]
2394
- [1771]
2395
- [1772]
2396
- [1773]
2397
- [1774]
2398
- [1775]
2399
- [1776]
2400
- [1777]
2401
- [1778]
2402
- [1779]
2403
- [1780]
2404
- [1781]
2405
- [1782]
2406
- [1783]
2407
- [1784]
2408
- [1785]
2409
- [1786]
2410
- [1787]
2411
- [1788]
2412
- [1789]
2413
- [1790]
2414
- [1791]
2415
- [1792]
2416
- [1793]
2417
- [1794]
2418
- [1795]
2419
- [1796]
2420
- [1797]
2421
- [1798]
2422
- [1799]
2423
- [1800]
2424
- [1801]
2425
- [1802]
2426
- [1803]
2427
- [1804]
2428
- [1805]
2429
- [1806]
2430
- [1807]
2431
- [1808]
2432
- [1809]
2433
- [1810]
2434
- [1811]
2435
- [1812]
2436
- [1813]
2437
- [1814]
2438
- [1815]
2439
- [1816]
2440
- [1817]
2441
- [1818]
2442
- [1819]
2443
- [1820]
2444
- [1821]
2445
- [1822]
2446
- [1823]
2447
- [1824]
2448
- [1825]
2449
- [1826]
2450
- [1827]
2451
- [1828]
2452
- [1829]
2453
- [1830]
2454
- [1831]
2455
- [1832]
2456
- [1833]
2457
- [1834]
2458
- [1835]
2459
- [1836]
2460
- [1837]
2461
- [1838]
2462
- [1839]
2463
- [1840]
2464
- [1841]
2465
- [1842]
2466
- [1843]
2467
- [1844]
2468
- [1845]
2469
- [1846]
2470
- [1847]
2471
- [1848]
2472
- [1849]
2473
- [1850]
2474
- [1851]
2475
- [1852]
2476
- [1853]
2477
- [1854]
2478
- [1855]
2479
- [1856]
2480
- [1857]
2481
- [1858]
2482
- [1859]
2483
- [1860]
2484
- [1861]
2485
- [1862]
2486
- [1863]
2487
- [1864]
2488
- [1865]
2489
- [1866]
2490
- [1867]
2491
- [1868]
2492
- [1869]
2493
- [1870]
2494
- [1871]
2495
- [1872]
2496
- [1873]
2497
- [1874]
2498
- [1875]
2499
- [1876]
2500
- [1877]
2501
- [1878]
2502
- [1879]
2503
- [1880]
2504
- [1881]
2505
- [1882]
2506
- [1883]
2507
- [1884]
2508
- [1885]
2509
- [1886]
2510
- [1887]
2511
- [1888]
2512
- [1889]
2513
- [1890]
2514
- [1891]
2515
- [1892]
2516
- [1893]
2517
- [1894]
2518
- [1895]
2519
- [1896]
2520
- [1897]
2521
- [1898]
2522
- [1899]
2523
- [1900]
2524
- [1901]
2525
- [1902]
2526
- [1903]
2527
- [1904]
2528
- [1905]
2529
- [1906]
2530
- [1907]
2531
- [1908]
2532
- [1909]
2533
- [1910]
2534
- [1911]
2535
- [1912]
2536
- [1913]
2537
- [1914]
2538
- [1915]
2539
- [1916]
2540
- [1917]
2541
- [1918]
2542
- [1919]
2543
- [1920]
2544
- [1921]
2545
- [1922]
2546
- [1923]
2547
- [1924]
2548
- [1925]
2549
- [1926]
2550
- [1927]
2551
- [1928]
2552
- [1929]
2553
- [1930]
2554
- [1931]
2555
- [1932]
2556
- [1933]
2557
- [1934]
2558
- [1935]
2559
- [1936]
2560
- [1937]
2561
- [1938]
2562
- [1939]
2563
- [1940]
2564
- [1941]
2565
- [1942]
2566
- [1943]
2567
- [1944]
2568
- [1945]
2569
- [1946]
2570
- [1947]
2571
- [1948]
2572
- [1949]
2573
- [1950]
2574
- [1951]
2575
- [1952]
2576
- [1953]
2577
- [1954]
2578
- [1955]
2579
- [1956]
2580
- [1957]
2581
- [1958]
2582
- [1959]
2583
- [1960]
2584
- [1961]
2585
- [1962]
2586
- [1963]
2587
- [1964]
2588
- [1965]
2589
- [1966]
2590
- [1967]
2591
- [1968]
2592
- [1969]
2593
- [1970]
2594
- [1971]
2595
- [1972]
2596
- [1973]
2597
- [1974]
2598
- [1975]
2599
- [1976]
2600
- [1977]
2601
- [1978]
2602
- [1979]
2603
- [1980]
2604
- [1981]
2605
- [1982]
2606
- [1983]
2607
- [1984]
2608
- [1985]
2609
- [1986]
2610
- [1987]
2611
- [1988]
2612
- [1989]
2613
- [1990]
2614
- [1991]
2615
- [1992]
2616
- [1993]
2617
- [1994]
2618
- [1995]
2619
- [1996]
2620
- [1997]
2621
- [1998]
2622
- [1999]
2623
- [2000]
2624
- [2001]
2625
- [2002]
2626
- [2003]
2627
- [2004]
2628
- [2005]
2629
- [2006]
2630
- [2007]
2631
- [2008]
2632
- [2009]
2633
- [2010]
2634
- [2011]
2635
- [2012]
2636
- [2013]
2637
- [2014]
2638
- [2015]
2639
- [2016]
2640
- [2017]
2641
- [2018]
2642
- [2019]
2643
- [2020]
2644
- [2021]
2645
- [2022]
2646
- [2023]
2647
- [2024]
2648
- [2025]
2649
- [2026]
2650
- [2027]
2651
- [2028]
2652
- [2029]
2653
- [2030]
2654
- [2031]
2655
- [2032]
2656
- [2033]
2657
- [2034]
2658
- [2035]
2659
- [2036]
2660
- [2037]
2661
- [2038]
2662
- [2039]
2663
- [2040]
2664
- [2041]
2665
- [2042]
2666
- [2043]
2667
- [2044]
2668
- [2045]
2669
- [2046]
2670
- [2047]
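
Note: the bracketed entries above are the tail of a deleted vocabulary file, one token per Morgan fingerprint bit (presumably data/vocab_morgan.txt, the default --vocab_path in train.py below). A minimal sketch, assuming the default 2048-bit fingerprint from model/utils.py, of how this tail section could be regenerated:

# Hypothetical regeneration of the vocabulary tail shown above: one bracketed
# token per Morgan bit index, here for bits 1634 through 2047.
for i in range(1634, 2048):
    print(f"[{i}]")
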
generation_config.json DELETED
@@ -1,13 +0,0 @@
1
- {
2
- "_from_model_config": true,
3
- "bos_token_id": 0,
4
- "decoder_start_token_id": 2,
5
- "early_stopping": true,
6
- "eos_token_id": 2,
7
- "forced_bos_token_id": 0,
8
- "forced_eos_token_id": 2,
9
- "no_repeat_ngram_size": 3,
10
- "num_beams": 4,
11
- "pad_token_id": 1,
12
- "transformers_version": "4.37.0"
13
- }
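
The deleted generation_config.json maps one-to-one onto transformers' GenerationConfig. A minimal sketch, assuming the transformers library is installed, of rebuilding the same configuration in code:

from transformers import GenerationConfig

# Field values mirror the deleted generation_config.json above.
gen_config = GenerationConfig(
    bos_token_id=0,
    decoder_start_token_id=2,
    early_stopping=True,
    eos_token_id=2,
    forced_bos_token_id=0,
    forced_eos_token_id=2,
    no_repeat_ngram_size=3,
    num_beams=4,
    pad_token_id=1,
)
# e.g. output_ids = model.generate(input_ids, generation_config=gen_config)
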
global_step4120000/mp_rank_00_model_states.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:21d1573828b5ea09d93b840a2d0fddc4ff050baed01d8e418d5a108a4ca2abb5
3
- size 279011960
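
This entry (like the other .pt/.safetensors/.bin entries below) is a Git LFS pointer: three "key value" lines giving the spec version, the content hash, and the byte size. A tiny parsing sketch, with the pointer text copied verbatim from above:

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:21d1573828b5ea09d93b840a2d0fddc4ff050baed01d8e418d5a108a4ca2abb5\n"
    "size 279011960"
)
fields = dict(line.split(" ", 1) for line in pointer.splitlines())
print(fields["size"])  # 279011960
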
global_step4120000/zero_pp_rank_0_mp_rank_00_optim_states.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:4124c72760850e3dfe47ef6271cf52c342902ddf4895ab2af48c1cc85df94b7b
3
- size 836532205
global_step4120000/zero_pp_rank_1_mp_rank_00_optim_states.pt DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:b99a8df8657f8304bd84ea3872b48ab441afd17d7dc7a6beb9462944a1769b15
3
- size 836542061
latest DELETED
@@ -1 +0,0 @@
1
- global_step4120000
model.safetensors DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:2add04b3d765e6b586d45a138bbdf5938a91d4668e23469c5fc977ec57685eb8
3
- size 510592834
model/__init__.py DELETED
File without changes
model/tokenizer.py DELETED
@@ -1,325 +0,0 @@
1
- # Requirements: transformers, tokenizers
2
- # Right now, the Smiles Tokenizer uses an existing vocab file from rxnfp that is fairly comprehensive and derived from the USPTO dataset.
3
- # The vocab may be expanded in the near future
4
-
5
- import collections
6
- import os
7
- import re
8
- import pkg_resources
9
- from typing import List
10
- from transformers import BertTokenizer
11
- from logging import getLogger
12
- from model.utils import get_atoms_from_smiles
13
-
14
-
15
- logger = getLogger(__name__)
16
- """
17
- SMI_REGEX_PATTERN: str
18
- SMILES regex pattern for tokenization, designed by Schwaller et al.
19
-
20
- References
21
- ----------
22
- .. [1] Philippe Schwaller, Teodoro Laino, Théophile Gaudin, Peter Bolgar, Christopher A. Hunter, Costas Bekas, and Alpha A. Lee
23
- ACS Central Science 2019 5 (9): Molecular Transformer: A Model for Uncertainty-Calibrated Chemical Reaction Prediction
24
- 1572-1583 DOI: 10.1021/acscentsci.9b00576
25
- """
26
-
27
- SMI_REGEX_PATTERN = r"""(\[[^\]]+]|Br?|Cl?|N|O|S|P|F|I|b|c|n|o|s|p|\(|\)|\.|=|#|-|\+|\\|\/|:|~|@|\?|>>?|\*|\$|\%[0-9]{2}|[0-9])"""
28
-
29
- # add vocab_file dict
30
- VOCAB_FILES_NAMES = {"vocab_file": "vocab.txt"}
31
-
32
-
33
- def get_default_tokenizer():
34
- default_vocab_path = (pkg_resources.resource_filename("deepchem",
35
- "feat/tests/vocab.txt"))
36
- return SmilesTokenizer(default_vocab_path)
37
-
38
-
39
- class SmilesTokenizer(BertTokenizer):
40
- """
41
- Creates the SmilesTokenizer class. The tokenizer heavily inherits from the BertTokenizer
42
- implementation found in Huggingface's transformers library. It runs a WordPiece tokenization
43
- algorithm over SMILES strings using the tokenisation SMILES regex developed by Schwaller et al.
44
-
45
- Please see https://github.com/huggingface/transformers
46
- and https://github.com/rxn4chemistry/rxnfp for more details.
47
-
48
- Examples
49
- --------
50
- >>> from deepchem.feat.smiles_tokenizer import SmilesTokenizer
51
- >>> current_dir = os.path.dirname(os.path.realpath(__file__))
52
- >>> vocab_path = os.path.join(current_dir, 'tests/data', 'vocab.txt')
53
- >>> tokenizer = SmilesTokenizer(vocab_path)
54
- >>> print(tokenizer.encode("CC(=O)OC1=CC=CC=C1C(=O)O"))
55
- [12, 16, 16, 17, 22, 19, 18, 19, 16, 20, 22, 16, 16, 22, 16, 16, 22, 16, 20, 16, 17, 22, 19, 18, 19, 13]
56
-
57
-
58
- References
59
- ----------
60
- .. [1] Schwaller, Philippe; Probst, Daniel; Vaucher, Alain C.; Nair, Vishnu H; Kreutter, David;
61
- Laino, Teodoro; et al. (2019): Mapping the Space of Chemical Reactions using Attention-Based Neural
62
- Networks. ChemRxiv. Preprint. https://doi.org/10.26434/chemrxiv.9897365.v3
63
-
64
- Note
65
- ----
66
- This class requires huggingface's transformers and tokenizers libraries to be installed.
67
- """
68
- vocab_files_names = VOCAB_FILES_NAMES
69
-
70
- def __init__(
71
- self,
72
- vocab_file: str = '',
73
- # unk_token="[UNK]",
74
- # sep_token="[SEP]",
75
- # pad_token="[PAD]",
76
- # cls_token="[CLS]",
77
- # mask_token="[MASK]",
78
- **kwargs):
79
- """Constructs a SmilesTokenizer.
80
-
81
- Parameters
82
- ----------
83
- vocab_file: str
84
- Path to a SMILES character per line vocabulary file.
85
- Default vocab file is found in deepchem/feat/tests/data/vocab.txt
86
- """
87
-
88
- super().__init__(vocab_file, **kwargs)
89
- # take into account special tokens in max length
90
- # self.max_len_single_sentence = self.model_max_length - 2
91
- # self.max_len_sentences_pair = self.model_max_length - 3
92
-
93
- if not os.path.isfile(vocab_file):
94
- raise ValueError(
95
- "Can't find a vocab file at path '{}'.".format(vocab_file))
96
- self.vocab = load_vocab(vocab_file)
97
- self.highest_unused_index = max(
98
- [i for i, v in enumerate(self.vocab.keys()) if v.startswith("[unused")])
99
- self.ids_to_tokens = collections.OrderedDict(
100
- [(ids, tok) for tok, ids in self.vocab.items()])
101
- self.basic_tokenizer = BasicSmilesTokenizer()
102
- self.init_kwargs["model_max_length"] = self.model_max_length
103
-
104
- @property
105
- def vocab_size(self):
106
- return len(self.vocab)
107
-
108
- @property
109
- def vocab_list(self):
110
- return list(self.vocab.keys())
111
-
112
- def _tokenize(self, text: str):
113
- """Tokenize a string into a list of tokens.
114
-
115
- Parameters
116
- ----------
117
- text: str
118
- Input string sequence to be tokenized.
119
- """
120
-
121
- split_tokens = [str(token[1]) for token in get_atoms_from_smiles(text)]
122
- return split_tokens
123
-
124
- @staticmethod
125
- def get_atom_indices(text):
126
- atoms = get_atoms_from_smiles(text)
127
- indices = []
128
- for i, a in enumerate(atoms):
129
- if a[0] == 'ATOM':
130
- indices.append(i)
131
- return indices
132
-
133
- def _convert_token_to_id(self, token: str):
134
- """Converts a token (str/unicode) in an id using the vocab.
135
-
136
- Parameters
137
- ----------
138
- token: str
139
- String token from a larger sequence to be converted to a numerical id.
140
- """
141
-
142
- return self.vocab.get(token, self.vocab.get(self.unk_token))
143
-
144
- def _convert_id_to_token(self, index: int):
145
- """Converts an index (integer) in a token (string/unicode) using the vocab.
146
-
147
- Parameters
148
- ----------
149
- index: int
150
- Integer index to be converted back to a string-based token as part of a larger sequence.
151
- """
152
-
153
- return self.ids_to_tokens.get(index, self.unk_token)
154
-
155
- def convert_tokens_to_string(self, tokens: List[str]):
156
- """Converts a sequence of tokens (string) in a single string.
157
-
158
- Parameters
159
- ----------
160
- tokens: List[str]
161
- List of tokens for a given string sequence.
162
-
163
- Returns
164
- -------
165
- out_string: str
166
- Single string from combined tokens.
167
- """
168
-
169
- out_string: str = " ".join(tokens).replace(" ##", "").strip()
170
- return out_string
171
-
172
- def add_special_tokens_ids_single_sequence(self, token_ids: List[int]):
173
- """Adds special tokens to the a sequence for sequence classification tasks.
174
-
175
- A BERT sequence has the following format: [CLS] X [SEP]
176
-
177
- Parameters
178
- ----------
179
- token_ids: list[int]
180
- list of tokenized input ids. Can be obtained using the encode or encode_plus methods.
181
- """
182
-
183
- return [self.cls_token_id] + token_ids + [self.sep_token_id]
184
-
185
- def add_special_tokens_single_sequence(self, tokens: List[str]):
186
- """Adds special tokens to the a sequence for sequence classification tasks.
187
- A BERT sequence has the following format: [CLS] X [SEP]
188
-
189
- Parameters
190
- ----------
191
- tokens: List[str]
192
- List of tokens for a given string sequence.
193
- """
194
- return [self.cls_token] + tokens + [self.sep_token]
195
-
196
- def add_special_tokens_ids_sequence_pair(self, token_ids_0: List[int],
197
- token_ids_1: List[int]) -> List[int]:
198
- """Adds special tokens to a sequence pair for sequence classification tasks.
199
- A BERT sequence pair has the following format: [CLS] A [SEP] B [SEP]
200
-
201
- Parameters
202
- ----------
203
- token_ids_0: List[int]
204
- List of ids for the first string sequence in the sequence pair (A).
205
- token_ids_1: List[int]
206
- List of tokens for the second string sequence in the sequence pair (B).
207
- """
208
-
209
- sep = [self.sep_token_id]
210
- cls = [self.cls_token_id]
211
-
212
- return cls + token_ids_0 + sep + token_ids_1 + sep
213
-
214
- def add_padding_tokens(self,
215
- token_ids: List[int],
216
- length: int,
217
- right: bool = True) -> List[int]:
218
- """Adds padding tokens to return a sequence of length max_length.
219
- By default padding tokens are added to the right of the sequence.
220
-
221
- Parameters
222
- ----------
223
- token_ids: list[int]
224
- list of tokenized input ids. Can be obtained using the encode or encode_plus methods.
225
- length: int
226
- TODO
227
- right: bool, default True
228
- TODO
229
-
230
- Returns
231
- -------
232
- List[int]
233
- TODO
234
- """
235
- padding = [self.pad_token_id] * (length - len(token_ids))
236
-
237
- if right:
238
- return token_ids + padding
239
- else:
240
- return padding + token_ids
241
-
242
- def save_vocabulary(
243
- self, vocab_path: str
244
- ): # -> tuple[str]: doctest issue raised with this return type annotation
245
- """Save the tokenizer vocabulary to a file.
246
-
247
- Parameters
248
- ----------
249
- vocab_path: str
250
- The directory in which to save the SMILES character per line vocabulary file.
251
- Default vocab file is found in deepchem/feat/tests/data/vocab.txt
252
-
253
- Returns
254
- -------
255
- vocab_file: Tuple
256
- Paths to the files saved.
257
- tuple with a string path to a SMILES character per line vocabulary file.
258
- Default vocab file is found in deepchem/feat/tests/data/vocab.txt
259
- """
260
- index = 0
261
- if os.path.isdir(vocab_path):
262
- vocab_file = os.path.join(vocab_path, VOCAB_FILES_NAMES["vocab_file"])
263
- else:
264
- vocab_file = vocab_path
265
- with open(vocab_file, "w", encoding="utf-8") as writer:
266
- for token, token_index in sorted(
267
- self.vocab.items(), key=lambda kv: kv[1]):
268
- if index != token_index:
269
- logger.warning(
270
- "Saving vocabulary to {}: vocabulary indices are not consecutive."
271
- " Please check that the vocabulary is not corrupted!".format(
272
- vocab_file))
273
- index = token_index
274
- writer.write(token + "\n")
275
- index += 1
276
- return (vocab_file,)
277
-
278
-
279
- class BasicSmilesTokenizer(object):
280
- """
281
- Run basic SMILES tokenization using a regex pattern developed by Schwaller et al.
282
- Use this tokenizer when one that does not require HuggingFace's transformers library is needed.
283
-
284
- Examples
285
- --------
286
- >>> from deepchem.feat.smiles_tokenizer import BasicSmilesTokenizer
287
- >>> tokenizer = BasicSmilesTokenizer()
288
- >>> print(tokenizer.tokenize("CC(=O)OC1=CC=CC=C1C(=O)O"))
289
- ['C', 'C', '(', '=', 'O', ')', 'O', 'C', '1', '=', 'C', 'C', '=', 'C', 'C', '=', 'C', '1', 'C', '(', '=', 'O', ')', 'O']
290
-
291
-
292
- References
293
- ----------
294
- .. [1] Philippe Schwaller, Teodoro Laino, Théophile Gaudin, Peter Bolgar, Christopher A. Hunter, Costas Bekas, and Alpha A. Lee
295
- ACS Central Science 2019 5 (9): Molecular Transformer: A Model for Uncertainty-Calibrated Chemical Reaction Prediction
296
- 1572-1583 DOI: 10.1021/acscentsci.9b00576
297
- """
298
-
299
- def __init__(self, regex_pattern: str = SMI_REGEX_PATTERN):
300
- """Constructs a BasicSMILESTokenizer.
301
-
302
- Parameters
303
- ----------
304
- regex_pattern: str
305
- SMILES token regex
306
- """
307
- self.regex_pattern = regex_pattern
308
- self.regex = re.compile(self.regex_pattern)
309
-
310
- def tokenize(self, text):
311
- """Basic Tokenization of a SMILES.
312
- """
313
- tokens = [token for token in self.regex.findall(text)]
314
- return tokens
315
-
316
-
317
- def load_vocab(vocab_file):
318
- """Loads a vocabulary file into a dictionary."""
319
- vocab = collections.OrderedDict()
320
- with open(vocab_file, "r", encoding="utf-8") as reader:
321
- tokens = reader.readlines()
322
- for index, token in enumerate(tokens):
323
- token = token.rstrip("\n")
324
- vocab[token] = index
325
- return vocab
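
As a self-contained illustration of the regex tokenization performed by BasicSmilesTokenizer above (only the standard library is needed; the pattern is SMI_REGEX_PATTERN copied from the file):

import re

SMI_REGEX_PATTERN = r"""(\[[^\]]+]|Br?|Cl?|N|O|S|P|F|I|b|c|n|o|s|p|\(|\)|\.|=|#|-|\+|\\|\/|:|~|@|\?|>>?|\*|\$|\%[0-9]{2}|[0-9])"""

regex = re.compile(SMI_REGEX_PATTERN)
print(regex.findall("CC(=O)OC1=CC=CC=C1C(=O)O"))
# ['C', 'C', '(', '=', 'O', ')', 'O', 'C', '1', '=', 'C', 'C', '=', 'C',
#  'C', '=', 'C', '1', 'C', '(', '=', 'O', ')', 'O']
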
model/trainer.py DELETED
@@ -1,57 +0,0 @@
1
- from typing import Optional, Dict, Union, Any, List, Tuple
2
- from transformers import Trainer
3
- import torch
4
- from torch import nn
5
- from torch.utils.data import DataLoader, Dataset
6
-
7
-
8
- class CustomTrainer(Trainer):
9
- def __init__(self, **kwargs):
10
- self.num_chunks = kwargs.pop("num_chunks")
11
- self.max_length = kwargs.pop("max_length")
12
- self.my_tokenizer = kwargs.pop("my_tokenizer")
13
- super(CustomTrainer, self).__init__(**kwargs)
14
- print(f"Using device: {self.args.device}")
15
-
16
- def training_step(self, model: nn.Module, inputs: Dict[str, Union[torch.Tensor, Any]]) -> torch.Tensor:
17
- return super(CustomTrainer, self).training_step(model, inputs)
18
-
19
- def prediction_step(
20
- self,
21
- model: nn.Module,
22
- inputs: Dict[str, Union[torch.Tensor, Any]],
23
- prediction_loss_only: bool,
24
- ignore_keys: Optional[List[str]] = None,
25
- ) -> Tuple[Optional[torch.Tensor], Optional[torch.Tensor], Optional[torch.Tensor]]:
26
- return super(CustomTrainer, self).prediction_step(model, inputs, prediction_loss_only, ignore_keys)
27
-
28
- def get_train_dataloader(self):
29
- train_dataloader = DataLoader(self.train_dataset,
30
- batch_size=self.args.per_device_train_batch_size,
31
- # num_workers=0,
32
- pin_memory=True)
33
- return train_dataloader
34
-
35
- def get_eval_dataloader(self, eval_dataset: Optional[Dataset] = None) -> DataLoader:
36
- if not eval_dataset:
37
- eval_dataset = self.eval_dataset
38
- validation_dataloader = DataLoader(eval_dataset,
39
- batch_size=self.args.per_device_eval_batch_size,
40
- # num_workers=0,
41
- pin_memory=True,
42
- # shuffle=False
43
- )
44
- return validation_dataloader
45
-
46
- def compute_loss(self, model, inputs, return_outputs=False):
47
- """
48
- How the loss is computed by Trainer. By default, all models return the loss in the first element.
49
-
50
- Subclass and override for custom behavior.
51
- """
52
- outputs = model(**inputs)
53
- if self.state.global_step % 501 == 0:
54
- print({'loss': torch.mean(outputs['loss']).item(),
55
- 'steps': self.state.global_step})
56
- loss = outputs['loss']
57
- return (loss, outputs) if return_outputs else loss
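
The dataloader overrides above deliberately return plain DataLoaders with pinned memory and no sampler, which is presumably what the streaming (iterable) datasets built in train.py require. A toy sketch of the resulting batching behaviour:

import torch
from torch.utils.data import DataLoader, TensorDataset

dataset = TensorDataset(torch.arange(8).float())
loader = DataLoader(dataset, batch_size=4, pin_memory=True)  # no sampler, no shuffle
for (batch,) in loader:
    print(batch)  # tensor([0., 1., 2., 3.]) then tensor([4., 5., 6., 7.])
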
model/utils.py DELETED
@@ -1,181 +0,0 @@
1
- from typing import Optional
2
- import numpy as np
3
- import py3Dmol
4
- from rdkit import Chem, DataStructs
5
- from rdkit.Chem import AllChem
6
- import torch
7
-
8
-
9
- class MorganFingerprint:
10
- def __init__(self, shape: Optional[int] = 2048, radius: Optional[int] = 2):
11
- self.shape = shape
12
- self.radius = radius
13
-
14
- @staticmethod
15
- def canonicalize(smiles):
16
- mol = Chem.MolFromSmiles(smiles)
17
- if mol is not None:
18
- return Chem.MolToSmiles(mol, isomericSmiles=True)
19
- else:
20
- return smiles
21
-
22
- def smiles_to_morgan(self, smile: str) -> torch.Tensor:
23
- try:
24
- smile = self.canonicalize(smile)
25
- mol = Chem.MolFromSmiles(smile)
26
- features_vec = AllChem.GetMorganFingerprintAsBitVect(
27
- mol, self.radius, nBits=self.shape
28
- )
29
- features = np.zeros((1,))
30
- DataStructs.ConvertToNumpyArray(features_vec, features)
31
- except Exception as e:
32
- features = np.zeros((self.shape,))
33
- return torch.tensor(features, dtype=torch.float32)
34
-
35
-
36
- def get_morgan(input_sequences):
37
- m = MorganFingerprint()
38
- morgans = []
39
- for s in input_sequences:
40
- r = m.smiles_to_morgan(s)
41
- indices_of_ones = torch.nonzero(r == 1.0, as_tuple=False)
42
- indices_of_ones = indices_of_ones.squeeze(-1)
43
- indices_of_ones = indices_of_ones.tolist()
44
- s = ""
45
- for i in indices_of_ones:
46
- s += "[" + str(i) + "]"
47
- morgans.append(s)
48
- return morgans
49
-
50
-
51
- def prepare_input_and_labels_morgan(tokenizer, input_sequences, max_length):
52
- outputs = {}
53
- batch_size = len(input_sequences)
54
- morgans = get_morgan(input_sequences)
55
- input_sequences_morgans = input_sequences + morgans
56
- inputs = tokenizer.batch_encode_plus(input_sequences_morgans, max_length=max_length, padding='max_length',
57
- return_tensors='pt', truncation=True)
58
- smiles_ids = inputs['input_ids'][:batch_size]
59
- smiles_ids = torch.where(smiles_ids == 0, -100, smiles_ids)
60
- morgan_ids = inputs['input_ids'][batch_size:]
61
- morgan_attention_mask = inputs['attention_mask'][batch_size:]
62
- outputs['labels'] = smiles_ids
63
- outputs['input_ids'] = morgan_ids
64
- outputs['attention_mask'] = morgan_attention_mask
65
- return outputs
66
-
67
-
68
- def get_atoms_from_smiles(smiles):
69
- """
70
- Iterates over a SMILES string, yielding tokens and offsets
71
-
72
- Parameters
73
- ----------
74
- smiles : str
75
- The SMILES string to iterate over
76
-
77
- Returns
78
- -------
79
- list of tuple(TokenType, str, int)
80
- Tuples describing the type of each token, its associated data, and its offset in the SMILES string
81
- """
82
- organic_subset = 'B C N O P S F Cl Br I * b c n o s p'.split()
83
- s = smiles
84
- smiles = iter(smiles)
85
- token = ''
86
- peek = None
87
- offset = -1
88
- atoms = []
89
- while True:
90
- if peek:
91
- char = peek
92
- else:
93
- char = next(smiles, '')
94
- offset += 1
95
- peek = None
96
- if not char:
97
- break
98
- if char == '[':
99
- token = char
100
- move = 0
101
- for char in smiles:
102
- move += 1
103
- token += char
104
- if char == ']':
105
- break
106
- atoms.append(('ATOM', token, offset))
107
- offset += move
108
- elif char in organic_subset:
109
- peek = next(smiles, '')
110
- if char + peek in organic_subset:
111
- atoms.append(('ATOM', char + peek, offset))
112
- peek = None
113
- else:
114
- atoms.append(('ATOM', char, offset))
115
- offset += 1
116
- elif char in '-=#$:.':
117
- atoms.append(('BOND_TYPE', char, offset))
118
- elif char == '(':
119
- atoms.append(('BRANCH_START', '(', offset))
120
- elif char == ')':
121
- atoms.append(('BRANCH_END', ')', offset))
122
- elif char == '%':
123
- # If smiles is too short this will raise a ValueError, which is
124
- # (slightly) prettier than a StopIteration.
125
- atoms.append(('RING_NUM', int(next(smiles, '') + next(smiles, '')), offset + 1))
126
- offset += 2
127
- elif char in '/\\':
128
- atoms.append(('EZSTEREO', char, offset))
129
- elif char.isdigit():
130
- atoms.append(('RING_NUM', int(char), offset))
131
- for _, a, offset in atoms:
132
- assert str(a) == s[offset: (offset + len(str(a)))]
133
- return atoms
134
-
135
-
136
- def clean_output(output_ids):
137
- clean_output_ids = []
138
- start = False
139
- for i in output_ids:
140
- if i == 13:
141
- break
142
- if start:
143
- if i > 14:
144
- clean_output_ids.append(i)
145
- if i == 0:
146
- start = True
147
- return clean_output_ids
148
-
149
-
150
- def morgan_fingerprint_to_text(morgan_fn):
151
- indices_of_ones = torch.nonzero(morgan_fn == 1.0, as_tuple=False)
152
- indices_of_ones = indices_of_ones.squeeze(-1)
153
- indices_of_ones = indices_of_ones.tolist()
154
- s = ""
155
- for i in indices_of_ones:
156
- s += "[" + str(i) + "]"
157
- return s
158
-
159
-
160
- def smiles_to_3d(smiles_list, width=400, height=300):
161
- # Visualize the 3D structure using py3Dmol
162
- view = py3Dmol.view(width=width, height=height)
163
- for smiles in smiles_list:
164
- # Generate the RDKit molecule object
165
- mol = Chem.MolFromSmiles(smiles)
166
- if mol is None:
167
- raise ValueError("Invalid SMILES string")
168
-
169
- # Add hydrogens to the molecule
170
- mol = Chem.AddHs(mol)
171
-
172
- # Generate 3D coordinates
173
- AllChem.EmbedMolecule(mol, randomSeed=42)
174
- AllChem.UFFOptimizeMolecule(mol)
175
-
176
- # Generate the 3D structure in the form of a pdb string
177
- pdb = Chem.MolToPDBBlock(mol)
178
- view.addModel(pdb, 'pdb')
179
- view.setStyle({'stick': {}})
180
- view.zoomTo()
181
- return view
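
A minimal sketch of the fingerprint-to-text encoding implemented by morgan_fingerprint_to_text above, shown on a toy 8-bit fingerprint instead of the default 2048-bit one:

import torch

# Toy fingerprint with bits 1, 4 and 5 set.
fp = torch.tensor([0., 1., 0., 0., 1., 1., 0., 0.])
indices = torch.nonzero(fp == 1.0, as_tuple=False).squeeze(-1).tolist()
text = "".join(f"[{i}]" for i in indices)
print(text)  # [1][4][5]
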
rng_state_0.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:6cd095dfb9a400482d1cbe7aeb660e1635d4c46839d11b4c9fd38b3d26db3672
3
- size 14512
rng_state_1.pth DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:92852906cdd6fb428f8147e3fdaadeb026c6cce1f3a0bf56c0c7593ab725edb5
3
- size 14512
train.py DELETED
@@ -1,109 +0,0 @@
1
- import argparse
2
- from transformers import TrainingArguments, IntervalStrategy, EarlyStoppingCallback, AutoModelForSeq2SeqLM
3
- from model.tokenizer import SmilesTokenizer
4
- from model.trainer import CustomTrainer
5
- from datasets import load_dataset
6
- from model.utils import prepare_input_and_labels_morgan
7
- import glob2
8
-
9
- if __name__ == "__main__":
10
- parser = argparse.ArgumentParser(description='AntBrain training')
11
- parser.add_argument(
12
- '-t', '--train', default='./data/train_graph.jsonl.gz',
13
- type=str,
14
- help='Root directory with the training data')
15
- parser.add_argument(
16
- '-v', '--validation', default='./data/train_graph.jsonl.gz', type=str,
17
- help='Root directory with the validation data')
18
- parser.add_argument("--learning-rate", default=3e-05, type=float)
19
- parser.add_argument("--per-device-train-batch-size", default=8, type=int)
20
- parser.add_argument("--per-device-eval-batch-size", default=8, type=int)
21
- parser.add_argument("--weight-decay", default=0.01, type=float)
22
- parser.add_argument("--epochs", default=5, type=int)
23
- parser.add_argument("--save-total-limit", default=3, type=int)
24
- parser.add_argument("--saving_steps", default=1000, type=int)
25
- parser.add_argument("--evaluation_steps", default=1, type=int)
26
- parser.add_argument("--adam-eps", default=1e-08, type=float)
27
- parser.add_argument("--adam-betas", default=(0.9, 0.999), nargs="+", type=float)
28
- parser.add_argument("--warmup-updates", default=500, type=int)
29
- parser.add_argument("--warmup_steps", default=500, type=int)
30
- parser.add_argument("--max_steps", default=500, type=int)
31
- parser.add_argument("--patience", default=200, type=int)
32
- parser.add_argument("--num_workers", default=10, type=int)
33
- parser.add_argument("--logging-steps", default=1, type=int)
34
- parser.add_argument("--fp16", action='store_true')
35
- parser.add_argument("--deepspeed", default=None, type=str, help="Deep speed configuration file")
36
- parser.add_argument("--local_rank", type=int, default=-1)
37
- parser.add_argument('-m', '--model', type=str, default='facebook/bart-base', help='model name')
38
- parser.add_argument('--max_length', type=int, default=128, help='Max sequence length')
39
- parser.add_argument('--num_chunks', type=int, default=2, help='number of chunks per training step')
40
- parser.add_argument('--vocab_path', type=str, default="./data/vocab_morgan.txt", help='vocab file path.')
41
- parser.add_argument('--output_dir', type=str, default="./results", help='output dir where the models are saved')
42
- parser.add_argument('--checkpoint', type=str, default=None, help='Path to the check point.')
43
- parser.add_argument('--ignore_data_skip', type=str, default='yes', help='whether to skip checking data before training '
44
- 'from checkpoint')
45
-
46
- args = parser.parse_args()
47
- tokenizer = SmilesTokenizer(vocab_file=args.vocab_path)
48
-
49
- # DataLoaders
50
- file_lists = args.train.replace("'", "").split(",")
51
- train_files = []
52
- for file_list in file_lists:
53
- train_files += glob2.glob(file_list)
54
- train_files.sort()
55
- print("Training data")
56
- for file in train_files:
57
- print(file)
58
- validation_files = glob2.glob(args.validation.replace("'", ""))
59
- validation_files.sort()
60
- print("Validation data", validation_files)
61
- data_set = load_dataset('json',
62
- data_files={'train': train_files,
63
- 'val': validation_files},
64
- streaming=True)
65
- data_set = data_set.map(lambda e: prepare_input_and_labels_morgan(tokenizer=tokenizer, input_sequences=e['smiles'],
66
- max_length=args.max_length),
67
- batched=True,
68
- remove_columns=["atom", "smiles", "bond_edges", "bond_types"],
69
- batch_size=100)
70
-
71
- data_set = data_set.with_format('torch')
72
- model = AutoModelForSeq2SeqLM.from_pretrained(args.model)
73
-
74
- training_args = TrainingArguments(
75
- output_dir=args.output_dir,
76
- evaluation_strategy=IntervalStrategy.STEPS,
77
- learning_rate=args.learning_rate,
78
- per_device_train_batch_size=args.per_device_train_batch_size,
79
- per_device_eval_batch_size=args.per_device_eval_batch_size,
80
- weight_decay=args.weight_decay,
81
- save_total_limit=args.save_total_limit,
82
- save_steps=args.saving_steps,
83
- eval_steps=args.evaluation_steps,
84
- num_train_epochs=args.epochs,
85
- logging_steps=args.logging_steps,
86
- fp16=args.fp16,
87
- dataloader_num_workers=args.num_workers,
88
- load_best_model_at_end=True,
89
- deepspeed=args.deepspeed,
90
- max_steps=args.max_steps,
91
- warmup_steps=args.warmup_steps,
92
- ignore_data_skip=args.ignore_data_skip == 'yes'
93
- )
94
-
95
- trainer = CustomTrainer(
96
- model=model,
97
- args=training_args,
98
- train_dataset=data_set['train'],
99
- eval_dataset=data_set['val'],
100
- num_chunks=args.num_chunks,
101
- my_tokenizer=tokenizer,
102
- max_length=args.max_length,
103
- callbacks=[EarlyStoppingCallback(early_stopping_patience=args.patience)]
104
- )
105
- if args.checkpoint:
106
- trainer.train(args.checkpoint)
107
- else:
108
- trainer.train()
109
- trainer.evaluate()
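
One detail worth noting in the pipeline above: prepare_input_and_labels_morgan (model/utils.py) replaces pad positions in the label ids with -100 so the seq2seq loss ignores them. A toy sketch of that convention, mirroring the smiles_ids == 0 check in utils.py:

import torch

smiles_ids = torch.tensor([[12, 16, 17, 0, 0]])
labels = torch.where(smiles_ids == 0, -100, smiles_ids)
print(labels)  # tensor([[  12,   16,   17, -100, -100]])
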
trainer_state.json DELETED
The diff for this file is too large to render. See raw diff
training_args.bin DELETED
@@ -1,3 +0,0 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:5b2db6ec78a7157dbeee04114a5472abebf66b70394acaffb21a3238de5d390e
3
- size 6200
zero_to_fp32.py DELETED
@@ -1,587 +0,0 @@
1
- #!/usr/bin/env python
2
-
3
- # Copyright (c) Microsoft Corporation.
4
- # SPDX-License-Identifier: Apache-2.0
5
-
6
- # DeepSpeed Team
7
-
8
- # This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
9
- # copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
10
- # the future. Once extracted, the weights don't require DeepSpeed and can be used in any
11
- # application.
12
- #
13
- # example: python zero_to_fp32.py . pytorch_model.bin
14
-
15
- import argparse
16
- import torch
17
- import glob
18
- import math
19
- import os
20
- import re
21
- from collections import OrderedDict
22
- from dataclasses import dataclass
23
-
24
- # while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
25
- # DeepSpeed data structures it has to be available in the current python environment.
26
- from deepspeed.utils import logger
27
- from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
28
- FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
29
- FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
30
-
31
-
32
- @dataclass
33
- class zero_model_state:
34
- buffers: dict()
35
- param_shapes: dict()
36
- shared_params: list
37
- ds_version: int
38
- frozen_param_shapes: dict()
39
- frozen_param_fragments: dict()
40
-
41
-
42
- debug = 0
43
-
44
- # load to cpu
45
- device = torch.device('cpu')
46
-
47
-
48
- def atoi(text):
49
- return int(text) if text.isdigit() else text
50
-
51
-
52
- def natural_keys(text):
53
- '''
54
- alist.sort(key=natural_keys) sorts in human order
55
- http://nedbatchelder.com/blog/200712/human_sorting.html
56
- (See Toothy's implementation in the comments)
57
- '''
58
- return [atoi(c) for c in re.split(r'(\d+)', text)]
59
-
60
-
61
- def get_model_state_file(checkpoint_dir, zero_stage):
62
- if not os.path.isdir(checkpoint_dir):
63
- raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
64
-
65
- # there should be only one file
66
- if zero_stage <= 2:
67
- file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
68
- elif zero_stage == 3:
69
- file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
70
-
71
- if not os.path.exists(file):
72
- raise FileNotFoundError(f"can't find model states file at '{file}'")
73
-
74
- return file
75
-
76
-
77
- def get_checkpoint_files(checkpoint_dir, glob_pattern):
78
- # XXX: need to test that this simple glob rule works for multi-node setup too
79
- ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
80
-
81
- if len(ckpt_files) == 0:
82
- raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
83
-
84
- return ckpt_files
85
-
86
-
87
- def get_optim_files(checkpoint_dir):
88
- return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
89
-
90
-
91
- def get_model_state_files(checkpoint_dir):
92
- return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
93
-
94
-
95
- def parse_model_states(files):
96
- zero_model_states = []
97
- for file in files:
98
- state_dict = torch.load(file, map_location=device)
99
-
100
- if BUFFER_NAMES not in state_dict:
101
- raise ValueError(f"{file} is not a model state checkpoint")
102
- buffer_names = state_dict[BUFFER_NAMES]
103
- if debug:
104
- print("Found buffers:", buffer_names)
105
-
106
- # recover just the buffers while restoring them to fp32 if they were saved in fp16
107
- buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
108
- param_shapes = state_dict[PARAM_SHAPES]
109
-
110
- # collect parameters that are included in param_shapes
111
- param_names = []
112
- for s in param_shapes:
113
- for name in s.keys():
114
- param_names.append(name)
115
-
116
- # update with frozen parameters
117
- frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
118
- if frozen_param_shapes is not None:
119
- if debug:
120
- print(f"Found frozen_param_shapes: {frozen_param_shapes}")
121
- param_names += list(frozen_param_shapes.keys())
122
-
123
- # handle shared params
124
- shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
125
-
126
- ds_version = state_dict.get(DS_VERSION, None)
127
-
128
- frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
129
-
130
- z_model_state = zero_model_state(buffers=buffers,
131
- param_shapes=param_shapes,
132
- shared_params=shared_params,
133
- ds_version=ds_version,
134
- frozen_param_shapes=frozen_param_shapes,
135
- frozen_param_fragments=frozen_param_fragments)
136
- zero_model_states.append(z_model_state)
137
-
138
- return zero_model_states
139
-
140
-
141
- def parse_optim_states(files, ds_checkpoint_dir):
142
-
143
- total_files = len(files)
144
- state_dicts = []
145
- for f in files:
146
- state_dict = torch.load(f, map_location=device)
147
- # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
148
- # and also handle the case where it was already removed by another helper script
149
- state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
150
- state_dicts.append(state_dict)
151
-
152
- if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
153
- raise ValueError(f"{files[0]} is not a zero checkpoint")
154
- zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
155
- world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
156
-
157
- # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
158
- # parameters can be different from data parallelism for non-expert parameters. So we can just
159
- # use the max of the partition_count to get the dp world_size.
160
-
161
- if type(world_size) is list:
162
- world_size = max(world_size)
163
-
164
- if world_size != total_files:
165
- raise ValueError(
166
- f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
167
- "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
168
- )
169
-
170
- # the groups are named differently in each stage
171
- if zero_stage <= 2:
172
- fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
173
- elif zero_stage == 3:
174
- fp32_groups_key = FP32_FLAT_GROUPS
175
- else:
176
- raise ValueError(f"unknown zero stage {zero_stage}")
177
-
178
- if zero_stage <= 2:
179
- fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
180
- elif zero_stage == 3:
181
- # if there is more than one param group, there will be multiple flattened tensors - one
182
- # flattened tensor per group - for simplicity merge them into a single tensor
183
- #
184
- # XXX: could make the script more memory efficient for when there are multiple groups - it
185
- # will require matching the sub-lists of param_shapes for each param group flattened tensor
186
-
187
- fp32_flat_groups = [
188
- torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
189
- ]
190
-
191
- return zero_stage, world_size, fp32_flat_groups
192
-
193
-
194
- def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir):
195
- """
196
- Returns fp32 state_dict reconstructed from ds checkpoint
197
-
198
- Args:
199
- - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
200
-
201
- """
202
- print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
203
-
204
- optim_files = get_optim_files(ds_checkpoint_dir)
205
- zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
206
- print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
207
-
208
- model_files = get_model_state_files(ds_checkpoint_dir)
209
-
210
- zero_model_states = parse_model_states(model_files)
211
- print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
212
-
213
- if zero_stage <= 2:
214
- return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states)
215
- elif zero_stage == 3:
216
- return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states)
217
-
218
-
219
- def _zero2_merge_frozen_params(state_dict, zero_model_states):
220
- if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
221
- return
222
-
223
- frozen_param_shapes = zero_model_states[0].frozen_param_shapes
224
- frozen_param_fragments = zero_model_states[0].frozen_param_fragments
225
-
226
- if debug:
227
- num_elem = sum(s.numel() for s in frozen_param_shapes.values())
228
- print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
229
-
230
- wanted_params = len(frozen_param_shapes)
231
- wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
232
- avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
233
- print(f'Frozen params: Have {avail_numel} numels to process.')
234
- print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
235
-
236
- total_params = 0
237
- total_numel = 0
238
- for name, shape in frozen_param_shapes.items():
239
- total_params += 1
240
- unpartitioned_numel = shape.numel()
241
- total_numel += unpartitioned_numel
242
-
243
- state_dict[name] = frozen_param_fragments[name]
244
-
245
- if debug:
246
- print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
247
-
248
- print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
249
-
250
-
251
- def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
252
- param_shapes = zero_model_states[0].param_shapes
253
-
254
- # Reconstruction protocol:
255
- #
256
- # XXX: document this
257
-
258
- if debug:
259
- for i in range(world_size):
260
- for j in range(len(fp32_flat_groups[0])):
261
- print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
262
-
263
- # XXX: memory usage doubles here (zero2)
264
- num_param_groups = len(fp32_flat_groups[0])
265
- merged_single_partition_of_fp32_groups = []
266
- for i in range(num_param_groups):
267
- merged_partitions = [sd[i] for sd in fp32_flat_groups]
268
- full_single_fp32_vector = torch.cat(merged_partitions, 0)
269
- merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
270
- avail_numel = sum(
271
- [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
272
-
273
- if debug:
274
- wanted_params = sum([len(shapes) for shapes in param_shapes])
275
- wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
276
- # not asserting if there is a mismatch due to possible padding
277
- print(f"Have {avail_numel} numels to process.")
278
- print(f"Need {wanted_numel} numels in {wanted_params} params.")
279
-
280
- # params
281
- # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
282
- # out-of-core computing solution
283
- total_numel = 0
284
- total_params = 0
285
- for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
286
- offset = 0
287
- avail_numel = full_single_fp32_vector.numel()
288
- for name, shape in shapes.items():
289
-
290
- unpartitioned_numel = shape.numel()
291
- total_numel += unpartitioned_numel
292
- total_params += 1
293
-
294
- if debug:
295
- print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
296
- state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
297
- offset += unpartitioned_numel
298
-
299
- # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
300
- # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
301
- # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
302
- # live optimizer object, so we are checking that the numbers are within the right range
303
- align_to = 2 * world_size
304
-
305
- def zero2_align(x):
306
- return align_to * math.ceil(x / align_to)
307
-
308
- if debug:
309
- print(f"original offset={offset}, avail_numel={avail_numel}")
310
-
311
- offset = zero2_align(offset)
312
- avail_numel = zero2_align(avail_numel)
313
-
314
- if debug:
315
- print(f"aligned offset={offset}, avail_numel={avail_numel}")
316
-
317
- # Sanity check
318
- if offset != avail_numel:
319
- raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
320
-
321
- print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
322
-
323
-
324
- def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states):
325
- state_dict = OrderedDict()
326
-
327
- # buffers
328
- buffers = zero_model_states[0].buffers
329
- state_dict.update(buffers)
330
- if debug:
331
- print(f"added {len(buffers)} buffers")
332
-
333
- _zero2_merge_frozen_params(state_dict, zero_model_states)
334
-
335
- _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
336
-
337
- # recover shared parameters
338
- for pair in zero_model_states[0].shared_params:
339
- if pair[1] in state_dict:
340
- state_dict[pair[0]] = state_dict[pair[1]]
341
-
342
- return state_dict
343
-
344
-
345
- def zero3_partitioned_param_info(unpartitioned_numel, world_size):
346
- remainder = unpartitioned_numel % world_size
347
- padding_numel = (world_size - remainder) if remainder else 0
348
- partitioned_numel = math.ceil(unpartitioned_numel / world_size)
349
- return partitioned_numel, padding_numel
350
-
351
-
352
- def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
353
- if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
354
- return
355
-
356
- if debug:
357
- for i in range(world_size):
358
- num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
359
- print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
360
-
361
- frozen_param_shapes = zero_model_states[0].frozen_param_shapes
362
- wanted_params = len(frozen_param_shapes)
363
- wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
364
- avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
365
- print(f'Frozen params: Have {avail_numel} numels to process.')
366
- print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
367
-
368
- total_params = 0
369
- total_numel = 0
370
- for name, shape in zero_model_states[0].frozen_param_shapes.items():
371
- total_params += 1
372
- unpartitioned_numel = shape.numel()
373
- total_numel += unpartitioned_numel
374
-
375
- param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
376
- state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
377
-
378
- partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
379
-
380
- if debug:
381
- print(
382
- f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
383
- )
384
-
385
- print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
386
-
387
-
388
- def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
389
- param_shapes = zero_model_states[0].param_shapes
390
- avail_numel = fp32_flat_groups[0].numel() * world_size
391
- # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
392
- # param, re-consolidating each param, while dealing with padding if any
393
-
394
- # merge list of dicts, preserving order
395
- param_shapes = {k: v for d in param_shapes for k, v in d.items()}
396
-
397
- if debug:
398
- for i in range(world_size):
399
- print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
400
-
401
- wanted_params = len(param_shapes)
402
- wanted_numel = sum(shape.numel() for shape in param_shapes.values())
403
- # not asserting if there is a mismatch due to possible padding
404
- avail_numel = fp32_flat_groups[0].numel() * world_size
405
- print(f"Trainable params: Have {avail_numel} numels to process.")
406
- print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
407
-
408
- # params
409
- # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
410
- # out-of-core computing solution
411
- offset = 0
412
- total_numel = 0
413
- total_params = 0
414
- for name, shape in param_shapes.items():
415
-
416
- unpartitioned_numel = shape.numel()
417
- total_numel += unpartitioned_numel
418
- total_params += 1
419
-
420
- partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
421
-
422
- if debug:
423
- print(
424
- f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
425
- )
426
-
427
- # XXX: memory usage doubles here
428
- state_dict[name] = torch.cat(
429
- tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
430
- 0).narrow(0, 0, unpartitioned_numel).view(shape)
431
- offset += partitioned_numel
432
-
433
- offset *= world_size
434
-
435
- # Sanity check
436
- if offset != avail_numel:
437
- raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
438
-
439
- print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
440
-
441
-
442
- def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states):
443
- state_dict = OrderedDict()
444
-
445
- # buffers
446
- buffers = zero_model_states[0].buffers
447
- state_dict.update(buffers)
448
- if debug:
449
- print(f"added {len(buffers)} buffers")
450
-
451
- _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
452
-
453
- _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
454
-
455
- # recover shared parameters
456
- for pair in zero_model_states[0].shared_params:
457
- if pair[1] in state_dict:
458
- state_dict[pair[0]] = state_dict[pair[1]]
459
-
460
- return state_dict
461
-
462
-
463
- def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None):
464
- """
465
- Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
466
- ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
467
- via a model hub.
468
-
469
- Args:
470
- - ``checkpoint_dir``: path to the desired checkpoint folder
471
- - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
472
-
473
- Returns:
474
- - pytorch ``state_dict``
475
-
476
- Note: this approach may not work if your application doesn't have sufficient free CPU memory and
477
- you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
478
- the checkpoint.
479
-
480
- A typical usage might be ::
481
-
482
- from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
483
- # do the training and checkpoint saving
484
- state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
485
- model = model.cpu() # move to cpu
486
- model.load_state_dict(state_dict)
487
- # submit to model hub or save the model to share with others
488
-
489
- In this example the ``model`` will no longer be usable in the deepspeed context of the same
490
- application. i.e. you will need to re-initialize the deepspeed engine, since
491
- ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
492
-
493
- If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
494
-
495
- """
496
- if tag is None:
497
- latest_path = os.path.join(checkpoint_dir, 'latest')
498
- if os.path.isfile(latest_path):
499
- with open(latest_path, 'r') as fd:
500
- tag = fd.read().strip()
501
- else:
502
- raise ValueError(f"Unable to find 'latest' file at {latest_path}")
503
-
504
- ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
505
-
506
- if not os.path.isdir(ds_checkpoint_dir):
507
- raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
508
-
509
- return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir)
510
-
511
-
512
- def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None):
513
- """
514
- Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
515
- loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
516
-
517
- Args:
518
- - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
519
- - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
520
- - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
521
- """
522
-
523
- state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
524
- print(f"Saving fp32 state dict to {output_file}")
525
- torch.save(state_dict, output_file)
526
-
527
-
528
- def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
529
- """
530
- 1. Put the provided model to cpu
531
- 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
532
- 3. Load it into the provided model
533
-
534
- Args:
535
- - ``model``: the model object to update
536
- - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
537
- - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
538
-
539
- Returns:
540
- - ``model``: modified model
541
-
542
- Make sure you have plenty of CPU memory available before you call this function. If you don't
543
- have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
544
- conveniently placed for you in the checkpoint folder.
545
-
546
- A typical usage might be ::
547
-
548
- from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
549
- model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
550
- # submit to model hub or save the model to share with others
551
-
552
- Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
553
- of the same application. i.e. you will need to re-initialize the deepspeed engine, since
554
- ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
555
-
556
- """
557
- logger.info(f"Extracting fp32 weights")
558
- state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
559
-
560
- logger.info(f"Overwriting model with fp32 weights")
561
- model = model.cpu()
562
- model.load_state_dict(state_dict, strict=False)
563
-
564
- return model
565
-
566
-
567
- if __name__ == "__main__":
568
-
569
- parser = argparse.ArgumentParser()
570
- parser.add_argument("checkpoint_dir",
571
- type=str,
572
- help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
573
- parser.add_argument(
574
- "output_file",
575
- type=str,
576
- help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
577
- parser.add_argument("-t",
578
- "--tag",
579
- type=str,
580
- default=None,
581
- help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
582
- parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
583
- args = parser.parse_args()
584
-
585
- debug = args.debug
586
-
587
- convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir, args.output_file, tag=args.tag)
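
A hedged usage sketch for the script above, matching the example in its own header comment (paths here are illustrative):

from zero_to_fp32 import convert_zero_checkpoint_to_fp32_state_dict

# Reads the tag from the 'latest' file (here: global_step4120000) and writes a
# consolidated fp32 state dict.
convert_zero_checkpoint_to_fp32_state_dict(".", "pytorch_model.bin")
# CLI equivalent, as given in the script header:
#   python zero_to_fp32.py . pytorch_model.bin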