PHZane committed on
Commit 2abebee · 1 Parent(s): 6b660d7

Upload 29 files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tmp_trainer/model_state.pdparams filter=lfs diff=lfs merge=lfs -text
.ipynb_checkpoints/P-tunning-checkpoint.ipynb ADDED
@@ -0,0 +1,143 @@
+ {
+  "cells": [
+   {
+    "cell_type": "code",
+    "execution_count": 5,
+    "metadata": {
+     "pycharm": {
+      "is_executing": true
+     }
+    },
+    "outputs": [
+     {
+      "ename": "ModuleNotFoundError",
+      "evalue": "No module named 'utilities'",
+      "output_type": "error",
+      "traceback": [
+       "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+       "\u001b[1;31mModuleNotFoundError\u001b[0m Traceback (most recent call last)",
+       "Cell \u001b[1;32mIn[5], line 14\u001b[0m\n\u001b[0;32m 11\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mtransformers\u001b[39;00m\n\u001b[0;32m 12\u001b[0m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;21;01mpandas\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m \u001b[38;5;21;01mpd\u001b[39;00m\n\u001b[1;32m---> 14\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mutilities\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m \u001b[38;5;241m*\u001b[39m\n\u001b[0;32m 15\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtransformers\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m AutoTokenizer\n\u001b[0;32m 16\u001b[0m \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;21;01mtransformers\u001b[39;00m \u001b[38;5;28;01mimport\u001b[39;00m AutoModelForCausalLM\n",
+       "\u001b[1;31mModuleNotFoundError\u001b[0m: No module named 'utilities'"
+      ]
+     }
+    ],
+    "source": [
+     "import datasets\n",
+     "import tempfile\n",
+     "import logging\n",
+     "import random\n",
+     "import config\n",
+     "import os\n",
+     "import yaml\n",
+     "import logging\n",
+     "import time\n",
+     "import torch\n",
+     "import transformers\n",
+     "import pandas as pd\n",
+     "\n",
+     "from utilities import *\n",
+     "from transformers import AutoTokenizer\n",
+     "from transformers import AutoModelForCausalLM\n",
+     "from transformers import TrainingArguments\n",
+     "from transformers import AutoModelForCausalLM\n",
+     "from llama import BasicModelRunner\n",
+     "from llama import BasicModelRunner\n",
+     "\n",
+     "logger = logging.getLogger(__name__)\n",
+     "global_config = None"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 2,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "dataset_path = \"lamini/lamini_docs\"\n",
+     "use_hf = True"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 3,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "model_name = \"EleutherAI/pythia-70m\""
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 4,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "training_config = {\n",
+     " \"model\": {\n",
+     " \"pretrained_name\": model_name,\n",
+     " \"max_length\" : 2048\n",
+     " },\n",
+     " \"datasets\": {\n",
+     " \"use_hf\": use_hf,\n",
+     " \"path\": dataset_path\n",
+     " },\n",
+     " \"verbose\": True\n",
+     "}"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 6,
+    "metadata": {},
+    "outputs": [
+     {
+      "ename": "NameError",
+      "evalue": "name 'AutoTokenizer' is not defined",
+      "output_type": "error",
+      "traceback": [
+       "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
+       "\u001b[1;31mNameError\u001b[0m Traceback (most recent call last)",
+       "Cell \u001b[1;32mIn[6], line 1\u001b[0m\n\u001b[1;32m----> 1\u001b[0m tokenizer \u001b[38;5;241m=\u001b[39m \u001b[43mAutoTokenizer\u001b[49m\u001b[38;5;241m.\u001b[39mfrom_pretrained(model_name)\n\u001b[0;32m 2\u001b[0m tokenizer\u001b[38;5;241m.\u001b[39mpad_token \u001b[38;5;241m=\u001b[39m tokenizer\u001b[38;5;241m.\u001b[39meos_token\n\u001b[0;32m 3\u001b[0m train_dataset, test_dataset \u001b[38;5;241m=\u001b[39m tokenize_and_split_data(training_config, tokenizer)\n",
+       "\u001b[1;31mNameError\u001b[0m: name 'AutoTokenizer' is not defined"
+      ]
+     }
+    ],
+    "source": [
+     "tokenizer = AutoTokenizer.from_pretrained(model_name)\n",
+     "tokenizer.pad_token = tokenizer.eos_token\n",
+     "train_dataset, test_dataset = tokenize_and_split_data(training_config, tokenizer)\n",
+     "\n",
+     "print(train_dataset)\n",
+     "print(test_dataset)"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": []
+   }
+  ],
+  "metadata": {
+   "kernelspec": {
+    "display_name": "Python 3 (ipykernel)",
+    "language": "python",
+    "name": "python3"
+   },
+   "language_info": {
+    "codemirror_mode": {
+     "name": "ipython",
+     "version": 3
+    },
+    "file_extension": ".py",
+    "mimetype": "text/x-python",
+    "name": "python",
+    "nbconvert_exporter": "python",
+    "pygments_lexer": "ipython3",
+    "version": "3.9.13"
+   }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 1
+ }
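Note on the two errors recorded in the notebook outputs above: the first cell dies at `from utilities import *` (apparently a helper module from the Lamini course material that is not part of this upload), which aborts the cell before the transformers imports run; that is why cell 6 then raises NameError for AutoTokenizer. A minimal sketch of the part that runs standalone, assuming only the transformers package is installed:

    from transformers import AutoTokenizer

    model_name = "EleutherAI/pythia-70m"
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    tokenizer.pad_token = tokenizer.eos_token  # pythia-70m ships without a pad token
    print(tokenizer("hello"))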
408-h2.csv ADDED
@@ -0,0 +1,137 @@
+ id,smoking,sex,age,hospitalizations,temperature,pulse,respiratory_rate,systolic_bp,diastolic_bp,pulmonary_heart_disease,bronchiectasis,hypertension,diabetes,coronary_heart_disease,chronic_kidney_disease,malignancy,cerebrovascular_disease,viral_hepatitis,cirrhosis,wbc_4to10,crp_0.068to8.2,hs_crp_0to3,esr_0to20,lymphocytes_0.8to3.5,neutrophils_1.8to6.3,eosinophils_0.004to0.08,monocytes_0.12to0.8,procalcitonin_lt0.5,spo2_95to98,paco2_35to45,pao2_fio2_gt300,albumin_35to51,globulin_20to30,ldl_c_lt3.37,hdl_c_0.91to2.17,total_cholesterol_2.85to5.69,triglycerides_lt1.7,urea_nitrogen_2.9to7.5,uric_acid_90to420,creatinine_44to106,level
+ 286.0,1.0,1.0,64.0,2.0,37.1,98.0,20.0,120.0,70.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,1.0,2.0,2.0,1.0,2.0,0.0,2.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0
+ 183.0,1.0,1.0,85.0,8.0,36.5,90.0,18.0,145.0,71.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 219.0,1.0,1.0,93.0,2.0,37.0,104.0,25.0,91.0,56.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,2.0,2.0,2.0,1.0,0.0,2.0,0.0,2.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 70.0,1.0,1.0,64.0,4.0,36.8,108.0,24.0,114.0,66.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0
+ 82.0,1.0,1.0,69.0,2.0,36.8,84.0,16.0,116.0,70.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0
+ 392.0,1.0,1.0,91.0,17.0,36.0,97.0,20.0,169.0,89.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 308.0,1.0,1.0,73.0,2.0,36.3,96.0,19.0,130.0,92.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,2.0,1.0,2.0,1.0,0.0,2.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0
+ 50.0,1.0,0.0,76.0,3.0,36.0,72.0,20.0,149.0,87.0,0.0,3.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,2.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,0.0
+ 273.0,0.0,1.0,93.0,1.0,36.3,104.0,16.0,157.0,98.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,2.0,1.0,0.0,1.0,2.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 354.0,1.0,1.0,84.0,4.0,36.0,94.0,20.0,117.0,74.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,0.0,2.0,2.0,1.0,2.0,2.0,1.0,2.0,2.0,2.0,1.0
+ 161.0,1.0,1.0,84.0,2.0,36.7,97.0,24.0,139.0,74.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 360.0,1.0,1.0,85.0,5.0,36.0,88.0,20.0,124.0,62.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,2.0,2.0,2.0,1.0,0.0,2.0,2.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0
+ 407.0,1.0,1.0,93.0,4.0,36.8,96.0,20.0,140.0,85.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,1.0,1.0,2.0,1.0,0.0,2.0,2.0,1.0,2.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 330.0,1.0,1.0,78.0,20.0,37.0,84.0,16.0,112.0,60.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,2.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,1.0
+ 254.0,0.0,1.0,74.0,5.0,36.2,90.0,28.0,106.0,77.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0
+ 258.0,0.0,1.0,79.0,7.0,37.8,134.0,37.0,180.0,82.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,2.0,2.0,2.0,1.0,2.0,2.0,2.0,2.0,1.0,1.0,2.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0
+ 320.0,1.0,1.0,75.0,9.0,36.9,117.0,21.0,158.0,83.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,2.0,0.0,0.0,2.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 102.0,1.0,1.0,74.0,1.0,36.8,86.0,20.0,108.0,71.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,2.0,2.0,2.0,1.0,0.0,1.0,2.0,2.0,2.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 159.0,1.0,1.0,84.0,1.0,37.2,92.0,20.0,127.0,65.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,2.0,0.0,2.0,2.0,1.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 77.0,1.0,1.0,66.0,3.0,36.3,80.0,25.0,111.0,72.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 396.0,1.0,1.0,91.0,21.0,36.4,93.0,25.0,115.0,73.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,1.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0
+ 271.0,0.0,1.0,91.0,31.0,36.5,85.0,25.0,116.0,69.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0
+ 146.0,1.0,1.0,81.0,2.0,36.9,76.0,28.0,160.0,87.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,1.0,1.0,1.0,1.0,0.0,2.0,2.0,1.0,2.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 262.0,0.0,1.0,84.0,3.0,36.4,72.0,21.0,138.0,53.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,1.0,1.0,1.0,2.0,2.0,2.0,1.0,2.0,2.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,1.0
+ 337.0,1.0,1.0,80.0,2.0,36.8,110.0,36.0,124.0,79.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 37.0,0.0,1.0,79.0,4.0,38.2,117.0,20.0,141.0,65.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,1.0,2.0,1.0,1.0,1.0,2.0,2.0,2.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0
+ 260.0,0.0,1.0,82.0,2.0,38.3,95.0,16.0,85.0,58.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,2.0,2.0,1.0,2.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,2.0,1.0,1.0
+ 397.0,1.0,1.0,91.0,22.0,36.3,76.0,25.0,113.0,74.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,1.0,1.0,2.0,2.0,1.0,0.0,2.0,0.0,2.0,2.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0
+ 280.0,1.0,1.0,56.0,2.0,36.3,106.0,20.0,162.0,72.0,0.0,0.0,2.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,2.0,2.0,2.0,1.0
+ 113.0,1.0,1.0,76.0,2.0,36.8,85.0,20.0,135.0,85.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,0.0,2.0,2.0,1.0,2.0,1.0,0.0,2.0,2.0,2.0,2.0,0.0,1.0,1.0,1.0,1.0,2.0,2.0,2.0,0.0
+ 177.0,1.0,1.0,85.0,3.0,37.2,70.0,20.0,151.0,53.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,2.0,1.0,2.0,2.0,1.0,0.0,2.0,0.0,2.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 125.0,1.0,1.0,78.0,4.0,37.0,104.0,20.0,109.0,64.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0
+ 7.0,0.0,0.0,77.0,2.0,36.9,104.0,20.0,128.0,66.0,0.0,0.0,2.0,2.0,0.0,1.0,1.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,2.0,1.0,2.0,1.0,2.0,0.0,0.0,0.0,2.0,1.0,1.0,2.0,2.0,1.0,2.0,2.0,2.0,0.0
+ 152.0,1.0,1.0,81.0,8.0,36.5,68.0,20.0,138.0,68.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,2.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,0.0
+ 176.0,1.0,1.0,85.0,3.0,36.3,86.0,20.0,180.0,81.0,0.0,0.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,2.0,2.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0
+ 379.0,1.0,1.0,89.0,3.0,36.7,110.0,26.0,130.0,75.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,2.0,2.0,1.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0,1.0,2.0,2.0,2.0,1.0
+ 137.0,1.0,1.0,80.0,2.0,36.8,88.0,22.0,140.0,79.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,1.0,2.0,1.0,2.0,0.0,2.0,2.0,0.0,2.0,2.0,1.0,1.0,1.0,2.0,1.0,0.0
+ 18.0,0.0,0.0,88.0,2.0,36.7,96.0,20.0,147.0,90.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,2.0,2.0,2.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 217.0,1.0,1.0,91.0,2.0,36.3,62.0,26.0,91.0,55.0,0.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,1.0,2.0,1.0,2.0,0.0,2.0,2.0,2.0,1.0,0.0,1.0,1.0,1.0,1.0,2.0,2.0,2.0,0.0
+ 131.0,1.0,1.0,79.0,3.0,36.3,109.0,18.0,130.0,72.0,0.0,3.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,0.0
+ 193.0,1.0,1.0,87.0,2.0,36.5,83.0,20.0,159.0,71.0,0.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 153.0,1.0,1.0,82.0,2.0,36.6,105.0,20.0,132.0,83.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,2.0,2.0,2.0,1.0,2.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,2.0,0.0
+ 362.0,1.0,1.0,85.0,7.0,36.8,100.0,18.0,144.0,80.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 214.0,1.0,1.0,90.0,4.0,36.6,80.0,20.0,165.0,83.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0
+ 90.0,1.0,1.0,72.0,6.0,36.7,98.0,30.0,125.0,90.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 384.0,1.0,1.0,91.0,3.0,36.6,115.0,17.0,116.0,68.0,0.0,0.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,2.0,1.0,2.0,1.0,1.0,0.0,1.0,1.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0
+ 203.0,1.0,1.0,87.0,5.0,36.5,73.0,16.0,113.0,65.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,1.0,2.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,0.0
+ 60.0,1.0,1.0,60.0,3.0,36.2,78.0,22.0,108.0,59.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0
+ 86.0,1.0,1.0,70.0,12.0,36.5,80.0,16.0,157.0,80.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,1.0,2.0,2.0,2.0,0.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0
+ 340.0,1.0,1.0,81.0,2.0,37.9,80.0,22.0,122.0,78.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,2.0,1.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0
+ 14.0,0.0,0.0,86.0,6.0,36.5,90.0,25.0,102.0,60.0,3.0,0.0,2.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,2.0,2.0,2.0,2.0,2.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,2.0,0.0
+ 104.0,1.0,1.0,74.0,3.0,36.5,84.0,21.0,155.0,95.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,2.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0
+ 83.0,1.0,1.0,70.0,4.0,37.2,84.0,20.0,123.0,75.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0
+ 154.0,1.0,1.0,82.0,2.0,36.8,98.0,22.0,126.0,61.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,0.0
+ 361.0,1.0,1.0,85.0,6.0,37.0,82.0,18.0,136.0,60.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0
+ 376.0,1.0,1.0,88.0,8.0,37.0,117.0,31.0,132.0,70.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,2.0,0.0,0.0,1.0,2.0,2.0,2.0,2.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,1.0
+ 195.0,1.0,1.0,87.0,2.0,36.7,82.0,22.0,119.0,67.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,2.0,2.0,2.0,1.0,2.0,2.0,2.0,2.0,1.0,1.0,1.0,2.0,2.0,1.0,2.0,2.0,2.0,0.0
+ 374.0,1.0,1.0,88.0,2.0,37.1,118.0,28.0,161.0,81.0,3.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0
+ 88.0,1.0,1.0,72.0,2.0,36.8,100.0,18.0,131.0,73.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,1.0,2.0,2.0,2.0,2.0,0.0,0.0,0.0,2.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 292.0,1.0,1.0,69.0,1.0,36.3,118.0,24.0,96.0,70.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,2.0,1.0,2.0,1.0,0.0,2.0,2.0,2.0,1.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 224.0,0.0,0.0,75.0,2.0,36.5,100.0,20.0,140.0,90.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,2.0,2.0,1.0,0.0,2.0,2.0,1.0,2.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 119.0,1.0,1.0,77.0,2.0,37.5,104.0,24.0,123.0,72.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 72.0,1.0,1.0,65.0,2.0,37.0,90.0,30.0,107.0,60.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,2.0,0.0
+ 210.0,1.0,1.0,89.0,2.0,36.8,86.0,23.0,136.0,79.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0,0.0
+ 289.0,1.0,1.0,65.0,1.0,36.0,104.0,24.0,122.0,81.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,1.0,2.0,2.0,2.0,0.0,2.0,2.0,2.0,1.0,0.0,2.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0
+ 229.0,0.0,0.0,79.0,13.0,36.0,80.0,22.0,122.0,60.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,2.0,0.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,1.0
+ 209.0,1.0,1.0,89.0,2.0,36.2,78.0,20.0,147.0,64.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,0.0,1.0,2.0,2.0,2.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0
+ 365.0,1.0,1.0,86.0,4.0,36.8,80.0,25.0,121.0,67.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,2.0,1.0,1.0,2.0,0.0,1.0,0.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 42.0,0.0,1.0,85.0,3.0,36.8,81.0,20.0,126.0,55.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,0.0
+ 302.0,1.0,1.0,71.0,3.0,36.4,104.0,26.0,149.0,71.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,2.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 36.0,0.0,1.0,79.0,2.0,36.9,108.0,25.0,136.0,88.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,1.0,2.0,0.0,2.0,0.0,2.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 148.0,1.0,1.0,81.0,3.0,37.0,112.0,25.0,125.0,71.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,2.0,2.0,1.0,2.0,1.0,2.0,2.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 49.0,1.0,0.0,76.0,2.0,36.7,104.0,20.0,125.0,61.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,2.0,1.0,1.0,2.0,1.0,0.0,2.0,0.0,2.0,1.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,0.0
+ 371.0,1.0,1.0,88.0,1.0,36.7,104.0,30.0,143.0,75.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,2.0,2.0,1.0,2.0,0.0,2.0,2.0,2.0,1.0,0.0,1.0,1.0,1.0,1.0,2.0,1.0,2.0,1.0
+ 44.0,0.0,1.0,86.0,3.0,36.5,101.0,33.0,133.0,84.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,2.0,1.0,1.0,2.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,2.0,0.0
+ 55.0,1.0,1.0,56.0,3.0,37.4,104.0,24.0,161.0,88.0,0.0,0.0,2.0,2.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,0.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,2.0,0.0
+ 267.0,0.0,1.0,87.0,2.0,36.0,92.0,20.0,145.0,84.0,3.0,3.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,2.0,2.0,2.0,2.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0
+ 270.0,0.0,1.0,91.0,30.0,36.2,90.0,22.0,140.0,70.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,0.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,2.0,2.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0
+ 185.0,1.0,1.0,86.0,2.0,36.5,80.0,23.0,163.0,87.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,1.0,1.0,2.0,1.0,0.0,1.0,0.0,2.0,2.0,1.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0
+ 79.0,1.0,1.0,67.0,6.0,36.5,85.0,20.0,108.0,50.0,3.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,0.0,1.0,0.0,2.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,0.0
+ 259.0,0.0,1.0,82.0,2.0,37.5,70.0,24.0,126.0,60.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,1.0,2.0,1.0,1.0,2.0,0.0,0.0,0.0,2.0,1.0,1.0,2.0,2.0,1.0,2.0,1.0,1.0,1.0
+ 250.0,0.0,1.0,66.0,23.0,39.2,120.0,22.0,102.0,64.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,2.0,1.0,1.0,1.0,2.0,0.0,0.0,0.0,2.0,1.0,1.0,2.0,2.0,1.0,2.0,2.0,2.0,1.0
+ 389.0,1.0,1.0,91.0,14.0,37.0,105.0,24.0,140.0,91.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 310.0,1.0,1.0,75.0,1.0,37.6,100.0,22.0,163.0,84.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,1.0
+ 33.0,0.0,1.0,73.0,3.0,36.7,78.0,22.0,135.0,75.0,0.0,0.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 98.0,1.0,1.0,73.0,2.0,37.0,116.0,20.0,122.0,66.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,2.0,0.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 9.0,0.0,0.0,79.0,3.0,36.8,74.0,21.0,110.0,64.0,0.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,2.0,2.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,0.0
+ 346.0,1.0,1.0,83.0,1.0,36.2,84.0,26.0,171.0,95.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,1.0,2.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0
+ 175.0,1.0,1.0,85.0,2.0,37.2,80.0,30.0,148.0,53.0,3.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,2.0,2.0,1.0,2.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 198.0,1.0,1.0,87.0,3.0,36.8,135.0,24.0,140.0,101.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,0.0
+ 294.0,1.0,1.0,69.0,5.0,36.7,112.0,30.0,188.0,112.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 62.0,1.0,1.0,62.0,3.0,37.0,104.0,22.0,114.0,81.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,2.0,1.0,0.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 355.0,1.0,1.0,84.0,4.0,36.7,89.0,20.0,140.0,70.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0
+ 180.0,1.0,1.0,85.0,5.0,36.6,55.0,20.0,153.0,55.0,0.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0
+ 167.0,1.0,1.0,84.0,4.0,36.0,80.0,20.0,89.0,51.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,0.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,0.0
+ 188.0,1.0,1.0,86.0,3.0,36.8,94.0,20.0,164.0,92.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,2.0,1.0,1.0,1.0,0.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,0.0
+ 2.0,0.0,0.0,69.0,2.0,36.8,100.0,23.0,120.0,80.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 256.0,0.0,1.0,75.0,6.0,37.0,85.0,24.0,153.0,90.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,1.0,2.0,2.0,1.0,2.0,1.0,0.0,0.0,0.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0
+ 108.0,1.0,1.0,75.0,7.0,37.0,80.0,23.0,133.0,70.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,2.0,2.0,0.0,1.0,2.0,2.0,2.0,1.0,1.0,2.0,2.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 32.0,0.0,1.0,71.0,3.0,36.7,94.0,22.0,134.0,92.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 17.0,0.0,0.0,87.0,3.0,36.9,104.0,20.0,107.0,74.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,1.0,2.0,1.0,1.0,1.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 297.0,1.0,1.0,69.0,8.0,36.0,100.0,18.0,102.0,73.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,1.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,1.0
+ 89.0,1.0,1.0,72.0,4.0,36.5,80.0,18.0,160.0,90.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,2.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0
+ 92.0,1.0,1.0,72.0,9.0,36.3,110.0,22.0,117.0,69.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,1.0,2.0,2.0,1.0,1.0,2.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,0.0
+ 173.0,1.0,1.0,85.0,2.0,36.4,98.0,18.0,146.0,80.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,1.0,1.0,2.0,2.0,2.0,0.0
+ 221.0,0.0,0.0,57.0,1.0,36.0,101.0,24.0,100.0,70.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,2.0,2.0,1.0,1.0,2.0,0.0,0.0,0.0,0.0,2.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 227.0,0.0,0.0,79.0,5.0,35.8,68.0,18.0,153.0,71.0,3.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,2.0,1.0,2.0,1.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0
+ 317.0,1.0,1.0,75.0,3.0,37.0,73.0,20.0,123.0,73.0,0.0,0.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 43.0,0.0,1.0,85.0,11.0,37.2,128.0,24.0,154.0,94.0,3.0,0.0,2.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,2.0,2.0,2.0,1.0,2.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 103.0,1.0,1.0,74.0,2.0,35.7,93.0,15.0,152.0,78.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,1.0,0.0
+ 115.0,1.0,1.0,77.0,2.0,36.7,108.0,21.0,149.0,80.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,1.0,2.0,1.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0
+ 255.0,0.0,1.0,74.0,6.0,37.0,95.0,28.0,105.0,90.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,2.0,0.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,2.0,1.0,1.0
+ 243.0,0.0,0.0,90.0,9.0,36.5,108.0,28.0,185.0,111.0,0.0,0.0,2.0,2.0,2.0,0.0,0.0,1.0,0.0,0.0,1.0,2.0,2.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,2.0,2.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0
+ 80.0,1.0,1.0,68.0,2.0,36.7,90.0,30.0,114.0,68.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 196.0,1.0,1.0,87.0,2.0,37.7,101.0,25.0,148.0,92.0,3.0,0.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,2.0,2.0,2.0,1.0,1.0,2.0,2.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,0.0
+ 63.0,1.0,1.0,62.0,5.0,36.0,100.0,20.0,115.0,85.0,0.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 56.0,1.0,1.0,57.0,2.0,37.5,119.0,23.0,107.0,61.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 232.0,0.0,0.0,85.0,3.0,36.0,108.0,31.0,180.0,102.0,0.0,0.0,2.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,0.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 65.0,1.0,1.0,63.0,3.0,36.6,80.0,22.0,108.0,57.0,3.0,3.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 226.0,0.0,0.0,79.0,2.0,36.7,97.0,22.0,125.0,72.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,2.0,2.0,2.0,1.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0
+ 264.0,0.0,1.0,85.0,2.0,36.8,112.0,28.0,107.0,67.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,1.0,2.0,1.0,2.0,2.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0
+ 47.0,0.0,1.0,99.0,2.0,36.4,124.0,24.0,169.0,73.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,2.0,2.0,2.0,2.0,2.0,1.0,2.0,2.0,2.0,1.0,2.0,1.0,2.0,1.0,2.0,2.0,2.0,2.0,2.0,0.0
+ 242.0,0.0,0.0,90.0,2.0,36.8,97.0,32.0,168.0,79.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,2.0,0.0,1.0,2.0,1.0,2.0,1.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,2.0,1.0,1.0
+ 202.0,1.0,1.0,87.0,4.0,37.1,96.0,21.0,114.0,54.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,2.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 395.0,1.0,1.0,91.0,20.0,36.5,93.0,28.0,133.0,72.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 234.0,0.0,0.0,87.0,3.0,37.2,92.0,22.0,173.0,84.0,0.0,0.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,2.0,2.0,2.0,2.0,2.0,1.0,1.0,0.0,0.0,0.0,1.0,2.0,0.0,0.0,0.0,0.0,2.0,1.0,2.0,1.0
+ 116.0,1.0,1.0,77.0,2.0,36.8,90.0,22.0,156.0,83.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,0.0,2.0,2.0,2.0,2.0,1.0,1.0,1.0,0.0,0.0,0.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 27.0,0.0,1.0,56.0,3.0,36.5,93.0,18.0,133.0,87.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,2.0,1.0,1.0,2.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0
+ 391.0,1.0,1.0,91.0,16.0,36.1,98.0,25.0,132.0,84.0,3.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,2.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0
+ 26.0,0.0,0.0,99.0,1.0,37.0,84.0,24.0,142.0,72.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,2.0,1.0,0.0,2.0,2.0,1.0,2.0,0.0,1.0,2.0,1.0,2.0,2.0,1.0,2.0,0.0
+ 343.0,1.0,1.0,81.0,16.0,36.7,92.0,26.0,111.0,54.0,3.0,0.0,2.0,2.0,0.0,0.0,1.0,0.0,0.0,0.0,2.0,2.0,0.0,2.0,1.0,2.0,1.0,2.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,2.0,1.0,2.0,2.0,2.0,2.0,1.0
+ 58.0,1.0,1.0,59.0,5.0,36.7,98.0,20.0,130.0,80.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,2.0,1.0,0.0,2.0,2.0,2.0,2.0,0.0,0.0,0.0,0.0,1.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,1.0,0.0
+ 349.0,1.0,1.0,83.0,9.0,36.6,75.0,25.0,120.0,65.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0
+ 109.0,1.0,1.0,76.0,2.0,36.4,52.0,20.0,108.0,63.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,2.0,2.0,2.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
+ 3.0,0.0,0.0,69.0,2.0,38.2,106.0,23.0,131.0,80.0,0.0,0.0,2.0,2.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,1.0,0.0,2.0,1.0,1.0,2.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,0.0,0.0,0.0,2.0,1.0,2.0,0.0
+ 12.0,0.0,0.0,81.0,4.0,36.8,101.0,22.0,151.0,97.0,0.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,1.0,0.0,1.0,1.0,1.0,2.0,1.0,0.0,2.0,0.0,2.0,1.0,0.0,1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0
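A note on this file: the column header arrived as mojibake in the upload (GBK-encoded Chinese names decoded under the wrong code page) and has been reconstructed in English from the input labels in app.py below; suffixes such as wbc_4to10 preserve the reference ranges encoded in the original names. A minimal loading sketch that mirrors GetDataSet in load.py (reading level 1 as high risk is inferred from the prediction strings in load.py):

    import pandas as pd
    from sklearn.model_selection import train_test_split

    data = pd.read_csv("408-h2.csv", encoding="gbk")  # gbk is an ASCII superset, so this still works
    x = data.drop(["id", "level"], axis=1)  # the 40 clinical features
    y = data["level"]                       # 0 = low risk, 1 = high risk (inferred)
    x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.4, random_state=5)
    print(x_train.shape, x_test.shape)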
__pycache__/COPD_AVG_FL_Dy.cpython-39.pyc ADDED
Binary file (6.31 kB).
__pycache__/Clients_Dy.cpython-39.pyc ADDED
Binary file (3.08 kB).
__pycache__/GetData_Dy.cpython-39.pyc ADDED
Binary file (4.26 kB).
__pycache__/load.cpython-39.pyc ADDED
Binary file (3.36 kB).
app.py ADDED
@@ -0,0 +1,132 @@
+ import torch
+ import gradio as gr
+
+ from load import load_model
+
+
+ class Neuro_net(torch.nn.Module):
+     # Same architecture as in load.py; torch.load needs this class
+     # definition available when it unpickles the saved .pt models.
+     def __init__(self):
+         super(Neuro_net, self).__init__()
+         self.layer1 = torch.nn.Linear(40, 20)
+         self.layer2 = torch.nn.Linear(20, 10)
+         self.layer3 = torch.nn.Linear(10, 5)
+         self.layer4 = torch.nn.Linear(5, 2)
+         # Softmax(dim=0) normalizes over the batch dimension; kept as-is
+         # because the shipped models were saved with this setting.
+         self.layer5 = torch.nn.Softmax(dim=0)
+
+     def forward(self, input):
+         tensor = torch.relu(self.layer1(input))
+         tensor = torch.relu(self.layer2(tensor))
+         tensor = torch.relu(self.layer3(tensor))
+         tensor = self.layer4(tensor)
+         tensor = self.layer5(tensor)
+         return tensor
+
+
+ def test(a):
+     # Unused placeholder kept from the original upload.
+     return "heigh"
+
+
+ def load_beforeFL(*features):
+     # Gradio passes the 40 number inputs positionally; score them with
+     # the federated (FL) model.
+     features = list(features)
+     print(features)
+     return load_model("Yes", features)
+
+
+ def load_beforeTIDM(*features):
+     # Same 40 inputs, scored with the three per-hospital (CML) models.
+     features = list(features)
+     print(features)
+     return load_model("No", features)
+
+
+ # Default values for the 40 input widgets (one example patient).
+ em = [
+     1.0,1.0,78.0,7.0,37.3,110.0,21.0,130.0,81.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,2.0,0.0,2.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,2.0,2.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0
+ ]
+
+ with gr.Blocks(css="style.css") as demo:
+     gr.Markdown("# Practice of Distributed Machine Learning in Clinical Modeling for Chronic Obstructive Pulmonary Disease Demo 🖍️")
+     title = "Practice of Distributed Machine Learning in Clinical Modeling for Chronic Obstructive Pulmonary Disease Demo"  # unused
+     description = "First fill in the clinical data such as the patient's laboratory examinations, then click a prediction button. After a moment, the output box shows the predicted deterioration risk for the AECOPD patient🖍"
+     gr.Textbox(value=description, label="How to use this website🖊️🤗🤗🤗🖊️")
+     with gr.Column():
+         with gr.Row():
+             input_0 = gr.Number(value=em[0], label="smoking (Yes=1, No=0)")
+             input_1 = gr.Number(value=em[1], label="Sex (Female=1, Male=0)")
+             input_2 = gr.Number(value=em[2], label="Age")
+             input_3 = gr.Number(value=em[3], label="Number of hospitalizations")
+             input_4 = gr.Number(value=em[4], label="Temperature")
+             input_5 = gr.Number(value=em[5], label="Pulse")
+             input_6 = gr.Number(value=em[6], label="Respiratory rate")
+             input_7 = gr.Number(value=em[7], label="Systolic pressure")
+     with gr.Column():
+         with gr.Row():
+             input_8 = gr.Number(value=em[8], label="Diastolic pressure")
+             input_9 = gr.Number(value=em[9], label="pulmonary heart disease (Yes=3, No=0)")
+             input_10 = gr.Number(value=em[10], label="Bronchiectasis (Yes=3, No=0)")
+             input_11 = gr.Number(value=em[11], label="hypertension (Yes=2, No=0)")
+             input_12 = gr.Number(value=em[12], label="diabetes (Yes=2, No=0)")
+             input_13 = gr.Number(value=em[13], label="coronary heart disease (Yes=2, No=0)")
+             input_14 = gr.Number(value=em[14], label="chronic kidney disease (Yes=1, No=0)")
+             input_15 = gr.Number(value=em[15], label="malignancy (Yes=1, No=0)")
+     with gr.Column():
+         with gr.Row():
+             input_16 = gr.Number(value=em[16], label="Cerebrovascular disease (Yes=1, No=0)")
+             input_17 = gr.Number(value=em[17], label="viral hepatitis (Yes=1, No=0)")
+             input_18 = gr.Number(value=em[18], label="cirrhosis (Yes=1, No=0)")
+             input_19 = gr.Number(value=em[19], label="wbc (0-2=0, 2-4=1, >4=2)")
+             input_20 = gr.Number(value=em[20], label="C-reactive protein (reference 0.068-8.2)")
+             input_21 = gr.Number(value=em[21], label="high-sensitivity C-reactive protein (0-3=1)")
+             input_22 = gr.Number(value=em[22], label="erythrocyte sedimentation rate (0-20=1)")
+             input_23 = gr.Number(value=em[23], label="Absolute value of lymphocytes")
+     with gr.Column():
+         with gr.Row():
+             input_24 = gr.Number(value=em[24], label="anc (absolute neutrophil count)")
+             input_25 = gr.Number(value=em[25], label="Absolute value of eosinophils")
+             input_26 = gr.Number(value=em[26], label="Absolute value of monocytes")
+             input_27 = gr.Number(value=em[27], label="Serum procalcitonin detection")
+             input_28 = gr.Number(value=em[28], label="Oxygen saturation")
+             input_29 = gr.Number(value=em[29], label="Partial pressure of carbon dioxide")
+             input_30 = gr.Number(value=em[30], label="Oxygen partial pressure / inhaled oxygen concentration ratio greater than 300")
+             input_31 = gr.Number(value=em[31], label="albumin")
+     with gr.Column():
+         with gr.Row():
+             input_32 = gr.Number(value=em[32], label="globulin")
+             input_33 = gr.Number(value=em[33], label="Low-density lipoprotein cholesterol")
+             input_34 = gr.Number(value=em[34], label="High-density lipoprotein cholesterol")
+             input_35 = gr.Number(value=em[35], label="total cholesterol")
+             input_36 = gr.Number(value=em[36], label="triglyceride")
+             input_37 = gr.Number(value=em[37], label="urea nitrogen")
+             input_38 = gr.Number(value=em[38], label="uric acid")
+             input_39 = gr.Number(value=em[39], label="creatinine")
+
+     # All 40 widgets in feature order, shared by both buttons and the examples.
+     all_inputs = [input_0, input_1, input_2, input_3, input_4, input_5, input_6, input_7,
+                   input_8, input_9, input_10, input_11, input_12, input_13, input_14, input_15,
+                   input_16, input_17, input_18, input_19, input_20, input_21, input_22, input_23,
+                   input_24, input_25, input_26, input_27, input_28, input_29, input_30, input_31,
+                   input_32, input_33, input_34, input_35, input_36, input_37, input_38, input_39]
+
+     gr.Markdown("## Prediction")
+     btn = gr.Button(value="CML (Click here to predict)", elem_classes="slide")
+     TIDM = gr.Textbox(label="Deterioration risk of AECOPD")
+     btn.click(fn=load_beforeTIDM, inputs=all_inputs, outputs=TIDM)
+
+     btn_FL = gr.Button(value="FL (Click here to predict)", elem_classes="slide")
+     FL = gr.Textbox(label="Deterioration risk of AECOPD")
+     btn_FL.click(fn=load_beforeFL, inputs=all_inputs, outputs=FL)
+
+     gr.Markdown("## Examples")
+     gr.Examples(examples=[em], inputs=all_inputs, outputs=TIDM, fn=load_beforeTIDM, cache_examples=True)
+     # FL examples, commented out in the original upload:
+     # gr.Examples(examples=[[0.0,1.0,88.0,2.0,36.5,112.0,22.0,101.0,74.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.0,1.0,0.0,1.0,2.0,2.0,1.0,2.0,1.0,2.0,1.0,2.0,1.0,1.0,2.0,2.0,2.0,2.0,2.0,2.0,2.0]],
+     #             inputs=all_inputs, outputs=FL, fn=load_beforeFL, cache_examples=True)
+
+ demo.launch()
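One caveat worth a small sketch: Softmax(dim=0) in Neuro_net normalizes each class column across the batch rather than producing per-row class probabilities. The saved models were built this way, so it is left untouched above, but the difference is easy to see:

    import torch

    logits = torch.tensor([[2.0, 1.0], [0.5, 3.0]])
    print(torch.softmax(logits, dim=0))  # each column sums to 1 (across patients)
    print(torch.softmax(logits, dim=1))  # each row sums to 1 (per-patient class probabilities)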
gradio_cached_examples/63/log.csv ADDED
@@ -0,0 +1,5 @@
+ Future risk of illness,flag,username,timestamp
+ "H1 predict: High.
+ H2 predict: Low.
+ H3 predict: High.
+ ",,,2023-10-02 20:56:28.086482
gradio_cached_examples/64/log.csv ADDED
@@ -0,0 +1,5 @@
+ Future risk of illness,flag,username,timestamp
+ "H1 predict: High.
+ H2 predict: Low.
+ H3 predict: High.
+ ",,,2023-10-02 20:56:28.086482
load.py ADDED
@@ -0,0 +1,118 @@
+ import torch
+ import pandas as pd
+ from sklearn.model_selection import train_test_split
+
+
+ class GetDataSet(object):
+     # Reads 408-h2.csv and splits the 40 clinical features and the
+     # 'level' label into train/test arrays.
+     def __init__(self):
+         self.train_data = None
+         self.train_label = None
+         self.test_data = None
+         self.test_label = None
+         self.copdDataSetConstruct()
+
+     def copdDataSetConstruct(self):
+         data = pd.read_csv('408-h2.csv', encoding='gbk')
+         x = data.drop(['id', 'level'], axis=1)
+         y = data['level']
+         # random_state is the random seed
+         x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.4, random_state=5)
+         self.train_data = x_train.loc[:].values
+         self.train_label = y_train.loc[:].values
+         self.test_data = x_test.loc[:].values
+         self.test_label = y_test.loc[:].values
+
+
+ class Neuro_net(torch.nn.Module):
+     # torch.load below unpickles full model objects, so this class must
+     # match the definition used when the .pt files were saved.
+     def __init__(self):
+         super(Neuro_net, self).__init__()
+         self.layer1 = torch.nn.Linear(40, 20)
+         self.layer2 = torch.nn.Linear(20, 10)
+         self.layer3 = torch.nn.Linear(10, 5)
+         self.layer4 = torch.nn.Linear(5, 2)
+         self.layer5 = torch.nn.Softmax(dim=0)
+
+     def forward(self, input):
+         tensor = torch.relu(self.layer1(input))
+         tensor = torch.relu(self.layer2(tensor))
+         tensor = torch.relu(self.layer3(tensor))
+         tensor = self.layer4(tensor)
+         tensor = self.layer5(tensor)
+         return tensor
+
+
+ def load_model(fl, input):
+     # Overwrite the first test row with the user-supplied feature vector,
+     # score the whole test matrix, and read the prediction off row 0.
+     data_test_h1 = GetDataSet()
+     data_test_h1.test_data[0] = input
+     input = torch.from_numpy(data_test_h1.test_data).to(torch.float32)
+     if fl == "Yes":
+         model = torch.load("net_gb.pt")
+         print(fl)
+         print(input)
+         # model.eval()
+         with torch.no_grad():
+             output = model(input)
+             print(output)
+             predict_h1_y = torch.max(output, dim=1)[1]
+             predict_h1_label = predict_h1_y.data.numpy()
+             print(predict_h1_y)
+             if int(predict_h1_label[0]) == 1:
+                 return "FL predict: High."
+             else:
+                 return "FL predict: Low."
+     else:
+         model_h1 = torch.load("net_h1.pt")
+         model_h2 = torch.load("net_h2.pt")
+         model_h3 = torch.load("net_h3.pt")
+         print(fl)
+         print(input)
+         model_h1.eval()
+         model_h2.eval()
+         model_h3.eval()
+         with torch.no_grad():
+             output_h1 = model_h1(input)
+             output_h2 = model_h2(input)
+             output_h3 = model_h3(input)
+             print(output_h1)
+             print(output_h2)
+             print(output_h3)
+             predict_h1_label = torch.max(output_h1, dim=1)[1].data.numpy()
+             predict_h2_label = torch.max(output_h2, dim=1)[1].data.numpy()
+             predict_h3_label = torch.max(output_h3, dim=1)[1].data.numpy()
+             print(predict_h1_label)
+             print(predict_h2_label)
+             print(predict_h3_label)
+             output = ""
+             if int(predict_h1_label[0]) == 1:
+                 print("sick")
+                 output += "H1 predict: High.\n"
+             else:
+                 print("no sick")
+                 output += "H1 predict: Low.\n"
+             if int(predict_h2_label[0]) == 1:
+                 print("sick")
+                 output += "H2 predict: High.\n"
+             else:
+                 print("no sick")
+                 output += "H2 predict: Low.\n"
+             if int(predict_h3_label[0]) == 1:
+                 print("sick")
+                 output += "H3 predict: High.\n"
+             else:
+                 print("no sick")
+                 output += "H3 predict: Low.\n"
+             return output
+
+
+ # Example usage kept (commented out) from the original upload:
+ # data_test_h1 = GetDataSet()
+ # a = [1.0,1.0,78.0,7.0,37.3,110.0,21.0,130.0,81.0,3.0,0.0,2.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,1.0,2.0,0.0,2.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0,2.0,2.0,1.0,2.0,1.0,1.0,1.0,2.0,1.0]
+ # data_test_h1.train_data[0] = a
+ # print(load_model("No", data_test_h1.train_data))
+ # print(load_model("Yes", data_test_h1.train_data))
net_gb.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a634ef8e95b5b18ca43c90c83718bdce5061e7fc774296a4569b962b25c8d7ec
+ size 7877
net_h1.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e242fe05b707d7015048d0bb0eeb13ba70eeb5b51470e848e9a3452141da41f7
+ size 7877
net_h2.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eaeb6cc8792eda694a1ce0499f52f7d7e3d6c40bae386675154aa9daf887103c
+ size 7877
net_h3.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a61837c2f5beb8f25202f685759c2b58c448f2dc8803b27441694c5cb54ce5dc
+ size 7877
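These four entries are Git LFS pointer files rather than the weights themselves: each records the spec version, a sha256 object id, and the byte size of the real file kept in LFS storage (this is what the .gitattributes rules at the top of this commit arrange). A small illustrative sketch, not the git-lfs implementation, parsing the pointer shown above for net_gb.pt:

    pointer_lines = [
        "version https://git-lfs.github.com/spec/v1",
        "oid sha256:a634ef8e95b5b18ca43c90c83718bdce5061e7fc774296a4569b962b25c8d7ec",
        "size 7877",
    ]
    # Each line is "key value"; split once to build a metadata dict.
    meta = dict(line.split(" ", 1) for line in pointer_lines)
    print(meta["oid"], meta["size"])  # the ~7.9 kB of weights live in LFS storage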
requirement.txt ADDED
@@ -0,0 +1,4 @@
+ torch
+ gradio
+ pandas
+ scikit-learn
style.css ADDED
@@ -0,0 +1,28 @@
+ .slide {
+
+     border: 2px solid #2980b9;
+     color: #2980b9;
+     position: relative;
+     overflow: hidden;
+     z-index: 1;
+     transition: .5s;
+
+ }
+
+ .slide::before {
+     content: "";
+     position: absolute;
+     z-index: -1;
+     width: 0;
+     height: 100%;
+     left: 0;
+     background-color: #2980b9;
+     transition: ease-in-out .5s;
+ }
+
+ .slide:hover::before {
+     width: 100%;
+ }
+ #b_1{
+     background-color: #4CAF50;
+ }
tmp_trainer/all_results.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "epoch": 3.0,
+   "train_loss": 2.680424372355143,
+   "train_runtime": 11.0862,
+   "train_samples_per_second": 0.8118215323828388,
+   "train_steps_per_second": 0.27060717746094626
+ }
tmp_trainer/model_state.pdparams ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:986323e837e875f3e7a71b7cbe7a47b018741d78bca13c4e2aefd5d759621e98
+ size 597370105
tmp_trainer/runs/Oct04_19-18-17_LAPTOP-9VNL3PC0/vdlrecords.1696418297.log ADDED
Binary file (5.33 kB).
tmp_trainer/runs/Oct04_19-41-26_LAPTOP-9VNL3PC0/vdlrecords.1696419687.log ADDED
Binary file (5.33 kB).
tmp_trainer/runs/Oct04_20-56-42_LAPTOP-9VNL3PC0/vdlrecords.1696424203.log ADDED
Binary file (5.33 kB).
tmp_trainer/special_tokens_map.json ADDED
@@ -0,0 +1 @@
+ {"unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]"}
tmp_trainer/template_config.json ADDED
@@ -0,0 +1,2 @@
+ [{"text": "sentence1"}, {"hard": "和"}, {"text": "sentence2"}, {"hard": "说的是"}, {"mask": null, "length": 1}, {"hard": "同的事情。"}]
+ {"class": "ManualTemplate"}
tmp_trainer/tokenizer_config.json ADDED
@@ -0,0 +1 @@
+ {"do_lower_case": true, "unk_token": "[UNK]", "sep_token": "[SEP]", "pad_token": "[PAD]", "cls_token": "[CLS]", "mask_token": "[MASK]", "model_max_length": 2048, "tokenizer_class": "ErnieTokenizer"}
tmp_trainer/train_results.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "epoch": 3.0,
+   "train_loss": 2.680424372355143,
+   "train_runtime": 11.0862,
+   "train_samples_per_second": 0.8118215323828388,
+   "train_steps_per_second": 0.27060717746094626
+ }
tmp_trainer/trainer_state.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "best_metric": null,
+   "best_model_checkpoint": null,
+   "epoch": 3.0,
+   "global_step": 3,
+   "is_local_process_zero": true,
+   "is_world_process_zero": true,
+   "log_history": [
+     {
+       "epoch": 3.0,
+       "step": 3,
+       "train_loss": 2.680424372355143,
+       "train_runtime": 11.0862,
+       "train_samples_per_second": 0.8118215323828388,
+       "train_steps_per_second": 0.27060717746094626
+     }
+   ],
+   "max_steps": 3,
+   "num_train_epochs": 3,
+   "total_flos": 0,
+   "trial_name": null,
+   "trial_params": null
+ }
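The throughput figures in these trainer JSON files are mutually consistent, which is a quick way to sanity-check them (assuming samples_per_second = total samples / train_runtime):

    runtime, steps = 11.0862, 3
    print(steps / runtime)               # 0.27060... = train_steps_per_second
    print(0.8118215323828388 * runtime)  # ~9.0, i.e. about 9 samples total over the 3 epochs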
tmp_trainer/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:826442a3622f099a95cd20361905a9c77ca3fb6884113831d8ab0701640415fc
+ size 2310
tmp_trainer/verbalizer_config.json ADDED
@@ -0,0 +1 @@
+ {"0": ["不"], "1": ["相"]}
tmp_trainer/vocab.txt ADDED
The diff for this file is too large to render.