content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
microcode = ''' def macroop VPADDD_XMM_XMM { vaddi dest=xmm0, src1=xmm0v, src2=xmm0m, size=4, VL=16 }; def macroop VPADDD_XMM_M { ldfp128 ufp1, seg, sib, "DISPLACEMENT + 0", dataSize=16 vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=16 }; def macroop VPADDD_XMM_P { rdip t7 ldfp128 ufp1, seg, riprel, "DISPLACEMENT + 0", dataSize=16 vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=16 }; def macroop VPADDD_YMM_YMM { vaddi dest=xmm0, src1=xmm0v, src2=xmm0m, size=4, VL=32 }; def macroop VPADDD_YMM_M { ldfp256 ufp1, seg, sib, "DISPLACEMENT + 0", dataSize=32 vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=32 }; def macroop VPADDD_YMM_P { rdip t7 ldfp256 ufp1, seg, riprel, "DISPLACEMENT + 0", dataSize=32 vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=32 }; def macroop VPADDD_ZMM_ZMM { vaddi dest=xmm0, src1=xmm0v, src2=xmm0m, size=4, VL=64 }; def macroop VPADDD_ZMM_M { ldfp512 ufp1, seg, sib, "DISPLACEMENT + 0", dataSize=64 vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=64 }; def macroop VPADDD_ZMM_P { rdip t7 ldfp512 ufp1, seg, riprel, "DISPLACEMENT + 0", dataSize=64 vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=64 }; '''
microcode = '\ndef macroop VPADDD_XMM_XMM {\n vaddi dest=xmm0, src1=xmm0v, src2=xmm0m, size=4, VL=16\n};\n\ndef macroop VPADDD_XMM_M {\n ldfp128 ufp1, seg, sib, "DISPLACEMENT + 0", dataSize=16\n vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=16\n};\n\ndef macroop VPADDD_XMM_P {\n rdip t7\n ldfp128 ufp1, seg, riprel, "DISPLACEMENT + 0", dataSize=16\n vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=16\n};\n\ndef macroop VPADDD_YMM_YMM {\n vaddi dest=xmm0, src1=xmm0v, src2=xmm0m, size=4, VL=32\n};\n\ndef macroop VPADDD_YMM_M {\n ldfp256 ufp1, seg, sib, "DISPLACEMENT + 0", dataSize=32\n vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=32\n};\n\ndef macroop VPADDD_YMM_P {\n rdip t7\n ldfp256 ufp1, seg, riprel, "DISPLACEMENT + 0", dataSize=32\n vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=32\n};\n\ndef macroop VPADDD_ZMM_ZMM {\n vaddi dest=xmm0, src1=xmm0v, src2=xmm0m, size=4, VL=64\n};\n\ndef macroop VPADDD_ZMM_M {\n ldfp512 ufp1, seg, sib, "DISPLACEMENT + 0", dataSize=64\n vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=64\n};\n\ndef macroop VPADDD_ZMM_P {\n rdip t7\n ldfp512 ufp1, seg, riprel, "DISPLACEMENT + 0", dataSize=64\n vaddi dest=xmm0, src1=xmm0v, src2=ufp1, size=4, VL=64\n};\n'
# -*- coding: utf-8 -*- ''' @author: Andreas Peldszus ''' folds = [ ['micro_k009', 'micro_b050', 'micro_b055', 'micro_b041', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_d08', 'micro_b033', 'micro_b060', 'micro_b034', 'micro_k003', 'micro_d15', 'micro_d20', 'micro_k012', 'micro_d13', 'micro_b010', 'micro_k029', 'micro_d14', 'micro_b024', 'micro_b009', 'micro_k011'], ['micro_k001', 'micro_d11', 'micro_b058', 'micro_k014', 'micro_b029', 'micro_d03', 'micro_d07', 'micro_b002', 'micro_k023', 'micro_b049', 'micro_d01', 'micro_k006', 'micro_b031', 'micro_d12', 'micro_k024', 'micro_k017', 'micro_b054', 'micro_b012', 'micro_b062', 'micro_b052', 'micro_k008', 'micro_d21', 'micro_b016'], ['micro_k031', 'micro_b018', 'micro_k010', 'micro_k025', 'micro_b040', 'micro_d06', 'micro_b061', 'micro_b039', 'micro_d02', 'micro_b045', 'micro_b008', 'micro_b032', 'micro_b059', 'micro_k013', 'micro_b047', 'micro_k002', 'micro_b053', 'micro_b044', 'micro_b023', 'micro_d19', 'micro_b030', 'micro_b025', 'micro_b003'], ['micro_b051', 'micro_k007', 'micro_k019', 'micro_b019', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_b057', 'micro_d16', 'micro_b006', 'micro_k018', 'micro_b015', 'micro_d17', 'micro_d18', 'micro_b004', 'micro_k020', 'micro_b001', 'micro_b028', 'micro_b011', 'micro_b017', 'micro_k022', 'micro_b013'], ['micro_b056', 'micro_d23', 'micro_b038', 'micro_d04', 'micro_d05', 'micro_b005', 'micro_k027', 'micro_b026', 'micro_k004', 'micro_b020', 'micro_b014', 'micro_d22', 'micro_b021', 'micro_b048', 'micro_b022', 'micro_k015', 'micro_b037', 'micro_b035', 'micro_d10', 'micro_d09', 'micro_k016', 'micro_b064'], ['micro_b056', 'micro_b018', 'micro_b058', 'micro_k025', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_b057', 'micro_b002', 'micro_b049', 'micro_d01', 'micro_b060', 'micro_b031', 'micro_d12', 'micro_b054', 'micro_b004', 'micro_k012', 'micro_d10', 'micro_b035', 'micro_b009', 'micro_b064', 'micro_k016', 'micro_d21'], ['micro_k031', 'micro_d11', 'micro_b038', 'micro_k014', 
'micro_b040', 'micro_b005', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_b034', 'micro_k003', 'micro_d15', 'micro_k029', 'micro_k024', 'micro_b001', 'micro_b052', 'micro_b010', 'micro_d14', 'micro_b023', 'micro_b003', 'micro_b025', 'micro_b016'], ['micro_k001', 'micro_d23', 'micro_k010', 'micro_b041', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_k027', 'micro_k023', 'micro_b014', 'micro_b047', 'micro_k006', 'micro_b021', 'micro_b048', 'micro_d13', 'micro_k020', 'micro_b037', 'micro_k015', 'micro_b028', 'micro_b030', 'micro_d09', 'micro_b013'], ['micro_b051', 'micro_k007', 'micro_b055', 'micro_b019', 'micro_b029', 'micro_d03', 'micro_b061', 'micro_b026', 'micro_d02', 'micro_b045', 'micro_b008', 'micro_b032', 'micro_b059', 'micro_d20', 'micro_k013', 'micro_b022', 'micro_k017', 'micro_b017', 'micro_b062', 'micro_k008', 'micro_k022', 'micro_b012'], ['micro_k009', 'micro_b050', 'micro_k019', 'micro_d04', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b039', 'micro_d16', 'micro_b006', 'micro_k018', 'micro_b015', 'micro_d17', 'micro_d18', 'micro_d08', 'micro_k002', 'micro_d22', 'micro_b044', 'micro_d19', 'micro_b024', 'micro_b011', 'micro_k011'], ['micro_k001', 'micro_d11', 'micro_b038', 'micro_b041', 'micro_b042', 'micro_b007', 'micro_k021', 'micro_k027', 'micro_b002', 'micro_b045', 'micro_d01', 'micro_b060', 'micro_d15', 'micro_k029', 'micro_d08', 'micro_k020', 'micro_d10', 'micro_b044', 'micro_b028', 'micro_d19', 'micro_b064', 'micro_b003'], ['micro_k031', 'micro_b050', 'micro_k019', 'micro_b019', 'micro_b036', 'micro_b027', 'micro_b046', 'micro_b057', 'micro_k023', 'micro_b049', 'micro_b008', 'micro_k006', 'micro_b021', 'micro_d20', 'micro_k012', 'micro_b054', 'micro_d22', 'micro_b052', 'micro_d14', 'micro_b024', 'micro_k008', 'micro_b062', 'micro_b012'], ['micro_k009', 'micro_k007', 'micro_b055', 'micro_k025', 'micro_b040', 'micro_b005', 'micro_b061', 'micro_b026', 'micro_d02', 'micro_b014', 'micro_b047', 'micro_b032', 'micro_b059', 'micro_k013', 
'micro_k024', 'micro_d12', 'micro_d13', 'micro_b010', 'micro_b009', 'micro_b017', 'micro_k022', 'micro_b016', 'micro_d21'], ['micro_b051', 'micro_d23', 'micro_b058', 'micro_k014', 'micro_b029', 'micro_d03', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_b034', 'micro_b015', 'micro_b031', 'micro_b048', 'micro_b022', 'micro_k015', 'micro_k017', 'micro_b035', 'micro_b013', 'micro_d09', 'micro_b030', 'micro_k011'], ['micro_b056', 'micro_b018', 'micro_k010', 'micro_d04', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b039', 'micro_d16', 'micro_b006', 'micro_k018', 'micro_k003', 'micro_d17', 'micro_d18', 'micro_b004', 'micro_k002', 'micro_b001', 'micro_b037', 'micro_b023', 'micro_b011', 'micro_b025', 'micro_k016'], ['micro_k031', 'micro_k007', 'micro_b055', 'micro_b019', 'micro_b029', 'micro_d03', 'micro_d07', 'micro_b002', 'micro_d16', 'micro_b006', 'micro_b014', 'micro_b015', 'micro_d15', 'micro_d17', 'micro_k017', 'micro_b001', 'micro_k022', 'micro_b017', 'micro_d19', 'micro_b023', 'micro_b011', 'micro_b013', 'micro_d21'], ['micro_b056', 'micro_b050', 'micro_k019', 'micro_d04', 'micro_d05', 'micro_d06', 'micro_k027', 'micro_b026', 'micro_d02', 'micro_b049', 'micro_b047', 'micro_b032', 'micro_b059', 'micro_k012', 'micro_k018', 'micro_d13', 'micro_b009', 'micro_k002', 'micro_b044', 'micro_k008', 'micro_b030', 'micro_b025'], ['micro_b051', 'micro_d11', 'micro_k010', 'micro_b041', 'micro_b042', 'micro_b007', 'micro_b046', 'micro_d08', 'micro_b033', 'micro_b060', 'micro_b008', 'micro_b031', 'micro_k013', 'micro_d20', 'micro_b037', 'micro_k020', 'micro_d22', 'micro_d18', 'micro_k015', 'micro_b035', 'micro_k016', 'micro_b064'], ['micro_k009', 'micro_d23', 'micro_b038', 'micro_k014', 'micro_b040', 'micro_b005', 'micro_b061', 'micro_b039', 'micro_k004', 'micro_b020', 'micro_b034', 'micro_k003', 'micro_b021', 'micro_b048', 'micro_b054', 'micro_b053', 'micro_b052', 'micro_b062', 'micro_d14', 'micro_b028', 'micro_b003', 'micro_k011'], ['micro_k001', 'micro_b018', 
'micro_b058', 'micro_k025', 'micro_b036', 'micro_b027', 'micro_k021', 'micro_b057', 'micro_k023', 'micro_b045', 'micro_d01', 'micro_k006', 'micro_b022', 'micro_k029', 'micro_k024', 'micro_d12', 'micro_b004', 'micro_b010', 'micro_d10', 'micro_b024', 'micro_b012', 'micro_d09', 'micro_b016'], ['micro_b051', 'micro_d11', 'micro_b058', 'micro_k025', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_b057', 'micro_k023', 'micro_b020', 'micro_k003', 'micro_k018', 'micro_d17', 'micro_b048', 'micro_b054', 'micro_b053', 'micro_d10', 'micro_b035', 'micro_k015', 'micro_b024', 'micro_b001', 'micro_d09'], ['micro_b056', 'micro_d23', 'micro_k010', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_k027', 'micro_b039', 'micro_d02', 'micro_b045', 'micro_d01', 'micro_b032', 'micro_b059', 'micro_d20', 'micro_k012', 'micro_k002', 'micro_b013', 'micro_b052', 'micro_d18', 'micro_b023', 'micro_b012', 'micro_d21', 'micro_k011'], ['micro_k031', 'micro_k007', 'micro_b055', 'micro_b041', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_d08', 'micro_b033', 'micro_b060', 'micro_b008', 'micro_b031', 'micro_k006', 'micro_k013', 'micro_b047', 'micro_k020', 'micro_b009', 'micro_b017', 'micro_b028', 'micro_b016', 'micro_k008', 'micro_b025', 'micro_b030'], ['micro_k009', 'micro_b050', 'micro_k019', 'micro_b019', 'micro_b029', 'micro_d03', 'micro_d07', 'micro_b002', 'micro_d16', 'micro_b006', 'micro_b034', 'micro_b015', 'micro_b021', 'micro_k029', 'micro_b022', 'micro_b044', 'micro_k017', 'micro_b004', 'micro_b062', 'micro_d14', 'micro_b064', 'micro_b003'], ['micro_k001', 'micro_b018', 'micro_b038', 'micro_d04', 'micro_b040', 'micro_b005', 'micro_b061', 'micro_b026', 'micro_k004', 'micro_b049', 'micro_b014', 'micro_d22', 'micro_d15', 'micro_d12', 'micro_k024', 'micro_d13', 'micro_b037', 'micro_b010', 'micro_b011', 'micro_d19', 'micro_k022', 'micro_k016'], ['micro_k031', 'micro_d23', 'micro_k010', 'micro_b019', 'micro_b036', 'micro_b027', 'micro_k021', 'micro_k027', 'micro_d16', 'micro_b006', 'micro_b032', 
'micro_b015', 'micro_d17', 'micro_b059', 'micro_k024', 'micro_b001', 'micro_b004', 'micro_d18', 'micro_b028', 'micro_d19', 'micro_b030', 'micro_k011', 'micro_b003'], ['micro_k009', 'micro_b018', 'micro_b038', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b026', 'micro_k023', 'micro_b014', 'micro_d01', 'micro_k006', 'micro_b031', 'micro_d12', 'micro_b054', 'micro_d08', 'micro_b037', 'micro_d10', 'micro_b035', 'micro_k015', 'micro_b064', 'micro_b016', 'micro_d21'], ['micro_b051', 'micro_d11', 'micro_b058', 'micro_d04', 'micro_b029', 'micro_d03', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_k018', 'micro_k003', 'micro_b022', 'micro_b011', 'micro_d13', 'micro_k020', 'micro_k029', 'micro_b044', 'micro_b023', 'micro_b009', 'micro_k016', 'micro_b025'], ['micro_b056', 'micro_b050', 'micro_b055', 'micro_k025', 'micro_b040', 'micro_b005', 'micro_b061', 'micro_b039', 'micro_d02', 'micro_b045', 'micro_b034', 'micro_d22', 'micro_d15', 'micro_d20', 'micro_k013', 'micro_k012', 'micro_b010', 'micro_b013', 'micro_b017', 'micro_d09', 'micro_k022', 'micro_b012'], ['micro_k001', 'micro_k007', 'micro_k019', 'micro_b041', 'micro_b042', 'micro_b007', 'micro_b046', 'micro_b057', 'micro_b002', 'micro_b049', 'micro_b008', 'micro_b060', 'micro_b021', 'micro_b048', 'micro_b047', 'micro_k002', 'micro_k017', 'micro_b052', 'micro_d14', 'micro_b024', 'micro_k008', 'micro_b062'], ['micro_k009', 'micro_k007', 'micro_b055', 'micro_d04', 'micro_b040', 'micro_d06', 'micro_b061', 'micro_b039', 'micro_d02', 'micro_b014', 'micro_d01', 'micro_b032', 'micro_b059', 'micro_d20', 'micro_k013', 'micro_b022', 'micro_k017', 'micro_b017', 'micro_b062', 'micro_d14', 'micro_k022', 'micro_b003'], ['micro_k001', 'micro_b050', 'micro_k019', 'micro_k025', 'micro_d05', 'micro_b005', 'micro_d07', 'micro_b026', 'micro_k023', 'micro_b049', 'micro_b047', 'micro_b060', 'micro_b021', 'micro_b048', 'micro_k024', 'micro_d22', 'micro_b052', 'micro_d09', 'micro_b035', 'micro_b011', 'micro_b024', 
'micro_b012'], ['micro_k031', 'micro_d11', 'micro_b058', 'micro_k014', 'micro_b029', 'micro_d03', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_b034', 'micro_b015', 'micro_d15', 'micro_k029', 'micro_d13', 'micro_k002', 'micro_b009', 'micro_b044', 'micro_b010', 'micro_k008', 'micro_d19', 'micro_b025', 'micro_b016'], ['micro_b056', 'micro_d23', 'micro_b038', 'micro_b019', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_b057', 'micro_b002', 'micro_b045', 'micro_b008', 'micro_k006', 'micro_b031', 'micro_d12', 'micro_b054', 'micro_b004', 'micro_k012', 'micro_d10', 'micro_b023', 'micro_b030', 'micro_b064', 'micro_k016', 'micro_k011'], ['micro_b051', 'micro_b018', 'micro_k010', 'micro_b041', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_k027', 'micro_d16', 'micro_b006', 'micro_k018', 'micro_k003', 'micro_d17', 'micro_d18', 'micro_d08', 'micro_k020', 'micro_b001', 'micro_b037', 'micro_b028', 'micro_k015', 'micro_d21', 'micro_b013'], ['micro_b051', 'micro_b018', 'micro_k010', 'micro_b041', 'micro_b042', 'micro_b027', 'micro_k021', 'micro_b057', 'micro_k023', 'micro_b045', 'micro_d01', 'micro_k006', 'micro_b031', 'micro_d20', 'micro_k012', 'micro_b022', 'micro_k008', 'micro_b052', 'micro_b062', 'micro_d14', 'micro_k022', 'micro_b003', 'micro_b016'], ['micro_b056', 'micro_b050', 'micro_k019', 'micro_k025', 'micro_b040', 'micro_b005', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_b034', 'micro_b015', 'micro_d15', 'micro_k029', 'micro_d13', 'micro_k002', 'micro_b009', 'micro_b044', 'micro_b028', 'micro_b030', 'micro_d19', 'micro_b025'], ['micro_k031', 'micro_d11', 'micro_b038', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b026', 'micro_b002', 'micro_b049', 'micro_b047', 'micro_b060', 'micro_b021', 'micro_b048', 'micro_b004', 'micro_k020', 'micro_d22', 'micro_d09', 'micro_b037', 'micro_b023', 'micro_b012', 'micro_b064'], ['micro_k009', 'micro_k007', 'micro_b055', 'micro_b019', 'micro_b036', 'micro_b007', 'micro_b046', 
'micro_k027', 'micro_d16', 'micro_b006', 'micro_k018', 'micro_k003', 'micro_d17', 'micro_d18', 'micro_d08', 'micro_b010', 'micro_b001', 'micro_k017', 'micro_b035', 'micro_k015', 'micro_d21', 'micro_b013', 'micro_k011'], ['micro_k001', 'micro_d23', 'micro_b058', 'micro_d04', 'micro_b029', 'micro_d03', 'micro_b061', 'micro_b039', 'micro_d02', 'micro_b014', 'micro_b008', 'micro_b032', 'micro_b059', 'micro_k013', 'micro_k024', 'micro_d12', 'micro_b054', 'micro_d10', 'micro_b017', 'micro_b024', 'micro_b011', 'micro_k016'], ['micro_k001', 'micro_b018', 'micro_k010', 'micro_b019', 'micro_b042', 'micro_b007', 'micro_b046', 'micro_d08', 'micro_b033', 'micro_b006', 'micro_b008', 'micro_b032', 'micro_k006', 'micro_k017', 'micro_b047', 'micro_k020', 'micro_k029', 'micro_b044', 'micro_b028', 'micro_b011', 'micro_k022', 'micro_b003'], ['micro_b056', 'micro_k007', 'micro_b055', 'micro_b041', 'micro_b036', 'micro_b027', 'micro_k021', 'micro_b057', 'micro_k023', 'micro_b020', 'micro_k003', 'micro_k018', 'micro_d17', 'micro_k013', 'micro_b022', 'micro_b001', 'micro_k002', 'micro_k015', 'micro_d14', 'micro_b023', 'micro_b013', 'micro_k016'], ['micro_k031', 'micro_b050', 'micro_k019', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_k027', 'micro_b039', 'micro_d02', 'micro_b045', 'micro_d01', 'micro_b031', 'micro_b059', 'micro_k012', 'micro_k024', 'micro_d12', 'micro_d18', 'micro_b052', 'micro_b035', 'micro_d10', 'micro_b017', 'micro_d21'], ['micro_b051', 'micro_d23', 'micro_b038', 'micro_d04', 'micro_b040', 'micro_b005', 'micro_d07', 'micro_b002', 'micro_d16', 'micro_b060', 'micro_b014', 'micro_d22', 'micro_b021', 'micro_b048', 'micro_b054', 'micro_b053', 'micro_b004', 'micro_b062', 'micro_k008', 'micro_d09', 'micro_b030', 'micro_b064', 'micro_k011'], ['micro_k009', 'micro_d11', 'micro_b058', 'micro_k025', 'micro_b029', 'micro_d03', 'micro_b061', 'micro_b026', 'micro_k004', 'micro_b049', 'micro_b034', 'micro_b015', 'micro_d15', 'micro_d20', 'micro_b037', 'micro_d13', 'micro_b010', 
'micro_b009', 'micro_d19', 'micro_b024', 'micro_b012', 'micro_b025', 'micro_b016'], ['micro_k001', 'micro_d23', 'micro_b058', 'micro_d04', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_k027', 'micro_d16', 'micro_b006', 'micro_b034', 'micro_k003', 'micro_d15', 'micro_k029', 'micro_d13', 'micro_k020', 'micro_b009', 'micro_b044', 'micro_b037', 'micro_b024', 'micro_d19', 'micro_b025'], ['micro_b051', 'micro_k007', 'micro_b055', 'micro_b019', 'micro_b029', 'micro_d03', 'micro_b061', 'micro_b026', 'micro_d02', 'micro_b045', 'micro_b008', 'micro_b032', 'micro_b059', 'micro_k013', 'micro_k024', 'micro_d12', 'micro_b054', 'micro_d10', 'micro_b017', 'micro_b023', 'micro_b012', 'micro_k011'], ['micro_b056', 'micro_b050', 'micro_k019', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b039', 'micro_k023', 'micro_b014', 'micro_d01', 'micro_k006', 'micro_b021', 'micro_d20', 'micro_k012', 'micro_b022', 'micro_k008', 'micro_b052', 'micro_b062', 'micro_d14', 'micro_k022', 'micro_b003', 'micro_b016'], ['micro_k009', 'micro_b018', 'micro_k010', 'micro_b041', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_b057', 'micro_b002', 'micro_b049', 'micro_b047', 'micro_b060', 'micro_b031', 'micro_b048', 'micro_d08', 'micro_b010', 'micro_d22', 'micro_d09', 'micro_b030', 'micro_b028', 'micro_b011', 'micro_b064', 'micro_k016'], ['micro_k031', 'micro_d11', 'micro_b038', 'micro_k025', 'micro_b040', 'micro_b005', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_k018', 'micro_b015', 'micro_d17', 'micro_d18', 'micro_b004', 'micro_k002', 'micro_b001', 'micro_k017', 'micro_b035', 'micro_k015', 'micro_d21', 'micro_b013'] ] tids = sorted(list(set([tid for fold in folds for tid in fold]))) def get_static_folds(): for i, test_tids in enumerate(folds): train_tids = [tid for tid in tids if tid not in test_tids] yield train_tids, test_tids, i
""" @author: Andreas Peldszus """ folds = [['micro_k009', 'micro_b050', 'micro_b055', 'micro_b041', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_d08', 'micro_b033', 'micro_b060', 'micro_b034', 'micro_k003', 'micro_d15', 'micro_d20', 'micro_k012', 'micro_d13', 'micro_b010', 'micro_k029', 'micro_d14', 'micro_b024', 'micro_b009', 'micro_k011'], ['micro_k001', 'micro_d11', 'micro_b058', 'micro_k014', 'micro_b029', 'micro_d03', 'micro_d07', 'micro_b002', 'micro_k023', 'micro_b049', 'micro_d01', 'micro_k006', 'micro_b031', 'micro_d12', 'micro_k024', 'micro_k017', 'micro_b054', 'micro_b012', 'micro_b062', 'micro_b052', 'micro_k008', 'micro_d21', 'micro_b016'], ['micro_k031', 'micro_b018', 'micro_k010', 'micro_k025', 'micro_b040', 'micro_d06', 'micro_b061', 'micro_b039', 'micro_d02', 'micro_b045', 'micro_b008', 'micro_b032', 'micro_b059', 'micro_k013', 'micro_b047', 'micro_k002', 'micro_b053', 'micro_b044', 'micro_b023', 'micro_d19', 'micro_b030', 'micro_b025', 'micro_b003'], ['micro_b051', 'micro_k007', 'micro_k019', 'micro_b019', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_b057', 'micro_d16', 'micro_b006', 'micro_k018', 'micro_b015', 'micro_d17', 'micro_d18', 'micro_b004', 'micro_k020', 'micro_b001', 'micro_b028', 'micro_b011', 'micro_b017', 'micro_k022', 'micro_b013'], ['micro_b056', 'micro_d23', 'micro_b038', 'micro_d04', 'micro_d05', 'micro_b005', 'micro_k027', 'micro_b026', 'micro_k004', 'micro_b020', 'micro_b014', 'micro_d22', 'micro_b021', 'micro_b048', 'micro_b022', 'micro_k015', 'micro_b037', 'micro_b035', 'micro_d10', 'micro_d09', 'micro_k016', 'micro_b064'], ['micro_b056', 'micro_b018', 'micro_b058', 'micro_k025', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_b057', 'micro_b002', 'micro_b049', 'micro_d01', 'micro_b060', 'micro_b031', 'micro_d12', 'micro_b054', 'micro_b004', 'micro_k012', 'micro_d10', 'micro_b035', 'micro_b009', 'micro_b064', 'micro_k016', 'micro_d21'], ['micro_k031', 'micro_d11', 'micro_b038', 'micro_k014', 'micro_b040', 
'micro_b005', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_b034', 'micro_k003', 'micro_d15', 'micro_k029', 'micro_k024', 'micro_b001', 'micro_b052', 'micro_b010', 'micro_d14', 'micro_b023', 'micro_b003', 'micro_b025', 'micro_b016'], ['micro_k001', 'micro_d23', 'micro_k010', 'micro_b041', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_k027', 'micro_k023', 'micro_b014', 'micro_b047', 'micro_k006', 'micro_b021', 'micro_b048', 'micro_d13', 'micro_k020', 'micro_b037', 'micro_k015', 'micro_b028', 'micro_b030', 'micro_d09', 'micro_b013'], ['micro_b051', 'micro_k007', 'micro_b055', 'micro_b019', 'micro_b029', 'micro_d03', 'micro_b061', 'micro_b026', 'micro_d02', 'micro_b045', 'micro_b008', 'micro_b032', 'micro_b059', 'micro_d20', 'micro_k013', 'micro_b022', 'micro_k017', 'micro_b017', 'micro_b062', 'micro_k008', 'micro_k022', 'micro_b012'], ['micro_k009', 'micro_b050', 'micro_k019', 'micro_d04', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b039', 'micro_d16', 'micro_b006', 'micro_k018', 'micro_b015', 'micro_d17', 'micro_d18', 'micro_d08', 'micro_k002', 'micro_d22', 'micro_b044', 'micro_d19', 'micro_b024', 'micro_b011', 'micro_k011'], ['micro_k001', 'micro_d11', 'micro_b038', 'micro_b041', 'micro_b042', 'micro_b007', 'micro_k021', 'micro_k027', 'micro_b002', 'micro_b045', 'micro_d01', 'micro_b060', 'micro_d15', 'micro_k029', 'micro_d08', 'micro_k020', 'micro_d10', 'micro_b044', 'micro_b028', 'micro_d19', 'micro_b064', 'micro_b003'], ['micro_k031', 'micro_b050', 'micro_k019', 'micro_b019', 'micro_b036', 'micro_b027', 'micro_b046', 'micro_b057', 'micro_k023', 'micro_b049', 'micro_b008', 'micro_k006', 'micro_b021', 'micro_d20', 'micro_k012', 'micro_b054', 'micro_d22', 'micro_b052', 'micro_d14', 'micro_b024', 'micro_k008', 'micro_b062', 'micro_b012'], ['micro_k009', 'micro_k007', 'micro_b055', 'micro_k025', 'micro_b040', 'micro_b005', 'micro_b061', 'micro_b026', 'micro_d02', 'micro_b014', 'micro_b047', 'micro_b032', 'micro_b059', 'micro_k013', 'micro_k024', 
'micro_d12', 'micro_d13', 'micro_b010', 'micro_b009', 'micro_b017', 'micro_k022', 'micro_b016', 'micro_d21'], ['micro_b051', 'micro_d23', 'micro_b058', 'micro_k014', 'micro_b029', 'micro_d03', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_b034', 'micro_b015', 'micro_b031', 'micro_b048', 'micro_b022', 'micro_k015', 'micro_k017', 'micro_b035', 'micro_b013', 'micro_d09', 'micro_b030', 'micro_k011'], ['micro_b056', 'micro_b018', 'micro_k010', 'micro_d04', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b039', 'micro_d16', 'micro_b006', 'micro_k018', 'micro_k003', 'micro_d17', 'micro_d18', 'micro_b004', 'micro_k002', 'micro_b001', 'micro_b037', 'micro_b023', 'micro_b011', 'micro_b025', 'micro_k016'], ['micro_k031', 'micro_k007', 'micro_b055', 'micro_b019', 'micro_b029', 'micro_d03', 'micro_d07', 'micro_b002', 'micro_d16', 'micro_b006', 'micro_b014', 'micro_b015', 'micro_d15', 'micro_d17', 'micro_k017', 'micro_b001', 'micro_k022', 'micro_b017', 'micro_d19', 'micro_b023', 'micro_b011', 'micro_b013', 'micro_d21'], ['micro_b056', 'micro_b050', 'micro_k019', 'micro_d04', 'micro_d05', 'micro_d06', 'micro_k027', 'micro_b026', 'micro_d02', 'micro_b049', 'micro_b047', 'micro_b032', 'micro_b059', 'micro_k012', 'micro_k018', 'micro_d13', 'micro_b009', 'micro_k002', 'micro_b044', 'micro_k008', 'micro_b030', 'micro_b025'], ['micro_b051', 'micro_d11', 'micro_k010', 'micro_b041', 'micro_b042', 'micro_b007', 'micro_b046', 'micro_d08', 'micro_b033', 'micro_b060', 'micro_b008', 'micro_b031', 'micro_k013', 'micro_d20', 'micro_b037', 'micro_k020', 'micro_d22', 'micro_d18', 'micro_k015', 'micro_b035', 'micro_k016', 'micro_b064'], ['micro_k009', 'micro_d23', 'micro_b038', 'micro_k014', 'micro_b040', 'micro_b005', 'micro_b061', 'micro_b039', 'micro_k004', 'micro_b020', 'micro_b034', 'micro_k003', 'micro_b021', 'micro_b048', 'micro_b054', 'micro_b053', 'micro_b052', 'micro_b062', 'micro_d14', 'micro_b028', 'micro_b003', 'micro_k011'], ['micro_k001', 'micro_b018', 'micro_b058', 
'micro_k025', 'micro_b036', 'micro_b027', 'micro_k021', 'micro_b057', 'micro_k023', 'micro_b045', 'micro_d01', 'micro_k006', 'micro_b022', 'micro_k029', 'micro_k024', 'micro_d12', 'micro_b004', 'micro_b010', 'micro_d10', 'micro_b024', 'micro_b012', 'micro_d09', 'micro_b016'], ['micro_b051', 'micro_d11', 'micro_b058', 'micro_k025', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_b057', 'micro_k023', 'micro_b020', 'micro_k003', 'micro_k018', 'micro_d17', 'micro_b048', 'micro_b054', 'micro_b053', 'micro_d10', 'micro_b035', 'micro_k015', 'micro_b024', 'micro_b001', 'micro_d09'], ['micro_b056', 'micro_d23', 'micro_k010', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_k027', 'micro_b039', 'micro_d02', 'micro_b045', 'micro_d01', 'micro_b032', 'micro_b059', 'micro_d20', 'micro_k012', 'micro_k002', 'micro_b013', 'micro_b052', 'micro_d18', 'micro_b023', 'micro_b012', 'micro_d21', 'micro_k011'], ['micro_k031', 'micro_k007', 'micro_b055', 'micro_b041', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_d08', 'micro_b033', 'micro_b060', 'micro_b008', 'micro_b031', 'micro_k006', 'micro_k013', 'micro_b047', 'micro_k020', 'micro_b009', 'micro_b017', 'micro_b028', 'micro_b016', 'micro_k008', 'micro_b025', 'micro_b030'], ['micro_k009', 'micro_b050', 'micro_k019', 'micro_b019', 'micro_b029', 'micro_d03', 'micro_d07', 'micro_b002', 'micro_d16', 'micro_b006', 'micro_b034', 'micro_b015', 'micro_b021', 'micro_k029', 'micro_b022', 'micro_b044', 'micro_k017', 'micro_b004', 'micro_b062', 'micro_d14', 'micro_b064', 'micro_b003'], ['micro_k001', 'micro_b018', 'micro_b038', 'micro_d04', 'micro_b040', 'micro_b005', 'micro_b061', 'micro_b026', 'micro_k004', 'micro_b049', 'micro_b014', 'micro_d22', 'micro_d15', 'micro_d12', 'micro_k024', 'micro_d13', 'micro_b037', 'micro_b010', 'micro_b011', 'micro_d19', 'micro_k022', 'micro_k016'], ['micro_k031', 'micro_d23', 'micro_k010', 'micro_b019', 'micro_b036', 'micro_b027', 'micro_k021', 'micro_k027', 'micro_d16', 'micro_b006', 'micro_b032', 'micro_b015', 
'micro_d17', 'micro_b059', 'micro_k024', 'micro_b001', 'micro_b004', 'micro_d18', 'micro_b028', 'micro_d19', 'micro_b030', 'micro_k011', 'micro_b003'], ['micro_k009', 'micro_b018', 'micro_b038', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b026', 'micro_k023', 'micro_b014', 'micro_d01', 'micro_k006', 'micro_b031', 'micro_d12', 'micro_b054', 'micro_d08', 'micro_b037', 'micro_d10', 'micro_b035', 'micro_k015', 'micro_b064', 'micro_b016', 'micro_d21'], ['micro_b051', 'micro_d11', 'micro_b058', 'micro_d04', 'micro_b029', 'micro_d03', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_k018', 'micro_k003', 'micro_b022', 'micro_b011', 'micro_d13', 'micro_k020', 'micro_k029', 'micro_b044', 'micro_b023', 'micro_b009', 'micro_k016', 'micro_b025'], ['micro_b056', 'micro_b050', 'micro_b055', 'micro_k025', 'micro_b040', 'micro_b005', 'micro_b061', 'micro_b039', 'micro_d02', 'micro_b045', 'micro_b034', 'micro_d22', 'micro_d15', 'micro_d20', 'micro_k013', 'micro_k012', 'micro_b010', 'micro_b013', 'micro_b017', 'micro_d09', 'micro_k022', 'micro_b012'], ['micro_k001', 'micro_k007', 'micro_k019', 'micro_b041', 'micro_b042', 'micro_b007', 'micro_b046', 'micro_b057', 'micro_b002', 'micro_b049', 'micro_b008', 'micro_b060', 'micro_b021', 'micro_b048', 'micro_b047', 'micro_k002', 'micro_k017', 'micro_b052', 'micro_d14', 'micro_b024', 'micro_k008', 'micro_b062'], ['micro_k009', 'micro_k007', 'micro_b055', 'micro_d04', 'micro_b040', 'micro_d06', 'micro_b061', 'micro_b039', 'micro_d02', 'micro_b014', 'micro_d01', 'micro_b032', 'micro_b059', 'micro_d20', 'micro_k013', 'micro_b022', 'micro_k017', 'micro_b017', 'micro_b062', 'micro_d14', 'micro_k022', 'micro_b003'], ['micro_k001', 'micro_b050', 'micro_k019', 'micro_k025', 'micro_d05', 'micro_b005', 'micro_d07', 'micro_b026', 'micro_k023', 'micro_b049', 'micro_b047', 'micro_b060', 'micro_b021', 'micro_b048', 'micro_k024', 'micro_d22', 'micro_b052', 'micro_d09', 'micro_b035', 'micro_b011', 'micro_b024', 'micro_b012'], 
['micro_k031', 'micro_d11', 'micro_b058', 'micro_k014', 'micro_b029', 'micro_d03', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_b034', 'micro_b015', 'micro_d15', 'micro_k029', 'micro_d13', 'micro_k002', 'micro_b009', 'micro_b044', 'micro_b010', 'micro_k008', 'micro_d19', 'micro_b025', 'micro_b016'], ['micro_b056', 'micro_d23', 'micro_b038', 'micro_b019', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_b057', 'micro_b002', 'micro_b045', 'micro_b008', 'micro_k006', 'micro_b031', 'micro_d12', 'micro_b054', 'micro_b004', 'micro_k012', 'micro_d10', 'micro_b023', 'micro_b030', 'micro_b064', 'micro_k016', 'micro_k011'], ['micro_b051', 'micro_b018', 'micro_k010', 'micro_b041', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_k027', 'micro_d16', 'micro_b006', 'micro_k018', 'micro_k003', 'micro_d17', 'micro_d18', 'micro_d08', 'micro_k020', 'micro_b001', 'micro_b037', 'micro_b028', 'micro_k015', 'micro_d21', 'micro_b013'], ['micro_b051', 'micro_b018', 'micro_k010', 'micro_b041', 'micro_b042', 'micro_b027', 'micro_k021', 'micro_b057', 'micro_k023', 'micro_b045', 'micro_d01', 'micro_k006', 'micro_b031', 'micro_d20', 'micro_k012', 'micro_b022', 'micro_k008', 'micro_b052', 'micro_b062', 'micro_d14', 'micro_k022', 'micro_b003', 'micro_b016'], ['micro_b056', 'micro_b050', 'micro_k019', 'micro_k025', 'micro_b040', 'micro_b005', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_b034', 'micro_b015', 'micro_d15', 'micro_k029', 'micro_d13', 'micro_k002', 'micro_b009', 'micro_b044', 'micro_b028', 'micro_b030', 'micro_d19', 'micro_b025'], ['micro_k031', 'micro_d11', 'micro_b038', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b026', 'micro_b002', 'micro_b049', 'micro_b047', 'micro_b060', 'micro_b021', 'micro_b048', 'micro_b004', 'micro_k020', 'micro_d22', 'micro_d09', 'micro_b037', 'micro_b023', 'micro_b012', 'micro_b064'], ['micro_k009', 'micro_k007', 'micro_b055', 'micro_b019', 'micro_b036', 'micro_b007', 'micro_b046', 'micro_k027', 
'micro_d16', 'micro_b006', 'micro_k018', 'micro_k003', 'micro_d17', 'micro_d18', 'micro_d08', 'micro_b010', 'micro_b001', 'micro_k017', 'micro_b035', 'micro_k015', 'micro_d21', 'micro_b013', 'micro_k011'], ['micro_k001', 'micro_d23', 'micro_b058', 'micro_d04', 'micro_b029', 'micro_d03', 'micro_b061', 'micro_b039', 'micro_d02', 'micro_b014', 'micro_b008', 'micro_b032', 'micro_b059', 'micro_k013', 'micro_k024', 'micro_d12', 'micro_b054', 'micro_d10', 'micro_b017', 'micro_b024', 'micro_b011', 'micro_k016'], ['micro_k001', 'micro_b018', 'micro_k010', 'micro_b019', 'micro_b042', 'micro_b007', 'micro_b046', 'micro_d08', 'micro_b033', 'micro_b006', 'micro_b008', 'micro_b032', 'micro_k006', 'micro_k017', 'micro_b047', 'micro_k020', 'micro_k029', 'micro_b044', 'micro_b028', 'micro_b011', 'micro_k022', 'micro_b003'], ['micro_b056', 'micro_k007', 'micro_b055', 'micro_b041', 'micro_b036', 'micro_b027', 'micro_k021', 'micro_b057', 'micro_k023', 'micro_b020', 'micro_k003', 'micro_k018', 'micro_d17', 'micro_k013', 'micro_b022', 'micro_b001', 'micro_k002', 'micro_k015', 'micro_d14', 'micro_b023', 'micro_b013', 'micro_k016'], ['micro_k031', 'micro_b050', 'micro_k019', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_k027', 'micro_b039', 'micro_d02', 'micro_b045', 'micro_d01', 'micro_b031', 'micro_b059', 'micro_k012', 'micro_k024', 'micro_d12', 'micro_d18', 'micro_b052', 'micro_b035', 'micro_d10', 'micro_b017', 'micro_d21'], ['micro_b051', 'micro_d23', 'micro_b038', 'micro_d04', 'micro_b040', 'micro_b005', 'micro_d07', 'micro_b002', 'micro_d16', 'micro_b060', 'micro_b014', 'micro_d22', 'micro_b021', 'micro_b048', 'micro_b054', 'micro_b053', 'micro_b004', 'micro_b062', 'micro_k008', 'micro_d09', 'micro_b030', 'micro_b064', 'micro_k011'], ['micro_k009', 'micro_d11', 'micro_b058', 'micro_k025', 'micro_b029', 'micro_d03', 'micro_b061', 'micro_b026', 'micro_k004', 'micro_b049', 'micro_b034', 'micro_b015', 'micro_d15', 'micro_d20', 'micro_b037', 'micro_d13', 'micro_b010', 'micro_b009', 
'micro_d19', 'micro_b024', 'micro_b012', 'micro_b025', 'micro_b016'], ['micro_k001', 'micro_d23', 'micro_b058', 'micro_d04', 'micro_b042', 'micro_b027', 'micro_b046', 'micro_k027', 'micro_d16', 'micro_b006', 'micro_b034', 'micro_k003', 'micro_d15', 'micro_k029', 'micro_d13', 'micro_k020', 'micro_b009', 'micro_b044', 'micro_b037', 'micro_b024', 'micro_d19', 'micro_b025'], ['micro_b051', 'micro_k007', 'micro_b055', 'micro_b019', 'micro_b029', 'micro_d03', 'micro_b061', 'micro_b026', 'micro_d02', 'micro_b045', 'micro_b008', 'micro_b032', 'micro_b059', 'micro_k013', 'micro_k024', 'micro_d12', 'micro_b054', 'micro_d10', 'micro_b017', 'micro_b023', 'micro_b012', 'micro_k011'], ['micro_b056', 'micro_b050', 'micro_k019', 'micro_k014', 'micro_d05', 'micro_d06', 'micro_d07', 'micro_b039', 'micro_k023', 'micro_b014', 'micro_d01', 'micro_k006', 'micro_b021', 'micro_d20', 'micro_k012', 'micro_b022', 'micro_k008', 'micro_b052', 'micro_b062', 'micro_d14', 'micro_k022', 'micro_b003', 'micro_b016'], ['micro_k009', 'micro_b018', 'micro_k010', 'micro_b041', 'micro_b036', 'micro_b007', 'micro_k021', 'micro_b057', 'micro_b002', 'micro_b049', 'micro_b047', 'micro_b060', 'micro_b031', 'micro_b048', 'micro_d08', 'micro_b010', 'micro_d22', 'micro_d09', 'micro_b030', 'micro_b028', 'micro_b011', 'micro_b064', 'micro_k016'], ['micro_k031', 'micro_d11', 'micro_b038', 'micro_k025', 'micro_b040', 'micro_b005', 'micro_b053', 'micro_b020', 'micro_b033', 'micro_k004', 'micro_k018', 'micro_b015', 'micro_d17', 'micro_d18', 'micro_b004', 'micro_k002', 'micro_b001', 'micro_k017', 'micro_b035', 'micro_k015', 'micro_d21', 'micro_b013']] tids = sorted(list(set([tid for fold in folds for tid in fold]))) def get_static_folds(): for (i, test_tids) in enumerate(folds): train_tids = [tid for tid in tids if tid not in test_tids] yield (train_tids, test_tids, i)
load("@io_bazel_rules_sass//:defs.bzl", "sass_repositories") def rules_web_dependencies(): sass_repositories()
load('@io_bazel_rules_sass//:defs.bzl', 'sass_repositories') def rules_web_dependencies(): sass_repositories()
count = 0 inputNum = 1 prevInput = -1 while (inputNum > 0): inputNum = int(input()) if inputNum == prevInput: count += 1 prevInput = inputNum print (f'{count}')
count = 0 input_num = 1 prev_input = -1 while inputNum > 0: input_num = int(input()) if inputNum == prevInput: count += 1 prev_input = inputNum print(f'{count}')
def snail(h, a, b):
    # Print the number of days a snail needs to reach height h when it climbs
    # a per day and, if the top is not yet reached, slides back b overnight.
    now_high = 0
    d = 0
    while now_high < h:
        now_high += a
        # Only slide back if the snail has not already reached the top today.
        if now_high < h:
            now_high -= b
        d += 1
    print(d)

snail(int(input()), int(input()), int(input()))
def snail(h, a, b):
    """Print how many days a snail needs to top a pole of height h,
    climbing a per day and slipping back b each night it has not finished."""
    height_reached = 0
    days = 0
    while height_reached < h:
        height_reached += a
        # No night-time slip on the day the snail reaches the top.
        if height_reached < h:
            height_reached -= b
        days += 1
    print(days)

snail(int(input()), int(input()), int(input()))
apiAttachAvailable = u'API tillg\xe4ngligt' apiAttachNotAvailable = u'Inte tillg\xe4ngligt' apiAttachPendingAuthorization = u'Godk\xe4nnande avvaktas' apiAttachRefused = u'Nekades' apiAttachSuccess = u'Det lyckades' apiAttachUnknown = u'Ok\xe4nd' budDeletedFriend = u'Borttagen fr\xe5n kontaktlistan' budFriend = u'V\xe4n' budNeverBeenFriend = u'Aldrig varit i kontaktlistan' budPendingAuthorization = u'Godk\xe4nnande avvaktas' budUnknown = u'Ok\xe4nd' cfrBlockedByRecipient = u'Samtalet blockerades av mottagaren' cfrMiscError = u'Div fel' cfrNoCommonCodec = u'Gemensam codec saknas' cfrNoProxyFound = u'Mellanserver finns inte' cfrNotAuthorizedByRecipient = u'Aktuell anv\xe4ndare inte godk\xe4nd av mottagaren' cfrRecipientNotFriend = u'Mottagaren ej en v\xe4n' cfrRemoteDeviceError = u'Det har uppst\xe5tt problem med motpartens ljudenhet' cfrSessionTerminated = u'Sessionen avslutad' cfrSoundIOError = u'I/O-fel p\xe5 ljudet' cfrSoundRecordingError = u'Ljudinspelningsfel' cfrUnknown = u'Ok\xe4nd' cfrUserDoesNotExist = u'Anv\xe4ndaren/telefonnumret finns inte' cfrUserIsOffline = u'Anv\xe4ndaren \xe4r offline' chsAllCalls = u'Legacy-dialog' chsDialog = u'Dialog' chsIncomingCalls = u'Kr\xe4ver multi-godk\xe4nnande' chsLegacyDialog = u'Legacy-dialog' chsMissedCalls = u'Dialog' chsMultiNeedAccept = u'Kr\xe4ver multi-godk\xe4nnande' chsMultiSubscribed = u'Multi-abonnerade' chsOutgoingCalls = u'Multi-abonnerade' chsUnknown = u'Ok\xe4nd' chsUnsubscribed = u'Avabonnerad' clsBusy = u'Upptaget' clsCancelled = u'Avbruten' clsEarlyMedia = u'Spelar Early Media' clsFailed = u'Samtalet kunde inte kopplas' clsFinished = u'Avslutat' clsInProgress = u'P\xe5g\xe5ende samtal' clsLocalHold = u'Lokalt parkerat samtal' clsMissed = u'missat samtal' clsOnHold = u'Parkerad' clsRefused = u'Nekades' clsRemoteHold = u'Fj\xe4rrparkerat samtal' clsRinging = u'pratat' clsRouting = u'Routar' clsTransferred = u'Ok\xe4nd' clsTransferring = u'Ok\xe4nd' clsUnknown = u'Ok\xe4nd' clsUnplaced = u'Inte uppringt' 
clsVoicemailBufferingGreeting = u'Buffrar h\xe4lsningen' clsVoicemailCancelled = u'R\xf6stmeddelandet avbr\xf6ts' clsVoicemailFailed = u'R\xf6stmeddelandet misslyckades' clsVoicemailPlayingGreeting = u'Spelar h\xe4lsningen' clsVoicemailRecording = u'Spelar in r\xf6stmeddelande' clsVoicemailSent = u'R\xf6stmeddelandet skickades' clsVoicemailUploading = u'Laddar upp r\xf6stmeddelande' cltIncomingP2P = u'Inkommande P2P-samtal' cltIncomingPSTN = u'Inkommande telefonsamtal' cltOutgoingP2P = u'Utg\xe5ende P2P-samtal' cltOutgoingPSTN = u'Utg\xe5ende telefonsamtal' cltUnknown = u'Ok\xe4nd' cmeAddedMembers = u'Medlemmar lades till' cmeCreatedChatWith = u'Startade chatt med' cmeEmoted = u'Ok\xe4nd' cmeLeft = u'L\xe4mnade' cmeSaid = u'Redan sagt' cmeSawMembers = u'S\xe5g medlemmar' cmeSetTopic = u'Ange \xe4mne' cmeUnknown = u'Ok\xe4nd' cmsRead = u'L\xe4stes' cmsReceived = u'Togs emot' cmsSending = u'S\xe4nder...' cmsSent = u'Skickades' cmsUnknown = u'Ok\xe4nd' conConnecting = u'Ansluter...' conOffline = u'Offline' conOnline = u'Online' conPausing = u'Pauserar' conUnknown = u'Ok\xe4nd' cusAway = u'Tillf\xe4lligt borta' cusDoNotDisturb = u'St\xf6r ej' cusInvisible = u'Osynlig' cusLoggedOut = u'Offline' cusNotAvailable = u'Inte tillg\xe4ngligt' cusOffline = u'Offline' cusOnline = u'Online' cusSkypeMe = u'Skype Me' cusUnknown = u'Ok\xe4nd' cvsBothEnabled = u'Skickar och tar emot video' cvsNone = u'Ingen video' cvsReceiveEnabled = u'Tar emot video' cvsSendEnabled = u'Skickar video' cvsUnknown = u'' grpAllFriends = u'Alla kontakter' grpAllUsers = u'Alla anv\xe4ndare' grpCustomGroup = u'S\xe4rskild' grpOnlineFriends = u'Online-v\xe4nner' grpPendingAuthorizationFriends = u'Godk\xe4nnande avvaktas' grpProposedSharedGroup = u'Proposed Shared Group' grpRecentlyContactedUsers = u'Nyligen kontaktade anv\xe4ndare' grpSharedGroup = u'Shared Group' grpSkypeFriends = u'Skype-kontakter' grpSkypeOutFriends = u'SkypeOut-kontakter' grpUngroupedFriends = u'Icke grupperade kontakter' grpUnknown = 
u'Ok\xe4nd' grpUsersAuthorizedByMe = u'Godk\xe4nda av mig' grpUsersBlockedByMe = u'Blockerade av mig' grpUsersWaitingMyAuthorization = u'Avvaktar mitt godk\xe4nnande' leaAddDeclined = u'Till\xe4gg nekades' leaAddedNotAuthorized = u'Den som l\xe4ggs till m\xe5ste vara godk\xe4nd' leaAdderNotFriend = u'Den som l\xe4gger till m\xe5ste vara en v\xe4n' leaUnknown = u'Ok\xe4nd' leaUnsubscribe = u'Avabonnerad' leaUserIncapable = u'Anv\xe4ndaren kan inte' leaUserNotFound = u'Anv\xe4ndaren finns inte' olsAway = u'Tillf\xe4lligt borta' olsDoNotDisturb = u'St\xf6r ej' olsNotAvailable = u'Inte tillg\xe4ngligt' olsOffline = u'Offline' olsOnline = u'Online' olsSkypeMe = u'Skype Me' olsSkypeOut = u'SkypeOut' olsUnknown = u'Ok\xe4nd' smsMessageStatusComposing = u'Composing' smsMessageStatusDelivered = u'Delivered' smsMessageStatusFailed = u'Failed' smsMessageStatusRead = u'Read' smsMessageStatusReceived = u'Received' smsMessageStatusSendingToServer = u'Sending to Server' smsMessageStatusSentToServer = u'Sent to Server' smsMessageStatusSomeTargetsFailed = u'Some Targets Failed' smsMessageStatusUnknown = u'Unknown' smsMessageTypeCCRequest = u'Confirmation Code Request' smsMessageTypeCCSubmit = u'Confirmation Code Submit' smsMessageTypeIncoming = u'Incoming' smsMessageTypeOutgoing = u'Outgoing' smsMessageTypeUnknown = u'Unknown' smsTargetStatusAcceptable = u'Acceptable' smsTargetStatusAnalyzing = u'Analyzing' smsTargetStatusDeliveryFailed = u'Delivery Failed' smsTargetStatusDeliveryPending = u'Delivery Pending' smsTargetStatusDeliverySuccessful = u'Delivery Successful' smsTargetStatusNotRoutable = u'Not Routable' smsTargetStatusUndefined = u'Undefined' smsTargetStatusUnknown = u'Unknown' usexFemale = u'Kvinna' usexMale = u'Man' usexUnknown = u'Ok\xe4nd' vmrConnectError = u'Anslutningsfel' vmrFileReadError = u'Fill\xe4sningsfel' vmrFileWriteError = u'Filskrivningsfel' vmrMiscError = u'Div fel' vmrNoError = u'Inget fel' vmrNoPrivilege = u'Voicemail-beh\xf6righet saknas' vmrNoVoicemail 
= u'R\xf6stmeddelande saknas' vmrPlaybackError = u'Uppspelningsfel' vmrRecordingError = u'Inspelningsfel' vmrUnknown = u'Ok\xe4nd' vmsBlank = u'Tomt' vmsBuffering = u'Buffrar' vmsDeleting = u'Tar bort' vmsDownloading = u'Laddar ner' vmsFailed = u'Misslyckades' vmsNotDownloaded = u'Inte nerladdat' vmsPlayed = u'Uppspelat' vmsPlaying = u'Spelar upp' vmsRecorded = u'Inspelat' vmsRecording = u'Spelar in r\xf6stmeddelande' vmsUnknown = u'Ok\xe4nd' vmsUnplayed = u'Inte uppspelat' vmsUploaded = u'Uppladdat' vmsUploading = u'Laddar upp' vmtCustomGreeting = u'S\xe4rskild h\xe4lsning' vmtDefaultGreeting = u'Standardh\xe4lsning' vmtIncoming = u'Nytt r\xf6stmeddelande' vmtOutgoing = u'Utg\xe5ende' vmtUnknown = u'Ok\xe4nd' vssAvailable = u'Tillg\xe4ngligt' vssNotAvailable = u'Inte tillg\xe4ngligt' vssPaused = u'Pausat' vssRejected = u'Nekades' vssRunning = u'P\xe5g\xe5r' vssStarting = u'Startar' vssStopping = u'Stannar' vssUnknown = u'Ok\xe4nd'
api_attach_available = u'API tillgängligt' api_attach_not_available = u'Inte tillgängligt' api_attach_pending_authorization = u'Godkännande avvaktas' api_attach_refused = u'Nekades' api_attach_success = u'Det lyckades' api_attach_unknown = u'Okänd' bud_deleted_friend = u'Borttagen från kontaktlistan' bud_friend = u'Vän' bud_never_been_friend = u'Aldrig varit i kontaktlistan' bud_pending_authorization = u'Godkännande avvaktas' bud_unknown = u'Okänd' cfr_blocked_by_recipient = u'Samtalet blockerades av mottagaren' cfr_misc_error = u'Div fel' cfr_no_common_codec = u'Gemensam codec saknas' cfr_no_proxy_found = u'Mellanserver finns inte' cfr_not_authorized_by_recipient = u'Aktuell användare inte godkänd av mottagaren' cfr_recipient_not_friend = u'Mottagaren ej en vän' cfr_remote_device_error = u'Det har uppstått problem med motpartens ljudenhet' cfr_session_terminated = u'Sessionen avslutad' cfr_sound_io_error = u'I/O-fel på ljudet' cfr_sound_recording_error = u'Ljudinspelningsfel' cfr_unknown = u'Okänd' cfr_user_does_not_exist = u'Användaren/telefonnumret finns inte' cfr_user_is_offline = u'Användaren är offline' chs_all_calls = u'Legacy-dialog' chs_dialog = u'Dialog' chs_incoming_calls = u'Kräver multi-godkännande' chs_legacy_dialog = u'Legacy-dialog' chs_missed_calls = u'Dialog' chs_multi_need_accept = u'Kräver multi-godkännande' chs_multi_subscribed = u'Multi-abonnerade' chs_outgoing_calls = u'Multi-abonnerade' chs_unknown = u'Okänd' chs_unsubscribed = u'Avabonnerad' cls_busy = u'Upptaget' cls_cancelled = u'Avbruten' cls_early_media = u'Spelar Early Media' cls_failed = u'Samtalet kunde inte kopplas' cls_finished = u'Avslutat' cls_in_progress = u'Pågående samtal' cls_local_hold = u'Lokalt parkerat samtal' cls_missed = u'missat samtal' cls_on_hold = u'Parkerad' cls_refused = u'Nekades' cls_remote_hold = u'Fjärrparkerat samtal' cls_ringing = u'pratat' cls_routing = u'Routar' cls_transferred = u'Okänd' cls_transferring = u'Okänd' cls_unknown = u'Okänd' cls_unplaced = 
u'Inte uppringt' cls_voicemail_buffering_greeting = u'Buffrar hälsningen' cls_voicemail_cancelled = u'Röstmeddelandet avbröts' cls_voicemail_failed = u'Röstmeddelandet misslyckades' cls_voicemail_playing_greeting = u'Spelar hälsningen' cls_voicemail_recording = u'Spelar in röstmeddelande' cls_voicemail_sent = u'Röstmeddelandet skickades' cls_voicemail_uploading = u'Laddar upp röstmeddelande' clt_incoming_p2_p = u'Inkommande P2P-samtal' clt_incoming_pstn = u'Inkommande telefonsamtal' clt_outgoing_p2_p = u'Utgående P2P-samtal' clt_outgoing_pstn = u'Utgående telefonsamtal' clt_unknown = u'Okänd' cme_added_members = u'Medlemmar lades till' cme_created_chat_with = u'Startade chatt med' cme_emoted = u'Okänd' cme_left = u'Lämnade' cme_said = u'Redan sagt' cme_saw_members = u'Såg medlemmar' cme_set_topic = u'Ange ämne' cme_unknown = u'Okänd' cms_read = u'Lästes' cms_received = u'Togs emot' cms_sending = u'Sänder...' cms_sent = u'Skickades' cms_unknown = u'Okänd' con_connecting = u'Ansluter...' con_offline = u'Offline' con_online = u'Online' con_pausing = u'Pauserar' con_unknown = u'Okänd' cus_away = u'Tillfälligt borta' cus_do_not_disturb = u'Stör ej' cus_invisible = u'Osynlig' cus_logged_out = u'Offline' cus_not_available = u'Inte tillgängligt' cus_offline = u'Offline' cus_online = u'Online' cus_skype_me = u'Skype Me' cus_unknown = u'Okänd' cvs_both_enabled = u'Skickar och tar emot video' cvs_none = u'Ingen video' cvs_receive_enabled = u'Tar emot video' cvs_send_enabled = u'Skickar video' cvs_unknown = u'' grp_all_friends = u'Alla kontakter' grp_all_users = u'Alla användare' grp_custom_group = u'Särskild' grp_online_friends = u'Online-vänner' grp_pending_authorization_friends = u'Godkännande avvaktas' grp_proposed_shared_group = u'Proposed Shared Group' grp_recently_contacted_users = u'Nyligen kontaktade användare' grp_shared_group = u'Shared Group' grp_skype_friends = u'Skype-kontakter' grp_skype_out_friends = u'SkypeOut-kontakter' grp_ungrouped_friends = u'Icke 
grupperade kontakter' grp_unknown = u'Okänd' grp_users_authorized_by_me = u'Godkända av mig' grp_users_blocked_by_me = u'Blockerade av mig' grp_users_waiting_my_authorization = u'Avvaktar mitt godkännande' lea_add_declined = u'Tillägg nekades' lea_added_not_authorized = u'Den som läggs till måste vara godkänd' lea_adder_not_friend = u'Den som lägger till måste vara en vän' lea_unknown = u'Okänd' lea_unsubscribe = u'Avabonnerad' lea_user_incapable = u'Användaren kan inte' lea_user_not_found = u'Användaren finns inte' ols_away = u'Tillfälligt borta' ols_do_not_disturb = u'Stör ej' ols_not_available = u'Inte tillgängligt' ols_offline = u'Offline' ols_online = u'Online' ols_skype_me = u'Skype Me' ols_skype_out = u'SkypeOut' ols_unknown = u'Okänd' sms_message_status_composing = u'Composing' sms_message_status_delivered = u'Delivered' sms_message_status_failed = u'Failed' sms_message_status_read = u'Read' sms_message_status_received = u'Received' sms_message_status_sending_to_server = u'Sending to Server' sms_message_status_sent_to_server = u'Sent to Server' sms_message_status_some_targets_failed = u'Some Targets Failed' sms_message_status_unknown = u'Unknown' sms_message_type_cc_request = u'Confirmation Code Request' sms_message_type_cc_submit = u'Confirmation Code Submit' sms_message_type_incoming = u'Incoming' sms_message_type_outgoing = u'Outgoing' sms_message_type_unknown = u'Unknown' sms_target_status_acceptable = u'Acceptable' sms_target_status_analyzing = u'Analyzing' sms_target_status_delivery_failed = u'Delivery Failed' sms_target_status_delivery_pending = u'Delivery Pending' sms_target_status_delivery_successful = u'Delivery Successful' sms_target_status_not_routable = u'Not Routable' sms_target_status_undefined = u'Undefined' sms_target_status_unknown = u'Unknown' usex_female = u'Kvinna' usex_male = u'Man' usex_unknown = u'Okänd' vmr_connect_error = u'Anslutningsfel' vmr_file_read_error = u'Filläsningsfel' vmr_file_write_error = u'Filskrivningsfel' 
vmr_misc_error = u'Div fel' vmr_no_error = u'Inget fel' vmr_no_privilege = u'Voicemail-behörighet saknas' vmr_no_voicemail = u'Röstmeddelande saknas' vmr_playback_error = u'Uppspelningsfel' vmr_recording_error = u'Inspelningsfel' vmr_unknown = u'Okänd' vms_blank = u'Tomt' vms_buffering = u'Buffrar' vms_deleting = u'Tar bort' vms_downloading = u'Laddar ner' vms_failed = u'Misslyckades' vms_not_downloaded = u'Inte nerladdat' vms_played = u'Uppspelat' vms_playing = u'Spelar upp' vms_recorded = u'Inspelat' vms_recording = u'Spelar in röstmeddelande' vms_unknown = u'Okänd' vms_unplayed = u'Inte uppspelat' vms_uploaded = u'Uppladdat' vms_uploading = u'Laddar upp' vmt_custom_greeting = u'Särskild hälsning' vmt_default_greeting = u'Standardhälsning' vmt_incoming = u'Nytt röstmeddelande' vmt_outgoing = u'Utgående' vmt_unknown = u'Okänd' vss_available = u'Tillgängligt' vss_not_available = u'Inte tillgängligt' vss_paused = u'Pausat' vss_rejected = u'Nekades' vss_running = u'Pågår' vss_starting = u'Startar' vss_stopping = u'Stannar' vss_unknown = u'Okänd'
class Student:
    """A student with a name, a school and a list of marks."""

    def __init__(self, name, school):
        self.name = name
        self.school = school
        self.marks = []

    def average(self):
        """Return the mean of the recorded marks (ZeroDivisionError if empty)."""
        return sum(self.marks) / len(self.marks)

    @classmethod
    def friend(cls, origin, friend_name, *args, **kwargs):
        """Create a friend of *origin* attending the same school.

        Extra positional/keyword arguments are forwarded to cls, so subclasses
        with wider constructors (e.g. WorkingStudent) work through this too.
        """
        return cls(friend_name, origin.school, *args, **kwargs)


class WorkingStudent(Student):
    """A student who also holds a job with a salary and a job title."""

    def __init__(self, name, school, salary, job_title):
        super().__init__(name, school)
        self.salary = salary
        self.job_title = job_title


# Demo: the large commented-out experiments that used to live here were dead
# code and have been removed.
anna = WorkingStudent("Anna", "MIT", 30.00, "software dewloper")
print(anna.salary)
# anna is the origin, "Greg" the friend_name; 20.00 and the job title are
# forwarded to WorkingStudent.__init__ via *args.
friend = WorkingStudent.friend(anna, "Greg", 20.00, "software developer")
print(friend.name)
print(friend.school)
print(friend.salary)
class Student:
    """A student with a name, a school and a list of marks."""

    def __init__(self, name, school):
        self.name = name
        self.school = school
        self.marks = []

    def average(self):
        """Return the mean of the recorded marks (ZeroDivisionError if empty)."""
        return sum(self.marks) / len(self.marks)

    @classmethod
    def friend(cls, origin, friend_name, *args, **kwargs):
        """Create a friend of *origin* attending the same school.

        Extra positional/keyword arguments are forwarded to cls, so subclasses
        with wider constructors work through this too.
        """
        return cls(friend_name, origin.school, *args, **kwargs)


# Fixed: the class was declared as `Workingstudent` but referenced below as
# `WorkingStudent.friend`, and the instance was built via the never-defined
# `working_student(...)` -- both raised NameError. One consistent CapWords
# name (PEP 8) resolves both.
class WorkingStudent(Student):
    """A student who also holds a job with a salary and a job title."""

    def __init__(self, name, school, salary, job_title):
        super().__init__(name, school)
        self.salary = salary
        self.job_title = job_title


anna = WorkingStudent('Anna', 'MIT', 30.0, 'software dewloper')
print(anna.salary)
friend = WorkingStudent.friend(anna, 'Greg', 20.0, 'software developer')
print(friend.name)
print(friend.school)
print(friend.salary)
def round_counter():
    """Return the 48 six-bit round-constant strings of an LFSR-style counter.

    Each step: take the current value as a 6-bit string, compute a feedback
    bit from 1 + the two leading bits (mod 2), rotate the bits left by one,
    and overwrite the last bit with the feedback.
    """
    state = 0
    constants = []
    for _ in range(48):
        bits = [int(b) for b in bin(state)[2:].zfill(6)]
        feedback = (1 + bits[0] + bits[1]) % 2
        bits.append(bits.pop(0))        # rotate left by one position
        bits[5] = feedback
        rc = ''.join(str(b) for b in bits)
        constants.append(rc)
        state = int(rc, 2)
    return constants
def round_counter():
    """Return the 48 six-bit round-constant strings of an LFSR-style counter.

    Fixed: the body still referenced the old camelCase names (RCi,
    RCi_one_list, RCi_list) after they were renamed, so every call raised
    NameError. All references now use one consistent snake_case name set.
    """
    r_ci = 0
    r_ci_list = []
    for _ in range(48):
        # Current value as a list of six bits.
        r_ci_bits = [int(d) for d in bin(r_ci)[2:].zfill(6)]
        # Feedback bit from the two leading bits.
        t = (1 + r_ci_bits[0] + r_ci_bits[1]) % 2
        r_ci_bits.append(r_ci_bits.pop(0))   # rotate left by one position
        r_ci_bits[5] = t
        r_ci_str = ''.join((str(e) for e in r_ci_bits))
        r_ci_list.append(r_ci_str)
        r_ci = int(r_ci_str, 2)
    return r_ci_list
# enter your file path DATA_PATH = "./lab1/data" # file names IN_FILE = "input.txt" OUT_FILE = "output.txt" def main(): lines = [] # open input file with open("{}/{}".format(DATA_PATH, IN_FILE), "r") as file: # reach each line for line in file: # print removing newline print(line.strip()) # append to table lines.append(line) # invert line order lines = lines[-1::-1] # open output file with open("{}/{}".format(DATA_PATH, OUT_FILE), "w") as file: for line in lines: file.write(line) if __name__ == "__main__": main()
# Path and file-name configuration for the line-reversal script.
# Fixed: main() still read the old upper-case names (DATA_PATH, IN_FILE,
# OUT_FILE), which are never defined -> NameError on the first open().
data_path = './lab1/data'
in_file = 'input.txt'
out_file = 'output.txt'

def main():
    """Read in_file, echo each line to stdout, then write all lines to
    out_file in reverse order."""
    lines = []
    with open('{}/{}'.format(data_path, in_file), 'r') as file:
        for line in file:
            print(line.strip())          # echo without the trailing newline
            lines.append(line)           # keep the raw line for rewriting
    lines = lines[-1::-1]                # reverse the line order
    with open('{}/{}'.format(data_path, out_file), 'w') as file:
        for line in lines:
            file.write(line)

if __name__ == '__main__':
    main()
#!/usr/bin/python # Copyright 2017 Hewlett-Packard Enterprise Development Company, L.P. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. def Integer(value): return value def ObjectName(value): return (value,) PHY_DRIVE_MIB_OUTPUT = { 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.1.2.0': { 'cpqDaPhyDrvCntlrIndex': {ObjectName('2.0'): Integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.1.2.1': { 'cpqDaPhyDrvCntlrIndex': {ObjectName('2.1'): Integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.1.2.2': { 'cpqDaPhyDrvCntlrIndex': {ObjectName('2.2'): Integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.1.2.3': { 'cpqDaPhyDrvCntlrIndex': {ObjectName('2.3'): Integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.2.2.0': { 'cpqDaPhyDrvIndex': {ObjectName('2.0'): Integer(0)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.2.2.1': { 'cpqDaPhyDrvIndex': {ObjectName('2.1'): Integer(1)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.2.2.2': { 'cpqDaPhyDrvIndex': {ObjectName('2.2'): Integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.2.2.3': { 'cpqDaPhyDrvIndex': {ObjectName('2.3'): Integer(3)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.45.2.0': { 'cpqDaPhyDrvSize': {ObjectName('2.0'): Integer(286102)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.45.2.1': { 'cpqDaPhyDrvSize': {ObjectName('2.1'): Integer(286102)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.45.2.2': { 'cpqDaPhyDrvSize': {ObjectName('2.2'): Integer(286102)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.45.2.3': { 'cpqDaPhyDrvSize': 
{ObjectName('2.3'): Integer(286102)}} }
def integer(value): return value def object_name(value): return (value,) phy_drive_mib_output = {'SNMPv2-SMI::enterprises.232.3.2.5.1.1.1.2.0': {'cpqDaPhyDrvCntlrIndex': {object_name('2.0'): integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.1.2.1': {'cpqDaPhyDrvCntlrIndex': {object_name('2.1'): integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.1.2.2': {'cpqDaPhyDrvCntlrIndex': {object_name('2.2'): integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.1.2.3': {'cpqDaPhyDrvCntlrIndex': {object_name('2.3'): integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.2.2.0': {'cpqDaPhyDrvIndex': {object_name('2.0'): integer(0)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.2.2.1': {'cpqDaPhyDrvIndex': {object_name('2.1'): integer(1)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.2.2.2': {'cpqDaPhyDrvIndex': {object_name('2.2'): integer(2)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.2.2.3': {'cpqDaPhyDrvIndex': {object_name('2.3'): integer(3)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.45.2.0': {'cpqDaPhyDrvSize': {object_name('2.0'): integer(286102)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.45.2.1': {'cpqDaPhyDrvSize': {object_name('2.1'): integer(286102)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.45.2.2': {'cpqDaPhyDrvSize': {object_name('2.2'): integer(286102)}}, 'SNMPv2-SMI::enterprises.232.3.2.5.1.1.45.2.3': {'cpqDaPhyDrvSize': {object_name('2.3'): integer(286102)}}}
class Account:
    """A minimal named bank account supporting deposits and withdrawals."""

    def __init__(self, name, balance):
        self.name = name
        self.balance = balance
        print('Account is created for {}'.format(self.name))

    def deposit(self, amount):
        """Add *amount* to the balance; non-positive amounts are ignored."""
        if amount <= 0:
            return
        self.balance += amount

    def withdraw(self, amount):
        """Subtract *amount* when it is positive and strictly below balance."""
        if not 0 < amount < self.balance:
            print('You dont have sufficient balance to withdraw')
            return
        self.balance -= amount

    def show_balance(self):
        """Print the current balance."""
        print('Your current balance is {}'.format(self.balance))


if __name__ == '__main__':
    holder = str(input('Please Enter Your name:\n'))
    opening_balance = 100.0
    account = Account(holder, opening_balance)
    account.show_balance()
    payment = float(input('Enter the ammount, which has to be deposited:\n'))
    account.deposit(payment)
    account.show_balance()
    payment = float(input('Enter the ammount, How much you need withdraw:\n'))
    account.withdraw(payment)
    account.show_balance()
class Account:
    """A minimal named bank account supporting deposits and withdrawals."""

    def __init__(self, name, balance):
        self.name = name
        self.balance = balance
        print('Account is created for {}'.format(self.name))

    def deposit(self, amount):
        # Only positive amounts are accepted; others are silently ignored.
        if amount > 0:
            self.balance += amount

    def withdraw(self, amount):
        # Withdrawal must be positive and strictly below the current balance.
        if 0 < amount < self.balance:
            self.balance -= amount
        else:
            print('You dont have sufficient balance to withdraw')

    def show_balance(self):
        print('Your current balance is {}'.format(self.balance))


if __name__ == '__main__':
    name = str(input('Please Enter Your name:\n'))
    act_balance = 100.0
    # Fixed: was `account(name, act_balance)` -- `account` is never defined
    # (NameError); the class is named Account.
    name1 = Account(name, act_balance)
    name1.show_balance()
    payment = float(input('Enter the ammount, which has to be deposited:\n'))
    name1.deposit(payment)
    name1.show_balance()
    payment = float(input('Enter the ammount, How much you need withdraw:\n'))
    name1.withdraw(payment)
    name1.show_balance()
def isPrime(number):
    """Record *number* in the global `code` if it is a three-digit prime.

    NOTE(review): trial-divides the whole range 2..number-1 (no early exit)
    and classifies 0 and 1 as prime; harmless here because only three-digit
    hits are recorded.
    """
    prime = True
    global code
    for c in range(2, number):
        if number % c == 0:
            prime = False
    if prime is True and len(str(number)) == 3:
        code = number

# Scan 0..1000 inclusive; the largest three-digit prime (997) is the last
# value written into `code`.
code = counter = 0
while True:
    isPrime(counter)
    if counter == 1000:
        break
    counter += 1
print(code)
# OUTPUT: 997
# The code was correct!
def is_prime(number):
    """Record *number* in the global `code` if it is a three-digit prime.

    Trial-divides the whole range 2..number-1; 0 and 1 come out "prime" but
    are never recorded because only three-digit hits update `code`.
    """
    prime = True
    global code
    for divisor in range(2, number):
        if number % divisor == 0:
            prime = False
    if prime is True and len(str(number)) == 3:
        code = number

# Scan 0..1000 inclusive; the last three-digit prime found (997) stays in
# `code` and is printed.
code = 0
for counter in range(1001):
    is_prime(counter)
print(code)
#!/usr/bin/env python # -*- coding: utf-8 -*- __author__ = 'ole' class Message: irc = None propagate = True # IRC-protocol attributes prefix = "" command = [] content = "" # IRC-command dependent attributes cmd = [] # bot-command within message nick = "" channel = None def __init__(self, irc, message): self.irc = irc # get prefix (hostname, etc.) prefix_end = 0 if len(message) > 0 and message[0] == ":": prefix_end = message.find(" ") self.prefix = message[1:prefix_end] prefix_end += 1 # get content content_start = message.find(" :") if ~content_start: self.content = message[content_start + 2:] self.command = message[prefix_end:content_start].split(" ") else: self.command = message[prefix_end:].split(" ") # get channel and/or nick if len(self.command) > 1 and len(self.command[1]) > 0 and self.command[1][0] == "#": self.channel = self.command[1] nick_end = self.prefix.find("!") if ~nick_end: self.nick = self.prefix[0:nick_end] # get command if self.content[0:irc.command_prefix_len] == irc.command_prefix: self.cmd = self.content[irc.command_prefix_len:].split() def get_events(self): events = ['on_receive'] if self.command[0] == "PRIVMSG": assert len(self.command) > 1 events.append('on_message') if self.command[1] == self.irc.ircNick: events.append('on_private_message') elif self.channel: events.append('on_channel_message') if len(self.cmd): events.append('on_command') elif self.command[0] == "KICK": assert len(self.command) > 2 if self.command[2] == self.irc.ircNick: events.append('on_kick') elif self.command[0] == "JOIN": events.append('on_join') elif self.command[0] == "PING": events.append('on_ping') elif self.command[0] == "NOTICE": events.append('on_notice') elif self.command[0] in ["QUIT", "PART"]: events.append('on_quit') return events def __str__(self): return "Message[{0}] <{1}>: {2}".format(" ".join(self.command), self.prefix, self.content)
__author__ = 'ole'


class Message:
    """One parsed IRC protocol line, plus any bot command found inside it."""

    irc = None          # owning IRC connection object
    propagate = True    # whether further handlers should see this message

    # IRC-protocol attributes
    prefix = ''         # sender prefix (hostmask) without the leading ':'
    command = []        # command word plus middle parameters, split on spaces
    content = ''        # trailing parameter (text after ' :')

    # IRC-command dependent attributes
    # NOTE(review): command/cmd are mutable class attributes; cmd is only
    # rebound per-instance when a bot command is present -- confirm no
    # handler mutates the shared default list.
    cmd = []            # bot-command tokens extracted from content
    nick = ''           # sender nick taken from the prefix
    channel = None      # channel parameter, when the message targets one

    def __init__(self, irc, message):
        self.irc = irc
        # Extract the prefix (hostmask etc.) if the raw line starts with ':'.
        prefix_end = 0
        if len(message) > 0 and message[0] == ':':
            prefix_end = message.find(' ')
            self.prefix = message[1:prefix_end]
            prefix_end += 1
        # Split off the trailing content. `~x` is 0 only when x == -1, so
        # `if ~content_start:` means "if ' :' was found".
        content_start = message.find(' :')
        if ~content_start:
            self.content = message[content_start + 2:]
            self.command = message[prefix_end:content_start].split(' ')
        else:
            self.command = message[prefix_end:].split(' ')
        # First middle parameter starting with '#' is treated as the channel.
        if len(self.command) > 1 and len(self.command[1]) > 0 and (self.command[1][0] == '#'):
            self.channel = self.command[1]
        # Nick is everything before '!' in the prefix (if any).
        nick_end = self.prefix.find('!')
        if ~nick_end:
            self.nick = self.prefix[0:nick_end]
        # If the content starts with the bot's command prefix, split the rest
        # into command tokens.
        if self.content[0:irc.command_prefix_len] == irc.command_prefix:
            self.cmd = self.content[irc.command_prefix_len:].split()

    def get_events(self):
        """Return the list of handler event names this message should trigger."""
        events = ['on_receive']
        if self.command[0] == 'PRIVMSG':
            assert len(self.command) > 1
            events.append('on_message')
            # Addressed to our own nick -> private; to a channel -> channel.
            if self.command[1] == self.irc.ircNick:
                events.append('on_private_message')
            elif self.channel:
                events.append('on_channel_message')
            if len(self.cmd):
                events.append('on_command')
        elif self.command[0] == 'KICK':
            assert len(self.command) > 2
            # Only fire on_kick when we are the one being kicked.
            if self.command[2] == self.irc.ircNick:
                events.append('on_kick')
        elif self.command[0] == 'JOIN':
            events.append('on_join')
        elif self.command[0] == 'PING':
            events.append('on_ping')
        elif self.command[0] == 'NOTICE':
            events.append('on_notice')
        elif self.command[0] in ['QUIT', 'PART']:
            events.append('on_quit')
        return events

    def __str__(self):
        return 'Message[{0}] <{1}>: {2}'.format(' '.join(self.command), self.prefix, self.content)
# Done by Carlos Amaral (2020/09/22) # SCU 4.7 - Range Function with a For Loop for i in range(1,10): print(i, " squared is ", i**2)
# Print the integers 1 through 9 together with their squares.
for value in range(1, 10):
    print(value, ' squared is ', value ** 2)
# Minimal Django settings module used by the test suite.
DEBUG = True

# SQLite backend for tests (no NAME configured in this fragment).
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
    }
}

INSTALLED_APPS = [
    'tests.testapp',
]

ROOT_URLCONF = 'tests.urls'
# Minimal Django settings module used by the test suite.
# Fixed: Django only honours settings whose names are all upper-case; the
# lower-cased variants (debug, databases, ...) are silently ignored, leaving
# the project with no database/app configuration.
DEBUG = True

DATABASES = {'default': {'ENGINE': 'django.db.backends.sqlite3'}}

INSTALLED_APPS = ['tests.testapp']

ROOT_URLCONF = 'tests.urls'
#!/usr/bin/env python # -*- coding: utf-8 -*- __author__ = "nebula" class BeFilteredException(Exception): def __init__(self, t): self._t = t @property def type(self): return self._t
__author__ = 'nebula'


# NOTE(review): PEP 8 CapWords would be `BeFilteredException`; the name is
# kept unchanged here because renaming a public class breaks callers.
class Befilteredexception(Exception):
    """Raised when something is filtered out; carries an opaque filter tag.

    The semantics of *t* are defined by the raising/catching code.
    """

    def __init__(self, t):
        # Store the tag as-is; exposed read-only via the `type` property.
        self._t = t

    @property
    def type(self):
        """The filter tag supplied at construction time."""
        return self._t
def gcd_iter(u, v):
    """Return the greatest common divisor of integers u and v.

    Iterative Euclidean algorithm; the result is always non-negative.
    """
    while v != 0:
        remainder = u % v
        u = v
        v = remainder
    return abs(u)
def gcd_iter(u, v):
    """Return the greatest common divisor of integers u and v.

    Iterative Euclidean algorithm; abs() makes the result non-negative even
    for negative inputs.
    """
    while v:
        (u, v) = (v, u % v)
    return abs(u)
# Silvio Dunst # Variables can be of type function, and we can call those variables. # This means that lists, tuples and dicts can have variables of type function in them, # so we could have a dict that stores the letter of the menu and the function associated with that letter. # We could access that function by that letter. (NOTE: that this is not more efficient to run, just more efficient to write) def fun1(): print("this is fun1") def fun2(): print("this is fun2") whichFun = fun1 whichFun() whichFun = fun2 whichFun()
def fun1():
    """Print a marker showing fun1 ran."""
    print('this is fun1')


def fun2():
    """Print a marker showing fun2 ran."""
    print('this is fun2')


# Demonstrate first-class functions: rebind the same variable to each
# function in turn and invoke it through that variable.
which_fun = fun1
which_fun()
which_fun = fun2
which_fun()
def isInterleave(s1: str, s2: str, s3: str) -> bool:
    """Return True if s3 can be formed by interleaving s1 and s2 in order."""
    def dfs(i, j, k):
        # dfs with memo: i/j/k index s1/s2/s3 respectively.
        if i == len(s1):
            return s2[j:] == s3[k:]
        if j == len(s2):
            return s1[i:] == s3[k:]
        if (i, j) in memo:
            return memo[(i, j)]
        res = False
        # Try consuming the next char from s1, else from s2 (short-circuits).
        if s1[i] == s3[k] and dfs(i + 1, j, k + 1) or s2[j] == s3[k] and dfs(i, j + 1, k + 1):
            res = True
        memo[(i, j)] = res
        return memo[(i,j)]
    # Lengths must add up exactly or no interleaving exists.
    if len(s1) + len(s2) != len(s3):
        return False
    memo = {}
    return dfs(0, 0, 0)


def interleave(s1, s2, s3):
    """Bottom-up DP variant of isInterleave."""
    m, n = len(s1), len(s2)
    if m + n != len(s3):
        return False
    # dp[i][j]: can s3[:i+j] be built from s1[:i] and s2[:j]?
    dp = [[False for _ in range(n + 1)] for _ in range(m + 1)]
    for i in range(m + 1):
        for j in range(n + 1):
            if i == 0 and j == 0:
                # both prefixes are empty
                dp[i][j] = True
            elif i == 0:
                dp[i][j] = dp[i][j - 1] and s2[j - 1] == s3[i + j - 1]
            elif j == 0:
                dp[i][j] = dp[i - 1][j] and s1[i - 1] == s3[i + j - 1]
            else:
                dp[i][j] = dp[i - 1][j] and s1[i - 1] == s3[i + j - 1] or dp[i][j - 1] and s2[j - 1] == s3[i + j - 1]
    return dp[m][n]
def is_interleave(s1: str, s2: str, s3: str) -> bool:
    """Return True if s3 can be formed by interleaving s1 and s2 in order.

    Memoised depth-first search over (index-into-s1, index-into-s2) states.
    """
    if len(s1) + len(s2) != len(s3):
        # Lengths must add up exactly or no interleaving exists.
        return False
    memo = {}

    def dfs(i, j, k):
        # i/j/k index s1/s2/s3 respectively.
        if i == len(s1):
            return s2[j:] == s3[k:]
        if j == len(s2):
            return s1[i:] == s3[k:]
        if (i, j) not in memo:
            # Try consuming the next char from s1, else from s2.
            memo[i, j] = (s1[i] == s3[k] and dfs(i + 1, j, k + 1)) or \
                         (s2[j] == s3[k] and dfs(i, j + 1, k + 1))
        return memo[i, j]

    return dfs(0, 0, 0)


def interleave(s1, s2, s3):
    """Bottom-up DP variant: dp[i][j] == can s3[:i+j] be built from
    s1[:i] and s2[:j]?"""
    m, n = len(s1), len(s2)
    if m + n != len(s3):
        return False
    dp = [[False] * (n + 1) for _ in range(m + 1)]
    dp[0][0] = True
    for j in range(1, n + 1):
        dp[0][j] = dp[0][j - 1] and s2[j - 1] == s3[j - 1]
    for i in range(1, m + 1):
        dp[i][0] = dp[i - 1][0] and s1[i - 1] == s3[i - 1]
        for j in range(1, n + 1):
            from_s1 = dp[i - 1][j] and s1[i - 1] == s3[i + j - 1]
            from_s2 = dp[i][j - 1] and s2[j - 1] == s3[i + j - 1]
            dp[i][j] = from_s1 or from_s2
    return dp[m][n]
# Advent of Code 2020, day 6: customs-declaration answer counting.
with open("inputs/6.txt") as f:
    input = f.read()

# Blank-line-separated blocks are groups; split() gives one string of
# answered questions per person.
groups = [group.split() for group in input.split('\n\n')]

# Part 1: questions answered by ANYONE in the group (union).
total_answers = 0
for group in groups:
    total_answers += len(set(''.join(group)))
print("Part 1 answer:", total_answers)

# Part 2: questions answered by EVERYONE in the group (intersection).
total_answers = 0
for group in groups:
    sets = list(map(set, group))
    total_answers += len(sets[0].intersection(*sets))
print("Part 2 answer:", total_answers)
# Advent of Code 2020, day 6: customs-declaration answer counting.
with open('inputs/6.txt') as f:
    input = f.read()

# Blank-line-separated blocks are groups; each group becomes a list of
# per-person answer strings.
groups = [block.split() for block in input.split('\n\n')]

# Part 1: questions answered by ANYONE in the group (union).
total_answers = sum(len(set(''.join(g))) for g in groups)
print('Part 1 answer:', total_answers)

# Part 2: questions answered by EVERYONE in the group (intersection).
total_answers = sum(len(set.intersection(*map(set, g))) for g in groups)
print('Part 2 answer:', total_answers)
#disorder def distinct_list(a): return list(set(a))
def distinct_list(a):
    """Return a list of the unique elements of *a* (order not preserved)."""
    unique_values = set(a)
    return [*unique_values]
# Split in_domain_dev.tsv into parallel files: sentence text -> val.src,
# label -> val.tgt.
# NOTE(review): the three file handles are never closed; CPython closes them
# at interpreter exit, but a `with` block would guarantee the flush.
f = open('in_domain_dev.tsv', 'r').readlines()
g1 = open('val.src', 'w')
g2 = open('val.tgt', 'w')
for line in f:
    # Four tab-separated columns; the 2nd is the label, the 4th the text
    # (assumes the CoLA column layout -- TODO confirm against the data file).
    line = line.split('\t')
    _, label, _, text = line
    g1.write('{}'.format(text))     # text keeps its trailing newline
    g2.write('{}\n'.format(label))
# Split in_domain_dev.tsv into parallel files: sentence text -> val.src,
# label -> val.tgt.
# Fixed: all three file handles were opened without ever being closed, so
# the output files were only flushed by interpreter-exit GC. `with` closes
# (and flushes) them deterministically.
with open('in_domain_dev.tsv', 'r') as tsv, \
        open('val.src', 'w') as g1, \
        open('val.tgt', 'w') as g2:
    for line in tsv:
        # Four tab-separated columns; the 2nd is the label, the 4th the text
        # (assumes the CoLA column layout -- TODO confirm against the file).
        _, label, _, text = line.split('\t')
        g1.write('{}'.format(text))     # text keeps its trailing newline
        g2.write('{}\n'.format(label))
# Read a 3x3 integer matrix from the user, then print it with each value
# centred in a 5-character column.
matriz = [[0,0,0],[0,0,0],[0,0,0]]
for l in range(0,3):
    for c in range(0,3):
        matriz[l][c] = int(input(f'Digite os valores para [{l}, {c}]: '))
print('=-='*30)
for l in range(0,3):
    for c in range(0,3):
        # ' ^5' centres the number in a field of width 5
        print(f'[{matriz[l][c]: ^5}', end = '')
    print()
# Read a 3x3 integer matrix from the user, then print it with each value
# centred in a 5-character column.
matriz = [[0] * 3 for _ in range(3)]
for row in range(3):
    for col in range(3):
        matriz[row][col] = int(input(f'Digite os valores para [{row}, {col}]: '))
print('=-=' * 30)
for row in range(3):
    for col in range(3):
        # ' ^5' centres the number in a field of width 5.
        print(f'[{matriz[row][col]: ^5}', end='')
    print()
def node_to_string(node, layer=0):
    """Render *node* and its descendants, one repr per line, '|'-indented by depth."""
    rendered = '|' * layer + repr(node) + '\n'
    return rendered + ''.join(
        node_to_string(child, layer + 1) for child in node.children
    )
def node_to_string(node, layer=0):
    """Build a depth-indented, one-node-per-line dump of the tree at *node*."""
    parts = ['|' * layer + repr(node) + '\n']
    for child in node.children:
        parts.append(node_to_string(child, layer + 1))
    return ''.join(parts)
# Payment-API configuration template; every value is a placeholder to be
# filled in per environment.
config = {
    "host_server": "<host_address>",
    # Credentials used when requesting an access token.
    'token_request': {
        'client_id': '<client_id>',
        'client_secret': '<client_secret>',
        'Ocp-Apim-Subscription-Key': '<Ocp-Apim-Subscription-Key> For Access Token',
    },
    # Headers for eCommerce product calls.
    'ecom': {
        'Content-Type': 'application/json',
        'Ocp-Apim-Subscription-Key': '<Ocp-Apim-Subscription-Key> For the eCommerce product',
    },
    # Skeleton body for initiating a payment.
    'ecom_initiate_payment_body': {
        "merchantInfo": {
            "merchantSerialNumber": "<merchantSerialNumber>",
        },
    },
}
# Duplicate copy of the payment-API configuration, assembled from named parts.
_token_request = {
    'client_id': '<client_id>',
    'client_secret': '<client_secret>',
    'Ocp-Apim-Subscription-Key': '<Ocp-Apim-Subscription-Key> For Access Token',
}
_ecom = {
    'Content-Type': 'application/json',
    'Ocp-Apim-Subscription-Key': '<Ocp-Apim-Subscription-Key> For the eCommerce product',
}
config = {
    'host_server': '<host_address>',
    'token_request': _token_request,
    'ecom': _ecom,
    'ecom_initiate_payment_body': {
        'merchantInfo': {'merchantSerialNumber': '<merchantSerialNumber>'},
    },
}
# A collection of classic DP / string-algorithm exercises, each followed by
# a small driver. Several read from stdin.

def calc(n):
    """Bytelandian coin exchange: max money obtainable from a coin of value n."""
    if n < 12:
        return n
    if dp[n]:
        return dp[n]
    best = calc(n // 2) + calc(n // 3) + calc(n // 4)
    dp[n] = max(n, best)
    return dp[n]

dp = [0] * 1000000
n = int(input())
print(calc(n))

# ==============================

def decode(s):
    """Count decodings of the digit string s (1-26 map to letters)."""
    for i in range(2, len(s) + 1):
        prev = int(s[i - 2])
        cur = int(s[i - 1])
        if prev >= 3 or prev == 0:
            dp[i] = dp[i - 1]
        elif cur == 0:
            dp[i] = dp[i - 2]
        elif prev == 1:
            dp[i] = dp[i - 1] + dp[i - 2]
        elif prev == 2:
            dp[i] = dp[i - 1] if cur > 6 else dp[i - 1] + dp[i - 2]
    return dp[len(s)]

n = int(input())
dp = [0] * (n + 1)
dp[0] = 1
dp[1] = 1
print(decode(str(n)))

# ==============================

def minCost(cost, m, n):
    """Min-cost path from (0,0) to (m,n) moving right, down or diagonally."""
    dp = [[0 for _ in range(3)] for _ in range(3)]
    dp[0][0] = cost[0][0]
    for i in range(1, m + 1):
        dp[i][0] = dp[i - 1][0] + cost[i][0]
    for j in range(1, n + 1):
        dp[0][j] = dp[0][j - 1] + cost[0][j]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            dp[i][j] = cost[i][j] + min(dp[i - 1][j], dp[i - 1][j - 1], dp[i][j - 1])
    return dp[m][n]

cost = [[1, 2, 3], [4, 8, 2], [1, 5, 3]]
print(minCost(cost, 2, 2))

# ==============================

def editDistance_recur(s1, s2, m, n):
    """Edit distance by plain recursion (exponential)."""
    if m == 0:
        return n
    if n == 0:
        return m
    if s1[m - 1] == s2[n - 1]:
        return editDistance_recur(s1, s2, m - 1, n - 1)
    return 1 + min(
        editDistance_recur(s1, s2, m, n - 1),      # insert
        editDistance_recur(s1, s2, m - 1, n),      # delete
        editDistance_recur(s1, s2, m - 1, n - 1),  # replace
    )

print("edit distance using recursion : ", editDistance_recur("sunday", "saturday", 6, 8))

# ==============================

def editDistance_dp(s1, s2, m, n):
    """Edit distance via the standard (m+1) x (n+1) DP table."""
    for i in range(m + 1):
        for j in range(n + 1):
            if i == 0:
                dp[i][j] = j
            elif j == 0:
                dp[i][j] = i
            elif s1[i - 1] == s2[j - 1]:
                dp[i][j] = dp[i - 1][j - 1]
            else:
                dp[i][j] = 1 + min(
                    dp[i][j - 1],      # insert
                    dp[i - 1][j],      # delete
                    dp[i - 1][j - 1],  # replace
                )
    return dp[m][n]

m = 6
n = 8
dp = [[0 for _ in range(n + 1)] for _ in range(m + 1)]
print("edit distance using dp : ", editDistance_dp("sunday", "saturday", m, n))

# ==============================

def lcsLength(x, y):
    """Length of the longest common subsequence of x and y."""
    m, n = len(x), len(y)
    dp = [[None] * (n + 1) for _ in range(m + 1)]
    for i in range(m + 1):
        for j in range(n + 1):
            if i == 0 or j == 0:
                dp[i][j] = 0
            elif x[i - 1] == y[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + 1
            else:
                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
    return dp[m][n]

print("length of LCS : ", lcsLength("AGGTAB", "GXTXAYB"))

# ==============================

def lcsPrint(x, y):
    """Recover one longest common subsequence (as a list of chars + '')."""
    m, n = len(x), len(y)
    dp = [[0 for _ in range(n + 1)] for _ in range(m + 1)]
    for i in range(m + 1):
        for j in range(n + 1):
            if i == 0 or j == 0:
                dp[i][j] = 0
            elif x[i - 1] == y[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + 1
            else:
                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
    index = dp[m][n]
    lcs = [""] * (index + 1)
    lcs[index] = ""
    i, j = m, n
    # Walk the table backwards to rebuild the subsequence.
    while i > 0 and j > 0:
        if x[i - 1] == y[j - 1]:
            lcs[index - 1] = x[i - 1]
            i -= 1
            j -= 1
            index -= 1
        elif dp[i - 1][j] > dp[i][j - 1]:
            i -= 1
        else:
            j -= 1
    return lcs

print("LCS : ", lcsPrint("AGGTAB", "GXTXAYB"))

# ==============================

R = 3
C = 5

def issafe(i, j):
    """True when (i, j) lies inside the R x C grid."""
    return 0 <= i < R and 0 <= j < C

def rotOranges(v):
    """Minutes until all fresh oranges (1) rot next to rotten ones (2); -1 if impossible."""
    changed = False
    no = 2
    while True:
        for i in range(R):
            for j in range(C):
                if v[i][j] == no:
                    for di, dj in ((1, 0), (0, 1), (-1, 0), (0, -1)):
                        if issafe(i + di, j + dj) and v[i + di][j + dj] == 1:
                            v[i + di][j + dj] = v[i][j] + 1
                            changed = True
        if not changed:
            break
        changed = False
        no += 1
    for i in range(R):
        for j in range(C):
            if v[i][j] == 1:
                return -1
    return no - 2

if __name__ == "__main__":
    v = [[2, 1, 0, 2, 1], [1, 0, 1, 2, 1], [1, 0, 0, 2, 1]]
    print("Max time incurred: ", rotOranges(v))

# ==============================
# KMP pattern searching

def KMPSearch(pat, txt):
    """Print every index at which pat occurs in txt."""
    M = len(pat)
    N = len(txt)
    lps = [0] * M
    j = 0  # index into pat
    computeLPSArray(pat, M, lps)
    i = 0  # index into txt
    while i < N:
        if pat[j] == txt[i]:
            i += 1
            j += 1
        if j == M:
            print("Found pattern at index " + str(i - j))
            j = lps[j - 1]
        elif i < N and pat[j] != txt[i]:
            if j != 0:
                j = lps[j - 1]
            else:
                i += 1

def computeLPSArray(pat, M, lps):
    """Fill lps with longest-proper-prefix-that-is-suffix lengths for pat."""
    length = 0  # length of the previous longest prefix suffix
    i = 1
    while i < M:
        if pat[i] == pat[length]:
            length += 1
            lps[i] = length
            i += 1
        elif length != 0:
            length = lps[length - 1]
        else:
            lps[i] = 0
            i += 1

txt = "ABABDABACDABABCABAB"
pat = "ABABCABAB"
KMPSearch(pat, txt)
# ==============================
# Snake-case copy of the DP / string-algorithm collection. BUG-FIXES below:
# the grid constants and the KMP locals had been renamed without updating
# their uses, producing NameErrors at runtime.

def calc(n):
    """Bytelandian coin exchange: max money obtainable from a coin of value n."""
    if n < 12:
        return n
    if dp[n]:
        return dp[n]
    x = calc(n // 2) + calc(n // 3) + calc(n // 4)
    dp[n] = max(n, x)
    return dp[n]

dp = [0 for i in range(1000000)]
n = int(input())
print(calc(n))

def decode(s):
    """Count decodings of the digit string s (1-26 map to letters)."""
    for i in range(2, len(s) + 1):
        prev = int(s[i - 2])
        cur = int(s[i - 1])
        if prev >= 3 or prev == 0:
            dp[i] = dp[i - 1]
        elif cur == 0:
            dp[i] = dp[i - 2]
        elif prev == 1:
            dp[i] = dp[i - 1] + dp[i - 2]
        elif prev == 2:
            if cur > 6:
                dp[i] = dp[i - 1]
            else:
                dp[i] = dp[i - 1] + dp[i - 2]
    return dp[len(s)]

n = int(input())
dp = [0 for i in range(n + 1)]
dp[0] = 1
dp[1] = 1
print(decode(str(n)))

def min_cost(cost, m, n):
    """Min-cost path from (0,0) to (m,n) moving right, down or diagonally."""
    dp = [[0 for i in range(3)] for i in range(3)]
    dp[0][0] = cost[0][0]
    for i in range(1, m + 1):
        dp[i][0] = dp[i - 1][0] + cost[i][0]
    for j in range(1, n + 1):
        dp[0][j] = dp[0][j - 1] + cost[0][j]
    for i in range(1, m + 1):
        for j in range(1, n + 1):
            dp[i][j] = cost[i][j] + min(dp[i - 1][j], dp[i - 1][j - 1], dp[i][j - 1])
    return dp[m][n]

cost = [[1, 2, 3], [4, 8, 2], [1, 5, 3]]
print(min_cost(cost, 2, 2))

def edit_distance_recur(s1, s2, m, n):
    """Edit distance by plain recursion (exponential)."""
    if m == 0:
        return n
    if n == 0:
        return m
    if s1[m - 1] == s2[n - 1]:
        return edit_distance_recur(s1, s2, m - 1, n - 1)
    return 1 + min(
        edit_distance_recur(s1, s2, m, n - 1),      # insert
        edit_distance_recur(s1, s2, m - 1, n),      # delete
        edit_distance_recur(s1, s2, m - 1, n - 1),  # replace
    )

print('edit distance using recursion : ', edit_distance_recur('sunday', 'saturday', 6, 8))

def edit_distance_dp(s1, s2, m, n):
    """Edit distance via the standard (m+1) x (n+1) DP table."""
    for i in range(m + 1):
        for j in range(n + 1):
            if i == 0:
                dp[i][j] = j
            elif j == 0:
                dp[i][j] = i
            elif s1[i - 1] == s2[j - 1]:
                dp[i][j] = dp[i - 1][j - 1]
            else:
                dp[i][j] = 1 + min(
                    dp[i][j - 1],      # insert
                    dp[i - 1][j],      # delete
                    dp[i - 1][j - 1],  # replace
                )
    return dp[m][n]

m = 6
n = 8
dp = [[0 for i in range(n + 1)] for j in range(m + 1)]
print('edit distance using dp : ', edit_distance_dp('sunday', 'saturday', m, n))

def lcs_length(x, y):
    """Length of the longest common subsequence of x and y."""
    m = len(x)
    n = len(y)
    dp = [[None] * (n + 1) for i in range(m + 1)]
    for i in range(m + 1):
        for j in range(n + 1):
            if i == 0 or j == 0:
                dp[i][j] = 0
            elif x[i - 1] == y[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + 1
            else:
                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
    return dp[m][n]

print('length of LCS : ', lcs_length('AGGTAB', 'GXTXAYB'))

def lcs_print(x, y):
    """Recover one longest common subsequence (as a list of chars + '')."""
    m = len(x)
    n = len(y)
    dp = [[0 for i in range(n + 1)] for j in range(m + 1)]
    for i in range(m + 1):
        for j in range(n + 1):
            if i == 0 or j == 0:
                dp[i][j] = 0
            elif x[i - 1] == y[j - 1]:
                dp[i][j] = dp[i - 1][j - 1] + 1
            else:
                dp[i][j] = max(dp[i - 1][j], dp[i][j - 1])
    index = dp[m][n]
    lcs = [''] * (index + 1)
    lcs[index] = ''
    i = m
    j = n
    while i > 0 and j > 0:
        if x[i - 1] == y[j - 1]:
            lcs[index - 1] = x[i - 1]
            i -= 1
            j -= 1
            index -= 1
        elif dp[i - 1][j] > dp[i][j - 1]:
            i -= 1
        else:
            j -= 1
    return lcs

print('LCS : ', lcs_print('AGGTAB', 'GXTXAYB'))

# BUG-FIX: these were renamed to lowercase r/c, but issafe/rot_oranges read
# R and C — restore the names the code actually uses.
R = 3
C = 5

def issafe(i, j):
    """True when (i, j) lies inside the R x C grid."""
    if i >= 0 and i < R and (j >= 0) and (j < C):
        return True
    return False

def rot_oranges(v):
    """Minutes until all fresh oranges (1) rot next to rotten ones (2); -1 if impossible."""
    changed = False
    no = 2
    while True:
        for i in range(R):
            for j in range(C):
                if v[i][j] == no:
                    if issafe(i + 1, j) and v[i + 1][j] == 1:
                        v[i + 1][j] = v[i][j] + 1
                        changed = True
                    if issafe(i, j + 1) and v[i][j + 1] == 1:
                        v[i][j + 1] = v[i][j] + 1
                        changed = True
                    if issafe(i - 1, j) and v[i - 1][j] == 1:
                        v[i - 1][j] = v[i][j] + 1
                        changed = True
                    if issafe(i, j - 1) and v[i][j - 1] == 1:
                        v[i][j - 1] = v[i][j] + 1
                        changed = True
        if not changed:
            break
        changed = False
        no += 1
    for i in range(R):
        for j in range(C):
            if v[i][j] == 1:
                return -1
    return no - 2

if __name__ == '__main__':
    v = [[2, 1, 0, 2, 1], [1, 0, 1, 2, 1], [1, 0, 0, 2, 1]]
    print('Max time incurred: ', rot_oranges(v))

def kmp_search(pat, txt):
    """Print every index at which pat occurs in txt (KMP)."""
    # BUG-FIX: the body referenced undefined M/N after the rename to m/n.
    m = len(pat)
    n = len(txt)
    lps = [0] * m
    j = 0
    compute_lps_array(pat, m, lps)
    i = 0
    while i < n:
        if pat[j] == txt[i]:
            i += 1
            j += 1
        if j == m:
            print('Found pattern at index ' + str(i - j))
            j = lps[j - 1]
        elif i < n and pat[j] != txt[i]:
            if j != 0:
                j = lps[j - 1]
            else:
                i += 1

def compute_lps_array(pat, M, lps):
    """Fill lps with longest-proper-prefix-that-is-suffix lengths for pat."""
    length = 0  # renamed from `len` to stop shadowing the builtin
    i = 1
    while i < M:
        if pat[i] == pat[length]:
            length += 1
            lps[i] = length
            i += 1
        elif length != 0:
            length = lps[length - 1]
        else:
            lps[i] = 0
            i += 1

txt = 'ABABDABACDABABCABAB'
pat = 'ABABCABAB'
kmp_search(pat, txt)
'''For a range of numbers starting at 2, determine whether the number is
'perfect', 'abundant' or 'deficient',
'''
topNum = int(input("What is the upper number for the range:"))
theNum = 2
while theNum <= topNum:
    # sum the proper divisors of theNum
    divisorTotal = 0
    for divisor in range(1, theNum):
        if theNum % divisor == 0:
            divisorTotal += divisor
    # classify by comparing the number against its divisor sum
    if theNum == divisorTotal:
        print(theNum, "is perfect")
    elif theNum < divisorTotal:
        print(theNum, "is abundant")
    else:
        print(theNum, "is deficient")
    theNum += 1
"""For a range of numbers starting at 2, determine whether the number
is 'perfect', 'abundant' or 'deficient',
"""
# BUG-FIX: the variables were renamed to snake_case but every later use
# still referenced the old camelCase names (topNum, theNum, sumOfDivisors),
# which raised NameError on the first loop iteration.
top_num = input('What is the upper number for the range:')
top_num = int(top_num)
the_num = 2
while the_num <= top_num:
    # sum the proper divisors of the_num
    divisor = 1
    sum_of_divisors = 0
    while divisor < the_num:
        if the_num % divisor == 0:
            sum_of_divisors = sum_of_divisors + divisor
        divisor = divisor + 1
    # classify by comparing the number against its divisor sum
    if the_num == sum_of_divisors:
        print(the_num, 'is perfect')
    elif the_num < sum_of_divisors:
        print(the_num, 'is abundant')
    else:
        print(the_num, 'is deficient')
    the_num += 1
def div(n1, n2):
    """Print the remainder left after floor-dividing n1 by n2."""
    quotient = n1 // n2
    print(n1 - n2 * quotient)

while True:
    try:
        num1 = float(input("Please enter the first number:"))
        num2 = float(input("Please enter the second number:"))
    except ValueError:
        print("Please enter the number correctly.")
    else:
        div(num1, num2)
        break
def div(n1, n2):
    """Print the remainder of n1 divided by n2, computed from floor division."""
    d = n1 // n2
    remainder = n1 - n2 * d
    print(remainder)

valid_input = False
while not valid_input:
    try:
        num1 = float(input('Please enter the first number:'))
        num2 = float(input('Please enter the second number:'))
    except ValueError:
        print('Please enter the number correctly.')
    else:
        valid_input = True
        div(num1, num2)
#!/usr/bin/env python
# coding=utf-8

def concat(*args, sep="/"):
    """Join all positional string arguments with the keyword-only *sep*."""
    joined = sep.join(args)
    return joined

concat("earth", "mars", "venus")
concat("earth", "mars", "venus", sep=".")
def concat(*args, sep='/'):
    """Return the positional arguments joined with *sep* (default '/')."""
    return sep.join(args)

# demo calls (return values discarded)
concat('earth', 'mars', 'venus')
concat('earth', 'mars', 'venus', sep='.')
class Television:
    """A minimal TV: a power toggle and channel up/down that only work when on."""

    def __init__(self):
        self.ligada = False  # powered off initially
        self.canal = 4

    def power(self):
        # Toggle the power state.
        self.ligada = not self.ligada

    def canal_up(self):
        if self.ligada:
            self.canal += 1

    def canal_down(self):
        if self.ligada:
            self.canal -= 1

# demo: toggle power twice, then try to change channel while off
tv = Television()
print(tv.ligada)
tv.power()
print(tv.ligada)
tv.power()
print(tv.ligada)
tv.canal_up()
tv.canal_up()
print(tv.canal)
class Television:
    """A minimal TV: a power toggle and channel up/down that only work when on."""

    def __init__(self):
        self.ligada = False  # powered off initially
        self.canal = 4

    def power(self):
        # Toggle the power state.
        if self.ligada:
            self.ligada = False
        else:
            self.ligada = True

    def canal_up(self):
        if self.ligada:
            self.canal += 1

    def canal_down(self):
        if self.ligada:
            self.canal -= 1

# BUG-FIX: the demo called television() (lowercase), which raised NameError;
# the class is Television.
tv = Television()
print(tv.ligada)
tv.power()
print(tv.ligada)
tv.power()
print(tv.ligada)
tv.canal_up()
tv.canal_up()
print(tv.canal)
# Use words.txt as the file name
fname = input("Enter file name: ")
fh = open(fname)
# BUG-FIX: str has no isUpper() method (AttributeError); it is isupper().
# NOTE(review): this loop iterates the characters of the *filename*, not the
# opened file `fh` — presumably the file's contents were intended; confirm.
for inp in fname:
    print(inp.isupper())
print('hello im the impostor, changes made by satyam raj')
print('ima a hacker, coz im doing this bullshit for hacktoberfest')
fname = input('Enter file name: ')
fh = open(fname)
# BUG-FIX: str has no isUpper() method (AttributeError); it is isupper().
# NOTE(review): iterates the filename string, not `fh` — confirm intent.
for inp in fname:
    print(inp.isupper())
print('hello im the impostor, changes made by satyam raj')
print('ima a hacker, coz im doing this bullshit for hacktoberfest')
# Definition for a binary tree node.
# class TreeNode:
#     def __init__(self, val=0, left=None, right=None):
#         self.val = val
#         self.left = left
#         self.right = right
class Solution:
    """LeetCode 437: count downward paths in a binary tree summing to a target."""

    # BUG-FIX: the TreeNode annotations are quoted — the class is only a
    # comment here, so an unquoted annotation raised NameError at import.
    def dfs(self, node: "TreeNode", sum: int) -> int:
        """Count downward paths starting at *node* whose values sum to *sum*."""
        # NOTE: `sum` shadows the builtin but is kept for interface parity.
        res = 1 if node.val == sum else 0
        if node.left:
            res += self.dfs(node.left, sum - node.val)
        if node.right:
            res += self.dfs(node.right, sum - node.val)
        return res

    def pathSum(self, root: "TreeNode", sum: int) -> int:
        """Count all downward paths (any start node) summing to *sum*."""
        if not root:
            return 0
        result = 0
        nodes = [root]
        # DFS over every node, counting paths that start there.
        while nodes:
            node = nodes.pop()
            result += self.dfs(node, sum)
            if node.left:
                nodes.append(node.left)
            if node.right:
                nodes.append(node.right)
        return result
class Solution:
    """LeetCode 437 (snake_case copy): count downward paths summing to a target."""

    # BUG-FIX: TreeNode is not defined in this module, so the unquoted
    # annotations raised NameError at class-creation time; quote them.
    def dfs(self, node: "TreeNode", sum: int) -> int:
        """Count downward paths starting at *node* whose values sum to *sum*."""
        res = 1 if node.val == sum else 0
        if node.left:
            res += self.dfs(node.left, sum - node.val)
        if node.right:
            res += self.dfs(node.right, sum - node.val)
        return res

    def path_sum(self, root: "TreeNode", sum: int) -> int:
        """Count all downward paths (starting at any node) summing to *sum*."""
        if not root:
            return 0
        result = 0
        nodes = []
        nodes.append(root)
        while nodes:
            node = nodes.pop()
            result += self.dfs(node, sum)
            if node.left:
                nodes.append(node.left)
            if node.right:
                nodes.append(node.right)
        return result
def sssi(M, s):
    """Greedy subset-sum over ascending list M.

    Returns a 0/1 pick vector (aligned with M) whose chosen values sum to s,
    or prints '!!!!!' and returns None when the greedy pass fails.
    """
    picks = []
    for value in M[::-1]:
        if s >= value:
            s -= value
            picks.append(1)
        else:
            picks.append(0)
    if s == 0:
        return picks[::-1]
    print("!!!!!")
def sssi(M, s):
    """Greedy subset-sum over ascending list M.

    Returns a 0/1 pick vector (aligned with M) whose chosen values sum to s,
    or prints '!!!!!' and returns None when the greedy pass fails.
    """
    # BUG-FIX: the accumulator was declared as `l` but appended to as `L`,
    # raising NameError on the first iteration; use one consistent name.
    L = []
    for i in M[::-1]:
        if s >= i:
            s -= i
            L.append(1)
        else:
            L.append(0)
    if s == 0:
        return L[::-1]
    print('!!!!!')
# Package metadata for the `consign` distribution (read by setup tooling).
__title__ = 'consign'
__description__ = 'Python storage for humans.'
__url__ = 'https://requests.readthedocs.io'
__version__ = '0.2.2'
__author__ = 'Daniel Jadraque'
__author_email__ = 'jadraque@hey.com'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2021 Daniel Jadraque'
# Duplicate copy of the `consign` package metadata.
__title__ = 'consign'
__description__ = 'Python storage for humans.'
__url__ = 'https://requests.readthedocs.io'
__version__ = '0.2.2'
__author__ = 'Daniel Jadraque'
__author_email__ = 'jadraque@hey.com'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2021 Daniel Jadraque'
# Normalize the user's name and report whether it contains "Silva".
nome = input("Digite seu nome: ").strip().title()
print("No seu nome tem silva? {}".format("Silva" in nome))
print("O seu nome: {}".format(nome))
# Duplicate copy: title-case the name and check for "Silva".
nome = input('Digite seu nome: ')
nome = nome.strip().title()
tem_silva = 'Silva' in nome
print('No seu nome tem silva? {}'.format(tem_silva))
print('O seu nome: {}'.format(nome))
class Employee:
    """An hourly worker; tracks the amount owed plus a live instance count."""

    numEmployee = 0  # class-wide count of live Employee objects

    def __init__(self, name, rate):
        self.owed = 0
        self.name = name
        self.rate = rate
        Employee.numEmployee += 1

    def __repr__(self):
        return "a custom object (%r)" % self.name

    def __del__(self):
        Employee.numEmployee -= 1

    def hours(self, num_hours):
        """Record worked hours at the standard rate."""
        self.owed += num_hours * self.rate
        return "%.2f hours worked" % num_hours

    def pay(self):
        """Zero out the balance and report the payment."""
        self.owed = 0
        return "payed %s" % self.name


class SpecialEmployee(Employee):
    """An Employee who also earns a flat bonus on every hours entry."""

    def __init__(self, name, rate, bonus):
        super().__init__(name, rate)
        self.bonus = bonus

    def hours(self, num_hours):
        self.owed += num_hours * self.rate + self.bonus
        return "%.2f hours worked" % num_hours
class Employee:
    """An hourly worker; tracks the amount owed plus a live instance count."""

    # class-wide count of live Employee objects
    num_employee = 0

    def __init__(self, name, rate):
        self.owed = 0
        self.name = name
        self.rate = rate
        # BUG-FIX: the class attribute was renamed to num_employee but the
        # methods still bumped Employee.numEmployee (AttributeError).
        Employee.num_employee += 1

    def __repr__(self):
        return 'a custom object (%r)' % self.name

    def __del__(self):
        Employee.num_employee -= 1

    def hours(self, num_hours):
        """Record worked hours at the standard rate."""
        self.owed += num_hours * self.rate
        return '%.2f hours worked' % num_hours

    def pay(self):
        """Zero out the balance and report the payment."""
        self.owed = 0
        return 'payed %s' % self.name


class Specialemployee(Employee):
    """An Employee who also earns a flat bonus on every hours entry."""

    def __init__(self, name, rate, bonus):
        Employee.__init__(self, name, rate)
        self.bonus = bonus

    def hours(self, num_hours):
        self.owed += num_hours * self.rate + self.bonus
        return '%.2f hours worked' % num_hours
def get_unique_iterable_objects_in_order(iterable):
    """Return the unique objects of *iterable*, keeping first-seen order.

    Membership is tested against a list (O(n) per element) so that
    unhashable objects are supported as well.
    """
    seen = []
    for item in iterable:
        if item not in seen:
            seen.append(item)
    return seen
def get_unique_iterable_objects_in_order(iterable):
    """First-occurrence-ordered deduplication; works for unhashable items too."""
    unique = []
    keep = unique.append
    for obj in iterable:
        if obj not in unique:
            keep(obj)
    return unique
# -*- mode: python ; coding: utf-8 -*-
# PyInstaller build spec for the Stock_Market application
# (Analysis/PYZ/EXE are injected by PyInstaller when the spec runs).

block_cipher = None

a = Analysis(
    ['D:\\temp\\main.py'],
    pathex=['D:\\temp'],
    binaries=[],
    datas=[],
    hiddenimports=[],
    hookspath=[],
    runtime_hooks=[],
    excludes=[],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False,
)
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
exe = EXE(
    pyz,
    a.scripts,
    a.binaries,
    a.zipfiles,
    a.datas,
    [],
    name='Stock_Market',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    upx_exclude=[],
    runtime_tmpdir=None,
    console=False,
)

# Release notes, version 5:
# - share prices now refresh automatically
# - bug fixes
# PyInstaller build spec (duplicate copy).
# BUG-FIX: the builder names were lower-cased to analysis/pyz/exe, which are
# undefined — PyInstaller injects Analysis, PYZ and EXE (capitalized) into
# the spec namespace; restore the real names.
block_cipher = None

a = Analysis(
    ['D:\\temp\\main.py'],
    pathex=['D:\\temp'],
    binaries=[],
    datas=[],
    hiddenimports=[],
    hookspath=[],
    runtime_hooks=[],
    excludes=[],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False,
)
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
exe = EXE(
    pyz,
    a.scripts,
    a.binaries,
    a.zipfiles,
    a.datas,
    [],
    name='Stock_Market',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    upx_exclude=[],
    runtime_tmpdir=None,
    console=False,
)
# Pipeline path constants for the SRP007412 (platypus) RNA-seq run.
RAWDATA_DIR = '/staging/as/skchoudh/rna-seq-datasets/single/ornithorhynchus_anatinus/SRP007412'
OUT_DIR = '/staging/as/skchoudh/rna-seq-output/ornithorhynchus_anatinus/SRP007412'
# Ensembl cDNA FASTA and its prebuilt kallisto index.
CDNA_FA_GZ = '/home/cmb-panasas2/skchoudh/genomes/ornithorhynchus_anatinus/cdna/Ornithorhynchus_anatinus.OANA5.cdna.all.fa.gz'
CDNA_IDX = '/home/cmb-panasas2/skchoudh/genomes/ornithorhynchus_anatinus/cdna/Ornithorhynchus_anatinus.OANA5.cdna.all.kallisto.index'
# Duplicate copy of the SRP007412 pipeline paths (lowercase names).
rawdata_dir = '/staging/as/skchoudh/rna-seq-datasets/single/ornithorhynchus_anatinus/SRP007412'
out_dir = '/staging/as/skchoudh/rna-seq-output/ornithorhynchus_anatinus/SRP007412'
# Ensembl cDNA FASTA and its prebuilt kallisto index.
cdna_fa_gz = '/home/cmb-panasas2/skchoudh/genomes/ornithorhynchus_anatinus/cdna/Ornithorhynchus_anatinus.OANA5.cdna.all.fa.gz'
cdna_idx = '/home/cmb-panasas2/skchoudh/genomes/ornithorhynchus_anatinus/cdna/Ornithorhynchus_anatinus.OANA5.cdna.all.kallisto.index'
def say_hello(name):
    """Greet *name* on stdout."""
    print("Hello,", name)

say_hello(input("What is your name? "))
def say_hello(name):
    """Print a greeting for *name*."""
    greeting = 'Hello,'
    print(greeting, name)

user_name = input('What is your name? ')
say_hello(user_name)
# xsv example records: each entry maps a name to its staff/dev/office/role info.
exampleVars = [
    {
        'Name': 'xH',
        'Info': {
            'Staff': ['Council'],
            'Dev': ['Im Dynamic'],
            'Office': ['Couch'],
            'Role': [],
        },
    },
    {
        'Name': 'xsv',
        'Info': {
            'Staff': ['Im Dynamic'],
            'Dev': ['@Blynd'],
            'Office': [],
            'Role': ['Development Team'],
        },
    },
]
# Duplicate copy of the xsv example records (snake_case name).
_xh = {
    'Name': 'xH',
    'Info': {'Staff': ['Council'], 'Dev': ['Im Dynamic'], 'Office': ['Couch'], 'Role': []},
}
_xsv = {
    'Name': 'xsv',
    'Info': {'Staff': ['Im Dynamic'], 'Dev': ['@Blynd'], 'Office': [], 'Role': ['Development Team']},
}
example_vars = [_xh, _xsv]
# Training hyper-parameters for the traffic classifier.
BATCH_SIZE = 128
EPOCHS = 120
LR = 0.001
NGRAM = 10
NUM_CLASS = 20
EMBEDDING_DIM = 100
CUDA = True
DEVICE = 3
DEBUG = False

# Active dataset (earlier label maps / traffic files were removed here;
# see version control history for the alternatives).
FILENAME = '../bsnn/data/20_header_payload_all.traffic'
LABELS = {
    'reddit': 0, 'facebook': 1, 'NeteaseMusic': 2, 'twitter': 3, 'qqmail': 4,
    'instagram': 5, 'weibo': 6, 'iqiyi': 7, 'imdb': 8, 'TED': 9,
    'douban': 10, 'amazon': 11, 'youtube': 12, 'JD': 13, 'youku': 14,
    'baidu': 15, 'google': 16, 'tieba': 17, 'taobao': 18, 'bing': 19,
}
test_percent = 0.2

# model checkpoint paths
load_model_name = 'mymodel/ulti.model1'
save_model_name = 'mymodel/ulti.model2'
# Duplicate (lowercase) copy of the traffic-classifier hyper-parameters.
batch_size = 128
epochs = 120
lr = 0.001
ngram = 10
num_class = 20
embedding_dim = 100
cuda = True
device = 3
debug = False

filename = '../bsnn/data/20_header_payload_all.traffic'
labels = {
    'reddit': 0, 'facebook': 1, 'NeteaseMusic': 2, 'twitter': 3, 'qqmail': 4,
    'instagram': 5, 'weibo': 6, 'iqiyi': 7, 'imdb': 8, 'TED': 9,
    'douban': 10, 'amazon': 11, 'youtube': 12, 'JD': 13, 'youku': 14,
    'baidu': 15, 'google': 16, 'tieba': 17, 'taobao': 18, 'bing': 19,
}
test_percent = 0.2

# model checkpoint paths
load_model_name = 'mymodel/ulti.model1'
save_model_name = 'mymodel/ulti.model2'
def inputGrades(nm):
    """Prompt for nm grades and return them as a list of floats."""
    return [float(input('Enter your Grade: ')) for _ in range(nm)]

def printGrades(nm, x):
    """Print the first nm grades, one per line."""
    for i in range(nm):
        print(x[i])

def avgGrades(nm, x):
    """Average of the first nm grades."""
    return sum(x[:nm]) / nm

def HighLowGrades(nm, x):
    """Return (highest, lowest) of the first nm grades (assumes 0..100)."""
    highG, lowG = 0, 100
    for grade in x[:nm]:
        if grade < lowG:
            lowG = grade
        if grade > highG:
            highG = grade
    return highG, lowG

def sortGrades(nm, x):
    """Bubble-sort the first nm grades in place and return the list."""
    for _ in range(nm - 1):
        for i in range(nm - 1):
            if x[i] > x[i + 1]:
                x[i], x[i + 1] = x[i + 1], x[i]
    return x

numGrades = int(input('Enter How Many Grades You Have: '))
myGrades = inputGrades(numGrades)
printGrades(numGrades, myGrades)
averageGrades = avgGrades(numGrades, myGrades)
print('Your Average Grade is: ', round(averageGrades))
highG, lowG = HighLowGrades(numGrades, myGrades)
print('Your Highest Grade is:', highG, 'Your Lowest Grade is:', lowG)
sortedGrades = sortGrades(numGrades, myGrades)
print('Sorted List of Grades: ', sortedGrades)
# BUG-FIX: this snake_case copy kept reading the old camelCase names
# (Sum, lowG/highG, numGrades, myGrades, ...) after the renames, so almost
# every function and driver line raised NameError; names are now consistent.

def input_grades(nm):
    """Prompt for nm grades and return them as a list of floats."""
    grades = []
    for i in range(0, nm, 1):
        grade = float(input('Enter your Grade: '))
        grades.append(grade)
    return grades

def print_grades(nm, x):
    """Print the first nm grades, one per line."""
    for i in range(0, nm, 1):
        print(x[i])

def avg_grades(nm, x):
    """Average of the first nm grades."""
    total = 0  # renamed to avoid shadowing the builtin sum
    for i in range(0, nm, 1):
        total = total + x[i]
    average = total / nm
    return average

def high_low_grades(nm, x):
    """Return (highest, lowest) of the first nm grades (assumes 0..100)."""
    high_g = 0
    low_g = 100
    for i in range(0, nm, 1):
        if x[i] < low_g:
            low_g = x[i]
        if x[i] > high_g:
            high_g = x[i]
    return (high_g, low_g)

def sort_grades(nm, x):
    """Bubble-sort the first nm grades in place and return the list."""
    for i in range(0, nm - 1, 1):
        for i in range(0, nm - 1, 1):
            if x[i] > x[i + 1]:
                temp = x[i]
                x[i] = x[i + 1]
                x[i + 1] = temp
    return x

num_grades = int(input('Enter How Many Grades You Have: '))
my_grades = input_grades(num_grades)
print_grades(num_grades, my_grades)
average_grades = avg_grades(num_grades, my_grades)
print('Your Average Grade is: ', round(average_grades))
(high_g, low_g) = high_low_grades(num_grades, my_grades)
print('Your Highest Grade is:', high_g, 'Your Lowest Grade is:', low_g)
sorted_grades = sort_grades(num_grades, my_grades)
print('Sorted List of Grades: ', sorted_grades)
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Created by Ross on 2019/8/21


class Solution:
    """LeetCode 300: longest increasing subsequence via O(n^2) DP."""

    # BUG-FIX: `List` was used in the annotation without `from typing import
    # List`, which raised NameError when the method was defined; the quoted
    # annotation keeps the module importable.
    def lengthOfLIS(self, nums: "List[int]") -> int:
        """Return the length of the longest strictly increasing subsequence."""
        if len(nums) == 0:
            return 0
        # dp[i] = length of the longest increasing subsequence ending at i
        dp = [0] * len(nums)
        dp[0] = 1
        for i in range(1, len(nums)):
            maxval = 0
            for j in range(0, i):
                if nums[j] < nums[i]:
                    maxval = max(maxval, dp[j])
            dp[i] = maxval + 1
        return max(dp)


if __name__ == '__main__':
    s = Solution()
    print(s.lengthOfLIS(nums=[10, 9, 2, 5, 3, 7, 101, 18]))
class Solution:
    """LeetCode 300 (snake_case copy): longest increasing subsequence, O(n^2) DP."""

    # BUG-FIX: `List` is undefined here (typing was never imported);
    # the quoted annotation avoids the NameError at class creation.
    def length_of_lis(self, nums: "List[int]") -> int:
        """Return the length of the longest strictly increasing subsequence."""
        if len(nums) == 0:
            return 0
        dp = [0] * len(nums)
        dp[0] = 1
        for i in range(1, len(nums)):
            maxval = 0
            for j in range(0, i):
                if nums[j] < nums[i]:
                    maxval = max(maxval, dp[j])
            dp[i] = maxval + 1
        return max(dp)


if __name__ == '__main__':
    # BUG-FIX: the driver called solution() (undefined) and the old
    # camelCase method name; both now match the definitions above.
    s = Solution()
    print(s.length_of_lis(nums=[10, 9, 2, 5, 3, 7, 101, 18]))
class TransformationTypeEnum:
    """Read-only enumeration of transformation-curve type names.

    Every field is exposed as a property whose setter raises, so the
    values cannot be reassigned on an instance.
    """

    __DOUBLE_SIGMOID = "double_sigmoid"
    __SIGMOID = "sigmoid"
    __REVERSE_SIGMOID = "reverse_sigmoid"
    __RIGHT_STEP = "right_step"
    __LEFT_STEP = "left_step"
    __STEP = "step"
    __CUSTOM_INTERPOLATION = "custom_interpolation"
    __NO_TRANSFORMATION = "no_transformation"

    @property
    def DOUBLE_SIGMOID(self):
        return self.__DOUBLE_SIGMOID

    @DOUBLE_SIGMOID.setter
    def DOUBLE_SIGMOID(self, _value):
        raise ValueError("Do not assign value to a TransformationTypeEnum field")

    @property
    def SIGMOID(self):
        return self.__SIGMOID

    @SIGMOID.setter
    def SIGMOID(self, _value):
        raise ValueError("Do not assign value to a TransformationTypeEnum field")

    @property
    def REVERSE_SIGMOID(self):
        return self.__REVERSE_SIGMOID

    @REVERSE_SIGMOID.setter
    def REVERSE_SIGMOID(self, _value):
        raise ValueError("Do not assign value to a TransformationTypeEnum field")

    @property
    def RIGHT_STEP(self):
        return self.__RIGHT_STEP

    @RIGHT_STEP.setter
    def RIGHT_STEP(self, _value):
        raise ValueError("Do not assign value to a TransformationTypeEnum field")

    @property
    def LEFT_STEP(self):
        return self.__LEFT_STEP

    @LEFT_STEP.setter
    def LEFT_STEP(self, _value):
        raise ValueError("Do not assign value to a TransformationTypeEnum field")

    @property
    def STEP(self):
        return self.__STEP

    @STEP.setter
    def STEP(self, _value):
        raise ValueError("Do not assign value to a TransformationTypeEnum field")

    @property
    def CUSTOM_INTERPOLATION(self):
        return self.__CUSTOM_INTERPOLATION

    @CUSTOM_INTERPOLATION.setter
    def CUSTOM_INTERPOLATION(self, _value):
        raise ValueError("Do not assign value to a TransformationTypeEnum field")

    @property
    def NO_TRANSFORMATION(self):
        return self.__NO_TRANSFORMATION

    @NO_TRANSFORMATION.setter
    def NO_TRANSFORMATION(self, _value):
        raise ValueError("Do not assign value to a TransformationTypeEnum field")
class Transformationtypeenum:
    """Read-only enumeration of transformation-curve type names.

    BUG-FIXES: the half-renamed original was unimportable — setter
    decorators referenced the old uppercase property names (NameError),
    getters read old-cased private attributes whose name mangling no longer
    matched, and setters raised the undefined `value_error`. Names are now
    consistently lowercase and setters raise ValueError.
    """

    __double_sigmoid = 'double_sigmoid'
    __sigmoid = 'sigmoid'
    __reverse_sigmoid = 'reverse_sigmoid'
    __right_step = 'right_step'
    __left_step = 'left_step'
    __step = 'step'
    __custom_interpolation = 'custom_interpolation'
    __no_transformation = 'no_transformation'

    @property
    def double_sigmoid(self):
        return self.__double_sigmoid

    @double_sigmoid.setter
    def double_sigmoid(self, value):
        raise ValueError('Do not assign value to a TransformationTypeEnum field')

    @property
    def sigmoid(self):
        return self.__sigmoid

    @sigmoid.setter
    def sigmoid(self, value):
        raise ValueError('Do not assign value to a TransformationTypeEnum field')

    @property
    def reverse_sigmoid(self):
        return self.__reverse_sigmoid

    @reverse_sigmoid.setter
    def reverse_sigmoid(self, value):
        raise ValueError('Do not assign value to a TransformationTypeEnum field')

    @property
    def right_step(self):
        return self.__right_step

    @right_step.setter
    def right_step(self, value):
        raise ValueError('Do not assign value to a TransformationTypeEnum field')

    @property
    def left_step(self):
        return self.__left_step

    @left_step.setter
    def left_step(self, value):
        raise ValueError('Do not assign value to a TransformationTypeEnum field')

    @property
    def step(self):
        return self.__step

    @step.setter
    def step(self, value):
        raise ValueError('Do not assign value to a TransformationTypeEnum field')

    @property
    def custom_interpolation(self):
        return self.__custom_interpolation

    @custom_interpolation.setter
    def custom_interpolation(self, value):
        raise ValueError('Do not assign value to a TransformationTypeEnum field')

    @property
    def no_transformation(self):
        return self.__no_transformation

    @no_transformation.setter
    def no_transformation(self, value):
        raise ValueError('Do not assign value to a TransformationTypeEnum field')
# Keep asking until the user enters exactly one word, then print it reversed.
while True:
    inp = input('Enter ONE word: ')
    if len(inp.split()) == 1:
        print(inp[::-1])  # [::-1] reverses the string
        break
    print('Your input was either 2 words, or nothing at all. Please try again :)')
# Duplicate copy: loop with a flag until a single word is entered, then
# print it reversed.
a = True
while a:
    word = input('Enter ONE word: ')
    pieces = word.split()
    if len(pieces) != 1:
        print('Your input was either 2 words, or nothing at all. Please try again :)')
        continue
    a = False
    print(word[::-1])
def main(request, response):
    """wptserve handler: emit a nosniff response.

    Conditional (If-Modified-Since) requests get a 304 with an empty body;
    others get 200 with a stub payload. The optional `type` query parameter
    is echoed back as the Content-Type header.
    """
    type = request.GET.first("type", None)
    is_revalidation = request.headers.get("If-Modified-Since", None)
    content = "/* nothing to see here */"
    response.add_required_headers = False
    writer = response.writer
    if is_revalidation is not None:
        writer.write_status(304)
        writer.write_header("x-content-type-options", "nosniff")
        writer.write_header("content-length", 0)
        body = ""
    else:
        writer.write_status(200)
        writer.write_header("x-content-type-options", "nosniff")
        writer.write_header("content-length", len(content))
        body = content
    if type is not None:
        writer.write_header("content-type", type)
    writer.end_headers()
    writer.write(body)
def main(request, response):
    """wptserve handler (duplicate copy): 304/empty for revalidations,
    200 with a stub body otherwise; always sends nosniff, optionally echoes
    the `type` query parameter as Content-Type."""
    type = request.GET.first('type', None)
    is_revalidation = request.headers.get('If-Modified-Since', None)
    content = '/* nothing to see here */'
    response.add_required_headers = False
    revalidating = is_revalidation is not None
    status = 304 if revalidating else 200
    body = '' if revalidating else content
    response.writer.write_status(status)
    response.writer.write_header('x-content-type-options', 'nosniff')
    response.writer.write_header('content-length', 0 if revalidating else len(content))
    if type is not None:
        response.writer.write_header('content-type', type)
    response.writer.end_headers()
    response.writer.write(body)
def calcula_soma(A: int, B: int):
    """Return 'SOMA = <A+B>'; raises TypeError unless both args are ints."""
    for value in (A, B):
        if not isinstance(value, int):
            raise TypeError
    return f'SOMA = {A + B}'
def calcula_soma(A: int, B: int):
    """Sum two ints and return the formatted string 'SOMA = <total>'.

    Raises TypeError when either argument is not an int.
    """
    if not (isinstance(A, int) and isinstance(B, int)):
        raise TypeError
    return f'SOMA = {A + B}'
# Flask config reference: http://flask.pocoo.org/docs/1.0/config/
# Flask-Sqlalchemy config reference: http://flask-sqlalchemy.pocoo.org/2.3/config/

POSTGRES_ENV_VARS_DEV = {
    'user': 'postgres',
    'pwd': '1qaz2wsx',
    'host': 'jblin',
    'port': '5432',
    'db': 'postgres',
}

# Production values go here when available.
POSTGRES_ENV_VARS_PRD = {}


class BaseConfig(object):
    """Settings shared by every environment."""

    DEBUG = False
    TESTING = False
    SECRET_KEY = "12qwaszx"
    JSONIFY_MIMETYPE = "application/json"
    SQLALCHEMY_DATABASE_URI = (
        "postgresql://%(user)s:%(pwd)s@%(host)s:%(port)s/%(db)s" % POSTGRES_ENV_VARS_DEV
    )
    SQLALCHEMY_TRACK_MODIFICATIONS = False


class DevConfig(BaseConfig):
    DEBUG = True


class TestConfig(BaseConfig):
    TESTING = True


class PrdConfig(BaseConfig):
    # production URI should be built from POSTGRES_ENV_VARS_PRD once set
    pass
# Duplicate (lowercase) copy of the Flask configuration.
# BUG-FIX: Baseconfig referenced the old uppercase dict name and the
# subclasses inherited from the old class name BaseConfig, so the module
# raised NameError on import; names are now consistent.
postgres_env_vars_dev = {
    'user': 'postgres',
    'pwd': '1qaz2wsx',
    'host': 'jblin',
    'port': '5432',
    'db': 'postgres',
}
postgres_env_vars_prd = {}


class Baseconfig(object):
    """Settings shared by every environment."""

    debug = False
    testing = False
    secret_key = '12qwaszx'
    jsonify_mimetype = 'application/json'
    sqlalchemy_database_uri = (
        'postgresql://%(user)s:%(pwd)s@%(host)s:%(port)s/%(db)s' % postgres_env_vars_dev
    )
    sqlalchemy_track_modifications = False


class Devconfig(Baseconfig):
    debug = True


class Testconfig(Baseconfig):
    testing = True


class Prdconfig(Baseconfig):
    pass
def sort_address_results(addresses, postcode):
    """Order address search results for display.

    Sorts by street number when a postcode was supplied or fewer than 40
    results came back; otherwise sorts by ascending confidence score.
    Kept as a client-side fallback — the Address Index API is expected to
    sort for us. Each entry must carry 'number' and 'confidenceScore' keys.
    """
    small_result_set = len(addresses) < 40
    sort_field = 'number' if (postcode or small_result_set) else 'confidenceScore'
    return sorted(addresses, key=lambda entry: entry[sort_field])
def sort_address_results(addresses, postcode):
    """Return *addresses* sorted for presentation.

    With a postcode, or with a small result set (< 40 entries), order by
    house number; otherwise order by confidence score (ascending). This is
    a fallback only — the Address Index API should already sort results.
    """
    if postcode or len(addresses) < 40:
        return sorted(addresses, key=lambda item: item['number'])
    return sorted(addresses, key=lambda item: item['confidenceScore'])
#!/usr/bin/env python3
# https://codeforces.com/problemset/problem/851/A
# At time t the number of standing spectators ramps up to k, plateaus,
# then ramps down after spectator n sits — the minimum of the three terms.
tokens = input().split()
n, k, t = (int(tok) for tok in tokens)
print(min(t, n + k - t, k))
# Mexican-wave spectator count (Codeforces 851A): read n, k, t from stdin
# and print how many spectators are standing at second t.
n, k, t = map(int, input().split())
# Ramp-up (t), plateau (k), ramp-down (n + k - t): the active phase wins.
standing = min(t, k, n + k - t)
print(standing)
# Ch02-FlowControl-while_break_continue_statements.py
# while/break/continue demo (Automate the Boring Stuff, ch. 2, lesson 6).
# Keeps asking for a name until 'Joe' answers, then checks the password.

attempts = 0
while True:
    print('What is your name?')
    name = input()
    if name != 'Joe':
        # After a few wrong answers, give the user a nudge.
        if attempts > 2:
            print('Hint: The name should be Joe.')
        attempts += 1
        continue
    print('Hi, Joe. What is the password?')
    password = input()
    if password == 'swordfish':
        break
print('Access granted')
# Interactive login demo: loop until the right name and password are given.
wrong_tries = 0
while True:
    print('What is your name?')
    # The entered name is only compared, never reused, so read it inline.
    if input() != 'Joe':
        if wrong_tries > 2:
            print('Hint: The name should be Joe.')
        wrong_tries = wrong_tries + 1
        continue
    print('Hi, Joe. What is the password?')
    if input() == 'swordfish':
        break
print('Access granted')
#!/bin/python3

def solve(s):
    """Capitalize the first letter of every space-separated word in *s*.

    Splitting on a single space preserves runs of spaces (empty tokens
    pass through unchanged). ``str.capitalize`` also lowercases the rest
    of each word, matching the original behaviour.
    """
    return ' '.join(word.capitalize() for word in s.split(' '))


if __name__ == '__main__':
    print(solve(input()))
def solve(s):
    """Return *s* with each space-delimited word capitalized.

    Empty tokens produced by consecutive spaces are preserved, so the
    original spacing of the input survives the round trip.
    """
    return ' '.join(map(str.capitalize, s.split(' ')))


if __name__ == '__main__':
    text = input()
    print(solve(text))
# Assignment 6
# Author: Jignesh Chaudhary, Student Id: 197320
# a) Heapsort: Implement the heapsort algorithm (Algorithm 7.5) modified to
# end after finding z largest keys in non-increasing order.
class heap:
    """Max-heap that sorts the z largest keys to the tail of the array.

    After makeheap() + removekey(), the last z slots of ``self.array``
    hold the z largest keys in ascending order (non-increasing when read
    from the end). Total cost: O(n) to heapify + O(z log n) for the z
    extractions.
    """

    def __init__(self, array, z):
        """Copy *array* and remember *z*, the number of largest keys wanted."""
        self.array = []
        self.z = z
        for value in array:
            self.array.append(value)

    def siftdown(self, array, i, size):
        """Restore the max-heap property for the subtree rooted at *i*.

        Only indices < *size* are treated as part of the heap.
        Bug fix: the passed-in *array* used to be silently replaced with
        self.array; all call sites pass self.array, so honor the argument.
        """
        leftchild = 2 * i + 1
        rightchild = 2 * i + 2
        parent = i
        if leftchild <= size - 1 and array[leftchild] > array[i]:
            parent = leftchild
        if rightchild <= size - 1 and array[rightchild] > array[parent]:
            parent = rightchild
        if parent != i:
            array[i], array[parent] = array[parent], array[i]
            self.siftdown(array, parent, size)

    def makeheap(self):
        """Heapify the whole array bottom-up: O(n)."""
        size = len(self.array)
        j = (size // 2) - 1
        while j >= 0:
            self.siftdown(self.array, j, size)
            j -= 1

    def removekey(self):
        """Run z heapsort rounds, moving the current max to the shrinking tail."""
        end = len(self.array) - 1
        for _ in range(self.z):  # modified algorithm: stop after z keys
            self.array[0], self.array[end] = self.array[end], self.array[0]
            self.siftdown(self.array, 0, end)
            end -= 1

    def display(self):
        """Print the backing array.

        Bug fix: removed a dead `result` list that was built over a wrong
        range (1..len-2) and never used.
        """
        print(self.array)


if __name__ == '__main__':
    z = 4
    values = [7, 8, 5, 9, 3, 2, 6, 4, 1, 10]  # renamed: `list` shadowed the builtin
    test = heap(values, z)
    test.display()
    test.makeheap()
    test.removekey()
    test.display()
class Heap:
    """Find the z largest keys of an integer array in non-increasing order.

    makeheap() builds a max-heap in place; removekey() then performs z
    rounds of heapsort, leaving the z largest keys in the last z slots
    (ascending toward the end of the array).
    """

    def __init__(self, array, z):
        """Copy *array* and store *z*, the number of largest keys wanted."""
        self.array = []
        self.z = z
        for value in array:
            self.array.append(value)

    def siftdown(self, array, i, size):
        """Sift array[i] down until the subtree rooted at *i* is a max-heap.

        Only indices < *size* belong to the heap.
        """
        leftchild = 2 * i + 1
        rightchild = 2 * i + 2
        parent = i
        if leftchild <= size - 1 and array[leftchild] > array[i]:
            parent = leftchild
        if rightchild <= size - 1 and array[rightchild] > array[parent]:
            parent = rightchild
        if parent != i:
            largchild = array[i]
            array[i] = array[parent]
            array[parent] = largchild
            self.siftdown(array, parent, size)

    def makeheap(self):
        """Build the max-heap bottom-up: O(n)."""
        size = len(self.array)
        j = size // 2 - 1
        # Bug fix: the loop condition and body referenced an undefined
        # uppercase `J`; use the `j` assigned above.
        while j >= 0:
            self.siftdown(self.array, j, size)
            j -= 1

    def removekey(self):
        """Run z heapsort rounds, moving the current max to the shrinking tail."""
        end = len(self.array) - 1
        for _ in range(self.z):
            root = self.array[0]
            self.array[0] = self.array[end]
            self.array[end] = root
            self.siftdown(self.array, 0, end)
            end -= 1

    def display(self):
        """Print the backing array (the unused `result` list was removed)."""
        print(self.array)


if __name__ == '__main__':
    z = 4
    values = [7, 8, 5, 9, 3, 2, 6, 4, 1, 10]
    # Bug fix: instantiated undefined lowercase `heap`; the class is `Heap`.
    test = Heap(values, z)
    test.display()
    test.makeheap()
    test.removekey()
    test.display()
def main(str1, str2):
    """Return True if *str1* is a subsequence of *str2*.

    Greedy two-pointer scan: advance through str2 once, consuming a
    character of str1 on each match; str1 is a subsequence iff it is
    fully consumed. O(len(str2)) time.
    """
    m = len(str1)
    n = len(str2)
    j = 0
    i = 0
    while j < m and i < n:
        if str1[j] == str2[i]:
            j = j + 1
        i = i + 1
    return j == m


# Bug fix: the driver ran at import time (module-level input() calls);
# guard it so importing this module has no side effects.
if __name__ == '__main__':
    str2 = str(input())
    N = int(input())
    for i in range(N):
        str1 = str(input())
        if main(str1, str2):
            print("POSITIVE")
        else:
            print("NEGATIVE")
def main(str1, str2):
    """Return True when *str1* occurs as a subsequence of *str2*.

    Walks str2 once with two indices; each matching character consumes
    one position of str1. Runs in O(len(str2)).
    """
    m = len(str1)
    n = len(str2)
    j = 0
    i = 0
    while j < m and i < n:
        if str1[j] == str2[i]:
            j = j + 1
        i = i + 1
    return j == m


# Bug fixes: the loop referenced undefined `N` (the count is bound to `n`),
# and the driver executed at import time; guard it under __main__.
if __name__ == '__main__':
    str2 = str(input())
    n = int(input())
    for i in range(n):
        str1 = str(input())
        if main(str1, str2):
            print('POSITIVE')
        else:
            print('NEGATIVE')
def process_blok(x0_x1_y0_y1_z0_z1_):
    """Load and preprocess one spatial block of the imaging volume.

    Parameters
    ----------
    x0_x1_y0_y1_z0_z1_ : tuple
        Six ints (x0, x1, y0, y1, z0, z1): block bounds in downsampled voxels.

    Returns
    -------
    tuple
        (voxl_position, voxl_timesers, voxl_peakaidx, voxl_position_peak,
        voxl_position_peak_phys, voxl_conn_peak, voxl_powr_peak)

    NOTE(review): reads many module-level globals (nthread, bimage_peak_fine,
    cell_ball, lt, lz, ds, resn_x/y/z, freq_stack, t_exposure, image_names,
    frame_i, image_dir, cell_diam) — assumed to be initialized by the
    enclosing pipeline before workers run; confirm against the driver.
    """
    x0_, x1_, y0_, y1_, z0_, z1_ = x0_x1_y0_y1_z0_z1_
    print('Setting number of threads in process_blok to %d.\n' %nthread)
    os.environ['MKL_NUM_THREADS'] = str(nthread)

    # load and dilate initial voxel peak positions
    voxl_peaklidx_blok = np.zeros_like(bimage_peak_fine.value)
    voxl_peaklidx_blok[x0_:x1_, y0_:y1_, z0_:z1_] = 1
    voxl_peaklidx_blok *= bimage_peak_fine.value
    voxl_valdlidx_blok = morphology.binary_dilation(voxl_peaklidx_blok, cell_ball)

    voxl_position_peak = np.argwhere(voxl_peaklidx_blok)
    voxl_position = np.argwhere(voxl_valdlidx_blok)
    # rows (within the dilated/valid voxel set) that are peak voxels
    voxl_peakaidx = np.nonzero(voxl_peaklidx_blok[voxl_valdlidx_blok])[0]

    # tight bounding box around the dilated block
    x0, y0, z0 = voxl_position.min(0)
    x1, y1, z1 = voxl_position.max(0) + 1

    voxl_timesers_blok = [None] * lt
    tic = time.time()
    for ti in range(lt):
        image_name_hdf = image_dir(image_names[ti], frame_i) + 'image_aligned.hdf5'
        with h5py.File(image_name_hdf, 'r') as file_handle:
            # dataset is stored z, y, x; transpose back to x, y, z
            voxl_timesers_blok[ti] = file_handle['V3D'][z0:z1, y0:y1, x0:x1].T
    print('Load data time: %.1f minutes.\n' %((time.time() - tic) / 60))

    # move time to the trailing axis, then keep only the valid voxels
    voxl_timesers_blok = np.transpose(voxl_timesers_blok, (1, 2, 3, 0))
    voxl_timesers = voxl_timesers_blok[voxl_valdlidx_blok[x0:x1, y0:y1, z0:z1]]
    del voxl_timesers_blok  # free the dense volume as soon as possible

    # perform slice-time correction, if there is more than one slice
    if lz > 1:
        for i in range(len(voxl_position)):
            # get timepoints of midpoint and zi plane for interpolation
            zi = voxl_position[i, 2]  # number of plane
            timepoints_zi = np.arange(lt) * 1000.0 / freq_stack + zi * t_exposure
            timepoints_zm = np.arange(lt) * 1000.0 / freq_stack + (lz / 2) * t_exposure
            # make spline interpolator and interpolate timeseries
            spline_interpolator_xyzi = \
                interpolate.InterpolatedUnivariateSpline(timepoints_zi, voxl_timesers[i])
            voxl_timesers[i] = spline_interpolator_xyzi(timepoints_zm)

    def normalize_rank_timesers(timesers):
        # Z-score each row; the sqrt(lt - 1) scaling makes the dot products
        # below behave as correlation coefficients.
        #for i, time_i in enumerate(timesers):
        #    timesers[i] = stats.rankdata(time_i)
        mn = timesers.mean(1)
        sd = timesers.std(1, ddof=1)
        return (timesers - mn[:, None]) / (sd[:, None] * np.sqrt(lt - 1))

    # get voxel connectivity from proximities (distances) and similarities (correlations)
    voxl_position_peak_phys = voxl_position_peak * [resn_x * ds, resn_y * ds, resn_z]
    voxl_timesers_peak_rank = normalize_rank_timesers(voxl_timesers[voxl_peakaidx])

    # connectivity is given by the combination of high proximity and high similarity
    voxl_conn_peak = np.zeros((len(voxl_peakaidx), len(voxl_peakaidx)), dtype=bool)
    idx = np.linspace(0, len(voxl_peakaidx), 11, dtype=int)
    for i in range(len(idx) - 1):
        # process the pairwise matrices in 10 row-chunks to bound memory
        idx_i = np.r_[idx[i]:idx[i + 1]]
        voxl_dist_peak_i = np.sqrt(
            np.square(voxl_position_peak_phys[idx_i, 0:1] - voxl_position_peak_phys[:, 0:1].T) +
            np.square(voxl_position_peak_phys[idx_i, 1:2] - voxl_position_peak_phys[:, 1:2].T) +
            np.square(voxl_position_peak_phys[idx_i, 2:3] - voxl_position_peak_phys[:, 2:3].T))
        voxl_corr_peak_i = np.dot(voxl_timesers_peak_rank[idx_i], voxl_timesers_peak_rank.T)
        voxl_neib_peak_i = voxl_dist_peak_i < cell_diam
        # "similar" = correlation above the median within the local neighborhood
        voxl_neib_simi_i = np.array([corr_ij > np.median(corr_ij[neib_ij])
                                     for neib_ij, corr_ij in zip(voxl_neib_peak_i, voxl_corr_peak_i)])
        voxl_conn_peak[idx_i] = (voxl_neib_peak_i & voxl_neib_simi_i)
    voxl_conn_peak = voxl_conn_peak | voxl_conn_peak.T  # symmetrize
    voxl_powr_peak = np.mean(np.square(voxl_timesers[voxl_peakaidx]), 1)
    del voxl_timesers_peak_rank

    return (voxl_position, voxl_timesers, voxl_peakaidx,
            voxl_position_peak, voxl_position_peak_phys, voxl_conn_peak, voxl_powr_peak)


def blok_cell_detection(blok_i_blok_xyz_01):
    '''Detect individual cells using the sparse NMF algorithm'''
    # blok_i: block index used in the output filename;
    # blok_xyz_01: block bounds forwarded to process_blok
    blok_i, blok_xyz_01 = blok_i_blok_xyz_01
    print('Setting number of threads in blok_cell_detection to %d.\n' %nthread)
    os.environ['MKL_NUM_THREADS'] = str(nthread)

    (voxl_position, voxl_timesers, voxl_peakaidx, voxl_position_peak,
     voxl_position_peak_phys, voxl_conn_peak, voxl_powr_peak) = process_blok(blok_xyz_01)

    blok_voxl_nmbr = len(voxl_position)  # number of voxels in blok
    peak_valdlidx = np.arange(len(voxl_position_peak))
    voxl_fraction = 100  # decremented if nnmf fails
    # On each ValueError the weakest peaks are dropped and clustering retried.
    for iter_i in range(128):  # 0.95**31 = 0.2
        try:
            # estimate sparseness of each component
            cmpn_nmbr = np.round(peak_valdlidx.size / (0.5 * cell_voxl_nmbr)).astype(int)
            print((iter_i, voxl_fraction, cmpn_nmbr))
            tic = time.time()
            cmpn_clusters = \
                cluster.AgglomerativeClustering(
                    n_clusters=cmpn_nmbr,
                    connectivity=voxl_conn_peak[peak_valdlidx[:,None],peak_valdlidx[None]],
                    linkage='ward')\
                .fit(voxl_position_peak_phys[peak_valdlidx])
            cmpn_labl = cmpn_clusters.labels_
            print('Hierarchical Clustering time: %.1f minutes.\n' %((time.time() - tic) / 60))

            # initialize spatial component properties
            cmpn_spceinit = np.zeros((blok_voxl_nmbr, cmpn_nmbr + 1))
            cmpn_neibhood = np.zeros((blok_voxl_nmbr, cmpn_nmbr + 1), dtype=bool)
            cmpn_sparsity = np.zeros(cmpn_nmbr + 1)
            cmpn_percentl = np.zeros(cmpn_nmbr + 1)
            for cmpn_i in range(cmpn_nmbr):
                # initialize spatial component
                cmpn_spceinit[voxl_peakaidx[peak_valdlidx], cmpn_i] = (cmpn_labl == cmpn_i)

                # get neighborhood of component
                cmpn_centroid_phys_i = \
                    np.median(voxl_position_peak_phys[peak_valdlidx][cmpn_labl == cmpn_i], 0)
                dist_from_centroid_to_peak = \
                    np.sqrt(
                        np.square((cmpn_centroid_phys_i[0] - voxl_position_peak_phys[peak_valdlidx, 0])) +
                        np.square((cmpn_centroid_phys_i[1] - voxl_position_peak_phys[peak_valdlidx, 1])) +
                        np.square((cmpn_centroid_phys_i[2] - voxl_position_peak_phys[peak_valdlidx, 2]))
                    )
                # anchor the neighborhood at the peak closest to the centroid
                cmpn_midpoint_i = voxl_position_peak[peak_valdlidx][np.argmin(dist_from_centroid_to_peak)]
                cmpn_neibaidx_i = cmpn_midpoint_i + (np.argwhere(cell_ball) - cell_ball_midpoint)
                # drop neighborhood voxels that fall outside the volume
                cmpn_neibaidx_i = cmpn_neibaidx_i[(cmpn_neibaidx_i >= 0).all(1)]
                cmpn_neibaidx_i = cmpn_neibaidx_i[(cmpn_neibaidx_i < [lx//ds, ly//ds, lz]).all(1)]

                def relative_indx(ni):
                    # map an absolute voxel coordinate to its row in voxl_position
                    return np.nonzero(np.all(voxl_position == ni, 1))[0][0]
                cmpn_neibridx_i = np.array([relative_indx(ni) for ni in cmpn_neibaidx_i])
                cmpn_neibhood[cmpn_neibridx_i, cmpn_i] = 1
                # prototype support: one cell's worth of nonzero voxels
                cmpn_vect_i = np.zeros(len(cmpn_neibridx_i))
                cmpn_vect_i[:cell_voxl_nmbr] = 1
                cmpn_sparsity[cmpn_i] = sparseness(cmpn_vect_i)
                cmpn_percentl[cmpn_i] = 100 * (1 - np.mean(cmpn_vect_i))

            voxl_valdlidx = cmpn_neibhood.any(1)
            voxl_position_vald = voxl_position[voxl_valdlidx]
            voxl_timesers_vald = voxl_timesers[voxl_valdlidx]
            cmpn_spceinit_vald = cmpn_spceinit[voxl_valdlidx]
            cmpn_neibhood_vald = cmpn_neibhood[voxl_valdlidx]

            # initialize background component
            cmpn_spceinit_vald[:, -1] = 1
            cmpn_neibhood_vald[:, -1] = 1

            tic = time.time()
            cmpn_spcesers_vald, cmpn_timesers_vald, d = nnmf_sparse(
                voxl_timesers_vald, voxl_position_vald, cmpn_spceinit_vald,
                cmpn_neibhood_vald, cmpn_sparsity, cmpn_percentl,
                miniter=10, maxiter=100, tolfun=1e-3)
            detection_success = 1
            print('NMF time: %.1f minutes.\n' %((time.time() - tic) / 60))
            break
        except ValueError:
            # NMF/clustering failed: discard the weakest peaks and retry
            detection_success = 0
            voxl_fraction *= 0.97
            thr_powr_peak = np.percentile(voxl_powr_peak, 100 - voxl_fraction)
            peak_valdlidx = np.where(voxl_powr_peak > thr_powr_peak)[0]

    # get cell positions and timeseries, and save cell data
    with h5py.File(cell_dir + '/Block' + str(blok_i).zfill(5) + '.hdf5', 'w') as file_handle:
        if detection_success:
            for cmpn_i in range(cmpn_nmbr):
                cmpn_lidx_i = np.nonzero(cmpn_spcesers_vald[:, cmpn_i])[0]
                cmpn_position_i = voxl_position_vald[cmpn_lidx_i]
                cmpn_spcesers_i = cmpn_spcesers_vald[cmpn_lidx_i, cmpn_i]

                # scale the component timeseries to the mean image intensity
                mean_spcevoxl_i = bimage_mean.value[list(zip(*cmpn_position_i))]
                mean_i = np.sum(mean_spcevoxl_i * cmpn_spcesers_i) / np.sum(cmpn_spcesers_i)
                cmpn_timesers_i = cmpn_timesers_vald[cmpn_i]
                cmpn_timesers_i = cmpn_timesers_i * mean_i / np.mean(cmpn_timesers_i)

                hdf5_dir = '/cmpn/' + str(cmpn_i).zfill(5)
                file_handle[hdf5_dir + '/cmpn_position'] = cmpn_position_i
                file_handle[hdf5_dir + '/cmpn_spcesers'] = cmpn_spcesers_i
                file_handle[hdf5_dir + '/cmpn_timesers'] = cmpn_timesers_i
            file_handle['cmpn_nmbr'] = cmpn_nmbr
            file_handle['success'] = 1


def nnmf_sparse(V0, XYZ0, W0, B0, Sparsity0, Percentl0,
                tolfun=1e-4, miniter=10, maxiter=100, verbosity=1, time_mean=1.0):
    """Sparse nonnegative matrix factorization V0 ~ W.H by alternating
    least squares, with per-component sparsity and spatial-support
    constraints.

    Returns (W, H, dnorm): spatial components, timeseries, final residual
    norm. CAUTION: V0 is normalized in place to save memory.
    NOTE(review): uses module-level `dt_range` and `nthread` — confirm
    they are set by the pipeline.
    """
    print('Setting number of threads in nnmf_sparse to %d.\n' %nthread)
    os.environ['MKL_NUM_THREADS'] = str(nthread)

    # CAUTION: Input variable is modified to save memory
    V0 *= (time_mean / V0.mean(1)[:, None])  # normalize voxel timeseries
    V = V0[:, dt_range].astype(float)  # copy input signal
    XYZ = XYZ0.astype(int)
    W = W0.astype(float)
    B = B0.astype(bool)
    Sparsity = Sparsity0.copy()
    Percentl = Percentl0.copy()

    # get dimensions
    n, t = V.shape
    n_, c = W.shape
    assert(n_ == n)

    H = np.zeros((c, t))  # zero timeseries array
    dnorm_prev = np.full(2, np.inf)  # last two d-norms
    for iter_i in range(maxiter):
        # save current states
        H_ = H.copy()

        # Alternate least squares with regularization
        H = np.maximum(linalg.lstsq(W, V)[0], 0)
        H *= (time_mean / H.mean(1)[:, None])  # normalize component timeseries

        W = np.maximum(linalg.lstsq(V.T, H.T)[0], 0)
        W[np.logical_not(B)] = 0  # restrict component boundaries
        for ci in range(c):
            W_ci = W[B[:, ci], ci]
            if any(W_ci) & ((Sparsity[ci] > 0) | (Percentl[ci] > 0)):
                # get relative dimensions of component
                XYZ_ci = XYZ[B[:, ci]] - XYZ[B[:, ci]].min(0)

                # enforce component sparsity and percentile threshold
                W_ci = projection(W_ci, Sparsity[ci], at_least_as_sparse=True)
                # W_ci[W_ci <= np.percentile(W_ci, Percentl[ci])] = 0

                # retain component of maximal size
                L_ci = np.zeros(np.ptp(XYZ_ci, 0) + 1, dtype=bool)
                L_ci[list(zip(*XYZ_ci))] = W_ci > 0
                L_ci = measure.label(L_ci, connectivity=3)
                lci_size = np.bincount(L_ci[L_ci.nonzero()])
                W_ci[L_ci[list(zip(*XYZ_ci))] != np.argmax(lci_size)] = 0
                W[B[:, ci], ci] = W_ci

        # Get norm of difference and check for convergence
        dnorm = np.sqrt(np.mean(np.square(V - W.dot(H)))) / time_mean
        diffh = np.sqrt(np.mean(np.square(H - H_))) / time_mean
        if ((dnorm_prev.max(0) - dnorm) < tolfun) & (diffh < tolfun):
            if (iter_i >= miniter):
                break
        dnorm_prev[1] = dnorm_prev[0]
        dnorm_prev[0] = dnorm
        if verbosity:
            print((iter_i, dnorm, diffh))

    # Perform final regression on full input timeseries
    H = np.maximum(linalg.lstsq(W, V0)[0], 0)
    H *= (time_mean / H.mean(1)[:, None])  # normalize component timeseries

    return (W, H, dnorm)


def projection(Si, s, at_least_as_sparse=False):
    """Project the 1-D vector *Si* onto the nonnegative set with sparseness *s*.

    Appears to implement the Hoyer (2004) sparseness projection: the
    l2-norm is held fixed while the l1-norm is reduced to match the
    target sparseness. With s <= 0 only nonnegativity is enforced. When
    *at_least_as_sparse* is True the input is returned unchanged if it is
    already at least as sparse as requested.
    """
    assert(Si.ndim == 1)
    S = np.copy(Si)  # copy input signal
    if s <= 0:
        return np.maximum(S, 0)  # enforce nonnegativity

    d = S.size
    L2 = np.sqrt(np.sum(np.square(S)))  # fixed l2-norm
    L1 = L2 * (np.sqrt(d) * (1 - s) + s)  # desired l1-norm

    # quit if at_least_sparse=True and original exceeds target sparsity
    if at_least_as_sparse:
        if L1 >= np.sum(np.abs(S)):
            return S

    # initialize components with negative values
    Z = np.zeros(S.shape, dtype=bool)

    negatives = True
    while negatives:
        # Fix components with negative values at 0
        Z = Z | (S < 0)
        S[Z] = 0

        # Project to the sum-constraint hyperplane
        S += (L1 - np.sum(S)) / (d - np.sum(Z))
        S[Z] = 0

        # Get midpoints of hyperplane, M
        M = np.tile(L1 / (d - np.sum(Z)), d)
        M[Z] = 0
        P = S - M

        # Solve for Alph, L2 = l2[M + Alph*(S-M)] = l2[P*Alph + M],
        # where L2 is defined above, and l2 is the l2-norm operator.
        # For convenience, we square both sides and find the roots,
        # 0 = (l2[P*Alph + M])^2 - (L2)^2
        # 0 = sum((P*Alph)^2) + sum(2*P*M*Alph) + sum(M^2) - L2^2
        A = np.sum(P * P)
        B = 2 * np.sum(P * M)
        C = np.sum(M * M) - L2**2
        Alph = (-B + np.real(np.sqrt(B**2 - 4 * A * C))) / (2 * A)

        # Project within the sum-constraint hyperplane to match L2
        S = M + Alph * P

        # Check for negative values in solution
        negatives = np.any(S < 0)

    return S
def process_blok(x0_x1_y0_y1_z0_z1_):
    """Load one block's voxel data: positions, timeseries, peak connectivity.

    Returns (voxl_position, voxl_timesers, voxl_peakaidx, voxl_position_peak,
    voxl_position_peak_phys, voxl_conn_peak, voxl_powr_peak).
    NOTE(review): depends on module-level pipeline state (nthread,
    bimage_peak_fine, cell_ball, lt, lz, ds, resn_*, freq_stack,
    t_exposure, image_names, frame_i, image_dir, cell_diam) — confirm
    the driver initializes these before dispatching workers.
    """
    (x0_, x1_, y0_, y1_, z0_, z1_) = x0_x1_y0_y1_z0_z1_
    print('Setting number of threads in process_blok to %d.\n' % nthread)
    os.environ['MKL_NUM_THREADS'] = str(nthread)
    # mask the block, keep only peak voxels, then dilate by one cell ball
    voxl_peaklidx_blok = np.zeros_like(bimage_peak_fine.value)
    voxl_peaklidx_blok[x0_:x1_, y0_:y1_, z0_:z1_] = 1
    voxl_peaklidx_blok *= bimage_peak_fine.value
    voxl_valdlidx_blok = morphology.binary_dilation(voxl_peaklidx_blok, cell_ball)
    voxl_position_peak = np.argwhere(voxl_peaklidx_blok)
    voxl_position = np.argwhere(voxl_valdlidx_blok)
    # rows (within the dilated voxel set) that are peaks
    voxl_peakaidx = np.nonzero(voxl_peaklidx_blok[voxl_valdlidx_blok])[0]
    # tight bounding box of the dilated region
    (x0, y0, z0) = voxl_position.min(0)
    (x1, y1, z1) = voxl_position.max(0) + 1
    voxl_timesers_blok = [None] * lt
    tic = time.time()
    for ti in range(lt):
        image_name_hdf = image_dir(image_names[ti], frame_i) + 'image_aligned.hdf5'
        with h5py.File(image_name_hdf, 'r') as file_handle:
            # dataset stored z, y, x; transpose back to x, y, z
            voxl_timesers_blok[ti] = file_handle['V3D'][z0:z1, y0:y1, x0:x1].T
    print('Load data time: %.1f minutes.\n' % ((time.time() - tic) / 60))
    voxl_timesers_blok = np.transpose(voxl_timesers_blok, (1, 2, 3, 0))
    voxl_timesers = voxl_timesers_blok[voxl_valdlidx_blok[x0:x1, y0:y1, z0:z1]]
    del voxl_timesers_blok  # release the dense volume early
    # slice-time correction is only meaningful with more than one z-plane
    if lz > 1:
        for i in range(len(voxl_position)):
            zi = voxl_position[i, 2]  # plane index of this voxel
            timepoints_zi = np.arange(lt) * 1000.0 / freq_stack + zi * t_exposure
            timepoints_zm = np.arange(lt) * 1000.0 / freq_stack + lz / 2 * t_exposure
            spline_interpolator_xyzi = interpolate.InterpolatedUnivariateSpline(timepoints_zi, voxl_timesers[i])
            voxl_timesers[i] = spline_interpolator_xyzi(timepoints_zm)

    def normalize_rank_timesers(timesers):
        # z-score each row so dot products below act as correlations
        mn = timesers.mean(1)
        sd = timesers.std(1, ddof=1)
        return (timesers - mn[:, None]) / (sd[:, None] * np.sqrt(lt - 1))

    # physical coordinates of the peak voxels
    voxl_position_peak_phys = voxl_position_peak * [resn_x * ds, resn_y * ds, resn_z]
    voxl_timesers_peak_rank = normalize_rank_timesers(voxl_timesers[voxl_peakaidx])
    # connectivity = spatial proximity AND above-median local similarity
    voxl_conn_peak = np.zeros((len(voxl_peakaidx), len(voxl_peakaidx)), dtype=bool)
    idx = np.linspace(0, len(voxl_peakaidx), 11, dtype=int)
    for i in range(len(idx) - 1):  # chunk the pairwise matrices into 10 row-bands
        idx_i = np.r_[idx[i]:idx[i + 1]]
        voxl_dist_peak_i = np.sqrt(np.square(voxl_position_peak_phys[idx_i, 0:1] - voxl_position_peak_phys[:, 0:1].T) + np.square(voxl_position_peak_phys[idx_i, 1:2] - voxl_position_peak_phys[:, 1:2].T) + np.square(voxl_position_peak_phys[idx_i, 2:3] - voxl_position_peak_phys[:, 2:3].T))
        voxl_corr_peak_i = np.dot(voxl_timesers_peak_rank[idx_i], voxl_timesers_peak_rank.T)
        voxl_neib_peak_i = voxl_dist_peak_i < cell_diam
        voxl_neib_simi_i = np.array([corr_ij > np.median(corr_ij[neib_ij]) for (neib_ij, corr_ij) in zip(voxl_neib_peak_i, voxl_corr_peak_i)])
        voxl_conn_peak[idx_i] = voxl_neib_peak_i & voxl_neib_simi_i
    voxl_conn_peak = voxl_conn_peak | voxl_conn_peak.T  # symmetrize
    voxl_powr_peak = np.mean(np.square(voxl_timesers[voxl_peakaidx]), 1)
    del voxl_timesers_peak_rank
    return (voxl_position, voxl_timesers, voxl_peakaidx, voxl_position_peak, voxl_position_peak_phys, voxl_conn_peak, voxl_powr_peak)


def blok_cell_detection(blok_i_blok_xyz_01):
    """Detect individual cells using the sparse NMF algorithm.

    Clusters peak voxels hierarchically, runs constrained sparse NMF, and
    writes the detected components to '<cell_dir>/BlockNNNNN.hdf5'. On a
    ValueError the weakest peaks are discarded and the attempt repeated.
    """
    (blok_i, blok_xyz_01) = blok_i_blok_xyz_01
    print('Setting number of threads in blok_cell_detection to %d.\n' % nthread)
    os.environ['MKL_NUM_THREADS'] = str(nthread)
    (voxl_position, voxl_timesers, voxl_peakaidx, voxl_position_peak, voxl_position_peak_phys, voxl_conn_peak, voxl_powr_peak) = process_blok(blok_xyz_01)
    blok_voxl_nmbr = len(voxl_position)  # number of voxels in this block
    peak_valdlidx = np.arange(len(voxl_position_peak))
    voxl_fraction = 100  # percentile of peaks retained; shrinks on failure
    for iter_i in range(128):
        try:
            # target roughly two components per cell's worth of peaks
            cmpn_nmbr = np.round(peak_valdlidx.size / (0.5 * cell_voxl_nmbr)).astype(int)
            print((iter_i, voxl_fraction, cmpn_nmbr))
            tic = time.time()
            cmpn_clusters = cluster.AgglomerativeClustering(n_clusters=cmpn_nmbr, connectivity=voxl_conn_peak[peak_valdlidx[:, None], peak_valdlidx[None]], linkage='ward').fit(voxl_position_peak_phys[peak_valdlidx])
            cmpn_labl = cmpn_clusters.labels_
            print('Hierarchical Clustering time: %.1f minutes.\n' % ((time.time() - tic) / 60))
            # spatial component initialization and per-component properties
            cmpn_spceinit = np.zeros((blok_voxl_nmbr, cmpn_nmbr + 1))
            cmpn_neibhood = np.zeros((blok_voxl_nmbr, cmpn_nmbr + 1), dtype=bool)
            cmpn_sparsity = np.zeros(cmpn_nmbr + 1)
            cmpn_percentl = np.zeros(cmpn_nmbr + 1)
            for cmpn_i in range(cmpn_nmbr):
                cmpn_spceinit[voxl_peakaidx[peak_valdlidx], cmpn_i] = cmpn_labl == cmpn_i
                # anchor the neighborhood at the peak nearest the cluster centroid
                cmpn_centroid_phys_i = np.median(voxl_position_peak_phys[peak_valdlidx][cmpn_labl == cmpn_i], 0)
                dist_from_centroid_to_peak = np.sqrt(np.square(cmpn_centroid_phys_i[0] - voxl_position_peak_phys[peak_valdlidx, 0]) + np.square(cmpn_centroid_phys_i[1] - voxl_position_peak_phys[peak_valdlidx, 1]) + np.square(cmpn_centroid_phys_i[2] - voxl_position_peak_phys[peak_valdlidx, 2]))
                cmpn_midpoint_i = voxl_position_peak[peak_valdlidx][np.argmin(dist_from_centroid_to_peak)]
                cmpn_neibaidx_i = cmpn_midpoint_i + (np.argwhere(cell_ball) - cell_ball_midpoint)
                # clip neighborhood voxels falling outside the volume
                cmpn_neibaidx_i = cmpn_neibaidx_i[(cmpn_neibaidx_i >= 0).all(1)]
                cmpn_neibaidx_i = cmpn_neibaidx_i[(cmpn_neibaidx_i < [lx // ds, ly // ds, lz]).all(1)]

                def relative_indx(ni):
                    # map an absolute voxel coordinate to its row in voxl_position
                    return np.nonzero(np.all(voxl_position == ni, 1))[0][0]
                cmpn_neibridx_i = np.array([relative_indx(ni) for ni in cmpn_neibaidx_i])
                cmpn_neibhood[cmpn_neibridx_i, cmpn_i] = 1
                # prototype support: one cell's worth of nonzero voxels
                cmpn_vect_i = np.zeros(len(cmpn_neibridx_i))
                cmpn_vect_i[:cell_voxl_nmbr] = 1
                cmpn_sparsity[cmpn_i] = sparseness(cmpn_vect_i)
                cmpn_percentl[cmpn_i] = 100 * (1 - np.mean(cmpn_vect_i))
            voxl_valdlidx = cmpn_neibhood.any(1)
            voxl_position_vald = voxl_position[voxl_valdlidx]
            voxl_timesers_vald = voxl_timesers[voxl_valdlidx]
            cmpn_spceinit_vald = cmpn_spceinit[voxl_valdlidx]
            cmpn_neibhood_vald = cmpn_neibhood[voxl_valdlidx]
            # last component is an unconstrained background term
            cmpn_spceinit_vald[:, -1] = 1
            cmpn_neibhood_vald[:, -1] = 1
            tic = time.time()
            (cmpn_spcesers_vald, cmpn_timesers_vald, d) = nnmf_sparse(voxl_timesers_vald, voxl_position_vald, cmpn_spceinit_vald, cmpn_neibhood_vald, cmpn_sparsity, cmpn_percentl, miniter=10, maxiter=100, tolfun=0.001)
            detection_success = 1
            print('NMF time: %.1f minutes.\n' % ((time.time() - tic) / 60))
            break
        except ValueError:
            # failure: drop the weakest peaks and retry with fewer components
            detection_success = 0
            voxl_fraction *= 0.97
            thr_powr_peak = np.percentile(voxl_powr_peak, 100 - voxl_fraction)
            peak_valdlidx = np.where(voxl_powr_peak > thr_powr_peak)[0]
    # persist detected cells (positions, spatial weights, scaled timeseries)
    with h5py.File(cell_dir + '/Block' + str(blok_i).zfill(5) + '.hdf5', 'w') as file_handle:
        if detection_success:
            for cmpn_i in range(cmpn_nmbr):
                cmpn_lidx_i = np.nonzero(cmpn_spcesers_vald[:, cmpn_i])[0]
                cmpn_position_i = voxl_position_vald[cmpn_lidx_i]
                cmpn_spcesers_i = cmpn_spcesers_vald[cmpn_lidx_i, cmpn_i]
                # rescale the component timeseries to the mean image intensity
                mean_spcevoxl_i = bimage_mean.value[list(zip(*cmpn_position_i))]
                mean_i = np.sum(mean_spcevoxl_i * cmpn_spcesers_i) / np.sum(cmpn_spcesers_i)
                cmpn_timesers_i = cmpn_timesers_vald[cmpn_i]
                cmpn_timesers_i = cmpn_timesers_i * mean_i / np.mean(cmpn_timesers_i)
                hdf5_dir = '/cmpn/' + str(cmpn_i).zfill(5)
                file_handle[hdf5_dir + '/cmpn_position'] = cmpn_position_i
                file_handle[hdf5_dir + '/cmpn_spcesers'] = cmpn_spcesers_i
                file_handle[hdf5_dir + '/cmpn_timesers'] = cmpn_timesers_i
            file_handle['cmpn_nmbr'] = cmpn_nmbr
            file_handle['success'] = 1


def nnmf_sparse(V0, XYZ0, W0, B0, Sparsity0, Percentl0, tolfun=0.0001, miniter=10, maxiter=100, verbosity=1, time_mean=1.0):
    """Sparse NMF V0 ~ W.H by alternating least squares with per-component
    sparsity and spatial-support constraints.

    Returns (W, H, dnorm). CAUTION: V0 is normalized in place to save
    memory. Bug fix: the previous revision mixed lowercase and uppercase
    local names (v0/V0, v/V, h/H, w/W, ...), raising NameError on first
    use; names are now consistent throughout.
    """
    print('Setting number of threads in nnmf_sparse to %d.\n' % nthread)
    os.environ['MKL_NUM_THREADS'] = str(nthread)
    # CAUTION: input normalized in place to save memory
    V0 *= time_mean / V0.mean(1)[:, None]
    V = V0[:, dt_range].astype(float)  # working copy of the fit window
    XYZ = XYZ0.astype(int)
    W = W0.astype(float)
    B = B0.astype(bool)
    Sparsity = Sparsity0.copy()
    Percentl = Percentl0.copy()
    (n, t) = V.shape
    (n_, c) = W.shape
    assert n_ == n
    H = np.zeros((c, t))
    dnorm_prev = np.full(2, np.inf)  # last two residual norms
    for iter_i in range(maxiter):
        H_ = H.copy()  # previous timeseries, for the convergence test
        # alternating nonnegative least squares
        H = np.maximum(linalg.lstsq(W, V)[0], 0)
        H *= time_mean / H.mean(1)[:, None]  # normalize component timeseries
        W = np.maximum(linalg.lstsq(V.T, H.T)[0], 0)
        W[np.logical_not(B)] = 0  # restrict components to their neighborhoods
        for ci in range(c):
            W_ci = W[B[:, ci], ci]
            if any(W_ci) & ((Sparsity[ci] > 0) | (Percentl[ci] > 0)):
                # component coordinates relative to their bounding box
                XYZ_ci = XYZ[B[:, ci]] - XYZ[B[:, ci]].min(0)
                # enforce the component's sparseness target
                W_ci = projection(W_ci, Sparsity[ci], at_least_as_sparse=True)
                # keep only the largest connected piece of the support
                L_ci = np.zeros(np.ptp(XYZ_ci, 0) + 1, dtype=bool)
                L_ci[list(zip(*XYZ_ci))] = W_ci > 0
                L_ci = measure.label(L_ci, connectivity=3)
                lci_size = np.bincount(L_ci[L_ci.nonzero()])
                W_ci[L_ci[list(zip(*XYZ_ci))] != np.argmax(lci_size)] = 0
                W[B[:, ci], ci] = W_ci
        # residual norm and timeseries drift; stop once both stabilize
        dnorm = np.sqrt(np.mean(np.square(V - W.dot(H)))) / time_mean
        diffh = np.sqrt(np.mean(np.square(H - H_))) / time_mean
        if (dnorm_prev.max(0) - dnorm < tolfun) & (diffh < tolfun):
            if iter_i >= miniter:
                break
        dnorm_prev[1] = dnorm_prev[0]
        dnorm_prev[0] = dnorm
        if verbosity:
            print((iter_i, dnorm, diffh))
    # final regression against the full (unwindowed) timeseries
    H = np.maximum(linalg.lstsq(W, V0)[0], 0)
    H *= time_mean / H.mean(1)[:, None]
    return (W, H, dnorm)


def projection(Si, s, at_least_as_sparse=False):
    """Project 1-D vector *Si* onto the nonnegative set with sparseness *s*.

    Holds the l2-norm fixed while reducing the l1-norm to the target
    (appears to follow the Hoyer-2004 sparseness projection). With
    s <= 0 only nonnegativity is enforced; with *at_least_as_sparse* the
    input is returned unchanged if it already meets the target.
    Bug fix: the previous revision assigned lowercase locals (s, l2, z,
    m, p, a, b, alph) while reading their uppercase counterparts — it
    even clobbered the sparsity argument *s* — raising NameError and
    corrupting the math; names are now consistent.
    """
    assert Si.ndim == 1
    S = np.copy(Si)  # never mutate the caller's vector
    if s <= 0:
        return np.maximum(S, 0)  # nonnegativity only
    d = S.size
    L2 = np.sqrt(np.sum(np.square(S)))  # fixed l2-norm
    L1 = L2 * (np.sqrt(d) * (1 - s) + s)  # l1-norm implied by sparseness s
    if at_least_as_sparse:
        if L1 >= np.sum(np.abs(S)):
            return S
    Z = np.zeros(S.shape, dtype=bool)  # coordinates pinned at zero
    negatives = True
    while negatives:
        # pin negative coordinates at zero and re-project to the l1 plane
        Z = Z | (S < 0)
        S[Z] = 0
        S += (L1 - np.sum(S)) / (d - np.sum(Z))
        S[Z] = 0
        # midpoint of the l1 hyperplane restricted to the free coordinates
        M = np.tile(L1 / (d - np.sum(Z)), d)
        M[Z] = 0
        P = S - M
        # solve ||M + Alph*P||_2 = L2 for the step size Alph
        A = np.sum(P * P)
        B = 2 * np.sum(P * M)
        C = np.sum(M * M) - L2 ** 2
        Alph = (-B + np.real(np.sqrt(B ** 2 - 4 * A * C))) / (2 * A)
        S = M + Alph * P
        negatives = np.any(S < 0)
    return S
add_library('video')

# Processing (Python mode) webcam sketch: mirrors the capture feed and
# toggles the backdrop between dark and light with the 'd' key.
cam = None
VID_W = 640.0
VID_H = 480.0
darkmode = False


def _paint_backdrop():
    # Dark mode paints black (0), light mode paints white (255).
    background(0 if darkmode else 255)


def setup():
    global cam, cx, cy
    size(640, 480)
    _paint_backdrop()
    stroke(23, 202, 230)
    imageMode(CENTER)
    cam = Capture(this, 640, 480)
    cam.start()


def genart():
    # NOTE(review): `dx` is undefined here — dead code, never invoked.
    line(dx, cy - VID_H / 2, dx, cy + VID_H / 2)


def draw():
    cx = width / 2
    cy = height / 2
    image(cam, cx, cy, VID_W, VID_H)
    #genart()


def captureEvent(cpt):
    # Pull the next frame whenever the camera has one ready.
    cpt.read()


def keyPressed():
    global darkmode
    if key == 'd':
        darkmode = not darkmode
        _paint_backdrop()
add_library('video')

# Processing (Python mode) webcam sketch.
c = None
w = 640.0
h = 480.0
darkmode = False


def setup():
    global c, cx, cy
    size(640, 480)
    background(0 if darkmode else 255)
    stroke(23, 202, 230)
    # Bug fix: Processing's API is camelCase — `image_mode`/`capture`
    # are undefined names in Python mode.
    imageMode(CENTER)
    c = Capture(this, 640, 480)
    c.start()


def genart():
    # NOTE(review): `dx` is undefined — this helper is never called.
    line(dx, cy - h / 2, dx, cy + h / 2)


def draw():
    cx = width / 2
    cy = height / 2
    image(c, cx, cy, w, h)


# Bug fix: Processing only invokes callbacks with the exact names
# `captureEvent` and `keyPressed`; the snake_case renames silently
# disabled camera updates and the dark-mode toggle.
def captureEvent(cpt):
    cpt.read()


def keyPressed():
    global darkmode
    if key == 'd':
        darkmode = not darkmode
        background(0 if darkmode else 255)
# # PySNMP MIB module CLNS-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CLNS-MIB # Produced by pysmi-0.3.4 at Wed May 1 12:25:07 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") ConstraintsIntersection, ValueRangeConstraint, ConstraintsUnion, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsIntersection", "ValueRangeConstraint", "ConstraintsUnion", "ValueSizeConstraint", "SingleValueConstraint") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") Counter32, experimental, TimeTicks, iso, ModuleIdentity, MibIdentifier, ObjectIdentity, Counter64, NotificationType, Unsigned32, Bits, Gauge32, Integer32, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "experimental", "TimeTicks", "iso", "ModuleIdentity", "MibIdentifier", "ObjectIdentity", "Counter64", "NotificationType", "Unsigned32", "Bits", "Gauge32", "Integer32", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn") DisplayString, TextualConvention, PhysAddress = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention", "PhysAddress") clns = MibIdentifier((1, 3, 6, 1, 3, 1)) class ClnpAddress(OctetString): subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(1, 21) clnp = MibIdentifier((1, 3, 6, 1, 3, 1, 1)) error = MibIdentifier((1, 3, 6, 1, 3, 1, 2)) echo = MibIdentifier((1, 3, 6, 1, 3, 1, 3)) es_is = MibIdentifier((1, 3, 6, 1, 3, 1, 4)).setLabel("es-is") clnpForwarding = MibScalar((1, 3, 6, 1, 3, 1, 1, 1), 
Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("is", 1), ("es", 2)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpForwarding.setStatus('mandatory') if mibBuilder.loadTexts: clnpForwarding.setDescription('The indication of whether this entity is active as an intermediate or end system. Only intermediate systems will forward PDUs onward that are not addressed to them.') clnpDefaultLifeTime = MibScalar((1, 3, 6, 1, 3, 1, 1, 2), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpDefaultLifeTime.setStatus('mandatory') if mibBuilder.loadTexts: clnpDefaultLifeTime.setDescription('The default value inserted into the Lifetime field of the CLNP PDU header of PDUs sourced by this entity.') clnpInReceives = MibScalar((1, 3, 6, 1, 3, 1, 1, 3), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInReceives.setStatus('mandatory') if mibBuilder.loadTexts: clnpInReceives.setDescription('The total number of input PDUs received from all connected network interfaces running CLNP, including errors.') clnpInHdrErrors = MibScalar((1, 3, 6, 1, 3, 1, 1, 4), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInHdrErrors.setStatus('mandatory') if mibBuilder.loadTexts: clnpInHdrErrors.setDescription('The number of input PDUs discarded due to errors in the CLNP header, including bad checksums, version mismatch, lifetime exceeded, errors discovered in processing options, etc.') clnpInAddrErrors = MibScalar((1, 3, 6, 1, 3, 1, 1, 5), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInAddrErrors.setStatus('mandatory') if mibBuilder.loadTexts: clnpInAddrErrors.setDescription("The number of input PDUs discarded because the NSAP address in the CLNP header's destination field was not a valid NSAP to be received at this entity. This count includes addresses not understood. 
For end systems, this is a count of PDUs which arrived with a destination NSAP which was not local.") clnpForwPDUs = MibScalar((1, 3, 6, 1, 3, 1, 1, 6), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpForwPDUs.setStatus('mandatory') if mibBuilder.loadTexts: clnpForwPDUs.setDescription('The number of input PDUs for which this entity was not the final destination and which an attempt was made to forward them onward.') clnpInUnknownNLPs = MibScalar((1, 3, 6, 1, 3, 1, 1, 7), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInUnknownNLPs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInUnknownNLPs.setDescription('The number of locally-addressed PDUs successfully received but discarded because the network layer protocol was unknown or unsupported (e.g., not CLNP or ES-IS).') clnpInUnknownULPs = MibScalar((1, 3, 6, 1, 3, 1, 1, 8), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInUnknownULPs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInUnknownULPs.setDescription('The number of locally-addressed PDUs successfully received but discarded because the upper layer protocol was unknown or unsupported (e.g., not TP4).') clnpInDiscards = MibScalar((1, 3, 6, 1, 3, 1, 1, 9), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInDiscards.setStatus('mandatory') if mibBuilder.loadTexts: clnpInDiscards.setDescription('The number of input CLNP PDUs for which no problems were encountered to prevent their continued processing, but were discarded (e.g., for lack of buffer space). 
Note that this counter does not include any PDUs discarded while awaiting re-assembly.') clnpInDelivers = MibScalar((1, 3, 6, 1, 3, 1, 1, 10), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInDelivers.setStatus('mandatory') if mibBuilder.loadTexts: clnpInDelivers.setDescription('The total number of input PDUs successfully delivered to the CLNS transport user.') clnpOutRequests = MibScalar((1, 3, 6, 1, 3, 1, 1, 11), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpOutRequests.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutRequests.setDescription('The total number of CLNP PDUs which local CLNS user protocols supplied to CLNP for transmission requests. This counter does not include any PDUs counted in clnpForwPDUs.') clnpOutDiscards = MibScalar((1, 3, 6, 1, 3, 1, 1, 12), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpOutDiscards.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutDiscards.setDescription('The number of output CLNP PDUs for which no other problem was encountered to prevent their transmission but were discarded (e.g., for lack of buffer space). Note this counter includes PDUs counted in clnpForwPDUs.') clnpOutNoRoutes = MibScalar((1, 3, 6, 1, 3, 1, 1, 13), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpOutNoRoutes.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutNoRoutes.setDescription('The number of CLNP PDUs discarded because no route could be found to transmit them to their destination. 
This counter includes any PDUs counted in clnpForwPDUs.') clnpReasmTimeout = MibScalar((1, 3, 6, 1, 3, 1, 1, 14), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpReasmTimeout.setStatus('mandatory') if mibBuilder.loadTexts: clnpReasmTimeout.setDescription('The maximum number of seconds which received segments are held while they are awaiting reassembly at this entity.') clnpReasmReqds = MibScalar((1, 3, 6, 1, 3, 1, 1, 15), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpReasmReqds.setStatus('mandatory') if mibBuilder.loadTexts: clnpReasmReqds.setDescription('The number of CLNP segments received which needed to be reassembled at this entity.') clnpReasmOKs = MibScalar((1, 3, 6, 1, 3, 1, 1, 16), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpReasmOKs.setStatus('mandatory') if mibBuilder.loadTexts: clnpReasmOKs.setDescription('The number of CLNP PDUs successfully re-assembled at this entity.') clnpReasmFails = MibScalar((1, 3, 6, 1, 3, 1, 1, 17), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpReasmFails.setStatus('mandatory') if mibBuilder.loadTexts: clnpReasmFails.setDescription('The number of failures detected by the CLNP reassembly algorithm (for any reason: timed out, buffer size, etc).') clnpSegOKs = MibScalar((1, 3, 6, 1, 3, 1, 1, 18), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpSegOKs.setStatus('mandatory') if mibBuilder.loadTexts: clnpSegOKs.setDescription('The number of CLNP PDUs that have been successfully segmented at this entity.') clnpSegFails = MibScalar((1, 3, 6, 1, 3, 1, 1, 19), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpSegFails.setStatus('mandatory') if mibBuilder.loadTexts: clnpSegFails.setDescription('The number of CLNP PDUs that have been discarded because they needed to be fragmented at this entity but could not.') clnpSegCreates = MibScalar((1, 3, 6, 1, 3, 1, 1, 20), Counter32()).setMaxAccess("readonly") if 
mibBuilder.loadTexts: clnpSegCreates.setStatus('mandatory') if mibBuilder.loadTexts: clnpSegCreates.setDescription('The number of CLNP PDU segments that have been generated as a result of segmentation at this entity.') clnpInOpts = MibScalar((1, 3, 6, 1, 3, 1, 1, 25), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInOpts.setStatus('mandatory') if mibBuilder.loadTexts: clnpInOpts.setDescription('The number of CLNP PDU segments that have been input with options at this entity.') clnpOutOpts = MibScalar((1, 3, 6, 1, 3, 1, 1, 26), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpOutOpts.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutOpts.setDescription('The number of CLNP PDU segments that have been generated with options by this entity.') clnpRoutingDiscards = MibScalar((1, 3, 6, 1, 3, 1, 1, 27), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpRoutingDiscards.setStatus('mandatory') if mibBuilder.loadTexts: clnpRoutingDiscards.setDescription('The number of routing entries which were chosen to be discarded even though they are valid. One possible reason for discarding such an entry could be to free-up buffer space for other routing entries.') clnpAddrTable = MibTable((1, 3, 6, 1, 3, 1, 1, 21), ) if mibBuilder.loadTexts: clnpAddrTable.setStatus('mandatory') if mibBuilder.loadTexts: clnpAddrTable.setDescription("The table of addressing information relevant to this entity's CLNP addresses. 
") clnpAddrEntry = MibTableRow((1, 3, 6, 1, 3, 1, 1, 21, 1), ).setIndexNames((0, "CLNS-MIB", "clnpAdEntAddr")) if mibBuilder.loadTexts: clnpAddrEntry.setStatus('mandatory') if mibBuilder.loadTexts: clnpAddrEntry.setDescription("The addressing information for one of this entity's CLNP addresses.") clnpAdEntAddr = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 21, 1, 1), ClnpAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpAdEntAddr.setStatus('mandatory') if mibBuilder.loadTexts: clnpAdEntAddr.setDescription("The CLNP address to which this entry's addressing information pertains.") clnpAdEntIfIndex = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 21, 1, 2), Integer32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpAdEntIfIndex.setStatus('mandatory') if mibBuilder.loadTexts: clnpAdEntIfIndex.setDescription('The index value which uniquely identifies the interface to which this entry is applicable. The interface identified by a particular value of this index is the same interface as identified by the same value of ifIndex.') clnpAdEntReasmMaxSize = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 21, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpAdEntReasmMaxSize.setStatus('mandatory') if mibBuilder.loadTexts: clnpAdEntReasmMaxSize.setDescription('The size of the largest CLNP PDU which this entity can re-assemble from incoming CLNP segmented PDUs received on this interface.') clnpRoutingTable = MibTable((1, 3, 6, 1, 3, 1, 1, 22), ) if mibBuilder.loadTexts: clnpRoutingTable.setStatus('mandatory') if mibBuilder.loadTexts: clnpRoutingTable.setDescription("This entity's CLNP routing table.") clnpRouteEntry = MibTableRow((1, 3, 6, 1, 3, 1, 1, 22, 1), ).setIndexNames((0, "CLNS-MIB", "clnpRouteDest")) if mibBuilder.loadTexts: clnpRouteEntry.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteEntry.setDescription('A route to a particular destination.') clnpRouteDest = MibTableColumn((1, 3, 6, 1, 3, 1, 
1, 22, 1, 1), ClnpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteDest.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteDest.setDescription('The destination CLNP address of this route.') clnpRouteIfIndex = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 2), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteIfIndex.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteIfIndex.setDescription('The index value which uniquely identifies the local interface through which the next hop of this route should be reached. The interface identified by a particular value of this index is the same as identified by the same value of ifIndex.') clnpRouteMetric1 = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 3), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteMetric1.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteMetric1.setDescription("The primary routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. If this metric is not used, its value should be set to -1.") clnpRouteMetric2 = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 4), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteMetric2.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteMetric2.setDescription("An alternate routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. If this metric is not used, its value should be set to -1.") clnpRouteMetric3 = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 5), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteMetric3.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteMetric3.setDescription("An alternate routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. 
If this metric is not used, its value should be set to -1.") clnpRouteMetric4 = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 6), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteMetric4.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteMetric4.setDescription("An alternate routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. If this metric is not used, its value should be set to -1.") clnpRouteNextHop = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 7), ClnpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteNextHop.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteNextHop.setDescription('The CLNP address of the next hop of this route.') clnpRouteType = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("direct", 3), ("remote", 4)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteType.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteType.setDescription('The type of route. Setting this object to the value invalid(2) has the effect of invaliding the corresponding entry in the clnpRoutingTable. That is, it effectively dissasociates the destination identified with said entry from the route identified with said entry. It is an implementation-specific matter as to whether the agent removes an invalidated entry from the table. Accordingly, management stations must be prepared to receive tabular information from agents that corresponds to entries not currently in use. 
Proper interpretation of such entries requires examination of the relevant clnpRouteType object.') clnpRouteProto = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 9, 11, 12, 13, 14))).clone(namedValues=NamedValues(("other", 1), ("local", 2), ("netmgmt", 3), ("is-is", 9), ("ciscoIgrp", 11), ("bbnSpfIgp", 12), ("ospf", 13), ("bgp", 14)))).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpRouteProto.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteProto.setDescription('The routing mechanism via which this route was learned. Inclusion of values for gateway routing protocols is not intended to imply that hosts should support those protocols.') clnpRouteAge = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 10), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteAge.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteAge.setDescription("The number of seconds since this route was last updated or otherwise determined to be correct. Note that no semantics of `too old' can be implied except through knowledge of the routing protocol by which the route was learned.") clnpRouteMetric5 = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 11), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpRouteMetric5.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteMetric5.setDescription("An alternate routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. 
If this metric is not used, its value should be set to -1.") clnpRouteInfo = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 22, 1, 12), ObjectIdentifier()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpRouteInfo.setStatus('mandatory') if mibBuilder.loadTexts: clnpRouteInfo.setDescription("A reference to MIB definitions specific to the particular routing protocol which is responsible for this route, as determined by the value specified in the route's clnpRouteProto value. If this information is not present, its value should be set to the OBJECT IDENTIFIER { 0 0 }, which is a syntatically valid object identifier, and any conformant implementation of ASN.1 and BER must be able to generate and recognize this value.") clnpNetToMediaTable = MibTable((1, 3, 6, 1, 3, 1, 1, 23), ) if mibBuilder.loadTexts: clnpNetToMediaTable.setStatus('mandatory') if mibBuilder.loadTexts: clnpNetToMediaTable.setDescription('The CLNP Address Translation table used for mapping from CLNP addresses to physical addresses.') clnpNetToMediaEntry = MibTableRow((1, 3, 6, 1, 3, 1, 1, 23, 1), ).setIndexNames((0, "CLNS-MIB", "clnpNetToMediaIfIndex"), (0, "CLNS-MIB", "clnpNetToMediaNetAddress")) if mibBuilder.loadTexts: clnpNetToMediaEntry.setStatus('mandatory') if mibBuilder.loadTexts: clnpNetToMediaEntry.setDescription("Each entry contains one CLNP address to `physical' address equivalence.") clnpNetToMediaIfIndex = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 23, 1, 1), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpNetToMediaIfIndex.setStatus('mandatory') if mibBuilder.loadTexts: clnpNetToMediaIfIndex.setDescription("The interface on which this entry's equivalence is effective. 
The interface identified by a particular value of this index is the same interface as identified by the same value of ifIndex.") clnpNetToMediaPhysAddress = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 23, 1, 2), PhysAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpNetToMediaPhysAddress.setStatus('mandatory') if mibBuilder.loadTexts: clnpNetToMediaPhysAddress.setDescription("The media-dependent `physical' address.") clnpNetToMediaNetAddress = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 23, 1, 3), ClnpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpNetToMediaNetAddress.setStatus('mandatory') if mibBuilder.loadTexts: clnpNetToMediaNetAddress.setDescription("The CLNP address corresponding to the media- dependent `physical' address.") clnpNetToMediaType = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 23, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("dynamic", 3), ("static", 4)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpNetToMediaType.setStatus('mandatory') if mibBuilder.loadTexts: clnpNetToMediaType.setDescription('The type of mapping. Setting this object to the value invalid(2) has the effect of invalidating the corresponding entry in the clnpNetToMediaTable. That is, it effectively dissassociates the interface identified with said entry from the mapping identified with said entry. It is an implementation-specific matter as to whether the agent removes an invalidated entry from the table. Accordingly, management stations must be prepared to receive tabular information from agents that corresponds to entries not currently in use. 
Proper interpretation of such entries requires examination of the relevant clnpNetToMediaType object.') clnpNetToMediaAge = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 23, 1, 5), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpNetToMediaAge.setStatus('mandatory') if mibBuilder.loadTexts: clnpNetToMediaAge.setDescription("The number of seconds since this entry was last updated or otherwise determined to be correct. Note that no semantics of `too old' can be implied except through knowledge of the type of entry.") clnpNetToMediaHoldTime = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 23, 1, 6), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpNetToMediaHoldTime.setStatus('mandatory') if mibBuilder.loadTexts: clnpNetToMediaHoldTime.setDescription('The time in seconds this entry will be valid. Static entries should always report this field as -1.') clnpMediaToNetTable = MibTable((1, 3, 6, 1, 3, 1, 1, 24), ) if mibBuilder.loadTexts: clnpMediaToNetTable.setStatus('mandatory') if mibBuilder.loadTexts: clnpMediaToNetTable.setDescription('The CLNP Address Translation table used for mapping from physical addresses to CLNP addresses.') clnpMediaToNetEntry = MibTableRow((1, 3, 6, 1, 3, 1, 1, 24, 1), ).setIndexNames((0, "CLNS-MIB", "clnpMediaToNetIfIndex"), (0, "CLNS-MIB", "clnpMediaToNetPhysAddress")) if mibBuilder.loadTexts: clnpMediaToNetEntry.setStatus('mandatory') if mibBuilder.loadTexts: clnpMediaToNetEntry.setDescription("Each entry contains on ClnpAddress to `physical' address equivalence.") clnpMediaToNetIfIndex = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 24, 1, 1), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpMediaToNetIfIndex.setStatus('mandatory') if mibBuilder.loadTexts: clnpMediaToNetIfIndex.setDescription("The interface on which this entry's equivalence is effective. 
The interface identified by a particular value of this index is the same interface as identified by the same value of ifIndex.") clnpMediaToNetAddress = MibScalar((1, 3, 6, 1, 3, 1, 1, 24, 1, 2), ClnpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpMediaToNetAddress.setStatus('mandatory') if mibBuilder.loadTexts: clnpMediaToNetAddress.setDescription("The ClnpAddress corresponding to the media- dependent `physical' address.") clnpMediaToNetPhysAddress = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 24, 1, 3), PhysAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpMediaToNetPhysAddress.setStatus('mandatory') if mibBuilder.loadTexts: clnpMediaToNetPhysAddress.setDescription("The media-dependent `physical' address.") clnpMediaToNetType = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 24, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("dynamic", 3), ("static", 4)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpMediaToNetType.setStatus('mandatory') if mibBuilder.loadTexts: clnpMediaToNetType.setDescription('The type of mapping. Setting this object to the value invalid(2) has the effect of invalidating the corresponding entry in the clnpMediaToNetTable. That is, it effectively dissassociates the interface identified with said entry from the mapping identified with said entry. It is an implementation-specific matter as to whether the agent removes an invalidated entry from the table. Accordingly, management stations must be prepared to receive tabular information from agents that corresponds to entries not currently in use. 
Proper interpretation of such entries requires examination of the relevant clnpMediaToNetType object.') clnpMediaToNetAge = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 24, 1, 5), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpMediaToNetAge.setStatus('mandatory') if mibBuilder.loadTexts: clnpMediaToNetAge.setDescription("The number of seconds since this entry was last updated or otherwise determined to be correct. Note that no semantics of `too old' can be implied except through knowledge of the type of entry.") clnpMediaToNetHoldTime = MibTableColumn((1, 3, 6, 1, 3, 1, 1, 24, 1, 6), Integer32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: clnpMediaToNetHoldTime.setStatus('mandatory') if mibBuilder.loadTexts: clnpMediaToNetHoldTime.setDescription('The time in seconds this entry will be valid. Static entries should always report this field as -1.') clnpInErrors = MibScalar((1, 3, 6, 1, 3, 1, 2, 1), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrors.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrors.setDescription('The number of CLNP Error PDUs received by this entity.') clnpOutErrors = MibScalar((1, 3, 6, 1, 3, 1, 2, 2), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpOutErrors.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrors.setDescription('The number of CLNP Error PDUs sent by this entity.') clnpInErrUnspecs = MibScalar((1, 3, 6, 1, 3, 1, 2, 3), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrUnspecs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnspecs.setDescription('The number of unspecified CLNP Error PDUs received by this entity.') clnpInErrProcs = MibScalar((1, 3, 6, 1, 3, 1, 2, 4), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrProcs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrProcs.setDescription('The number of protocol procedure CLNP Error PDUs received by this entity.') clnpInErrCksums = 
MibScalar((1, 3, 6, 1, 3, 1, 2, 5), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrCksums.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrCksums.setDescription('The number of checksum CLNP Error PDUs received by this entity.') clnpInErrCongests = MibScalar((1, 3, 6, 1, 3, 1, 2, 6), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrCongests.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrCongests.setDescription('The number of congestion drop CLNP Error PDUs received by this entity.') clnpInErrHdrs = MibScalar((1, 3, 6, 1, 3, 1, 2, 7), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrHdrs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrHdrs.setDescription('The number of header syntax CLNP Error PDUs received by this entity.') clnpInErrSegs = MibScalar((1, 3, 6, 1, 3, 1, 2, 8), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrSegs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSegs.setDescription('The number of segmentation disallowed CLNP Error PDUs received by this entity.') clnpInErrIncomps = MibScalar((1, 3, 6, 1, 3, 1, 2, 9), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrIncomps.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrIncomps.setDescription('The number of incomplete PDU CLNP Error PDUs received by this entity.') clnpInErrDups = MibScalar((1, 3, 6, 1, 3, 1, 2, 10), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrDups.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrDups.setDescription('The number of duplicate option CLNP Error PDUs received by this entity.') clnpInErrUnreachDsts = MibScalar((1, 3, 6, 1, 3, 1, 2, 11), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrUnreachDsts.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnreachDsts.setDescription('The number of unreachable destination CLNP Error PDUs received by this entity.') 
clnpInErrUnknownDsts = MibScalar((1, 3, 6, 1, 3, 1, 2, 12), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrUnknownDsts.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnknownDsts.setDescription('The number of unknown destination CLNP Error PDUs received by this entity.') clnpInErrSRUnspecs = MibScalar((1, 3, 6, 1, 3, 1, 2, 13), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrSRUnspecs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSRUnspecs.setDescription('The number of unspecified source route CLNP Error PDUs received by this entity.') clnpInErrSRSyntaxes = MibScalar((1, 3, 6, 1, 3, 1, 2, 14), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrSRSyntaxes.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSRSyntaxes.setDescription('The number of source route syntax CLNP Error PDUs received by this entity.') clnpInErrSRUnkAddrs = MibScalar((1, 3, 6, 1, 3, 1, 2, 15), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrSRUnkAddrs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSRUnkAddrs.setDescription('The number of source route unknown address CLNP Error PDUs received by this entity.') clnpInErrSRBadPaths = MibScalar((1, 3, 6, 1, 3, 1, 2, 16), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrSRBadPaths.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSRBadPaths.setDescription('The number of source route bad path CLNP Error PDUs received by this entity.') clnpInErrHops = MibScalar((1, 3, 6, 1, 3, 1, 2, 17), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrHops.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrHops.setDescription('The number of hop count exceeded CLNP Error PDUs received by this entity.') clnpInErrHopReassms = MibScalar((1, 3, 6, 1, 3, 1, 2, 18), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrHopReassms.setStatus('mandatory') if 
mibBuilder.loadTexts: clnpInErrHopReassms.setDescription('The number of hop count exceeded while reassembling CLNP Error PDUs received by this entity.') clnpInErrUnsOptions = MibScalar((1, 3, 6, 1, 3, 1, 2, 19), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrUnsOptions.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsOptions.setDescription('The number of unsupported option CLNP Error PDUs received by this entity.') clnpInErrUnsVersions = MibScalar((1, 3, 6, 1, 3, 1, 2, 20), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrUnsVersions.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsVersions.setDescription('The number of version mismatch CLNP Error PDUs received by this entity.') clnpInErrUnsSecurities = MibScalar((1, 3, 6, 1, 3, 1, 2, 21), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrUnsSecurities.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsSecurities.setDescription('The number of unsupported security option CLNP Error PDUs received by this entity.') clnpInErrUnsSRs = MibScalar((1, 3, 6, 1, 3, 1, 2, 22), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrUnsSRs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsSRs.setDescription('The number of unsupported source route option CLNP Error PDUs received by this entity.') clnpInErrUnsRRs = MibScalar((1, 3, 6, 1, 3, 1, 2, 23), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrUnsRRs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsRRs.setDescription('The number of unsupported record route option CLNP Error PDUs received by this entity.') clnpInErrInterferences = MibScalar((1, 3, 6, 1, 3, 1, 2, 24), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: clnpInErrInterferences.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrInterferences.setDescription('The number of reassembly interference CLNP Error PDUs received by this entity.') 
# --- CLNP Error-PDU output counters (error subtree 1.3.6.1.3.1.2.25-46) ------
# Generated pysnmp pattern: each scalar is a read-only Counter32 registered at
# its OID; status and description from the CLNS-MIB module text are attached
# only when the MIB builder was asked to load texts.
clnpOutErrUnspecs = MibScalar((1, 3, 6, 1, 3, 1, 2, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrUnspecs.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrUnspecs.setDescription('The number of unspecified CLNP Error PDUs sent by this entity.')
clnpOutErrProcs = MibScalar((1, 3, 6, 1, 3, 1, 2, 26), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrProcs.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrProcs.setDescription('The number of protocol procedure CLNP Error PDUs sent by this entity.')
clnpOutErrCksums = MibScalar((1, 3, 6, 1, 3, 1, 2, 27), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrCksums.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrCksums.setDescription('The number of checksum CLNP Error PDUs sent by this entity.')
clnpOutErrCongests = MibScalar((1, 3, 6, 1, 3, 1, 2, 28), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrCongests.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrCongests.setDescription('The number of congestion drop CLNP Error PDUs sent by this entity.')
clnpOutErrHdrs = MibScalar((1, 3, 6, 1, 3, 1, 2, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrHdrs.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrHdrs.setDescription('The number of header syntax CLNP Error PDUs sent by this entity.')
clnpOutErrSegs = MibScalar((1, 3, 6, 1, 3, 1, 2, 30), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrSegs.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrSegs.setDescription('The number of segmentation disallowed CLNP Error PDUs sent by this entity.')
clnpOutErrIncomps = MibScalar((1, 3, 6, 1, 3, 1, 2, 31), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrIncomps.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrIncomps.setDescription('The number of incomplete PDU CLNP Error PDUs sent by this entity.')
clnpOutErrDups = MibScalar((1, 3, 6, 1, 3, 1, 2, 32), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrDups.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrDups.setDescription('The number of duplicate option CLNP Error PDUs sent by this entity.')
clnpOutErrUnreachDsts = MibScalar((1, 3, 6, 1, 3, 1, 2, 33), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrUnreachDsts.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrUnreachDsts.setDescription('The number of unreachable destination CLNP Error PDUs sent by this entity.')
clnpOutErrUnknownDsts = MibScalar((1, 3, 6, 1, 3, 1, 2, 34), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrUnknownDsts.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrUnknownDsts.setDescription('The number of unknown destination CLNP Error PDUs sent by this entity.')
clnpOutErrSRUnspecs = MibScalar((1, 3, 6, 1, 3, 1, 2, 35), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrSRUnspecs.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrSRUnspecs.setDescription('The number of unspecified source route CLNP Error PDUs sent by this entity.')
clnpOutErrSRSyntaxes = MibScalar((1, 3, 6, 1, 3, 1, 2, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrSRSyntaxes.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrSRSyntaxes.setDescription('The number of source route syntax CLNP Error PDUs sent by this entity.')
clnpOutErrSRUnkAddrs = MibScalar((1, 3, 6, 1, 3, 1, 2, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrSRUnkAddrs.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrSRUnkAddrs.setDescription('The number of source route unknown address CLNP Error PDUs sent by this entity.')
clnpOutErrSRBadPaths = MibScalar((1, 3, 6, 1, 3, 1, 2, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrSRBadPaths.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrSRBadPaths.setDescription('The number of source route bad path CLNP Error PDUs sent by this entity.')
clnpOutErrHops = MibScalar((1, 3, 6, 1, 3, 1, 2, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrHops.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrHops.setDescription('The number of hop count exceeded CLNP Error PDUs sent by this entity.')
clnpOutErrHopReassms = MibScalar((1, 3, 6, 1, 3, 1, 2, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrHopReassms.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrHopReassms.setDescription('The number of hop count exceeded while reassembling CLNP Error PDUs sent by this entity.')
clnpOutErrUnsOptions = MibScalar((1, 3, 6, 1, 3, 1, 2, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrUnsOptions.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrUnsOptions.setDescription('The number of unsupported option CLNP Error PDUs sent by this entity.')
clnpOutErrUnsVersions = MibScalar((1, 3, 6, 1, 3, 1, 2, 42), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrUnsVersions.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrUnsVersions.setDescription('The number of version mismatch CLNP Error PDUs sent by this entity.')
clnpOutErrUnsSecurities = MibScalar((1, 3, 6, 1, 3, 1, 2, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrUnsSecurities.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrUnsSecurities.setDescription('The number of unsupported security option CLNP Error PDUs sent by this entity.')
clnpOutErrUnsSRs = MibScalar((1, 3, 6, 1, 3, 1, 2, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrUnsSRs.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrUnsSRs.setDescription('The number of unsupported source route option CLNP Error PDUs sent by this entity.')
clnpOutErrUnsRRs = MibScalar((1, 3, 6, 1, 3, 1, 2, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrUnsRRs.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrUnsRRs.setDescription('The number of unsupported record route option CLNP Error PDUs sent by this entity.')
clnpOutErrInterferences = MibScalar((1, 3, 6, 1, 3, 1, 2, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: clnpOutErrInterferences.setStatus('mandatory')
if mibBuilder.loadTexts: clnpOutErrInterferences.setDescription('The number of reassembly interference CLNP Error PDUs sent by this entity.')
# --- ES-IS PDU counters (es-is subtree 1.3.6.1.3.1.4) ------------------------
esisESHins = MibScalar((1, 3, 6, 1, 3, 1, 4, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: esisESHins.setStatus('mandatory')
if mibBuilder.loadTexts: esisESHins.setDescription('The number of ESH PDUs received by this entity.')
esisESHouts = MibScalar((1, 3, 6, 1, 3, 1, 4, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: esisESHouts.setStatus('mandatory')
if mibBuilder.loadTexts: esisESHouts.setDescription('The number of ESH PDUs sent by this entity.')
esisISHins = MibScalar((1, 3, 6, 1, 3, 1, 4, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: esisISHins.setStatus('mandatory')
if mibBuilder.loadTexts: esisISHins.setDescription('The number of ISH PDUs received by this entity.')
esisISHouts = MibScalar((1, 3, 6, 1, 3, 1, 4, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: esisISHouts.setStatus('mandatory')
if mibBuilder.loadTexts: esisISHouts.setDescription('The number of ISH PDUs sent by this entity.')
esisRDUins = MibScalar((1, 3, 6, 1, 3, 1, 4, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: esisRDUins.setStatus('mandatory')
if mibBuilder.loadTexts: esisRDUins.setDescription('The number of RDU PDUs received by this entity.')
esisRDUouts = MibScalar((1, 3, 6, 1, 3, 1, 4, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: esisRDUouts.setStatus('mandatory')
if mibBuilder.loadTexts: esisRDUouts.setDescription('The number of RDU PDUs sent by this entity.')
# Register every managed object of this module under the "CLNS-MIB" name so
# other MIB modules can import them through the same mibBuilder.
mibBuilder.exportSymbols("CLNS-MIB", clnpInErrSRBadPaths=clnpInErrSRBadPaths, clnpNetToMediaEntry=clnpNetToMediaEntry, esisESHouts=esisESHouts, clnpInOpts=clnpInOpts, clnpInErrProcs=clnpInErrProcs, clnpInHdrErrors=clnpInHdrErrors, clnpAdEntAddr=clnpAdEntAddr, clnpMediaToNetHoldTime=clnpMediaToNetHoldTime, clnpOutErrIncomps=clnpOutErrIncomps, clnpOutErrCksums=clnpOutErrCksums, clnpOutErrHopReassms=clnpOutErrHopReassms, clnpForwarding=clnpForwarding, clnpInErrUnsSRs=clnpInErrUnsSRs, clnpInErrUnsRRs=clnpInErrUnsRRs, clnpInErrSegs=clnpInErrSegs, clnpInErrHops=clnpInErrHops, clnpNetToMediaIfIndex=clnpNetToMediaIfIndex, clnpAdEntReasmMaxSize=clnpAdEntReasmMaxSize, clnpInErrSRSyntaxes=clnpInErrSRSyntaxes, clnpInDiscards=clnpInDiscards, clnpOutErrSRUnkAddrs=clnpOutErrSRUnkAddrs, clnpRouteEntry=clnpRouteEntry, clnpInErrUnsOptions=clnpInErrUnsOptions, clnpOutErrInterferences=clnpOutErrInterferences, clnpAddrTable=clnpAddrTable, clnpOutErrSRBadPaths=clnpOutErrSRBadPaths, clnpDefaultLifeTime=clnpDefaultLifeTime, clnpInErrUnknownDsts=clnpInErrUnknownDsts, clnpForwPDUs=clnpForwPDUs, clnpOutDiscards=clnpOutDiscards, clnpReasmOKs=clnpReasmOKs, clnpReasmReqds=clnpReasmReqds, clnpInErrUnsVersions=clnpInErrUnsVersions, error=error, clnpInErrInterferences=clnpInErrInterferences, clnpOutErrProcs=clnpOutErrProcs, clnpOutErrCongests=clnpOutErrCongests, clnpInUnknownNLPs=clnpInUnknownNLPs, clnpNetToMediaPhysAddress=clnpNetToMediaPhysAddress, clnpMediaToNetTable=clnpMediaToNetTable, clnpMediaToNetAddress=clnpMediaToNetAddress, clnpInErrCksums=clnpInErrCksums, clnpInErrors=clnpInErrors, clnpOutRequests=clnpOutRequests, clnpNetToMediaAge=clnpNetToMediaAge, clnpInErrHdrs=clnpInErrHdrs, clnpRouteMetric2=clnpRouteMetric2, clnpInErrHopReassms=clnpInErrHopReassms, clnpNetToMediaTable=clnpNetToMediaTable, clnpInDelivers=clnpInDelivers, clnpInErrUnsSecurities=clnpInErrUnsSecurities, clnpRouteMetric5=clnpRouteMetric5, clnpMediaToNetType=clnpMediaToNetType, clnpOutErrSRSyntaxes=clnpOutErrSRSyntaxes, clnpRouteAge=clnpRouteAge, clnpOutErrUnsSRs=clnpOutErrUnsSRs, clnpMediaToNetEntry=clnpMediaToNetEntry, clnpOutErrUnknownDsts=clnpOutErrUnknownDsts, clnpOutOpts=clnpOutOpts, clnpInAddrErrors=clnpInAddrErrors, ClnpAddress=ClnpAddress, clnpOutErrHops=clnpOutErrHops, clnpInErrSRUnspecs=clnpInErrSRUnspecs, clnpAdEntIfIndex=clnpAdEntIfIndex, clnpRoutingTable=clnpRoutingTable, clnpRouteIfIndex=clnpRouteIfIndex, clnpInErrCongests=clnpInErrCongests, clnpOutErrSegs=clnpOutErrSegs, clnpOutErrUnreachDsts=clnpOutErrUnreachDsts, clnpRouteType=clnpRouteType, clnpInErrIncomps=clnpInErrIncomps, esisISHouts=esisISHouts, clnpRouteInfo=clnpRouteInfo, clnpOutErrHdrs=clnpOutErrHdrs, esisRDUins=esisRDUins, esisISHins=esisISHins, clnpInErrUnspecs=clnpInErrUnspecs, clnp=clnp, clnpMediaToNetPhysAddress=clnpMediaToNetPhysAddress, clnpRouteProto=clnpRouteProto, clnpNetToMediaHoldTime=clnpNetToMediaHoldTime, esisESHins=esisESHins, clnpAddrEntry=clnpAddrEntry, clnpOutErrSRUnspecs=clnpOutErrSRUnspecs, clnpOutErrors=clnpOutErrors, clnpOutErrUnsRRs=clnpOutErrUnsRRs, clnpReasmTimeout=clnpReasmTimeout, clnpReasmFails=clnpReasmFails, clnpRoutingDiscards=clnpRoutingDiscards, clns=clns, clnpNetToMediaType=clnpNetToMediaType, es_is=es_is, esisRDUouts=esisRDUouts, clnpInUnknownULPs=clnpInUnknownULPs, clnpOutErrDups=clnpOutErrDups, clnpInErrSRUnkAddrs=clnpInErrSRUnkAddrs, clnpSegOKs=clnpSegOKs, clnpRouteMetric3=clnpRouteMetric3, clnpRouteNextHop=clnpRouteNextHop, clnpOutErrUnsOptions=clnpOutErrUnsOptions, clnpOutErrUnsSecurities=clnpOutErrUnsSecurities, clnpRouteMetric1=clnpRouteMetric1, echo=echo, clnpRouteMetric4=clnpRouteMetric4, clnpNetToMediaNetAddress=clnpNetToMediaNetAddress, clnpOutNoRoutes=clnpOutNoRoutes, clnpOutErrUnsVersions=clnpOutErrUnsVersions, clnpInReceives=clnpInReceives, clnpOutErrUnspecs=clnpOutErrUnspecs, clnpSegFails=clnpSegFails, clnpInErrDups=clnpInErrDups, clnpInErrUnreachDsts=clnpInErrUnreachDsts, clnpMediaToNetAge=clnpMediaToNetAge, clnpRouteDest=clnpRouteDest, clnpMediaToNetIfIndex=clnpMediaToNetIfIndex, clnpSegCreates=clnpSegCreates)
# --- snake_case translation of CLNS-MIB -------------------------------------
# NOTE(review): this section re-declares the same MIB objects as the camelCase
# section above under snake_case names. Three defects of the mechanical rename
# are fixed here: (1) `clnp_address` was referenced but never bound (the class
# is `Clnpaddress`) -> NameError at import; (2) the class attribute was named
# `subtype_spec`, which pyasn1 ignores, silently dropping the SIZE(1..21)
# constraint; (3) the `if mibBuilder.loadTexts:` guards called setStatus /
# setDescription on the old camelCase objects instead of the snake_case
# objects created here, leaving the new objects without status/description.
(object_identifier, integer, octet_string) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'Integer', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(constraints_intersection, value_range_constraint, constraints_union, value_size_constraint, single_value_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ConstraintsIntersection', 'ValueRangeConstraint', 'ConstraintsUnion', 'ValueSizeConstraint', 'SingleValueConstraint')
(notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance')
(counter32, experimental, time_ticks, iso, module_identity, mib_identifier, object_identity, counter64, notification_type, unsigned32, bits, gauge32, integer32, ip_address, mib_scalar, mib_table, mib_table_row, mib_table_column) = mibBuilder.importSymbols('SNMPv2-SMI', 'Counter32', 'experimental', 'TimeTicks', 'iso', 'ModuleIdentity', 'MibIdentifier', 'ObjectIdentity', 'Counter64', 'NotificationType', 'Unsigned32', 'Bits', 'Gauge32', 'Integer32', 'IpAddress', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn')
(display_string, textual_convention, phys_address) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention', 'PhysAddress')
# Subtree anchors of the CLNS MIB (experimental arc 1.3.6.1.3.1).
clns = mib_identifier((1, 3, 6, 1, 3, 1))

class Clnpaddress(OctetString):
    # NSAP address textual convention: OCTET STRING (SIZE (1..21)).
    # Fix: pyasn1 only honours the camelCase `subtypeSpec` class attribute;
    # the renamed `subtype_spec` silently disabled the size constraint.
    subtypeSpec = OctetString.subtypeSpec + value_size_constraint(1, 21)

# Fix: the object definitions below instantiate `clnp_address()`, which the
# original section never bound (NameError); alias it to the class above.
clnp_address = Clnpaddress
clnp = mib_identifier((1, 3, 6, 1, 3, 1, 1))
error = mib_identifier((1, 3, 6, 1, 3, 1, 2))
echo = mib_identifier((1, 3, 6, 1, 3, 1, 3))
es_is = mib_identifier((1, 3, 6, 1, 3, 1, 4)).setLabel('es-is')
# --- clnp group scalars (1.3.6.1.3.1.1) -------------------------------------
clnp_forwarding = mib_scalar((1, 3, 6, 1, 3, 1, 1, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('is', 1), ('es', 2)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_forwarding.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_forwarding.setDescription('The indication of whether this entity is active as an intermediate or end system. Only intermediate systems will forward PDUs onward that are not addressed to them.')
clnp_default_life_time = mib_scalar((1, 3, 6, 1, 3, 1, 1, 2), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_default_life_time.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_default_life_time.setDescription('The default value inserted into the Lifetime field of the CLNP PDU header of PDUs sourced by this entity.')
clnp_in_receives = mib_scalar((1, 3, 6, 1, 3, 1, 1, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_receives.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_receives.setDescription('The total number of input PDUs received from all connected network interfaces running CLNP, including errors.')
clnp_in_hdr_errors = mib_scalar((1, 3, 6, 1, 3, 1, 1, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_hdr_errors.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_hdr_errors.setDescription('The number of input PDUs discarded due to errors in the CLNP header, including bad checksums, version mismatch, lifetime exceeded, errors discovered in processing options, etc.')
clnp_in_addr_errors = mib_scalar((1, 3, 6, 1, 3, 1, 1, 5), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_addr_errors.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_addr_errors.setDescription("The number of input PDUs discarded because the NSAP address in the CLNP header's destination field was not a valid NSAP to be received at this entity. This count includes addresses not understood. For end systems, this is a count of PDUs which arrived with a destination NSAP which was not local.")
clnp_forw_pd_us = mib_scalar((1, 3, 6, 1, 3, 1, 1, 6), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_forw_pd_us.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_forw_pd_us.setDescription('The number of input PDUs for which this entity was not the final destination and which an attempt was made to forward them onward.')
clnp_in_unknown_nl_ps = mib_scalar((1, 3, 6, 1, 3, 1, 1, 7), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_unknown_nl_ps.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_unknown_nl_ps.setDescription('The number of locally-addressed PDUs successfully received but discarded because the network layer protocol was unknown or unsupported (e.g., not CLNP or ES-IS).')
clnp_in_unknown_ul_ps = mib_scalar((1, 3, 6, 1, 3, 1, 1, 8), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_unknown_ul_ps.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_unknown_ul_ps.setDescription('The number of locally-addressed PDUs successfully received but discarded because the upper layer protocol was unknown or unsupported (e.g., not TP4).')
clnp_in_discards = mib_scalar((1, 3, 6, 1, 3, 1, 1, 9), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_discards.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_discards.setDescription('The number of input CLNP PDUs for which no problems were encountered to prevent their continued processing, but were discarded (e.g., for lack of buffer space). Note that this counter does not include any PDUs discarded while awaiting re-assembly.')
clnp_in_delivers = mib_scalar((1, 3, 6, 1, 3, 1, 1, 10), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_delivers.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_delivers.setDescription('The total number of input PDUs successfully delivered to the CLNS transport user.')
clnp_out_requests = mib_scalar((1, 3, 6, 1, 3, 1, 1, 11), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_out_requests.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_out_requests.setDescription('The total number of CLNP PDUs which local CLNS user protocols supplied to CLNP for transmission requests. This counter does not include any PDUs counted in clnpForwPDUs.')
clnp_out_discards = mib_scalar((1, 3, 6, 1, 3, 1, 1, 12), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_out_discards.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_out_discards.setDescription('The number of output CLNP PDUs for which no other problem was encountered to prevent their transmission but were discarded (e.g., for lack of buffer space). Note this counter includes PDUs counted in clnpForwPDUs.')
clnp_out_no_routes = mib_scalar((1, 3, 6, 1, 3, 1, 1, 13), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_out_no_routes.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_out_no_routes.setDescription('The number of CLNP PDUs discarded because no route could be found to transmit them to their destination. This counter includes any PDUs counted in clnpForwPDUs.')
clnp_reasm_timeout = mib_scalar((1, 3, 6, 1, 3, 1, 1, 14), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_reasm_timeout.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_reasm_timeout.setDescription('The maximum number of seconds which received segments are held while they are awaiting reassembly at this entity.')
clnp_reasm_reqds = mib_scalar((1, 3, 6, 1, 3, 1, 1, 15), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_reasm_reqds.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_reasm_reqds.setDescription('The number of CLNP segments received which needed to be reassembled at this entity.')
clnp_reasm_o_ks = mib_scalar((1, 3, 6, 1, 3, 1, 1, 16), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_reasm_o_ks.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_reasm_o_ks.setDescription('The number of CLNP PDUs successfully re-assembled at this entity.')
clnp_reasm_fails = mib_scalar((1, 3, 6, 1, 3, 1, 1, 17), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_reasm_fails.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_reasm_fails.setDescription('The number of failures detected by the CLNP reassembly algorithm (for any reason: timed out, buffer size, etc).')
clnp_seg_o_ks = mib_scalar((1, 3, 6, 1, 3, 1, 1, 18), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_seg_o_ks.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_seg_o_ks.setDescription('The number of CLNP PDUs that have been successfully segmented at this entity.')
clnp_seg_fails = mib_scalar((1, 3, 6, 1, 3, 1, 1, 19), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_seg_fails.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_seg_fails.setDescription('The number of CLNP PDUs that have been discarded because they needed to be fragmented at this entity but could not.')
clnp_seg_creates = mib_scalar((1, 3, 6, 1, 3, 1, 1, 20), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_seg_creates.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_seg_creates.setDescription('The number of CLNP PDU segments that have been generated as a result of segmentation at this entity.')
clnp_in_opts = mib_scalar((1, 3, 6, 1, 3, 1, 1, 25), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_opts.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_opts.setDescription('The number of CLNP PDU segments that have been input with options at this entity.')
clnp_out_opts = mib_scalar((1, 3, 6, 1, 3, 1, 1, 26), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_out_opts.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_out_opts.setDescription('The number of CLNP PDU segments that have been generated with options by this entity.')
clnp_routing_discards = mib_scalar((1, 3, 6, 1, 3, 1, 1, 27), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_routing_discards.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_routing_discards.setDescription('The number of routing entries which were chosen to be discarded even though they are valid. One possible reason for discarding such an entry could be to free-up buffer space for other routing entries.')
# --- clnpAddrTable (1.3.6.1.3.1.1.21): per-address information ---------------
clnp_addr_table = mib_table((1, 3, 6, 1, 3, 1, 1, 21))
if mibBuilder.loadTexts: clnp_addr_table.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_addr_table.setDescription("The table of addressing information relevant to this entity's CLNP addresses. ")
clnp_addr_entry = mib_table_row((1, 3, 6, 1, 3, 1, 1, 21, 1)).setIndexNames((0, 'CLNS-MIB', 'clnpAdEntAddr'))
if mibBuilder.loadTexts: clnp_addr_entry.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_addr_entry.setDescription("The addressing information for one of this entity's CLNP addresses.")
clnp_ad_ent_addr = mib_table_column((1, 3, 6, 1, 3, 1, 1, 21, 1, 1), clnp_address()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_ad_ent_addr.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_ad_ent_addr.setDescription("The CLNP address to which this entry's addressing information pertains.")
clnp_ad_ent_if_index = mib_table_column((1, 3, 6, 1, 3, 1, 1, 21, 1, 2), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_ad_ent_if_index.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_ad_ent_if_index.setDescription('The index value which uniquely identifies the interface to which this entry is applicable. The interface identified by a particular value of this index is the same interface as identified by the same value of ifIndex.')
clnp_ad_ent_reasm_max_size = mib_table_column((1, 3, 6, 1, 3, 1, 1, 21, 1, 3), integer32().subtype(subtypeSpec=value_range_constraint(0, 65535))).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_ad_ent_reasm_max_size.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_ad_ent_reasm_max_size.setDescription('The size of the largest CLNP PDU which this entity can re-assemble from incoming CLNP segmented PDUs received on this interface.')
# --- clnpRoutingTable (1.3.6.1.3.1.1.22): the CLNP routing table -------------
clnp_routing_table = mib_table((1, 3, 6, 1, 3, 1, 1, 22))
if mibBuilder.loadTexts: clnp_routing_table.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_routing_table.setDescription("This entity's CLNP routing table.")
clnp_route_entry = mib_table_row((1, 3, 6, 1, 3, 1, 1, 22, 1)).setIndexNames((0, 'CLNS-MIB', 'clnpRouteDest'))
if mibBuilder.loadTexts: clnp_route_entry.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_entry.setDescription('A route to a particular destination.')
clnp_route_dest = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 1), clnp_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_dest.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_dest.setDescription('The destination CLNP address of this route.')
clnp_route_if_index = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 2), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_if_index.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_if_index.setDescription('The index value which uniquely identifies the local interface through which the next hop of this route should be reached. The interface identified by a particular value of this index is the same as identified by the same value of ifIndex.')
clnp_route_metric1 = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 3), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_metric1.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_metric1.setDescription("The primary routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. If this metric is not used, its value should be set to -1.")
clnp_route_metric2 = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 4), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_metric2.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_metric2.setDescription("An alternate routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. If this metric is not used, its value should be set to -1.")
clnp_route_metric3 = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 5), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_metric3.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_metric3.setDescription("An alternate routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. If this metric is not used, its value should be set to -1.")
clnp_route_metric4 = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 6), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_metric4.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_metric4.setDescription("An alternate routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. If this metric is not used, its value should be set to -1.")
clnp_route_next_hop = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 7), clnp_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_next_hop.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_next_hop.setDescription('The CLNP address of the next hop of this route.')
clnp_route_type = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 8), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('invalid', 2), ('direct', 3), ('remote', 4)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_type.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_type.setDescription('The type of route. Setting this object to the value invalid(2) has the effect of invaliding the corresponding entry in the clnpRoutingTable. That is, it effectively dissasociates the destination identified with said entry from the route identified with said entry. It is an implementation-specific matter as to whether the agent removes an invalidated entry from the table. Accordingly, management stations must be prepared to receive tabular information from agents that corresponds to entries not currently in use. Proper interpretation of such entries requires examination of the relevant clnpRouteType object.')
clnp_route_proto = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 9), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 9, 11, 12, 13, 14))).clone(namedValues=named_values(('other', 1), ('local', 2), ('netmgmt', 3), ('is-is', 9), ('ciscoIgrp', 11), ('bbnSpfIgp', 12), ('ospf', 13), ('bgp', 14)))).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_route_proto.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_proto.setDescription('The routing mechanism via which this route was learned. Inclusion of values for gateway routing protocols is not intended to imply that hosts should support those protocols.')
clnp_route_age = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 10), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_age.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_age.setDescription("The number of seconds since this route was last updated or otherwise determined to be correct. Note that no semantics of `too old' can be implied except through knowledge of the routing protocol by which the route was learned.")
clnp_route_metric5 = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 11), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_route_metric5.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_metric5.setDescription("An alternate routing metric for this route. The semantics of this metric are determined by the routing-protocol specified in the route's clnpRouteProto value. If this metric is not used, its value should be set to -1.")
clnp_route_info = mib_table_column((1, 3, 6, 1, 3, 1, 1, 22, 1, 12), object_identifier()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_route_info.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_route_info.setDescription("A reference to MIB definitions specific to the particular routing protocol which is responsible for this route, as determined by the value specified in the route's clnpRouteProto value. If this information is not present, its value should be set to the OBJECT IDENTIFIER { 0 0 }, which is a syntatically valid object identifier, and any conformant implementation of ASN.1 and BER must be able to generate and recognize this value.")
# --- clnpNetToMediaTable (1.3.6.1.3.1.1.23): CLNP -> physical mapping --------
clnp_net_to_media_table = mib_table((1, 3, 6, 1, 3, 1, 1, 23))
if mibBuilder.loadTexts: clnp_net_to_media_table.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_net_to_media_table.setDescription('The CLNP Address Translation table used for mapping from CLNP addresses to physical addresses.')
clnp_net_to_media_entry = mib_table_row((1, 3, 6, 1, 3, 1, 1, 23, 1)).setIndexNames((0, 'CLNS-MIB', 'clnpNetToMediaIfIndex'), (0, 'CLNS-MIB', 'clnpNetToMediaNetAddress'))
if mibBuilder.loadTexts: clnp_net_to_media_entry.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_net_to_media_entry.setDescription("Each entry contains one CLNP address to `physical' address equivalence.")
clnp_net_to_media_if_index = mib_table_column((1, 3, 6, 1, 3, 1, 1, 23, 1, 1), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_net_to_media_if_index.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_net_to_media_if_index.setDescription("The interface on which this entry's equivalence is effective. The interface identified by a particular value of this index is the same interface as identified by the same value of ifIndex.")
clnp_net_to_media_phys_address = mib_table_column((1, 3, 6, 1, 3, 1, 1, 23, 1, 2), phys_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_net_to_media_phys_address.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_net_to_media_phys_address.setDescription("The media-dependent `physical' address.")
clnp_net_to_media_net_address = mib_table_column((1, 3, 6, 1, 3, 1, 1, 23, 1, 3), clnp_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_net_to_media_net_address.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_net_to_media_net_address.setDescription("The CLNP address corresponding to the media- dependent `physical' address.")
clnp_net_to_media_type = mib_table_column((1, 3, 6, 1, 3, 1, 1, 23, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('invalid', 2), ('dynamic', 3), ('static', 4)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_net_to_media_type.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_net_to_media_type.setDescription('The type of mapping. Setting this object to the value invalid(2) has the effect of invalidating the corresponding entry in the clnpNetToMediaTable. That is, it effectively dissassociates the interface identified with said entry from the mapping identified with said entry. It is an implementation-specific matter as to whether the agent removes an invalidated entry from the table. Accordingly, management stations must be prepared to receive tabular information from agents that corresponds to entries not currently in use. Proper interpretation of such entries requires examination of the relevant clnpNetToMediaType object.')
clnp_net_to_media_age = mib_table_column((1, 3, 6, 1, 3, 1, 1, 23, 1, 5), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_net_to_media_age.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_net_to_media_age.setDescription("The number of seconds since this entry was last updated or otherwise determined to be correct. Note that no semantics of `too old' can be implied except through knowledge of the type of entry.")
clnp_net_to_media_hold_time = mib_table_column((1, 3, 6, 1, 3, 1, 1, 23, 1, 6), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_net_to_media_hold_time.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_net_to_media_hold_time.setDescription('The time in seconds this entry will be valid. Static entries should always report this field as -1.')
# --- clnpMediaToNetTable (1.3.6.1.3.1.1.24): physical -> CLNP mapping --------
clnp_media_to_net_table = mib_table((1, 3, 6, 1, 3, 1, 1, 24))
if mibBuilder.loadTexts: clnp_media_to_net_table.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_media_to_net_table.setDescription('The CLNP Address Translation table used for mapping from physical addresses to CLNP addresses.')
clnp_media_to_net_entry = mib_table_row((1, 3, 6, 1, 3, 1, 1, 24, 1)).setIndexNames((0, 'CLNS-MIB', 'clnpMediaToNetIfIndex'), (0, 'CLNS-MIB', 'clnpMediaToNetPhysAddress'))
if mibBuilder.loadTexts: clnp_media_to_net_entry.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_media_to_net_entry.setDescription("Each entry contains on ClnpAddress to `physical' address equivalence.")
clnp_media_to_net_if_index = mib_table_column((1, 3, 6, 1, 3, 1, 1, 24, 1, 1), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_media_to_net_if_index.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_media_to_net_if_index.setDescription("The interface on which this entry's equivalence is effective. The interface identified by a particular value of this index is the same interface as identified by the same value of ifIndex.")
# NOTE(review): this column was generated as mib_scalar (not mib_table_column)
# in the original; kept as-is to preserve behavior -- confirm against the MIB.
clnp_media_to_net_address = mib_scalar((1, 3, 6, 1, 3, 1, 1, 24, 1, 2), clnp_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_media_to_net_address.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_media_to_net_address.setDescription("The ClnpAddress corresponding to the media- dependent `physical' address.")
clnp_media_to_net_phys_address = mib_table_column((1, 3, 6, 1, 3, 1, 1, 24, 1, 3), phys_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_media_to_net_phys_address.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_media_to_net_phys_address.setDescription("The media-dependent `physical' address.")
clnp_media_to_net_type = mib_table_column((1, 3, 6, 1, 3, 1, 1, 24, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4))).clone(namedValues=named_values(('other', 1), ('invalid', 2), ('dynamic', 3), ('static', 4)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_media_to_net_type.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_media_to_net_type.setDescription('The type of mapping. Setting this object to the value invalid(2) has the effect of invalidating the corresponding entry in the clnpMediaToNetTable. That is, it effectively dissassociates the interface identified with said entry from the mapping identified with said entry. It is an implementation-specific matter as to whether the agent removes an invalidated entry from the table. Accordingly, management stations must be prepared to receive tabular information from agents that corresponds to entries not currently in use. Proper interpretation of such entries requires examination of the relevant clnpMediaToNetType object.')
clnp_media_to_net_age = mib_table_column((1, 3, 6, 1, 3, 1, 1, 24, 1, 5), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_media_to_net_age.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_media_to_net_age.setDescription("The number of seconds since this entry was last updated or otherwise determined to be correct. Note that no semantics of `too old' can be implied except through knowledge of the type of entry.")
clnp_media_to_net_hold_time = mib_table_column((1, 3, 6, 1, 3, 1, 1, 24, 1, 6), integer32()).setMaxAccess('readwrite')
if mibBuilder.loadTexts: clnp_media_to_net_hold_time.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_media_to_net_hold_time.setDescription('The time in seconds this entry will be valid. Static entries should always report this field as -1.')
# --- CLNP Error-PDU input counters (error subtree 1.3.6.1.3.1.2) -------------
clnp_in_errors = mib_scalar((1, 3, 6, 1, 3, 1, 2, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_errors.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_errors.setDescription('The number of CLNP Error PDUs received by this entity.')
clnp_out_errors = mib_scalar((1, 3, 6, 1, 3, 1, 2, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_out_errors.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_out_errors.setDescription('The number of CLNP Error PDUs sent by this entity.')
clnp_in_err_unspecs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_err_unspecs.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_err_unspecs.setDescription('The number of unspecified CLNP Error PDUs received by this entity.')
clnp_in_err_procs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts: clnp_in_err_procs.setStatus('mandatory')
if mibBuilder.loadTexts: clnp_in_err_procs.setDescription('The number of protocol procedure CLNP Error PDUs received by this entity.')
clnp_in_err_cksums = mib_scalar((1, 3, 6, 1, 3, 1, 2, 5), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrCksums.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrCksums.setDescription('The number of checksum CLNP Error PDUs received by this entity.') clnp_in_err_congests = mib_scalar((1, 3, 6, 1, 3, 1, 2, 6), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrCongests.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrCongests.setDescription('The number of congestion drop CLNP Error PDUs received by this entity.') clnp_in_err_hdrs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 7), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrHdrs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrHdrs.setDescription('The number of header syntax CLNP Error PDUs received by this entity.') clnp_in_err_segs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 8), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrSegs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSegs.setDescription('The number of segmentation disallowed CLNP Error PDUs received by this entity.') clnp_in_err_incomps = mib_scalar((1, 3, 6, 1, 3, 1, 2, 9), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrIncomps.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrIncomps.setDescription('The number of incomplete PDU CLNP Error PDUs received by this entity.') clnp_in_err_dups = mib_scalar((1, 3, 6, 1, 3, 1, 2, 10), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrDups.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrDups.setDescription('The number of duplicate option CLNP Error PDUs received by this entity.') clnp_in_err_unreach_dsts = mib_scalar((1, 3, 6, 1, 3, 1, 2, 11), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrUnreachDsts.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnreachDsts.setDescription('The number of unreachable 
destination CLNP Error PDUs received by this entity.') clnp_in_err_unknown_dsts = mib_scalar((1, 3, 6, 1, 3, 1, 2, 12), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrUnknownDsts.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnknownDsts.setDescription('The number of unknown destination CLNP Error PDUs received by this entity.') clnp_in_err_sr_unspecs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 13), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrSRUnspecs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSRUnspecs.setDescription('The number of unspecified source route CLNP Error PDUs received by this entity.') clnp_in_err_sr_syntaxes = mib_scalar((1, 3, 6, 1, 3, 1, 2, 14), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrSRSyntaxes.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSRSyntaxes.setDescription('The number of source route syntax CLNP Error PDUs received by this entity.') clnp_in_err_sr_unk_addrs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 15), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrSRUnkAddrs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSRUnkAddrs.setDescription('The number of source route unknown address CLNP Error PDUs received by this entity.') clnp_in_err_sr_bad_paths = mib_scalar((1, 3, 6, 1, 3, 1, 2, 16), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrSRBadPaths.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrSRBadPaths.setDescription('The number of source route bad path CLNP Error PDUs received by this entity.') clnp_in_err_hops = mib_scalar((1, 3, 6, 1, 3, 1, 2, 17), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrHops.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrHops.setDescription('The number of hop count exceeded CLNP Error PDUs received by this entity.') clnp_in_err_hop_reassms = mib_scalar((1, 3, 6, 1, 3, 1, 2, 18), 
counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrHopReassms.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrHopReassms.setDescription('The number of hop count exceeded while reassembling CLNP Error PDUs received by this entity.') clnp_in_err_uns_options = mib_scalar((1, 3, 6, 1, 3, 1, 2, 19), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrUnsOptions.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsOptions.setDescription('The number of unsupported option CLNP Error PDUs received by this entity.') clnp_in_err_uns_versions = mib_scalar((1, 3, 6, 1, 3, 1, 2, 20), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrUnsVersions.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsVersions.setDescription('The number of version mismatch CLNP Error PDUs received by this entity.') clnp_in_err_uns_securities = mib_scalar((1, 3, 6, 1, 3, 1, 2, 21), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrUnsSecurities.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsSecurities.setDescription('The number of unsupported security option CLNP Error PDUs received by this entity.') clnp_in_err_uns_s_rs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 22), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrUnsSRs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsSRs.setDescription('The number of unsupported source route option CLNP Error PDUs received by this entity.') clnp_in_err_uns_r_rs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 23), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrUnsRRs.setStatus('mandatory') if mibBuilder.loadTexts: clnpInErrUnsRRs.setDescription('The number of unsupported record route option CLNP Error PDUs received by this entity.') clnp_in_err_interferences = mib_scalar((1, 3, 6, 1, 3, 1, 2, 24), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpInErrInterferences.setStatus('mandatory') if 
mibBuilder.loadTexts: clnpInErrInterferences.setDescription('The number of reassembly interference CLNP Error PDUs received by this entity.') clnp_out_err_unspecs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 25), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrUnspecs.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrUnspecs.setDescription('The number of unspecified CLNP Error PDUs sent by this entity.') clnp_out_err_procs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 26), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrProcs.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrProcs.setDescription('The number of protocol procedure CLNP Error PDUs sent by this entity.') clnp_out_err_cksums = mib_scalar((1, 3, 6, 1, 3, 1, 2, 27), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrCksums.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrCksums.setDescription('The number of checksum CLNP Error PDUs sent by this entity.') clnp_out_err_congests = mib_scalar((1, 3, 6, 1, 3, 1, 2, 28), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrCongests.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrCongests.setDescription('The number of congestion drop CLNP Error PDUs sent by this entity.') clnp_out_err_hdrs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 29), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrHdrs.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrHdrs.setDescription('The number of header syntax CLNP Error PDUs sent by this entity.') clnp_out_err_segs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 30), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrSegs.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrSegs.setDescription('The number of segmentation disallowed CLNP Error PDUs sent by this entity.') clnp_out_err_incomps = mib_scalar((1, 3, 6, 1, 3, 1, 2, 31), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: 
clnpOutErrIncomps.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrIncomps.setDescription('The number of incomplete PDU CLNP Error PDUs sent by this entity.') clnp_out_err_dups = mib_scalar((1, 3, 6, 1, 3, 1, 2, 32), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrDups.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrDups.setDescription('The number of duplicate option CLNP Error PDUs sent by this entity.') clnp_out_err_unreach_dsts = mib_scalar((1, 3, 6, 1, 3, 1, 2, 33), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrUnreachDsts.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrUnreachDsts.setDescription('The number of unreachable destination CLNP Error PDUs sent by this entity.') clnp_out_err_unknown_dsts = mib_scalar((1, 3, 6, 1, 3, 1, 2, 34), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrUnknownDsts.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrUnknownDsts.setDescription('The number of unknown destination CLNP Error PDUs sent by this entity.') clnp_out_err_sr_unspecs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 35), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrSRUnspecs.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrSRUnspecs.setDescription('The number of unspecified source route CLNP Error PDUs sent by this entity.') clnp_out_err_sr_syntaxes = mib_scalar((1, 3, 6, 1, 3, 1, 2, 36), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrSRSyntaxes.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrSRSyntaxes.setDescription('The number of source route syntax CLNP Error PDUs sent by this entity.') clnp_out_err_sr_unk_addrs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 37), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrSRUnkAddrs.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrSRUnkAddrs.setDescription('The number of source route unknown address CLNP Error PDUs sent by this 
entity.') clnp_out_err_sr_bad_paths = mib_scalar((1, 3, 6, 1, 3, 1, 2, 38), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrSRBadPaths.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrSRBadPaths.setDescription('The number of source route bad path CLNP Error PDUs sent by this entity.') clnp_out_err_hops = mib_scalar((1, 3, 6, 1, 3, 1, 2, 39), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrHops.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrHops.setDescription('The number of hop count exceeded CLNP Error PDUs sent by this entity.') clnp_out_err_hop_reassms = mib_scalar((1, 3, 6, 1, 3, 1, 2, 40), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrHopReassms.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrHopReassms.setDescription('The number of hop count exceeded while reassembling CLNP Error PDUs sent by this entity.') clnp_out_err_uns_options = mib_scalar((1, 3, 6, 1, 3, 1, 2, 41), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrUnsOptions.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrUnsOptions.setDescription('The number of unsupported option CLNP Error PDUs sent by this entity.') clnp_out_err_uns_versions = mib_scalar((1, 3, 6, 1, 3, 1, 2, 42), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrUnsVersions.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrUnsVersions.setDescription('The number of version mismatch CLNP Error PDUs sent by this entity.') clnp_out_err_uns_securities = mib_scalar((1, 3, 6, 1, 3, 1, 2, 43), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrUnsSecurities.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrUnsSecurities.setDescription('The number of unsupported security option CLNP Error PDUs sent by this entity.') clnp_out_err_uns_s_rs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 44), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: 
clnpOutErrUnsSRs.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrUnsSRs.setDescription('The number of unsupported source route option CLNP Error PDUs sent by this entity.') clnp_out_err_uns_r_rs = mib_scalar((1, 3, 6, 1, 3, 1, 2, 45), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrUnsRRs.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrUnsRRs.setDescription('The number of unsupported record route option CLNP Error PDUs sent by this entity.') clnp_out_err_interferences = mib_scalar((1, 3, 6, 1, 3, 1, 2, 46), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: clnpOutErrInterferences.setStatus('mandatory') if mibBuilder.loadTexts: clnpOutErrInterferences.setDescription('The number of reassembly interference CLNP Error PDUs sent by this entity.') esis_es_hins = mib_scalar((1, 3, 6, 1, 3, 1, 4, 1), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: esisESHins.setStatus('mandatory') if mibBuilder.loadTexts: esisESHins.setDescription('The number of ESH PDUs received by this entity.') esis_es_houts = mib_scalar((1, 3, 6, 1, 3, 1, 4, 2), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: esisESHouts.setStatus('mandatory') if mibBuilder.loadTexts: esisESHouts.setDescription('The number of ESH PDUs sent by this entity.') esis_is_hins = mib_scalar((1, 3, 6, 1, 3, 1, 4, 3), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: esisISHins.setStatus('mandatory') if mibBuilder.loadTexts: esisISHins.setDescription('The number of ISH PDUs received by this entity.') esis_is_houts = mib_scalar((1, 3, 6, 1, 3, 1, 4, 4), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: esisISHouts.setStatus('mandatory') if mibBuilder.loadTexts: esisISHouts.setDescription('The number of ISH PDUs sent by this entity.') esis_rd_uins = mib_scalar((1, 3, 6, 1, 3, 1, 4, 5), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: esisRDUins.setStatus('mandatory') if mibBuilder.loadTexts: 
esisRDUins.setDescription('The number of RDU PDUs received by this entity.') esis_rd_uouts = mib_scalar((1, 3, 6, 1, 3, 1, 4, 6), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: esisRDUouts.setStatus('mandatory') if mibBuilder.loadTexts: esisRDUouts.setDescription('The number of RDU PDUs sent by this entity.') mibBuilder.exportSymbols('CLNS-MIB', clnpInErrSRBadPaths=clnpInErrSRBadPaths, clnpNetToMediaEntry=clnpNetToMediaEntry, esisESHouts=esisESHouts, clnpInOpts=clnpInOpts, clnpInErrProcs=clnpInErrProcs, clnpInHdrErrors=clnpInHdrErrors, clnpAdEntAddr=clnpAdEntAddr, clnpMediaToNetHoldTime=clnpMediaToNetHoldTime, clnpOutErrIncomps=clnpOutErrIncomps, clnpOutErrCksums=clnpOutErrCksums, clnpOutErrHopReassms=clnpOutErrHopReassms, clnpForwarding=clnpForwarding, clnpInErrUnsSRs=clnpInErrUnsSRs, clnpInErrUnsRRs=clnpInErrUnsRRs, clnpInErrSegs=clnpInErrSegs, clnpInErrHops=clnpInErrHops, clnpNetToMediaIfIndex=clnpNetToMediaIfIndex, clnpAdEntReasmMaxSize=clnpAdEntReasmMaxSize, clnpInErrSRSyntaxes=clnpInErrSRSyntaxes, clnpInDiscards=clnpInDiscards, clnpOutErrSRUnkAddrs=clnpOutErrSRUnkAddrs, clnpRouteEntry=clnpRouteEntry, clnpInErrUnsOptions=clnpInErrUnsOptions, clnpOutErrInterferences=clnpOutErrInterferences, clnpAddrTable=clnpAddrTable, clnpOutErrSRBadPaths=clnpOutErrSRBadPaths, clnpDefaultLifeTime=clnpDefaultLifeTime, clnpInErrUnknownDsts=clnpInErrUnknownDsts, clnpForwPDUs=clnpForwPDUs, clnpOutDiscards=clnpOutDiscards, clnpReasmOKs=clnpReasmOKs, clnpReasmReqds=clnpReasmReqds, clnpInErrUnsVersions=clnpInErrUnsVersions, error=error, clnpInErrInterferences=clnpInErrInterferences, clnpOutErrProcs=clnpOutErrProcs, clnpOutErrCongests=clnpOutErrCongests, clnpInUnknownNLPs=clnpInUnknownNLPs, clnpNetToMediaPhysAddress=clnpNetToMediaPhysAddress, clnpMediaToNetTable=clnpMediaToNetTable, clnpMediaToNetAddress=clnpMediaToNetAddress, clnpInErrCksums=clnpInErrCksums, clnpInErrors=clnpInErrors, clnpOutRequests=clnpOutRequests, clnpNetToMediaAge=clnpNetToMediaAge, 
clnpInErrHdrs=clnpInErrHdrs, clnpRouteMetric2=clnpRouteMetric2, clnpInErrHopReassms=clnpInErrHopReassms, clnpNetToMediaTable=clnpNetToMediaTable, clnpInDelivers=clnpInDelivers, clnpInErrUnsSecurities=clnpInErrUnsSecurities, clnpRouteMetric5=clnpRouteMetric5, clnpMediaToNetType=clnpMediaToNetType, clnpOutErrSRSyntaxes=clnpOutErrSRSyntaxes, clnpRouteAge=clnpRouteAge, clnpOutErrUnsSRs=clnpOutErrUnsSRs, clnpMediaToNetEntry=clnpMediaToNetEntry, clnpOutErrUnknownDsts=clnpOutErrUnknownDsts, clnpOutOpts=clnpOutOpts, clnpInAddrErrors=clnpInAddrErrors, ClnpAddress=ClnpAddress, clnpOutErrHops=clnpOutErrHops, clnpInErrSRUnspecs=clnpInErrSRUnspecs, clnpAdEntIfIndex=clnpAdEntIfIndex, clnpRoutingTable=clnpRoutingTable, clnpRouteIfIndex=clnpRouteIfIndex, clnpInErrCongests=clnpInErrCongests, clnpOutErrSegs=clnpOutErrSegs, clnpOutErrUnreachDsts=clnpOutErrUnreachDsts, clnpRouteType=clnpRouteType, clnpInErrIncomps=clnpInErrIncomps, esisISHouts=esisISHouts, clnpRouteInfo=clnpRouteInfo, clnpOutErrHdrs=clnpOutErrHdrs, esisRDUins=esisRDUins, esisISHins=esisISHins, clnpInErrUnspecs=clnpInErrUnspecs, clnp=clnp, clnpMediaToNetPhysAddress=clnpMediaToNetPhysAddress, clnpRouteProto=clnpRouteProto, clnpNetToMediaHoldTime=clnpNetToMediaHoldTime, esisESHins=esisESHins, clnpAddrEntry=clnpAddrEntry, clnpOutErrSRUnspecs=clnpOutErrSRUnspecs, clnpOutErrors=clnpOutErrors, clnpOutErrUnsRRs=clnpOutErrUnsRRs, clnpReasmTimeout=clnpReasmTimeout, clnpReasmFails=clnpReasmFails, clnpRoutingDiscards=clnpRoutingDiscards, clns=clns, clnpNetToMediaType=clnpNetToMediaType, es_is=es_is, esisRDUouts=esisRDUouts, clnpInUnknownULPs=clnpInUnknownULPs, clnpOutErrDups=clnpOutErrDups, clnpInErrSRUnkAddrs=clnpInErrSRUnkAddrs, clnpSegOKs=clnpSegOKs, clnpRouteMetric3=clnpRouteMetric3, clnpRouteNextHop=clnpRouteNextHop, clnpOutErrUnsOptions=clnpOutErrUnsOptions, clnpOutErrUnsSecurities=clnpOutErrUnsSecurities, clnpRouteMetric1=clnpRouteMetric1, echo=echo, clnpRouteMetric4=clnpRouteMetric4, 
clnpNetToMediaNetAddress=clnpNetToMediaNetAddress, clnpOutNoRoutes=clnpOutNoRoutes, clnpOutErrUnsVersions=clnpOutErrUnsVersions, clnpInReceives=clnpInReceives, clnpOutErrUnspecs=clnpOutErrUnspecs, clnpSegFails=clnpSegFails, clnpInErrDups=clnpInErrDups, clnpInErrUnreachDsts=clnpInErrUnreachDsts, clnpMediaToNetAge=clnpMediaToNetAge, clnpRouteDest=clnpRouteDest, clnpMediaToNetIfIndex=clnpMediaToNetIfIndex, clnpSegCreates=clnpSegCreates)
VERSION = (0, 2, 2, '', 1) if VERSION[3] and VERSION[4]: VERSION_TEXT = '{0}.{1}.{2}{3}{4}'.format(*VERSION) else: VERSION_TEXT = '{0}.{1}.{2}'.format(*VERSION[0:3]) VERSION_EXTRA = '' LICENSE = 'MIT License'
version = (0, 2, 2, '', 1) if VERSION[3] and VERSION[4]: version_text = '{0}.{1}.{2}{3}{4}'.format(*VERSION) else: version_text = '{0}.{1}.{2}'.format(*VERSION[0:3]) version_extra = '' license = 'MIT License'
#NOTE: LIST - Secuence of mutable values names = ["Harry", "Hik", "Linkin", "Park"] #ADDS ANOTHER ITEM IN THE LIST names.append("Paramore") #SORT THE LIST IN APLHABETIC ORDER names.sort() #PRINTS THE LIST print(names)
names = ['Harry', 'Hik', 'Linkin', 'Park'] names.append('Paramore') names.sort() print(names)
__author__ = "Ian Goodfellow" class Agent(object): pass
__author__ = 'Ian Goodfellow' class Agent(object): pass
c = 41 # THE VARIABLE C equals 41 c == 40 # c eqauls 40 which is false becasue of the variable above c != 40 and c <41 #c does not equal 40 is a true statement but the other statement is false, making it a false statement c != 40 or c <41 #True statement in an or statement not c == 40 #this is a true statement not c > 40 #false statement c <= 41 #true not false #true True and false #false false or True #true false or false or false #false True and True and false #false false == 0 #this is a true True == 0 #false True == 1 #this statement
c = 41 c == 40 c != 40 and c < 41 c != 40 or c < 41 not c == 40 not c > 40 c <= 41 not false True and false false or True false or false or false True and True and false false == 0 True == 0 True == 1
dna = 'ACGTN' rna = 'ACGUN' # dictionary nucleotide acronym to full name nuc2name = { 'A': 'Adenosine', 'C': 'Cysteine', 'T': 'Thymine', 'G': 'Guanine', 'U': 'Uracil', 'N': 'Unknown' }
dna = 'ACGTN' rna = 'ACGUN' nuc2name = {'A': 'Adenosine', 'C': 'Cysteine', 'T': 'Thymine', 'G': 'Guanine', 'U': 'Uracil', 'N': 'Unknown'}
print("Checking Imports") import_list = ['signal', 'psutil', 'time', 'pypresence', 'random'] modules = {} for package in import_list: try: modules[package] = __import__(package) except ImportError: print(f"Package: {package} is missing please install") print("Loading CONFIG\n") # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # CONFIG # # Change the client_id to your own if you want to use your own assets and name. # client_id = '808908940993495040' # # The app can update every 15 seconds according to discord tos (https://discord.com/developers/docs/rich-presence/how-to#updating-presence) # However, you can change it to whatever you want at the risk of being Rate-Limited (never have been actualy rate-limited so I dont know the limit. 1 Second works tho) # rpc_limit = 5 # int with 1 second being the min # # These are the default quotes or texts displayed. Change to your liking. 
# Button is optional # def updateDynamicText(): cpu_per = round(modules["psutil"].cpu_percent(), 1) mem_per = round(modules["psutil"].virtual_memory().percent, 1) text = [ { "name": "CPU / RAM", "line1": f"CPU: {cpu_per}%", "line2": f"RAM: {mem_per}%", }, { "name": "The Welcoming", "line1": f"Yo we pimp", "line2": f"chimping", "button": [{"label": "Misfits", "url": "https://scuffed.store/"}, {"label": "Fitz", "url": "https://fitz.fanfiber.com/"}] }, ] return {"text": text, "size": len(text)} # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # Spawn Discord RPC connection RPC = modules["pypresence"].Presence(client_id, pipe=0) RPC.connect() # https://stackoverflow.com/questions/18499497/how-to-process-sigterm-signal-gracefully # Thanks to this thread for safe shutdown class GracefulKiller: kill_now = False def __init__(self): modules["signal"].signal(modules["signal"].SIGINT, self.exit_gracefully) modules["signal"].signal(modules["signal"].SIGTERM, self.exit_gracefully) def exit_gracefully(self): self.kill_now = True if __name__ == '__main__': killer = GracefulKiller() print(f"Client ID: {client_id}") print(f"Updating every: {rpc_limit} seconds") while not killer.kill_now: # Presence text = updateDynamicText() x = modules["random"].randint(0,text["size"]-1) try: text["text"][x]["button"] except KeyError: print(RPC.update(details=text["text"][x]["line1"], state=text["text"][x]["line2"])) else: print(RPC.update(details=text["text"][x]["line1"], state=text["text"][x]["line2"], buttons=text["text"][x]["button"])) # https://discord.com/developers/docs/rich-presence/how-to#updating-presence # Should only update every 15 seconds, however, this is not a limit and it can go as fast as you want it too. # However, you will probabaly get rate-limited and will have to serve a cooldown period if you go too fast. 
modules["time"].sleep(rpc_limit) print("I was killed") RPC.close()
print('Checking Imports') import_list = ['signal', 'psutil', 'time', 'pypresence', 'random'] modules = {} for package in import_list: try: modules[package] = __import__(package) except ImportError: print(f'Package: {package} is missing please install') print('Loading CONFIG\n') client_id = '808908940993495040' rpc_limit = 5 def update_dynamic_text(): cpu_per = round(modules['psutil'].cpu_percent(), 1) mem_per = round(modules['psutil'].virtual_memory().percent, 1) text = [{'name': 'CPU / RAM', 'line1': f'CPU: {cpu_per}%', 'line2': f'RAM: {mem_per}%'}, {'name': 'The Welcoming', 'line1': f'Yo we pimp', 'line2': f'chimping', 'button': [{'label': 'Misfits', 'url': 'https://scuffed.store/'}, {'label': 'Fitz', 'url': 'https://fitz.fanfiber.com/'}]}] return {'text': text, 'size': len(text)} rpc = modules['pypresence'].Presence(client_id, pipe=0) RPC.connect() class Gracefulkiller: kill_now = False def __init__(self): modules['signal'].signal(modules['signal'].SIGINT, self.exit_gracefully) modules['signal'].signal(modules['signal'].SIGTERM, self.exit_gracefully) def exit_gracefully(self): self.kill_now = True if __name__ == '__main__': killer = graceful_killer() print(f'Client ID: {client_id}') print(f'Updating every: {rpc_limit} seconds') while not killer.kill_now: text = update_dynamic_text() x = modules['random'].randint(0, text['size'] - 1) try: text['text'][x]['button'] except KeyError: print(RPC.update(details=text['text'][x]['line1'], state=text['text'][x]['line2'])) else: print(RPC.update(details=text['text'][x]['line1'], state=text['text'][x]['line2'], buttons=text['text'][x]['button'])) modules['time'].sleep(rpc_limit) print('I was killed') RPC.close()
# A linked list node class Node: def __init__(self, value=None, next=None): self.value = value self.next = next # Helper function to print given linked list def printList(head): ptr = head while ptr: print(ptr.value, end=" -> ") ptr = ptr.next print("None") # Function to remove duplicates from a sorted list def removeDuplicates(head): previous = None current = head # take an empty set to store linked list nodes for future reference s = set() # do till linked list is not empty while current: # if current node is seen before, ignore it if current.value in s: previous.next = current.next # insert current node into the set and proceed to next node else: s.add(current.value) previous = current current = previous.next return head if __name__ == '__main__': # input keys keys = [3,4,3,2,6,1,2,6] # construct linked list head = None for i in reversed(range(len(keys))): head = Node(keys[i], head) removeDuplicates(head) # print linked list printList(head)
class Node: def __init__(self, value=None, next=None): self.value = value self.next = next def print_list(head): ptr = head while ptr: print(ptr.value, end=' -> ') ptr = ptr.next print('None') def remove_duplicates(head): previous = None current = head s = set() while current: if current.value in s: previous.next = current.next else: s.add(current.value) previous = current current = previous.next return head if __name__ == '__main__': keys = [3, 4, 3, 2, 6, 1, 2, 6] head = None for i in reversed(range(len(keys))): head = node(keys[i], head) remove_duplicates(head) print_list(head)
#!/usr/bin/env python3 # https://www.urionlinejudge.com.br/judge/en/problems/view/1014 def main(): X = int(input()) Y = float(input()) CONSUMPTION = X / Y print(format(CONSUMPTION, '.3f'), "km/l") # Start the execution if it's the main script if __name__ == "__main__": main()
def main(): x = int(input()) y = float(input()) consumption = X / Y print(format(CONSUMPTION, '.3f'), 'km/l') if __name__ == '__main__': main()
''' Data Columns - Exercise 1 The file reads the file with neuron lengths (neuron_data.txt) and saves an identical copy of the file. ''' Infile = open('neuron_data.txt') Outfile = open('neuron_data-copy.txt', 'w') for line in Infile: Outfile.write(line) Outfile.close()
""" Data Columns - Exercise 1 The file reads the file with neuron lengths (neuron_data.txt) and saves an identical copy of the file. """ infile = open('neuron_data.txt') outfile = open('neuron_data-copy.txt', 'w') for line in Infile: Outfile.write(line) Outfile.close()
class Video: def __init__(self, files): self.files = files def setFiles(self, files): self.files = files def getFiles(self): return self.files
class Video: def __init__(self, files): self.files = files def set_files(self, files): self.files = files def get_files(self): return self.files
class JsonUtil: @staticmethod def list_obj_dict(list): new_list = [] for obj in list: new_list.append(obj.__dict__) return new_list
class Jsonutil: @staticmethod def list_obj_dict(list): new_list = [] for obj in list: new_list.append(obj.__dict__) return new_list
# 1 x = float(input("First number: ")) y = float(input("Second number: ")) if x > y: print(f"{x} is greater than {y}") elif x < y: print(f"{x} is less than {y}") else: print(f"{x} is equal to {y}") # bb quiz = float(input("quiz: ")) mid_term = float(input("midterm: ")) final = float(input("final: ")) avg = (quiz + mid_term + final) / 3 if avg > 90: print("Congrats! You got an A") # if else x = 6 if x >=3: y = x-3 else: y = x+3 print("y is",y) # max x = float(input("First number: ")) y = float(input("Second number: ")) print("The larger number is:", x if x > y else y) # max(x, y) can also be used
x = float(input('First number: ')) y = float(input('Second number: ')) if x > y: print(f'{x} is greater than {y}') elif x < y: print(f'{x} is less than {y}') else: print(f'{x} is equal to {y}') quiz = float(input('quiz: ')) mid_term = float(input('midterm: ')) final = float(input('final: ')) avg = (quiz + mid_term + final) / 3 if avg > 90: print('Congrats! You got an A') x = 6 if x >= 3: y = x - 3 else: y = x + 3 print('y is', y) x = float(input('First number: ')) y = float(input('Second number: ')) print('The larger number is:', x if x > y else y)
x = {} print(type(x)) file_counts = {"jpg":10, "txt":14, "csv":2, "py":23} print(file_counts) print(file_counts["txt"]) print("html" in file_counts) #true if found #dictionaries are mutable file_counts["cfg"] = 8 #add item print(file_counts) file_counts["csv"] = 17 #replaces value for already assigned csv print(file_counts) del file_counts["cfg"] print(file_counts) file_counts = {"jpg":10, "txt":14, "csv":2, "py":23} for extension in file_counts: print(extension) for ext, amount in file_counts.items(): #value pairs print("There are {} files with the .{} extension".format(amount, ext)) print(file_counts.keys(), file_counts.values()) cool_beasts = {"octopuses":"tentacles", "dolphins":"fins", "rhinos":"horns"} for keys, values in cool_beasts.items(): print("{} have {}".format(keys, values)) print("Test get: " + cool_beasts.get("octopuses")) def count_letters(text): result = {} for letter in text: if letter not in result: result[letter] = 0 result[letter] += 1 return result count_letters("aaaaa") count_letters("tenant") print(count_letters("lksdajfo;asijnfl;kdnv;oisrnfg;lzknv;oizdfo;hgj")) wardrobe = {"shirt":["red","blue","white"], "jeans":["blue","black"]} for clothes, color in wardrobe.items(): for n in color: print("{} {}".format(n, clothes))
x = {} print(type(x)) file_counts = {'jpg': 10, 'txt': 14, 'csv': 2, 'py': 23} print(file_counts) print(file_counts['txt']) print('html' in file_counts) file_counts['cfg'] = 8 print(file_counts) file_counts['csv'] = 17 print(file_counts) del file_counts['cfg'] print(file_counts) file_counts = {'jpg': 10, 'txt': 14, 'csv': 2, 'py': 23} for extension in file_counts: print(extension) for (ext, amount) in file_counts.items(): print('There are {} files with the .{} extension'.format(amount, ext)) print(file_counts.keys(), file_counts.values()) cool_beasts = {'octopuses': 'tentacles', 'dolphins': 'fins', 'rhinos': 'horns'} for (keys, values) in cool_beasts.items(): print('{} have {}'.format(keys, values)) print('Test get: ' + cool_beasts.get('octopuses')) def count_letters(text): result = {} for letter in text: if letter not in result: result[letter] = 0 result[letter] += 1 return result count_letters('aaaaa') count_letters('tenant') print(count_letters('lksdajfo;asijnfl;kdnv;oisrnfg;lzknv;oizdfo;hgj')) wardrobe = {'shirt': ['red', 'blue', 'white'], 'jeans': ['blue', 'black']} for (clothes, color) in wardrobe.items(): for n in color: print('{} {}'.format(n, clothes))
# Tweepy # Copyright 2010-2021 Joshua Roesslein # See LICENSE for details. def list_to_csv(item_list): if item_list: return ','.join(map(str, item_list))
def list_to_csv(item_list): if item_list: return ','.join(map(str, item_list))
''' Given an array of integers nums sorted in non-decreasing order, find the starting and ending position of a given target value. If target is not found in the array, return [-1, -1]. You must write an algorithm with O(log n) runtime complexity ''' class Solution: def searchRange(self, nums: List[int], target: int) -> List[int]: if not nums: return [-1, -1] left, right = -1, len(nums) - 1 while left < right - 1: mid = (left + right) // 2 if nums[mid] < target: left = mid elif nums[mid] >= target: right = mid if nums[right] == target: ans_left = right else: return [-1, -1] left, right = left + 1, len(nums) while left < right - 1: mid = (left + right) // 2 if nums[mid] > target: right = mid elif nums[mid] == target: left = mid ans_right = right - 1 return [ans_left, ans_right]
""" Given an array of integers nums sorted in non-decreasing order, find the starting and ending position of a given target value. If target is not found in the array, return [-1, -1]. You must write an algorithm with O(log n) runtime complexity """ class Solution: def search_range(self, nums: List[int], target: int) -> List[int]: if not nums: return [-1, -1] (left, right) = (-1, len(nums) - 1) while left < right - 1: mid = (left + right) // 2 if nums[mid] < target: left = mid elif nums[mid] >= target: right = mid if nums[right] == target: ans_left = right else: return [-1, -1] (left, right) = (left + 1, len(nums)) while left < right - 1: mid = (left + right) // 2 if nums[mid] > target: right = mid elif nums[mid] == target: left = mid ans_right = right - 1 return [ans_left, ans_right]
_base_ = [ '../_base_/models/resnet50.py', '../_base_/datasets/imagenet_bs32.py', '../_base_/schedules/imagenet_bs256.py', '../_base_/default_runtime.py' ] load_from = "https://download.openmmlab.com/mmclassification/v0/resnet/resnet50_8xb32_in1k_20210831-ea4938fc.pth"
_base_ = ['../_base_/models/resnet50.py', '../_base_/datasets/imagenet_bs32.py', '../_base_/schedules/imagenet_bs256.py', '../_base_/default_runtime.py'] load_from = 'https://download.openmmlab.com/mmclassification/v0/resnet/resnet50_8xb32_in1k_20210831-ea4938fc.pth'
x = int(input("Please enter a number: ")) if x % 2 == 0 : print(x, "is even") else : print(x, "is odd")
x = int(input('Please enter a number: ')) if x % 2 == 0: print(x, 'is even') else: print(x, 'is odd')
class Memoization: allow_attr_memoization = False def _set_attr(obj, attr_name, value_func): setattr(obj, attr_name, value_func()) return getattr(obj, attr_name) def _memoize_attr(obj, attr_name, value_func): if not Memoization.allow_attr_memoization: return value_func() try: return getattr(obj, attr_name) except AttributeError: return _set_attr(obj, attr_name, value_func)
class Memoization: allow_attr_memoization = False def _set_attr(obj, attr_name, value_func): setattr(obj, attr_name, value_func()) return getattr(obj, attr_name) def _memoize_attr(obj, attr_name, value_func): if not Memoization.allow_attr_memoization: return value_func() try: return getattr(obj, attr_name) except AttributeError: return _set_attr(obj, attr_name, value_func)
class BotConfiguration(object): def __init__(self, auth_token, name, avatar): self._auth_token = auth_token self._name = name self._avatar = avatar @property def name(self): return self._name @property def avatar(self): return self._avatar @property def auth_token(self): return self._auth_token
class Botconfiguration(object): def __init__(self, auth_token, name, avatar): self._auth_token = auth_token self._name = name self._avatar = avatar @property def name(self): return self._name @property def avatar(self): return self._avatar @property def auth_token(self): return self._auth_token
#author SANKALP SAXENA # Complete the has_cycle function below. # # For your reference: # # SinglyLinkedListNode: # int data # SinglyLinkedListNode next # # def has_cycle(head): s = set() temp = head while True: if temp.next == None: return False if temp.next not in s: s.add(temp.next) else: return True temp = temp.next return False
def has_cycle(head): s = set() temp = head while True: if temp.next == None: return False if temp.next not in s: s.add(temp.next) else: return True temp = temp.next return False
{ 'targets': [ { 'target_name': 'test_new_target', 'defines': [ 'V8_DEPRECATION_WARNINGS=1' ], 'sources': [ '../entry_point.c', 'test_new_target.c' ] } ] }
{'targets': [{'target_name': 'test_new_target', 'defines': ['V8_DEPRECATION_WARNINGS=1'], 'sources': ['../entry_point.c', 'test_new_target.c']}]}
usa = ['atlanta','new york','chicago','baltimore'] uk = ['london','bristol','cambridge'] india = ['mumbai','delhi','banglore'] ''' #(a) i=input("Enter city name: ") if i in usa: print('city is in USA') elif i in uk: print('city is in UK') elif i in india: print('city is in INDIA') else: print('idk the country of the city entered!!') ''' #(b) i=input("Enter 1st city name: ") j=input("Enter 2nd city name: ") if (i in usa and j in usa) or (i in uk and j in uk) or (i in india and j in india): print("Both are in same country!") else: print("Both are in different countries!")
usa = ['atlanta', 'new york', 'chicago', 'baltimore'] uk = ['london', 'bristol', 'cambridge'] india = ['mumbai', 'delhi', 'banglore'] '\n#(a)\ni=input("Enter city name: ")\nif i in usa:\n print(\'city is in USA\')\nelif i in uk:\n print(\'city is in UK\')\nelif i in india:\n print(\'city is in INDIA\')\nelse:\n print(\'idk the country of the city entered!!\')\n' i = input('Enter 1st city name: ') j = input('Enter 2nd city name: ') if i in usa and j in usa or (i in uk and j in uk) or (i in india and j in india): print('Both are in same country!') else: print('Both are in different countries!')
#Base Class, Uses a descriptor to set a value class Descriptor: def __init__(self, name=None, **opts): self.name = name for key, value in opts.items(): setattr( self, key, value) def __set__(self, instance, value): instance.__dict__[self.name] = value #Descriptor for enforcing types class Typed( Descriptor ): expected_type = type( None ) def __set__(self, instance, value): if not isinstance(value, self.expected_type): raise TypeError('expected '+str(self.expected_type ) ) super().__set__(instance, value) #Descriptor for enforcing values class Unsigned(Descriptor): def __set__(self, instance, value): if value < 0: raise ValueError('Expected >=0 ') super().__set__( instance, value ) #Descriptor for enforcing size class MaxSized(Descriptor): def __init__(self, name=None, **opts): if 'size' not in opts: raise TypeError('missing size option') super().__init__(name, **opts ) def __set__(self, instance, value): if len(value) >= self.size: raise ValueError( 'size must be < ' + str( self.size ) ) super().__set__(instance, value) class Integer(Typed): expected_type = int class UnsignedInteger( Integer, Unsigned): pass class Float(Typed): expected_type = float class UnsignedFloat( Float, Unsigned): pass class String(Typed): expected_type = str class SizedString( String, MaxSized): pass #Method 1 class Stock: #Specify constraints name = SizedString('name', size=8) shares = UnsignedInteger('shares') price = UnsignedFloat('price') def __init__(self, name, shares, price): self.name = name self.shares = shares self.price = price print('Method 1') s = Stock('ACME', 50, 91.1) print( s.name ) s.shares = 75 print( s.shares) try: s.shares = -1 except: pass print( s.shares ) try: s.name = 'AAAABBBC' except: pass print( s.name ) ''' There are some techniques that can be used to simplify the specification of constraints in classes. 
One approach is to use a class decorator ''' #Class decorator to apply constraints def check_attributes(**kwargs ): def decorate( cls ): for key,value in kwargs.items(): if isinstance( value, Descriptor ): value.name = key setattr(cls, key , value) else: setattr(cls, key, value(key) ) return cls return decorate #Example @check_attributes( name=SizedString(size=8), shares=UnsignedInteger, price=UnsignedFloat ) class Stock: def __init__(self, name, shares, price): self.name = name self.shares = shares self.price = price print('Method 2') s = Stock('ACME', 50, 91.1) print( s.name ) s.shares = 75 print( s.shares) try: s.shares = -1 except: pass print( s.shares ) try: s.name = 'AAAABBBC' except: pass print( s.name ) #A metaclass that applies checking Method3 class checkedmeta( type ): def __new__(cls, clsname, bases, methods ): #Attach attribute names to the descriptors for key, value in methods.items(): if isinstance( value, Descriptor ): value.name = key return type.__new__(cls, clsname, bases, methods) # Example class Stock(metaclass=checkedmeta): #default name eq name,better than method1 name = SizedString(size=8) shares = UnsignedInteger() price = UnsignedFloat() def __init__(self, name, shares, price): self.name = name self.shares = shares self.price = price print('Method 3') s = Stock('ACME', 50, 91.1) print( s.name ) s.shares = 75 print( s.shares) try: s.shares = -1 except: pass print( s.shares ) try: s.name = 'AAAABBBC' except: pass print( s.name ) #Normal class Point: x = Integer('x') y = Integer('y') #MetaClass class Point(metaclass=checkedmeta): x = Integer() y = Integer() #use property functions~ TODO:: Change Typed,Unsigned,MaxSized as Method,
class Descriptor: def __init__(self, name=None, **opts): self.name = name for (key, value) in opts.items(): setattr(self, key, value) def __set__(self, instance, value): instance.__dict__[self.name] = value class Typed(Descriptor): expected_type = type(None) def __set__(self, instance, value): if not isinstance(value, self.expected_type): raise type_error('expected ' + str(self.expected_type)) super().__set__(instance, value) class Unsigned(Descriptor): def __set__(self, instance, value): if value < 0: raise value_error('Expected >=0 ') super().__set__(instance, value) class Maxsized(Descriptor): def __init__(self, name=None, **opts): if 'size' not in opts: raise type_error('missing size option') super().__init__(name, **opts) def __set__(self, instance, value): if len(value) >= self.size: raise value_error('size must be < ' + str(self.size)) super().__set__(instance, value) class Integer(Typed): expected_type = int class Unsignedinteger(Integer, Unsigned): pass class Float(Typed): expected_type = float class Unsignedfloat(Float, Unsigned): pass class String(Typed): expected_type = str class Sizedstring(String, MaxSized): pass class Stock: name = sized_string('name', size=8) shares = unsigned_integer('shares') price = unsigned_float('price') def __init__(self, name, shares, price): self.name = name self.shares = shares self.price = price print('Method 1') s = stock('ACME', 50, 91.1) print(s.name) s.shares = 75 print(s.shares) try: s.shares = -1 except: pass print(s.shares) try: s.name = 'AAAABBBC' except: pass print(s.name) '\nThere are some techniques that can be used to simplify the specification of constraints\nin classes. 
One approach is to use a class decorator\n' def check_attributes(**kwargs): def decorate(cls): for (key, value) in kwargs.items(): if isinstance(value, Descriptor): value.name = key setattr(cls, key, value) else: setattr(cls, key, value(key)) return cls return decorate @check_attributes(name=sized_string(size=8), shares=UnsignedInteger, price=UnsignedFloat) class Stock: def __init__(self, name, shares, price): self.name = name self.shares = shares self.price = price print('Method 2') s = stock('ACME', 50, 91.1) print(s.name) s.shares = 75 print(s.shares) try: s.shares = -1 except: pass print(s.shares) try: s.name = 'AAAABBBC' except: pass print(s.name) class Checkedmeta(type): def __new__(cls, clsname, bases, methods): for (key, value) in methods.items(): if isinstance(value, Descriptor): value.name = key return type.__new__(cls, clsname, bases, methods) class Stock(metaclass=checkedmeta): name = sized_string(size=8) shares = unsigned_integer() price = unsigned_float() def __init__(self, name, shares, price): self.name = name self.shares = shares self.price = price print('Method 3') s = stock('ACME', 50, 91.1) print(s.name) s.shares = 75 print(s.shares) try: s.shares = -1 except: pass print(s.shares) try: s.name = 'AAAABBBC' except: pass print(s.name) class Point: x = integer('x') y = integer('y') class Point(metaclass=checkedmeta): x = integer() y = integer()
{ "targets": [ { "target_name": "napi_test", "sources": [ "napi.test.c" ] }, { "target_name": "napi_arguments", "sources": [ "napi_arguments.c" ] }, { "target_name": "napi_async", "sources": [ "napi_async.cc" ] }, { "target_name": "napi_construct", "sources": [ "napi_construct.c" ] }, { "target_name": "napi_error", "sources": [ "napi_error.c" ] }, { "target_name": "napi_fatal_error", "sources": [ "napi_fatal_error.c" ] }, { "target_name": "napi_make_callback_recurse", "sources": [ "napi_make_callback_recurse.cc" ] }, { "target_name": "napi_make_callback", "sources": [ "napi_make_callback.c" ] }, { "target_name": "napi_object_wrap", "sources": [ "napi_object_wrap.c" ] }, { "target_name": "napi_reference", "sources": [ "napi_reference.c" ] }, { "target_name": "napi_new_target", "sources": [ "napi_new_target.c" ] }, { "target_name": "napi_string", "sources": [ "napi_string.c" ] }, { "target_name": "napi_thread_safe", "sources": [ "napi_thread_safe.c" ] }, { "target_name": "napi_tsfn", "sources": [ "napi_tsfn.c" ] }, { "target_name": "napi_typedarray", "sources": [ "napi_typedarray.c" ] } ] }
{'targets': [{'target_name': 'napi_test', 'sources': ['napi.test.c']}, {'target_name': 'napi_arguments', 'sources': ['napi_arguments.c']}, {'target_name': 'napi_async', 'sources': ['napi_async.cc']}, {'target_name': 'napi_construct', 'sources': ['napi_construct.c']}, {'target_name': 'napi_error', 'sources': ['napi_error.c']}, {'target_name': 'napi_fatal_error', 'sources': ['napi_fatal_error.c']}, {'target_name': 'napi_make_callback_recurse', 'sources': ['napi_make_callback_recurse.cc']}, {'target_name': 'napi_make_callback', 'sources': ['napi_make_callback.c']}, {'target_name': 'napi_object_wrap', 'sources': ['napi_object_wrap.c']}, {'target_name': 'napi_reference', 'sources': ['napi_reference.c']}, {'target_name': 'napi_new_target', 'sources': ['napi_new_target.c']}, {'target_name': 'napi_string', 'sources': ['napi_string.c']}, {'target_name': 'napi_thread_safe', 'sources': ['napi_thread_safe.c']}, {'target_name': 'napi_tsfn', 'sources': ['napi_tsfn.c']}, {'target_name': 'napi_typedarray', 'sources': ['napi_typedarray.c']}]}
__title__ = 'Django Platform Data Service' __version__ = '0.0.4' __author__ = 'Komol Nath Roy' __license__ = 'MIT' __copyright__ = 'Copyright 2020 Komol Nath Roy' VERSION = __version__ default_app_config = 'django_pds.apps.DjangoPdsConfig'
__title__ = 'Django Platform Data Service' __version__ = '0.0.4' __author__ = 'Komol Nath Roy' __license__ = 'MIT' __copyright__ = 'Copyright 2020 Komol Nath Roy' version = __version__ default_app_config = 'django_pds.apps.DjangoPdsConfig'
#!C:\Python27 # For 500,000 mutables, this takes WAY to long. Look for better options orgFile = open('InputList.txt', 'r') newFile = open('SortedList.txt', 'w') unsortedList = [line.strip() for line in orgFile] def bubble_sort(list): for i in reversed(range(len(list))): finished = True for j in range(i): if list[j] > list[j + 1]: list[j], list[j + 1] = list[j + 1], list[j] finished = False if finished: break return list sortedList = bubble_sort(unsortedList) for i in sortedList: newFile.write("%s\n" % i) orgFile.close() newFile.clsoe()
org_file = open('InputList.txt', 'r') new_file = open('SortedList.txt', 'w') unsorted_list = [line.strip() for line in orgFile] def bubble_sort(list): for i in reversed(range(len(list))): finished = True for j in range(i): if list[j] > list[j + 1]: (list[j], list[j + 1]) = (list[j + 1], list[j]) finished = False if finished: break return list sorted_list = bubble_sort(unsortedList) for i in sortedList: newFile.write('%s\n' % i) orgFile.close() newFile.clsoe()
students = [] def displayMenu(): print("What would you like to do?") print("\t(a) Add new student: ") print("\t(v) View students: ") print("\t(q) Quit: ") choice = input("Type one letter (a/v/q): ").strip() return choice #test the function def doAdd(): currentstudent = {} currentstudent["name"]=input("Enter Name: ") currentstudent["modules"]= readmodules() students.append(currentstudent) def readmodules(): modules = [] modulesName = input("\tEnter the first module name (blank to quit): ").strip() while modulesName != "": module = {} module["name"] = modulesName module["grade"] = int(input("Enter Grade: ")) modules.append(module) modulesName = input("\tEnter next module name (blank to quit: )").strip() return modules def displayModules(modules): print("\tName \tGrade") for module in modules: print("\t{}\t{}".format(module["name"], module["grade"])) def doView(): for currentstudent in students: print(currentstudent["name"]) displayModules(currentstudent["modules"]) choice = displayMenu() while (choice != 'q'): if choice == 'a': doAdd() elif choice == 'v': doView() elif choice != 'q': print("\n\nPlease select a, v or q") choice = displayMenu() print(students)
students = [] def display_menu(): print('What would you like to do?') print('\t(a) Add new student: ') print('\t(v) View students: ') print('\t(q) Quit: ') choice = input('Type one letter (a/v/q): ').strip() return choice def do_add(): currentstudent = {} currentstudent['name'] = input('Enter Name: ') currentstudent['modules'] = readmodules() students.append(currentstudent) def readmodules(): modules = [] modules_name = input('\tEnter the first module name (blank to quit): ').strip() while modulesName != '': module = {} module['name'] = modulesName module['grade'] = int(input('Enter Grade: ')) modules.append(module) modules_name = input('\tEnter next module name (blank to quit: )').strip() return modules def display_modules(modules): print('\tName \tGrade') for module in modules: print('\t{}\t{}'.format(module['name'], module['grade'])) def do_view(): for currentstudent in students: print(currentstudent['name']) display_modules(currentstudent['modules']) choice = display_menu() while choice != 'q': if choice == 'a': do_add() elif choice == 'v': do_view() elif choice != 'q': print('\n\nPlease select a, v or q') choice = display_menu() print(students)
def collect(items, item): if item in collecting_items: return collecting_items.append(item) return def drop(items, item): if item in collecting_items: collecting_items.remove(item) return return def combine_items(old, new): if old in collecting_items: for el in collecting_items: if el == old: index_of_el = collecting_items.index(el) collecting_items.insert(index_of_el + 1, new) return return def renew(items, item): if item in collecting_items: collecting_items.remove(item) collecting_items.append(item) return return def split_items(items, item): oldy, newy = item.split(":") return oldy, newy def is_item_in_list(items, item): if item in items: is_item_there = True return is_item_there return collecting_items = input().split(", ") command, item = input().split(" - ") is_item_there = False while command != "Craft!": if command == "Collect": is_item_in_list(collecting_items, command) if not is_item_there: collect(command) if command == "Drop": is_item_in_list(collecting_items, command) if not is_item_there: drop(command) if command == "Combine Items": if not is_item_there: result3, result4 = split_items(item) # splitting the combining items by ":" combine_items(result3, result4) if command == "Renew": is_item_in_list(collecting_items, command) if not is_item_there: renew(command) print(collecting_items) command = input().split(" - ") print(", ".join(collecting_items)) print(*collecting_items, sep=", ") # Iron, Sword, Stone # Drop - Bronze # Combine Items - Sword:Bow # Renew - Iron # Craft!
def collect(items, item): if item in collecting_items: return collecting_items.append(item) return def drop(items, item): if item in collecting_items: collecting_items.remove(item) return return def combine_items(old, new): if old in collecting_items: for el in collecting_items: if el == old: index_of_el = collecting_items.index(el) collecting_items.insert(index_of_el + 1, new) return return def renew(items, item): if item in collecting_items: collecting_items.remove(item) collecting_items.append(item) return return def split_items(items, item): (oldy, newy) = item.split(':') return (oldy, newy) def is_item_in_list(items, item): if item in items: is_item_there = True return is_item_there return collecting_items = input().split(', ') (command, item) = input().split(' - ') is_item_there = False while command != 'Craft!': if command == 'Collect': is_item_in_list(collecting_items, command) if not is_item_there: collect(command) if command == 'Drop': is_item_in_list(collecting_items, command) if not is_item_there: drop(command) if command == 'Combine Items': if not is_item_there: (result3, result4) = split_items(item) combine_items(result3, result4) if command == 'Renew': is_item_in_list(collecting_items, command) if not is_item_there: renew(command) print(collecting_items) command = input().split(' - ') print(', '.join(collecting_items)) print(*collecting_items, sep=', ')
# Make this unique, and don't share it with anybody. SECRET_KEY = '' DATABASES = { 'default': { 'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'. 'NAME': '', # Or path to database file if using sqlite3. 'USER': '', # Not used with sqlite3. 'PASSWORD': '', # Not used with sqlite3. 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } JOBS_NEW_THRESHOLD = 1 JOBS_NOTIFICATION_LIST = [] # list of email addresses GATEKEEPER_ENABLE_AUTOMODERATION = True GATEKEEPER_DEFAULT_STATUS = 0 GATEKEEPER_MODERATOR_LIST = JOBS_NOTIFICATION_LIST
secret_key = '' databases = {'default': {'ENGINE': 'django.db.backends.', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', 'PORT': ''}} jobs_new_threshold = 1 jobs_notification_list = [] gatekeeper_enable_automoderation = True gatekeeper_default_status = 0 gatekeeper_moderator_list = JOBS_NOTIFICATION_LIST
#* Asked by Facebook #? Given a list of sorted numbers, build a list of strings displaying numbers, #? Where each string is first and last number of linearly increasing numbers. #! Example: #? Input: [0,1,2,2,5,7,8,9,9,10,11,15] #? Output: ['0 -> 2','5 -> 5','7 -> 11','15 -> 15'] #! Note that numbers will not be lower than 0, also numbers may repeat def truncateList(lst): if len(lst) == 0: return list() num = lst[0] next_num = num+1 start_num = num final_lst = list() i = 1 while i < len(lst): if lst[i] == num or lst[i] == next_num: pass else: final_lst.append(f"{start_num} -> {num}") start_num = lst[i] num = lst[i] next_num = num + 1 i+=1 else: final_lst.append(f"{start_num} -> {num}") return final_lst print(truncateList([0,1,2,2,5,7,8,9,9,10,11,15])) # ['0 -> 2', '5 -> 5', '7 -> 11', '15 -> 15']
def truncate_list(lst): if len(lst) == 0: return list() num = lst[0] next_num = num + 1 start_num = num final_lst = list() i = 1 while i < len(lst): if lst[i] == num or lst[i] == next_num: pass else: final_lst.append(f'{start_num} -> {num}') start_num = lst[i] num = lst[i] next_num = num + 1 i += 1 else: final_lst.append(f'{start_num} -> {num}') return final_lst print(truncate_list([0, 1, 2, 2, 5, 7, 8, 9, 9, 10, 11, 15]))
expected_output = { "session_type": { "AnyConnect": { "username": { "lee": { "index": { 1: { "assigned_ip": "192.168.246.1", "bytes": {"rx": 4942, "tx": 11079}, "duration": "0h:00m:15s", "encryption": "RC4 AES128", "group_policy": "EngPolicy", "hashing": "SHA1", "inactivity": "0h:00m:00s", "license": "AnyConnect Premium", "login_time": "15:25:13 EST Fri Jan 28 2011", "nac_result": "Unknown", "protocol": "AnyConnect-Parent SSL-Tunnel DTLS-Tunnel", "public_ip": "10.139.1.2", "tunnel_group": "EngGroup", "vlan": "none", "vlan_mapping": "N/A", } } }, "yumi": { "index": { 2: { "assigned_ip": "192.168.246.2", "bytes": {"rx": 6942, "tx": 11055}, "duration": "0h:05m:15s", "encryption": "RC4 AES128", "group_policy": "EngPolicy", "hashing": "SHA1", "inactivity": "0h:00m:00s", "license": "AnyConnect Premium", "login_time": "15:25:13 EST Fri Jan 29 2011", "nac_result": "Unknown", "protocol": "AnyConnect-Parent SSL-Tunnel DTLS-Tunnel", "public_ip": "10.139.1.3", "tunnel_group": "EngGroup", "vlan": "none", "vlan_mapping": "N/A", } } }, } } } }
expected_output = {'session_type': {'AnyConnect': {'username': {'lee': {'index': {1: {'assigned_ip': '192.168.246.1', 'bytes': {'rx': 4942, 'tx': 11079}, 'duration': '0h:00m:15s', 'encryption': 'RC4 AES128', 'group_policy': 'EngPolicy', 'hashing': 'SHA1', 'inactivity': '0h:00m:00s', 'license': 'AnyConnect Premium', 'login_time': '15:25:13 EST Fri Jan 28 2011', 'nac_result': 'Unknown', 'protocol': 'AnyConnect-Parent SSL-Tunnel DTLS-Tunnel', 'public_ip': '10.139.1.2', 'tunnel_group': 'EngGroup', 'vlan': 'none', 'vlan_mapping': 'N/A'}}}, 'yumi': {'index': {2: {'assigned_ip': '192.168.246.2', 'bytes': {'rx': 6942, 'tx': 11055}, 'duration': '0h:05m:15s', 'encryption': 'RC4 AES128', 'group_policy': 'EngPolicy', 'hashing': 'SHA1', 'inactivity': '0h:00m:00s', 'license': 'AnyConnect Premium', 'login_time': '15:25:13 EST Fri Jan 29 2011', 'nac_result': 'Unknown', 'protocol': 'AnyConnect-Parent SSL-Tunnel DTLS-Tunnel', 'public_ip': '10.139.1.3', 'tunnel_group': 'EngGroup', 'vlan': 'none', 'vlan_mapping': 'N/A'}}}}}}}
# Author: Jochen Gast <jochen.gast@visinf.tu-darmstadt.de> class MovingAverage: postfix = "avg" def __init__(self): self.sum = 0.0 self.count = 0 def add_value(self, sigma, addcount=1): self.sum += sigma self.count += addcount def add_average(self, avg, addcount): self.sum += avg * addcount self.count += addcount def mean(self): return self.sum / self.count class ExponentialMovingAverage: postfix = "ema" def __init__(self, alpha=0.7): self.weighted_sum = 0.0 self.weighted_count = 0 self.alpha = alpha def add_value(self, sigma): self.weighted_sum = sigma + (1.0 - self.alpha) * self.weighted_sum self.weighted_count = 1 + (1.0 - self.alpha) * self.weighted_count def add_average(self, avg, addcount): self.weighted_sum = avg * addcount + (1.0 - self.alpha) * self.weighted_sum self.weighted_count = addcount + (1.0 - self.alpha) * self.weighted_count def mean(self): return self.weighted_sum / self.weighted_count
class Movingaverage: postfix = 'avg' def __init__(self): self.sum = 0.0 self.count = 0 def add_value(self, sigma, addcount=1): self.sum += sigma self.count += addcount def add_average(self, avg, addcount): self.sum += avg * addcount self.count += addcount def mean(self): return self.sum / self.count class Exponentialmovingaverage: postfix = 'ema' def __init__(self, alpha=0.7): self.weighted_sum = 0.0 self.weighted_count = 0 self.alpha = alpha def add_value(self, sigma): self.weighted_sum = sigma + (1.0 - self.alpha) * self.weighted_sum self.weighted_count = 1 + (1.0 - self.alpha) * self.weighted_count def add_average(self, avg, addcount): self.weighted_sum = avg * addcount + (1.0 - self.alpha) * self.weighted_sum self.weighted_count = addcount + (1.0 - self.alpha) * self.weighted_count def mean(self): return self.weighted_sum / self.weighted_count
ejem = "esto es un ejemplo" print (ejem) print (ejem[8:18], ejem[5:7], ejem[0:4]) subejem = ejem[8:18] + ejem[4:8] + ejem[0:4] print (subejem) #ejem = ejem.split(" ") #print (ejem[2:4], ejem[1::-1])
ejem = 'esto es un ejemplo' print(ejem) print(ejem[8:18], ejem[5:7], ejem[0:4]) subejem = ejem[8:18] + ejem[4:8] + ejem[0:4] print(subejem)
max_char = 105 sample_rate = 22050 n_fft = 1024 hop_length = 256 win_length = 1024 preemphasis = 0.97 ref_db = 20 max_db = 100 mel_dim = 80 max_length = 780 reduction = 4 embedding_dim = 128 symbol_length = 70 d = 256 c = 512 f = n_fft // 2 + 1 batch_size = 16 checkpoint_step = 500 max_T = 160 learning_rate = 0.0002 beta1 = 0.5 beta2 = 0.9
max_char = 105 sample_rate = 22050 n_fft = 1024 hop_length = 256 win_length = 1024 preemphasis = 0.97 ref_db = 20 max_db = 100 mel_dim = 80 max_length = 780 reduction = 4 embedding_dim = 128 symbol_length = 70 d = 256 c = 512 f = n_fft // 2 + 1 batch_size = 16 checkpoint_step = 500 max_t = 160 learning_rate = 0.0002 beta1 = 0.5 beta2 = 0.9
''' Problem Description The program takes two dictionaries and concatenates them into one dictionary. Problem Solution 1. Declare and initialize two dictionaries with some key-value pairs 2. Use the update() function to add the key-value pair from the second dictionary to the first dictionary. 3. Print the final dictionary. 4. Exit. ''' d1={'A': 1,'B': 2} d2={'C': 3} d1.update(d2) print(f'First dictionary is: {d1}\nSecon dictionary is: {d2}\nConcatenated dictionary is: {d1}')
""" Problem Description The program takes two dictionaries and concatenates them into one dictionary. Problem Solution 1. Declare and initialize two dictionaries with some key-value pairs 2. Use the update() function to add the key-value pair from the second dictionary to the first dictionary. 3. Print the final dictionary. 4. Exit. """ d1 = {'A': 1, 'B': 2} d2 = {'C': 3} d1.update(d2) print(f'First dictionary is: {d1}\nSecon dictionary is: {d2}\nConcatenated dictionary is: {d1}')
# Well-known Windows paths used as artifact-collection targets.
LOC_RECENT = u'/AppData/Roaming/Microsoft/Windows/Recent/'
LOC_REG = u'/Windows/System32/config/'
LOC_WINEVT = LOC_REG  # legacy .evt logs live next to the registry hives
LOC_WINEVTX = u'/Windows/System32/winevt/logs/'
LOC_AMCACHE = u'/Windows/AppCompat/Programs/'

# Single files to copy. Each entry: [artifact, src_path, dest_dir]
SYSTEM_FILE = [
    # registry hive backups
    ['regb', LOC_REG + u'RegBack/SAM', u'/Registry/RegBack/'],
    ['regb', LOC_REG + u'RegBack/SECURITY', u'/Registry/RegBack/'],
    ['regb', LOC_REG + u'RegBack/SOFTWARE', u'/Registry/RegBack/'],
    ['regb', LOC_REG + u'RegBack/SYSTEM', u'/Registry/RegBack/'],
    ['regb_xp', LOC_REG + u'Repair/SAM', u'/Registry/Repair/'],
    ['regb_xp', LOC_REG + u'Repair/SECURITY', u'/Registry/Repair/'],
    ['regb_xp', LOC_REG + u'Repair/software', u'/Registry/Repair/'],
    ['regb_xp', LOC_REG + u'Repair/system', u'/Registry/Repair/'],
    # system logs
    # NOTE(review): the two entries below lack the trailing slash that
    # 'AppEvent.evt' has -- kept as-is to preserve behavior; confirm intent.
    ['evtl_xp', LOC_WINEVT + u'AppEvent.evt', u'/OSLogs/evtl/'],
    ['evtl_xp', LOC_WINEVT + u'SecEvent.evt', u'/OSLogs/evtl'],
    ['evtl_xp', LOC_WINEVT + u'SysEvent.evt', u'/OSLogs/evtl'],
    ['setupapi', u'/Windows/Inf/setupapi.dev.log', u'/Registry/'],
    ['setupapi_xp', u'/Windows/setupapi.log', u'/Registry/'],
    # mru
    ['amcache', LOC_AMCACHE + u'RecentFileCache.bcf', u'/MRU/Prog/recentfilecache/'],
    # persistence
    ['sch_xp', u'/Windows/SchedLgU.txt', u'/Autoruns/sch_tsks/'],
    # etl traces
    ['etl', u'/ProgramData/Microsoft/Windows/Power Efficiency Diagnostics/energy-ntkl.etl', u'/Misc/etl/'],
    ['etl', u'/ProgramData/Microsoft/Windows/Power Efficiency Diagnostics/energy-trace.etl', u'/Misc/etl/'],
    ['etl', u'/Windows/System32/LogFiles/WMI/LwtNetLog.etl', u'/Misc/etl/'],
    ['etl', u'/Windows/System32/LogFiles/WMI/Wifi.etl', u'/Misc/etl/'],
    # file system metadata
    ['logfile', u'/$LogFile', u'/Filesystem/'],
    ['mft', u'/$MFT', u'/Filesystem/'],
    # others
    ['bits', u'/ProgramData/Microsoft/Network/Downloader/qmgr.dat', u'/Misc/bits/'],
    ['pagefile', u'/pagefile.sys', u'/Memory/pagefile/'],
]

# Directories to scan. Each entry:
# [artifact, src_path, dest_dir, isRecursive, stringToMatch]
# stringToMatch=None means "take everything".
SYSTEM_DIR = [
    # registry hives
    ['reg', LOC_REG[:-1], u'/Registry/', False, u'SAM'],
    ['reg', LOC_REG[:-1], u'/Registry/', False, u'SECURITY'],
    ['reg', LOC_REG[:-1], u'/Registry/', False, u'SOFTWARE'],
    ['reg', LOC_REG[:-1], u'/Registry/', False, u'SYSTEM'],
    # system logs
    ['etl', u'/Windows/System32/WDI/LogFiles', u'/OSLogs/etl/', False, u'.etl'],
    ['evtl', LOC_WINEVTX[:-1], u'/OSLogs/evtl/', False, None],
    ['ual', u'/Windows/System32/LogFiles/SUM', u'/OSLogs/ual/', False, u'.mdb'],
    # mru
    ['amcache', LOC_AMCACHE[:-1], u'/MRU/Prog/amcache/', False, u'Amcache'],
    ['prefetch', u'/Windows/Prefetch', u'/MRU/Prog/prefetch/', False, u'.pf'],
    ['sccm', u'/Windows/System32/wbem/Repository', u'/MRU/Prog/sccm/', False, None],
    ['srum', u'/Windows/System32/sru', u'/MRU/Prog/srum/', False, None],
    ['sqm', u'/ProgramData/Microsoft/Windows/Sqm/Upload', u'/MRU/Prog/sqm/', False, u'.sqm'],
    ['syscache', u'/System Volume Information', u'/MRU/Prog/syscache/', False, u'Syscache'],
    # persistence
    ['sch_job', u'/Windows/Tasks', u'/Autoruns/sch_tsks/', False, u'.job'],
    ['sch_xml', u'/Windows/System32/Tasks', u'/Autoruns/sch_tsks/', True, None],
    ['startupinfo', u'/Windows/System32/wdi/LogFiles/StartupInfo', u'/Autoruns/startupinfo/', False, u'StartupInfo'],
    # others
    ['antimalware', u'/ProgramData/Microsoft/Microsoft Antimalware/Support', u'/VirusScans/', False, u'MPLog'],
    ['defender', u'/ProgramData/Microsoft/Windows Defender/Support', u'/VirusScans/', False, u'MPLog'],
    ['certutil', u'/Windows/System32/config/systemprofile/AppData/LocalLow/Microsoft/CryptnetUrlCache/MetaData', u'/Misc/certutil/', False, None],
    ['recycle', u'/$Recycle.Bin', u'/Recycle/', True, None],
    ['recycle_xp', u'/RECYCLER', u'/Recycle/', True, None],
    ['sig_ctlg', u'/Windows/System32/CatRoot', u'/Misc/signatures/', True, None],
    ['wer', u'/ProgramData/Microsoft/Windows/WER', u'/Misc/wer/', True, None],
]

# Per-user single files (paths are relative to a user profile root).
# Each entry: [artifact, src_path, dest_dir]
USER_FILE = [
    # system logs
    ['etl', u'/AppData/Local/Microsoft/Windows/Explorer/ExplorerStartupLog.etl', u'/OSLogs/etl/'],
    ['etl', u'/AppData/Local/Microsoft/Windows/Explorer/ExplorerStartupLog_RunOnce.etl', u'/OSLogs/etl/'],
    ['etl', u'/AppData/Local/Packages/Microsoft.Windows.Cortana_cw5n1h2txyewy/TempState/Traces/CortanaTrace1.etl', u'/OSLogs/etl/'],
    ['pshist', u'/AppData/Roaming/Microsoft/Windows/PowerShell/PSReadline/ConsoleHost_history.txt', u'/OSLogs/pshist/'],
]

# Per-user directories. Each entry:
# [artifact, src_path, dest_dir, isRecursive, stringToMatch]
USER_DIR = [
    # registry hives
    ['ntuser', u'/', u'/Registry/', False, u'NTUSER'],
    ['usrclass', u'/AppData/Local/Microsoft/Windows/', u'/Registry/', False, u'UsrClass'],
    ['usrclass_xp', u'/Local Settings/Application Data/Microsoft/Windows/', u'/Registry/', False, u'UsrClass'],
    # mru
    ['iehist', u'/AppData/Local/Microsoft/Windows/WebCache', u'/MRU/Files/iehist/', False, None],
    ['iehist_xp', u'/Local Settings/History/History.IE5', u'/MRU/Files/iehist/', True, None],
    ['jmp', LOC_RECENT + u'AutomaticDestinations', u'/MRU/Files/jmp/', False, None],
    ['jmp', LOC_RECENT + u'CustomDestinations', u'/MRU/Files/jmp/', False, None],
    # NOTE(review): the two lnk destinations below lack the leading slash
    # other dest_dir values have -- kept as-is to preserve behavior.
    ['lnk', LOC_RECENT, u'MRU/Files/lnk', False, None],
    ['lnk_xp', u'/Recent/', u'MRU/Files/lnk', False, None],
    ['thumbcache', u'/AppData/Local/Microsoft/Windows/Explorer', u'/MRU/thumbcache/', False, u'thumbcache_'],
    ['timeline', u'/AppData/Local/ConnectedDevicesPlatform', u'/MRU/timeline/', True, None],
    # others
    ['certutil', u'/AppData/LocalLow/Microsoft/CryptnetUrlCache/MetaData', u'/Misc/certutil/', False, None],
    ['rdpcache', u'/AppData/Local/Microsoft/Terminal Server Client/Cache', u'/Misc/rdpcache/', False, None],
    ['rdpcache_xp', u'/Local Settings/Application Data/Microsoft/Terminal Server Client/Cache', u'/Misc/rdpcache/', False, None],
]

# Alternate data streams. Each entry: [artifact, src_path, dest_dir, ads_name]
FILE_ADS = [
    # file system
    ['usnjrnl', u'/$Extend/$UsnJrnl', u'/Filesystem/', u'$J'],
]
# Snake-case (PEP 8) artifact location tables.
# Fixed for consistency: this block declares lowercase loc_* constants but
# previously populated the lists with the UPPERCASE names defined elsewhere
# in the file, so the block was not self-contained. It now uses its own
# constants throughout (the resulting values are identical).
loc_recent = u'/AppData/Roaming/Microsoft/Windows/Recent/'
loc_reg = u'/Windows/System32/config/'
loc_winevt = loc_reg  # legacy .evt logs live next to the registry hives
loc_winevtx = u'/Windows/System32/winevt/logs/'
loc_amcache = u'/Windows/AppCompat/Programs/'

# Single files to copy. Each entry: [artifact, src_path, dest_dir]
system_file = [
    # registry hive backups
    ['regb', loc_reg + u'RegBack/SAM', u'/Registry/RegBack/'],
    ['regb', loc_reg + u'RegBack/SECURITY', u'/Registry/RegBack/'],
    ['regb', loc_reg + u'RegBack/SOFTWARE', u'/Registry/RegBack/'],
    ['regb', loc_reg + u'RegBack/SYSTEM', u'/Registry/RegBack/'],
    ['regb_xp', loc_reg + u'Repair/SAM', u'/Registry/Repair/'],
    ['regb_xp', loc_reg + u'Repair/SECURITY', u'/Registry/Repair/'],
    ['regb_xp', loc_reg + u'Repair/software', u'/Registry/Repair/'],
    ['regb_xp', loc_reg + u'Repair/system', u'/Registry/Repair/'],
    # system logs (trailing-slash inconsistency preserved from the original)
    ['evtl_xp', loc_winevt + u'AppEvent.evt', u'/OSLogs/evtl/'],
    ['evtl_xp', loc_winevt + u'SecEvent.evt', u'/OSLogs/evtl'],
    ['evtl_xp', loc_winevt + u'SysEvent.evt', u'/OSLogs/evtl'],
    ['setupapi', u'/Windows/Inf/setupapi.dev.log', u'/Registry/'],
    ['setupapi_xp', u'/Windows/setupapi.log', u'/Registry/'],
    # mru
    ['amcache', loc_amcache + u'RecentFileCache.bcf', u'/MRU/Prog/recentfilecache/'],
    # persistence
    ['sch_xp', u'/Windows/SchedLgU.txt', u'/Autoruns/sch_tsks/'],
    # etl traces
    ['etl', u'/ProgramData/Microsoft/Windows/Power Efficiency Diagnostics/energy-ntkl.etl', u'/Misc/etl/'],
    ['etl', u'/ProgramData/Microsoft/Windows/Power Efficiency Diagnostics/energy-trace.etl', u'/Misc/etl/'],
    ['etl', u'/Windows/System32/LogFiles/WMI/LwtNetLog.etl', u'/Misc/etl/'],
    ['etl', u'/Windows/System32/LogFiles/WMI/Wifi.etl', u'/Misc/etl/'],
    # file system metadata
    ['logfile', u'/$LogFile', u'/Filesystem/'],
    ['mft', u'/$MFT', u'/Filesystem/'],
    # others
    ['bits', u'/ProgramData/Microsoft/Network/Downloader/qmgr.dat', u'/Misc/bits/'],
    ['pagefile', u'/pagefile.sys', u'/Memory/pagefile/'],
]

# Directories to scan. Each entry:
# [artifact, src_path, dest_dir, isRecursive, stringToMatch]
system_dir = [
    # registry hives
    ['reg', loc_reg[:-1], u'/Registry/', False, u'SAM'],
    ['reg', loc_reg[:-1], u'/Registry/', False, u'SECURITY'],
    ['reg', loc_reg[:-1], u'/Registry/', False, u'SOFTWARE'],
    ['reg', loc_reg[:-1], u'/Registry/', False, u'SYSTEM'],
    # system logs
    ['etl', u'/Windows/System32/WDI/LogFiles', u'/OSLogs/etl/', False, u'.etl'],
    ['evtl', loc_winevtx[:-1], u'/OSLogs/evtl/', False, None],
    ['ual', u'/Windows/System32/LogFiles/SUM', u'/OSLogs/ual/', False, u'.mdb'],
    # mru
    ['amcache', loc_amcache[:-1], u'/MRU/Prog/amcache/', False, u'Amcache'],
    ['prefetch', u'/Windows/Prefetch', u'/MRU/Prog/prefetch/', False, u'.pf'],
    ['sccm', u'/Windows/System32/wbem/Repository', u'/MRU/Prog/sccm/', False, None],
    ['srum', u'/Windows/System32/sru', u'/MRU/Prog/srum/', False, None],
    ['sqm', u'/ProgramData/Microsoft/Windows/Sqm/Upload', u'/MRU/Prog/sqm/', False, u'.sqm'],
    ['syscache', u'/System Volume Information', u'/MRU/Prog/syscache/', False, u'Syscache'],
    # persistence
    ['sch_job', u'/Windows/Tasks', u'/Autoruns/sch_tsks/', False, u'.job'],
    ['sch_xml', u'/Windows/System32/Tasks', u'/Autoruns/sch_tsks/', True, None],
    ['startupinfo', u'/Windows/System32/wdi/LogFiles/StartupInfo', u'/Autoruns/startupinfo/', False, u'StartupInfo'],
    # others
    ['antimalware', u'/ProgramData/Microsoft/Microsoft Antimalware/Support', u'/VirusScans/', False, u'MPLog'],
    ['defender', u'/ProgramData/Microsoft/Windows Defender/Support', u'/VirusScans/', False, u'MPLog'],
    ['certutil', u'/Windows/System32/config/systemprofile/AppData/LocalLow/Microsoft/CryptnetUrlCache/MetaData', u'/Misc/certutil/', False, None],
    ['recycle', u'/$Recycle.Bin', u'/Recycle/', True, None],
    ['recycle_xp', u'/RECYCLER', u'/Recycle/', True, None],
    ['sig_ctlg', u'/Windows/System32/CatRoot', u'/Misc/signatures/', True, None],
    ['wer', u'/ProgramData/Microsoft/Windows/WER', u'/Misc/wer/', True, None],
]

# Per-user single files (relative to a user profile root).
# Each entry: [artifact, src_path, dest_dir]
user_file = [
    ['etl', u'/AppData/Local/Microsoft/Windows/Explorer/ExplorerStartupLog.etl', u'/OSLogs/etl/'],
    ['etl', u'/AppData/Local/Microsoft/Windows/Explorer/ExplorerStartupLog_RunOnce.etl', u'/OSLogs/etl/'],
    ['etl', u'/AppData/Local/Packages/Microsoft.Windows.Cortana_cw5n1h2txyewy/TempState/Traces/CortanaTrace1.etl', u'/OSLogs/etl/'],
    ['pshist', u'/AppData/Roaming/Microsoft/Windows/PowerShell/PSReadline/ConsoleHost_history.txt', u'/OSLogs/pshist/'],
]

# Per-user directories. Each entry:
# [artifact, src_path, dest_dir, isRecursive, stringToMatch]
user_dir = [
    # registry hives
    ['ntuser', u'/', u'/Registry/', False, u'NTUSER'],
    ['usrclass', u'/AppData/Local/Microsoft/Windows/', u'/Registry/', False, u'UsrClass'],
    ['usrclass_xp', u'/Local Settings/Application Data/Microsoft/Windows/', u'/Registry/', False, u'UsrClass'],
    # mru
    ['iehist', u'/AppData/Local/Microsoft/Windows/WebCache', u'/MRU/Files/iehist/', False, None],
    ['iehist_xp', u'/Local Settings/History/History.IE5', u'/MRU/Files/iehist/', True, None],
    ['jmp', loc_recent + u'AutomaticDestinations', u'/MRU/Files/jmp/', False, None],
    ['jmp', loc_recent + u'CustomDestinations', u'/MRU/Files/jmp/', False, None],
    # leading-slash inconsistency in the lnk destinations preserved
    ['lnk', loc_recent, u'MRU/Files/lnk', False, None],
    ['lnk_xp', u'/Recent/', u'MRU/Files/lnk', False, None],
    ['thumbcache', u'/AppData/Local/Microsoft/Windows/Explorer', u'/MRU/thumbcache/', False, u'thumbcache_'],
    ['timeline', u'/AppData/Local/ConnectedDevicesPlatform', u'/MRU/timeline/', True, None],
    # others
    ['certutil', u'/AppData/LocalLow/Microsoft/CryptnetUrlCache/MetaData', u'/Misc/certutil/', False, None],
    ['rdpcache', u'/AppData/Local/Microsoft/Terminal Server Client/Cache', u'/Misc/rdpcache/', False, None],
    ['rdpcache_xp', u'/Local Settings/Application Data/Microsoft/Terminal Server Client/Cache', u'/Misc/rdpcache/', False, None],
]

# Alternate data streams. Each entry: [artifact, src_path, dest_dir, ads_name]
file_ads = [
    ['usnjrnl', u'/$Extend/$UsnJrnl', u'/Filesystem/', u'$J'],
]
class Config:
    """Container for API connection settings (app id, key and domain)."""

    # Class-level defaults, kept so Config.appId etc. resolve to None
    # before/without instantiation.
    appId = None
    apiKey = None
    domain = None

    def __init__(self, appId, apiKey, domain):
        """Store the supplied credentials on the instance."""
        self.appId = appId
        self.apiKey = apiKey
        self.domain = domain
class Config:
    """Container for API connection settings (app id, key and domain).

    Instances expose ``appId``, ``apiKey`` and ``domain``.
    """

    # Fixed for consistency: the class-level defaults were declared as
    # app_id/api_key, but __init__ sets appId/apiKey, so the snake_case
    # attributes were dead and Config.appId did not exist. The defaults
    # now match the attribute names actually used.
    appId = None
    apiKey = None
    domain = None

    def __init__(self, appId, apiKey, domain):
        """Store the supplied credentials on the instance."""
        self.appId = appId
        self.apiKey = apiKey
        self.domain = domain
# For each of T test cases: read the array length and the array, then
# print 1 when the element sum is below the length, otherwise the
# difference (sum - length).
test_cases = int(input())
for _ in range(test_cases):
    array_length = int(input())
    total = sum(int(token) for token in input().split())
    print(1 if total < array_length else total - array_length)
# Competitive-programming driver: T test cases from stdin. Each case
# supplies n and n integers; output is 1 if sum(values) < n, else
# sum(values) - n.
for _ in range(int(input())):
    n = int(input())
    values = [int(v) for v in input().split()]
    total = sum(values)
    if total < n:
        print(1)
    else:
        print(total - n)
def _get_value(obj, key): list_end = key.find("]") is_list = list_end > 0 if is_list: list_index = int(key[list_end - 1]) return obj[list_index] return obj[key] def find(obj, path): try: # Base case if len(path) == 0: return obj key = str(path[0]) rest = path[1:] nested = _get_value(obj, key) return find(nested, rest) except IndexError: raise IndexError except KeyError: raise KeyError except TypeError: raise TypeError
def _get_value(obj, key): list_end = key.find(']') is_list = list_end > 0 if is_list: list_index = int(key[list_end - 1]) return obj[list_index] return obj[key] def find(obj, path): try: if len(path) == 0: return obj key = str(path[0]) rest = path[1:] nested = _get_value(obj, key) return find(nested, rest) except IndexError: raise IndexError except KeyError: raise KeyError except TypeError: raise TypeError
# Undirected demo graph as an adjacency-list dict.
GRAPH = {
    "A": ["B", "D", "E"],
    "B": ["A", "C", "D"],
    "C": ["B", "G"],
    "D": ["A", "B", "E", "F"],
    "E": ["A", "D"],
    "F": ["D"],
    "G": ["C"],
}

visited_list = []  # filled in visit order by the traversal below


def dfs(graph, current_vertex, visited):
    """Recursive depth-first search.

    Appends current_vertex to `visited`, then recurses into each
    not-yet-visited neighbour (the call stack holds the return path).
    Returns the shared `visited` list.
    """
    visited.append(current_vertex)
    for neighbour in graph[current_vertex]:
        if neighbour not in visited:
            dfs(graph, neighbour, visited)
    return visited


# main program
traversal = dfs(GRAPH, 'A', visited_list)
print('Nodes visited in this order:', traversal)
# Undirected demo graph as an adjacency-list dict.
graph = {
    'A': ['B', 'D', 'E'],
    'B': ['A', 'C', 'D'],
    'C': ['B', 'G'],
    'D': ['A', 'B', 'E', 'F'],
    'E': ['A', 'D'],
    'F': ['D'],
    'G': ['C'],
}

visited_list = []  # filled in visit order by the traversal below


def dfs(graph, current_vertex, visited):
    """Recursive depth-first search; returns the shared `visited` list."""
    visited.append(current_vertex)
    for vertex in graph[current_vertex]:  # check neighbours
        if vertex not in visited:
            dfs(graph, vertex, visited)
    return visited


# Fixed: the traversal previously referenced the undefined-in-this-block
# UPPERCASE name `GRAPH`; it now uses the `graph` constant defined above.
traversal = dfs(graph, 'A', visited_list)
print('Nodes visited in this order:', traversal)
class Solution:
    def minCostClimbingStairs(self, cost):
        """Cheapest total cost to step past the end of the staircase.

        Classic DP (LeetCode 746): you may start on step 0 or 1 and climb
        one or two steps at a time, paying cost[i] when leaving step i.
        dp[i] is the minimum cost to stand on (virtual) step i.
        """
        dp = [0, 0]
        for step in range(2, len(cost) + 1):
            via_two = dp[step - 2] + cost[step - 2]
            via_one = dp[step - 1] + cost[step - 1]
            dp.append(min(via_two, via_one))
        return dp[-1]


s = Solution()
print(s.minCostClimbingStairs([10, 15, 20]))
print(s.minCostClimbingStairs([1, 100, 1, 1, 1, 100, 1, 1, 100, 1]))
class Solution:
    def min_cost_climbing_stairs(self, cost):
        """Cheapest total cost to step past the end of the staircase.

        DP over a virtual "top" step: dp[i] is the minimum cost to stand
        on step i, reachable from either of the two previous steps.
        """
        dp = [0] * (len(cost) + 1)
        for i in range(2, len(dp)):
            dp[i] = min(dp[i - 2] + cost[i - 2], dp[i - 1] + cost[i - 1])
        return dp[-1]


# Fixed: the driver previously crashed at import time -- it instantiated
# the undefined name `solution` and called the non-existent camelCase
# method `minCostClimbingStairs` instead of the one defined above.
s = Solution()
print(s.min_cost_climbing_stairs([10, 15, 20]))
print(s.min_cost_climbing_stairs([1, 100, 1, 1, 1, 100, 1, 1, 100, 1]))
def save_transcriptions(path, transcriptions):
    """Write `transcriptions` (image_id -> text) to `path`, one per line.

    Each line has the form 'image_id transcription\\n'.
    """
    with open(path, 'w') as out:
        for image_id, text in transcriptions.items():
            out.write('{} {}\n'.format(image_id, text))


def load_transcriptions(path):
    """Read a transcription file back into an {image_id: text} dict.

    Raises ValueError naming the offending line number when a line
    cannot be split into an id and a transcription.
    """
    result = {}
    with open(path, "r") as f:
        for line_no, line in enumerate(f):
            if not line:
                continue
            try:
                image_id, transcription = parse_transcription_line(line)
            except ValueError:
                raise ValueError('Failed to parse line {} of file {}'.format(line_no, path))
            result[image_id] = transcription
    return result


def parse_transcription_line(line):
    """Split a line into (image_id, transcription), dropping one trailing newline."""
    image_id, transcription = line.split(" ", maxsplit=1)
    if transcription[-1] == '\n':
        transcription = transcription[:-1]
    return image_id, transcription
def save_transcriptions(path, transcriptions):
    """Write `transcriptions` (image_id -> text) to `path`.

    One 'image_id transcription\\n' record per line.
    """
    with open(path, 'w') as f:
        for key in transcriptions:
            f.write('{} {}\n'.format(key, transcriptions[key]))


def load_transcriptions(path):
    """Read a transcription file back into an {image_id: text} dict.

    Raises ValueError naming the offending line number when a line
    cannot be parsed.
    """
    transcriptions = {}
    with open(path, 'r') as f:
        for line_no, line in enumerate(f):
            if len(line) == 0:
                continue
            try:
                image_id, transcription = parse_transcription_line(line)
            except ValueError:
                # Fixed: previously raised the undefined name `value_error`,
                # which turned every parse failure into a NameError instead
                # of the intended, informative ValueError.
                raise ValueError('Failed to parse line {} of file {}'.format(line_no, path))
            transcriptions[image_id] = transcription
    return transcriptions


def parse_transcription_line(line):
    """Split a line into (image_id, transcription), dropping one trailing newline."""
    image_id, transcription = line.split(' ', maxsplit=1)
    if transcription[-1] == '\n':
        transcription = transcription[:-1]
    return image_id, transcription