content stringlengths 7 1.05M | fixed_cases stringlengths 1 1.28M |
|---|---|
#!/usr/bin/python
#300 most common words according to google
#Source: https://github.com/first20hours/google-10000-english
#Some words that might refer to scientific information have been removed
boringWords=set(['the','of','and','to','a','in','for','is','on','that','by','this','with','i','you','it','not','or','be','are','from','at','as','your','all','have','new','more','an','was','we','will','home','can','us','about','if','page','my','has','search','free','but','our','one','other','do','no','information','time','they','site','he','up','may','what','which','their','news','out','use','any','there','see','only','so','his','when','contact','here','business','who','web','also','now','help','get','pm','view','online','c','e','first','am','been','would','how','were','me','s','services','some','these','click','its','like','service','x','than','find','price','date','back','top','people','had','list','name','just','over','state','year','day','into','email','two','health','n','world','re','next','used','go','b','work','last','most','products','music','buy','data','make','them','should','product','system','post','her','city','t','add','policy','number','such','please','available','copyright','support','message','after','best','software','then','jan','good','video','well','d','where','info','rights','public','books','high','school','through','m','each','links','she','review','years','order','very','privacy','book','items','company','r','read','group','sex','need','many','user','said','de','does','set','under','general','research','university','january','mail','full','map','reviews','program','life','know','games','way','days','management','p','part','could','great','united','hotel','real','f','item','international','center','ebay','must','store','travel','comments','made','development','report','off','member','details','line','terms','before','hotels','did','send','right','type','because','those','using','results','office','education','national','car','design','take','posted','internet','address','community','within','states','area','want','phone','dvd','shipping','reserved','subject','between','forum','family','l','long','based','w','code','show','o','even','black','check','special','prices','website','index','being','women','much','sign','file','link','open','today','technology','south','case','project','same','pages','version','section','own','found','sports','house','related','both'])
| boring_words = set(['the', 'of', 'and', 'to', 'a', 'in', 'for', 'is', 'on', 'that', 'by', 'this', 'with', 'i', 'you', 'it', 'not', 'or', 'be', 'are', 'from', 'at', 'as', 'your', 'all', 'have', 'new', 'more', 'an', 'was', 'we', 'will', 'home', 'can', 'us', 'about', 'if', 'page', 'my', 'has', 'search', 'free', 'but', 'our', 'one', 'other', 'do', 'no', 'information', 'time', 'they', 'site', 'he', 'up', 'may', 'what', 'which', 'their', 'news', 'out', 'use', 'any', 'there', 'see', 'only', 'so', 'his', 'when', 'contact', 'here', 'business', 'who', 'web', 'also', 'now', 'help', 'get', 'pm', 'view', 'online', 'c', 'e', 'first', 'am', 'been', 'would', 'how', 'were', 'me', 's', 'services', 'some', 'these', 'click', 'its', 'like', 'service', 'x', 'than', 'find', 'price', 'date', 'back', 'top', 'people', 'had', 'list', 'name', 'just', 'over', 'state', 'year', 'day', 'into', 'email', 'two', 'health', 'n', 'world', 're', 'next', 'used', 'go', 'b', 'work', 'last', 'most', 'products', 'music', 'buy', 'data', 'make', 'them', 'should', 'product', 'system', 'post', 'her', 'city', 't', 'add', 'policy', 'number', 'such', 'please', 'available', 'copyright', 'support', 'message', 'after', 'best', 'software', 'then', 'jan', 'good', 'video', 'well', 'd', 'where', 'info', 'rights', 'public', 'books', 'high', 'school', 'through', 'm', 'each', 'links', 'she', 'review', 'years', 'order', 'very', 'privacy', 'book', 'items', 'company', 'r', 'read', 'group', 'sex', 'need', 'many', 'user', 'said', 'de', 'does', 'set', 'under', 'general', 'research', 'university', 'january', 'mail', 'full', 'map', 'reviews', 'program', 'life', 'know', 'games', 'way', 'days', 'management', 'p', 'part', 'could', 'great', 'united', 'hotel', 'real', 'f', 'item', 'international', 'center', 'ebay', 'must', 'store', 'travel', 'comments', 'made', 'development', 'report', 'off', 'member', 'details', 'line', 'terms', 'before', 'hotels', 'did', 'send', 'right', 'type', 'because', 'those', 'using', 'results', 'office', 'education', 'national', 'car', 'design', 'take', 'posted', 'internet', 'address', 'community', 'within', 'states', 'area', 'want', 'phone', 'dvd', 'shipping', 'reserved', 'subject', 'between', 'forum', 'family', 'l', 'long', 'based', 'w', 'code', 'show', 'o', 'even', 'black', 'check', 'special', 'prices', 'website', 'index', 'being', 'women', 'much', 'sign', 'file', 'link', 'open', 'today', 'technology', 'south', 'case', 'project', 'same', 'pages', 'version', 'section', 'own', 'found', 'sports', 'house', 'related', 'both']) |
{
'targets': [
{
'target_name': 'publish',
'type':'none',
'dependencies': [
'appjs'
],
'copies':[
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs/',
'files': [
'<(module_root_dir)/README.md',
'<(module_root_dir)/package.json',
'<(module_root_dir)/lib/',
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs/examples/',
'files': [
'<(module_root_dir)/examples/hello-world/',
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs/cli/',
'files': [
'<(module_root_dir)/cli/postinstall.js',
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/',
'files': [
'<(module_root_dir)/node_modules/mime/',
]
},
{
'destination': '<(module_root_dir)/app/data/',
'files': [
'<(module_root_dir)/examples/hello-world/content/',
'<(module_root_dir)/examples/hello-world/app.js'
]
}
],
'conditions': [
['OS=="mac"', {
'copies': [
{
'destination': '<(module_root_dir)/build/Release/',
'files': [
'<(module_root_dir)/deps/cef/Release/lib.target/libcef.dylib',
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs-darwin/libs/',
'files': [
'<(module_root_dir)/deps/cef/Release/lib.target/libcef.dylib',
'<(module_root_dir)/deps/cef/Release/lib.target/ffmpegsumo.so',
],
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs/bindings/darwin/ia32/',
'files': [
'<(PRODUCT_DIR)/appjs.node'
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs-darwin/',
'files': [
'<(module_root_dir)/data/mac/package.json',
'<(module_root_dir)/data/mac/index.js',
'<(module_root_dir)/data/mac/README.md'
],
},
{
'destination': '<(module_root_dir)/app/data/bin/Contents/',
'files': [
'<(module_root_dir)/deps/cef/Release/Resources/'
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs-darwin/data/',
'files': [
'<(module_root_dir)/deps/cef/Release/Resources/chrome.pak',
]
},
{
'destination': '<(module_root_dir)/data/pak',
'files': [
'<(module_root_dir)/deps/cef/Release/Resources/chrome.pak',
]
},
{
'destination': '<(module_root_dir)/app/',
'files': [
'<(module_root_dir)/data/mac/app.sh',
]
},
{
'destination': '<(module_root_dir)/app/data/bin/',
'files': [
'<(module_root_dir)/data/mac/node-bin/node/',
]
}
]
}],
['OS=="linux"', {
'copies': [
{
'destination': '<(module_root_dir)/build/Release/',
'files': [
'<(module_root_dir)/deps/cef/Release/lib.target/libcef.so',
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs/bindings/linux/<(target_arch)/',
'files': [
'<(PRODUCT_DIR)/appjs.node'
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs-linux-<(target_arch)/libs/',
'files': [
'<(module_root_dir)/deps/cef/Release/lib.target/libcef.so',
],
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs-linux-<(target_arch)/',
'files': [
'<(module_root_dir)/data/linux/<(target_arch)/package.json',
'<(module_root_dir)/data/linux/index.js',
'<(module_root_dir)/data/linux/README.md',
],
},
{
'destination': '<(module_root_dir)/app/',
'files': [
'<(module_root_dir)/data/linux/app.sh',
]
},
{
'destination': '<(module_root_dir)/app/data/bin/',
'files': [
'<(module_root_dir)/data/linux/<(target_arch)/node-bin/node',
'<(module_root_dir)/deps/cef/Release/lib.target/libffmpegsumo.so'
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs-linux-<(target_arch)/data/',
'files': [
'<(module_root_dir)/deps/cef/Release/chrome.pak',
'<(module_root_dir)/deps/cef/Release/locales/'
]
},
{
'destination': '<(module_root_dir)/data/pak',
'files': [
'<(module_root_dir)/deps/cef/Release/chrome.pak',
'<(module_root_dir)/deps/cef/Release/locales/'
]
}
]
}],
['OS=="win"', {
'copies': [
{
'destination': '<(module_root_dir)/build/Release/',
'files': [
'<(module_root_dir)/deps/cef/Release/libcef.dll',
'<(module_root_dir)/deps/cef/Release/avcodec-54.dll',
'<(module_root_dir)/deps/cef/Release/avformat-54.dll',
'<(module_root_dir)/deps/cef/Release/avutil-51.dll',
'<(module_root_dir)/deps/cef/Release/d3dcompiler_43.dll',
'<(module_root_dir)/deps/cef/Release/d3dx9_43.dll',
'<(module_root_dir)/deps/cef/Release/icudt.dll',
'<(module_root_dir)/deps/cef/Release/libEGL.dll',
'<(module_root_dir)/deps/cef/Release/libGLESv2.dll'
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs/bindings/win32/ia32/',
'files': [
'<(PRODUCT_DIR)/appjs.node'
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs-win32/libs/',
'files': [
'<(module_root_dir)/deps/cef/Release/libcef.dll',
'<(module_root_dir)/deps/cef/Release/avcodec-54.dll',
'<(module_root_dir)/deps/cef/Release/avformat-54.dll',
'<(module_root_dir)/deps/cef/Release/avutil-51.dll',
'<(module_root_dir)/deps/cef/Release/d3dcompiler_43.dll',
'<(module_root_dir)/deps/cef/Release/d3dx9_43.dll',
'<(module_root_dir)/deps/cef/Release/icudt.dll',
'<(module_root_dir)/deps/cef/Release/libEGL.dll',
'<(module_root_dir)/deps/cef/Release/libGLESv2.dll',
],
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs-win32/',
'files': [
'<(module_root_dir)/data/win/package.json',
'<(module_root_dir)/data/win/index.js',
'<(module_root_dir)/data/win/README.md'
],
},
{
'destination': '<(module_root_dir)/app/',
'files': [
'<(module_root_dir)/data/win/app.exe',
]
},
{
'destination': '<(module_root_dir)/app/data/bin/',
'files': [
'<(module_root_dir)/data/win/node-bin/node.exe',
]
},
{
'destination': '<(module_root_dir)/app/data/node_modules/appjs-win32/data/',
'files': [
'<(module_root_dir)/deps/cef/Release/chrome.pak',
'<(module_root_dir)/deps/cef/Release/locales/'
]
},
{
'destination': '<(module_root_dir)/data/pak',
'files': [
'<(module_root_dir)/deps/cef/Release/chrome.pak',
'<(module_root_dir)/deps/cef/Release/locales/'
]
}
]
}]
]
},
{
'target_name': 'appjs',
'msvs_guid': 'A9D6DC71-C0DC-4549-AEB1-3B15B44E86A9',
'sources': [
'src/main.cpp',
'src/appjs.cpp',
'src/appjs_app.cpp',
'src/appjs_window.cpp',
'src/appjs_menu.cpp',
'src/appjs_status_icon.cpp',
'src/native_window/native_window.cpp',
'src/native_menu/native_menu.cpp',
'src/native_status_icon/native_status_icon.cpp',
'src/includes/cef_handler.cpp',
'src/includes/cef.cpp',
'src/includes/cef_loop.cpp',
'src/includes/cef_scheme_handler.cpp',
'src/includes/cef_sync_handler.cpp',
'src/includes/util.cpp',
],
'dependencies': [
'<(module_root_dir)/deps/cef/dll_wrapper.gyp:libcef_dll_wrapper'
],
'include_dirs': [
'src/',
'deps/cef/'
],
'cflags': [
'-fPIC',
'-Wall',
'-std=c++0x'
],
'conditions': [
['OS=="mac"', {
'sources': [
'src/native_window/native_window_mac.mm',
'src/native_menu/native_menu_mac.mm',
'src/native_status_icon/native_status_icon_mac.mm'
],
'defines': [
'__MAC__',
],
'cflags': [ '-m32' ],
'ldflags': [ '-m32' ],
'xcode_settings': {
'OTHER_CFLAGS': ['-ObjC++'],
'OTHER_LDFLAGS':['-Xlinker -rpath -Xlinker @loader_path/../../../../appjs-darwin/libs/'],
'ARCHS': [ 'i386' ]
},
'link_settings': {
'libraries': [
'<(module_root_dir)/deps/cef/Release/lib.target/libcef.dylib',
'<(module_root_dir)/build/Release/cef_dll_wrapper.node',
'-lobjc'
]
}
}],
['OS=="linux"', {
'sources': [
'src/native_window/native_window_linux.cpp',
'src/native_menu/native_menu_linux.cpp',
'src/native_status_icon/native_status_icon_linux.cpp'
],
'defines': [
'__LINUX__',
'<!@(uname -a | grep "Ubuntu" > /dev/null && echo "__UBUNTU__" || echo "__NOTUBUNTU__")'
],
'cflags': [
'<!@(pkg-config --cflags gtk+-2.0 gthread-2.0)',
],
'link_settings': {
'ldflags': [
'<!@(pkg-config --libs-only-L --libs-only-other gtk+-2.0 gthread-2.0)',
'-Wl,-R,\'$$ORIGIN/../../../../appjs-linux-<(target_arch)/libs/\'',
],
'libraries': [
'<!@(pkg-config --libs-only-l gtk+-2.0 gthread-2.0)',
'<(module_root_dir)/deps/cef/Release/lib.target/libcef.so',
'<(module_root_dir)/build/Release/obj.target/deps/cef/cef_dll_wrapper.node'
],
}
}],
['OS=="win"', {
'msvs_settings': {
'VCCLCompilerTool': {
'WholeProgramOptimization': 'true', # /GL, whole program optimization, needed for LTCG
'OmitFramePointers': 'true',
'EnableFunctionLevelLinking': 'true',
'EnableIntrinsicFunctions': 'true',
'RuntimeTypeInfo': 'false',
'ExceptionHandling': '1',
'AdditionalOptions': [ '/EHsc' ]
},
'VCLibrarianTool': {
'AdditionalOptions': [
'/LTCG', # link time code generation
],
},
'VCLinkerTool': {
'LinkTimeCodeGeneration': 1, # link-time code generation
'OptimizeReferences': 2, # /OPT:REF
'EnableCOMDATFolding': 2, # /OPT:ICF
'LinkIncremental': 1, # disable incremental linking
},
},
'sources': [
'src/includes/util_win.cpp',
'src/native_window/native_window_win.cpp',
'src/native_menu/native_menu_win.cpp',
'src/native_status_icon/native_status_icon_win.cpp'
],
'defines': [
'__WIN__',
'_WINSOCKAPI_',
'_UNICODE',
'UNICODE'
],
'link_settings': {
'libraries': [
'GdiPlus.lib',
'Shlwapi.lib',
'<(module_root_dir)/deps/cef/lib/Release/libcef.lib',
'<(module_root_dir)/build/Release/lib/libcef_dll_wrapper.lib'
],
},
}]
]
}
]
}
| {'targets': [{'target_name': 'publish', 'type': 'none', 'dependencies': ['appjs'], 'copies': [{'destination': '<(module_root_dir)/app/data/node_modules/appjs/', 'files': ['<(module_root_dir)/README.md', '<(module_root_dir)/package.json', '<(module_root_dir)/lib/']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs/examples/', 'files': ['<(module_root_dir)/examples/hello-world/']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs/cli/', 'files': ['<(module_root_dir)/cli/postinstall.js']}, {'destination': '<(module_root_dir)/app/data/node_modules/', 'files': ['<(module_root_dir)/node_modules/mime/']}, {'destination': '<(module_root_dir)/app/data/', 'files': ['<(module_root_dir)/examples/hello-world/content/', '<(module_root_dir)/examples/hello-world/app.js']}], 'conditions': [['OS=="mac"', {'copies': [{'destination': '<(module_root_dir)/build/Release/', 'files': ['<(module_root_dir)/deps/cef/Release/lib.target/libcef.dylib']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs-darwin/libs/', 'files': ['<(module_root_dir)/deps/cef/Release/lib.target/libcef.dylib', '<(module_root_dir)/deps/cef/Release/lib.target/ffmpegsumo.so']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs/bindings/darwin/ia32/', 'files': ['<(PRODUCT_DIR)/appjs.node']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs-darwin/', 'files': ['<(module_root_dir)/data/mac/package.json', '<(module_root_dir)/data/mac/index.js', '<(module_root_dir)/data/mac/README.md']}, {'destination': '<(module_root_dir)/app/data/bin/Contents/', 'files': ['<(module_root_dir)/deps/cef/Release/Resources/']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs-darwin/data/', 'files': ['<(module_root_dir)/deps/cef/Release/Resources/chrome.pak']}, {'destination': '<(module_root_dir)/data/pak', 'files': ['<(module_root_dir)/deps/cef/Release/Resources/chrome.pak']}, {'destination': '<(module_root_dir)/app/', 'files': ['<(module_root_dir)/data/mac/app.sh']}, {'destination': '<(module_root_dir)/app/data/bin/', 'files': ['<(module_root_dir)/data/mac/node-bin/node/']}]}], ['OS=="linux"', {'copies': [{'destination': '<(module_root_dir)/build/Release/', 'files': ['<(module_root_dir)/deps/cef/Release/lib.target/libcef.so']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs/bindings/linux/<(target_arch)/', 'files': ['<(PRODUCT_DIR)/appjs.node']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs-linux-<(target_arch)/libs/', 'files': ['<(module_root_dir)/deps/cef/Release/lib.target/libcef.so']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs-linux-<(target_arch)/', 'files': ['<(module_root_dir)/data/linux/<(target_arch)/package.json', '<(module_root_dir)/data/linux/index.js', '<(module_root_dir)/data/linux/README.md']}, {'destination': '<(module_root_dir)/app/', 'files': ['<(module_root_dir)/data/linux/app.sh']}, {'destination': '<(module_root_dir)/app/data/bin/', 'files': ['<(module_root_dir)/data/linux/<(target_arch)/node-bin/node', '<(module_root_dir)/deps/cef/Release/lib.target/libffmpegsumo.so']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs-linux-<(target_arch)/data/', 'files': ['<(module_root_dir)/deps/cef/Release/chrome.pak', '<(module_root_dir)/deps/cef/Release/locales/']}, {'destination': '<(module_root_dir)/data/pak', 'files': ['<(module_root_dir)/deps/cef/Release/chrome.pak', '<(module_root_dir)/deps/cef/Release/locales/']}]}], ['OS=="win"', {'copies': [{'destination': '<(module_root_dir)/build/Release/', 'files': ['<(module_root_dir)/deps/cef/Release/libcef.dll', '<(module_root_dir)/deps/cef/Release/avcodec-54.dll', '<(module_root_dir)/deps/cef/Release/avformat-54.dll', '<(module_root_dir)/deps/cef/Release/avutil-51.dll', '<(module_root_dir)/deps/cef/Release/d3dcompiler_43.dll', '<(module_root_dir)/deps/cef/Release/d3dx9_43.dll', '<(module_root_dir)/deps/cef/Release/icudt.dll', '<(module_root_dir)/deps/cef/Release/libEGL.dll', '<(module_root_dir)/deps/cef/Release/libGLESv2.dll']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs/bindings/win32/ia32/', 'files': ['<(PRODUCT_DIR)/appjs.node']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs-win32/libs/', 'files': ['<(module_root_dir)/deps/cef/Release/libcef.dll', '<(module_root_dir)/deps/cef/Release/avcodec-54.dll', '<(module_root_dir)/deps/cef/Release/avformat-54.dll', '<(module_root_dir)/deps/cef/Release/avutil-51.dll', '<(module_root_dir)/deps/cef/Release/d3dcompiler_43.dll', '<(module_root_dir)/deps/cef/Release/d3dx9_43.dll', '<(module_root_dir)/deps/cef/Release/icudt.dll', '<(module_root_dir)/deps/cef/Release/libEGL.dll', '<(module_root_dir)/deps/cef/Release/libGLESv2.dll']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs-win32/', 'files': ['<(module_root_dir)/data/win/package.json', '<(module_root_dir)/data/win/index.js', '<(module_root_dir)/data/win/README.md']}, {'destination': '<(module_root_dir)/app/', 'files': ['<(module_root_dir)/data/win/app.exe']}, {'destination': '<(module_root_dir)/app/data/bin/', 'files': ['<(module_root_dir)/data/win/node-bin/node.exe']}, {'destination': '<(module_root_dir)/app/data/node_modules/appjs-win32/data/', 'files': ['<(module_root_dir)/deps/cef/Release/chrome.pak', '<(module_root_dir)/deps/cef/Release/locales/']}, {'destination': '<(module_root_dir)/data/pak', 'files': ['<(module_root_dir)/deps/cef/Release/chrome.pak', '<(module_root_dir)/deps/cef/Release/locales/']}]}]]}, {'target_name': 'appjs', 'msvs_guid': 'A9D6DC71-C0DC-4549-AEB1-3B15B44E86A9', 'sources': ['src/main.cpp', 'src/appjs.cpp', 'src/appjs_app.cpp', 'src/appjs_window.cpp', 'src/appjs_menu.cpp', 'src/appjs_status_icon.cpp', 'src/native_window/native_window.cpp', 'src/native_menu/native_menu.cpp', 'src/native_status_icon/native_status_icon.cpp', 'src/includes/cef_handler.cpp', 'src/includes/cef.cpp', 'src/includes/cef_loop.cpp', 'src/includes/cef_scheme_handler.cpp', 'src/includes/cef_sync_handler.cpp', 'src/includes/util.cpp'], 'dependencies': ['<(module_root_dir)/deps/cef/dll_wrapper.gyp:libcef_dll_wrapper'], 'include_dirs': ['src/', 'deps/cef/'], 'cflags': ['-fPIC', '-Wall', '-std=c++0x'], 'conditions': [['OS=="mac"', {'sources': ['src/native_window/native_window_mac.mm', 'src/native_menu/native_menu_mac.mm', 'src/native_status_icon/native_status_icon_mac.mm'], 'defines': ['__MAC__'], 'cflags': ['-m32'], 'ldflags': ['-m32'], 'xcode_settings': {'OTHER_CFLAGS': ['-ObjC++'], 'OTHER_LDFLAGS': ['-Xlinker -rpath -Xlinker @loader_path/../../../../appjs-darwin/libs/'], 'ARCHS': ['i386']}, 'link_settings': {'libraries': ['<(module_root_dir)/deps/cef/Release/lib.target/libcef.dylib', '<(module_root_dir)/build/Release/cef_dll_wrapper.node', '-lobjc']}}], ['OS=="linux"', {'sources': ['src/native_window/native_window_linux.cpp', 'src/native_menu/native_menu_linux.cpp', 'src/native_status_icon/native_status_icon_linux.cpp'], 'defines': ['__LINUX__', '<!@(uname -a | grep "Ubuntu" > /dev/null && echo "__UBUNTU__" || echo "__NOTUBUNTU__")'], 'cflags': ['<!@(pkg-config --cflags gtk+-2.0 gthread-2.0)'], 'link_settings': {'ldflags': ['<!@(pkg-config --libs-only-L --libs-only-other gtk+-2.0 gthread-2.0)', "-Wl,-R,'$$ORIGIN/../../../../appjs-linux-<(target_arch)/libs/'"], 'libraries': ['<!@(pkg-config --libs-only-l gtk+-2.0 gthread-2.0)', '<(module_root_dir)/deps/cef/Release/lib.target/libcef.so', '<(module_root_dir)/build/Release/obj.target/deps/cef/cef_dll_wrapper.node']}}], ['OS=="win"', {'msvs_settings': {'VCCLCompilerTool': {'WholeProgramOptimization': 'true', 'OmitFramePointers': 'true', 'EnableFunctionLevelLinking': 'true', 'EnableIntrinsicFunctions': 'true', 'RuntimeTypeInfo': 'false', 'ExceptionHandling': '1', 'AdditionalOptions': ['/EHsc']}, 'VCLibrarianTool': {'AdditionalOptions': ['/LTCG']}, 'VCLinkerTool': {'LinkTimeCodeGeneration': 1, 'OptimizeReferences': 2, 'EnableCOMDATFolding': 2, 'LinkIncremental': 1}}, 'sources': ['src/includes/util_win.cpp', 'src/native_window/native_window_win.cpp', 'src/native_menu/native_menu_win.cpp', 'src/native_status_icon/native_status_icon_win.cpp'], 'defines': ['__WIN__', '_WINSOCKAPI_', '_UNICODE', 'UNICODE'], 'link_settings': {'libraries': ['GdiPlus.lib', 'Shlwapi.lib', '<(module_root_dir)/deps/cef/lib/Release/libcef.lib', '<(module_root_dir)/build/Release/lib/libcef_dll_wrapper.lib']}}]]}]} |
n, m = map(int, input().split())
ruins = [list(map(int, input().split())) for _ in range(n)]
imos = [0] * (m+1)
sum_s = 0
for i in range(n):
l, r, s = ruins[i]
l -= 1
imos[l] += s
imos[r] -= s
sum_s += s
for i in range(m):
imos[i+1] += imos[i]
print(sum_s - min(imos[:-1]))
| (n, m) = map(int, input().split())
ruins = [list(map(int, input().split())) for _ in range(n)]
imos = [0] * (m + 1)
sum_s = 0
for i in range(n):
(l, r, s) = ruins[i]
l -= 1
imos[l] += s
imos[r] -= s
sum_s += s
for i in range(m):
imos[i + 1] += imos[i]
print(sum_s - min(imos[:-1])) |
def check(s, t):
v = 0
for i in range(len(t)):
if t[i] == s[v]:
v += 1
if v == len(s):
return "Yes"
return "No"
while True:
try:
s, t = input().split()
ans = check(s, t)
print(ans)
except:
break
| def check(s, t):
v = 0
for i in range(len(t)):
if t[i] == s[v]:
v += 1
if v == len(s):
return 'Yes'
return 'No'
while True:
try:
(s, t) = input().split()
ans = check(s, t)
print(ans)
except:
break |
class _descriptor(object):
def __get__(self, *_):
raise RuntimeError("more like funtime error")
class Methods(object):
ok_method = "ok"
err_method = _descriptor()
| class _Descriptor(object):
def __get__(self, *_):
raise runtime_error('more like funtime error')
class Methods(object):
ok_method = 'ok'
err_method = _descriptor() |
'''
+Build Your Dream Python Project Discord Team
Invite: https://dsc.gg/python_team
19 Feb 2021
@alexandros answer to @Subham
https://discord.com/channels/794684213697052712/794684213697052718/812231193213796362
Sublist Yielder
'''
lst = [[1, 3, 4], [2, 5, 7]]
def f(lst):
for sublst in lst:
yield from sublst
yield '\n'
for item in f(lst):
end_char = '' if item == '\n' else ' '
print(item, end = end_char)
# Output
'''
1 3 4
2 5 7
'''
| """
+Build Your Dream Python Project Discord Team
Invite: https://dsc.gg/python_team
19 Feb 2021
@alexandros answer to @Subham
https://discord.com/channels/794684213697052712/794684213697052718/812231193213796362
Sublist Yielder
"""
lst = [[1, 3, 4], [2, 5, 7]]
def f(lst):
for sublst in lst:
yield from sublst
yield '\n'
for item in f(lst):
end_char = '' if item == '\n' else ' '
print(item, end=end_char)
'\n1 3 4 \n2 5 7 \n' |
#----------* CHALLENGE 35 *----------
#Ask the user to enter their name and then display their name three times.
name = input("Enter your name: ")
for i in range (1,4):
print(name+" ") | name = input('Enter your name: ')
for i in range(1, 4):
print(name + ' ') |
edibles = ["ham", "spam","eggs","nuts"]
for food in edibles:
if food == "spams":
print("No more spam please!")
break
print("Great, delicious " + food)
else:
print("I am so glad: No " + food +"!")
print("Finally, I finished stuffing myself") | edibles = ['ham', 'spam', 'eggs', 'nuts']
for food in edibles:
if food == 'spams':
print('No more spam please!')
break
print('Great, delicious ' + food)
else:
print('I am so glad: No ' + food + '!')
print('Finally, I finished stuffing myself') |
domain = 'mycompany.local'
user = 'automation@vsphere.local'
pwd = 'somepassword'
| domain = 'mycompany.local'
user = 'automation@vsphere.local'
pwd = 'somepassword' |
game = "Hello world"
#print(id(game))
def game_board(player=0, row=0, col=0, just_display=False):
game = "Change !"
#print(id(game))
print(game)
print(game)
game_board()
print(game)
#print(id(game))
| game = 'Hello world'
def game_board(player=0, row=0, col=0, just_display=False):
game = 'Change !'
print(game)
print(game)
game_board()
print(game) |
def groupnames(name_iterable):
name_dict = {}
for name in name_iterable:
key = _groupkeyfunc(name)
name_dict.setdefault(key, []).append(name)
for k, v in name_dict.iteritems():
aux = [(_sortkeyfunc(name), name) for name in v]
aux.sort()
name_dict[k] = tuple([ n for __, n in aux ])
return name_dict
| def groupnames(name_iterable):
name_dict = {}
for name in name_iterable:
key = _groupkeyfunc(name)
name_dict.setdefault(key, []).append(name)
for (k, v) in name_dict.iteritems():
aux = [(_sortkeyfunc(name), name) for name in v]
aux.sort()
name_dict[k] = tuple([n for (__, n) in aux])
return name_dict |
class reverse_iter:
def __init__(self, iterable) -> None:
self.iterable = iterable
self.start = len(iterable)
def __iter__(self) -> iter:
return self
def __next__(self) -> int:
if self.start > 0:
self.start -= 1
return self.iterable[self.start]
raise StopIteration()
if __name__ == '__main__':
reversed_list = reverse_iter([1, 2, 3, 4])
for item in reversed_list:
print(item)
| class Reverse_Iter:
def __init__(self, iterable) -> None:
self.iterable = iterable
self.start = len(iterable)
def __iter__(self) -> iter:
return self
def __next__(self) -> int:
if self.start > 0:
self.start -= 1
return self.iterable[self.start]
raise stop_iteration()
if __name__ == '__main__':
reversed_list = reverse_iter([1, 2, 3, 4])
for item in reversed_list:
print(item) |
class Node:
def __init__(self, data):
self.data = data
self.next = None
class LinkedList:
def __init__(self):
self.head = None
def insertAtLast(self, data):
node = Node(data)
if self.head == None:
self.head = node
return
currPointer = self.head
while currPointer.next != None:
currPointer = currPointer.next
currPointer.next = node
def deleteNode(self, dataKey):
prevNode = None
currNode = self.head
# if head element is to be deleted
if currNode.data == dataKey:
currNode = currNode.next
self.head = currNode
return
while currNode:
if currNode.data == dataKey:
prevNode.next = currNode.next
return
prevNode = currNode
currNode = currNode.next
print('Node with data key not found')
def __str__(self):
result = ''
currNode = self.head
while currNode:
if currNode.next != None:
result += str(currNode.data) + ' -> '
else:
result += str(currNode.data)
currNode = currNode.next
return result
if __name__ == "__main__":
linkedList = LinkedList()
for i in range(10):
linkedList.insertAtLast(i)
print(linkedList)
# Deletion from head
linkedList.deleteNode(0)
print(linkedList)
# Deletion from mid
linkedList.deleteNode(5)
print(linkedList)
# deletion from last
linkedList.deleteNode(9)
print(linkedList) | class Node:
def __init__(self, data):
self.data = data
self.next = None
class Linkedlist:
def __init__(self):
self.head = None
def insert_at_last(self, data):
node = node(data)
if self.head == None:
self.head = node
return
curr_pointer = self.head
while currPointer.next != None:
curr_pointer = currPointer.next
currPointer.next = node
def delete_node(self, dataKey):
prev_node = None
curr_node = self.head
if currNode.data == dataKey:
curr_node = currNode.next
self.head = currNode
return
while currNode:
if currNode.data == dataKey:
prevNode.next = currNode.next
return
prev_node = currNode
curr_node = currNode.next
print('Node with data key not found')
def __str__(self):
result = ''
curr_node = self.head
while currNode:
if currNode.next != None:
result += str(currNode.data) + ' -> '
else:
result += str(currNode.data)
curr_node = currNode.next
return result
if __name__ == '__main__':
linked_list = linked_list()
for i in range(10):
linkedList.insertAtLast(i)
print(linkedList)
linkedList.deleteNode(0)
print(linkedList)
linkedList.deleteNode(5)
print(linkedList)
linkedList.deleteNode(9)
print(linkedList) |
# Definition for a binary tree node.
class TreeNode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def __init__(self):
self.incr = 0
def convertBST(self, root: TreeNode) -> TreeNode:
def dfs(node):
if not node:
return 0
self.convertBST(node.right)
self.incr += node.val
node.val = self.incr
self.convertBST(node.left)
dfs(root)
return root
t = TreeNode(5)
t.left = TreeNode(2)
t.right = TreeNode(13)
slu = Solution()
print(slu.convertBST(t))
| class Treenode:
def __init__(self, x):
self.val = x
self.left = None
self.right = None
class Solution:
def __init__(self):
self.incr = 0
def convert_bst(self, root: TreeNode) -> TreeNode:
def dfs(node):
if not node:
return 0
self.convertBST(node.right)
self.incr += node.val
node.val = self.incr
self.convertBST(node.left)
dfs(root)
return root
t = tree_node(5)
t.left = tree_node(2)
t.right = tree_node(13)
slu = solution()
print(slu.convertBST(t)) |
#import necessary data
df = pd.read_csv('~/lstm_test/input/nu_public_rep.csv', sep=';',
parse_dates={'dt' : ['Date', 'Time']}, infer_datetime_format=True,
low_memory=False, na_values=['nan','?'], index_col='dt')
#fill nan with the means of the columns to handle missing databases
droping_list_all=[]
for j in range(0,7):
if not df.iloc[:, j].notnull().all():
droping_list_all.append(j)
#print(df.iloc[:,j].unique())
droping_list_all
#This small loop will replace any missing vlaues with means of columns (study this!!)
for j in range(0,7):
df.iloc[:,j]=df.iloc[:,j].fillna(df.iloc[:,j].mean())
#Get sum of missing calues - sanity check (should be zero)
df.isnull().sum()
| df = pd.read_csv('~/lstm_test/input/nu_public_rep.csv', sep=';', parse_dates={'dt': ['Date', 'Time']}, infer_datetime_format=True, low_memory=False, na_values=['nan', '?'], index_col='dt')
droping_list_all = []
for j in range(0, 7):
if not df.iloc[:, j].notnull().all():
droping_list_all.append(j)
droping_list_all
for j in range(0, 7):
df.iloc[:, j] = df.iloc[:, j].fillna(df.iloc[:, j].mean())
df.isnull().sum() |
#!/usr/bin/env python
# coding: utf8
def writeResults(links, outfile):
fw = open(outfile, "w")
for link in links:
fw.write(link)
| def write_results(links, outfile):
fw = open(outfile, 'w')
for link in links:
fw.write(link) |
i = get_safe('id')
username = ""
if i != 0:
username = get('username')
username = escape_string(username)
query = 'some query'
it = execute(query + username)
| i = get_safe('id')
username = ''
if i != 0:
username = get('username')
username = escape_string(username)
query = 'some query'
it = execute(query + username) |
'''
file:except.py
exceptions for piclock
'''
class NoConfigError(Exception):
'''
No Config Error Exception
'''
class CharacterNotFound(Exception):
'''
Character not found exception
'''
class MatrixCharError(Exception):
'''
Matrix Character Error
'''
class NoMqttConfigSection(Exception):
'''
No MQTT config Section Error
'''
class NoMqttServerSetting(Exception):
'''
No MQTT server settings in config file
'''
class ColorClassError(Exception):
'''
Color Class error
'''
class FontError(Exception):
'''
Font Error Class
'''
class TemperaturReadError(Exception):
'''
Read Error on temperatur file
'''
| """
file:except.py
exceptions for piclock
"""
class Noconfigerror(Exception):
"""
No Config Error Exception
"""
class Characternotfound(Exception):
"""
Character not found exception
"""
class Matrixcharerror(Exception):
"""
Matrix Character Error
"""
class Nomqttconfigsection(Exception):
"""
No MQTT config Section Error
"""
class Nomqttserversetting(Exception):
"""
No MQTT server settings in config file
"""
class Colorclasserror(Exception):
"""
Color Class error
"""
class Fonterror(Exception):
"""
Font Error Class
"""
class Temperaturreaderror(Exception):
"""
Read Error on temperatur file
""" |
# Objectives
#
# In this stage, you should write a program that:
#
# Reads matrix A A A from the input.
# Reads matrix B B B from the input.
# Outputs their sum if it is possible to add them. Otherwise, it should output the ERROR message.
#
# Each matrix in the input is given in the following way: the first line contains the number of rows
# nnn and the number of columns mmm. Then nnn lines follow, each containing mmm integers
# representing one row of the matrix.
#
# Output the result in the same way but don't print the dimensions of the matrix.
def same_dimensions(n_rows_a, n_columns_a, n_rows_b, n_columns_b):
return n_rows_a == n_rows_b and n_columns_a == n_columns_b
def addition(mat_a, mat_b, n_rows, n_columns):
return [[str(mat_a[i][j] + mat_b[i][j]) for j in range(n_columns)] for i in range(n_rows)]
def print_matrix(mat):
for row in mat:
print(*row)
def add_matrices():
a, b = list(map(int, input().split()))
mat_a = [list(map(int, input().split())) for _ in range(a)]
c, d = list(map(int, input().split()))
mat_b = [list(map(int, input().split())) for _ in range(c)]
if not same_dimensions(a, b, c, d):
print('ERROR')
else:
mat_sum = addition(mat_a, mat_b, a, b)
for row in mat_sum:
print(*row)
if __name__ == '__main__':
add_matrices()
| def same_dimensions(n_rows_a, n_columns_a, n_rows_b, n_columns_b):
return n_rows_a == n_rows_b and n_columns_a == n_columns_b
def addition(mat_a, mat_b, n_rows, n_columns):
return [[str(mat_a[i][j] + mat_b[i][j]) for j in range(n_columns)] for i in range(n_rows)]
def print_matrix(mat):
for row in mat:
print(*row)
def add_matrices():
(a, b) = list(map(int, input().split()))
mat_a = [list(map(int, input().split())) for _ in range(a)]
(c, d) = list(map(int, input().split()))
mat_b = [list(map(int, input().split())) for _ in range(c)]
if not same_dimensions(a, b, c, d):
print('ERROR')
else:
mat_sum = addition(mat_a, mat_b, a, b)
for row in mat_sum:
print(*row)
if __name__ == '__main__':
add_matrices() |
rows = 9
for i in range(rows):
# nested loop
for j in range(i):
# display number
print(i, end=' ')
# new line after each row
print('')
| rows = 9
for i in range(rows):
for j in range(i):
print(i, end=' ')
print('') |
# OpenWeatherMap API Key
weather_api_key = "ea45174aae3e4fc5de1af5d14f74cd81"
# Google API Key
g_key = "AIzaSyCiyVKYg3FYX0fAR2S2RGR1m-kUN947W9s" | weather_api_key = 'ea45174aae3e4fc5de1af5d14f74cd81'
g_key = 'AIzaSyCiyVKYg3FYX0fAR2S2RGR1m-kUN947W9s' |
def editDistance(str1, str2, m, n, ci=1,crm=1,crp=1):
dp = [[0 for i in range(n + 1)] for j in range(m + 1)]
for i in range(m + 1):
for j in range(n + 1):
if i == 0:
dp[i][j] = j
elif j == 0:
dp[i][j] = i
elif str1[i-1] == str2[j-1]:
dp[i][j] = dp[i-1][j-1]
else:
dp[i][j] = min((dp[i][j-1] + ci), # Insert
(dp[i-1][j] + crm), # Remove
(dp[i-1][j-1] + crp)) # Replace
return dp[m][n] ##or dp[-1][-1]
str1 = "MARCH"
str2 = "CART"
cost_insert = 1
cost_remove = 1
cost_replace = 1
ed=editDistance(str1, str2, len(str1), len(str2), cost_insert,cost_remove,cost_replace)
print('Minimum Edit Distance:',ed)
## OUTPUT:
'''
Minimum Edit Distance: 3
''' | def edit_distance(str1, str2, m, n, ci=1, crm=1, crp=1):
dp = [[0 for i in range(n + 1)] for j in range(m + 1)]
for i in range(m + 1):
for j in range(n + 1):
if i == 0:
dp[i][j] = j
elif j == 0:
dp[i][j] = i
elif str1[i - 1] == str2[j - 1]:
dp[i][j] = dp[i - 1][j - 1]
else:
dp[i][j] = min(dp[i][j - 1] + ci, dp[i - 1][j] + crm, dp[i - 1][j - 1] + crp)
return dp[m][n]
str1 = 'MARCH'
str2 = 'CART'
cost_insert = 1
cost_remove = 1
cost_replace = 1
ed = edit_distance(str1, str2, len(str1), len(str2), cost_insert, cost_remove, cost_replace)
print('Minimum Edit Distance:', ed)
'\nMinimum Edit Distance: 3\n\n' |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def funcion_generadora_print():
try:
print("GENERADOR: Se va a generar un PRIMER dato")
yield "valorGenerado1"
print("GENERADOR: Se va a generar un SEGUNDO dato")
yield "valorGenerado2"
print("GENERADOR: Se va a generar un TERCER dato")
yield "valorGenerado3"
finally:
print("GENERADOR: Terminando y limpiando")
if __name__ == "__main__":
generador = funcion_generadora_print()
for elemento in generador:
print(elemento)
| def funcion_generadora_print():
try:
print('GENERADOR: Se va a generar un PRIMER dato')
yield 'valorGenerado1'
print('GENERADOR: Se va a generar un SEGUNDO dato')
yield 'valorGenerado2'
print('GENERADOR: Se va a generar un TERCER dato')
yield 'valorGenerado3'
finally:
print('GENERADOR: Terminando y limpiando')
if __name__ == '__main__':
generador = funcion_generadora_print()
for elemento in generador:
print(elemento) |
file_input="dump.seq_e10.mci.I12"
num=1
output=open('assign_cluster_num.out','w')
for i in open(file_input):
i=i.replace('\n','')
o=i.split()
for j in o:
output.write(j+'\t'+str(num)+'\n')
num+=1
output.close()
| file_input = 'dump.seq_e10.mci.I12'
num = 1
output = open('assign_cluster_num.out', 'w')
for i in open(file_input):
i = i.replace('\n', '')
o = i.split()
for j in o:
output.write(j + '\t' + str(num) + '\n')
num += 1
output.close() |
t=int(input('Tabuada do : '))
r1 = t * 1
r2 = t * 2
r3 = t * 3
r4 = t * 4
r5 = t * 5
r6 = t * 6
r7 = t * 7
r8 = t * 8
r9 = t * 9
r10 = t * 10
print("RESUlTADO\n------------------------")
print('{} x 1 = {}'.format(t,r1))
print('{} x 2 = {}'.format(t,r2))
print('{} x 3 = {}'.format(t,r3))
print('{} x 4 = {}'.format(t,r4))
print('{} x 5 = {}'.format(t,r5))
print('{} x 6 = {}'.format(t,r6))
print('{} x 7 = {}'.format(t,r7))
print('{} x 8 = {}'.format(t,r8))
print('{} x 9 = {}'.format(t,r9))
print('{} x 10 = {}'.format(t,r10))
#Gabarito
print("-"*20)
print('{} x {:2} = {}'.format(t, 1, t*1))
print('{} x {:2} = {}'.format(t, 2, t*2))
print('{} x {:2} = {}'.format(t, 3, t*3))
print('{} x {:2} = {}'.format(t, 4, t*4))
print('{} x {:2} = {}'.format(t, 5, t*5))
print('{} x {:2} = {}'.format(t, 6, t*6))
print('{} x {:2} = {}'.format(t, 7, t*7))
print('{} x {:2} = {}'.format(t, 8, t*8))
print('{} x {:2} = {}'.format(t, 9, t*9))
print('{} x {:2} = {}'.format(t, 10, t*10))
print("-"*20) | t = int(input('Tabuada do : '))
r1 = t * 1
r2 = t * 2
r3 = t * 3
r4 = t * 4
r5 = t * 5
r6 = t * 6
r7 = t * 7
r8 = t * 8
r9 = t * 9
r10 = t * 10
print('RESUlTADO\n------------------------')
print('{} x 1 = {}'.format(t, r1))
print('{} x 2 = {}'.format(t, r2))
print('{} x 3 = {}'.format(t, r3))
print('{} x 4 = {}'.format(t, r4))
print('{} x 5 = {}'.format(t, r5))
print('{} x 6 = {}'.format(t, r6))
print('{} x 7 = {}'.format(t, r7))
print('{} x 8 = {}'.format(t, r8))
print('{} x 9 = {}'.format(t, r9))
print('{} x 10 = {}'.format(t, r10))
print('-' * 20)
print('{} x {:2} = {}'.format(t, 1, t * 1))
print('{} x {:2} = {}'.format(t, 2, t * 2))
print('{} x {:2} = {}'.format(t, 3, t * 3))
print('{} x {:2} = {}'.format(t, 4, t * 4))
print('{} x {:2} = {}'.format(t, 5, t * 5))
print('{} x {:2} = {}'.format(t, 6, t * 6))
print('{} x {:2} = {}'.format(t, 7, t * 7))
print('{} x {:2} = {}'.format(t, 8, t * 8))
print('{} x {:2} = {}'.format(t, 9, t * 9))
print('{} x {:2} = {}'.format(t, 10, t * 10))
print('-' * 20) |
auditory = my_gaussian(x, mu_auditory, sigma_auditory)
visual = my_gaussian(x, mu_visual, sigma_visual)
posterior_pointwise = visual * auditory
posterior_pointwise /= posterior_pointwise.sum()
with plt.xkcd():
fig = plt.figure(figsize=(fig_w, fig_h))
my_plot(x, auditory, visual, posterior_pointwise)
plt.title('Sample output')
plt.show() | auditory = my_gaussian(x, mu_auditory, sigma_auditory)
visual = my_gaussian(x, mu_visual, sigma_visual)
posterior_pointwise = visual * auditory
posterior_pointwise /= posterior_pointwise.sum()
with plt.xkcd():
fig = plt.figure(figsize=(fig_w, fig_h))
my_plot(x, auditory, visual, posterior_pointwise)
plt.title('Sample output')
plt.show() |
# Tutorial skipper snippet
def skip_tutorial():
MAPLE_ADMINISTARTOR = 2007
quests_to_complete = [
20820, # The City of Ereve
20821, # Knight's Orientation
20822, # The Path of Bravery
20823, # Question and Answer
20824, # Knight's Cavalier
20825, # Well-Behaved Student
20826, # Lesson 1 - Ereve History
20827, # What's Next?
20828, # Lesson 2 - Physical Training
20829, # Lesson 3 - Battle Basics 1
20830, # A Much-Needed Break
20831, # Lesson 3 - Battle Basics 2
20832, # Lesson, Interrupted
20833, # Tiny Bird
20834, # The Tranquil Garden
20835, # The Chief Knights
20836, # Lesson, Resumed
20837, # Lesson 5 - Skills
20838, # Certified Knight
20839, # Meeting with the Empress
]
map_to_warp = 130000000 # Ereve
target_level = 10
sm.setSpeakerID(MAPLE_ADMINISTARTOR)
sm.removeEscapeButton()
sm.lockInGameUI(True)
if sm.sendAskYesNo("Would you like to skip the tutorial questline and instantly arrive at #m" + str(map_to_warp) + "#?"):
if sm.getChr().getLevel() < target_level:
sm.addLevel(target_level - sm.getChr().getLevel())
for quest in quests_to_complete:
sm.completeQuestNoRewards(quest)
sm.warp(map_to_warp)
sm.lockInGameUI(False)
sm.dispose()
skip_tutorial()
sm.showEffect("Effect/OnUserEff.img/guideEffect/cygnusTutorial/0", 0, 0)
sm.invokeAfterDelay(5000, "showEffect", "Effect/OnUserEff.img/guideEffect/cygnusTutorial/1", 0, 0) | def skip_tutorial():
maple_administartor = 2007
quests_to_complete = [20820, 20821, 20822, 20823, 20824, 20825, 20826, 20827, 20828, 20829, 20830, 20831, 20832, 20833, 20834, 20835, 20836, 20837, 20838, 20839]
map_to_warp = 130000000
target_level = 10
sm.setSpeakerID(MAPLE_ADMINISTARTOR)
sm.removeEscapeButton()
sm.lockInGameUI(True)
if sm.sendAskYesNo('Would you like to skip the tutorial questline and instantly arrive at #m' + str(map_to_warp) + '#?'):
if sm.getChr().getLevel() < target_level:
sm.addLevel(target_level - sm.getChr().getLevel())
for quest in quests_to_complete:
sm.completeQuestNoRewards(quest)
sm.warp(map_to_warp)
sm.lockInGameUI(False)
sm.dispose()
skip_tutorial()
sm.showEffect('Effect/OnUserEff.img/guideEffect/cygnusTutorial/0', 0, 0)
sm.invokeAfterDelay(5000, 'showEffect', 'Effect/OnUserEff.img/guideEffect/cygnusTutorial/1', 0, 0) |
class ATM:
def __init__(self,balance,bank_name):
self.balance = balance
self.bank_name = bank_name
self.withdrawals_list = []
def show_withdrawals(self):
for withdrawal in self.withdrawals_list:
print("withdrawal: "+str(withdrawal))
def print_information(self):
print("Welcome to "+ self.bank_name)
print("current balance: " + str(self.balance))
def check_balance(self , request):
if self.balance < request:
print("Can't give you all this money !!")
elif request < 0:
print("More than zero plz!")
def withdraw(self, request):
self.withdrawals_list.append(request)
notes = [100,50,10,5]
if self.balance > request:
self.balance = self.balance - request
for note in notes:
while request >= note:
request -= note
print("Give " + str(note))
balance1 = 500
balance2 = 1000
atm1 = ATM(balance1, "islamy bank")
atm2 = ATM(balance2, "baraka bank")
atm1.print_information()
atm1.check_balance(300)
atm1.withdraw(300)
atm1.print_information()
atm1.check_balance(250)
atm1.withdraw(250)
atm1.show_withdrawals()
atm2.print_information()
atm2.check_balance(500)
atm2.withdraw(500)
atm2.print_information()
atm2.check_balance(455)
atm2.withdraw(455)
atm2.show_withdrawals() | class Atm:
def __init__(self, balance, bank_name):
self.balance = balance
self.bank_name = bank_name
self.withdrawals_list = []
def show_withdrawals(self):
for withdrawal in self.withdrawals_list:
print('withdrawal: ' + str(withdrawal))
def print_information(self):
print('Welcome to ' + self.bank_name)
print('current balance: ' + str(self.balance))
def check_balance(self, request):
if self.balance < request:
print("Can't give you all this money !!")
elif request < 0:
print('More than zero plz!')
def withdraw(self, request):
self.withdrawals_list.append(request)
notes = [100, 50, 10, 5]
if self.balance > request:
self.balance = self.balance - request
for note in notes:
while request >= note:
request -= note
print('Give ' + str(note))
balance1 = 500
balance2 = 1000
atm1 = atm(balance1, 'islamy bank')
atm2 = atm(balance2, 'baraka bank')
atm1.print_information()
atm1.check_balance(300)
atm1.withdraw(300)
atm1.print_information()
atm1.check_balance(250)
atm1.withdraw(250)
atm1.show_withdrawals()
atm2.print_information()
atm2.check_balance(500)
atm2.withdraw(500)
atm2.print_information()
atm2.check_balance(455)
atm2.withdraw(455)
atm2.show_withdrawals() |
class Solution:
def findMedianSortedArrays(self, nums1: List[int], nums2: List[int]) -> float:
# brute force
ar = nums1 + nums2
ar.sort()
n = len(ar)
median = ar[n//2]
if n%2 != 0:
return median
else:
return (median + ar[(n//2)-1])/2 | class Solution:
def find_median_sorted_arrays(self, nums1: List[int], nums2: List[int]) -> float:
ar = nums1 + nums2
ar.sort()
n = len(ar)
median = ar[n // 2]
if n % 2 != 0:
return median
else:
return (median + ar[n // 2 - 1]) / 2 |
defines = {}
specials = {}
defines.update(globals()["__builtins__"])
def define(fnc):
defines[fnc.__name__] = fnc
return fnc
def rename(name):
def deco(fnc):
fnc.__name__ = name
return define(fnc)
return deco
def special(fnc):
specials[fnc.__name__] = fnc
return fnc
def register(name, value):
defines[name] = value
| defines = {}
specials = {}
defines.update(globals()['__builtins__'])
def define(fnc):
defines[fnc.__name__] = fnc
return fnc
def rename(name):
def deco(fnc):
fnc.__name__ = name
return define(fnc)
return deco
def special(fnc):
specials[fnc.__name__] = fnc
return fnc
def register(name, value):
defines[name] = value |
def insertion_sort(array):
for i in range(1, len(array)):
key = array[i]
j = i - 1
while j >= 0 and array[i] < array[j]:
array[i], array[j] = array[j], array[i]
j -= 1
i -= 1
return array
def test_insertion_sort_simple():
assert insertion_sort([4, 2, 5, 1, 7]) == [1, 2, 4, 5, 7]
def test_insertion_sort_one_number():
assert insertion_sort([1]) == [1]
def test_insertion_sort_already_sorted():
assert insertion_sort([1, 2]) == [1, 2]
| def insertion_sort(array):
for i in range(1, len(array)):
key = array[i]
j = i - 1
while j >= 0 and array[i] < array[j]:
(array[i], array[j]) = (array[j], array[i])
j -= 1
i -= 1
return array
def test_insertion_sort_simple():
assert insertion_sort([4, 2, 5, 1, 7]) == [1, 2, 4, 5, 7]
def test_insertion_sort_one_number():
assert insertion_sort([1]) == [1]
def test_insertion_sort_already_sorted():
assert insertion_sort([1, 2]) == [1, 2] |
def generatorA(value, stop):
i = 0
while i < stop:
value = (value * 16807) % 2147483647
yield value
i += 1
def generatorB(value, stop):
i = 0
while i < stop:
value = (value * 48271) % 2147483647
yield value
i += 1
def generatorAPart2(value, stop):
i = 0
while i < stop:
value = (value * 16807) % 2147483647
while value % 4 != 0:
value = (value * 16807) % 2147483647
yield value
i += 1
def generatorBPart2(value, stop):
i = 0
while i < stop:
value = (value * 48271) % 2147483647
while value % 8 != 0:
value = (value * 48271) % 2147483647
yield value
i += 1
| def generator_a(value, stop):
i = 0
while i < stop:
value = value * 16807 % 2147483647
yield value
i += 1
def generator_b(value, stop):
i = 0
while i < stop:
value = value * 48271 % 2147483647
yield value
i += 1
def generator_a_part2(value, stop):
i = 0
while i < stop:
value = value * 16807 % 2147483647
while value % 4 != 0:
value = value * 16807 % 2147483647
yield value
i += 1
def generator_b_part2(value, stop):
i = 0
while i < stop:
value = value * 48271 % 2147483647
while value % 8 != 0:
value = value * 48271 % 2147483647
yield value
i += 1 |
# https://www.codechef.com/ELE32018/problems/JACKJILL
t=int(input())
for _ in range(t):
n,k,d=[int(x) for x in input().strip().split()]
a=[int(x) for x in input().strip().split()]
b=[int(x) for x in input().strip().split()]
flag=0
res1=0
res2=0
for i in range(k):
res1 += a[i]
res2 += b[i]
if(res1+res2 >= d):
flag=1
else:
for i in range(k,n):
res1 = res1 + a[i] - a[i-k]
res2 = res2 + b[i] - b[i-k]
if(res1+res2 >= d):
flag=1
break
if(flag):
print("no")
else:
print("yes")
| t = int(input())
for _ in range(t):
(n, k, d) = [int(x) for x in input().strip().split()]
a = [int(x) for x in input().strip().split()]
b = [int(x) for x in input().strip().split()]
flag = 0
res1 = 0
res2 = 0
for i in range(k):
res1 += a[i]
res2 += b[i]
if res1 + res2 >= d:
flag = 1
else:
for i in range(k, n):
res1 = res1 + a[i] - a[i - k]
res2 = res2 + b[i] - b[i - k]
if res1 + res2 >= d:
flag = 1
break
if flag:
print('no')
else:
print('yes') |
class Solution(object):
def findMaxConsecutiveOnes(self, nums):
ans = 0
cnt = 0
for i in range(len(nums)):
if nums[i] == 1:
cnt += 1
ans = max(ans, cnt)
else:
cnt = 0
return ans
| class Solution(object):
def find_max_consecutive_ones(self, nums):
ans = 0
cnt = 0
for i in range(len(nums)):
if nums[i] == 1:
cnt += 1
ans = max(ans, cnt)
else:
cnt = 0
return ans |
# Accept one int and one float type value & display average
a = int(input("Enter int no.: "))
b = float(input("Enter float no.: "))
c = (a+b)/2
print(f"Average value {c}")
| a = int(input('Enter int no.: '))
b = float(input('Enter float no.: '))
c = (a + b) / 2
print(f'Average value {c}') |
class Solution:
def minimizeError(self, prices: List[str], target: int) -> str:
# A[i] := (costCeil - costFloor, costCeil, costFloor)
# the lower the costCeil - costFloor, the cheaper to ceil it
A = []
sumFloored = 0
sumCeiled = 0
for price in map(float, prices):
floored = math.floor(price)
ceiled = math.ceil(price)
sumFloored += floored
sumCeiled += ceiled
costFloor = price - floored
costCeil = ceiled - price
A.append((costCeil - costFloor, costCeil, costFloor))
if not sumFloored <= target <= sumCeiled:
return '-1'
A.sort()
nCeiled = target - sumFloored
return '{:.3f}'.format(sum(a[1] for a in A[:nCeiled]) +
sum(a[2] for a in A[nCeiled:]))
| class Solution:
def minimize_error(self, prices: List[str], target: int) -> str:
a = []
sum_floored = 0
sum_ceiled = 0
for price in map(float, prices):
floored = math.floor(price)
ceiled = math.ceil(price)
sum_floored += floored
sum_ceiled += ceiled
cost_floor = price - floored
cost_ceil = ceiled - price
A.append((costCeil - costFloor, costCeil, costFloor))
if not sumFloored <= target <= sumCeiled:
return '-1'
A.sort()
n_ceiled = target - sumFloored
return '{:.3f}'.format(sum((a[1] for a in A[:nCeiled])) + sum((a[2] for a in A[nCeiled:]))) |
# GENERATED VERSION FILE
# TIME: Mon Oct 11 04:02:23 2021
__version__ = '1.2.0+f83fd55'
short_version = '1.2.0'
version_info = (1, 2, 0)
| __version__ = '1.2.0+f83fd55'
short_version = '1.2.0'
version_info = (1, 2, 0) |
values = input("Enter comma separated values: ")
list = values.split(",")
tuple = tuple(list)
#this is a forked branch
print("list: {}".format(list))
print("tuple: {}".format(tuple))
| values = input('Enter comma separated values: ')
list = values.split(',')
tuple = tuple(list)
print('list: {}'.format(list))
print('tuple: {}'.format(tuple)) |
def add_replicaset(instance, alias, roles, servers,
status='healthy', all_rw=False, weight=None):
r_uuid = '{}-uuid'.format(alias)
r_servers = []
for s in servers: # servers = ['alias-1', 'alias-2']
r_servers.append({
'alias': s,
'uuid': '{}-uuid'.format(s),
'uri': '{}-uri'.format(s),
'status': 'healthy',
'replicaset': {
'uuid': r_uuid,
'alias': alias,
'roles': roles,
}
})
instance.add_topology_servers(r_servers)
replicaset = {
'uuid': r_uuid,
'alias': alias,
'status': status,
'roles': roles,
'weight': weight,
'all_rw': all_rw,
'servers': [{'alias': s, 'priority': i + 1} for i, s in enumerate(servers)]
}
instance.add_topology_replicaset(replicaset)
return replicaset
def set_box_cfg(instace, memtx_memory):
instace.set_box_cfd({
'memtx_memory': memtx_memory,
})
| def add_replicaset(instance, alias, roles, servers, status='healthy', all_rw=False, weight=None):
r_uuid = '{}-uuid'.format(alias)
r_servers = []
for s in servers:
r_servers.append({'alias': s, 'uuid': '{}-uuid'.format(s), 'uri': '{}-uri'.format(s), 'status': 'healthy', 'replicaset': {'uuid': r_uuid, 'alias': alias, 'roles': roles}})
instance.add_topology_servers(r_servers)
replicaset = {'uuid': r_uuid, 'alias': alias, 'status': status, 'roles': roles, 'weight': weight, 'all_rw': all_rw, 'servers': [{'alias': s, 'priority': i + 1} for (i, s) in enumerate(servers)]}
instance.add_topology_replicaset(replicaset)
return replicaset
def set_box_cfg(instace, memtx_memory):
instace.set_box_cfd({'memtx_memory': memtx_memory}) |
DEBUG = True
SECRET_KEY = "kasih-tau-gak-ya"
BUNDLE_ERRORS = True
MONGO_HOST = 'mongo'
MONGO_DBNAME = 'news-api'
MONGO_PORT = 27017
JSONIFY_PRETTYPRINT_REGULAR = False | debug = True
secret_key = 'kasih-tau-gak-ya'
bundle_errors = True
mongo_host = 'mongo'
mongo_dbname = 'news-api'
mongo_port = 27017
jsonify_prettyprint_regular = False |
class Solution:
def mostCommonWord(self, paragraph: str, banned: List[str]) -> str:
'''
T: O(n log n), n = number of words
S: O(n)
'''
for punc in "!?',;.":
paragraph = paragraph.replace(punc, ' ')
words = paragraph.lower().strip().split()
bannedSet = set(banned)
wordCount = {}
for word in words:
if word in bannedSet: continue
wordCount[word] = wordCount.get(word, 0) + 1
words = sorted(wordCount.items(), key = lambda x: (-x[1], x[0]))
return words[0][0]
| class Solution:
def most_common_word(self, paragraph: str, banned: List[str]) -> str:
"""
T: O(n log n), n = number of words
S: O(n)
"""
for punc in "!?',;.":
paragraph = paragraph.replace(punc, ' ')
words = paragraph.lower().strip().split()
banned_set = set(banned)
word_count = {}
for word in words:
if word in bannedSet:
continue
wordCount[word] = wordCount.get(word, 0) + 1
words = sorted(wordCount.items(), key=lambda x: (-x[1], x[0]))
return words[0][0] |
# configuration file
TRAINING_FILE_ORIGINAL = '../mnist_train.csv'
TRAINING_FILE = '../input/mnist_train_folds.csv'
TESTING_FILE = '../input/mnist_test.csv'
MODEL_OUTPUT = "../models/" | training_file_original = '../mnist_train.csv'
training_file = '../input/mnist_train_folds.csv'
testing_file = '../input/mnist_test.csv'
model_output = '../models/' |
__all__ = [
"BoundHandler",
"ClassHook",
"Handler",
"Hook",
"Hookable",
"HookableMeta",
"HookDescriptor",
"InstanceHook",
"hookable",
]
| __all__ = ['BoundHandler', 'ClassHook', 'Handler', 'Hook', 'Hookable', 'HookableMeta', 'HookDescriptor', 'InstanceHook', 'hookable'] |
class gen_config(object):
vocab_size = 35000
train_dir = "/home/ssc/project/dataset/dataset/weibo/v0.0/"
data_dir = "/home/ssc/project/dataset/dataset/weibo/v0.0/"
buckets = [(5, 10), (10, 15), (20, 25), (40, 50)] | class Gen_Config(object):
vocab_size = 35000
train_dir = '/home/ssc/project/dataset/dataset/weibo/v0.0/'
data_dir = '/home/ssc/project/dataset/dataset/weibo/v0.0/'
buckets = [(5, 10), (10, 15), (20, 25), (40, 50)] |
#!/usr/bin/env python
# Assumption for the star format:
# contain one or more 'data_*' blocks, for each block,
# either
'''
data_*
loop_
item1 #1
...
itemn #n
item1_data ... itemn_data
...
item1_data ... itemn_data
'''
# or
'''
data_*
item1 item1_data
...
itemn itemn_data
'''
def star_data(star):
# return 2 dict: {data1:line_num, data2:line_num}, {data1:lines, data2:lines}
data_dict = {}
with open(star) as read:
for i, line in enumerate(read.readlines()):
if line[:5] == 'data_':
line = line.strip()
data_dict[line] = i
# get an inverse dictionary
inv = {v: k for k, v in data_dict.items()}
# get the lines for each key
d_lines_dict = {}
with open(star) as read:
lines = read.readlines()
line_num = sorted(inv)
for i, num in enumerate(line_num):
try:
newline = line_num[i+1]
except IndexError:
newline = len(lines) + 1
d_lines_dict[inv[num]] = lines[num-1:newline-1]
return data_dict, d_lines_dict
def data_parse(d_lines):
# return a dict, whose keys are 'data_', ('loop_'), and items in 'data_'/'loop_'
item_dict = {}
if d_lines[3][:5] == 'loop_':
n = 4
else:
n = 3
item_dict['data_'] = d_lines[:n]
for i, line in enumerate(d_lines[n:]):
if line[:4] != '_rln': break
item_dict['loop_'] = d_lines[n:n+i]
for j in item_dict['loop_']:
k, v = j.split()
# change the value to an integer and minus 1, to be convenient
if n == 4:
item_dict[k] = int(v.strip('#')) - 1
elif n == 3:
item_dict[k] = v.strip()
return item_dict
def star_parse(star, data):
# parse a star file containing the data label.
# return a dict, whose keys are 'data_', ('loop_'), and items in 'data_'/'loop_'
d_lines = star_data(star)[1][data]
return data_parse(d_lines)
| """
data_*
loop_
item1 #1
...
itemn #n
item1_data ... itemn_data
...
item1_data ... itemn_data
"""
'\n\ndata_*\n\nitem1 item1_data\n...\nitemn itemn_data\n\n'
def star_data(star):
data_dict = {}
with open(star) as read:
for (i, line) in enumerate(read.readlines()):
if line[:5] == 'data_':
line = line.strip()
data_dict[line] = i
inv = {v: k for (k, v) in data_dict.items()}
d_lines_dict = {}
with open(star) as read:
lines = read.readlines()
line_num = sorted(inv)
for (i, num) in enumerate(line_num):
try:
newline = line_num[i + 1]
except IndexError:
newline = len(lines) + 1
d_lines_dict[inv[num]] = lines[num - 1:newline - 1]
return (data_dict, d_lines_dict)
def data_parse(d_lines):
item_dict = {}
if d_lines[3][:5] == 'loop_':
n = 4
else:
n = 3
item_dict['data_'] = d_lines[:n]
for (i, line) in enumerate(d_lines[n:]):
if line[:4] != '_rln':
break
item_dict['loop_'] = d_lines[n:n + i]
for j in item_dict['loop_']:
(k, v) = j.split()
if n == 4:
item_dict[k] = int(v.strip('#')) - 1
elif n == 3:
item_dict[k] = v.strip()
return item_dict
def star_parse(star, data):
d_lines = star_data(star)[1][data]
return data_parse(d_lines) |
#
# PySNMP MIB module TRANGOP5830S-RU-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/TRANGOP5830S-RU-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 21:19:36 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "ValueSizeConstraint")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
Integer32, iso, Unsigned32, Counter64, enterprises, ObjectIdentity, MibIdentifier, Bits, MibScalar, MibTable, MibTableRow, MibTableColumn, NotificationType, Counter32, Gauge32, IpAddress, ModuleIdentity, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "iso", "Unsigned32", "Counter64", "enterprises", "ObjectIdentity", "MibIdentifier", "Bits", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "NotificationType", "Counter32", "Gauge32", "IpAddress", "ModuleIdentity", "TimeTicks")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
class DisplayString(OctetString):
pass
trango = MibIdentifier((1, 3, 6, 1, 4, 1, 5454))
tbw = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1))
p5830sru = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24))
rusys = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1))
rurf = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2))
mibinfo = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 5))
ruversion = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1))
ruswitches = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 8))
rutraffic = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9))
ruipconfig = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 13))
rurftable = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4))
ruism = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5))
ruunii = MibIdentifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6))
ruversionHW = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 1), OctetString().subtype(subtypeSpec=ValueSizeConstraint(2, 2)).setFixedLength(2)).setMaxAccess("readonly")
if mibBuilder.loadTexts: ruversionHW.setStatus('mandatory')
ruversionFW = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 80))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ruversionFW.setStatus('mandatory')
ruversionFPGA = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: ruversionFPGA.setStatus('mandatory')
ruversionFWChecksum = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 4), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: ruversionFWChecksum.setStatus('mandatory')
ruversionFPGAChecksum = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 5), OctetString().subtype(subtypeSpec=ValueSizeConstraint(4, 4)).setFixedLength(4)).setMaxAccess("readonly")
if mibBuilder.loadTexts: ruversionFPGAChecksum.setStatus('mandatory')
rusysDeviceId = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(6, 6)).setFixedLength(6)).setMaxAccess("readonly")
if mibBuilder.loadTexts: rusysDeviceId.setStatus('mandatory')
rusysDefOpMode = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 0))).clone(namedValues=NamedValues(("on", 16), ("off", 0)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rusysDefOpMode.setStatus('mandatory')
rusysCurOpMode = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(16, 0))).clone(namedValues=NamedValues(("on", 16), ("off", 0)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rusysCurOpMode.setStatus('mandatory')
rusysActivateOpmode = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("deactivated", 0), ("activated", 1))).clone('deactivated')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rusysActivateOpmode.setStatus('mandatory')
rusysReadCommStr = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rusysReadCommStr.setStatus('mandatory')
rusysWriteCommStr = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rusysWriteCommStr.setStatus('mandatory')
ruswitchesBlockBroadcastMulticast = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 8, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("passed", 0), ("blocked", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruswitchesBlockBroadcastMulticast.setStatus('mandatory')
ruswitchesHTTPD = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 8, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruswitchesHTTPD.setStatus('mandatory')
ruswitchesAutoScanMasterSignal = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 8, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("disabled", 0), ("enabled", 1)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruswitchesAutoScanMasterSignal.setStatus('mandatory')
rutrafficEthInOctets = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9, 1), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rutrafficEthInOctets.setStatus('mandatory')
rutrafficEthOutOctets = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rutrafficEthOutOctets.setStatus('mandatory')
rutrafficRfInOctets = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9, 3), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rutrafficRfInOctets.setStatus('mandatory')
rutrafficRfOutOctets = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rutrafficRfOutOctets.setStatus('mandatory')
rusysTemperature = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 10), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-128, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rusysTemperature.setStatus('mandatory')
rusysUpdateFlashAndActivate = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("no", 0), ("yes", 1))).clone('no')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rusysUpdateFlashAndActivate.setStatus('mandatory')
rusysReboot = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1))).clone(namedValues=NamedValues(("deactivated", 0), ("activated", 1))).clone('deactivated')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rusysReboot.setStatus('mandatory')
ruipconfigIpAddress = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 13, 1), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruipconfigIpAddress.setStatus('mandatory')
ruipconfigSubnet = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 13, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruipconfigSubnet.setStatus('mandatory')
ruipconfigDefaultGateway = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 13, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruipconfigDefaultGateway.setStatus('mandatory')
rurfRSSI = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-128, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rurfRSSI.setStatus('mandatory')
rurftableChannel1 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel1.setStatus('mandatory')
rurftableChannel2 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel2.setStatus('mandatory')
rurftableChannel3 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel3.setStatus('mandatory')
rurftableChannel4 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel4.setStatus('mandatory')
rurftableChannel5 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel5.setStatus('mandatory')
rurftableChannel6 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel6.setStatus('mandatory')
rurftableChannel7 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel7.setStatus('mandatory')
rurftableChannel8 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel8.setStatus('mandatory')
rurftableChannel9 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel9.setStatus('mandatory')
rurftableChannel10 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel10.setStatus('mandatory')
rurftableChannel11 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel11.setStatus('mandatory')
rurftableChannel12 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel12.setStatus('mandatory')
rurftableChannel13 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel13.setStatus('mandatory')
rurftableChannel14 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel14.setStatus('mandatory')
rurftableChannel15 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel15.setStatus('mandatory')
rurftableChannel16 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel16.setStatus('mandatory')
rurftableChannel17 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel17.setStatus('mandatory')
rurftableChannel18 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel18.setStatus('mandatory')
rurftableChannel19 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel19.setStatus('mandatory')
rurftableChannel20 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel20.setStatus('mandatory')
rurftableChannel21 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel21.setStatus('mandatory')
rurftableChannel22 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel22.setStatus('mandatory')
rurftableChannel23 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 23), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel23.setStatus('mandatory')
rurftableChannel24 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 24), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel24.setStatus('mandatory')
rurftableChannel25 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 25), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel25.setStatus('mandatory')
rurftableChannel26 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel26.setStatus('mandatory')
rurftableChannel27 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel27.setStatus('mandatory')
rurftableChannel28 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel28.setStatus('mandatory')
rurftableChannel29 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 29), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel29.setStatus('mandatory')
rurftableChannel30 = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 30), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(5260, 5340), ValueRangeConstraint(5736, 5836), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rurftableChannel30.setStatus('mandatory')
ruismTxPowerMax = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-128, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ruismTxPowerMax.setStatus('mandatory')
ruismTxPowerMin = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-128, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ruismTxPowerMin.setStatus('mandatory')
ruismTxPower = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-128, 127))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruismTxPower.setStatus('mandatory')
ruismRxThreshold = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(-90, -90), ValueRangeConstraint(-85, -85), ValueRangeConstraint(-80, -80), ValueRangeConstraint(-75, -75), ValueRangeConstraint(-70, -70), ValueRangeConstraint(-65, -65), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruismRxThreshold.setStatus('mandatory')
ruuniiTxPowerMax = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-128, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ruuniiTxPowerMax.setStatus('mandatory')
ruuniiTxPowerMin = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-128, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ruuniiTxPowerMin.setStatus('mandatory')
ruuniiTxPower = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-128, 127))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruuniiTxPower.setStatus('mandatory')
ruuniiRxThreshold = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(ValueRangeConstraint(-90, -90), ValueRangeConstraint(-85, -85), ValueRangeConstraint(-80, -80), ValueRangeConstraint(-75, -75), ValueRangeConstraint(-70, -70), ValueRangeConstraint(-65, -65), ))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ruuniiRxThreshold.setStatus('mandatory')
mibinfoVersion = MibScalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 5, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mibinfoVersion.setStatus('mandatory')
mibBuilder.exportSymbols("TRANGOP5830S-RU-MIB", mibinfo=mibinfo, rurftableChannel9=rurftableChannel9, ruswitchesAutoScanMasterSignal=ruswitchesAutoScanMasterSignal, ruversionFWChecksum=ruversionFWChecksum, rusysActivateOpmode=rusysActivateOpmode, ruversionHW=ruversionHW, rurftableChannel24=rurftableChannel24, rurftableChannel27=rurftableChannel27, rurftableChannel2=rurftableChannel2, rutrafficEthInOctets=rutrafficEthInOctets, rurftableChannel14=rurftableChannel14, ruismTxPowerMax=ruismTxPowerMax, rusys=rusys, ruunii=ruunii, rurftableChannel15=rurftableChannel15, rusysCurOpMode=rusysCurOpMode, rurftableChannel5=rurftableChannel5, rurftableChannel11=rurftableChannel11, ruversionFPGAChecksum=ruversionFPGAChecksum, rurftableChannel12=rurftableChannel12, rusysDeviceId=rusysDeviceId, rurftableChannel25=rurftableChannel25, ruswitches=ruswitches, ruismTxPowerMin=ruismTxPowerMin, ruuniiTxPower=ruuniiTxPower, ruversionFPGA=ruversionFPGA, rurftableChannel8=rurftableChannel8, p5830sru=p5830sru, rutrafficRfInOctets=rutrafficRfInOctets, ruismTxPower=ruismTxPower, rusysReboot=rusysReboot, rusysUpdateFlashAndActivate=rusysUpdateFlashAndActivate, rurftableChannel4=rurftableChannel4, rurftable=rurftable, rurftableChannel6=rurftableChannel6, rurftableChannel16=rurftableChannel16, rusysReadCommStr=rusysReadCommStr, ruversionFW=ruversionFW, ruswitchesBlockBroadcastMulticast=ruswitchesBlockBroadcastMulticast, rurftableChannel30=rurftableChannel30, rurftableChannel18=rurftableChannel18, ruipconfig=ruipconfig, ruismRxThreshold=ruismRxThreshold, rurftableChannel28=rurftableChannel28, rurftableChannel10=rurftableChannel10, rutraffic=rutraffic, rurftableChannel13=rurftableChannel13, rurftableChannel3=rurftableChannel3, ruuniiTxPowerMax=ruuniiTxPowerMax, rusysTemperature=rusysTemperature, rusysWriteCommStr=rusysWriteCommStr, rurftableChannel22=rurftableChannel22, mibinfoVersion=mibinfoVersion, tbw=tbw, ruipconfigDefaultGateway=ruipconfigDefaultGateway, rurftableChannel17=rurftableChannel17, ruswitchesHTTPD=ruswitchesHTTPD, rurftableChannel21=rurftableChannel21, ruipconfigIpAddress=ruipconfigIpAddress, ruuniiTxPowerMin=ruuniiTxPowerMin, rurftableChannel26=rurftableChannel26, ruism=ruism, rurftableChannel19=rurftableChannel19, trango=trango, rurfRSSI=rurfRSSI, rutrafficRfOutOctets=rutrafficRfOutOctets, rurftableChannel23=rurftableChannel23, rutrafficEthOutOctets=rutrafficEthOutOctets, ruversion=ruversion, rurftableChannel20=rurftableChannel20, rurftableChannel1=rurftableChannel1, DisplayString=DisplayString, ruipconfigSubnet=ruipconfigSubnet, rurftableChannel7=rurftableChannel7, rurftableChannel29=rurftableChannel29, rusysDefOpMode=rusysDefOpMode, rurf=rurf, ruuniiRxThreshold=ruuniiRxThreshold)
| (integer, object_identifier, octet_string) = mibBuilder.importSymbols('ASN1', 'Integer', 'ObjectIdentifier', 'OctetString')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(single_value_constraint, constraints_union, value_range_constraint, constraints_intersection, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ConstraintsUnion', 'ValueRangeConstraint', 'ConstraintsIntersection', 'ValueSizeConstraint')
(notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance')
(integer32, iso, unsigned32, counter64, enterprises, object_identity, mib_identifier, bits, mib_scalar, mib_table, mib_table_row, mib_table_column, notification_type, counter32, gauge32, ip_address, module_identity, time_ticks) = mibBuilder.importSymbols('SNMPv2-SMI', 'Integer32', 'iso', 'Unsigned32', 'Counter64', 'enterprises', 'ObjectIdentity', 'MibIdentifier', 'Bits', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'NotificationType', 'Counter32', 'Gauge32', 'IpAddress', 'ModuleIdentity', 'TimeTicks')
(display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention')
class Displaystring(OctetString):
pass
trango = mib_identifier((1, 3, 6, 1, 4, 1, 5454))
tbw = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1))
p5830sru = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24))
rusys = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1))
rurf = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2))
mibinfo = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 5))
ruversion = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1))
ruswitches = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 8))
rutraffic = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9))
ruipconfig = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 13))
rurftable = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4))
ruism = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5))
ruunii = mib_identifier((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6))
ruversion_hw = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 1), octet_string().subtype(subtypeSpec=value_size_constraint(2, 2)).setFixedLength(2)).setMaxAccess('readonly')
if mibBuilder.loadTexts:
ruversionHW.setStatus('mandatory')
ruversion_fw = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 2), display_string().subtype(subtypeSpec=value_size_constraint(0, 80))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
ruversionFW.setStatus('mandatory')
ruversion_fpga = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 3), octet_string().subtype(subtypeSpec=value_size_constraint(4, 4)).setFixedLength(4)).setMaxAccess('readonly')
if mibBuilder.loadTexts:
ruversionFPGA.setStatus('mandatory')
ruversion_fw_checksum = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 4), octet_string().subtype(subtypeSpec=value_size_constraint(4, 4)).setFixedLength(4)).setMaxAccess('readonly')
if mibBuilder.loadTexts:
ruversionFWChecksum.setStatus('mandatory')
ruversion_fpga_checksum = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 1, 5), octet_string().subtype(subtypeSpec=value_size_constraint(4, 4)).setFixedLength(4)).setMaxAccess('readonly')
if mibBuilder.loadTexts:
ruversionFPGAChecksum.setStatus('mandatory')
rusys_device_id = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 2), octet_string().subtype(subtypeSpec=value_size_constraint(6, 6)).setFixedLength(6)).setMaxAccess('readonly')
if mibBuilder.loadTexts:
rusysDeviceId.setStatus('mandatory')
rusys_def_op_mode = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(16, 0))).clone(namedValues=named_values(('on', 16), ('off', 0)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rusysDefOpMode.setStatus('mandatory')
rusys_cur_op_mode = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 4), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(16, 0))).clone(namedValues=named_values(('on', 16), ('off', 0)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
rusysCurOpMode.setStatus('mandatory')
rusys_activate_opmode = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('deactivated', 0), ('activated', 1))).clone('deactivated')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rusysActivateOpmode.setStatus('mandatory')
rusys_read_comm_str = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 6), display_string().subtype(subtypeSpec=value_size_constraint(0, 32))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rusysReadCommStr.setStatus('mandatory')
rusys_write_comm_str = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 7), display_string().subtype(subtypeSpec=value_size_constraint(0, 32))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rusysWriteCommStr.setStatus('mandatory')
ruswitches_block_broadcast_multicast = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 8, 1), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('passed', 0), ('blocked', 1)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruswitchesBlockBroadcastMulticast.setStatus('mandatory')
ruswitches_httpd = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 8, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('disabled', 0), ('enabled', 1)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruswitchesHTTPD.setStatus('mandatory')
ruswitches_auto_scan_master_signal = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 8, 6), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('disabled', 0), ('enabled', 1)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruswitchesAutoScanMasterSignal.setStatus('mandatory')
rutraffic_eth_in_octets = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9, 1), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
rutrafficEthInOctets.setStatus('mandatory')
rutraffic_eth_out_octets = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9, 2), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
rutrafficEthOutOctets.setStatus('mandatory')
rutraffic_rf_in_octets = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9, 3), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
rutrafficRfInOctets.setStatus('mandatory')
rutraffic_rf_out_octets = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 9, 4), counter32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
rutrafficRfOutOctets.setStatus('mandatory')
rusys_temperature = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 10), integer32().subtype(subtypeSpec=value_range_constraint(-128, 127))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
rusysTemperature.setStatus('mandatory')
rusys_update_flash_and_activate = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 11), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('no', 0), ('yes', 1))).clone('no')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rusysUpdateFlashAndActivate.setStatus('mandatory')
rusys_reboot = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 12), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(0, 1))).clone(namedValues=named_values(('deactivated', 0), ('activated', 1))).clone('deactivated')).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rusysReboot.setStatus('mandatory')
ruipconfig_ip_address = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 13, 1), ip_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruipconfigIpAddress.setStatus('mandatory')
ruipconfig_subnet = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 13, 2), ip_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruipconfigSubnet.setStatus('mandatory')
ruipconfig_default_gateway = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 1, 13, 3), ip_address()).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruipconfigDefaultGateway.setStatus('mandatory')
rurf_rssi = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 1), integer32().subtype(subtypeSpec=value_range_constraint(-128, 127))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
rurfRSSI.setStatus('mandatory')
rurftable_channel1 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 1), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel1.setStatus('mandatory')
rurftable_channel2 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 2), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel2.setStatus('mandatory')
rurftable_channel3 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 3), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel3.setStatus('mandatory')
rurftable_channel4 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 4), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel4.setStatus('mandatory')
rurftable_channel5 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 5), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel5.setStatus('mandatory')
rurftable_channel6 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 6), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel6.setStatus('mandatory')
rurftable_channel7 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 7), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel7.setStatus('mandatory')
rurftable_channel8 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 8), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel8.setStatus('mandatory')
rurftable_channel9 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 9), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel9.setStatus('mandatory')
rurftable_channel10 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 10), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel10.setStatus('mandatory')
rurftable_channel11 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 11), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel11.setStatus('mandatory')
rurftable_channel12 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 12), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel12.setStatus('mandatory')
rurftable_channel13 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 13), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel13.setStatus('mandatory')
rurftable_channel14 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 14), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel14.setStatus('mandatory')
rurftable_channel15 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 15), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel15.setStatus('mandatory')
rurftable_channel16 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 16), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel16.setStatus('mandatory')
rurftable_channel17 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 17), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel17.setStatus('mandatory')
rurftable_channel18 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 18), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel18.setStatus('mandatory')
rurftable_channel19 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 19), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel19.setStatus('mandatory')
rurftable_channel20 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 20), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel20.setStatus('mandatory')
rurftable_channel21 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 21), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel21.setStatus('mandatory')
rurftable_channel22 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 22), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel22.setStatus('mandatory')
rurftable_channel23 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 23), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel23.setStatus('mandatory')
rurftable_channel24 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 24), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel24.setStatus('mandatory')
rurftable_channel25 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 25), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel25.setStatus('mandatory')
rurftable_channel26 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 26), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel26.setStatus('mandatory')
rurftable_channel27 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 27), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel27.setStatus('mandatory')
rurftable_channel28 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 28), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel28.setStatus('mandatory')
rurftable_channel29 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 29), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel29.setStatus('mandatory')
rurftable_channel30 = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 4, 30), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(5260, 5340), value_range_constraint(5736, 5836)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
rurftableChannel30.setStatus('mandatory')
ruism_tx_power_max = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5, 1), integer32().subtype(subtypeSpec=value_range_constraint(-128, 127))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
ruismTxPowerMax.setStatus('mandatory')
ruism_tx_power_min = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5, 2), integer32().subtype(subtypeSpec=value_range_constraint(-128, 127))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
ruismTxPowerMin.setStatus('mandatory')
ruism_tx_power = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5, 3), integer32().subtype(subtypeSpec=value_range_constraint(-128, 127))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruismTxPower.setStatus('mandatory')
ruism_rx_threshold = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 5, 4), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(-90, -90), value_range_constraint(-85, -85), value_range_constraint(-80, -80), value_range_constraint(-75, -75), value_range_constraint(-70, -70), value_range_constraint(-65, -65)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruismRxThreshold.setStatus('mandatory')
ruunii_tx_power_max = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6, 1), integer32().subtype(subtypeSpec=value_range_constraint(-128, 127))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
ruuniiTxPowerMax.setStatus('mandatory')
ruunii_tx_power_min = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6, 2), integer32().subtype(subtypeSpec=value_range_constraint(-128, 127))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
ruuniiTxPowerMin.setStatus('mandatory')
ruunii_tx_power = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6, 3), integer32().subtype(subtypeSpec=value_range_constraint(-128, 127))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruuniiTxPower.setStatus('mandatory')
ruunii_rx_threshold = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 2, 6, 4), integer32().subtype(subtypeSpec=constraints_union(value_range_constraint(-90, -90), value_range_constraint(-85, -85), value_range_constraint(-80, -80), value_range_constraint(-75, -75), value_range_constraint(-70, -70), value_range_constraint(-65, -65)))).setMaxAccess('readwrite')
if mibBuilder.loadTexts:
ruuniiRxThreshold.setStatus('mandatory')
mibinfo_version = mib_scalar((1, 3, 6, 1, 4, 1, 5454, 1, 24, 5, 1), display_string().subtype(subtypeSpec=value_size_constraint(0, 32))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
mibinfoVersion.setStatus('mandatory')
mibBuilder.exportSymbols('TRANGOP5830S-RU-MIB', mibinfo=mibinfo, rurftableChannel9=rurftableChannel9, ruswitchesAutoScanMasterSignal=ruswitchesAutoScanMasterSignal, ruversionFWChecksum=ruversionFWChecksum, rusysActivateOpmode=rusysActivateOpmode, ruversionHW=ruversionHW, rurftableChannel24=rurftableChannel24, rurftableChannel27=rurftableChannel27, rurftableChannel2=rurftableChannel2, rutrafficEthInOctets=rutrafficEthInOctets, rurftableChannel14=rurftableChannel14, ruismTxPowerMax=ruismTxPowerMax, rusys=rusys, ruunii=ruunii, rurftableChannel15=rurftableChannel15, rusysCurOpMode=rusysCurOpMode, rurftableChannel5=rurftableChannel5, rurftableChannel11=rurftableChannel11, ruversionFPGAChecksum=ruversionFPGAChecksum, rurftableChannel12=rurftableChannel12, rusysDeviceId=rusysDeviceId, rurftableChannel25=rurftableChannel25, ruswitches=ruswitches, ruismTxPowerMin=ruismTxPowerMin, ruuniiTxPower=ruuniiTxPower, ruversionFPGA=ruversionFPGA, rurftableChannel8=rurftableChannel8, p5830sru=p5830sru, rutrafficRfInOctets=rutrafficRfInOctets, ruismTxPower=ruismTxPower, rusysReboot=rusysReboot, rusysUpdateFlashAndActivate=rusysUpdateFlashAndActivate, rurftableChannel4=rurftableChannel4, rurftable=rurftable, rurftableChannel6=rurftableChannel6, rurftableChannel16=rurftableChannel16, rusysReadCommStr=rusysReadCommStr, ruversionFW=ruversionFW, ruswitchesBlockBroadcastMulticast=ruswitchesBlockBroadcastMulticast, rurftableChannel30=rurftableChannel30, rurftableChannel18=rurftableChannel18, ruipconfig=ruipconfig, ruismRxThreshold=ruismRxThreshold, rurftableChannel28=rurftableChannel28, rurftableChannel10=rurftableChannel10, rutraffic=rutraffic, rurftableChannel13=rurftableChannel13, rurftableChannel3=rurftableChannel3, ruuniiTxPowerMax=ruuniiTxPowerMax, rusysTemperature=rusysTemperature, rusysWriteCommStr=rusysWriteCommStr, rurftableChannel22=rurftableChannel22, mibinfoVersion=mibinfoVersion, tbw=tbw, ruipconfigDefaultGateway=ruipconfigDefaultGateway, rurftableChannel17=rurftableChannel17, ruswitchesHTTPD=ruswitchesHTTPD, rurftableChannel21=rurftableChannel21, ruipconfigIpAddress=ruipconfigIpAddress, ruuniiTxPowerMin=ruuniiTxPowerMin, rurftableChannel26=rurftableChannel26, ruism=ruism, rurftableChannel19=rurftableChannel19, trango=trango, rurfRSSI=rurfRSSI, rutrafficRfOutOctets=rutrafficRfOutOctets, rurftableChannel23=rurftableChannel23, rutrafficEthOutOctets=rutrafficEthOutOctets, ruversion=ruversion, rurftableChannel20=rurftableChannel20, rurftableChannel1=rurftableChannel1, DisplayString=DisplayString, ruipconfigSubnet=ruipconfigSubnet, rurftableChannel7=rurftableChannel7, rurftableChannel29=rurftableChannel29, rusysDefOpMode=rusysDefOpMode, rurf=rurf, ruuniiRxThreshold=ruuniiRxThreshold) |
class CarrinhoCompras:
def __init__(self):
self.produtos = []
def insere_produto(self, produto):
self.produtos.append(produto)
def lista_produtos(self):
for produto in self.produtos:
print(produto.nome, produto.valor)
def soma_total(self):
total = 0
for produto in self.produtos:
total += produto.valor
return total
class Produto:
def __init__(self, nome, valor):
self.nome = nome
self.valor = valor
carrinho = CarrinhoCompras()
prod1 = Produto("camisa", 50)
prod2 = Produto("short", 90)
prod3 = Produto("meias", 25)
carrinho.insere_produto(prod1)
carrinho.insere_produto(prod2)
carrinho.insere_produto(prod3)
carrinho.insere_produto(prod1)
carrinho.lista_produtos()
print(carrinho.soma_total())
| class Carrinhocompras:
def __init__(self):
self.produtos = []
def insere_produto(self, produto):
self.produtos.append(produto)
def lista_produtos(self):
for produto in self.produtos:
print(produto.nome, produto.valor)
def soma_total(self):
total = 0
for produto in self.produtos:
total += produto.valor
return total
class Produto:
def __init__(self, nome, valor):
self.nome = nome
self.valor = valor
carrinho = carrinho_compras()
prod1 = produto('camisa', 50)
prod2 = produto('short', 90)
prod3 = produto('meias', 25)
carrinho.insere_produto(prod1)
carrinho.insere_produto(prod2)
carrinho.insere_produto(prod3)
carrinho.insere_produto(prod1)
carrinho.lista_produtos()
print(carrinho.soma_total()) |
# Sage version information for Python scripts
# This file is auto-generated by the sage-update-version script, do not edit!
version = '8.3.beta3'
date = '2018-05-27'
| version = '8.3.beta3'
date = '2018-05-27' |
class Judge:
PAPER_LIMIT = 7
def __init__(
self,
judge_id,
first,
last,
email,
phone,
preferred_categories,
is_paper_reviewer,
presentation_availability,
):
self.judge_id = judge_id # int
self.first = first # str
self.last = last # str
self.email = email # str
self.phone = phone # str
self.preferred_categories = preferred_categories # list of int
self.is_paper_reviewer = is_paper_reviewer # bool
self.presentation_availability = presentation_availability # list of float
self.presentation_slots = len(self.presentation_availability) # int
self.assigned_presentations = [] # list of Student
self.assigned_times = [] # list of float
self.assigned_papers = [] # list of Student
def __eq__(self, other):
return self.judge_id == other.judge_id
def assign_presentation(self, student, time_index=None):
if not self.presentation_slots:
raise Exception("No slots available")
if len(student.presentation_judges) >= 2:
raise Exception("Too many presentation judges")
if (
len(student.presentation_judges) == 1
and student.presentation_judges[0] == self
):
raise Exception("Trying to add same judge twice")
self.presentation_slots -= 1
self.assigned_presentations.append(student)
if not time_index:
time_index = self.presentation_availability[self.presentation_slots]
self.assigned_times.append(time_index)
student.presentation_judges.append(self)
student.presentation_time = time_index
def assign_paper(self, student):
# if len(self.assigned_papers) >= self.PAPER_LIMIT:
# raise Exception("Paper limit reached")
if len(student.paper_judges) >= 2:
raise Exception("Too many paper judges")
if len(student.paper_judges) == 1 and student.paper_judges[0] == self:
raise Exception("Trying to add same judge twice")
self.assigned_papers.append(student)
student.paper_judges.append(self)
def __str__(self):
return f"{self.first} {self.last}"
# return "\n".join(
# [f"{field}: {value}" for field, value in self.__dict__.items()]
# )
| class Judge:
paper_limit = 7
def __init__(self, judge_id, first, last, email, phone, preferred_categories, is_paper_reviewer, presentation_availability):
self.judge_id = judge_id
self.first = first
self.last = last
self.email = email
self.phone = phone
self.preferred_categories = preferred_categories
self.is_paper_reviewer = is_paper_reviewer
self.presentation_availability = presentation_availability
self.presentation_slots = len(self.presentation_availability)
self.assigned_presentations = []
self.assigned_times = []
self.assigned_papers = []
def __eq__(self, other):
return self.judge_id == other.judge_id
def assign_presentation(self, student, time_index=None):
if not self.presentation_slots:
raise exception('No slots available')
if len(student.presentation_judges) >= 2:
raise exception('Too many presentation judges')
if len(student.presentation_judges) == 1 and student.presentation_judges[0] == self:
raise exception('Trying to add same judge twice')
self.presentation_slots -= 1
self.assigned_presentations.append(student)
if not time_index:
time_index = self.presentation_availability[self.presentation_slots]
self.assigned_times.append(time_index)
student.presentation_judges.append(self)
student.presentation_time = time_index
def assign_paper(self, student):
if len(student.paper_judges) >= 2:
raise exception('Too many paper judges')
if len(student.paper_judges) == 1 and student.paper_judges[0] == self:
raise exception('Trying to add same judge twice')
self.assigned_papers.append(student)
student.paper_judges.append(self)
def __str__(self):
return f'{self.first} {self.last}' |
class dispatcher:
name = 'dispatcher'
def __init__(self, function_to_exec):
self.function = function_to_exec
return self.function
def get_name(self):
return self.name
def function_one(a,b):
return a + b
def function_two():
return 'function two' | class Dispatcher:
name = 'dispatcher'
def __init__(self, function_to_exec):
self.function = function_to_exec
return self.function
def get_name(self):
return self.name
def function_one(a, b):
return a + b
def function_two():
return 'function two' |
class UniformExchange(object):
def __init__(self, A):
if not isinstance(A, (float, int)) or A <= 0:
raise ValueError('Exchange constant must be positive float/int.')
else:
self.A = A
def get_mif(self):
# Create mif string.
mif = '# UniformExchange\n'
mif += 'Specify Oxs_UniformExchange {\n'
mif += '\tA {}\n'.format(self.A)
mif += '}\n\n'
return mif
| class Uniformexchange(object):
def __init__(self, A):
if not isinstance(A, (float, int)) or A <= 0:
raise value_error('Exchange constant must be positive float/int.')
else:
self.A = A
def get_mif(self):
mif = '# UniformExchange\n'
mif += 'Specify Oxs_UniformExchange {\n'
mif += '\tA {}\n'.format(self.A)
mif += '}\n\n'
return mif |
# model
model = Model()
i1 = Input("op1", "TENSOR_QUANT8_ASYMM", "{1, 2, 2, 1}, 0.8, 5")
i2 = Output("op2", "TENSOR_QUANT8_ASYMM", "{1, 3, 3, 1}, 0.8, 5")
w = Int32Scalar("width", 3)
h = Int32Scalar("height", 3)
model = model.Operation("RESIZE_BILINEAR", i1, w, h).To(i2)
# Example 1. Input in operand 0,
input0 = {i1: # input 0
[1, 1, 2, 2]}
output0 = {i2: # output 0
[1, 1, 1,
2, 2, 2,
2, 2, 2]}
# Instantiate an example
Example((input0, output0))
| model = model()
i1 = input('op1', 'TENSOR_QUANT8_ASYMM', '{1, 2, 2, 1}, 0.8, 5')
i2 = output('op2', 'TENSOR_QUANT8_ASYMM', '{1, 3, 3, 1}, 0.8, 5')
w = int32_scalar('width', 3)
h = int32_scalar('height', 3)
model = model.Operation('RESIZE_BILINEAR', i1, w, h).To(i2)
input0 = {i1: [1, 1, 2, 2]}
output0 = {i2: [1, 1, 1, 2, 2, 2, 2, 2, 2]}
example((input0, output0)) |
def add(a, b):
print(f"ADDING {a} + {b}")
return a + b
def subtract(a, b):
print(f"SUBTRACTING {a} - {b}")
return a - b
def multiply(a, b):
print(f"MULTIPLYING {a} * {b}")
return a * b
def divide(a, b):
print(f"DIVIDING {a} / {b}")
return a / b
a = float(input("input a:"))
b = float(input("input b:"))
c = float(input("input c:"))
d = float(input("input d:"))
e = float(input("input e:"))
print("calculate (a+b)*c-(d/e)")
print("Directly:",(a+b)*c-(d/e))
print("Using the functions:", subtract(multiply(add(a,b),c), divide(d, e)))
| def add(a, b):
print(f'ADDING {a} + {b}')
return a + b
def subtract(a, b):
print(f'SUBTRACTING {a} - {b}')
return a - b
def multiply(a, b):
print(f'MULTIPLYING {a} * {b}')
return a * b
def divide(a, b):
print(f'DIVIDING {a} / {b}')
return a / b
a = float(input('input a:'))
b = float(input('input b:'))
c = float(input('input c:'))
d = float(input('input d:'))
e = float(input('input e:'))
print('calculate (a+b)*c-(d/e)')
print('Directly:', (a + b) * c - d / e)
print('Using the functions:', subtract(multiply(add(a, b), c), divide(d, e))) |
# Description: Pass Statement in Python
# Pass statement in for loop
for var in range(5):
pass
# Pass statement in a while loop
while True:
pass # Busy-wait for keyboard interrupt (Ctrl + C)
# This is commonly used for creating minimal classes:
class MyEmptyClass:
pass
# Pass can also be used is as a place-holder for a function or conditional body while working on new code
def defineAFunction():
pass # Remember to implement this!
| for var in range(5):
pass
while True:
pass
class Myemptyclass:
pass
def define_a_function():
pass |
number_dict = {
"0" : {
"color" : (187,173,160),
"font_size" : 45,
"backgroud_color" : (205,193,180),
"coordinate" : [(0,0), (0,0), (0,0), (0,0)]
},
"2" : {
"color" : (119, 110, 101),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (238, 228, 218),
"coordinate" : [(40,10), (30,3), (25,2), (22,3)]
},
"4" : {
"color" : (119, 110, 101),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (237, 224, 200),
"coordinate" : [(40,10), (30,3), (25,2), (22,3)]
},
"8" : {
"color" : (249, 246, 242),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (242, 177, 121),
"coordinate" : [(40,10), (30,3), (25,2), (22,3)]
},
"16" : {
"color" : (249, 246, 242),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (235, 140, 82),
"coordinate" : [(15,10), (8,3), (6,2), (6,3)]
},
"32" : {
"color" : (249, 246, 242),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (245, 124, 95),
"coordinate" : [(20,10), (10,3), (8,2), (9,3)]
},
"64" : {
"color" : (249, 246, 242),
"font_size" : [70, 60, 50, 40],
"backgroud_color" : (233, 89, 55),
"coordinate" : [(20,10), (10,3), (8,2), (9,3)]
},
"128" : {
"color" : (249, 246, 242),
"font_size" : [50, 40, 30, 25],
"backgroud_color" : (242, 216, 106),
"coordinate" : [(15,25), (10,15), (10,15), (10,15)]
},
"256" : {
"color" : (249, 246, 242),
"font_size" : [50, 40, 30, 25],
"backgroud_color" : (237, 202, 75),
"coordinate" : [(15,25), (10,15), (10,15), (10,15)]
},
"512" : {
"color" : (249, 246, 242),
"font_size" : [50, 40, 30, 25],
"backgroud_color" : (228, 192, 42),
"coordinate" : [(15,25), (10,15), (10,15), (10,15)]
},
"1024" : {
"color" : (249, 246, 242),
"font_size" : [40, 30, 24, 20],
"backgroud_color" : (237, 195, 20),
"coordinate" : [(11,30), (8,23), (8,20), (8,18)]
},
"2048" : {
"color" : (249, 246, 242),
"font_size" : [40, 30, 24, 20],
"backgroud_color" : (237, 195, 20),
"coordinate" : [(13,30), (10,23), (10,20), (10,18)]
},
"4096" : {
"color" : (249, 246, 242),
"font_size" : [40, 30, 24, 20],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(13,30), (10,23), (10,20), (10,18)]
},
"8192" : {
"color" : (249, 246, 242),
"font_size" : [40, 30, 24, 20],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(13,30), (10,23), (10,20), (10,18)]
},
"16384" : {
"color" : (249, 246, 242),
"font_size" : [32, 24, 19, 16],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(11,35), (10,28), (10,25), (9,20)]
},
"32768" : {
"color" : (249, 246, 242),
"font_size" : [32, 24, 19, 16],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(12,35), (11,28), (11,25), (10,20)]
},
"65536" : {
"color" : (249, 246, 242),
"font_size" : [32, 24, 19, 16],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(14,35), (12,28), (11,25), (10,20)]
},
"131072" : {
"color" : (249, 246, 242),
"font_size" : [28, 20, 16, 13],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(10,37), (10,32), (9,26), (9,23)]
},
"262144" : {
"color" : (249, 246, 242),
"font_size" : [28, 20, 16, 13],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(11,39), (11,32), (10,26), (9,23)]
},
"524288" : {
"color" : (249, 246, 242),
"font_size" : [28, 20, 16, 13],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(12,37), (11,32), (11,26), (9,23)]
},
"1048576" : {
"color" : (249, 246, 242),
"font_size" : [24, 17, 14, 12],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(9,42), (9,33), (8,28), (8,24)]
},
"2097152" : {
"color" : (249, 246, 242),
"font_size" : [24, 17, 14, 12],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(11,42), (10,33), (9,28), (9,24)]
},
"4194304" : {
"color" : (249, 246, 242),
"font_size" : [24, 17, 14, 12],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(10,42), (10,33), (9,28), (9,24)]
},
"8388608" : {
"color" : (249, 246, 242),
"font_size" : [24, 17, 14, 12],
"backgroud_color" : (71, 71, 82),
"coordinate" : [(11,42), (11,33), (10,28), (9,24)]
}
} | number_dict = {'0': {'color': (187, 173, 160), 'font_size': 45, 'backgroud_color': (205, 193, 180), 'coordinate': [(0, 0), (0, 0), (0, 0), (0, 0)]}, '2': {'color': (119, 110, 101), 'font_size': [70, 60, 50, 40], 'backgroud_color': (238, 228, 218), 'coordinate': [(40, 10), (30, 3), (25, 2), (22, 3)]}, '4': {'color': (119, 110, 101), 'font_size': [70, 60, 50, 40], 'backgroud_color': (237, 224, 200), 'coordinate': [(40, 10), (30, 3), (25, 2), (22, 3)]}, '8': {'color': (249, 246, 242), 'font_size': [70, 60, 50, 40], 'backgroud_color': (242, 177, 121), 'coordinate': [(40, 10), (30, 3), (25, 2), (22, 3)]}, '16': {'color': (249, 246, 242), 'font_size': [70, 60, 50, 40], 'backgroud_color': (235, 140, 82), 'coordinate': [(15, 10), (8, 3), (6, 2), (6, 3)]}, '32': {'color': (249, 246, 242), 'font_size': [70, 60, 50, 40], 'backgroud_color': (245, 124, 95), 'coordinate': [(20, 10), (10, 3), (8, 2), (9, 3)]}, '64': {'color': (249, 246, 242), 'font_size': [70, 60, 50, 40], 'backgroud_color': (233, 89, 55), 'coordinate': [(20, 10), (10, 3), (8, 2), (9, 3)]}, '128': {'color': (249, 246, 242), 'font_size': [50, 40, 30, 25], 'backgroud_color': (242, 216, 106), 'coordinate': [(15, 25), (10, 15), (10, 15), (10, 15)]}, '256': {'color': (249, 246, 242), 'font_size': [50, 40, 30, 25], 'backgroud_color': (237, 202, 75), 'coordinate': [(15, 25), (10, 15), (10, 15), (10, 15)]}, '512': {'color': (249, 246, 242), 'font_size': [50, 40, 30, 25], 'backgroud_color': (228, 192, 42), 'coordinate': [(15, 25), (10, 15), (10, 15), (10, 15)]}, '1024': {'color': (249, 246, 242), 'font_size': [40, 30, 24, 20], 'backgroud_color': (237, 195, 20), 'coordinate': [(11, 30), (8, 23), (8, 20), (8, 18)]}, '2048': {'color': (249, 246, 242), 'font_size': [40, 30, 24, 20], 'backgroud_color': (237, 195, 20), 'coordinate': [(13, 30), (10, 23), (10, 20), (10, 18)]}, '4096': {'color': (249, 246, 242), 'font_size': [40, 30, 24, 20], 'backgroud_color': (71, 71, 82), 'coordinate': [(13, 30), (10, 23), (10, 20), (10, 18)]}, '8192': {'color': (249, 246, 242), 'font_size': [40, 30, 24, 20], 'backgroud_color': (71, 71, 82), 'coordinate': [(13, 30), (10, 23), (10, 20), (10, 18)]}, '16384': {'color': (249, 246, 242), 'font_size': [32, 24, 19, 16], 'backgroud_color': (71, 71, 82), 'coordinate': [(11, 35), (10, 28), (10, 25), (9, 20)]}, '32768': {'color': (249, 246, 242), 'font_size': [32, 24, 19, 16], 'backgroud_color': (71, 71, 82), 'coordinate': [(12, 35), (11, 28), (11, 25), (10, 20)]}, '65536': {'color': (249, 246, 242), 'font_size': [32, 24, 19, 16], 'backgroud_color': (71, 71, 82), 'coordinate': [(14, 35), (12, 28), (11, 25), (10, 20)]}, '131072': {'color': (249, 246, 242), 'font_size': [28, 20, 16, 13], 'backgroud_color': (71, 71, 82), 'coordinate': [(10, 37), (10, 32), (9, 26), (9, 23)]}, '262144': {'color': (249, 246, 242), 'font_size': [28, 20, 16, 13], 'backgroud_color': (71, 71, 82), 'coordinate': [(11, 39), (11, 32), (10, 26), (9, 23)]}, '524288': {'color': (249, 246, 242), 'font_size': [28, 20, 16, 13], 'backgroud_color': (71, 71, 82), 'coordinate': [(12, 37), (11, 32), (11, 26), (9, 23)]}, '1048576': {'color': (249, 246, 242), 'font_size': [24, 17, 14, 12], 'backgroud_color': (71, 71, 82), 'coordinate': [(9, 42), (9, 33), (8, 28), (8, 24)]}, '2097152': {'color': (249, 246, 242), 'font_size': [24, 17, 14, 12], 'backgroud_color': (71, 71, 82), 'coordinate': [(11, 42), (10, 33), (9, 28), (9, 24)]}, '4194304': {'color': (249, 246, 242), 'font_size': [24, 17, 14, 12], 'backgroud_color': (71, 71, 82), 'coordinate': [(10, 42), (10, 33), (9, 28), (9, 24)]}, '8388608': {'color': (249, 246, 242), 'font_size': [24, 17, 14, 12], 'backgroud_color': (71, 71, 82), 'coordinate': [(11, 42), (11, 33), (10, 28), (9, 24)]}} |
def eqindexMultiPass(data):
"Multi pass"
for i in range(len(data)):
suml, sumr = sum(data[:i]), sum(data[i+1:])
if suml == sumr:
yield i
| def eqindex_multi_pass(data):
"""Multi pass"""
for i in range(len(data)):
(suml, sumr) = (sum(data[:i]), sum(data[i + 1:]))
if suml == sumr:
yield i |
class languages():
def __init__(self, fDic):
self.fDic = fDic
self.scripts = set(self.fDic.scripts)
self.scripts.discard('dflt')
def languageSyntax(self, script, language):
return 'languagesystem %s %s;' %(script, language)
def syntax(self):
result = [self.languageSyntax('DFLT', 'dflt')]
for script in self.scripts:
result.append(self.languageSyntax(script, 'dflt'))
for script, language in self.fDic.localized.keys():
result.append(self.languageSyntax(script, language))
return '\n'.join(result)
| class Languages:
def __init__(self, fDic):
self.fDic = fDic
self.scripts = set(self.fDic.scripts)
self.scripts.discard('dflt')
def language_syntax(self, script, language):
return 'languagesystem %s %s;' % (script, language)
def syntax(self):
result = [self.languageSyntax('DFLT', 'dflt')]
for script in self.scripts:
result.append(self.languageSyntax(script, 'dflt'))
for (script, language) in self.fDic.localized.keys():
result.append(self.languageSyntax(script, language))
return '\n'.join(result) |
n = int(input())
for i in range(1,n+1):
temp = n
for j in range(1,i):
print(temp,end="")
temp = temp -1
for j in range(1,(2*n) - (2*i) + 2):
print(n-i+1,end="")
for j in range(1,i):
temp = temp+1
print(temp,end="")
print()
for i in range(n-1,0,-1):
temp = n
for j in range(1,i):
print(temp,end="")
temp = temp - 1
for j in range(1,(2*n) - (2*i) + 2):
print(n-i+1,end="")
for j in range(1,i):
temp = temp+1
print(temp,end="")
print()
| n = int(input())
for i in range(1, n + 1):
temp = n
for j in range(1, i):
print(temp, end='')
temp = temp - 1
for j in range(1, 2 * n - 2 * i + 2):
print(n - i + 1, end='')
for j in range(1, i):
temp = temp + 1
print(temp, end='')
print()
for i in range(n - 1, 0, -1):
temp = n
for j in range(1, i):
print(temp, end='')
temp = temp - 1
for j in range(1, 2 * n - 2 * i + 2):
print(n - i + 1, end='')
for j in range(1, i):
temp = temp + 1
print(temp, end='')
print() |
class Solution:
def shortestPalindrome(self, s: str) -> str:
temp = s + '#' + s[::-1]
i = 1
l = 0
lps = [0] * len(temp)
while i < len(temp):
if temp[i] == temp[l]:
lps[i] = l + 1
i += 1
l += 1
elif l != 0:
l = lps[l - 1]
else:
i += 1
return s[l:][::-1] + s
| class Solution:
def shortest_palindrome(self, s: str) -> str:
temp = s + '#' + s[::-1]
i = 1
l = 0
lps = [0] * len(temp)
while i < len(temp):
if temp[i] == temp[l]:
lps[i] = l + 1
i += 1
l += 1
elif l != 0:
l = lps[l - 1]
else:
i += 1
return s[l:][::-1] + s |
l1 = [1, 3, 5, 7, 9] # list mutable (read write)
t1 = (1, 3, 5, 7, 9) # tuple imutable (read only)
def f(x):
x.append(29)
f(l1)
print(l1)
f(t1)
print(t1)
| l1 = [1, 3, 5, 7, 9]
t1 = (1, 3, 5, 7, 9)
def f(x):
x.append(29)
f(l1)
print(l1)
f(t1)
print(t1) |
# A list contains authorized users' discord IDs.
OWNER = 184335517947658240 # foxfair
# Staff
AUTHORIZED = [
OWNER,
129405976020385792, # Auri
423991805156261889, # Kim
699053180079702098, # Gelica
266289895415218177, # Yang
294058604854509589, # Giana
107209352816914432, # Tooch
97145923691347968, # baosao
137798184721186817, # Vince
]
# Channels that the bot is authorized to send messages.
SEND_MSG_CHANNELS = [
725807955559055451, # bot-logs in my dev server.
# 725798620531785749, # villager-adoption-team in beyond stalks
]
| owner = 184335517947658240
authorized = [OWNER, 129405976020385792, 423991805156261889, 699053180079702098, 266289895415218177, 294058604854509589, 107209352816914432, 97145923691347968, 137798184721186817]
send_msg_channels = [725807955559055451] |
number = [i for i in range(1, 3001330+1)]
# number = [i for i in range(1, 10)]
number2 = number[:]
last = len(number) % 2 != 0
while len(number) > 1:
next_last = len(number) % 2 != last
number = [j for i, j in enumerate(number) if i % 2 != last]
last = next_last
print('#1', number[0])
number = number2
while len(number) > 1:
pop = set()
last = 0
for i in range(len(number) // 2):
last = number[i]
pop.add(number[(2 * i + (len(number) - i) // 2) % len(number)])
number = [i for i in number if i not in pop]
if len(number) == 1: break
pop = set()
start = number.index(last) + 1
for i in range(start, len(number)):
pop.add(number[(i + (len(number) + i - start) // 2) % len(number)])
number = [i for i in number if i not in pop]
print('#2', number[0])
| number = [i for i in range(1, 3001330 + 1)]
number2 = number[:]
last = len(number) % 2 != 0
while len(number) > 1:
next_last = len(number) % 2 != last
number = [j for (i, j) in enumerate(number) if i % 2 != last]
last = next_last
print('#1', number[0])
number = number2
while len(number) > 1:
pop = set()
last = 0
for i in range(len(number) // 2):
last = number[i]
pop.add(number[(2 * i + (len(number) - i) // 2) % len(number)])
number = [i for i in number if i not in pop]
if len(number) == 1:
break
pop = set()
start = number.index(last) + 1
for i in range(start, len(number)):
pop.add(number[(i + (len(number) + i - start) // 2) % len(number)])
number = [i for i in number if i not in pop]
print('#2', number[0]) |
class PagingModifier:
def __init__(self,
Id: int = None,
End: int = None,
Start: int = None,
Limit: int = None,
):
self.Id = Id
self.Start = Start
self.End = End
self.Limit = Limit
| class Pagingmodifier:
def __init__(self, Id: int=None, End: int=None, Start: int=None, Limit: int=None):
self.Id = Id
self.Start = Start
self.End = End
self.Limit = Limit |
employee_file=open("employee.txt","w") #write mode- erases previous content
employee_file.write("David - Software Developer")
print(employee_file.readable())
#print(employee_file.read())
employee_file.close()
#David - Software Developer
# previous data and content is vanished | employee_file = open('employee.txt', 'w')
employee_file.write('David - Software Developer')
print(employee_file.readable())
employee_file.close() |
__author__ = 'Lena'
class Group:
def __init__(self, name, header, footer):
self.name=name
self.header=header
self.footer=footer
| __author__ = 'Lena'
class Group:
def __init__(self, name, header, footer):
self.name = name
self.header = header
self.footer = footer |
TICKET_PRODUCTS = '''
query getTicketProducts {
tutorialProducts {
id
type
name
nameKo
nameEn
desc
descKo
descEn
warning
warningKo
warningEn
startAt
finishAt
total
remainingCount
isSoldOut
owner {
profile {
name
nameKo
nameEn
email
image
avatarUrl
}
}
price
isEditablePrice
isUniqueInType
active
cancelableDate
ticketOpenAt
ticketCloseAt
createdAt
updatedAt
purchaseCount
isPurchased
}
conferenceProducts {
id
type
name
nameKo
nameEn
desc
descKo
descEn
warning
warningKo
warningEn
startAt
finishAt
total
remainingCount
isSoldOut
owner {
profile {
name
nameKo
nameEn
email
image
avatarUrl
}
}
price
isEditablePrice
isUniqueInType
active
cancelableDate
ticketOpenAt
ticketCloseAt
createdAt
updatedAt
purchaseCount
isPurchased
}
}
'''
BUY_TICKET = '''
mutation BuyTicket($productId: ID!, $payment: PaymentInput!, $options: JSONString) {
buyTicket(productId:$productId, payment: $payment, options:$options) {
ticket{
id
amount
merchantUid
impUid
pgTid
receiptUrl
paidAt
status
}
}
}
'''
MY_TICKETS = '''
query getMyTickets {
myTickets {
isDomesticCard
amount
merchantUid
receiptUrl
paidAt
cancelReceiptUrl
cancelledAt
status
product{
id
type
name
nameKo
nameEn
desc
descKo
descEn
startAt
finishAt
total
owner {
profile {
name
nameKo
nameEn
email
image
avatarUrl
}
}
price
isEditablePrice
isUniqueInType
active
cancelableDate
ticketOpenAt
ticketCloseAt
createdAt
updatedAt
purchaseCount
}
options
}
}
'''
TICKET = '''
query getTicket($globalId: ID, $id: Int) {
ticket(globalId: $globalId, id: $id) {
isDomesticCard
amount
merchantUid
receiptUrl
paidAt
cancelReceiptUrl
cancelledAt
status
product{
id
type
name
nameKo
nameEn
desc
descKo
descEn
startAt
finishAt
total
owner {
profile {
name
nameKo
nameEn
email
image
avatarUrl
}
}
price
isEditablePrice
isUniqueInType
active
cancelableDate
ticketOpenAt
ticketCloseAt
createdAt
updatedAt
purchaseCount
}
options
}
}
'''
CANCEL_TICKET = '''
mutation cancelTicket($ticketId: ID!) {
cancelTicket(ticketId:$ticketId) {
ticket{
id
status
impUid
pgTid
receiptUrl
paidAt
cancelReceiptUrl
cancelledAt
}
}
}
'''
| ticket_products = '\nquery getTicketProducts {\n tutorialProducts {\n id\n type\n name\n nameKo\n nameEn\n desc\n descKo\n descEn\n warning\n warningKo\n warningEn\n startAt\n finishAt\n total\n remainingCount\n isSoldOut\n owner {\n profile {\n name\n nameKo\n nameEn\n email\n image\n avatarUrl\n }\n }\n price\n isEditablePrice\n isUniqueInType\n active\n cancelableDate\n ticketOpenAt\n ticketCloseAt\n createdAt\n updatedAt\n purchaseCount\n isPurchased\n }\n conferenceProducts {\n id\n type\n name\n nameKo\n nameEn\n desc\n descKo\n descEn\n warning\n warningKo\n warningEn\n startAt\n finishAt\n total\n remainingCount\n isSoldOut\n owner {\n profile {\n name\n nameKo\n nameEn\n email\n image\n avatarUrl\n }\n }\n price\n isEditablePrice\n isUniqueInType\n active\n cancelableDate\n ticketOpenAt\n ticketCloseAt\n createdAt\n updatedAt\n purchaseCount\n isPurchased\n }\n}\n'
buy_ticket = '\nmutation BuyTicket($productId: ID!, $payment: PaymentInput!, $options: JSONString) {\n buyTicket(productId:$productId, payment: $payment, options:$options) {\n ticket{\n id\n amount\n merchantUid\n impUid\n pgTid\n receiptUrl\n paidAt\n status\n }\n }\n}\n'
my_tickets = '\nquery getMyTickets {\n myTickets {\n isDomesticCard\n amount\n merchantUid\n receiptUrl\n paidAt\n cancelReceiptUrl\n cancelledAt\n status\n \n product{\n id\n type\n name\n nameKo\n nameEn\n desc\n descKo\n descEn\n startAt\n finishAt\n total\n owner {\n profile {\n name\n nameKo\n nameEn\n email\n image\n avatarUrl\n }\n }\n price\n isEditablePrice\n isUniqueInType\n active\n cancelableDate\n ticketOpenAt\n ticketCloseAt\n createdAt\n updatedAt\n purchaseCount\n }\n options\n }\n}\n'
ticket = '\nquery getTicket($globalId: ID, $id: Int) {\n ticket(globalId: $globalId, id: $id) {\n isDomesticCard\n amount\n merchantUid\n receiptUrl\n paidAt\n cancelReceiptUrl\n cancelledAt\n status\n \n product{\n id\n type\n name\n nameKo\n nameEn\n desc\n descKo\n descEn\n startAt\n finishAt\n total\n owner {\n profile {\n name\n nameKo\n nameEn\n email\n image\n avatarUrl\n }\n }\n price\n isEditablePrice\n isUniqueInType\n active\n cancelableDate\n ticketOpenAt\n ticketCloseAt\n createdAt\n updatedAt\n purchaseCount\n }\n options\n }\n}\n'
cancel_ticket = '\nmutation cancelTicket($ticketId: ID!) {\n cancelTicket(ticketId:$ticketId) {\n ticket{\n id\n status\n impUid\n pgTid\n receiptUrl\n paidAt\n cancelReceiptUrl\n cancelledAt\n }\n }\n}\n' |
# This resets variables every time the Supreme While Loop is reset
# If you tried to mess around with this code, you'll find results/errors on the second time the While Loop is executed
# Messing with this code will generally not break anything on the first topic that Indra talks About
yesnorep = True
ignoreinteract = False
repeating = 0
cussed = False
sensitive = False
topicdone = False
nevermind = False
if ignoreinteract == False:
interactions = interactions + 1
if loveBonus == 3 and interest < 75:
print("Hey, " + name + ".")
sleep(1.5)
print("Recently I can't help but feel that all my love and affection for\nyou is being ignored")
sleep(2)
print("As if you didn't care about me.")
sleep(1.5)
loveBonus = 0
interest = interest - 5
ChangeMind()
if negloveBonus == 3 and interest > 150 and interactions >= 30:
print("Hey, " + name + ".")
sleep(1.5)
print("Remember that one time you said you loved me, and I rejected you?")
sleep(2.5)
print("Well...")
sleep(1.5)
print("I've decided to forget that and move on.")
sleep(1.75)
print("What's in the past is in the past, right?")
sleep(1.5)
print("We should forgive those who have wronged us in the past.")
sleep(2.5)
print("Now that I've gotten to know you more, I think you aren't actually that bad!")
sleep(2.5)
print("Maybe I should've given you a better chance...")
sleep(2.5)
print("Oh well...")
sleep(1.75)
print("Just wanted to clear that up!")
CustomRecord("Changed Mind", "Neglove --> Neutral", +3)
if os.path.exists(intlog) == False:
StartEdit()
exactcurrentdatelist = list((datetime.now()).timetuple())
currentdatelist = [exactcurrentdatelist[0],
exactcurrentdatelist[1], exactcurrentdatelist[2]]
lastlogdate = currentdatelist
if isinstance(birthdate, date):
if currentdate.year == nextyear and birthdate != date(1000, 1, 1):
if currentdate.month == birthdate.month and currentdate.day == birthdate.day:
age = age + 1
CustomRecord("Birthday", str(age), +10)
print("Wait a minute...")
sleep(2)
slowprint(lead_dots = True)
sleep(2)
print("Congratulations!")
sleep(1.5)
print("It seems that today is your birthday!")
sleep(1.55)
print(f"Looks like you're turning {age} today!")
sleep(2)
print("Good for you!")
sleep(1.5)
if age == 16:
print(f"Turning {age} is a very important milestone!")
sleep(1.75)
elif age == 18:
print(
f"Turning {age} is probably one of the most important milestones in one's life!")
sleep(2.5)
print("I've been working on something recently...")
sleep(2.75)
print("I would love to show it to you!")
sleep(1.75)
print("Let me see...")
sleep(1.75)
print("Oh! There it is!")
sleep(1.75)
notify("Happy Birthday!", f"Congratulations on turning {age}!")
sleep(2)
print("Do you like it?")
sleep(1.5)
print("I worked pretty hard trying to figure that out.")
sleep(2.5)
print("I Wonder what you'll do on this special occasion?")
sleep(2.25)
print ("Well, whatever it is, I hope you have fun!")
sleep(2.5)
Save()
| yesnorep = True
ignoreinteract = False
repeating = 0
cussed = False
sensitive = False
topicdone = False
nevermind = False
if ignoreinteract == False:
interactions = interactions + 1
if loveBonus == 3 and interest < 75:
print('Hey, ' + name + '.')
sleep(1.5)
print("Recently I can't help but feel that all my love and affection for\nyou is being ignored")
sleep(2)
print("As if you didn't care about me.")
sleep(1.5)
love_bonus = 0
interest = interest - 5
change_mind()
if negloveBonus == 3 and interest > 150 and (interactions >= 30):
print('Hey, ' + name + '.')
sleep(1.5)
print('Remember that one time you said you loved me, and I rejected you?')
sleep(2.5)
print('Well...')
sleep(1.5)
print("I've decided to forget that and move on.")
sleep(1.75)
print("What's in the past is in the past, right?")
sleep(1.5)
print('We should forgive those who have wronged us in the past.')
sleep(2.5)
print("Now that I've gotten to know you more, I think you aren't actually that bad!")
sleep(2.5)
print("Maybe I should've given you a better chance...")
sleep(2.5)
print('Oh well...')
sleep(1.75)
print('Just wanted to clear that up!')
custom_record('Changed Mind', 'Neglove --> Neutral', +3)
if os.path.exists(intlog) == False:
start_edit()
exactcurrentdatelist = list(datetime.now().timetuple())
currentdatelist = [exactcurrentdatelist[0], exactcurrentdatelist[1], exactcurrentdatelist[2]]
lastlogdate = currentdatelist
if isinstance(birthdate, date):
if currentdate.year == nextyear and birthdate != date(1000, 1, 1):
if currentdate.month == birthdate.month and currentdate.day == birthdate.day:
age = age + 1
custom_record('Birthday', str(age), +10)
print('Wait a minute...')
sleep(2)
slowprint(lead_dots=True)
sleep(2)
print('Congratulations!')
sleep(1.5)
print('It seems that today is your birthday!')
sleep(1.55)
print(f"Looks like you're turning {age} today!")
sleep(2)
print('Good for you!')
sleep(1.5)
if age == 16:
print(f'Turning {age} is a very important milestone!')
sleep(1.75)
elif age == 18:
print(f"Turning {age} is probably one of the most important milestones in one's life!")
sleep(2.5)
print("I've been working on something recently...")
sleep(2.75)
print('I would love to show it to you!')
sleep(1.75)
print('Let me see...')
sleep(1.75)
print('Oh! There it is!')
sleep(1.75)
notify('Happy Birthday!', f'Congratulations on turning {age}!')
sleep(2)
print('Do you like it?')
sleep(1.5)
print('I worked pretty hard trying to figure that out.')
sleep(2.5)
print("I Wonder what you'll do on this special occasion?")
sleep(2.25)
print('Well, whatever it is, I hope you have fun!')
sleep(2.5)
save() |
__author__ = "Lucas Grulich (grulich@uni-mainz.de)"
__version__ = "0.0.14"
# --- Globals ----------------------------------------------------------------------------------------------------------
MONOSCALE_SHADOWLEVEL = 1
OAP_FILE_EXTENSION = ".oap"
DEFAULT_TYPE = "ARRAY2D"
SLICE_SIZE = 64
# --- Markers ------------------------------------------------
MARKER = {
'poisson': 7, # Value of the poisson spot
'flood_fill': 8, # Value of the flood fill
}
# --- Colors -------------------------------------------------
COLOR = {
0: 0, # Shadow level 0 -> background color of images.
1: 100, # Shadow level 1 -> usually between 25% and 33%
2: 200, # Shadow level 2 -> light intensity of 50 %
3: 255, # Shadow level 3 -> usually between 66% and 75%
MARKER['poisson']: 50, # Poisson spot color
}
# --- Particle Types -----------------------------------------
UNDEFINED = b'u' # Not yet classified
INDEFINABLE = b'i' # Not possible to classify
ERRONEOUS = b'e' # Artefacts or erroneous images
SPHERE = b's' # Spherical particles
COLUMN = b'c' # Column-like particles
ROSETTE = b'r' # Rosettes
DENDRITE = b'd' # Dendrites
PLATE = b'p' # Plates
| __author__ = 'Lucas Grulich (grulich@uni-mainz.de)'
__version__ = '0.0.14'
monoscale_shadowlevel = 1
oap_file_extension = '.oap'
default_type = 'ARRAY2D'
slice_size = 64
marker = {'poisson': 7, 'flood_fill': 8}
color = {0: 0, 1: 100, 2: 200, 3: 255, MARKER['poisson']: 50}
undefined = b'u'
indefinable = b'i'
erroneous = b'e'
sphere = b's'
column = b'c'
rosette = b'r'
dendrite = b'd'
plate = b'p' |
# -*- coding: utf-8 -*-
class Java(object):
KEY = 'java'
LABEL = 'Java'
DEPENDENCIES = ['java', 'javac']
TEMP_DIR = 'java'
SUFFIX = '.java'
# javac {class_path} tmp/Estimator.java
# class_path = '-cp ./gson.jar'
CMD_COMPILE = 'javac {class_path} {src_dir}/{src_file}'
# java {class_path} Estimator <args>
# class_path = '-cp ./gson.jar:./tmp'
CMD_EXECUTE = 'java {class_path} {dest_dir}/{dest_file}'
| class Java(object):
key = 'java'
label = 'Java'
dependencies = ['java', 'javac']
temp_dir = 'java'
suffix = '.java'
cmd_compile = 'javac {class_path} {src_dir}/{src_file}'
cmd_execute = 'java {class_path} {dest_dir}/{dest_file}' |
#Dictionaries Challenge 22: Database Admin Program
print("Welcome to the Database Admin Program")
#Create a dictionary to hold all username:password key-value pairs
log_on_information = {
'mooman74':'alskes145',
'meramo1986':'kehns010101',
'nickyD':'world1star',
'george2':'booo3oha',
'admin00':'admin1234',
}
#Get user input
username = input("Enter your username: ")
#Simulate logging on...
#Get user password
if username in log_on_information.keys():
password = input("Enter your password: ")
if password == log_on_information[username]:
print("\nHello " + username + "! You are logged in!")
if username == 'admin00':
#Show the whole database to the admin account
print("\nHere is the current user database:")
for key, value in log_on_information.items():
print("Username: " + key + "\t\tPassword: " + value)
else:
#Allow standard user to change their password
password_change = input("Would you like to change your password (yes/no): ").lower().strip()
if password_change == 'yes':
new_password = input("What would you like your new password to be (min 8 chars): ")
if len(new_password) >= 8:
log_on_information[username] = new_password
else:
print(new_password + " is not the minimum eight characters.")
print("\n" + username + " your password is " + log_on_information[username] + ".")
else:
print("\nThank you, goodbye.")
#User did not enter their password correctly
else:
print("Password incorrect!")
#User not in database
else:
print("Username not in database. Goodbye.")
| print('Welcome to the Database Admin Program')
log_on_information = {'mooman74': 'alskes145', 'meramo1986': 'kehns010101', 'nickyD': 'world1star', 'george2': 'booo3oha', 'admin00': 'admin1234'}
username = input('Enter your username: ')
if username in log_on_information.keys():
password = input('Enter your password: ')
if password == log_on_information[username]:
print('\nHello ' + username + '! You are logged in!')
if username == 'admin00':
print('\nHere is the current user database:')
for (key, value) in log_on_information.items():
print('Username: ' + key + '\t\tPassword: ' + value)
else:
password_change = input('Would you like to change your password (yes/no): ').lower().strip()
if password_change == 'yes':
new_password = input('What would you like your new password to be (min 8 chars): ')
if len(new_password) >= 8:
log_on_information[username] = new_password
else:
print(new_password + ' is not the minimum eight characters.')
print('\n' + username + ' your password is ' + log_on_information[username] + '.')
else:
print('\nThank you, goodbye.')
else:
print('Password incorrect!')
else:
print('Username not in database. Goodbye.') |
class Inventory:
def __init__(self):
self._prisonKeys = False
self._sunflowerSeeds = False
self._guardiansMoney = False
self._guardiansSword = False
self._dragonsKey = False
@property
def prison_keys(self):
return self._prisonKeys
@prison_keys.setter
def prison_keys(self, value):
self._prisonKeys = value
@property
def sunflower_seeds(self):
return self._sunflowerSeeds
@sunflower_seeds.setter
def sunflower_seeds(self, value):
self._sunflowerSeeds = value
@property
def guardians_money(self):
return self._guardiansMoney
@guardians_money.setter
def guardians_money(self, value):
self._guardiansMoney
@property
def guardians_sword(self):
return self._guardiansSword
@guardians_sword.setter
def guardians_sword(self, value):
self._guardiansSword = value
@property
def dragons_key(self):
return self._dragonsKey
@dragons_key.setter
def dragons_key(self, value):
self._dragonsKey = value
| class Inventory:
def __init__(self):
self._prisonKeys = False
self._sunflowerSeeds = False
self._guardiansMoney = False
self._guardiansSword = False
self._dragonsKey = False
@property
def prison_keys(self):
return self._prisonKeys
@prison_keys.setter
def prison_keys(self, value):
self._prisonKeys = value
@property
def sunflower_seeds(self):
return self._sunflowerSeeds
@sunflower_seeds.setter
def sunflower_seeds(self, value):
self._sunflowerSeeds = value
@property
def guardians_money(self):
return self._guardiansMoney
@guardians_money.setter
def guardians_money(self, value):
self._guardiansMoney
@property
def guardians_sword(self):
return self._guardiansSword
@guardians_sword.setter
def guardians_sword(self, value):
self._guardiansSword = value
@property
def dragons_key(self):
return self._dragonsKey
@dragons_key.setter
def dragons_key(self, value):
self._dragonsKey = value |
def snail(array):
temp_list = []
if array and len(array) > 1:
if isinstance(array[0], list):
temp_list.extend(array[0])
else:
temp_list.append(array[0])
array.pop(0)
for lis_index in range(len(array)):
temp_list.append(array[lis_index][-1])
array[lis_index].pop(-1)
if isinstance(array[-1], list):
temp_list.extend(array[-1][::-1])
else:
temp_list.append(array[-1])
array.pop(-1)
for lis_index in range(len(array)):
temp_list.append(array[::-1][lis_index][0])
array[::-1][lis_index].pop(0)
temp_list.extend(snail(array))
return temp_list
elif array:
return array[0]
else:
return []
| def snail(array):
temp_list = []
if array and len(array) > 1:
if isinstance(array[0], list):
temp_list.extend(array[0])
else:
temp_list.append(array[0])
array.pop(0)
for lis_index in range(len(array)):
temp_list.append(array[lis_index][-1])
array[lis_index].pop(-1)
if isinstance(array[-1], list):
temp_list.extend(array[-1][::-1])
else:
temp_list.append(array[-1])
array.pop(-1)
for lis_index in range(len(array)):
temp_list.append(array[::-1][lis_index][0])
array[::-1][lis_index].pop(0)
temp_list.extend(snail(array))
return temp_list
elif array:
return array[0]
else:
return [] |
def get_scale_factor(input_units, sampling_rate=None):
if input_units=='ms':
return 1e3
elif input_units=='s':
return 1
elif input_units=='samples':
if sampling_rate is None:
raise ValueError('Must provide sampling_rate if input_units=="samples"')
return sampling_rate | def get_scale_factor(input_units, sampling_rate=None):
if input_units == 'ms':
return 1000.0
elif input_units == 's':
return 1
elif input_units == 'samples':
if sampling_rate is None:
raise value_error('Must provide sampling_rate if input_units=="samples"')
return sampling_rate |
config = {'luna_raw':'/root/ssd_data/LUNA/',
'luna_data':'/research/dept8/jzwang/dataset/LUNA16/combined/',
'preprocess_result_path':'/research/dept8/jzwang/dataset/HKU/preprocessed/numpy/',
'luna_abbr':'./labels/shorter.csv',
'luna_label':'./labels/lunaqualified_all.csv',
'luna_candidate_label':'./labels/luna_candidate_all.csv',
'lidc_xml':'./lidc_xml',
'preprocessing_backend':'python'
}
| config = {'luna_raw': '/root/ssd_data/LUNA/', 'luna_data': '/research/dept8/jzwang/dataset/LUNA16/combined/', 'preprocess_result_path': '/research/dept8/jzwang/dataset/HKU/preprocessed/numpy/', 'luna_abbr': './labels/shorter.csv', 'luna_label': './labels/lunaqualified_all.csv', 'luna_candidate_label': './labels/luna_candidate_all.csv', 'lidc_xml': './lidc_xml', 'preprocessing_backend': 'python'} |
# https://www.codechef.com/SEPT20B/problems/TREE2
T = int(input())
for _ in range(T):
x = input()
l = list(map(int, input().split()))
s = set(l)
m = min(s)
ops = len(s)
if m: print(ops)
else: print(ops-1)
| t = int(input())
for _ in range(T):
x = input()
l = list(map(int, input().split()))
s = set(l)
m = min(s)
ops = len(s)
if m:
print(ops)
else:
print(ops - 1) |
class SessionGenerator(object):
session: "TrainingSession"
def __init__(self,
training_cycle,
fatigue_rating,
current_training_max):
load_size = self.determine_load_size(
fatigue_rating,
training_cycle.previous_large_load_training_max,
current_training_max
)
self.session = self.generate_session(training_cycle.config,
load_size,
current_training_max)
training_cycle.previous_training_max = current_training_max
if fatigue_rating == "low":
training_cycle.previous_large_load_training_max = current_training_max
training_cycle.save()
def determine_load_size(self,
fatigue_rating,
previous_large_load_training_max,
current_training_max):
if current_training_max > previous_large_load_training_max: # TM improved from last fresh session.
return {
'low': 'large',
'medium': 'medium',
'high': 'medium'
}[fatigue_rating]
else: # TM stagnated or regressed from last fresh session.
return {
'low': 'supramaximal',
'medium': 'medium',
'high': 'small'
}[fatigue_rating]
def generate_session(self, config, load_size, training_max):
def calculate_set_quantity(reps_per_set, intensity, inol):
'''
Calculates the sets required to accomplish the work desired.
`extra_reps` is how many repetitions are left over after the
calculated number of flat (specified repetition quantity) sets. For
instance, if the user needs to complete 18 repetitions in sets of
5, `extra_reps` will be 3.
Returns a tuple comprising `(sets, extra_reps)`.
'''
total_reps = round(inol * 100 * (1 - intensity))
extra_reps = round(total_reps % reps_per_set)
sets = round((total_reps - extra_reps) / reps_per_set)
return sets, extra_reps
if load_size == 'supramaximal':
config.inol_target_large += config.supramaximal_inol_increment
config.save()
load_size = 'large'
load_size_map = {
'reps_per_set': {
'small': config.reps_per_set_small,
'medium': config.reps_per_set_medium,
'large': config.reps_per_set_large
},
'inol_targets': {
'small': config.inol_target_small,
'medium': config.inol_target_medium,
'large': config.inol_target_large
},
'intensity_targets': {
'small': config.intensity_target_small,
'medium': config.intensity_target_medium,
'large': config.intensity_target_large
}
}
sets, extra_reps = calculate_set_quantity(
load_size_map['reps_per_set'][load_size],
load_size_map['intensity_targets'][load_size],
load_size_map['inol_targets'][load_size]
)
session = TrainingSession(
sets=sets,
reps_per_set=load_size_map['reps_per_set'][load_size],
extra_reps=extra_reps,
intensity=load_size_map['intensity_targets'][load_size],
training_max=training_max
)
return session
class TrainingSession(object):
sets: int
reps_per_set: int
extra_reps: int
intensity: float
training_max: float
def __init__(self, **kwargs):
for kw in kwargs:
setattr(self, kw, kwargs[kw])
@property
def e1rm(self):
return self.training_max / 0.9
@property
def load(self):
return self.intensity * self.e1rm
| class Sessiongenerator(object):
session: 'TrainingSession'
def __init__(self, training_cycle, fatigue_rating, current_training_max):
load_size = self.determine_load_size(fatigue_rating, training_cycle.previous_large_load_training_max, current_training_max)
self.session = self.generate_session(training_cycle.config, load_size, current_training_max)
training_cycle.previous_training_max = current_training_max
if fatigue_rating == 'low':
training_cycle.previous_large_load_training_max = current_training_max
training_cycle.save()
def determine_load_size(self, fatigue_rating, previous_large_load_training_max, current_training_max):
if current_training_max > previous_large_load_training_max:
return {'low': 'large', 'medium': 'medium', 'high': 'medium'}[fatigue_rating]
else:
return {'low': 'supramaximal', 'medium': 'medium', 'high': 'small'}[fatigue_rating]
def generate_session(self, config, load_size, training_max):
def calculate_set_quantity(reps_per_set, intensity, inol):
"""
Calculates the sets required to accomplish the work desired.
`extra_reps` is how many repetitions are left over after the
calculated number of flat (specified repetition quantity) sets. For
instance, if the user needs to complete 18 repetitions in sets of
5, `extra_reps` will be 3.
Returns a tuple comprising `(sets, extra_reps)`.
"""
total_reps = round(inol * 100 * (1 - intensity))
extra_reps = round(total_reps % reps_per_set)
sets = round((total_reps - extra_reps) / reps_per_set)
return (sets, extra_reps)
if load_size == 'supramaximal':
config.inol_target_large += config.supramaximal_inol_increment
config.save()
load_size = 'large'
load_size_map = {'reps_per_set': {'small': config.reps_per_set_small, 'medium': config.reps_per_set_medium, 'large': config.reps_per_set_large}, 'inol_targets': {'small': config.inol_target_small, 'medium': config.inol_target_medium, 'large': config.inol_target_large}, 'intensity_targets': {'small': config.intensity_target_small, 'medium': config.intensity_target_medium, 'large': config.intensity_target_large}}
(sets, extra_reps) = calculate_set_quantity(load_size_map['reps_per_set'][load_size], load_size_map['intensity_targets'][load_size], load_size_map['inol_targets'][load_size])
session = training_session(sets=sets, reps_per_set=load_size_map['reps_per_set'][load_size], extra_reps=extra_reps, intensity=load_size_map['intensity_targets'][load_size], training_max=training_max)
return session
class Trainingsession(object):
sets: int
reps_per_set: int
extra_reps: int
intensity: float
training_max: float
def __init__(self, **kwargs):
for kw in kwargs:
setattr(self, kw, kwargs[kw])
@property
def e1rm(self):
return self.training_max / 0.9
@property
def load(self):
return self.intensity * self.e1rm |
filename = "CCaseScene.unity"
subA = ("<<<<<<< Updated upstream\n")
subB = ("=======\n")
subC = (">>>>>>> Stashed changes\n")
with open(filename, 'r+') as f:
data = f.read()
count = 0
# Iterate starts
while True:
# Find indexes
start, end = data.index(subA), data.index(subB)
# Slice string
data = data[:start] + data[end:]
data = data.replace(subB,"", 1).replace(subC,"", 1)
# Iterate ends
count += 1
if(data.find(subA) < 0):
# if(count > 4):
break
# return pointer to top of file so we can re-write the content with replaced string
f.seek(0)
# Deletes
f.truncate()
# re-write the content with the updated content
f.write(data)
# close file
f.close()
| filename = 'CCaseScene.unity'
sub_a = '<<<<<<< Updated upstream\n'
sub_b = '=======\n'
sub_c = '>>>>>>> Stashed changes\n'
with open(filename, 'r+') as f:
data = f.read()
count = 0
while True:
(start, end) = (data.index(subA), data.index(subB))
data = data[:start] + data[end:]
data = data.replace(subB, '', 1).replace(subC, '', 1)
count += 1
if data.find(subA) < 0:
break
f.seek(0)
f.truncate()
f.write(data)
f.close() |
# New feature in Python 3.8, assignment expressions (known as the walrus operator)
# Assignment expression are written with a new notation (:=). This operator is often
# called the walrus operator as it resembles the eyes and tusks of a walrus on its side.
#
# Video explanation: https://realpython.com/lessons/assignment-expressions/
# PEP 572 https://www.python.org/dev/peps/pep-0572/
# Assignment expressions allow you to assign and return a value in the same expression.
walrus = True
print(walrus) # True
# In Python 3.8, we can combine these two expressions. It will assign walrus to True
# and return True
print(walrus := True) # True
# Another example with a while loop. This program allows you to input a text until you
# input the word quit.
inputs = list()
while True:
current = input("Write something: ")
if current == "quit":
break
inputs.append(current)
# With assignment expressions this code can be simplified
inputs = list()
while (current := input("Write something: ")) != "quit":
inputs.append(current)
| walrus = True
print(walrus)
print((walrus := True))
inputs = list()
while True:
current = input('Write something: ')
if current == 'quit':
break
inputs.append(current)
inputs = list()
while (current := input('Write something: ')) != 'quit':
inputs.append(current) |
def solve():
A = int(input())
row = (A + 2) // 3 # from (100 100) to (100+row-1, 100)
board = []
for _ in range(1000):
board.append([0]*1000)
I = J = 100 # [2, 999]
for _ in range(1000):
print('{} {}'.format(I, J))
I_, J_ = map(int, input().split())
if I_ == 0 and J_ == 0:
return
board[I_][J_] = 1
while board[I-1][J-1] and board[I-1][J] and board[I-1][J+1] and I < 100+row-3:
I += 1
input()
if __name__ == '__main__':
T = int(input())
for t in range(T):
solve() | def solve():
a = int(input())
row = (A + 2) // 3
board = []
for _ in range(1000):
board.append([0] * 1000)
i = j = 100
for _ in range(1000):
print('{} {}'.format(I, J))
(i_, j_) = map(int, input().split())
if I_ == 0 and J_ == 0:
return
board[I_][J_] = 1
while board[I - 1][J - 1] and board[I - 1][J] and board[I - 1][J + 1] and (I < 100 + row - 3):
i += 1
input()
if __name__ == '__main__':
t = int(input())
for t in range(T):
solve() |
deleteObject = True
editObject = True
getObject = {'id': 1234,
'fingerprint': 'aa:bb:cc:dd',
'label': 'label',
'notes': 'notes',
'key': 'ssh-rsa AAAAB3N...pa67 user@example.com'}
createObject = getObject
getAllObjects = [getObject]
| delete_object = True
edit_object = True
get_object = {'id': 1234, 'fingerprint': 'aa:bb:cc:dd', 'label': 'label', 'notes': 'notes', 'key': 'ssh-rsa AAAAB3N...pa67 user@example.com'}
create_object = getObject
get_all_objects = [getObject] |
'''
Created on Apr 27, 2015
@author: DHawkins
'''
POSITION = [
{'abbrev': 'al', 'column': 7, 'row': 7, 'state': 'alabama'},
{'abbrev': 'ak', 'column': 1, 'row': 8, 'state': 'alaska'},
{'abbrev': 'az', 'column': 2, 'row': 6, 'state': 'arizona'},
{'abbrev': 'ar', 'column': 5, 'row': 6, 'state': 'arkansas'},
{'abbrev': 'ca', 'column': 1, 'row': 5, 'state': 'california'},
{'abbrev': 'co', 'column': 3, 'row': 5, 'state': 'colorado'},
{'abbrev': 'ct', 'column': 10, 'row': 3, 'state': 'connecticut'},
{'abbrev': 'dc', 'column': 9, 'row': 6, 'state': 'district of columbia'},
{'abbrev': 'de', 'column': 10, 'row': 5, 'state': 'delaware'},
{'abbrev': 'fl', 'column': 8, 'row': 8, 'state': 'florida'},
{'abbrev': 'ga', 'column': 8, 'row': 7, 'state': 'georgia'},
{'abbrev': 'hi', 'column': 2, 'row': 8, 'state': 'hawaii'},
{'abbrev': 'id', 'column': 2, 'row': 3, 'state': 'idaho'},
{'abbrev': 'il', 'column': 6, 'row': 4, 'state': 'illinois'},
{'abbrev': 'in', 'column': 7, 'row': 4, 'state': 'indiana'},
{'abbrev': 'ia', 'column': 5, 'row': 4, 'state': 'iowa'},
{'abbrev': 'ks', 'column': 4, 'row': 6, 'state': 'kansas'},
{'abbrev': 'ky', 'column': 6, 'row': 5, 'state': 'kentucky'},
{'abbrev': 'la', 'column': 5, 'row': 7, 'state': 'louisiana'},
{'abbrev': 'me', 'column': 11, 'row': 1, 'state': 'maine'},
{'abbrev': 'md', 'column': 9, 'row': 5, 'state': 'maryland'},
{'abbrev': 'ma', 'column': 11, 'row': 2, 'state': 'massachusetts'},
{'abbrev': 'mi', 'column': 7, 'row': 3, 'state': 'michigan'},
{'abbrev': 'mn', 'column': 5, 'row': 3, 'state': 'minnesota'},
{'abbrev': 'ms', 'column': 6, 'row': 7, 'state': 'mississippi'},
{'abbrev': 'mo', 'column': 5, 'row': 5, 'state': 'missouri'},
{'abbrev': 'mt', 'column': 3, 'row': 3, 'state': 'montana'},
{'abbrev': 'ne', 'column': 4, 'row': 5, 'state': 'nebraska'},
{'abbrev': 'nv', 'column': 2, 'row': 4, 'state': 'nevada'},
{'abbrev': 'nh', 'column': 10, 'row': 2, 'state': 'new hampshire'},
{'abbrev': 'nj', 'column': 10, 'row': 4, 'state': 'new jersey'},
{'abbrev': 'nm', 'column': 3, 'row': 6, 'state': 'new mexico'},
{'abbrev': 'ny', 'column': 9, 'row': 3, 'state': 'new york'},
{'abbrev': 'nc', 'column': 7, 'row': 6, 'state': 'north carolina'},
{'abbrev': 'nd', 'column': 4, 'row': 3, 'state': 'north dakota'},
{'abbrev': 'oh', 'column': 8, 'row': 4, 'state': 'ohio'},
{'abbrev': 'ok', 'column': 4, 'row': 7, 'state': 'oklahoma'},
{'abbrev': 'or', 'column': 1, 'row': 4, 'state': 'oregon'},
{'abbrev': 'pa', 'column': 9, 'row': 4, 'state': 'pennsylvania'},
{'abbrev': 'ri', 'column': 11, 'row': 3, 'state': 'rhode island'},
{'abbrev': 'sc', 'column': 8, 'row': 6, 'state': 'south carolina'},
{'abbrev': 'sd', 'column': 4, 'row': 4, 'state': 'south dakota'},
{'abbrev': 'tn', 'column': 6, 'row': 6, 'state': 'tennessee'},
{'abbrev': 'tx', 'column': 4, 'row': 8, 'state': 'texas'},
{'abbrev': 'ut', 'column': 2, 'row': 5, 'state': 'utah'},
{'abbrev': 'vt', 'column': 9, 'row': 2, 'state': 'vermont'},
{'abbrev': 'va', 'column': 8, 'row': 5, 'state': 'virginia'},
{'abbrev': 'wa', 'column': 1, 'row': 3, 'state': 'washington'},
{'abbrev': 'wv', 'column': 7, 'row': 5, 'state': 'west virginia'},
{'abbrev': 'wi', 'column': 6, 'row': 3, 'state': 'wisconsin'},
{'abbrev': 'wy', 'column': 3, 'row': 4, 'state': 'wyoming'}
]
| """
Created on Apr 27, 2015
@author: DHawkins
"""
position = [{'abbrev': 'al', 'column': 7, 'row': 7, 'state': 'alabama'}, {'abbrev': 'ak', 'column': 1, 'row': 8, 'state': 'alaska'}, {'abbrev': 'az', 'column': 2, 'row': 6, 'state': 'arizona'}, {'abbrev': 'ar', 'column': 5, 'row': 6, 'state': 'arkansas'}, {'abbrev': 'ca', 'column': 1, 'row': 5, 'state': 'california'}, {'abbrev': 'co', 'column': 3, 'row': 5, 'state': 'colorado'}, {'abbrev': 'ct', 'column': 10, 'row': 3, 'state': 'connecticut'}, {'abbrev': 'dc', 'column': 9, 'row': 6, 'state': 'district of columbia'}, {'abbrev': 'de', 'column': 10, 'row': 5, 'state': 'delaware'}, {'abbrev': 'fl', 'column': 8, 'row': 8, 'state': 'florida'}, {'abbrev': 'ga', 'column': 8, 'row': 7, 'state': 'georgia'}, {'abbrev': 'hi', 'column': 2, 'row': 8, 'state': 'hawaii'}, {'abbrev': 'id', 'column': 2, 'row': 3, 'state': 'idaho'}, {'abbrev': 'il', 'column': 6, 'row': 4, 'state': 'illinois'}, {'abbrev': 'in', 'column': 7, 'row': 4, 'state': 'indiana'}, {'abbrev': 'ia', 'column': 5, 'row': 4, 'state': 'iowa'}, {'abbrev': 'ks', 'column': 4, 'row': 6, 'state': 'kansas'}, {'abbrev': 'ky', 'column': 6, 'row': 5, 'state': 'kentucky'}, {'abbrev': 'la', 'column': 5, 'row': 7, 'state': 'louisiana'}, {'abbrev': 'me', 'column': 11, 'row': 1, 'state': 'maine'}, {'abbrev': 'md', 'column': 9, 'row': 5, 'state': 'maryland'}, {'abbrev': 'ma', 'column': 11, 'row': 2, 'state': 'massachusetts'}, {'abbrev': 'mi', 'column': 7, 'row': 3, 'state': 'michigan'}, {'abbrev': 'mn', 'column': 5, 'row': 3, 'state': 'minnesota'}, {'abbrev': 'ms', 'column': 6, 'row': 7, 'state': 'mississippi'}, {'abbrev': 'mo', 'column': 5, 'row': 5, 'state': 'missouri'}, {'abbrev': 'mt', 'column': 3, 'row': 3, 'state': 'montana'}, {'abbrev': 'ne', 'column': 4, 'row': 5, 'state': 'nebraska'}, {'abbrev': 'nv', 'column': 2, 'row': 4, 'state': 'nevada'}, {'abbrev': 'nh', 'column': 10, 'row': 2, 'state': 'new hampshire'}, {'abbrev': 'nj', 'column': 10, 'row': 4, 'state': 'new jersey'}, {'abbrev': 'nm', 'column': 3, 'row': 6, 'state': 'new mexico'}, {'abbrev': 'ny', 'column': 9, 'row': 3, 'state': 'new york'}, {'abbrev': 'nc', 'column': 7, 'row': 6, 'state': 'north carolina'}, {'abbrev': 'nd', 'column': 4, 'row': 3, 'state': 'north dakota'}, {'abbrev': 'oh', 'column': 8, 'row': 4, 'state': 'ohio'}, {'abbrev': 'ok', 'column': 4, 'row': 7, 'state': 'oklahoma'}, {'abbrev': 'or', 'column': 1, 'row': 4, 'state': 'oregon'}, {'abbrev': 'pa', 'column': 9, 'row': 4, 'state': 'pennsylvania'}, {'abbrev': 'ri', 'column': 11, 'row': 3, 'state': 'rhode island'}, {'abbrev': 'sc', 'column': 8, 'row': 6, 'state': 'south carolina'}, {'abbrev': 'sd', 'column': 4, 'row': 4, 'state': 'south dakota'}, {'abbrev': 'tn', 'column': 6, 'row': 6, 'state': 'tennessee'}, {'abbrev': 'tx', 'column': 4, 'row': 8, 'state': 'texas'}, {'abbrev': 'ut', 'column': 2, 'row': 5, 'state': 'utah'}, {'abbrev': 'vt', 'column': 9, 'row': 2, 'state': 'vermont'}, {'abbrev': 'va', 'column': 8, 'row': 5, 'state': 'virginia'}, {'abbrev': 'wa', 'column': 1, 'row': 3, 'state': 'washington'}, {'abbrev': 'wv', 'column': 7, 'row': 5, 'state': 'west virginia'}, {'abbrev': 'wi', 'column': 6, 'row': 3, 'state': 'wisconsin'}, {'abbrev': 'wy', 'column': 3, 'row': 4, 'state': 'wyoming'}] |
# A ring buffer is a non-growable buffer with a fixed size. When the ring buffer is full
# and a new element is inserted, the oldest element in the ring buffer is overwritten with
# the newest element. This kind of data structure is very useful for use cases such as
# storing logs and history information, where you typically want to store information up
# until it reaches a certain age, after which you don't care about it anymore and don't
# mind seeing it overwritten by newer data.
# Implement this behavior in the RingBuffer class. RingBuffer has two methods, append and
# get. The append method adds elements to the buffer. The get method returns all of the
# elements in the buffer in a list in their given order. It should not return any None
# values in the list even if they are present in the ring buffer.
class RingBuffer:
def __init__(self, capacity):
self.capacity = capacity
self.current = 0
self.storage = [None] * capacity
def append(self, item):
if self.current == self.capacity:
self.current = 0
self.storage[self.current] = item
self.current += 1
def get(self):
temp_list = []
for item in self.storage:
if item:
temp_list.append(item)
return temp_list
buffer = RingBuffer(3)
buffer.get() # should return []
buffer.append("a")
buffer.append("b")
buffer.append("c")
print(
f"Should return \"['a', 'b', 'c']\" \t --> \t {buffer.get()}"
) # should return ['a', 'b', 'c']
# 'd' overwrites the oldest value in the ring buffer, which is 'a'
buffer.append("d")
print(
f"Should return \"['d', 'b', 'c']\" \t --> \t {buffer.get()}"
) # should return ['d', 'b', 'c']
buffer.append("e")
buffer.append("f")
print(
f"Should return \"['d', 'e', 'f']\" \t --> \t {buffer.get()}"
) # should return ['d', 'e', 'f']
# Testing
# http://pythontutor.com/visualize.html#code=class%20RingBuffer%3A%0A%20%20%20%20def%20__init__%28self,%20capacity%29%3A%0A%20%20%20%20%20%20%20%20self.capacity%20%3D%20capacity%0A%20%20%20%20%20%20%20%20self.current%20%3D%200%0A%20%20%20%20%20%20%20%20self.storage%20%3D%20%5BNone%5D%20*%20capacity%0A%0A%20%20%20%20def%20append%28self,%20item%29%3A%0A%20%20%20%20%20%20%20%20if%20self.current%20%3D%3D%20self.capacity%3A%0A%20%20%20%20%20%20%20%20%20%20%20%20self.current%20%3D%200%0A%0A%20%20%20%20%20%20%20%20self.storage%5Bself.current%5D%20%3D%20item%0A%20%20%20%20%20%20%20%20self.current%20%2B%3D%201%0A%0A%20%20%20%20def%20get%28self%29%3A%0A%20%20%20%20%20%20%20%20temp_list%20%3D%20%5B%5D%0A%20%20%20%20%20%20%20%20for%20item%20in%20self.storage%3A%0A%20%20%20%20%20%20%20%20%20%20%20%20if%20item%3A%0A%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20%20temp_list.append%28item%29%0A%20%20%20%20%20%20%20%20return%20temp_list%0A%0A%0Abuffer%20%3D%20RingBuffer%283%29%0A%0Abuffer.get%28%29%20%20%20%23%20should%20return%20%5B%5D%0A%0Abuffer.append%28'a'%29%0Abuffer.append%28'b'%29%0Abuffer.append%28'c'%29%0A%0Aprint%28f%22Should%20return%20%5C%22%5B'a',%20'b',%20'c'%5D%5C%22%20%5Ct%20--%3E%20%5Ct%20%7Bbuffer.get%28%29%7D%22%29%20%20%20%23%20should%20return%20%5B'a',%20'b',%20'c'%5D%0A%0A%23%20'd'%20overwrites%20the%20oldest%20value%20in%20the%20ring%20buffer,%20which%20is%20'a'%0Abuffer.append%28'd'%29%0A%0Aprint%28f%22Should%20return%20%5C%22%5B'd',%20'b',%20'c'%5D%5C%22%20%5Ct%20--%3E%20%5Ct%20%7Bbuffer.get%28%29%7D%22%29%20%20%20%23%20should%20return%20%5B'd',%20'b',%20'c'%5D%0A%0Abuffer.append%28'e'%29%0Abuffer.append%28'f'%29%0A%0Aprint%28f%22Should%20return%20%5C%22%5B'd',%20'e',%20'f'%5D%5C%22%20%5Ct%20--%3E%20%5Ct%20%7Bbuffer.get%28%29%7D%22%29%20%20%20%23%20should%20return%20%5B'd',%20'e',%20'f'%5D%0A&cumulative=false&curInstr=52&heapPrimitives=nevernest&mode=display&origin=opt-frontend.js&py=3&rawInputLstJSON=%5B%5D&textReferences=false
| class Ringbuffer:
def __init__(self, capacity):
self.capacity = capacity
self.current = 0
self.storage = [None] * capacity
def append(self, item):
if self.current == self.capacity:
self.current = 0
self.storage[self.current] = item
self.current += 1
def get(self):
temp_list = []
for item in self.storage:
if item:
temp_list.append(item)
return temp_list
buffer = ring_buffer(3)
buffer.get()
buffer.append('a')
buffer.append('b')
buffer.append('c')
print(f"""Should return "['a', 'b', 'c']" \t --> \t {buffer.get()}""")
buffer.append('d')
print(f"""Should return "['d', 'b', 'c']" \t --> \t {buffer.get()}""")
buffer.append('e')
buffer.append('f')
print(f"""Should return "['d', 'e', 'f']" \t --> \t {buffer.get()}""") |
HEADER_JSON_CONTENT = {
'Content-type': 'application/json', 'Accept': 'text/plain'
}
SUCCESS_MESSAGES = {
201: "Created Propertie: {title} in {provinces} province(s)",
}
ERROR_MESSAGES = {
422: {
'message': "Please, verify data for register this propertie"
},
404: {
'message': "Provide an valid ID to retrive the queried propertie."
},
}
MIN_BEDS = 1
MAX_BEDS = 5
MIN_BATHS = 1
MAX_BATHS = 5
MIN_SQUARE_METERS = 20
MAX_SQUARE_METERS = MIN_SQUARE_METERS * 12
MIN_LONGITUDE = 0
MAX_LONGITUDE = 1400
MIN_LATITUDE = 0
MAX_LATITUDE = 1000
PROVINCES = {
'Gode': {
'boundaries': {
'upperLeft': {
'x': 0,
'y': 1000,
},
'bottomRight': {
'x': 600,
'y': 500,
},
}
},
'Ruja': {
'boundaries': {
'upperLeft': {
'x': 400,
'y': 1000,
},
'bottomRight': {
'x': 1100,
'y': 500,
},
}
},
'Jaby': {
'boundaries': {
'upperLeft': {
'x': 1100,
'y': 1000,
},
'bottomRight': {
'x': 1400,
'y': 500,
},
}
},
'Scavy': {
'boundaries': {
'upperLeft': {
'x': 0,
'y': 500,
},
'bottomRight': {
'x': 600,
'y': 0,
},
}
},
'Groola': {
'boundaries': {
'upperLeft': {
'x': 600,
'y': 500,
},
'bottomRight': {
'x': 800,
'y': 0,
},
}
},
'Nova': {
'boundaries': {
'upperLeft': {
'x': 800,
'y': 500,
},
'bottomRight': {
'x': 1400,
'y': 0,
},
}
}
}
| header_json_content = {'Content-type': 'application/json', 'Accept': 'text/plain'}
success_messages = {201: 'Created Propertie: {title} in {provinces} province(s)'}
error_messages = {422: {'message': 'Please, verify data for register this propertie'}, 404: {'message': 'Provide an valid ID to retrive the queried propertie.'}}
min_beds = 1
max_beds = 5
min_baths = 1
max_baths = 5
min_square_meters = 20
max_square_meters = MIN_SQUARE_METERS * 12
min_longitude = 0
max_longitude = 1400
min_latitude = 0
max_latitude = 1000
provinces = {'Gode': {'boundaries': {'upperLeft': {'x': 0, 'y': 1000}, 'bottomRight': {'x': 600, 'y': 500}}}, 'Ruja': {'boundaries': {'upperLeft': {'x': 400, 'y': 1000}, 'bottomRight': {'x': 1100, 'y': 500}}}, 'Jaby': {'boundaries': {'upperLeft': {'x': 1100, 'y': 1000}, 'bottomRight': {'x': 1400, 'y': 500}}}, 'Scavy': {'boundaries': {'upperLeft': {'x': 0, 'y': 500}, 'bottomRight': {'x': 600, 'y': 0}}}, 'Groola': {'boundaries': {'upperLeft': {'x': 600, 'y': 500}, 'bottomRight': {'x': 800, 'y': 0}}}, 'Nova': {'boundaries': {'upperLeft': {'x': 800, 'y': 500}, 'bottomRight': {'x': 1400, 'y': 0}}}} |
jogador = {}
soma = 0
gols = []
jogador['nome'] = str(input('Nome do Jogador: '))
jogador['njogos'] = int(input(f'Quanta partidas {jogador["nome"]} Jogou?'))
for v in range(0, jogador['njogos']):
temp = int(input(f'Quantos gols na partida {v}? '))
soma += temp
gols.append(temp)
jogador['gols'] = gols[:]
print('-=' * 30)
print(jogador)
print('-=' * 30)
print(f'O campo nome tem o valor {jogador["nome"]}')
print(f'O campo gols tem o valor {gols}')
print(f'O campo total tem o valor {soma}')
print('-=' * 30)
print(f'O jogador {jogador["nome"]} jogou {jogador["njogos"]} partidas.')
for i, v in enumerate(gols):
print(f' => Na partida {i}, fez {v} gols')
print(f'Foi um total de {soma}')
| jogador = {}
soma = 0
gols = []
jogador['nome'] = str(input('Nome do Jogador: '))
jogador['njogos'] = int(input(f"Quanta partidas {jogador['nome']} Jogou?"))
for v in range(0, jogador['njogos']):
temp = int(input(f'Quantos gols na partida {v}? '))
soma += temp
gols.append(temp)
jogador['gols'] = gols[:]
print('-=' * 30)
print(jogador)
print('-=' * 30)
print(f"O campo nome tem o valor {jogador['nome']}")
print(f'O campo gols tem o valor {gols}')
print(f'O campo total tem o valor {soma}')
print('-=' * 30)
print(f"O jogador {jogador['nome']} jogou {jogador['njogos']} partidas.")
for (i, v) in enumerate(gols):
print(f' => Na partida {i}, fez {v} gols')
print(f'Foi um total de {soma}') |
# -*- coding: utf-8 -*-
# This file is generated from NI-TClk API metadata version 255.0.0d0
functions = {
'ConfigureForHomogeneousTriggers': {
'documentation': {
'description': '\nConfigures the attributes commonly required for the TClk synchronization\nof device sessions with homogeneous triggers in a single PXI chassis or\na single PC. Use niTClk_ConfigureForHomogeneousTriggers to configure\nthe attributes for the reference clocks, start triggers, reference\ntriggers, script triggers, and pause triggers. If\nniTClk_ConfigureForHomogeneousTriggers cannot perform all the steps\nappropriate for the given sessions, it returns an error. If an error is\nreturned, use the instrument driver functions and attributes for signal\nrouting, along with the following NI-TClk attributes:\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_REF_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION\nniTClk_ConfigureForHomogeneousTriggers affects the following clocks and\ntriggers: - Reference clocks - Start triggers - Reference triggers -\nScript triggers - Pause triggers Reference Clocks\nniTClk_ConfigureForHomogeneousTriggers configures the reference clocks\nif they are needed. Specifically, if the internal sample clocks or\ninternal sample clock timebases are used, and the reference clock source\nis not configured--or is set to None (no trigger\nconfigured)--niTClk_ConfigureForHomogeneousTriggers configures the\nfollowing: PXI--The reference clock source on all devices is set to be\nthe 10 MHz PXI backplane clock (PXI_CLK10). PCI--One of the devices\nexports its 10 MHz onboard reference clock to RTSI 7. The reference\nclock source on all devices is set to be RTSI 7. Note: If the reference\nclock source is set to a value other than None,\nniTClk_ConfigureForHomogeneousTriggers cannot configure the reference\nclock source. Start Triggers If the start trigger is set to None (no\ntrigger configured) for all sessions, the sessions are configured to\nshare the start trigger. The start trigger is shared by: - Implicitly\nexporting the start trigger from one session - Configuring the other\nsessions for digital edge start triggers with sources corresponding to\nthe exported start trigger - Setting\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION to the session that is\nexporting the trigger for all sessions If the start triggers are None\nfor all except one session, niTClk_ConfigureForHomogeneousTriggers\nconfigures the sessions to share the start trigger from the one excepted\nsession. The start trigger is shared by: - Implicitly exporting start\ntrigger from the session with the start trigger that is not None -\nConfiguring the other sessions for digital-edge start triggers with\nsources corresponding to the exported start trigger - Setting\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION to the session that is\nexporting the trigger for all sessions If start triggers are configured\nfor all sessions, niTClk_ConfigureForHomogeneousTriggers does not\naffect the start triggers. Start triggers are considered to be\nconfigured for all sessions if either of the following conditions is\ntrue: - No session has a start trigger that is None - One session has a\nstart trigger that is None, and all other sessions have start triggers\nother than None. The one session with the None trigger must have\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION set to itself, indicating\nthat the session itself is the start trigger master Reference Triggers\nniTClk_ConfigureForHomogeneousTriggers configures sessions that support\nreference triggers to share the reference triggers if the reference\ntriggers are None (no trigger configured) for all except one session.\nThe reference triggers are shared by: - Implicitly exporting the\nreference trigger from the session whose reference trigger is not None -\nConfiguring the other sessions that support the reference trigger for\ndigital-edge reference triggers with sources corresponding to the\nexported reference trigger - Setting\nNITCLK_ATTR_REF_TRIGGER_MASTER_SESSION to the session that is\nexporting the trigger for all sessions that support reference trigger If\nthe reference triggers are configured for all sessions that support\nreference triggers, niTClk_ConfigureForHomogeneousTriggers does not\naffect the reference triggers. Reference triggers are considered to be\nconfigured for all sessions if either one or the other of the following\nconditions is true: - No session has a reference trigger that is None -\nOne session has a reference trigger that is None, and all other sessions\nhave reference triggers other than None. The one session with the None\ntrigger must have NITCLK_ATTR_REF_TRIGGER_MASTER_SESSION set to\nitself, indicating that the session itself is the reference trigger\nmaster Reference Trigger Holdoffs Acquisition sessions may be configured\nwith the reference trigger. For acquisition sessions, when the reference\ntrigger is shared, niTClk_ConfigureForHomogeneousTriggers configures\nthe holdoff attributes (which are instrument driver specific) on the\nreference trigger master session so that the session does not recognize\nthe reference trigger before the other sessions are ready. This\ncondition is only relevant when the sample clock rates, sample clock\ntimebase rates, sample counts, holdoffs, and/or any delays for the\nacquisitions are different. When the sample clock rates, sample clock\ntimebase rates, and/or the sample counts are different in acquisition\nsessions sharing the reference trigger, you should also set the holdoff\nattributes for the reference trigger master using the instrument driver.\nPause Triggers\nniTClk_ConfigureForHomogeneousTriggers configures generation sessions\nthat support pause triggers to share them, if the pause triggers are\nNone (no trigger configured) for all except one session. The pause\ntriggers are shared by: - Implicitly exporting the pause trigger from\nthe session whose script trigger is not None - Configuring the other\nsessions that support the pause trigger for digital-edge pause triggers\nwith sources corresponding to the exported pause trigger - Setting\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION to the session that is\nexporting the trigger for all sessions that support script triggers If\nthe pause triggers are configured for all generation sessions that\nsupport pause triggers, niTClk_ConfigureForHomogeneousTriggers does not\naffect pause triggers. Pause triggers are considered to be configured\nfor all sessions if either one or the other of the following conditions\nis true: - No session has a pause trigger that is None - One session has\na pause trigger that is None and all other sessions have pause triggers\nother than None. The one session with the None trigger must have\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION set to itself, indicating\nthat the session itself is the pause trigger master Note: TClk\nsynchronization is not supported for pause triggers on acquisition\nsessions.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'Number of elements in the sessions array'
},
'name': 'sessionCount',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'sessions is an array of sessions that are being synchronized.'
},
'is_session_handle': False,
'name': 'sessions',
'python_api_converter_name': 'convert_to_nitclk_session_number_list',
'size': {
'mechanism': 'len',
'value': 'sessionCount'
},
'type': 'ViSession[]',
'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'
}
],
'returns': 'ViStatus'
},
'FinishSyncPulseSenderSynchronize': {
'documentation': {
'description': 'Finishes synchronizing the Sync Pulse Sender.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'Number of elements in the sessions array'
},
'name': 'sessionCount',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'sessions is an array of sessions that are being synchronized.'
},
'is_session_handle': False,
'name': 'sessions',
'python_api_converter_name': 'convert_to_nitclk_session_number_list',
'size': {
'mechanism': 'len',
'value': 'sessionCount'
},
'type': 'ViSession[]',
'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'
},
{
'default_value': 'hightime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nMinimal period of TClk, expressed in seconds. Supported values are\nbetween 0.0 s and 0.050 s (50 ms). Minimal period for a single\nchassis/PC is 200 ns. If the specified value is less than 200 ns,\nNI-TClk automatically coerces minTime to 200 ns. For multichassis\nsynchronization, adjust this value to account for propagation delays\nthrough the various devices and cables.\n'
},
'name': 'minTime',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
'GetAttributeViReal64': {
'codegen_method': 'private',
'documentation': {
'description': 'Gets the value of an NI-TClk ViReal64 attribute.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'session references the sessions being synchronized.'
},
'name': 'session',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Pass VI_NULL or an empty string'
},
'name': 'channelName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe ID of the attribute that you want to get Supported Attribute\nNITCLK_ATTR_SAMPLE_CLOCK_DELAY\n'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': 'The value that you are getting'
},
'name': 'value',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'GetAttributeViSession': {
'codegen_method': 'private',
'documentation': {
'description': 'Gets the value of an NI-TClk ViSession attribute.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'session references the sessions being synchronized.'
},
'name': 'session',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Pass VI_NULL or an empty string'
},
'name': 'channelName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe ID of the attribute that you want to set Supported Attributes\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_REF_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION\n'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'out',
'documentation': {
'description': 'The value that you are getting'
},
'is_session_handle': False,
'name': 'value',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'GetAttributeViString': {
'codegen_method': 'private',
'documentation': {
'description': '\nThis function queries the value of an NI-TClk ViString attribute. You\nmust provide a ViChar array to serve as a buffer for the value. You pass\nthe number of bytes in the buffer as bufSize. If the current value of\nthe attribute, including the terminating NULL byte, is larger than the\nsize you indicate in bufSize, the function copies bufSize minus 1 bytes\ninto the buffer, places an ASCII NULL byte at the end of the buffer, and\nreturns the array size that you must pass to get the entire value. For\nexample, if the value is "123456" and bufSize is 4, the function places\n"123" into the buffer and returns 7. If you want to call\nniTClk_GetAttributeViString just to get the required array size, pass 0\nfor bufSize and VI_NULL for the value.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'session references the sessions being synchronized.'
},
'name': 'session',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Pass VI_NULL or an empty string'
},
'name': 'channelName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe ID of the attribute that you want to get Supported Attributes\nNITCLK_ATTR_SYNC_PULSE_SOURCE\nNITCLK_ATTR_SYNC_PULSE_CLOCK_SOURCE\nNITCLK_ATTR_EXPORTED_SYNC_PULSE_OUTPUT_TERMINAL\n'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe number of bytes in the ViChar array that you specify for the value\nparameter\n'
},
'name': 'bufSize',
'type': 'ViInt32'
},
{
'direction': 'out',
'documentation': {
'description': 'The value that you are getting'
},
'name': 'value',
'size': {
'mechanism': 'ivi-dance',
'value': 'bufSize'
},
'type': 'ViChar[]'
}
],
'returns': 'ViStatus'
},
'GetExtendedErrorInfo': {
'codegen_method': 'private',
'documentation': {
'description': '\nReports extended error information for the most recent NI-TClk function\nthat returned an error. To establish the function that returned an\nerror, use the return values of the individual functions because once\nniTClk_GetExtendedErrorInfo reports an errorString, it does not report\nan empty string again.\n'
},
'is_error_handling': True,
'parameters': [
{
'direction': 'out',
'documentation': {
'description': '\nExtended error description. If errorString is NULL, then it is not large\nenough to hold the entire error description. In this case, the return\nvalue of niTClk_GetExtendedErrorInfo is the size that you should use\nfor niTClk_GetExtendedErrorInfo to return the full error string.\n'
},
'name': 'errorString',
'size': {
'mechanism': 'ivi-dance',
'value': 'errorStringSize'
},
'type': 'ViChar[]'
},
{
'direction': 'in',
'documentation': {
'description': '\nSize of the errorString. If errorStringSize is 0, then it is not large\nenough to hold the entire error description. In this case, the return\nvalue of niTClk_GetExtendedErrorInfo is the size that you should use\nfor niTClk_GetExtendedErrorInfo to return the full error string.\n'
},
'name': 'errorStringSize',
'type': 'ViUInt32'
}
],
'returns': 'ViStatus'
},
'Initiate': {
'documentation': {
'description': '\nInitiates the acquisition or generation sessions specified, taking into\nconsideration any special requirements needed for synchronization. For\nexample, the session exporting the TClk-synchronized start trigger is\nnot initiated until after niTClk_Initiate initiates all the sessions\nthat import the TClk-synchronized start trigger.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'Number of elements in the sessions array'
},
'name': 'sessionCount',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'sessions is an array of sessions that are being synchronized.'
},
'is_session_handle': False,
'name': 'sessions',
'python_api_converter_name': 'convert_to_nitclk_session_number_list',
'size': {
'mechanism': 'len',
'value': 'sessionCount'
},
'type': 'ViSession[]',
'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'
}
],
'returns': 'ViStatus'
},
'IsDone': {
'documentation': {
'description': '\nMonitors the progress of the acquisitions and/or generations\ncorresponding to sessions.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'Number of elements in the sessions array'
},
'name': 'sessionCount',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'sessions is an array of sessions that are being synchronized.'
},
'is_session_handle': False,
'name': 'sessions',
'python_api_converter_name': 'convert_to_nitclk_session_number_list',
'size': {
'mechanism': 'len',
'value': 'sessionCount'
},
'type': 'ViSession[]',
'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'
},
{
'direction': 'out',
'documentation': {
'description': '\nIndicates that the operation is done. The operation is done when each\nsession has completed without any errors or when any one of the sessions\nreports an error.\n'
},
'name': 'done',
'type': 'ViBoolean'
}
],
'returns': 'ViStatus'
},
'SetAttributeViReal64': {
'codegen_method': 'private',
'documentation': {
'description': '\nSets the value of an NI-TClk VIReal64 attribute.\nniTClk_SetAttributeViReal64 is a low-level function that you can use to\nset the values NI-TClk attributes. NI-TClk contains high-level functions\nthat set most of the attributes. It is best to use the high-level\nfunctions as much as possible.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'session references the sessions being synchronized.'
},
'name': 'session',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Pass VI_NULL or an empty string'
},
'name': 'channelName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe ID of the attribute that you want to set Supported Attribute\nNITCLK_ATTR_SAMPLE_CLOCK_DELAY\n'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'The value for the attribute'
},
'name': 'value',
'type': 'ViReal64'
}
],
'returns': 'ViStatus'
},
'SetAttributeViSession': {
'codegen_method': 'private',
'documentation': {
'description': '\nSets the value of an NI-TClk ViSession attribute.\nniTClk_SetAttributeViSession is a low-level function that you can use\nto set the values NI-TClk attributes. NI-TClk contains high-level\nfunctions that set most of the attributes. It is best to use the\nhigh-level functions as much as possible.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'session references the sessions being synchronized.'
},
'name': 'session',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': '\nPass VI_NULL or an empty string'
},
'name': 'channelName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nThe ID of the attribute that you want to set Supported Attributes\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_REF_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION\n'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'The value for the attribute'
},
'is_session_handle': False,
'name': 'value',
'type': 'ViSession'
}
],
'returns': 'ViStatus'
},
'SetAttributeViString': {
'codegen_method': 'private',
'documentation': {
'description': '\nSets the value of an NI-TClk VIString attribute.\nniTClk_SetAttributeViString is a low-level function that you can use to\nset the values of NI-TClk attributes. NI-TClk contain high-level\nfunctions that set most of the attributes. It is best to use the\nhigh-level functions as much as possible.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'session references the sessions being synchronized.'
},
'name': 'session',
'type': 'ViSession'
},
{
'direction': 'in',
'documentation': {
'description': 'Pass VI_NULL or an empty string'
},
'name': 'channelName',
'type': 'ViConstString'
},
{
'direction': 'in',
'documentation': {
'description': '\nPass the ID of the attribute that you want to set Supported Attributes\nNITCLK_ATTR_SYNC_PULSE_SOURCE\nNITCLK_ATTR_SYNC_PULSE_CLOCK_SOURCE\nNITCLK_ATTR_EXPORTED_SYNC_PULSE_OUTPUT_TERMINAL\n'
},
'name': 'attributeId',
'type': 'ViAttr'
},
{
'direction': 'in',
'documentation': {
'description': 'Pass the value for the attribute'
},
'name': 'value',
'type': 'ViConstString'
}
],
'returns': 'ViStatus'
},
'SetupForSyncPulseSenderSynchronize': {
'documentation': {
'description': 'Configures the TClks on all the devices and prepares the Sync Pulse Sender for synchronization'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'Number of elements in the sessions array'
},
'name': 'sessionCount',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'sessions is an array of sessions that are being synchronized.'
},
'is_session_handle': False,
'name': 'sessions',
'python_api_converter_name': 'convert_to_nitclk_session_number_list',
'size': {
'mechanism': 'len',
'value': 'sessionCount'
},
'type': 'ViSession[]',
'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'
},
{
'default_value': 'hightime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nMinimal period of TClk, expressed in seconds. Supported values are\nbetween 0.0 s and 0.050 s (50 ms). Minimal period for a single\nchassis/PC is 200 ns. If the specified value is less than 200 ns,\nNI-TClk automatically coerces minTime to 200 ns. For multichassis\nsynchronization, adjust this value to account for propagation delays\nthrough the various devices and cables.\n'
},
'name': 'minTime',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
'Synchronize': {
'documentation': {
'description': '\nSynchronizes the TClk signals on the given sessions. After\nniTClk_Synchronize executes, TClk signals from all sessions are\nsynchronized. Note: Before using this NI-TClk function, verify that your\nsystem is configured as specified in the PXI Trigger Lines and RTSI\nLines topic of the NI-TClk Synchronization Help. You can locate this\nhelp file at Start>>Programs>>National Instruments>>NI-TClk.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'Number of elements in the sessions array'
},
'name': 'sessionCount',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'sessions is an array of sessions that are being synchronized.'
},
'is_session_handle': False,
'name': 'sessions',
'python_api_converter_name': 'convert_to_nitclk_session_number_list',
'size': {
'mechanism': 'len',
'value': 'sessionCount'
},
'type': 'ViSession[]',
'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'
},
{
'default_value': 'hightime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nMinimal period of TClk, expressed in seconds. Supported values are\nbetween 0.0 s and 0.050 s (50 ms). Minimal period for a single\nchassis/PC is 200 ns. If the specified value is less than 200 ns,\nNI-TClk automatically coerces minTime to 200 ns. For multichassis\nsynchronization, adjust this value to account for propagation delays\nthrough the various devices and cables.\n'
},
'name': 'minTclkPeriod',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
'SynchronizeToSyncPulseSender': {
'documentation': {
'description': 'Synchronizes the other devices to the Sync Pulse Sender.'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'Number of elements in the sessions array'
},
'name': 'sessionCount',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'sessions is an array of sessions that are being synchronized.'
},
'is_session_handle': False,
'name': 'sessions',
'python_api_converter_name': 'convert_to_nitclk_session_number_list',
'size': {
'mechanism': 'len',
'value': 'sessionCount'
},
'type': 'ViSession[]',
'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'
},
{
'default_value': 'hightime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nMinimal period of TClk, expressed in seconds. Supported values are\nbetween 0.0 s and 0.050 s (50 ms). Minimal period for a single\nchassis/PC is 200 ns. If the specified value is less than 200 ns,\nNI-TClk automatically coerces minTime to 200 ns. For multichassis\nsynchronization, adjust this value to account for propagation delays\nthrough the various devices and cables.\n'
},
'name': 'minTime',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
},
'WaitUntilDone': {
'documentation': {
'description': '\nCall this function to pause execution of your program until the\nacquisitions and/or generations corresponding to sessions are done or\nuntil the function returns a timeout error. niTClk_WaitUntilDone is a\nblocking function that periodically checks the operation status. It\nreturns control to the calling program if the operation completes\nsuccessfully or an error occurs (including a timeout error). This\nfunction is most useful for finite data operations that you expect to\ncomplete within a certain time.\n'
},
'parameters': [
{
'direction': 'in',
'documentation': {
'description': 'Number of elements in the sessions array'
},
'name': 'sessionCount',
'type': 'ViUInt32'
},
{
'direction': 'in',
'documentation': {
'description': 'sessions is an array of sessions that are being synchronized.'
},
'is_session_handle': False,
'name': 'sessions',
'python_api_converter_name': 'convert_to_nitclk_session_number_list',
'size': {
'mechanism': 'len',
'value': 'sessionCount'
},
'type': 'ViSession[]',
'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'
},
{
'default_value': 'hightime.timedelta(seconds=0.0)',
'direction': 'in',
'documentation': {
'description': '\nThe amount of time in seconds that niTClk_WaitUntilDone waits for the\nsessions to complete. If timeout is exceeded, niTClk_WaitUntilDone\nreturns an error.\n'
},
'name': 'timeout',
'python_api_converter_name': 'convert_timedelta_to_seconds_real64',
'type': 'ViReal64',
'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'
}
],
'returns': 'ViStatus'
}
}
| functions = {'ConfigureForHomogeneousTriggers': {'documentation': {'description': '\nConfigures the attributes commonly required for the TClk synchronization\nof device sessions with homogeneous triggers in a single PXI chassis or\na single PC. Use niTClk_ConfigureForHomogeneousTriggers to configure\nthe attributes for the reference clocks, start triggers, reference\ntriggers, script triggers, and pause triggers. If\nniTClk_ConfigureForHomogeneousTriggers cannot perform all the steps\nappropriate for the given sessions, it returns an error. If an error is\nreturned, use the instrument driver functions and attributes for signal\nrouting, along with the following NI-TClk attributes:\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_REF_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION\nniTClk_ConfigureForHomogeneousTriggers affects the following clocks and\ntriggers: - Reference clocks - Start triggers - Reference triggers -\nScript triggers - Pause triggers Reference Clocks\nniTClk_ConfigureForHomogeneousTriggers configures the reference clocks\nif they are needed. Specifically, if the internal sample clocks or\ninternal sample clock timebases are used, and the reference clock source\nis not configured--or is set to None (no trigger\nconfigured)--niTClk_ConfigureForHomogeneousTriggers configures the\nfollowing: PXI--The reference clock source on all devices is set to be\nthe 10 MHz PXI backplane clock (PXI_CLK10). PCI--One of the devices\nexports its 10 MHz onboard reference clock to RTSI 7. The reference\nclock source on all devices is set to be RTSI 7. Note: If the reference\nclock source is set to a value other than None,\nniTClk_ConfigureForHomogeneousTriggers cannot configure the reference\nclock source. Start Triggers If the start trigger is set to None (no\ntrigger configured) for all sessions, the sessions are configured to\nshare the start trigger. The start trigger is shared by: - Implicitly\nexporting the start trigger from one session - Configuring the other\nsessions for digital edge start triggers with sources corresponding to\nthe exported start trigger - Setting\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION to the session that is\nexporting the trigger for all sessions If the start triggers are None\nfor all except one session, niTClk_ConfigureForHomogeneousTriggers\nconfigures the sessions to share the start trigger from the one excepted\nsession. The start trigger is shared by: - Implicitly exporting start\ntrigger from the session with the start trigger that is not None -\nConfiguring the other sessions for digital-edge start triggers with\nsources corresponding to the exported start trigger - Setting\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION to the session that is\nexporting the trigger for all sessions If start triggers are configured\nfor all sessions, niTClk_ConfigureForHomogeneousTriggers does not\naffect the start triggers. Start triggers are considered to be\nconfigured for all sessions if either of the following conditions is\ntrue: - No session has a start trigger that is None - One session has a\nstart trigger that is None, and all other sessions have start triggers\nother than None. The one session with the None trigger must have\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION set to itself, indicating\nthat the session itself is the start trigger master Reference Triggers\nniTClk_ConfigureForHomogeneousTriggers configures sessions that support\nreference triggers to share the reference triggers if the reference\ntriggers are None (no trigger configured) for all except one session.\nThe reference triggers are shared by: - Implicitly exporting the\nreference trigger from the session whose reference trigger is not None -\nConfiguring the other sessions that support the reference trigger for\ndigital-edge reference triggers with sources corresponding to the\nexported reference trigger - Setting\nNITCLK_ATTR_REF_TRIGGER_MASTER_SESSION to the session that is\nexporting the trigger for all sessions that support reference trigger If\nthe reference triggers are configured for all sessions that support\nreference triggers, niTClk_ConfigureForHomogeneousTriggers does not\naffect the reference triggers. Reference triggers are considered to be\nconfigured for all sessions if either one or the other of the following\nconditions is true: - No session has a reference trigger that is None -\nOne session has a reference trigger that is None, and all other sessions\nhave reference triggers other than None. The one session with the None\ntrigger must have NITCLK_ATTR_REF_TRIGGER_MASTER_SESSION set to\nitself, indicating that the session itself is the reference trigger\nmaster Reference Trigger Holdoffs Acquisition sessions may be configured\nwith the reference trigger. For acquisition sessions, when the reference\ntrigger is shared, niTClk_ConfigureForHomogeneousTriggers configures\nthe holdoff attributes (which are instrument driver specific) on the\nreference trigger master session so that the session does not recognize\nthe reference trigger before the other sessions are ready. This\ncondition is only relevant when the sample clock rates, sample clock\ntimebase rates, sample counts, holdoffs, and/or any delays for the\nacquisitions are different. When the sample clock rates, sample clock\ntimebase rates, and/or the sample counts are different in acquisition\nsessions sharing the reference trigger, you should also set the holdoff\nattributes for the reference trigger master using the instrument driver.\nPause Triggers\nniTClk_ConfigureForHomogeneousTriggers configures generation sessions\nthat support pause triggers to share them, if the pause triggers are\nNone (no trigger configured) for all except one session. The pause\ntriggers are shared by: - Implicitly exporting the pause trigger from\nthe session whose script trigger is not None - Configuring the other\nsessions that support the pause trigger for digital-edge pause triggers\nwith sources corresponding to the exported pause trigger - Setting\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION to the session that is\nexporting the trigger for all sessions that support script triggers If\nthe pause triggers are configured for all generation sessions that\nsupport pause triggers, niTClk_ConfigureForHomogeneousTriggers does not\naffect pause triggers. Pause triggers are considered to be configured\nfor all sessions if either one or the other of the following conditions\nis true: - No session has a pause trigger that is None - One session has\na pause trigger that is None and all other sessions have pause triggers\nother than None. The one session with the None trigger must have\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION set to itself, indicating\nthat the session itself is the pause trigger master Note: TClk\nsynchronization is not supported for pause triggers on acquisition\nsessions.\n'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'Number of elements in the sessions array'}, 'name': 'sessionCount', 'type': 'ViUInt32'}, {'direction': 'in', 'documentation': {'description': 'sessions is an array of sessions that are being synchronized.'}, 'is_session_handle': False, 'name': 'sessions', 'python_api_converter_name': 'convert_to_nitclk_session_number_list', 'size': {'mechanism': 'len', 'value': 'sessionCount'}, 'type': 'ViSession[]', 'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'}], 'returns': 'ViStatus'}, 'FinishSyncPulseSenderSynchronize': {'documentation': {'description': 'Finishes synchronizing the Sync Pulse Sender.'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'Number of elements in the sessions array'}, 'name': 'sessionCount', 'type': 'ViUInt32'}, {'direction': 'in', 'documentation': {'description': 'sessions is an array of sessions that are being synchronized.'}, 'is_session_handle': False, 'name': 'sessions', 'python_api_converter_name': 'convert_to_nitclk_session_number_list', 'size': {'mechanism': 'len', 'value': 'sessionCount'}, 'type': 'ViSession[]', 'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'}, {'default_value': 'hightime.timedelta(seconds=0.0)', 'direction': 'in', 'documentation': {'description': '\nMinimal period of TClk, expressed in seconds. Supported values are\nbetween 0.0 s and 0.050 s (50 ms). Minimal period for a single\nchassis/PC is 200 ns. If the specified value is less than 200 ns,\nNI-TClk automatically coerces minTime to 200 ns. For multichassis\nsynchronization, adjust this value to account for propagation delays\nthrough the various devices and cables.\n'}, 'name': 'minTime', 'python_api_converter_name': 'convert_timedelta_to_seconds_real64', 'type': 'ViReal64', 'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'}], 'returns': 'ViStatus'}, 'GetAttributeViReal64': {'codegen_method': 'private', 'documentation': {'description': 'Gets the value of an NI-TClk ViReal64 attribute.'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'session references the sessions being synchronized.'}, 'name': 'session', 'type': 'ViSession'}, {'direction': 'in', 'documentation': {'description': 'Pass VI_NULL or an empty string'}, 'name': 'channelName', 'type': 'ViConstString'}, {'direction': 'in', 'documentation': {'description': '\nThe ID of the attribute that you want to get Supported Attribute\nNITCLK_ATTR_SAMPLE_CLOCK_DELAY\n'}, 'name': 'attributeId', 'type': 'ViAttr'}, {'direction': 'out', 'documentation': {'description': 'The value that you are getting'}, 'name': 'value', 'type': 'ViReal64'}], 'returns': 'ViStatus'}, 'GetAttributeViSession': {'codegen_method': 'private', 'documentation': {'description': 'Gets the value of an NI-TClk ViSession attribute.'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'session references the sessions being synchronized.'}, 'name': 'session', 'type': 'ViSession'}, {'direction': 'in', 'documentation': {'description': 'Pass VI_NULL or an empty string'}, 'name': 'channelName', 'type': 'ViConstString'}, {'direction': 'in', 'documentation': {'description': '\nThe ID of the attribute that you want to set Supported Attributes\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_REF_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION\n'}, 'name': 'attributeId', 'type': 'ViAttr'}, {'direction': 'out', 'documentation': {'description': 'The value that you are getting'}, 'is_session_handle': False, 'name': 'value', 'type': 'ViSession'}], 'returns': 'ViStatus'}, 'GetAttributeViString': {'codegen_method': 'private', 'documentation': {'description': '\nThis function queries the value of an NI-TClk ViString attribute. You\nmust provide a ViChar array to serve as a buffer for the value. You pass\nthe number of bytes in the buffer as bufSize. If the current value of\nthe attribute, including the terminating NULL byte, is larger than the\nsize you indicate in bufSize, the function copies bufSize minus 1 bytes\ninto the buffer, places an ASCII NULL byte at the end of the buffer, and\nreturns the array size that you must pass to get the entire value. For\nexample, if the value is "123456" and bufSize is 4, the function places\n"123" into the buffer and returns 7. If you want to call\nniTClk_GetAttributeViString just to get the required array size, pass 0\nfor bufSize and VI_NULL for the value.\n'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'session references the sessions being synchronized.'}, 'name': 'session', 'type': 'ViSession'}, {'direction': 'in', 'documentation': {'description': 'Pass VI_NULL or an empty string'}, 'name': 'channelName', 'type': 'ViConstString'}, {'direction': 'in', 'documentation': {'description': '\nThe ID of the attribute that you want to get Supported Attributes\nNITCLK_ATTR_SYNC_PULSE_SOURCE\nNITCLK_ATTR_SYNC_PULSE_CLOCK_SOURCE\nNITCLK_ATTR_EXPORTED_SYNC_PULSE_OUTPUT_TERMINAL\n'}, 'name': 'attributeId', 'type': 'ViAttr'}, {'direction': 'in', 'documentation': {'description': '\nThe number of bytes in the ViChar array that you specify for the value\nparameter\n'}, 'name': 'bufSize', 'type': 'ViInt32'}, {'direction': 'out', 'documentation': {'description': 'The value that you are getting'}, 'name': 'value', 'size': {'mechanism': 'ivi-dance', 'value': 'bufSize'}, 'type': 'ViChar[]'}], 'returns': 'ViStatus'}, 'GetExtendedErrorInfo': {'codegen_method': 'private', 'documentation': {'description': '\nReports extended error information for the most recent NI-TClk function\nthat returned an error. To establish the function that returned an\nerror, use the return values of the individual functions because once\nniTClk_GetExtendedErrorInfo reports an errorString, it does not report\nan empty string again.\n'}, 'is_error_handling': True, 'parameters': [{'direction': 'out', 'documentation': {'description': '\nExtended error description. If errorString is NULL, then it is not large\nenough to hold the entire error description. In this case, the return\nvalue of niTClk_GetExtendedErrorInfo is the size that you should use\nfor niTClk_GetExtendedErrorInfo to return the full error string.\n'}, 'name': 'errorString', 'size': {'mechanism': 'ivi-dance', 'value': 'errorStringSize'}, 'type': 'ViChar[]'}, {'direction': 'in', 'documentation': {'description': '\nSize of the errorString. If errorStringSize is 0, then it is not large\nenough to hold the entire error description. In this case, the return\nvalue of niTClk_GetExtendedErrorInfo is the size that you should use\nfor niTClk_GetExtendedErrorInfo to return the full error string.\n'}, 'name': 'errorStringSize', 'type': 'ViUInt32'}], 'returns': 'ViStatus'}, 'Initiate': {'documentation': {'description': '\nInitiates the acquisition or generation sessions specified, taking into\nconsideration any special requirements needed for synchronization. For\nexample, the session exporting the TClk-synchronized start trigger is\nnot initiated until after niTClk_Initiate initiates all the sessions\nthat import the TClk-synchronized start trigger.\n'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'Number of elements in the sessions array'}, 'name': 'sessionCount', 'type': 'ViUInt32'}, {'direction': 'in', 'documentation': {'description': 'sessions is an array of sessions that are being synchronized.'}, 'is_session_handle': False, 'name': 'sessions', 'python_api_converter_name': 'convert_to_nitclk_session_number_list', 'size': {'mechanism': 'len', 'value': 'sessionCount'}, 'type': 'ViSession[]', 'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'}], 'returns': 'ViStatus'}, 'IsDone': {'documentation': {'description': '\nMonitors the progress of the acquisitions and/or generations\ncorresponding to sessions.\n'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'Number of elements in the sessions array'}, 'name': 'sessionCount', 'type': 'ViUInt32'}, {'direction': 'in', 'documentation': {'description': 'sessions is an array of sessions that are being synchronized.'}, 'is_session_handle': False, 'name': 'sessions', 'python_api_converter_name': 'convert_to_nitclk_session_number_list', 'size': {'mechanism': 'len', 'value': 'sessionCount'}, 'type': 'ViSession[]', 'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'}, {'direction': 'out', 'documentation': {'description': '\nIndicates that the operation is done. The operation is done when each\nsession has completed without any errors or when any one of the sessions\nreports an error.\n'}, 'name': 'done', 'type': 'ViBoolean'}], 'returns': 'ViStatus'}, 'SetAttributeViReal64': {'codegen_method': 'private', 'documentation': {'description': '\nSets the value of an NI-TClk VIReal64 attribute.\nniTClk_SetAttributeViReal64 is a low-level function that you can use to\nset the values NI-TClk attributes. NI-TClk contains high-level functions\nthat set most of the attributes. It is best to use the high-level\nfunctions as much as possible.\n'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'session references the sessions being synchronized.'}, 'name': 'session', 'type': 'ViSession'}, {'direction': 'in', 'documentation': {'description': 'Pass VI_NULL or an empty string'}, 'name': 'channelName', 'type': 'ViConstString'}, {'direction': 'in', 'documentation': {'description': '\nThe ID of the attribute that you want to set Supported Attribute\nNITCLK_ATTR_SAMPLE_CLOCK_DELAY\n'}, 'name': 'attributeId', 'type': 'ViAttr'}, {'direction': 'in', 'documentation': {'description': 'The value for the attribute'}, 'name': 'value', 'type': 'ViReal64'}], 'returns': 'ViStatus'}, 'SetAttributeViSession': {'codegen_method': 'private', 'documentation': {'description': '\nSets the value of an NI-TClk ViSession attribute.\nniTClk_SetAttributeViSession is a low-level function that you can use\nto set the values NI-TClk attributes. NI-TClk contains high-level\nfunctions that set most of the attributes. It is best to use the\nhigh-level functions as much as possible.\n'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'session references the sessions being synchronized.'}, 'name': 'session', 'type': 'ViSession'}, {'direction': 'in', 'documentation': {'description': '\nPass VI_NULL or an empty string'}, 'name': 'channelName', 'type': 'ViConstString'}, {'direction': 'in', 'documentation': {'description': '\nThe ID of the attribute that you want to set Supported Attributes\nNITCLK_ATTR_START_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_REF_TRIGGER_MASTER_SESSION\nNITCLK_ATTR_PAUSE_TRIGGER_MASTER_SESSION\n'}, 'name': 'attributeId', 'type': 'ViAttr'}, {'direction': 'in', 'documentation': {'description': 'The value for the attribute'}, 'is_session_handle': False, 'name': 'value', 'type': 'ViSession'}], 'returns': 'ViStatus'}, 'SetAttributeViString': {'codegen_method': 'private', 'documentation': {'description': '\nSets the value of an NI-TClk VIString attribute.\nniTClk_SetAttributeViString is a low-level function that you can use to\nset the values of NI-TClk attributes. NI-TClk contain high-level\nfunctions that set most of the attributes. It is best to use the\nhigh-level functions as much as possible.\n'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'session references the sessions being synchronized.'}, 'name': 'session', 'type': 'ViSession'}, {'direction': 'in', 'documentation': {'description': 'Pass VI_NULL or an empty string'}, 'name': 'channelName', 'type': 'ViConstString'}, {'direction': 'in', 'documentation': {'description': '\nPass the ID of the attribute that you want to set Supported Attributes\nNITCLK_ATTR_SYNC_PULSE_SOURCE\nNITCLK_ATTR_SYNC_PULSE_CLOCK_SOURCE\nNITCLK_ATTR_EXPORTED_SYNC_PULSE_OUTPUT_TERMINAL\n'}, 'name': 'attributeId', 'type': 'ViAttr'}, {'direction': 'in', 'documentation': {'description': 'Pass the value for the attribute'}, 'name': 'value', 'type': 'ViConstString'}], 'returns': 'ViStatus'}, 'SetupForSyncPulseSenderSynchronize': {'documentation': {'description': 'Configures the TClks on all the devices and prepares the Sync Pulse Sender for synchronization'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'Number of elements in the sessions array'}, 'name': 'sessionCount', 'type': 'ViUInt32'}, {'direction': 'in', 'documentation': {'description': 'sessions is an array of sessions that are being synchronized.'}, 'is_session_handle': False, 'name': 'sessions', 'python_api_converter_name': 'convert_to_nitclk_session_number_list', 'size': {'mechanism': 'len', 'value': 'sessionCount'}, 'type': 'ViSession[]', 'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'}, {'default_value': 'hightime.timedelta(seconds=0.0)', 'direction': 'in', 'documentation': {'description': '\nMinimal period of TClk, expressed in seconds. Supported values are\nbetween 0.0 s and 0.050 s (50 ms). Minimal period for a single\nchassis/PC is 200 ns. If the specified value is less than 200 ns,\nNI-TClk automatically coerces minTime to 200 ns. For multichassis\nsynchronization, adjust this value to account for propagation delays\nthrough the various devices and cables.\n'}, 'name': 'minTime', 'python_api_converter_name': 'convert_timedelta_to_seconds_real64', 'type': 'ViReal64', 'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'}], 'returns': 'ViStatus'}, 'Synchronize': {'documentation': {'description': '\nSynchronizes the TClk signals on the given sessions. After\nniTClk_Synchronize executes, TClk signals from all sessions are\nsynchronized. Note: Before using this NI-TClk function, verify that your\nsystem is configured as specified in the PXI Trigger Lines and RTSI\nLines topic of the NI-TClk Synchronization Help. You can locate this\nhelp file at Start>>Programs>>National Instruments>>NI-TClk.\n'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'Number of elements in the sessions array'}, 'name': 'sessionCount', 'type': 'ViUInt32'}, {'direction': 'in', 'documentation': {'description': 'sessions is an array of sessions that are being synchronized.'}, 'is_session_handle': False, 'name': 'sessions', 'python_api_converter_name': 'convert_to_nitclk_session_number_list', 'size': {'mechanism': 'len', 'value': 'sessionCount'}, 'type': 'ViSession[]', 'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'}, {'default_value': 'hightime.timedelta(seconds=0.0)', 'direction': 'in', 'documentation': {'description': '\nMinimal period of TClk, expressed in seconds. Supported values are\nbetween 0.0 s and 0.050 s (50 ms). Minimal period for a single\nchassis/PC is 200 ns. If the specified value is less than 200 ns,\nNI-TClk automatically coerces minTime to 200 ns. For multichassis\nsynchronization, adjust this value to account for propagation delays\nthrough the various devices and cables.\n'}, 'name': 'minTclkPeriod', 'python_api_converter_name': 'convert_timedelta_to_seconds_real64', 'type': 'ViReal64', 'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'}], 'returns': 'ViStatus'}, 'SynchronizeToSyncPulseSender': {'documentation': {'description': 'Synchronizes the other devices to the Sync Pulse Sender.'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'Number of elements in the sessions array'}, 'name': 'sessionCount', 'type': 'ViUInt32'}, {'direction': 'in', 'documentation': {'description': 'sessions is an array of sessions that are being synchronized.'}, 'is_session_handle': False, 'name': 'sessions', 'python_api_converter_name': 'convert_to_nitclk_session_number_list', 'size': {'mechanism': 'len', 'value': 'sessionCount'}, 'type': 'ViSession[]', 'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'}, {'default_value': 'hightime.timedelta(seconds=0.0)', 'direction': 'in', 'documentation': {'description': '\nMinimal period of TClk, expressed in seconds. Supported values are\nbetween 0.0 s and 0.050 s (50 ms). Minimal period for a single\nchassis/PC is 200 ns. If the specified value is less than 200 ns,\nNI-TClk automatically coerces minTime to 200 ns. For multichassis\nsynchronization, adjust this value to account for propagation delays\nthrough the various devices and cables.\n'}, 'name': 'minTime', 'python_api_converter_name': 'convert_timedelta_to_seconds_real64', 'type': 'ViReal64', 'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'}], 'returns': 'ViStatus'}, 'WaitUntilDone': {'documentation': {'description': '\nCall this function to pause execution of your program until the\nacquisitions and/or generations corresponding to sessions are done or\nuntil the function returns a timeout error. niTClk_WaitUntilDone is a\nblocking function that periodically checks the operation status. It\nreturns control to the calling program if the operation completes\nsuccessfully or an error occurs (including a timeout error). This\nfunction is most useful for finite data operations that you expect to\ncomplete within a certain time.\n'}, 'parameters': [{'direction': 'in', 'documentation': {'description': 'Number of elements in the sessions array'}, 'name': 'sessionCount', 'type': 'ViUInt32'}, {'direction': 'in', 'documentation': {'description': 'sessions is an array of sessions that are being synchronized.'}, 'is_session_handle': False, 'name': 'sessions', 'python_api_converter_name': 'convert_to_nitclk_session_number_list', 'size': {'mechanism': 'len', 'value': 'sessionCount'}, 'type': 'ViSession[]', 'type_in_documentation': 'list of instrument-specific sessions or nitclk.SessionReference instances'}, {'default_value': 'hightime.timedelta(seconds=0.0)', 'direction': 'in', 'documentation': {'description': '\nThe amount of time in seconds that niTClk_WaitUntilDone waits for the\nsessions to complete. If timeout is exceeded, niTClk_WaitUntilDone\nreturns an error.\n'}, 'name': 'timeout', 'python_api_converter_name': 'convert_timedelta_to_seconds_real64', 'type': 'ViReal64', 'type_in_documentation': 'hightime.timedelta, datetime.timedelta, or float in seconds'}], 'returns': 'ViStatus'}} |
print(any([True, 1, ""]))
print(all([True, 1, ""]))
print(dict(zip([1, 2, 3], "abc")))
| print(any([True, 1, '']))
print(all([True, 1, '']))
print(dict(zip([1, 2, 3], 'abc'))) |
top = [2,3,4,5,6]
# lst = [1,0,0,4,5]
lst = [1,2,3,4,5]
k = [9,9,0,0,0]
lst1 = [8,3,9,6,4,7,5,2,1]
lst2 = [10,11,12,8,3,9,6,4,7,5,2,1]
lst3 = [8,9,3,6,7,4,5,2,1]
lst4 = [8,3,9,6,4,7,5,2,1]
k = [9,0,0,0,0,0,0,0]
k = k[::-1]
def main():
subtract_1(lst1, k)
def helper1(lst, start):
new_lst = lst[start:]
index_of_zeros = []
index_of_carry = []
for i in range(len(new_lst)):
if new_lst[i] == 0:
index_of_zeros.append(i+ start)
elif new_lst[i] > 0:
index_of_carry.append(i)
index_of_carry = index_of_carry[1] + start
a = []
a.append([index_of_carry])
a.append(index_of_zeros)
return a
def subtract_1(lst, k):
print("top: ", top)
print("lst: ", lst)
print("k: ", k)
subtract_list = []
list_len = len(lst)
next_borrow = 1
for i in range(list_len):
#print("the list of i is: ", i)
#print("36 ",lst[i] - k[i], lst, k[i])
if (lst[i] - k[i]) >= 0:
#print("37 ",lst[i] - k[i], lst, k[i])
subtract_list.append(lst[i] - k[i])
while (lst[i] - k[i]) < 0:
if lst[i+next_borrow] > 0:
lst[i+next_borrow] -= 1
lst[i] = lst[i] + top[i]
if (lst[i] - k[i]) >= 0:
subtract_list.append(lst[i] - k[i])
#print("44 ",lst[i] - k[i], lst, k[i])
#print("43", lst[i] - k[i], lst[i], k[i])
#print("44",lst[i] - k[i], lst[i], k[i])
#problem!!!!!
elif lst[i+next_borrow] == 0:
a = helper1(lst, i)
index_of_carry = a[0][0]
index_of_zeros = a[1][0]
#print(lst[index_of_carry])
#print(lst[index_of_zeros])
temp = lst[:index_of_carry]
print(temp)
#print("value of i at 52: ",i)
#print("57", lst[i] - k[i], lst[i], k[i])
lst[index_of_carry] -= 1
for j in range(len(temp)):
#print("value of i at 54: ", i)
#print("61", lst[j] + top[j], lst, lst[3], j)
lst[j] += top[j]
if (lst[i] - k[i]) > 0:
subtract_list.append(lst[i] - k[i])
#print("value of i at 60: ",i)
#print("61", lst[i] - k[i], lst[i], k[i])
#problem!!!!!
print("subtract_list:", subtract_list)
if __name__ == "__main__":
main()
| top = [2, 3, 4, 5, 6]
lst = [1, 2, 3, 4, 5]
k = [9, 9, 0, 0, 0]
lst1 = [8, 3, 9, 6, 4, 7, 5, 2, 1]
lst2 = [10, 11, 12, 8, 3, 9, 6, 4, 7, 5, 2, 1]
lst3 = [8, 9, 3, 6, 7, 4, 5, 2, 1]
lst4 = [8, 3, 9, 6, 4, 7, 5, 2, 1]
k = [9, 0, 0, 0, 0, 0, 0, 0]
k = k[::-1]
def main():
subtract_1(lst1, k)
def helper1(lst, start):
new_lst = lst[start:]
index_of_zeros = []
index_of_carry = []
for i in range(len(new_lst)):
if new_lst[i] == 0:
index_of_zeros.append(i + start)
elif new_lst[i] > 0:
index_of_carry.append(i)
index_of_carry = index_of_carry[1] + start
a = []
a.append([index_of_carry])
a.append(index_of_zeros)
return a
def subtract_1(lst, k):
print('top: ', top)
print('lst: ', lst)
print('k: ', k)
subtract_list = []
list_len = len(lst)
next_borrow = 1
for i in range(list_len):
if lst[i] - k[i] >= 0:
subtract_list.append(lst[i] - k[i])
while lst[i] - k[i] < 0:
if lst[i + next_borrow] > 0:
lst[i + next_borrow] -= 1
lst[i] = lst[i] + top[i]
if lst[i] - k[i] >= 0:
subtract_list.append(lst[i] - k[i])
elif lst[i + next_borrow] == 0:
a = helper1(lst, i)
index_of_carry = a[0][0]
index_of_zeros = a[1][0]
temp = lst[:index_of_carry]
print(temp)
lst[index_of_carry] -= 1
for j in range(len(temp)):
lst[j] += top[j]
if lst[i] - k[i] > 0:
subtract_list.append(lst[i] - k[i])
print('subtract_list:', subtract_list)
if __name__ == '__main__':
main() |
day = '2'
with open(f'2015/data/day_{day}.in', 'r', encoding='utf-8') as f:
content = f.read().strip().split('\n')
def make_tup(row):
i = row.index('x')
a = row[:i]
row = row[i + 1:]
i = row.index('x')
b = row[:i]
c = row[i + 1:]
return int(a), int(b), int(c)
def area(a, b, c):
return 2 * (a*b + a*c + b*c)
##### Prva naloga #####
paper1 = 0
for row in content:
a, b, c = make_tup(row)
paper1 += area(a, b, c)
paper1 += min(a*b, b*c, a*c)
##### Druga naloga #####
ribbon2 = 0
for row in content:
a, b, c = make_tup(row)
ribbon2 += min(2*(a+b), 2*(a+c), 2*(b+c))
ribbon2 += a*b*c
def main():
s1 = str(paper1)
print(f'day {day}, puzzle 1: {s1}')
s2 = str(ribbon2)
print(f'day {day}, puzzle 2: {s2}')
with open(f'2015/out/day_{day}_1.out', 'w', encoding='utf-8') as f:
f.write(s1)
with open(f'2015/out/day_{day}_2.out', 'w', encoding='utf-8') as f:
f.write(s2)
main() | day = '2'
with open(f'2015/data/day_{day}.in', 'r', encoding='utf-8') as f:
content = f.read().strip().split('\n')
def make_tup(row):
i = row.index('x')
a = row[:i]
row = row[i + 1:]
i = row.index('x')
b = row[:i]
c = row[i + 1:]
return (int(a), int(b), int(c))
def area(a, b, c):
return 2 * (a * b + a * c + b * c)
paper1 = 0
for row in content:
(a, b, c) = make_tup(row)
paper1 += area(a, b, c)
paper1 += min(a * b, b * c, a * c)
ribbon2 = 0
for row in content:
(a, b, c) = make_tup(row)
ribbon2 += min(2 * (a + b), 2 * (a + c), 2 * (b + c))
ribbon2 += a * b * c
def main():
s1 = str(paper1)
print(f'day {day}, puzzle 1: {s1}')
s2 = str(ribbon2)
print(f'day {day}, puzzle 2: {s2}')
with open(f'2015/out/day_{day}_1.out', 'w', encoding='utf-8') as f:
f.write(s1)
with open(f'2015/out/day_{day}_2.out', 'w', encoding='utf-8') as f:
f.write(s2)
main() |
def translate():
return "jQuery(document).ready(function(){jQuery('body').translate('%s');});" % request.args(0).split('.')[0]
def changeLanguage():
session._language = request.args[0]
#T.force(request.args[0])
#T.set_current_languages(str(request.args[0]),str(request.args[0]) + '-' + str(request.args[0]))
if(len(request.args) == 5):
redirect(URL(request.args[1],request.args[2], request.args[3],args=(request.args[4])))
else:
redirect(URL(request.args[1],request.args[2], request.args[3]))
return
| def translate():
return "jQuery(document).ready(function(){jQuery('body').translate('%s');});" % request.args(0).split('.')[0]
def change_language():
session._language = request.args[0]
if len(request.args) == 5:
redirect(url(request.args[1], request.args[2], request.args[3], args=request.args[4]))
else:
redirect(url(request.args[1], request.args[2], request.args[3]))
return |
def main():
square = int(input("Calculate square root of: "))
print("square root of " + str(square) + " is " +
str(binsquareroot(square)))
def binsquareroot(square):
if square < 1:
return "an imaginair number"
if square == 1:
return 1
left = 1
right = square
mid = right
while (left + 1 < right):
mid = int(left + (right - left) / 2)
root = mid * mid
if root == square:
return mid
elif root > square:
right = mid
else: # root < square
left = mid
return "not a flat root"
if __name__ == '__main__':
main()
| def main():
square = int(input('Calculate square root of: '))
print('square root of ' + str(square) + ' is ' + str(binsquareroot(square)))
def binsquareroot(square):
if square < 1:
return 'an imaginair number'
if square == 1:
return 1
left = 1
right = square
mid = right
while left + 1 < right:
mid = int(left + (right - left) / 2)
root = mid * mid
if root == square:
return mid
elif root > square:
right = mid
else:
left = mid
return 'not a flat root'
if __name__ == '__main__':
main() |
# helpers.py
def url_join(*args, end_slash = True):
strip_args = [str(a).rstrip("/") for a in args]
url = "/".join(strip_args)
if end_slash and not url.endswith("/"):
url = url + "/"
return url
| def url_join(*args, end_slash=True):
strip_args = [str(a).rstrip('/') for a in args]
url = '/'.join(strip_args)
if end_slash and (not url.endswith('/')):
url = url + '/'
return url |
#
# Copyright (c) 2010-2016, Fabric Software Inc. All rights reserved.
#
class DirQualTypeInfo:
def __init__(self, dir_qual, type_info):
self.dir_qual = dir_qual
self.type_info = type_info
@property
def dq(self):
return self.dir_qual
@property
def ti(self):
return self.type_info
def get_desc(self):
return "%s:%s" % (self.dir_qual.get_desc(), self.type_info.get_desc())
def __str__(self):
return self.get_desc()
| class Dirqualtypeinfo:
def __init__(self, dir_qual, type_info):
self.dir_qual = dir_qual
self.type_info = type_info
@property
def dq(self):
return self.dir_qual
@property
def ti(self):
return self.type_info
def get_desc(self):
return '%s:%s' % (self.dir_qual.get_desc(), self.type_info.get_desc())
def __str__(self):
return self.get_desc() |
grade = 95
if grade >= 90:
print("A")
elif grade >= 80:
print("B")
elif grade >= 70:
print("C")
elif grade >= 60:
print("D")
else:
print("F") | grade = 95
if grade >= 90:
print('A')
elif grade >= 80:
print('B')
elif grade >= 70:
print('C')
elif grade >= 60:
print('D')
else:
print('F') |
def gen_src(count):
for i in range(1, count):
data = "".join(["%d" % x for x in range(1, 10000)])
native.genrule(
name = "generated_class_%d" % i,
out = "Class%d.java" % i,
bash = "echo -e 'package gen;\npublic class Class%d { static String data = \"%s\"; }' > $OUT" % (i, data),
)
native.android_library(
name = "generated_lib_%d" % i,
srcs = [":generated_class_%d" % i],
)
return [":generated_lib_%d" % x for x in range(1, count)]
def gen_src_with_refs(index, ref_count, type):
if type == "method":
refs = " ".join(["void fun%d() {};\n" % i for i in range(1, ref_count + 1)])
elif type == "field":
refs = " ".join(["int field%d = 1;\n" % i for i in range(1, ref_count + 1)])
else:
fail("unknown type")
name = "generated_class_%d_%d_%s_refs" % (index, ref_count, type)
native.genrule(
name = name,
out = "Class%d.java" % index,
bash = "echo -e 'package gen;\npublic class Class%d {\n%s}' > $OUT" % (index, refs),
)
return ":%s" % name
def gen_overflow_lib(type):
for i in range(1, 15):
native.android_library(
name = "generated_lib_%s_overflow_%d" % (type, i),
srcs = [gen_src_with_refs(i, 5000, type)],
visibility = ["PUBLIC"],
)
return [":" + "generated_lib_%s_overflow_%d" % (type, i) for i in range(1, 15)]
def gen_primary_dex_overflow(type, gen_deps):
native.android_binary(
name = "primary_dex_%s_overflow" % type,
dex_group_lib_limit = 1,
keystore = "//keystores:debug",
manifest = "SimpleManifest.xml",
primary_dex_patterns = [
"^gen/Class",
],
use_split_dex = True,
deps = [
"//java/com/sample/app:app",
"//java/com/sample/lib:lib",
] + gen_deps,
)
def gen_secondary_dex_overflow(type, gen_deps):
native.android_binary(
name = "secondary_dex_%s_overflow" % type,
dex_group_lib_limit = 1,
secondary_dex_weight_limit = 1024 * 1024 * 64,
keystore = "//keystores:debug",
manifest = "SimpleManifest.xml",
primary_dex_patterns = [
"/MyApplication^",
],
use_split_dex = True,
deps = [
"//java/com/sample/app:app",
"//java/com/sample/lib:lib",
] + gen_deps,
)
| def gen_src(count):
for i in range(1, count):
data = ''.join(['%d' % x for x in range(1, 10000)])
native.genrule(name='generated_class_%d' % i, out='Class%d.java' % i, bash='echo -e \'package gen;\npublic class Class%d { static String data = "%s"; }\' > $OUT' % (i, data))
native.android_library(name='generated_lib_%d' % i, srcs=[':generated_class_%d' % i])
return [':generated_lib_%d' % x for x in range(1, count)]
def gen_src_with_refs(index, ref_count, type):
if type == 'method':
refs = ' '.join(['void fun%d() {};\n' % i for i in range(1, ref_count + 1)])
elif type == 'field':
refs = ' '.join(['int field%d = 1;\n' % i for i in range(1, ref_count + 1)])
else:
fail('unknown type')
name = 'generated_class_%d_%d_%s_refs' % (index, ref_count, type)
native.genrule(name=name, out='Class%d.java' % index, bash="echo -e 'package gen;\npublic class Class%d {\n%s}' > $OUT" % (index, refs))
return ':%s' % name
def gen_overflow_lib(type):
for i in range(1, 15):
native.android_library(name='generated_lib_%s_overflow_%d' % (type, i), srcs=[gen_src_with_refs(i, 5000, type)], visibility=['PUBLIC'])
return [':' + 'generated_lib_%s_overflow_%d' % (type, i) for i in range(1, 15)]
def gen_primary_dex_overflow(type, gen_deps):
native.android_binary(name='primary_dex_%s_overflow' % type, dex_group_lib_limit=1, keystore='//keystores:debug', manifest='SimpleManifest.xml', primary_dex_patterns=['^gen/Class'], use_split_dex=True, deps=['//java/com/sample/app:app', '//java/com/sample/lib:lib'] + gen_deps)
def gen_secondary_dex_overflow(type, gen_deps):
native.android_binary(name='secondary_dex_%s_overflow' % type, dex_group_lib_limit=1, secondary_dex_weight_limit=1024 * 1024 * 64, keystore='//keystores:debug', manifest='SimpleManifest.xml', primary_dex_patterns=['/MyApplication^'], use_split_dex=True, deps=['//java/com/sample/app:app', '//java/com/sample/lib:lib'] + gen_deps) |
##Exemplo retirado do site http://code.tutsplus.com/tutorials/beginning-test-driven-development-in-python--net-30137
##//lhekheklqhlekhqkehqkehqkhelqw
##//ljkfhjdhfjkdhfkjlsdhlfkhslkjkljdflksgflsgdf
##//lkhdsklfskfgshgfsjhgfs
class Calculator(object):
def add(self, x, y):
number_types = (int, float, complex)
if instance(x, number_types) and instance(y, number_types):
return x + y
else:
raise ValueError
def sub(self, x, y):
number_types = (int, float, complex)
if instance(x, number_types) and instance(y, number_types):
return x + y
else:
raise ValueError
def mult(self, x, y):
number_types = (int, float, complex)
if instance(x, number_types) and instance(y, number_types):
return x + y
else:
raise ValueError
def div(self, x, y):
number_types = (int, float, complex)
if instance(x, number_types) and instance(y, number_types):
return x + y
else:
raise ValueError
| class Calculator(object):
def add(self, x, y):
number_types = (int, float, complex)
if instance(x, number_types) and instance(y, number_types):
return x + y
else:
raise ValueError
def sub(self, x, y):
number_types = (int, float, complex)
if instance(x, number_types) and instance(y, number_types):
return x + y
else:
raise ValueError
def mult(self, x, y):
number_types = (int, float, complex)
if instance(x, number_types) and instance(y, number_types):
return x + y
else:
raise ValueError
def div(self, x, y):
number_types = (int, float, complex)
if instance(x, number_types) and instance(y, number_types):
return x + y
else:
raise ValueError |
'''
Prompt:
Write a function bestSum(targetSum, numbers) that takes
in a targetSum and an array of numbers as arguments.
The function should return an array containing the
shortest combination of numbers that add up to exactly the targetSum.
If there is a tie for the shotest combination, you may return any of the shortest.
'''
# takes in a numbers array in decreasing order
def recurse(targetSum, numbers, combination = []):
if targetSum == 0:
return combination
if targetSum < 0:
return None
for n in numbers:
result = recurse(targetSum-n, numbers, [*combination, n])
if result is not None:
return result
return None
# Greedy approach. This only works on some sets of coins.
# Why? Consider this input: bestSum(8, [1, 4, 5])
# Should return [4, 4], but we are returning [5, 1, 1, 1]
def bestSum(targetSum, numbers):
numbers.sort(reverse=True)
return recurse(targetSum, numbers)
# Correct optimal solution with dynamic programming
def bestSum_V2(targetSum, numbers, cache = {}):
if targetSum in cache:
return cache[targetSum]
if targetSum == 0:
return []
if targetSum < 0:
return None
best = None
for n in numbers:
result = bestSum_V2(targetSum-n, numbers, cache)
if result is not None:
current = [*result, n]
if best is None or len(best) > len(current):
best = current
cache[targetSum] = best
return best
# print(bestSum(28, [7, 2, 1, 10, 5]))
# print(bestSum(7, [5, 3, 4, 7]))
# print(bestSum(8, [1, 4, 5]))
# print(bestSum_V2(28, [7, 2, 1, 10, 5]))
# print(bestSum_V2(7, [5, 3, 4, 7]))
# print(bestSum_V2(300, [100, 150, 7, 14]))
print(bestSum_V2(8, [1, 4, 5]))
| """
Prompt:
Write a function bestSum(targetSum, numbers) that takes
in a targetSum and an array of numbers as arguments.
The function should return an array containing the
shortest combination of numbers that add up to exactly the targetSum.
If there is a tie for the shotest combination, you may return any of the shortest.
"""
def recurse(targetSum, numbers, combination=[]):
if targetSum == 0:
return combination
if targetSum < 0:
return None
for n in numbers:
result = recurse(targetSum - n, numbers, [*combination, n])
if result is not None:
return result
return None
def best_sum(targetSum, numbers):
numbers.sort(reverse=True)
return recurse(targetSum, numbers)
def best_sum_v2(targetSum, numbers, cache={}):
if targetSum in cache:
return cache[targetSum]
if targetSum == 0:
return []
if targetSum < 0:
return None
best = None
for n in numbers:
result = best_sum_v2(targetSum - n, numbers, cache)
if result is not None:
current = [*result, n]
if best is None or len(best) > len(current):
best = current
cache[targetSum] = best
return best
print(best_sum_v2(8, [1, 4, 5])) |
apps_details = [
{
"app": "Learning xc functional from experimental data",
"repo": "https://github.com/mfkasim1/xcnn", # leave blank if no repo available
# leave blank if no paper available, strongly suggested to link to open-access paper
"paper": "https://arxiv.org/abs/2102.04229",
},
{
"app": "Basis optimization",
"repo": "https://github.com/diffqc/dqc-apps/tree/main/01-basis-opt",
"paper": "",
},
{
"app": "Alchemical perturbation",
"repo": "https://github.com/diffqc/dqc-apps/tree/main/04-alchemical-perturbation",
"paper": "",
},
]
repo_icons = {
"github": "docs/data/readme_icons/github.svg",
}
paper_icon = "docs/data/readme_icons/paper.svg"
def get_repo_name(repo_link):
# get the repository name
for repo_name in repo_icons.keys():
if repo_name in repo_link:
return repo_name
raise RuntimeError("Unlisted repository, please contact admin to add the repository.")
def add_row(app_detail):
# get the string for repository column
if app_detail['repo'].strip() != "":
repo_name = get_repo_name(app_detail['repo'])
repo_detail = f"[]({app_detail['repo']})"
else:
repo_detail = ""
# get the string for the paper column
if app_detail['paper'].strip() != "":
paper_detail = f"[]({app_detail['paper']})"
else:
paper_detail = ""
s = f"| {app_detail['app']} | {repo_detail} | {paper_detail} |\n"
return s
def main():
# construct the strings
s = "| Applications | Repo | Paper |\n"
s += "|-----------------------------------|------|-------|\n"
for app_detail in apps_details:
s += add_row(app_detail)
# open the readme file
fname = "README.md"
with open(fname, "r") as f:
content = f.read()
# find the signature in README
sig_start = "<!-- start of readme_appgen.py -->"
sig_end = "<!-- end of readme_appgen.py -->"
note = "<!-- Please do not edit this part directly, instead add your " + \
"application in the readme_appgen.py file -->\n"
idx_start = content.find(sig_start)
idx_end = content.find(sig_end)
# write the string to the README
content = content[:idx_start] + sig_start + "\n" + note + s + content[idx_end:]
with open(fname, "w") as f:
f.write(content)
if __name__ == "__main__":
main()
| apps_details = [{'app': 'Learning xc functional from experimental data', 'repo': 'https://github.com/mfkasim1/xcnn', 'paper': 'https://arxiv.org/abs/2102.04229'}, {'app': 'Basis optimization', 'repo': 'https://github.com/diffqc/dqc-apps/tree/main/01-basis-opt', 'paper': ''}, {'app': 'Alchemical perturbation', 'repo': 'https://github.com/diffqc/dqc-apps/tree/main/04-alchemical-perturbation', 'paper': ''}]
repo_icons = {'github': 'docs/data/readme_icons/github.svg'}
paper_icon = 'docs/data/readme_icons/paper.svg'
def get_repo_name(repo_link):
for repo_name in repo_icons.keys():
if repo_name in repo_link:
return repo_name
raise runtime_error('Unlisted repository, please contact admin to add the repository.')
def add_row(app_detail):
if app_detail['repo'].strip() != '':
repo_name = get_repo_name(app_detail['repo'])
repo_detail = f"[]({app_detail['repo']})"
else:
repo_detail = ''
if app_detail['paper'].strip() != '':
paper_detail = f"[]({app_detail['paper']})"
else:
paper_detail = ''
s = f"| {app_detail['app']} | {repo_detail} | {paper_detail} |\n"
return s
def main():
s = '| Applications | Repo | Paper |\n'
s += '|-----------------------------------|------|-------|\n'
for app_detail in apps_details:
s += add_row(app_detail)
fname = 'README.md'
with open(fname, 'r') as f:
content = f.read()
sig_start = '<!-- start of readme_appgen.py -->'
sig_end = '<!-- end of readme_appgen.py -->'
note = '<!-- Please do not edit this part directly, instead add your ' + 'application in the readme_appgen.py file -->\n'
idx_start = content.find(sig_start)
idx_end = content.find(sig_end)
content = content[:idx_start] + sig_start + '\n' + note + s + content[idx_end:]
with open(fname, 'w') as f:
f.write(content)
if __name__ == '__main__':
main() |
class ChainMap:
def __init__(self, *maps):
if maps:
self.maps = list(maps)
else:
self.maps = [{}]
def __getitem__(self, k):
for m in self.maps:
if k in m:
return m[k]
raise KeyError(k)
def __setitem__(self, k, v):
self.maps[0][k] = v
def __delitem__(self, k):
del self.maps[0][k]
| class Chainmap:
def __init__(self, *maps):
if maps:
self.maps = list(maps)
else:
self.maps = [{}]
def __getitem__(self, k):
for m in self.maps:
if k in m:
return m[k]
raise key_error(k)
def __setitem__(self, k, v):
self.maps[0][k] = v
def __delitem__(self, k):
del self.maps[0][k] |
#
# PySNMP MIB module BDCOM-FLASH (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BDCOM-FLASH
# Produced by pysmi-0.3.4 at Wed May 1 11:36:39 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ConstraintsUnion, ValueRangeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "ValueRangeConstraint", "SingleValueConstraint")
bdlocal, = mibBuilder.importSymbols("BDCOM-SMI", "bdlocal")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
ObjectIdentity, Counter32, Unsigned32, Gauge32, TimeTicks, IpAddress, ModuleIdentity, MibIdentifier, Counter64, MibScalar, MibTable, MibTableRow, MibTableColumn, iso, NotificationType, Integer32, Bits = mibBuilder.importSymbols("SNMPv2-SMI", "ObjectIdentity", "Counter32", "Unsigned32", "Gauge32", "TimeTicks", "IpAddress", "ModuleIdentity", "MibIdentifier", "Counter64", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "NotificationType", "Integer32", "Bits")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
bdlflash = MibIdentifier((1, 3, 6, 1, 4, 1, 3320, 2, 10))
bdflashSize = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashSize.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashSize.setDescription('Total Size in Octets of Flash memory')
bdflashFree = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashFree.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashFree.setDescription('Unused Size in Octets of Flash memory')
bdflashController = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashController.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashController.setDescription('Provides the type of Flash controller (either CCTL or CCTL2) installed in the router.')
bdflashCard = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashCard.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashCard.setDescription('Provides the type of Flash Card installed in the router. For example, the type of Flash Card could be either CSC-MS or CSC-MC+.')
bdflashVPP = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("installed", 1), ("missing", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashVPP.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashVPP.setDescription('State of the VPP DIP jumper on the Flash memory card. Files can be written to the Flash memory card only if the VPP DIP jumper is turned on.')
bdflashErase = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 6), Integer32()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: bdflashErase.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashErase.setDescription('Request to erase flash memory')
bdflashEraseTime = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 7), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashEraseTime.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashEraseTime.setDescription('Indicates the value of sysUptime the last time Flash memory was erased. If the flash had not been erased after powerup it has a value of 0 days 00:00:00.')
bdflashEraseStatus = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("flashOpInProgress", 1), ("flashOpSuccess", 2), ("flashOpFailure", 3), ("flashReadOnly", 4), ("flashOpenFailure", 5), ("bufferAllocationFailure", 6), ("noOpAfterPowerOn", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashEraseStatus.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashEraseStatus.setDescription('Status of current or last flash erasing')
bdflashToNet = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 9), DisplayString()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: bdflashToNet.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashToNet.setDescription('Write flash entry to tftp server. Value should be the name of the flash entry to send. Instance is the IP address of the tftp host.')
bdflashToNetTime = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 10), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashToNetTime.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashToNetTime.setDescription('Indicates the value of sysUpTime the last time a file was transfered from Flash memory on the router to a TFTP host. Returns 0 days 00:00:00 if there had been no transfer since powerup.')
bdflashToNetStatus = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("flashOpInProgress", 1), ("flashOpSuccess", 2), ("flashOpFailure", 3), ("flashReadOnly", 4), ("flashOpenFailure", 5), ("bufferAllocationFailure", 6), ("noOpAfterPowerOn", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashToNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashToNetStatus.setDescription('Status of current or last flash to net transfer')
bdnetToFlash = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 12), DisplayString()).setMaxAccess("writeonly")
if mibBuilder.loadTexts: bdnetToFlash.setStatus('mandatory')
if mibBuilder.loadTexts: bdnetToFlash.setDescription('Write flash entry from tftp server. Value should be the name of the flash entry to write. Instance is the IP address of the tftp host.')
bdnetToFlashTime = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 13), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdnetToFlashTime.setStatus('mandatory')
if mibBuilder.loadTexts: bdnetToFlashTime.setDescription('Indicates the value of sysUpTime the last time file was copied from a Trivial File Transfer Protocol(TFTP) server to the Flash memory on the router. Returns 0 days 00:00:00 if there had been no transfers since powerup.')
bdnetToFlashStatus = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("flashOpInProgress", 1), ("flashOpSuccess", 2), ("flashOpFailure", 3), ("flashReadOnly", 4), ("flashOpenFailure", 5), ("bufferAllocationFailure", 6), ("noOpAfterPowerOn", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdnetToFlashStatus.setStatus('mandatory')
if mibBuilder.loadTexts: bdnetToFlashStatus.setDescription('Status of current or last net to flash transfer')
bdflashStatus = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("busy", 1), ("available", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashStatus.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashStatus.setDescription('Status of the availability of flash')
bdflashEntries = MibScalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashEntries.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashEntries.setDescription('Number of entries in the flash directory')
bdlflashFileDirTable = MibTable((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17), )
if mibBuilder.loadTexts: bdlflashFileDirTable.setStatus('mandatory')
if mibBuilder.loadTexts: bdlflashFileDirTable.setDescription(' A list of flash file entries.')
bdlflashFileDirEntry = MibTableRow((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17, 1), ).setIndexNames((0, "BDCOM-FLASH", "flashEntries"))
if mibBuilder.loadTexts: bdlflashFileDirEntry.setStatus('mandatory')
if mibBuilder.loadTexts: bdlflashFileDirEntry.setDescription('A collection of flash eprom objects')
bdflashDirName = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17, 1, 1), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashDirName.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashDirName.setDescription('Name associated with the flash entry')
bdflashDirSize = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashDirSize.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashDirSize.setDescription('Size in Octets of a flash entry')
bdflashDirStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("valid", 1), ("deleted", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: bdflashDirStatus.setStatus('mandatory')
if mibBuilder.loadTexts: bdflashDirStatus.setDescription('Indicates the status of the entry')
mibBuilder.exportSymbols("BDCOM-FLASH", bdflashVPP=bdflashVPP, bdflashEraseTime=bdflashEraseTime, bdflashSize=bdflashSize, bdflashDirStatus=bdflashDirStatus, bdlflashFileDirEntry=bdlflashFileDirEntry, bdflashEntries=bdflashEntries, bdflashToNet=bdflashToNet, bdflashEraseStatus=bdflashEraseStatus, bdflashFree=bdflashFree, bdlflash=bdlflash, bdflashCard=bdflashCard, bdflashController=bdflashController, bdnetToFlashStatus=bdnetToFlashStatus, bdnetToFlashTime=bdnetToFlashTime, bdflashDirName=bdflashDirName, bdlflashFileDirTable=bdlflashFileDirTable, bdflashStatus=bdflashStatus, bdflashToNetStatus=bdflashToNetStatus, bdflashDirSize=bdflashDirSize, bdflashErase=bdflashErase, bdflashToNetTime=bdflashToNetTime, bdnetToFlash=bdnetToFlash)
| (octet_string, integer, object_identifier) = mibBuilder.importSymbols('ASN1', 'OctetString', 'Integer', 'ObjectIdentifier')
(named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues')
(value_size_constraint, constraints_intersection, constraints_union, value_range_constraint, single_value_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'ValueSizeConstraint', 'ConstraintsIntersection', 'ConstraintsUnion', 'ValueRangeConstraint', 'SingleValueConstraint')
(bdlocal,) = mibBuilder.importSymbols('BDCOM-SMI', 'bdlocal')
(module_compliance, notification_group) = mibBuilder.importSymbols('SNMPv2-CONF', 'ModuleCompliance', 'NotificationGroup')
(object_identity, counter32, unsigned32, gauge32, time_ticks, ip_address, module_identity, mib_identifier, counter64, mib_scalar, mib_table, mib_table_row, mib_table_column, iso, notification_type, integer32, bits) = mibBuilder.importSymbols('SNMPv2-SMI', 'ObjectIdentity', 'Counter32', 'Unsigned32', 'Gauge32', 'TimeTicks', 'IpAddress', 'ModuleIdentity', 'MibIdentifier', 'Counter64', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'iso', 'NotificationType', 'Integer32', 'Bits')
(display_string, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TextualConvention')
bdlflash = mib_identifier((1, 3, 6, 1, 4, 1, 3320, 2, 10))
bdflash_size = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 1), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashSize.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashSize.setDescription('Total Size in Octets of Flash memory')
bdflash_free = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 2), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashFree.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashFree.setDescription('Unused Size in Octets of Flash memory')
bdflash_controller = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 3), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashController.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashController.setDescription('Provides the type of Flash controller (either CCTL or CCTL2) installed in the router.')
bdflash_card = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 4), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashCard.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashCard.setDescription('Provides the type of Flash Card installed in the router. For example, the type of Flash Card could be either CSC-MS or CSC-MC+.')
bdflash_vpp = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 5), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('installed', 1), ('missing', 2)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashVPP.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashVPP.setDescription('State of the VPP DIP jumper on the Flash memory card. Files can be written to the Flash memory card only if the VPP DIP jumper is turned on.')
bdflash_erase = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 6), integer32()).setMaxAccess('writeonly')
if mibBuilder.loadTexts:
bdflashErase.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashErase.setDescription('Request to erase flash memory')
bdflash_erase_time = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 7), time_ticks()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashEraseTime.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashEraseTime.setDescription('Indicates the value of sysUptime the last time Flash memory was erased. If the flash had not been erased after powerup it has a value of 0 days 00:00:00.')
bdflash_erase_status = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 8), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=named_values(('flashOpInProgress', 1), ('flashOpSuccess', 2), ('flashOpFailure', 3), ('flashReadOnly', 4), ('flashOpenFailure', 5), ('bufferAllocationFailure', 6), ('noOpAfterPowerOn', 7)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashEraseStatus.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashEraseStatus.setDescription('Status of current or last flash erasing')
bdflash_to_net = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 9), display_string()).setMaxAccess('writeonly')
if mibBuilder.loadTexts:
bdflashToNet.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashToNet.setDescription('Write flash entry to tftp server. Value should be the name of the flash entry to send. Instance is the IP address of the tftp host.')
bdflash_to_net_time = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 10), time_ticks()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashToNetTime.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashToNetTime.setDescription('Indicates the value of sysUpTime the last time a file was transfered from Flash memory on the router to a TFTP host. Returns 0 days 00:00:00 if there had been no transfer since powerup.')
bdflash_to_net_status = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 11), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=named_values(('flashOpInProgress', 1), ('flashOpSuccess', 2), ('flashOpFailure', 3), ('flashReadOnly', 4), ('flashOpenFailure', 5), ('bufferAllocationFailure', 6), ('noOpAfterPowerOn', 7)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashToNetStatus.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashToNetStatus.setDescription('Status of current or last flash to net transfer')
bdnet_to_flash = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 12), display_string()).setMaxAccess('writeonly')
if mibBuilder.loadTexts:
bdnetToFlash.setStatus('mandatory')
if mibBuilder.loadTexts:
bdnetToFlash.setDescription('Write flash entry from tftp server. Value should be the name of the flash entry to write. Instance is the IP address of the tftp host.')
bdnet_to_flash_time = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 13), time_ticks()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdnetToFlashTime.setStatus('mandatory')
if mibBuilder.loadTexts:
bdnetToFlashTime.setDescription('Indicates the value of sysUpTime the last time file was copied from a Trivial File Transfer Protocol(TFTP) server to the Flash memory on the router. Returns 0 days 00:00:00 if there had been no transfers since powerup.')
bdnet_to_flash_status = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 14), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=named_values(('flashOpInProgress', 1), ('flashOpSuccess', 2), ('flashOpFailure', 3), ('flashReadOnly', 4), ('flashOpenFailure', 5), ('bufferAllocationFailure', 6), ('noOpAfterPowerOn', 7)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdnetToFlashStatus.setStatus('mandatory')
if mibBuilder.loadTexts:
bdnetToFlashStatus.setDescription('Status of current or last net to flash transfer')
bdflash_status = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 15), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('busy', 1), ('available', 2)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashStatus.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashStatus.setDescription('Status of the availability of flash')
bdflash_entries = mib_scalar((1, 3, 6, 1, 4, 1, 3320, 2, 10, 16), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashEntries.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashEntries.setDescription('Number of entries in the flash directory')
bdlflash_file_dir_table = mib_table((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17))
if mibBuilder.loadTexts:
bdlflashFileDirTable.setStatus('mandatory')
if mibBuilder.loadTexts:
bdlflashFileDirTable.setDescription(' A list of flash file entries.')
bdlflash_file_dir_entry = mib_table_row((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17, 1)).setIndexNames((0, 'BDCOM-FLASH', 'flashEntries'))
if mibBuilder.loadTexts:
bdlflashFileDirEntry.setStatus('mandatory')
if mibBuilder.loadTexts:
bdlflashFileDirEntry.setDescription('A collection of flash eprom objects')
bdflash_dir_name = mib_table_column((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17, 1, 1), display_string()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashDirName.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashDirName.setDescription('Name associated with the flash entry')
bdflash_dir_size = mib_table_column((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17, 1, 2), integer32()).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashDirSize.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashDirSize.setDescription('Size in Octets of a flash entry')
bdflash_dir_status = mib_table_column((1, 3, 6, 1, 4, 1, 3320, 2, 10, 17, 1, 3), integer32().subtype(subtypeSpec=constraints_union(single_value_constraint(1, 2))).clone(namedValues=named_values(('valid', 1), ('deleted', 2)))).setMaxAccess('readonly')
if mibBuilder.loadTexts:
bdflashDirStatus.setStatus('mandatory')
if mibBuilder.loadTexts:
bdflashDirStatus.setDescription('Indicates the status of the entry')
mibBuilder.exportSymbols('BDCOM-FLASH', bdflashVPP=bdflashVPP, bdflashEraseTime=bdflashEraseTime, bdflashSize=bdflashSize, bdflashDirStatus=bdflashDirStatus, bdlflashFileDirEntry=bdlflashFileDirEntry, bdflashEntries=bdflashEntries, bdflashToNet=bdflashToNet, bdflashEraseStatus=bdflashEraseStatus, bdflashFree=bdflashFree, bdlflash=bdlflash, bdflashCard=bdflashCard, bdflashController=bdflashController, bdnetToFlashStatus=bdnetToFlashStatus, bdnetToFlashTime=bdnetToFlashTime, bdflashDirName=bdflashDirName, bdlflashFileDirTable=bdlflashFileDirTable, bdflashStatus=bdflashStatus, bdflashToNetStatus=bdflashToNetStatus, bdflashDirSize=bdflashDirSize, bdflashErase=bdflashErase, bdflashToNetTime=bdflashToNetTime, bdnetToFlash=bdnetToFlash) |
# https://www.codewars.com/kata/human-readable-duration-format
def format_duration(seconds):
if not seconds:
return "now"
units = [
("year", 365 * 24 * 60 * 60),
("day", 24 * 60 * 60),
("hour", 60 * 60),
("minute", 60),
("second", 1)
]
parts = []
for unit, divisor in units:
quantity, seconds = divmod(seconds, divisor)
if quantity:
parts.append("{} {}{}".format(quantity, unit, "s" if quantity > 1 else ""))
return parts[0] if len(parts) == 1 else ", ".join(parts[:-1]) + " and " + parts[-1] | def format_duration(seconds):
if not seconds:
return 'now'
units = [('year', 365 * 24 * 60 * 60), ('day', 24 * 60 * 60), ('hour', 60 * 60), ('minute', 60), ('second', 1)]
parts = []
for (unit, divisor) in units:
(quantity, seconds) = divmod(seconds, divisor)
if quantity:
parts.append('{} {}{}'.format(quantity, unit, 's' if quantity > 1 else ''))
return parts[0] if len(parts) == 1 else ', '.join(parts[:-1]) + ' and ' + parts[-1] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.