content
stringlengths
7
1.05M
fixed_cases
stringlengths
1
1.28M
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # x = int(input()) y = int(input()) day = 1 while y - x > 0: x = x + (x * 0.1) day += 1 print(day)
x = int(input()) y = int(input()) day = 1 while y - x > 0: x = x + x * 0.1 day += 1 print(day)
_windows_build_file_content = """ alias( name = "doxygen", visibility = ["//visibility:public"], actual = "{DOXYGEN_VERSION}/doxygen.exe", ) """ _known_archives = { "1.8.19": { "windows64": struct( urls = ["https://doxygen.nl/files/doxygen-1.8.19.windows.x64.bin.zip"], strip_prefix = "", sha256 = "36beda917f395b4160777f3689e86d6b7e4e51f6f9432413db8fe3b9279e6082", build_file_content = _windows_build_file_content, ), "windows32": struct( urls = ["https://doxygen.nl/files/doxygen-1.8.19.windows.bin.zip"], strip_prefix = "", sha256 = "", build_file_content = _windows_build_file_content, ), }, } def _os_key(os): if os.name.find("windows") != -1: return "windows32" elif os.name.find("linux") != -1: return "linux64" return os.name def _get_doxygen_archive(rctx): doxygen_version = rctx.attr.doxygen_version archives = _known_archives.get(doxygen_version) if not archives: fail("rules_doxygen unsupported doxygen_version: {}".format(doxygen_version)) archive = archives.get(_os_key(rctx.os)) if not archive: fail("rules_doxygen unknown doxygen version / operating system combo: doxygen_version={} os=".format(doxygen_version, rctx.os.name)) return archive def _doxygen_repository(rctx): archive = _get_doxygen_archive(rctx) rctx.download_and_extract(archive.urls, output = rctx.attr.doxygen_version, stripPrefix = archive.strip_prefix, sha256 = archive.sha256) rctx.file("BUILD.bazel", archive.build_file_content.format(DOXYGEN_VERSION=rctx.attr.doxygen_version), executable = False) doxygen_repository = repository_rule( implementation = _doxygen_repository, attrs = { "doxygen_version": attr.string( default = "1.8.19", values = _known_archives.keys(), ), }, )
_windows_build_file_content = '\nalias(\n name = "doxygen",\n visibility = ["//visibility:public"],\n actual = "{DOXYGEN_VERSION}/doxygen.exe",\n)\n' _known_archives = {'1.8.19': {'windows64': struct(urls=['https://doxygen.nl/files/doxygen-1.8.19.windows.x64.bin.zip'], strip_prefix='', sha256='36beda917f395b4160777f3689e86d6b7e4e51f6f9432413db8fe3b9279e6082', build_file_content=_windows_build_file_content), 'windows32': struct(urls=['https://doxygen.nl/files/doxygen-1.8.19.windows.bin.zip'], strip_prefix='', sha256='', build_file_content=_windows_build_file_content)}} def _os_key(os): if os.name.find('windows') != -1: return 'windows32' elif os.name.find('linux') != -1: return 'linux64' return os.name def _get_doxygen_archive(rctx): doxygen_version = rctx.attr.doxygen_version archives = _known_archives.get(doxygen_version) if not archives: fail('rules_doxygen unsupported doxygen_version: {}'.format(doxygen_version)) archive = archives.get(_os_key(rctx.os)) if not archive: fail('rules_doxygen unknown doxygen version / operating system combo: doxygen_version={} os='.format(doxygen_version, rctx.os.name)) return archive def _doxygen_repository(rctx): archive = _get_doxygen_archive(rctx) rctx.download_and_extract(archive.urls, output=rctx.attr.doxygen_version, stripPrefix=archive.strip_prefix, sha256=archive.sha256) rctx.file('BUILD.bazel', archive.build_file_content.format(DOXYGEN_VERSION=rctx.attr.doxygen_version), executable=False) doxygen_repository = repository_rule(implementation=_doxygen_repository, attrs={'doxygen_version': attr.string(default='1.8.19', values=_known_archives.keys())})
TELEGRAM_API_TOKEN = 'Tel Bot Token By @BotFather' UPLOADER = { 'uploader': 'reverse_image_search_bot.uploaders.ssh.SSHUploader', 'url': 'Host Domain Name', 'configuration': { 'host': 'Host IP (PUBLIC)', 'user': 'Yourname', 'password': 'Password', 'upload_dir': '/path/to/ReVot/', 'key_filename': '/path/to/.ssh/rsakey.pub (Public key)', } }
telegram_api_token = 'Tel Bot Token By @BotFather' uploader = {'uploader': 'reverse_image_search_bot.uploaders.ssh.SSHUploader', 'url': 'Host Domain Name', 'configuration': {'host': 'Host IP (PUBLIC)', 'user': 'Yourname', 'password': 'Password', 'upload_dir': '/path/to/ReVot/', 'key_filename': '/path/to/.ssh/rsakey.pub (Public key)'}}
# Time: O(n * m^2), n is the number of rows with 1s, m is the number of cols with 1s # Space: O(n * m) class Solution(object): def countCornerRectangles(self, grid): """ :type grid: List[List[int]] :rtype: int """ rows = [[c for c, val in enumerate(row) if val] for row in grid] result = 0 for i in range(len(rows)): lookup = set(rows[i]) for j in range(i): count = sum(1 for c in rows[j] if c in lookup) result += count*(count-1)/2 return result
class Solution(object): def count_corner_rectangles(self, grid): """ :type grid: List[List[int]] :rtype: int """ rows = [[c for (c, val) in enumerate(row) if val] for row in grid] result = 0 for i in range(len(rows)): lookup = set(rows[i]) for j in range(i): count = sum((1 for c in rows[j] if c in lookup)) result += count * (count - 1) / 2 return result
# https://www.geeksforgeeks.org/basic/cryptography/ # SGVP387900|14:43 27F19 # Hill Cipher implementation keyMatrix = [[0] * 3 for i in range(3)] # Genrate vector for the message messageVector = [[0] for i in range(3)] # Genrate vector for the cipher cipherMatrix = [[0] for i in range(3)] # Following function generates the # key matrix for the key string def getKeyMatrix(key): k = 0 for i in range(3): for j in range(3): keyMatrix[i][j] = ord(key[k]) % 65 k += 1 # Function encrypts the message def encrypt(messageVector): for i in range(3): for j in range(1): cipherMatrix[i][j] = 0 for x in range(3): cipherMatrix[i][j] += (keyMatrix[i][x] * messageVector[x][j]) cipherMatrix[i][j] = cipherMatrix[i][j] % 26 # https://www.geeksforgeeks.org/hill-cipher/ # HillCipher def HillCipher(message, key): # Get key matrix from the key string getKeyMatrix(key) # Generate vector for the message for i in range(3): messageVector[i][0] = ord(message[i]) % 65 # Function genrates # the encrypted vector encrypt(messageVector) # Generate the encrypted text # from the encrypted vector CipherText = [] for i in range(3): CipherText.append(chr(cipherMatrix[i][0] + 65)) # Print the cipherText print("Ciphertext: ", "".join(CipherText)) # Driver Code def main(): # Get the message to be encrypted message = "ACT" # Get the key key = "GYBNQKURP" HillCipher(message, key) if __name__ == "__main__": main()
key_matrix = [[0] * 3 for i in range(3)] message_vector = [[0] for i in range(3)] cipher_matrix = [[0] for i in range(3)] def get_key_matrix(key): k = 0 for i in range(3): for j in range(3): keyMatrix[i][j] = ord(key[k]) % 65 k += 1 def encrypt(messageVector): for i in range(3): for j in range(1): cipherMatrix[i][j] = 0 for x in range(3): cipherMatrix[i][j] += keyMatrix[i][x] * messageVector[x][j] cipherMatrix[i][j] = cipherMatrix[i][j] % 26 def hill_cipher(message, key): get_key_matrix(key) for i in range(3): messageVector[i][0] = ord(message[i]) % 65 encrypt(messageVector) cipher_text = [] for i in range(3): CipherText.append(chr(cipherMatrix[i][0] + 65)) print('Ciphertext: ', ''.join(CipherText)) def main(): message = 'ACT' key = 'GYBNQKURP' hill_cipher(message, key) if __name__ == '__main__': main()
def weakNumbers(n): all_factors = [count_factors(num) for num in range(1, n + 1)] weaknesses = [] for num, num_factors in enumerate(all_factors, 1): weakness = 0 for factor in all_factors[:num]: if factor > num_factors: weakness += 1 weaknesses.append(weakness) weakest = max(weaknesses) return [weakest, weaknesses.count(weakest)] def count_factors(n): factors = 0 for i in range(1, n + 1): if n % i == 0: factors += 1 return factors print(weakNumbers(500))
def weak_numbers(n): all_factors = [count_factors(num) for num in range(1, n + 1)] weaknesses = [] for (num, num_factors) in enumerate(all_factors, 1): weakness = 0 for factor in all_factors[:num]: if factor > num_factors: weakness += 1 weaknesses.append(weakness) weakest = max(weaknesses) return [weakest, weaknesses.count(weakest)] def count_factors(n): factors = 0 for i in range(1, n + 1): if n % i == 0: factors += 1 return factors print(weak_numbers(500))
#!/usr/bin/env python # # Copyright (C) 2014 The Android Open Source Project # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. arm = """ Build fingerprint: 'Android/aosp_arm/generic_arm:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys' Revision: '0' ABI: 'arm' signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr -------- r0 00000000 r1 00002dd9 r2 00000006 r3 00000000 r4 f710edd8 r5 00000006 r6 00000000 r7 0000010c r8 f71b9df4 r9 ab0b5028 sl f7175695 fp f710edd0 ip 00002dd9 sp f710ed18 lr f7175ef1 pc f719a4e0 cpsr 60070010 d0 ffffffffffffffff d1 0000000000000031 d2 0000000000000037 d3 0000000000000033 d4 0000000000000000 d5 0000000000000000 d6 0000000000000000 d7 0000000000000000 d8 0000000000000000 d9 0000000000000000 d10 0000000000000000 d11 0000000000000000 d12 0000000000000000 d13 0000000000000000 d14 0000000000000000 d15 0000000000000000 d16 0000000000000000 d17 0000000000000fff d18 0000000000000000 d19 0000000000000000 d20 0000000000000000 d21 0000000000000000 d22 0000000000000000 d23 0000000000000000 d24 0000000000000000 d25 0000000000000000 d26 0000000000000000 d27 0000000000000000 d28 0000000000000000 d29 0000000000000000 d30 0000000000000000 d31 0000000000000000 scr 00000000 backtrace: #00 pc 000374e0 /system/lib/libc.so (tgkill+12) #01 pc 00012eed /system/lib/libc.so (pthread_kill+52) #02 pc 00013997 /system/lib/libc.so (raise+10) #03 pc 0001047d /system/lib/libc.so (__libc_android_abort+36) #04 pc 0000eb1c /system/lib/libc.so (abort+4) #05 pc 00000c6f /system/xbin/crasher #06 
pc 000126b3 /system/lib/libc.so (__pthread_start(void*)+30) #07 pc 000107fb /system/lib/libc.so (__start_thread+6) """ arm64 = """ Build fingerprint: 'Android/aosp_arm64/generic_arm64:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys' Revision: '0' ABI: 'arm64' signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr -------- x0 0000000000000000 x1 0000000000002df1 x2 0000000000000006 x3 000000559dc73040 x4 ffffffffffffffff x5 0000000000000005 x6 0000000000000001 x7 0000000000000020 x8 0000000000000083 x9 0000005563d21000 x10 0101010101010101 x11 0000000000000001 x12 0000000000000001 x13 0000005563d21000 x14 0000005563d21000 x15 0000000000000000 x16 0000005563d32f20 x17 0000000000000001 x18 0000000000000000 x19 000000559dc73040 x20 0000007f844dcbb0 x21 0000007f84639000 x22 0000000000000000 x23 0000000000000006 x24 0000007f845b2000 x25 0000007ff8f33bc0 x26 0000007f843df000 x27 000000559dc730c0 x28 0000007f84639788 x29 0000007f844dc9c0 x30 0000007f845b38c4 sp 0000007f844dc9c0 pc 0000007f845f28e0 v0 2f2f2f2f2f2f2f2f v1 5f6474656e62696c v2 000000000000006f v3 0000000000000000 v4 8020080280200800 v5 0000000000000000 v6 0000000000000000 v7 8020080280200802 v8 0000000000000000 v9 0000000000000000 v10 0000000000000000 v11 0000000000000000 v12 0000000000000000 v13 0000000000000000 v14 0000000000000000 v15 0000000000000000 v16 4010040140100401 v17 0000aaa800000000 v18 8020080280200800 v19 0000000000000000 v20 0000000000000000 v21 0000000000000000 v22 0000000000000000 v23 0000000000000000 v24 0000000000000000 v25 0000000000000000 v26 0000000000000000 v27 0000000000000000 v28 0000000000000000 v29 0000000000000000 v30 0000000000000000 v31 0000000000000000 backtrace: #00 pc 00000000000588e0 /system/lib64/libc.so (tgkill+8) #01 pc 00000000000198c0 /system/lib64/libc.so (pthread_kill+160) #02 pc 000000000001ab34 /system/lib64/libc.so (raise+28) #03 pc 00000000000148bc /system/lib64/libc.so (abort+60) #04 pc 00000000000016e0 /system/xbin/crasher64 #05 pc 00000000000017f0 
/system/xbin/crasher64 #06 pc 0000000000018958 /system/lib64/libc.so (__pthread_start(void*)+52) #07 pc 0000000000014e90 /system/lib64/libc.so (__start_thread+16) """ mips = """ Build fingerprint: 'Android/aosp_mips/generic_mips:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys' Revision: '0' ABI: 'mips' pid: 958, tid: 960, name: crasher >>> crasher <<< signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr -------- zr 00000000 at 802babc0 v0 00000000 v1 77b99dd0 a0 000003be a1 000003c0 a2 00000006 a3 00000000 t0 00000000 t1 9e7f5440 t2 00000020 t3 ffffff18 t4 77a9c000 t5 00000001 t6 00000000 t7 00000000 s0 000003c0 s1 77b99dd8 s2 00000000 s3 00000006 s4 77db2028 s5 000003be s6 77c39fa8 s7 77b99dd0 t8 00000000 t9 77c89e80 k0 00000000 k1 00000000 gp 77cce350 sp 77b99c78 s8 77db2020 ra 77c3b48c hi 00000000 lo 00000008 bva 7fff7008 epc 77c89e94 backtrace: #00 pc 00067e94 /system/lib/libc.so (tgkill+20) #01 pc 0001948c /system/lib/libc.so (pthread_kill+244) #02 pc 0001b0e8 /system/lib/libc.so (raise+60) #03 pc 00012908 /system/lib/libc.so (abort+104) #04 pc 000012a4 /system/xbin/crasher #05 pc 00018008 /system/lib/libc.so (__pthread_start(void*)+96) #06 pc 00013198 /system/lib/libc.so (__start_thread+36) """ mips64 = """ Build fingerprint: 'Android/aosp_mips64/generic_mips64:5.1.51/AOSP/agampe05040015:userdebug/test-keys' Revision: '1' ABI: 'mips64' pid: 342, tid: 342, name: crasher64 >>> crasher64 <<< signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr -------- zr 0000000000000000 at 0000000000000001 v0 0000000000000000 v1 000000ffec1c6528 a0 0000000000000156 a1 0000000000000156 a2 0000000000000006 a3 0000000000000000 a4 000000000000ffff a5 fffffffffffffffc a6 0000000000000000 a7 0000000000000001 t0 0000000000000001 t1 0000000000000000 t2 0000000000000001 t3 0000000000000001 s0 0000000000000002 s1 000000ffec1c6538 s2 000000ffec1c6478 s3 0000000000000006 s4 0000000000100000 s5 000000fff1d44f98 s6 000000fff186c488 s7 0000000000000000 t8 ffffffffffff0000 t9 000000ffec01c2a0 k0 
0000000000000000 k1 0000000000000000 gp 000000ffec0a6680 sp 000000ffff8c7150 s8 0000000000100206 ra 000000ffec016684 hi 0000000000000000 lo 0000000000000000 bva 000000ffffffe010 epc 000000ffec01c2a8 backtrace: #00 pc 00000000000832a8 /system/lib64/libc.so (tgkill+8) #01 pc 000000000007d684 /system/lib64/libc.so (pthread_kill+116) #02 pc 000000000002dd78 /system/lib64/libc.so (raise+56) #03 pc 000000000002684c /system/lib64/libc.so (abort+92) #04 pc 000000000000199c /system/xbin/crasher64 #05 pc 000000000002595c /system/lib64/libc.so (__libc_init+140) #06 pc 0000000000000fd4 /system/xbin/crasher64 #07 pc 0000000000000f80 /system/xbin/crasher64 """ x86 = """ Build fingerprint: 'Android/aosp_x86_64/generic_x86_64:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys' Revision: '0' ABI: 'x86' pid: 1566, tid: 1568, name: crasher >>> crasher <<< signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr -------- eax 00000000 ebx 0000061e ecx 00000620 edx 00000006 esi f7679dd8 edi 00000000 xcs 00000023 xds 0000002b xes 0000002b xfs 00000003 xss 0000002b eip f7758ea6 ebp 00000620 esp f7679c60 flags 00000282 backtrace: #00 pc 00076ea6 /system/lib/libc.so (tgkill+22) #01 pc 0001dc8b /system/lib/libc.so (pthread_kill+155) #02 pc 0001f294 /system/lib/libc.so (raise+36) #03 pc 00017a04 /system/lib/libc.so (abort+84) #04 pc 00001099 /system/xbin/crasher #05 pc 0001cd58 /system/lib/libc.so (__pthread_start(void*)+56) #06 pc 00018169 /system/lib/libc.so (__start_thread+25) #07 pc 0000ed76 /system/lib/libc.so (__bionic_clone+70) """ x86_64 = """ Build fingerprint: 'Android/aosp_x86_64/generic_x86_64:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys' Revision: '0' ABI: 'x86_64' pid: 1608, tid: 1610, name: crasher64 >>> crasher64 <<< signal 6 (SIGABRT), code -6 (SI_TKILL), fault addr -------- rax 0000000000000000 rbx 000000000000064a rcx ffffffffffffffff rdx 0000000000000006 rsi 000000000000064a rdi 0000000000000648 r8 0000000000000001 r9 00007fe218110c98 r10 0000000000000008 r11 0000000000000206 r12 
0000000000000000 r13 0000000000000006 r14 00007fe218111ba0 r15 0000000000000648 cs 0000000000000033 ss 000000000000002b rip 00007fe218201807 rbp 00007fe218111bb0 rsp 00007fe218111a18 eflags 0000000000000206 backtrace: #00 pc 0000000000077807 /system/lib64/libc.so (tgkill+7) #01 pc 000000000002243f /system/lib64/libc.so (pthread_kill+143) #02 pc 0000000000023551 /system/lib64/libc.so (raise+17) #03 pc 000000000001ce6d /system/lib64/libc.so (abort+61) #04 pc 0000000000001385 /system/xbin/crasher64 #05 pc 00000000000014a8 /system/xbin/crasher64 #06 pc 00000000000215ae /system/lib64/libc.so (__pthread_start(void*)+46) #07 pc 000000000001d3eb /system/lib64/libc.so (__start_thread+11) #08 pc 00000000000138f5 /system/lib64/libc.so (__bionic_clone+53) """ libmemunreachable = """ Unreachable memory 48 bytes in 2 unreachable allocations ABI: 'arm' 24 bytes unreachable at a11e6748 and 24 similar unreachable bytes in 1 allocation contents: a11e6748: 63 6f 6d 2e 61 6e 64 72 6f 69 64 2e 73 79 73 74 com.android.syst a11e6758: 65 6d 75 69 00 00 00 00 emui.... #00 pc 000076ae /system/lib/libcutils.so (set_process_name+45) #01 pc 000989d6 /system/lib/libandroid_runtime.so (android_os_Process_setArgV0(_JNIEnv*, _jobject*, _jstring*)+125) """ # This is a long crash in ASAN format, which does not pad frame numbers. This should be used # in a test to ensure that the stack is not split into two (see stack_core's test_long_asan_crash). 
long_asan_crash = """ Build fingerprint: 'Android/aosp_arm/generic_arm:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys' ABI: 'arm' #0 0x727d4dfdaf (/system/lib/libclang_rt.asan-arm-android.so+0x31daf) #1 0x727d4e00af (/system/lib/libclang_rt.asan-arm-android.so+0x320af) #2 0x72778db0cf (/data/lib/libc.so+0x740cf) #3 0x725688a66f (/does/not/matter/a.so+0x1066f) #4 0x72568a02af (/does/not/matter/a.so+0x262af) #5 0x725689e313 (/does/not/matter/a.so+0x24313) #6 0x72568a95eb (/does/not/matter/a.so+0x2f5eb) #7 0x725688de6f (/does/not/matter/a.so+0x13e6f) #8 0x72778ceeff (/does/not/matter/a.so+0x67eff) #9 0x7277884983 (/does/not/matter/a.so+0x1d983) #10 0x7277884983 (/does/not/matter/a.so+0x1d983) """
arm = "\nBuild fingerprint: 'Android/aosp_arm/generic_arm:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys'\nRevision: '0'\nABI: 'arm'\nsignal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------\n r0 00000000 r1 00002dd9 r2 00000006 r3 00000000\n r4 f710edd8 r5 00000006 r6 00000000 r7 0000010c\n r8 f71b9df4 r9 ab0b5028 sl f7175695 fp f710edd0\n ip 00002dd9 sp f710ed18 lr f7175ef1 pc f719a4e0 cpsr 60070010\n d0 ffffffffffffffff d1 0000000000000031\n d2 0000000000000037 d3 0000000000000033\n d4 0000000000000000 d5 0000000000000000\n d6 0000000000000000 d7 0000000000000000\n d8 0000000000000000 d9 0000000000000000\n d10 0000000000000000 d11 0000000000000000\n d12 0000000000000000 d13 0000000000000000\n d14 0000000000000000 d15 0000000000000000\n d16 0000000000000000 d17 0000000000000fff\n d18 0000000000000000 d19 0000000000000000\n d20 0000000000000000 d21 0000000000000000\n d22 0000000000000000 d23 0000000000000000\n d24 0000000000000000 d25 0000000000000000\n d26 0000000000000000 d27 0000000000000000\n d28 0000000000000000 d29 0000000000000000\n d30 0000000000000000 d31 0000000000000000\n scr 00000000\n\nbacktrace:\n #00 pc 000374e0 /system/lib/libc.so (tgkill+12)\n #01 pc 00012eed /system/lib/libc.so (pthread_kill+52)\n #02 pc 00013997 /system/lib/libc.so (raise+10)\n #03 pc 0001047d /system/lib/libc.so (__libc_android_abort+36)\n #04 pc 0000eb1c /system/lib/libc.so (abort+4)\n #05 pc 00000c6f /system/xbin/crasher\n #06 pc 000126b3 /system/lib/libc.so (__pthread_start(void*)+30)\n #07 pc 000107fb /system/lib/libc.so (__start_thread+6)\n" arm64 = "\nBuild fingerprint: 'Android/aosp_arm64/generic_arm64:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys'\nRevision: '0'\nABI: 'arm64'\nsignal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------\n x0 0000000000000000 x1 0000000000002df1 x2 0000000000000006 x3 000000559dc73040\n x4 ffffffffffffffff x5 0000000000000005 x6 0000000000000001 x7 0000000000000020\n x8 0000000000000083 x9 0000005563d21000 x10 0101010101010101 x11 
0000000000000001\n x12 0000000000000001 x13 0000005563d21000 x14 0000005563d21000 x15 0000000000000000\n x16 0000005563d32f20 x17 0000000000000001 x18 0000000000000000 x19 000000559dc73040\n x20 0000007f844dcbb0 x21 0000007f84639000 x22 0000000000000000 x23 0000000000000006\n x24 0000007f845b2000 x25 0000007ff8f33bc0 x26 0000007f843df000 x27 000000559dc730c0\n x28 0000007f84639788 x29 0000007f844dc9c0 x30 0000007f845b38c4\n sp 0000007f844dc9c0 pc 0000007f845f28e0\n v0 2f2f2f2f2f2f2f2f v1 5f6474656e62696c v2 000000000000006f v3 0000000000000000\n v4 8020080280200800 v5 0000000000000000 v6 0000000000000000 v7 8020080280200802\n v8 0000000000000000 v9 0000000000000000 v10 0000000000000000 v11 0000000000000000\n v12 0000000000000000 v13 0000000000000000 v14 0000000000000000 v15 0000000000000000\n v16 4010040140100401 v17 0000aaa800000000 v18 8020080280200800 v19 0000000000000000\n v20 0000000000000000 v21 0000000000000000 v22 0000000000000000 v23 0000000000000000\n v24 0000000000000000 v25 0000000000000000 v26 0000000000000000 v27 0000000000000000\n v28 0000000000000000 v29 0000000000000000 v30 0000000000000000 v31 0000000000000000\n\nbacktrace:\n #00 pc 00000000000588e0 /system/lib64/libc.so (tgkill+8)\n #01 pc 00000000000198c0 /system/lib64/libc.so (pthread_kill+160)\n #02 pc 000000000001ab34 /system/lib64/libc.so (raise+28)\n #03 pc 00000000000148bc /system/lib64/libc.so (abort+60)\n #04 pc 00000000000016e0 /system/xbin/crasher64\n #05 pc 00000000000017f0 /system/xbin/crasher64\n #06 pc 0000000000018958 /system/lib64/libc.so (__pthread_start(void*)+52)\n #07 pc 0000000000014e90 /system/lib64/libc.so (__start_thread+16)\n" mips = "\nBuild fingerprint: 'Android/aosp_mips/generic_mips:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys'\nRevision: '0'\nABI: 'mips'\npid: 958, tid: 960, name: crasher >>> crasher <<<\nsignal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------\n zr 00000000 at 802babc0 v0 00000000 v1 77b99dd0\n a0 000003be a1 000003c0 a2 00000006 a3 00000000\n 
t0 00000000 t1 9e7f5440 t2 00000020 t3 ffffff18\n t4 77a9c000 t5 00000001 t6 00000000 t7 00000000\n s0 000003c0 s1 77b99dd8 s2 00000000 s3 00000006\n s4 77db2028 s5 000003be s6 77c39fa8 s7 77b99dd0\n t8 00000000 t9 77c89e80 k0 00000000 k1 00000000\n gp 77cce350 sp 77b99c78 s8 77db2020 ra 77c3b48c\n hi 00000000 lo 00000008 bva 7fff7008 epc 77c89e94\n\nbacktrace:\n #00 pc 00067e94 /system/lib/libc.so (tgkill+20)\n #01 pc 0001948c /system/lib/libc.so (pthread_kill+244)\n #02 pc 0001b0e8 /system/lib/libc.so (raise+60)\n #03 pc 00012908 /system/lib/libc.so (abort+104)\n #04 pc 000012a4 /system/xbin/crasher\n #05 pc 00018008 /system/lib/libc.so (__pthread_start(void*)+96)\n #06 pc 00013198 /system/lib/libc.so (__start_thread+36)\n" mips64 = "\nBuild fingerprint: 'Android/aosp_mips64/generic_mips64:5.1.51/AOSP/agampe05040015:userdebug/test-keys'\nRevision: '1'\nABI: 'mips64'\npid: 342, tid: 342, name: crasher64 >>> crasher64 <<<\nsignal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------\n zr 0000000000000000 at 0000000000000001 v0 0000000000000000 v1 000000ffec1c6528\n a0 0000000000000156 a1 0000000000000156 a2 0000000000000006 a3 0000000000000000\n a4 000000000000ffff a5 fffffffffffffffc a6 0000000000000000 a7 0000000000000001\n t0 0000000000000001 t1 0000000000000000 t2 0000000000000001 t3 0000000000000001\n s0 0000000000000002 s1 000000ffec1c6538 s2 000000ffec1c6478 s3 0000000000000006\n s4 0000000000100000 s5 000000fff1d44f98 s6 000000fff186c488 s7 0000000000000000\n t8 ffffffffffff0000 t9 000000ffec01c2a0 k0 0000000000000000 k1 0000000000000000\n gp 000000ffec0a6680 sp 000000ffff8c7150 s8 0000000000100206 ra 000000ffec016684\n hi 0000000000000000 lo 0000000000000000 bva 000000ffffffe010 epc 000000ffec01c2a8\n\nbacktrace:\n #00 pc 00000000000832a8 /system/lib64/libc.so (tgkill+8)\n #01 pc 000000000007d684 /system/lib64/libc.so (pthread_kill+116)\n #02 pc 000000000002dd78 /system/lib64/libc.so (raise+56)\n #03 pc 000000000002684c /system/lib64/libc.so (abort+92)\n 
#04 pc 000000000000199c /system/xbin/crasher64\n #05 pc 000000000002595c /system/lib64/libc.so (__libc_init+140)\n #06 pc 0000000000000fd4 /system/xbin/crasher64\n #07 pc 0000000000000f80 /system/xbin/crasher64\n" x86 = "\nBuild fingerprint: 'Android/aosp_x86_64/generic_x86_64:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys'\nRevision: '0'\nABI: 'x86'\npid: 1566, tid: 1568, name: crasher >>> crasher <<<\nsignal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------\n eax 00000000 ebx 0000061e ecx 00000620 edx 00000006\n esi f7679dd8 edi 00000000\n xcs 00000023 xds 0000002b xes 0000002b xfs 00000003 xss 0000002b\n eip f7758ea6 ebp 00000620 esp f7679c60 flags 00000282\n\nbacktrace:\n #00 pc 00076ea6 /system/lib/libc.so (tgkill+22)\n #01 pc 0001dc8b /system/lib/libc.so (pthread_kill+155)\n #02 pc 0001f294 /system/lib/libc.so (raise+36)\n #03 pc 00017a04 /system/lib/libc.so (abort+84)\n #04 pc 00001099 /system/xbin/crasher\n #05 pc 0001cd58 /system/lib/libc.so (__pthread_start(void*)+56)\n #06 pc 00018169 /system/lib/libc.so (__start_thread+25)\n #07 pc 0000ed76 /system/lib/libc.so (__bionic_clone+70)\n" x86_64 = "\nBuild fingerprint: 'Android/aosp_x86_64/generic_x86_64:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys'\nRevision: '0'\nABI: 'x86_64'\npid: 1608, tid: 1610, name: crasher64 >>> crasher64 <<<\nsignal 6 (SIGABRT), code -6 (SI_TKILL), fault addr --------\n rax 0000000000000000 rbx 000000000000064a rcx ffffffffffffffff rdx 0000000000000006\n rsi 000000000000064a rdi 0000000000000648\n r8 0000000000000001 r9 00007fe218110c98 r10 0000000000000008 r11 0000000000000206\n r12 0000000000000000 r13 0000000000000006 r14 00007fe218111ba0 r15 0000000000000648\n cs 0000000000000033 ss 000000000000002b\n rip 00007fe218201807 rbp 00007fe218111bb0 rsp 00007fe218111a18 eflags 0000000000000206\n\nbacktrace:\n #00 pc 0000000000077807 /system/lib64/libc.so (tgkill+7)\n #01 pc 000000000002243f /system/lib64/libc.so (pthread_kill+143)\n #02 pc 0000000000023551 /system/lib64/libc.so 
(raise+17)\n #03 pc 000000000001ce6d /system/lib64/libc.so (abort+61)\n #04 pc 0000000000001385 /system/xbin/crasher64\n #05 pc 00000000000014a8 /system/xbin/crasher64\n #06 pc 00000000000215ae /system/lib64/libc.so (__pthread_start(void*)+46)\n #07 pc 000000000001d3eb /system/lib64/libc.so (__start_thread+11)\n #08 pc 00000000000138f5 /system/lib64/libc.so (__bionic_clone+53)\n" libmemunreachable = "\n Unreachable memory\n 48 bytes in 2 unreachable allocations\n ABI: 'arm'\n\n 24 bytes unreachable at a11e6748\n and 24 similar unreachable bytes in 1 allocation\n contents:\n a11e6748: 63 6f 6d 2e 61 6e 64 72 6f 69 64 2e 73 79 73 74 com.android.syst\n a11e6758: 65 6d 75 69 00 00 00 00 emui....\n #00 pc 000076ae /system/lib/libcutils.so (set_process_name+45)\n #01 pc 000989d6 /system/lib/libandroid_runtime.so (android_os_Process_setArgV0(_JNIEnv*, _jobject*, _jstring*)+125)\n" long_asan_crash = "\nBuild fingerprint: 'Android/aosp_arm/generic_arm:4.4.3.43.43.43/AOSP/enh06302258:eng/test-keys'\nABI: 'arm'\n\n #0 0x727d4dfdaf (/system/lib/libclang_rt.asan-arm-android.so+0x31daf)\n\n #1 0x727d4e00af (/system/lib/libclang_rt.asan-arm-android.so+0x320af)\n\n #2 0x72778db0cf (/data/lib/libc.so+0x740cf)\n\n #3 0x725688a66f (/does/not/matter/a.so+0x1066f)\n\n #4 0x72568a02af (/does/not/matter/a.so+0x262af)\n\n #5 0x725689e313 (/does/not/matter/a.so+0x24313)\n\n #6 0x72568a95eb (/does/not/matter/a.so+0x2f5eb)\n\n #7 0x725688de6f (/does/not/matter/a.so+0x13e6f)\n\n #8 0x72778ceeff (/does/not/matter/a.so+0x67eff)\n\n #9 0x7277884983 (/does/not/matter/a.so+0x1d983)\n\n #10 0x7277884983 (/does/not/matter/a.so+0x1d983)\n"
##Config file for lifetime_spyrelet.py in spyre/spyre/spyrelet/ # Device List devices = { 'analyzer':[ 'lantz.drivers.spectrum.MS2721B', ['USB0::0x0B5B::0xFFF9::1118010_150_11::INSTR'], {} ], 'source':[ 'lantz.drivers.mwsource.SynthNVPro', ['ASRL16::INSTR'], {} ] } # Experiment List spyrelets = { 'freqSweep':[ 'spyre.spyrelets.freqSweep_spyrelet_test.Sweep', {'analyzer': 'analyzer','source': 'source'}, {} ], }
devices = {'analyzer': ['lantz.drivers.spectrum.MS2721B', ['USB0::0x0B5B::0xFFF9::1118010_150_11::INSTR'], {}], 'source': ['lantz.drivers.mwsource.SynthNVPro', ['ASRL16::INSTR'], {}]} spyrelets = {'freqSweep': ['spyre.spyrelets.freqSweep_spyrelet_test.Sweep', {'analyzer': 'analyzer', 'source': 'source'}, {}]}
class Point: def __init__(self, x, y): self.x = x self.y = y def __str__(self): return f'({self.x}, {self.y})'
class Point: def __init__(self, x, y): self.x = x self.y = y def __str__(self): return f'({self.x}, {self.y})'
""" By starting at the top of the triangle below and moving to adjacent numbers on the row below, the maximum total from top to bottom is 23. 3 7 4 2 4 6 8 5 9 3 That is, 3 + 7 + 4 + 9 = 23. Find the maximum total from top to bottom of the triangle below: 75 95 64 17 47 82 18 35 87 10 20 04 82 47 65 19 01 23 75 03 34 88 02 77 73 07 63 67 99 65 04 28 06 16 70 92 41 41 26 56 83 40 80 70 33 41 48 72 33 47 32 37 16 94 29 53 71 44 65 25 43 91 52 97 51 14 70 11 33 28 77 73 17 78 39 68 17 57 91 71 52 38 17 14 91 43 58 50 27 29 48 63 66 04 68 89 53 67 30 73 16 69 87 40 31 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23 NOTE: As there are only 16384 routes, it is possible to solve this problem by trying every route. However, Problem 67, is the same challenge with a triangle containing one-hundred rows; it cannot be solved by brute force, and requires a clever method! ;o) """ ''' Brute force method using a recursive function ''' def brute(inp, level, number, sum): sum += inp[level][number] if level < len(inp)-1: s1 = brute(inp, level+1, number, sum) s2 = brute(inp, level+1, number+1, sum) return max(s1,s2) else: return sum string = """75 95 64 17 47 82 18 35 87 10 20 04 82 47 65 19 01 23 75 03 34 88 02 77 73 07 63 67 99 65 04 28 06 16 70 92 41 41 26 56 83 40 80 70 33 41 48 72 33 47 32 37 16 94 29 53 71 44 65 25 43 91 52 97 51 14 70 11 33 28 77 73 17 78 39 68 17 57 91 71 52 38 17 14 91 43 58 50 27 29 48 63 66 04 68 89 53 67 30 73 16 69 87 40 31 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23""" string = string.split('\n') for s in range(len(string)): string[s] = string[s].split() string[s] = [int(i) for i in string[s]] print(brute(string,0,0,0))
""" By starting at the top of the triangle below and moving to adjacent numbers on the row below, the maximum total from top to bottom is 23. 3 7 4 2 4 6 8 5 9 3 That is, 3 + 7 + 4 + 9 = 23. Find the maximum total from top to bottom of the triangle below: 75 95 64 17 47 82 18 35 87 10 20 04 82 47 65 19 01 23 75 03 34 88 02 77 73 07 63 67 99 65 04 28 06 16 70 92 41 41 26 56 83 40 80 70 33 41 48 72 33 47 32 37 16 94 29 53 71 44 65 25 43 91 52 97 51 14 70 11 33 28 77 73 17 78 39 68 17 57 91 71 52 38 17 14 91 43 58 50 27 29 48 63 66 04 68 89 53 67 30 73 16 69 87 40 31 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23 NOTE: As there are only 16384 routes, it is possible to solve this problem by trying every route. However, Problem 67, is the same challenge with a triangle containing one-hundred rows; it cannot be solved by brute force, and requires a clever method! ;o) """ ' Brute force method using a recursive function ' def brute(inp, level, number, sum): sum += inp[level][number] if level < len(inp) - 1: s1 = brute(inp, level + 1, number, sum) s2 = brute(inp, level + 1, number + 1, sum) return max(s1, s2) else: return sum string = '75\n95 64\n17 47 82\n18 35 87 10\n20 04 82 47 65\n19 01 23 75 03 34\n88 02 77 73 07 63 67\n99 65 04 28 06 16 70 92\n41 41 26 56 83 40 80 70 33\n41 48 72 33 47 32 37 16 94 29\n53 71 44 65 25 43 91 52 97 51 14\n70 11 33 28 77 73 17 78 39 68 17 57\n91 71 52 38 17 14 91 43 58 50 27 29 48\n63 66 04 68 89 53 67 30 73 16 69 87 40 31\n04 62 98 27 23 09 70 98 73 93 38 53 60 04 23' string = string.split('\n') for s in range(len(string)): string[s] = string[s].split() string[s] = [int(i) for i in string[s]] print(brute(string, 0, 0, 0))
class BST(): def __init__(self,data): self.key = data self.lch = None self.rch = None def Print(self): if self is None or self.key is None: return if self.lch: self.lch.Print() if self.rch: self.rch.Print() print(self.key,end=" ") def insert(self,data): if self.key is None: self.key = data if data <= self.key: if self.lch: self.lch.insert(data) else: self.lch = BST(data) if data > self.key: if self.rch: self.rch.insert(data) else: self.rch = BST(data) def countNodes(root,low,high): global count temp = list(range(low,high+1)) if root is None: return 0 if root: if root.key in temp: count+=1 if root.lch: countNodes(root.lch,low,high) if root.rch: countNodes(root.rch,low,high) count = 0 root = BST(15) root.insert(10) root.insert(25) root.insert(8) root.insert(12) root.insert(20) root.insert(30) root.insert(0) root.insert(-1) root.insert(21) root.insert(22) countNodes(root,-1,10) print(count) root.Print()
class Bst: def __init__(self, data): self.key = data self.lch = None self.rch = None def print(self): if self is None or self.key is None: return if self.lch: self.lch.Print() if self.rch: self.rch.Print() print(self.key, end=' ') def insert(self, data): if self.key is None: self.key = data if data <= self.key: if self.lch: self.lch.insert(data) else: self.lch = bst(data) if data > self.key: if self.rch: self.rch.insert(data) else: self.rch = bst(data) def count_nodes(root, low, high): global count temp = list(range(low, high + 1)) if root is None: return 0 if root: if root.key in temp: count += 1 if root.lch: count_nodes(root.lch, low, high) if root.rch: count_nodes(root.rch, low, high) count = 0 root = bst(15) root.insert(10) root.insert(25) root.insert(8) root.insert(12) root.insert(20) root.insert(30) root.insert(0) root.insert(-1) root.insert(21) root.insert(22) count_nodes(root, -1, 10) print(count) root.Print()
i = 0 while True: print(i) i+=1
i = 0 while True: print(i) i += 1
class Solution: def getSum(self, a, b): """ :type a: int :type b: int :rtype: int """ # 32 bits integer max MAX = 0x7FFFFFFF # 32 bits interger min MIN = 0x80000000 # mask to get last 32 bits mask = 0xFFFFFFFF while b != 0: # ^ get different bits and & gets double 1s, << moves carry a, b = (a ^ b) & mask, ((a & b) << 1) & mask print(a,b) # if a is negative, get a's 32 bits complement positive first # then get 32-bit positive's Python complement negative return a if a <= MAX else ~(a ^ mask) # test a = -70 b = 5 print(Solution().getSum(a,b))
class Solution: def get_sum(self, a, b): """ :type a: int :type b: int :rtype: int """ max = 2147483647 min = 2147483648 mask = 4294967295 while b != 0: (a, b) = ((a ^ b) & mask, (a & b) << 1 & mask) print(a, b) return a if a <= MAX else ~(a ^ mask) a = -70 b = 5 print(solution().getSum(a, b))
""" Task Score: 100% Complexity: O(N * log(N)) """ def solution(A): """ On the face of it this is quite simple, until you factor in negative numbers. Approach is very simple: 1. Sort array in descending order (or ascending if you like) 2. Take the max value in between the sum of the head of the array and the first element of the array combined with the last two elements (two negatives) """ # sort array in descending order A = sorted(A, reverse=True) # get the product of the first three elements return max(A[0] * A[1] * A[2], A[0] * A[-2] * A[-1])
""" Task Score: 100% Complexity: O(N * log(N)) """ def solution(A): """ On the face of it this is quite simple, until you factor in negative numbers. Approach is very simple: 1. Sort array in descending order (or ascending if you like) 2. Take the max value in between the sum of the head of the array and the first element of the array combined with the last two elements (two negatives) """ a = sorted(A, reverse=True) return max(A[0] * A[1] * A[2], A[0] * A[-2] * A[-1])
def bbox_normalize(bbox, image_size): return tuple(v / image_size[0] if i % 2 == 0 else v / image_size[1] for i, v in enumerate(bbox)) def bbox_denormalize(bbox, image_size): return tuple(v * image_size[0] if i % 2 == 0 else v * image_size[1] for i, v in enumerate(bbox))
def bbox_normalize(bbox, image_size): return tuple((v / image_size[0] if i % 2 == 0 else v / image_size[1] for (i, v) in enumerate(bbox))) def bbox_denormalize(bbox, image_size): return tuple((v * image_size[0] if i % 2 == 0 else v * image_size[1] for (i, v) in enumerate(bbox)))
def calculateStats(numbers): if len(numbers) == 0: stat_dict = {"avg": "nan", "max": "nan", "min": "nan"} else: max_value = max(numbers) min_value = min(numbers) avg_value = round(sum(numbers) / len(numbers), 3) stat_dict = {"avg": avg_value, "max": max_value, "min": min_value} return stat_dict
def calculate_stats(numbers): if len(numbers) == 0: stat_dict = {'avg': 'nan', 'max': 'nan', 'min': 'nan'} else: max_value = max(numbers) min_value = min(numbers) avg_value = round(sum(numbers) / len(numbers), 3) stat_dict = {'avg': avg_value, 'max': max_value, 'min': min_value} return stat_dict
data = ( None, # 0x00 None, # 0x01 None, # 0x02 None, # 0x03 None, # 0x04 'B', # 0x05 'P', # 0x06 'M', # 0x07 'F', # 0x08 'D', # 0x09 'T', # 0x0a 'N', # 0x0b 'L', # 0x0c 'G', # 0x0d 'K', # 0x0e 'H', # 0x0f 'J', # 0x10 'Q', # 0x11 'X', # 0x12 'ZH', # 0x13 'CH', # 0x14 'SH', # 0x15 'R', # 0x16 'Z', # 0x17 'C', # 0x18 'S', # 0x19 'A', # 0x1a 'O', # 0x1b 'E', # 0x1c 'EH', # 0x1d 'AI', # 0x1e 'EI', # 0x1f 'AU', # 0x20 'OU', # 0x21 'AN', # 0x22 'EN', # 0x23 'ANG', # 0x24 'ENG', # 0x25 'ER', # 0x26 'I', # 0x27 'U', # 0x28 'IU', # 0x29 'V', # 0x2a 'NG', # 0x2b 'GN', # 0x2c None, # 0x2d None, # 0x2e None, # 0x2f None, # 0x30 'g', # 0x31 'gg', # 0x32 'gs', # 0x33 'n', # 0x34 'nj', # 0x35 'nh', # 0x36 'd', # 0x37 'dd', # 0x38 'r', # 0x39 'lg', # 0x3a 'lm', # 0x3b 'lb', # 0x3c 'ls', # 0x3d 'lt', # 0x3e 'lp', # 0x3f 'rh', # 0x40 'm', # 0x41 'b', # 0x42 'bb', # 0x43 'bs', # 0x44 's', # 0x45 'ss', # 0x46 '', # 0x47 'j', # 0x48 'jj', # 0x49 'c', # 0x4a 'k', # 0x4b 't', # 0x4c 'p', # 0x4d 'h', # 0x4e 'a', # 0x4f 'ae', # 0x50 'ya', # 0x51 'yae', # 0x52 'eo', # 0x53 'e', # 0x54 'yeo', # 0x55 'ye', # 0x56 'o', # 0x57 'wa', # 0x58 'wae', # 0x59 'oe', # 0x5a 'yo', # 0x5b 'u', # 0x5c 'weo', # 0x5d 'we', # 0x5e 'wi', # 0x5f 'yu', # 0x60 'eu', # 0x61 'yi', # 0x62 'i', # 0x63 '', # 0x64 'nn', # 0x65 'nd', # 0x66 'ns', # 0x67 'nZ', # 0x68 'lgs', # 0x69 'ld', # 0x6a 'lbs', # 0x6b 'lZ', # 0x6c 'lQ', # 0x6d 'mb', # 0x6e 'ms', # 0x6f 'mZ', # 0x70 'mN', # 0x71 'bg', # 0x72 '', # 0x73 'bsg', # 0x74 'bst', # 0x75 'bj', # 0x76 'bt', # 0x77 'bN', # 0x78 'bbN', # 0x79 'sg', # 0x7a 'sn', # 0x7b 'sd', # 0x7c 'sb', # 0x7d 'sj', # 0x7e 'Z', # 0x7f '', # 0x80 'N', # 0x81 'Ns', # 0x82 'NZ', # 0x83 'pN', # 0x84 'hh', # 0x85 'Q', # 0x86 'yo-ya', # 0x87 'yo-yae', # 0x88 'yo-i', # 0x89 'yu-yeo', # 0x8a 'yu-ye', # 0x8b 'yu-i', # 0x8c 'U', # 0x8d 'U-i', # 0x8e None, # 0x8f '', # 0x90 '', # 0x91 '', # 0x92 '', # 0x93 '', # 0x94 '', # 0x95 '', # 0x96 '', # 0x97 '', # 0x98 '', # 0x99 '', # 0x9a '', # 0x9b '', # 0x9c 
'', # 0x9d '', # 0x9e '', # 0x9f 'BU', # 0xa0 'ZI', # 0xa1 'JI', # 0xa2 'GU', # 0xa3 'EE', # 0xa4 'ENN', # 0xa5 'OO', # 0xa6 'ONN', # 0xa7 'IR', # 0xa8 'ANN', # 0xa9 'INN', # 0xaa 'UNN', # 0xab 'IM', # 0xac 'NGG', # 0xad 'AINN', # 0xae 'AUNN', # 0xaf 'AM', # 0xb0 'OM', # 0xb1 'ONG', # 0xb2 'INNN', # 0xb3 'P', # 0xb4 'T', # 0xb5 'K', # 0xb6 'H', # 0xb7 None, # 0xb8 None, # 0xb9 None, # 0xba None, # 0xbb None, # 0xbc None, # 0xbd None, # 0xbe None, # 0xbf None, # 0xc0 None, # 0xc1 None, # 0xc2 None, # 0xc3 None, # 0xc4 None, # 0xc5 None, # 0xc6 None, # 0xc7 None, # 0xc8 None, # 0xc9 None, # 0xca None, # 0xcb None, # 0xcc None, # 0xcd None, # 0xce None, # 0xcf None, # 0xd0 None, # 0xd1 None, # 0xd2 None, # 0xd3 None, # 0xd4 None, # 0xd5 None, # 0xd6 None, # 0xd7 None, # 0xd8 None, # 0xd9 None, # 0xda None, # 0xdb None, # 0xdc None, # 0xdd None, # 0xde None, # 0xdf None, # 0xe0 None, # 0xe1 None, # 0xe2 None, # 0xe3 None, # 0xe4 None, # 0xe5 None, # 0xe6 None, # 0xe7 None, # 0xe8 None, # 0xe9 None, # 0xea None, # 0xeb None, # 0xec None, # 0xed None, # 0xee None, # 0xef None, # 0xf0 None, # 0xf1 None, # 0xf2 None, # 0xf3 None, # 0xf4 None, # 0xf5 None, # 0xf6 None, # 0xf7 None, # 0xf8 None, # 0xf9 None, # 0xfa None, # 0xfb None, # 0xfc None, # 0xfd None, # 0xfe )
data = (None, None, None, None, None, 'B', 'P', 'M', 'F', 'D', 'T', 'N', 'L', 'G', 'K', 'H', 'J', 'Q', 'X', 'ZH', 'CH', 'SH', 'R', 'Z', 'C', 'S', 'A', 'O', 'E', 'EH', 'AI', 'EI', 'AU', 'OU', 'AN', 'EN', 'ANG', 'ENG', 'ER', 'I', 'U', 'IU', 'V', 'NG', 'GN', None, None, None, None, 'g', 'gg', 'gs', 'n', 'nj', 'nh', 'd', 'dd', 'r', 'lg', 'lm', 'lb', 'ls', 'lt', 'lp', 'rh', 'm', 'b', 'bb', 'bs', 's', 'ss', '', 'j', 'jj', 'c', 'k', 't', 'p', 'h', 'a', 'ae', 'ya', 'yae', 'eo', 'e', 'yeo', 'ye', 'o', 'wa', 'wae', 'oe', 'yo', 'u', 'weo', 'we', 'wi', 'yu', 'eu', 'yi', 'i', '', 'nn', 'nd', 'ns', 'nZ', 'lgs', 'ld', 'lbs', 'lZ', 'lQ', 'mb', 'ms', 'mZ', 'mN', 'bg', '', 'bsg', 'bst', 'bj', 'bt', 'bN', 'bbN', 'sg', 'sn', 'sd', 'sb', 'sj', 'Z', '', 'N', 'Ns', 'NZ', 'pN', 'hh', 'Q', 'yo-ya', 'yo-yae', 'yo-i', 'yu-yeo', 'yu-ye', 'yu-i', 'U', 'U-i', None, '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', 'BU', 'ZI', 'JI', 'GU', 'EE', 'ENN', 'OO', 'ONN', 'IR', 'ANN', 'INN', 'UNN', 'IM', 'NGG', 'AINN', 'AUNN', 'AM', 'OM', 'ONG', 'INNN', 'P', 'T', 'K', 'H', None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None, None)
def solution(num): """Find the maximal sequence of consecutive ones""" try: num = int(num) count = 0 if(num > 0): while(num != 0): num = (num & (num << 1)) count += 1 return count except ValueError: return 'Invalid input.'
def solution(num): """Find the maximal sequence of consecutive ones""" try: num = int(num) count = 0 if num > 0: while num != 0: num = num & num << 1 count += 1 return count except ValueError: return 'Invalid input.'
jibunno = "red" kimino = "green" kari = jibunno jibunno = kimino kimino = kari print(jibunno, kimino)
jibunno = 'red' kimino = 'green' kari = jibunno jibunno = kimino kimino = kari print(jibunno, kimino)
class LinkedStack: class _Node: __slots__ = '_element', '_next' def __init__(self, element, next): self._element = element self._next = next def __init__(self): self._head = None self._size = 0 def __len__(self): return self._size def is_empty(self): return self._size == 0 def push(self, e): self._head = self._Node(e, self._head) self._size += 1 def top(self): if self.is_empty(): raise Empty('Stack is empty') return self._head._element def pop(self): if self.is_empty(): raise Empty('Stack is empty') answer = self._head._element self._head = self._head._next self._size -= 1 return answer
class Linkedstack: class _Node: __slots__ = ('_element', '_next') def __init__(self, element, next): self._element = element self._next = next def __init__(self): self._head = None self._size = 0 def __len__(self): return self._size def is_empty(self): return self._size == 0 def push(self, e): self._head = self._Node(e, self._head) self._size += 1 def top(self): if self.is_empty(): raise empty('Stack is empty') return self._head._element def pop(self): if self.is_empty(): raise empty('Stack is empty') answer = self._head._element self._head = self._head._next self._size -= 1 return answer
#!/usr/bin/env python # -*- coding: utf-8 -*- dice = [] for d1 in range(1, 7): for d2 in range(1, 7): dice.append((d1, d2)) n = 0 comb = [] for d1, d2 in dice: if d1 + d2 == 7: n += 1 comb.append((d1,d2)) print('%d combinations results in the sum 7' % n) print(comb)
dice = [] for d1 in range(1, 7): for d2 in range(1, 7): dice.append((d1, d2)) n = 0 comb = [] for (d1, d2) in dice: if d1 + d2 == 7: n += 1 comb.append((d1, d2)) print('%d combinations results in the sum 7' % n) print(comb)
"""Sector and industries.""" SECTORS = [ { "name": "Chemical", "industries": [ {"name": "Other"}, {"name": "Basic Chemicals"}, {"name": "Specialty Products"}, {"name": "Pharmaceutical Products"}, {"name": "Consumer Products"}, {"name": "Agricultural Products"}, ], }, { "name": "Commercial Facilities", "industries": [ {"name": "Entertainment and Media"}, {"name": "Gaming"}, {"name": "Lodging"}, {"name": "Other"}, {"name": "Outdoor Events"}, {"name": "Public Assembly"}, {"name": "Real Estate"}, {"name": "Retail"}, {"name": "Sports Leagues"}, ], }, { "name": "Communications", "industries": [ {"name": " Other"}, {"name": " Telecommunications"}, {"name": " Wireless Communications Service Providers"}, {"name": " Broadcasting"}, {"name": " Cable"}, {"name": " Satellite"}, {"name": " Wireline"}, ], }, { "name": "Critical Manufacturing", "industries": [ {"name": "Electrical Equipment, Appliance and Component Manufacturing"}, {"name": "Machinery Manufacturing"}, {"name": "Other"}, {"name": "Primary Metal Manufacturing"}, {"name": "Transportation and Heavy Equipment Manufacturing"}, {"name": "Manufacturing"}, {"name": "Heavy Machinery Manufacturing"}, ], }, { "name": "Dams", "industries": [ {"name": "Dams"}, {"name": "Other"}, {"name": "Private Hydropower Facilities in the US"}, {"name": "Levees"}, {"name": "Navigation Locks"}, {"name": "Tailings and Waste Impoundments"}, ], }, { "name": "Defense Industrial Base", "industries": [ {"name": "Aircraft Industry"}, {"name": "Ammunition"}, {"name": "Combat Vehicle"}, {"name": "Communications"}, {"name": "Defense Contractors "}, {"name": "Electrical Industry Commodities"}, {"name": "Electronics"}, {"name": "Mechanical Industry Commodities"}, {"name": "Missile Industry"}, {"name": "Other"}, {"name": "Research and Development Facilities"}, {"name": "Shipbuilding Industry"}, {"name": "Space"}, {"name": "Structural Industry Commodities"}, {"name": "Troop Support"}, {"name": "Weapons"}, ], }, { "name": "Emergency Services", "industries": 
[ {"name": "Emergency Management"}, {"name": "Emergency Medical Services"}, {"name": "Fire and Rescue Services"}, {"name": "Law Enforcement "}, {"name": "Other"}, {"name": "Public Works"}, ], }, { "name": "Energy", "industries": [ {"name": "Electric Power Generation, Transmission and Distribution"}, {"name": "Natural Gas"}, {"name": "Other"}, {"name": "Petroleum Refineries"}, {"name": "Oil and Natural Gas"}, ], }, { "name": "Financial Services", "industries": [ {"name": "Other"}, {"name": "US Banks"}, {"name": "US Credit Unions"}, {"name": "Consumer Services"}, {"name": "Credit and Liquidity Products"}, {"name": "Investment Products"}, {"name": "Risk Transfer Products"}, ], }, { "name": "Food and Agriculture", "industries": [ {"name": "Beverage Manufacturing Plants"}, {"name": "Food Manufacturing Plants"}, {"name": "Food Services"}, {"name": "Other"}, {"name": "Supply"}, {"name": "Processing, Packaging, and Production"}, {"name": "Product Storage"}, {"name": "Product Transportation"}, {"name": "Product Distribution"}, {"name": "Supporting Facilities"}, ], }, { "name": "Government Facilities", "industries": [ {"name": "Local Governments"}, {"name": "Other"}, {"name": "State Governments"}, {"name": "Territorial Governments"}, {"name": "Tribal Governments"}, {"name": "Public Facilities"}, {"name": "Non-Public Facilities"}, ], }, { "name": "Healthcare and Public Health", "industries": [ {"name": "Hospitals"}, {"name": "Other"}, {"name": "Residential Care Facilities"}, {"name": "Direct Patient Care"}, {"name": "Health IT"}, {"name": "Health Plans and Payers"}, {"name": "Fatality Management Services"}, {"name": "Medical Materials"}, {"name": "Support Services"}, ], }, { "name": "Information Technology", "industries": [ {"name": "Information Technology"}, {"name": "Other"}, {"name": "IT Production"}, {"name": "DNS Services"}, {"name": "Identity and Trust Support Management"}, {"name": "Internet Content and Service Providers"}, {"name": "Internet Routing and Connection"}, 
{"name": "Incident Management"}, ], }, { "name": "Nuclear Reactors, Materials, and Waste", "industries": [ {"name": "Operating Nuclear Power Plants"}, {"name": "Other"}, {"name": "Fuel Cycle Facilities"}, {"name": "Nuclear Materials Transport"}, {"name": "Radioactive Waste"}, {"name": "Radioactive Materials"}, ], }, { "name": "Transportation Systems", "industries": [ {"name": "Aviation"}, {"name": "Freight Rail"}, {"name": "Highway (truck transportation)"}, {"name": "Maritime"}, {"name": "Mass Transit and Passenger Rail"}, {"name": "Municipalities with Traffic Control Systems"}, {"name": "Other"}, { "name": "Pipelines (carries natural gas, hazardous liquids, and various chemicals.)" }, ], }, { "name": "Water and Wastewater Systems", "industries": [ {"name": "Other"}, {"name": "Public Water Systems"}, {"name": "Publicly Owned Treatment Works"}, ], }, ]
"""Sector and industries.""" sectors = [{'name': 'Chemical', 'industries': [{'name': 'Other'}, {'name': 'Basic Chemicals'}, {'name': 'Specialty Products'}, {'name': 'Pharmaceutical Products'}, {'name': 'Consumer Products'}, {'name': 'Agricultural Products'}]}, {'name': 'Commercial Facilities', 'industries': [{'name': 'Entertainment and Media'}, {'name': 'Gaming'}, {'name': 'Lodging'}, {'name': 'Other'}, {'name': 'Outdoor Events'}, {'name': 'Public Assembly'}, {'name': 'Real Estate'}, {'name': 'Retail'}, {'name': 'Sports Leagues'}]}, {'name': 'Communications', 'industries': [{'name': ' Other'}, {'name': ' Telecommunications'}, {'name': ' Wireless Communications Service Providers'}, {'name': ' Broadcasting'}, {'name': ' Cable'}, {'name': ' Satellite'}, {'name': ' Wireline'}]}, {'name': 'Critical Manufacturing', 'industries': [{'name': 'Electrical Equipment, Appliance and Component Manufacturing'}, {'name': 'Machinery Manufacturing'}, {'name': 'Other'}, {'name': 'Primary Metal Manufacturing'}, {'name': 'Transportation and Heavy Equipment Manufacturing'}, {'name': 'Manufacturing'}, {'name': 'Heavy Machinery Manufacturing'}]}, {'name': 'Dams', 'industries': [{'name': 'Dams'}, {'name': 'Other'}, {'name': 'Private Hydropower Facilities in the US'}, {'name': 'Levees'}, {'name': 'Navigation Locks'}, {'name': 'Tailings and Waste Impoundments'}]}, {'name': 'Defense Industrial Base', 'industries': [{'name': 'Aircraft Industry'}, {'name': 'Ammunition'}, {'name': 'Combat Vehicle'}, {'name': 'Communications'}, {'name': 'Defense Contractors '}, {'name': 'Electrical Industry Commodities'}, {'name': 'Electronics'}, {'name': 'Mechanical Industry Commodities'}, {'name': 'Missile Industry'}, {'name': 'Other'}, {'name': 'Research and Development Facilities'}, {'name': 'Shipbuilding Industry'}, {'name': 'Space'}, {'name': 'Structural Industry Commodities'}, {'name': 'Troop Support'}, {'name': 'Weapons'}]}, {'name': 'Emergency Services', 'industries': [{'name': 'Emergency Management'}, 
{'name': 'Emergency Medical Services'}, {'name': 'Fire and Rescue Services'}, {'name': 'Law Enforcement '}, {'name': 'Other'}, {'name': 'Public Works'}]}, {'name': 'Energy', 'industries': [{'name': 'Electric Power Generation, Transmission and Distribution'}, {'name': 'Natural Gas'}, {'name': 'Other'}, {'name': 'Petroleum Refineries'}, {'name': 'Oil and Natural Gas'}]}, {'name': 'Financial Services', 'industries': [{'name': 'Other'}, {'name': 'US Banks'}, {'name': 'US Credit Unions'}, {'name': 'Consumer Services'}, {'name': 'Credit and Liquidity Products'}, {'name': 'Investment Products'}, {'name': 'Risk Transfer Products'}]}, {'name': 'Food and Agriculture', 'industries': [{'name': 'Beverage Manufacturing Plants'}, {'name': 'Food Manufacturing Plants'}, {'name': 'Food Services'}, {'name': 'Other'}, {'name': 'Supply'}, {'name': 'Processing, Packaging, and Production'}, {'name': 'Product Storage'}, {'name': 'Product Transportation'}, {'name': 'Product Distribution'}, {'name': 'Supporting Facilities'}]}, {'name': 'Government Facilities', 'industries': [{'name': 'Local Governments'}, {'name': 'Other'}, {'name': 'State Governments'}, {'name': 'Territorial Governments'}, {'name': 'Tribal Governments'}, {'name': 'Public Facilities'}, {'name': 'Non-Public Facilities'}]}, {'name': 'Healthcare and Public Health', 'industries': [{'name': 'Hospitals'}, {'name': 'Other'}, {'name': 'Residential Care Facilities'}, {'name': 'Direct Patient Care'}, {'name': 'Health IT'}, {'name': 'Health Plans and Payers'}, {'name': 'Fatality Management Services'}, {'name': 'Medical Materials'}, {'name': 'Support Services'}]}, {'name': 'Information Technology', 'industries': [{'name': 'Information Technology'}, {'name': 'Other'}, {'name': 'IT Production'}, {'name': 'DNS Services'}, {'name': 'Identity and Trust Support Management'}, {'name': 'Internet Content and Service Providers'}, {'name': 'Internet Routing and Connection'}, {'name': 'Incident Management'}]}, {'name': 'Nuclear Reactors, 
Materials, and Waste', 'industries': [{'name': 'Operating Nuclear Power Plants'}, {'name': 'Other'}, {'name': 'Fuel Cycle Facilities'}, {'name': 'Nuclear Materials Transport'}, {'name': 'Radioactive Waste'}, {'name': 'Radioactive Materials'}]}, {'name': 'Transportation Systems', 'industries': [{'name': 'Aviation'}, {'name': 'Freight Rail'}, {'name': 'Highway (truck transportation)'}, {'name': 'Maritime'}, {'name': 'Mass Transit and Passenger Rail'}, {'name': 'Municipalities with Traffic Control Systems'}, {'name': 'Other'}, {'name': 'Pipelines (carries natural gas, hazardous liquids, and various chemicals.)'}]}, {'name': 'Water and Wastewater Systems', 'industries': [{'name': 'Other'}, {'name': 'Public Water Systems'}, {'name': 'Publicly Owned Treatment Works'}]}]
def selectionsort(L): n = len(L) for i in range(n-1): max_index=0 for index in range(n - i): if L[index] > L[max_index]: max_index = index L[n-i-1], L[max_index] = L[max_index], L[n-i-1]
def selectionsort(L): n = len(L) for i in range(n - 1): max_index = 0 for index in range(n - i): if L[index] > L[max_index]: max_index = index (L[n - i - 1], L[max_index]) = (L[max_index], L[n - i - 1])
class Constants: WINDOW_BACKGROUND_RGB: str = '#393939' # this needs to be the same as in windowstyle.py GREY_180_RGB: str = '#b4b3b3' # Text color GREY_127_RGB: str = '#7f7f7f' # 127, 127, 127 disabled GREY_90_RGB: str = '#5a5a5a' # 90, 90, 90 midlight GREY_5C_RGB: str = '#026fb2' # button GREY_80_RGB: str = '#505050' # 80, 80, 80 # Visited link GREY_66_RGB: str = '#424242' # 66, 66, 66 alternatebase GREY_53_RGB: str = '#353535' # 53, 53, 53 window GREY_42_RGB: str = '#2a2a2a' # 42, 42, 42 base GREY_35_RGB: str = '#232323' # 35, 35, 35 dark GREY_20_RGB: str = '#141414' # 20, 20, 20 shadow HIGHLIGHT_RGB: str = '#2a82da' # 42, 130, 218 highlight HYPERLINK_RGB: str = '#2a82da' # 42, 130, 218 hyperlink
class Constants: window_background_rgb: str = '#393939' grey_180_rgb: str = '#b4b3b3' grey_127_rgb: str = '#7f7f7f' grey_90_rgb: str = '#5a5a5a' grey_5_c_rgb: str = '#026fb2' grey_80_rgb: str = '#505050' grey_66_rgb: str = '#424242' grey_53_rgb: str = '#353535' grey_42_rgb: str = '#2a2a2a' grey_35_rgb: str = '#232323' grey_20_rgb: str = '#141414' highlight_rgb: str = '#2a82da' hyperlink_rgb: str = '#2a82da'
# ************************ compare_object_with_info.py ****************************** # # # # compare_object_with_info.py - # # # # Description: # # # # This component, is able to compare any two object, nested and # # not nested, and gives a simple result == True or False and information. # # # # # # *********************************************************************************** # success_message = "Objects are equal!" def compare_objects_with_info(a, b, indention=''): if not isinstance(a, type(b)): return False elif isinstance(a, dict): return compare_dict_with_info(dict(sorted(a.items())), dict(sorted(b.items())), indention) elif isinstance(a, list) or isinstance(a, tuple): return compare_list_with_info(a, b, indention) else: comparison = (a == b) if not comparison: comparison = (False, f"{indention}mismatch between {a} != {b}") else: comparison = True, f"{a} and {b} are equal!" return comparison # *************** NESTED COMPLEX OBJECTS ******************** # def is_complex_with_info(item): return isinstance(item, dict) or isinstance(item, list) or isinstance(item, tuple) or isinstance(item, set) # dict nested in other objects def dict_in_dict_of_dicts_with_info(parent_key, elem, dict_of_elem, indention): for k, v in dict_of_elem.items(): if isinstance(elem, type(v)) and sorted(elem.keys()) == sorted(v.keys()) and parent_key == k: result = compare_objects_with_info(elem, v, f"{indention} ") if not result[0]: return result else: return True, success_message return False, f"Element {elem}, is not appear correctly in dict {dict_of_elem}" def dict_in_list_with_info(elem, list_of_elem, indention): collect_info = "" counter = 1 for j in range(len(list_of_elem)): if isinstance(elem, type(list_of_elem[j])): if sorted(elem.keys()) == sorted(list_of_elem[j].keys()): result, info = compare_objects_with_info(elem, list_of_elem[j], f"{indention} ") if result: return True, success_message else: collect_info += f"{indention}{counter} : {info}\n" counter += 1 else: info = 
f'Origin Element keys {sorted(elem.keys())} and current object keys {sorted(list_of_elem[j].keys())}' collect_info += f"{indention}{counter} : {info}\n" counter += 1 return False, f"Element {elem}, is not exist in list in the exact way.\n\n{indention}Reasons are : \n{collect_info}" # indices objects nested in other objects def list_in_dict_with_info(parent_key, elem, dict_of_elem, indention): for k, v in dict_of_elem.items(): if parent_key == k: if type(elem) != type(v): return False, f"{indention}Elements {elem} and {v}, refer to the same key ``{parent_key}``, with diff types" elif not compare_objects_with_info(elem, v)[0]: return compare_objects_with_info(elem, v, f"{indention} ") else: return True, success_message return False, f"Element {elem}, is not appear correctly in dict {dict_of_elem}." def list_and_tuple_within_list_with_info(elem, list_of_elem, indention): collect_info = "" counter = 1 for j in range(len(list_of_elem)): if isinstance(elem, type(list_of_elem[j])): result, info = compare_objects_with_info(elem, list_of_elem[j], f"{indention} ") if result: return True, success_message else: collect_info += f"{indention}{counter} : {info}\n" counter += 1 return False, f"Element {elem} is not exist in list in the exact way.\n\n{indention}Reasons are : \n{collect_info}" def properties_do_not_fit_with_info(a, b, indention): if len(a) != len(b): return False, f"{indention}{a} length is {len(a)} and {b} length is {len(b)}" if a.keys() != b.keys(): return False, f"{indention}{a} keys are {list(a.keys())} and {b} keys is {list(b.keys())}" return True, success_message def compare_dict_with_info(a, b, indention): result = properties_do_not_fit_with_info(a, b, indention) if not result[0]: return result for key, value in a.items(): if isinstance(value, dict): result = dict_in_dict_of_dicts_with_info(key, value, b, indention) if not result[0]: return result elif is_complex_with_info(value): result = list_in_dict_with_info(key, value, b, indention) if not result[0]: 
return result else: if value != b[key]: return False, f"Mismatch between values for key ``{key}``, {value} != {b[key]}." return True, success_message def compare_list_with_info(a, b, indention): if len(a) != len(b): return False, f"Mismatch - length of list {a} is {len(a)} and {b} is {len(b)}" for i in range(len(a)): if isinstance(a[i], dict): result = dict_in_list_with_info(a[i], b, f"{indention}") if not result[0]: return result elif is_complex_with_info(a[i]): result = list_and_tuple_within_list_with_info(a[i], b, f"{indention}") if not result[0]: return result else: if not a[i] in b: return False, f"Element {a[i]} is in {a} and not in {b}" return True, success_message
success_message = 'Objects are equal!' def compare_objects_with_info(a, b, indention=''): if not isinstance(a, type(b)): return False elif isinstance(a, dict): return compare_dict_with_info(dict(sorted(a.items())), dict(sorted(b.items())), indention) elif isinstance(a, list) or isinstance(a, tuple): return compare_list_with_info(a, b, indention) else: comparison = a == b if not comparison: comparison = (False, f'{indention}mismatch between {a} != {b}') else: comparison = (True, f'{a} and {b} are equal!') return comparison def is_complex_with_info(item): return isinstance(item, dict) or isinstance(item, list) or isinstance(item, tuple) or isinstance(item, set) def dict_in_dict_of_dicts_with_info(parent_key, elem, dict_of_elem, indention): for (k, v) in dict_of_elem.items(): if isinstance(elem, type(v)) and sorted(elem.keys()) == sorted(v.keys()) and (parent_key == k): result = compare_objects_with_info(elem, v, f'{indention} ') if not result[0]: return result else: return (True, success_message) return (False, f'Element {elem}, is not appear correctly in dict {dict_of_elem}') def dict_in_list_with_info(elem, list_of_elem, indention): collect_info = '' counter = 1 for j in range(len(list_of_elem)): if isinstance(elem, type(list_of_elem[j])): if sorted(elem.keys()) == sorted(list_of_elem[j].keys()): (result, info) = compare_objects_with_info(elem, list_of_elem[j], f'{indention} ') if result: return (True, success_message) else: collect_info += f'{indention}{counter} : {info}\n' counter += 1 else: info = f'Origin Element keys {sorted(elem.keys())} and current object keys {sorted(list_of_elem[j].keys())}' collect_info += f'{indention}{counter} : {info}\n' counter += 1 return (False, f'Element {elem}, is not exist in list in the exact way.\n\n{indention}Reasons are : \n{collect_info}') def list_in_dict_with_info(parent_key, elem, dict_of_elem, indention): for (k, v) in dict_of_elem.items(): if parent_key == k: if type(elem) != type(v): return (False, 
f'{indention}Elements {elem} and {v}, refer to the same key ``{parent_key}``, with diff types') elif not compare_objects_with_info(elem, v)[0]: return compare_objects_with_info(elem, v, f'{indention} ') else: return (True, success_message) return (False, f'Element {elem}, is not appear correctly in dict {dict_of_elem}.') def list_and_tuple_within_list_with_info(elem, list_of_elem, indention): collect_info = '' counter = 1 for j in range(len(list_of_elem)): if isinstance(elem, type(list_of_elem[j])): (result, info) = compare_objects_with_info(elem, list_of_elem[j], f'{indention} ') if result: return (True, success_message) else: collect_info += f'{indention}{counter} : {info}\n' counter += 1 return (False, f'Element {elem} is not exist in list in the exact way.\n\n{indention}Reasons are : \n{collect_info}') def properties_do_not_fit_with_info(a, b, indention): if len(a) != len(b): return (False, f'{indention}{a} length is {len(a)} and {b} length is {len(b)}') if a.keys() != b.keys(): return (False, f'{indention}{a} keys are {list(a.keys())} and {b} keys is {list(b.keys())}') return (True, success_message) def compare_dict_with_info(a, b, indention): result = properties_do_not_fit_with_info(a, b, indention) if not result[0]: return result for (key, value) in a.items(): if isinstance(value, dict): result = dict_in_dict_of_dicts_with_info(key, value, b, indention) if not result[0]: return result elif is_complex_with_info(value): result = list_in_dict_with_info(key, value, b, indention) if not result[0]: return result elif value != b[key]: return (False, f'Mismatch between values for key ``{key}``, {value} != {b[key]}.') return (True, success_message) def compare_list_with_info(a, b, indention): if len(a) != len(b): return (False, f'Mismatch - length of list {a} is {len(a)} and {b} is {len(b)}') for i in range(len(a)): if isinstance(a[i], dict): result = dict_in_list_with_info(a[i], b, f'{indention}') if not result[0]: return result elif is_complex_with_info(a[i]): 
result = list_and_tuple_within_list_with_info(a[i], b, f'{indention}') if not result[0]: return result elif not a[i] in b: return (False, f'Element {a[i]} is in {a} and not in {b}') return (True, success_message)
n = int(input()) arr = [int(x) for x in input().split()] distinct = set(arr) print(len(distinct))
n = int(input()) arr = [int(x) for x in input().split()] distinct = set(arr) print(len(distinct))
class ZoneinfoError(Exception): pass class InvalidZoneinfoFile(ZoneinfoError): pass class InvalidTimezone(ZoneinfoError): def __init__(self, name): super(InvalidTimezone, self).__init__( 'Invalid timezone "{}"'.format(name) ) class InvalidPosixSpec(ZoneinfoError): def __init__(self, spec): super(InvalidPosixSpec, self).__init__( 'Invalid POSIX spec: {}'.format(spec) )
class Zoneinfoerror(Exception): pass class Invalidzoneinfofile(ZoneinfoError): pass class Invalidtimezone(ZoneinfoError): def __init__(self, name): super(InvalidTimezone, self).__init__('Invalid timezone "{}"'.format(name)) class Invalidposixspec(ZoneinfoError): def __init__(self, spec): super(InvalidPosixSpec, self).__init__('Invalid POSIX spec: {}'.format(spec))
class RenderQueue: def __init__(self, window): self.window = window self.queue = [] def render(self): if self.queue: self.queue[0]() def next(self): self.queue = self.queue[1:] def add(self, source): self.queue.append(source) def clear(self): self.queue = []
class Renderqueue: def __init__(self, window): self.window = window self.queue = [] def render(self): if self.queue: self.queue[0]() def next(self): self.queue = self.queue[1:] def add(self, source): self.queue.append(source) def clear(self): self.queue = []
def question(text, default=None, resp_type=None): if default is None: default_str = '' else: default_str = f'({default})' resp = input(f'{text}{default_str}: ') if not resp: return default if resp_type: return resp_type(resp) return resp
def question(text, default=None, resp_type=None): if default is None: default_str = '' else: default_str = f'({default})' resp = input(f'{text}{default_str}: ') if not resp: return default if resp_type: return resp_type(resp) return resp
class TreeNode: def __init__(self, x): self.val = x self.left = None self.right = None class Solution: def __init__(self): self.arrVal = [] self.arrTmp = [] def isSymmetric(self,root): if root == None: return True num = 0 self.arrTmp.append(root) while (len(self.arrTmp) != 0): tempNode = self.arrTmp[0] del self.arrTmp[0] if tempNode == None: self.arrVal.append(None) self.arrTmp.append(None) self.arrTmp.append(None) else: self.arrVal.append(tempNode.val) self.arrTmp.append(tempNode.left) self.arrTmp.append(tempNode.right) if (len(self.arrVal) == pow(2,num)): num+=1 if not self.isHUI(self.arrVal): return False else: del self.arrVal[:] if (self.isNoneList(self.arrTmp)): return True else: continue print(self.arrVal) return True def isHUI(self,lst): i = 0 while (i < len(lst)-i-1): if lst[i] != lst[len(lst)-i-1]: return False i+=1 return True def isNoneList(self,lst): for i in self.arrTmp: if i != None: return False return True if __name__ == "__main__": solution = Solution() leftNode = TreeNode(2) rightNode = TreeNode(2) rootNode = TreeNode(1) rootNode.left = leftNode rootNode.right = rightNode print(solution.isSymmetric(rootNode))
class Treenode: def __init__(self, x): self.val = x self.left = None self.right = None class Solution: def __init__(self): self.arrVal = [] self.arrTmp = [] def is_symmetric(self, root): if root == None: return True num = 0 self.arrTmp.append(root) while len(self.arrTmp) != 0: temp_node = self.arrTmp[0] del self.arrTmp[0] if tempNode == None: self.arrVal.append(None) self.arrTmp.append(None) self.arrTmp.append(None) else: self.arrVal.append(tempNode.val) self.arrTmp.append(tempNode.left) self.arrTmp.append(tempNode.right) if len(self.arrVal) == pow(2, num): num += 1 if not self.isHUI(self.arrVal): return False else: del self.arrVal[:] if self.isNoneList(self.arrTmp): return True else: continue print(self.arrVal) return True def is_hui(self, lst): i = 0 while i < len(lst) - i - 1: if lst[i] != lst[len(lst) - i - 1]: return False i += 1 return True def is_none_list(self, lst): for i in self.arrTmp: if i != None: return False return True if __name__ == '__main__': solution = solution() left_node = tree_node(2) right_node = tree_node(2) root_node = tree_node(1) rootNode.left = leftNode rootNode.right = rightNode print(solution.isSymmetric(rootNode))
a,b,n,w=list(map(int,input().split())) L = [] for i in range(1,n): if a*i + b*(n-i) == w: L.append(i) if len(L) == 1: print(L[0],n-L[0]) else: print(-1)
(a, b, n, w) = list(map(int, input().split())) l = [] for i in range(1, n): if a * i + b * (n - i) == w: L.append(i) if len(L) == 1: print(L[0], n - L[0]) else: print(-1)
#!/usr/bin/env python3 def is_terminal(battle_data) -> bool: return battle_data["ended"] def get_side_value(side) -> float: return sum(pokemon["hp"] / pokemon["maxhp"] for pokemon in side["pokemon"]) def get_heuristic_value(battle_data): sides = battle_data["sides"] return get_side_value(sides[1]) - get_side_value(sides[0]) def alpha_beta(env, battle, depth, alpha, beta, player_idx, last_move): client = env.client battle_id = battle["id"] battle_data = battle["data"] next_player_idx = (player_idx + 1) % 2 if depth == 0 or is_terminal(battle_data): return get_heuristic_value(battle_data), None best_move_idx = None if player_idx == 0: value = -float("inf") for move_idx in battle["actions"][1]: successor_value, _ = alpha_beta( env, battle, depth, alpha, beta, next_player_idx, env.get_move(move_idx) ) if successor_value > value: value = successor_value best_move_idx = move_idx alpha = max(alpha, value) if alpha >= beta: break return value, best_move_idx else: value = float("inf") for move_idx in battle["actions"][0]: successor = client.do_move(battle_id, env.get_move(move_idx), last_move) successor_value, _ = alpha_beta( env, successor, depth - 1, alpha, beta, next_player_idx, None ) if successor_value < value: value = successor_value best_move_idx = move_idx beta = min(beta, value) if alpha >= beta: break return value, best_move_idx def agent(env, depth=1): _, best_move_idx = alpha_beta( env, env.current_battle, depth, -float("inf"), float("inf"), 0, None ) return best_move_idx
def is_terminal(battle_data) -> bool: return battle_data['ended'] def get_side_value(side) -> float: return sum((pokemon['hp'] / pokemon['maxhp'] for pokemon in side['pokemon'])) def get_heuristic_value(battle_data): sides = battle_data['sides'] return get_side_value(sides[1]) - get_side_value(sides[0]) def alpha_beta(env, battle, depth, alpha, beta, player_idx, last_move): client = env.client battle_id = battle['id'] battle_data = battle['data'] next_player_idx = (player_idx + 1) % 2 if depth == 0 or is_terminal(battle_data): return (get_heuristic_value(battle_data), None) best_move_idx = None if player_idx == 0: value = -float('inf') for move_idx in battle['actions'][1]: (successor_value, _) = alpha_beta(env, battle, depth, alpha, beta, next_player_idx, env.get_move(move_idx)) if successor_value > value: value = successor_value best_move_idx = move_idx alpha = max(alpha, value) if alpha >= beta: break return (value, best_move_idx) else: value = float('inf') for move_idx in battle['actions'][0]: successor = client.do_move(battle_id, env.get_move(move_idx), last_move) (successor_value, _) = alpha_beta(env, successor, depth - 1, alpha, beta, next_player_idx, None) if successor_value < value: value = successor_value best_move_idx = move_idx beta = min(beta, value) if alpha >= beta: break return (value, best_move_idx) def agent(env, depth=1): (_, best_move_idx) = alpha_beta(env, env.current_battle, depth, -float('inf'), float('inf'), 0, None) return best_move_idx
class Cartes: couleur = ("bleu", "vert", "rouge", "jaune") valeur = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, "+2", "inversion", "passer", "stop") def __init__(self, coul, val): self.valeur = val self.couleur = coul def validation(self, val, coul): if val > 0 or val < 14: raise Exception("La valeur est comprise entre 0 et 14") if coul > 0 or coul < 4: raise Exception("La couleur est comprise entre 0 et 3") def __str__(self): return str(Cartes.valeur[self.__valeur]) + " de " + (Cartes.couleur[self.__couleur]) def __getValeur(self): return self.__valeur def __setValeur(self, val): self.__valeur = val valeur = property(__getValeur, __setValeur) def __getCouleur(self): return self.__valeur def __setCouleur(self, val): self.__couleur = val couleur = property(__getCouleur, __setCouleur)
class Cartes: couleur = ('bleu', 'vert', 'rouge', 'jaune') valeur = (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, '+2', 'inversion', 'passer', 'stop') def __init__(self, coul, val): self.valeur = val self.couleur = coul def validation(self, val, coul): if val > 0 or val < 14: raise exception('La valeur est comprise entre 0 et 14') if coul > 0 or coul < 4: raise exception('La couleur est comprise entre 0 et 3') def __str__(self): return str(Cartes.valeur[self.__valeur]) + ' de ' + Cartes.couleur[self.__couleur] def __get_valeur(self): return self.__valeur def __set_valeur(self, val): self.__valeur = val valeur = property(__getValeur, __setValeur) def __get_couleur(self): return self.__valeur def __set_couleur(self, val): self.__couleur = val couleur = property(__getCouleur, __setCouleur)
class Connector: """We use this as a public base class. Customized connectors inherit this class as a framework for building them. .. note:: Base connector class class as framework for child classes. """ def __init__(self): """Instantiates the base connector. """ pass def _preprocess(self, message): """ This might involve cleaning up the input format, making everything lowercase (normalizing), removing extra things that we don't need, adding additional data from other sources (dictionary/word list), etc. :param message: Message. :type message: str. :returns: str -- the return code. """ cleaned_message = message return cleaned_message def _process(self, cleaned_message, userinfo=None): """ Read in and operate on the cleaned message data from :func`_preprocess`. :param cleaned_message: Cleaned message. :type cleaned_message: str. :param userinfo: Additional user data (e.g. User name). :type userinfo: dict. :returns: str -- the return code. """ prediction = cleaned_message return prediction def _postprocess(self, prediction): """ Operate on the prediction from :func`_process`. :param prediction: Cleaned message. :type prediction: str. :returns: str -- the return code. """ result = prediction return result def respond(self, message): """ This is called by :mod:`botframework` in child classes. :param message: Cleaned message. :type message: str. :returns: str -- the return code. """ cleaned_message = self._preprocess(message) prediction = self._process(cleaned_message) result = self._postprocess(prediction) return result
class Connector: """We use this as a public base class. Customized connectors inherit this class as a framework for building them. .. note:: Base connector class class as framework for child classes. """ def __init__(self): """Instantiates the base connector. """ pass def _preprocess(self, message): """ This might involve cleaning up the input format, making everything lowercase (normalizing), removing extra things that we don't need, adding additional data from other sources (dictionary/word list), etc. :param message: Message. :type message: str. :returns: str -- the return code. """ cleaned_message = message return cleaned_message def _process(self, cleaned_message, userinfo=None): """ Read in and operate on the cleaned message data from :func`_preprocess`. :param cleaned_message: Cleaned message. :type cleaned_message: str. :param userinfo: Additional user data (e.g. User name). :type userinfo: dict. :returns: str -- the return code. """ prediction = cleaned_message return prediction def _postprocess(self, prediction): """ Operate on the prediction from :func`_process`. :param prediction: Cleaned message. :type prediction: str. :returns: str -- the return code. """ result = prediction return result def respond(self, message): """ This is called by :mod:`botframework` in child classes. :param message: Cleaned message. :type message: str. :returns: str -- the return code. """ cleaned_message = self._preprocess(message) prediction = self._process(cleaned_message) result = self._postprocess(prediction) return result
inputs = [1,2,3,2.5] weights1 = [0.2,0.8,-0.5,1.0] weights2 = [0.5,-0.91,0.26,-0.5] weights3 = [-0.26, -0.27, 0.17, 0.87] bias1 = 2 bias2 = 3 bias3 = 0.5 output = [inputs[0] * weights1[0] + inputs[1] * weights1[1] + inputs[2] * weights1[2] + inputs[3] * weights1[3] + bias1, inputs[0] * weights2[0] + inputs[1] * weights2[1] + inputs[2] * weights2[2] + inputs[3] * weights2[3] + bias2, inputs[0] * weights3[0] + inputs[1] * weights3[1] + inputs[2] * weights3[2] + inputs[3] * weights3[3] + bias3] print(output)
inputs = [1, 2, 3, 2.5] weights1 = [0.2, 0.8, -0.5, 1.0] weights2 = [0.5, -0.91, 0.26, -0.5] weights3 = [-0.26, -0.27, 0.17, 0.87] bias1 = 2 bias2 = 3 bias3 = 0.5 output = [inputs[0] * weights1[0] + inputs[1] * weights1[1] + inputs[2] * weights1[2] + inputs[3] * weights1[3] + bias1, inputs[0] * weights2[0] + inputs[1] * weights2[1] + inputs[2] * weights2[2] + inputs[3] * weights2[3] + bias2, inputs[0] * weights3[0] + inputs[1] * weights3[1] + inputs[2] * weights3[2] + inputs[3] * weights3[3] + bias3] print(output)
l1 = [1,2,3] l2 = [4,5,6] l3 = [1,2,3] # number of items in list print(f"number of items in l1 : {len(l1)}") # compare lists , == compare the data element wise print(l1 == l1) print(l1 == l3) print(l1 == l2)
l1 = [1, 2, 3] l2 = [4, 5, 6] l3 = [1, 2, 3] print(f'number of items in l1 : {len(l1)}') print(l1 == l1) print(l1 == l3) print(l1 == l2)
# -*- coding: utf-8 -*- """ This program can calculate the hit and miss % using FIFO method. Created on Wed Sep 4 16:42:16 2019 @author: tuhin Mitra """ # demo = """ # You Can Either enter string like this: # 1,3,0,3,5,6 # or, # You can Also Enter like this: # 130356 # (for better readability the first method is recommended, # # However--> For quick and easy entry you can also use the 2nd method of input) # --- in both the cases you get the same output --- # """ # print(f"{demo}\n") # print('Enter the Given string (separated by commas):') # actually if you don't separate by commas,it'll still not give # # and problem # string = input() # if ',' in string: # string_list = string.replace(' ', '').split(',') # else: # string_list = [i for i in string] def solve(string_list, frame_length): current_list = [] hit_count = 0 miss_count = 0 count = 0 display_line = "Steps Display for FIFO Method:\n" # frame_length = int(input('Enter Frame length:')) # print('Entered length of the string : {}\n{}'.format(len(string_list), 16 * '__')) for i in string_list: if i not in current_list: if len(current_list) == frame_length: current_list.pop(0) current_list.append(i) miss_count += 1 flag = 'miss' else: hit_count += 1 flag = 'hit' count += 1 temp_list = current_list.copy() # a copy list created to keep safe the actual content temp_list.reverse() # print('>>{0} {1} with:({2}){3}{4}{5}'.format('step', count, i, '.\n|', '|\n|'.join(temp_list), '|')) # print(2 * ' ', flag, '\n') display_line += "\n" + '>>{0} {1} with:({2}){3}{4}{5}'.format('step', count, i, '\n|', '|\n|'.join(temp_list), '|') + f"\n{2 * ' '} {flag} \n" miss_pecent = (miss_count / len(string_list)) * 100 hit_percent = (hit_count / len(string_list)) * 100 # print('\nMiss count:{}, Hit count:{}\n{}'.format(miss_count, hit_count, 16 * '__')) display_line += '\n\nMiss count:{}, Hit count:{}\n{}'.format(miss_count, hit_count, 16 * '__') # print('The miss percent is: {0:.2f} and\nthe hit percent is: 
{1:.2f}'.format(miss_pecent, hit_percent)) display_line += '\nThe miss percent is: {0:.2f} and\nthe hit percent is: {1:.2f}\n'.format(miss_pecent, hit_percent) display_line += f'\n{14*" "}--by Tuhin Mitra' return display_line
""" This program can calculate the hit and miss % using FIFO method. Created on Wed Sep 4 16:42:16 2019 @author: tuhin Mitra """ def solve(string_list, frame_length): current_list = [] hit_count = 0 miss_count = 0 count = 0 display_line = 'Steps Display for FIFO Method:\n' for i in string_list: if i not in current_list: if len(current_list) == frame_length: current_list.pop(0) current_list.append(i) miss_count += 1 flag = 'miss' else: hit_count += 1 flag = 'hit' count += 1 temp_list = current_list.copy() temp_list.reverse() display_line += '\n' + '>>{0} {1} with:({2}){3}{4}{5}'.format('step', count, i, '\n|', '|\n|'.join(temp_list), '|') + f"\n{2 * ' '} {flag} \n" miss_pecent = miss_count / len(string_list) * 100 hit_percent = hit_count / len(string_list) * 100 display_line += '\n\nMiss count:{}, Hit count:{}\n{}'.format(miss_count, hit_count, 16 * '__') display_line += '\nThe miss percent is: {0:.2f} and\nthe hit percent is: {1:.2f}\n'.format(miss_pecent, hit_percent) display_line += f"\n{14 * ' '}--by Tuhin Mitra" return display_line
class CannotExploit(Exception): pass class CannotExplore(Exception): pass class NoSuchShellcode(Exception): pass
class Cannotexploit(Exception): pass class Cannotexplore(Exception): pass class Nosuchshellcode(Exception): pass
class TankBoard: """ Represents the board tanks are located on. Attributes: _height: height of the board _width: width of the board _board: 2D Array """ def __init__(self, width, height): self._height = height self._width = width self._board = [[None for _ in range(height)] for _ in range(width)] # Initialize the board def setUpTile(self, x, y, tile): """ Sets the tile at a given position to tile object given :param int x: X coordinate of the tile to be set :param int y: Y coordinate of the tile to be set :param Tile tile: Tile object to be set at the given location :return: None """ self._board[x][y] = tile def getTile(self, x, y): """ Returns tile at a given position :param int x: X coordinate of the tile to be returned :param int y: Y coordinate of the tile to be returned :return: Tile at the given location :rtype: Tile """ return self._board[x][y] @property def height(self): return self._height @height.setter def height(self, value): self._height = value @property def width(self): return self._width
class Tankboard: """ Represents the board tanks are located on. Attributes: _height: height of the board _width: width of the board _board: 2D Array """ def __init__(self, width, height): self._height = height self._width = width self._board = [[None for _ in range(height)] for _ in range(width)] def set_up_tile(self, x, y, tile): """ Sets the tile at a given position to tile object given :param int x: X coordinate of the tile to be set :param int y: Y coordinate of the tile to be set :param Tile tile: Tile object to be set at the given location :return: None """ self._board[x][y] = tile def get_tile(self, x, y): """ Returns tile at a given position :param int x: X coordinate of the tile to be returned :param int y: Y coordinate of the tile to be returned :return: Tile at the given location :rtype: Tile """ return self._board[x][y] @property def height(self): return self._height @height.setter def height(self, value): self._height = value @property def width(self): return self._width
def soma(lista): if len(lista) == 0: return 0 else: return lista[0] +soma(lista[1:]) print(soma([1,2,3,4,5,6,7])) def fib(n): if n==0: return 0 elif n==1: return 1 else: return fib(n-1)+fib(n-2)
def soma(lista): if len(lista) == 0: return 0 else: return lista[0] + soma(lista[1:]) print(soma([1, 2, 3, 4, 5, 6, 7])) def fib(n): if n == 0: return 0 elif n == 1: return 1 else: return fib(n - 1) + fib(n - 2)
# # from src/eulerian.c # # Eulerian to eulerianNumber # def eulerianNumber(n, k): if k == 0: return 1 if k < 0 or k >= n: return 0 return (k+1) * eulerianNumber(n-1,k) + (n-k) * eulerianNumber(n-1,k-1)
def eulerian_number(n, k): if k == 0: return 1 if k < 0 or k >= n: return 0 return (k + 1) * eulerian_number(n - 1, k) + (n - k) * eulerian_number(n - 1, k - 1)
# go_board.py class GoBoard: EMPTY = 0 WHITE = 1 BLACK = 2 def __init__( self, size = 0 ): self.size = size if size > 0: self.matrix = [ [ self.EMPTY for j in range( size ) ] for i in range( size ) ] def Serialize( self ): return { 'size' : self.size, 'matrix' : self.matrix } def Deserialize( self, data ): self.size = data[ 'size' ] self.matrix = data[ 'matrix' ] return self def __eq__( self, board ): for i in range( self.size ): for j in range( self.size ): if self.matrix[i][j] != board.matrix[i][j]: return False return True def AllLocationsOfState( self, state ): for i in range( self.size ): for j in range( self.size ): if self.matrix[i][j] == state: yield ( i, j ) def AdjacentLocations( self, location ): for offset in [ ( -1, 0 ), ( 1, 0 ), ( 0, -1 ), ( 0, 1 ) ]: adjacent_location = ( location[0] + offset[0], location[1] + offset[1] ) if adjacent_location[0] < 0 or adjacent_location[1] < 0: continue if adjacent_location[0] >= self.size or adjacent_location[1] >= self.size: continue yield adjacent_location def GetState( self, location ): return self.matrix[ location[0] ][ location[1] ] def SetState( self, location, state ): self.matrix[ location[0] ][ location[1] ] = state def AnalyzeGroups( self, for_who ): location_list = [ location for location in self.AllLocationsOfState( for_who ) ] group_list = [] while len( location_list ) > 0: location = location_list[0] group = { 'location_list' : [], 'liberties' : 0, 'liberty_location_list' : [] } queue = [ location ] while len( queue ) > 0: location = queue.pop() group[ 'location_list' ].append( location ) location_list.remove( location ) for adjacent_location in self.AdjacentLocations( location ): if adjacent_location in group[ 'location_list' ]: continue if adjacent_location in queue: continue if self.GetState( adjacent_location ) == for_who: queue.append( adjacent_location ) if for_who != self.EMPTY: for location in group[ 'location_list' ]: for adjacent_location in self.AdjacentLocations( location ): if 
self.GetState( adjacent_location ) == self.EMPTY: if not adjacent_location in group[ 'liberty_location_list' ]: group[ 'liberties' ] += 1 group[ 'liberty_location_list' ].append( adjacent_location ) else: del group[ 'liberties' ] del group[ 'liberty_location_list' ] group_list.append( group ) return group_list def CalculateTerritory( self ): territory = { self.WHITE : 0, self.BLACK : 0, } group_list = self.AnalyzeGroups( self.EMPTY ) for group in group_list: location_list = group[ 'location_list' ] touch_map = { self.WHITE : set(), self.BLACK : set(), } for location in location_list: for adjacent_location in self.AdjacentLocations( location ): state = self.GetState( adjacent_location ) if state != self.EMPTY: touch_map[ state ].add( adjacent_location ) white_touch_count = len( touch_map[ self.WHITE ] ) black_touch_count = len( touch_map[ self.BLACK ] ) group[ 'owner' ] = None if white_touch_count > 0 and black_touch_count == 0: group[ 'owner' ] = self.WHITE elif black_touch_count > 0 and white_touch_count == 0: group[ 'owner' ] = self.BLACK else: pass # No one owns the territory. owner = group[ 'owner' ] if owner: territory[ owner ] += len( location_list ) return territory, group_list def Clone( self ): clone = GoBoard( self.size ) for i in range( self.size ): for j in range( self.size ): clone.matrix[i][j] = self.matrix[i][j] return clone def __str__( self ): board_string = '' for i in range( self.size ): for j in range( self.size ): stone = self.matrix[i][j] if stone == self.EMPTY: stone = ' ' elif stone == self.WHITE: stone = 'O' elif stone == self.BLACK: stone = '#' else: stone = '?' board_string += '[' + stone + ']' if j < self.size - 1: board_string += '--' board_string += ' %02d\n' % i if i < self.size - 1: board_string += ' | ' * self.size + '\n' else: for j in range( self.size ): board_string += ' %02d ' % j board_string += '\n' return board_string
class Goboard: empty = 0 white = 1 black = 2 def __init__(self, size=0): self.size = size if size > 0: self.matrix = [[self.EMPTY for j in range(size)] for i in range(size)] def serialize(self): return {'size': self.size, 'matrix': self.matrix} def deserialize(self, data): self.size = data['size'] self.matrix = data['matrix'] return self def __eq__(self, board): for i in range(self.size): for j in range(self.size): if self.matrix[i][j] != board.matrix[i][j]: return False return True def all_locations_of_state(self, state): for i in range(self.size): for j in range(self.size): if self.matrix[i][j] == state: yield (i, j) def adjacent_locations(self, location): for offset in [(-1, 0), (1, 0), (0, -1), (0, 1)]: adjacent_location = (location[0] + offset[0], location[1] + offset[1]) if adjacent_location[0] < 0 or adjacent_location[1] < 0: continue if adjacent_location[0] >= self.size or adjacent_location[1] >= self.size: continue yield adjacent_location def get_state(self, location): return self.matrix[location[0]][location[1]] def set_state(self, location, state): self.matrix[location[0]][location[1]] = state def analyze_groups(self, for_who): location_list = [location for location in self.AllLocationsOfState(for_who)] group_list = [] while len(location_list) > 0: location = location_list[0] group = {'location_list': [], 'liberties': 0, 'liberty_location_list': []} queue = [location] while len(queue) > 0: location = queue.pop() group['location_list'].append(location) location_list.remove(location) for adjacent_location in self.AdjacentLocations(location): if adjacent_location in group['location_list']: continue if adjacent_location in queue: continue if self.GetState(adjacent_location) == for_who: queue.append(adjacent_location) if for_who != self.EMPTY: for location in group['location_list']: for adjacent_location in self.AdjacentLocations(location): if self.GetState(adjacent_location) == self.EMPTY: if not adjacent_location in group['liberty_location_list']: 
group['liberties'] += 1 group['liberty_location_list'].append(adjacent_location) else: del group['liberties'] del group['liberty_location_list'] group_list.append(group) return group_list def calculate_territory(self): territory = {self.WHITE: 0, self.BLACK: 0} group_list = self.AnalyzeGroups(self.EMPTY) for group in group_list: location_list = group['location_list'] touch_map = {self.WHITE: set(), self.BLACK: set()} for location in location_list: for adjacent_location in self.AdjacentLocations(location): state = self.GetState(adjacent_location) if state != self.EMPTY: touch_map[state].add(adjacent_location) white_touch_count = len(touch_map[self.WHITE]) black_touch_count = len(touch_map[self.BLACK]) group['owner'] = None if white_touch_count > 0 and black_touch_count == 0: group['owner'] = self.WHITE elif black_touch_count > 0 and white_touch_count == 0: group['owner'] = self.BLACK else: pass owner = group['owner'] if owner: territory[owner] += len(location_list) return (territory, group_list) def clone(self): clone = go_board(self.size) for i in range(self.size): for j in range(self.size): clone.matrix[i][j] = self.matrix[i][j] return clone def __str__(self): board_string = '' for i in range(self.size): for j in range(self.size): stone = self.matrix[i][j] if stone == self.EMPTY: stone = ' ' elif stone == self.WHITE: stone = 'O' elif stone == self.BLACK: stone = '#' else: stone = '?' board_string += '[' + stone + ']' if j < self.size - 1: board_string += '--' board_string += ' %02d\n' % i if i < self.size - 1: board_string += ' | ' * self.size + '\n' else: for j in range(self.size): board_string += ' %02d ' % j board_string += '\n' return board_string
""" Default config for Workload Automation. DO NOT MODIFY this file. This file gets copied to ~/.workload_automation/config.py on initial run of run_workloads. Add your configuration to that file instead. """ # *** WARNING: *** # Configuration listed in this file is NOT COMPLETE. This file sets the default # configuration for WA and gives EXAMPLES of other configuration available. It # is not supposed to be an exhaustive list. # PLEASE REFER TO WA DOCUMENTATION FOR THE COMPLETE LIST OF AVAILABLE # EXTENSIONS AND THEIR CONFIGURATION. # This defines when the device will be rebooted during Workload Automation execution. # # # # Valid policies are: # # never: The device will never be rebooted. # # as_needed: The device will only be rebooted if the need arises (e.g. if it # # becomes unresponsive # # initial: The device will be rebooted when the execution first starts, just before executing # # the first workload spec. # # each_spec: The device will be rebooted before running a new workload spec. # # each_iteration: The device will be rebooted before each new iteration. # # # reboot_policy = 'as_needed' # Defines the order in which the agenda spec will be executed. At the moment, # # the following execution orders are supported: # # # # by_iteration: The first iteration of each workload spec is executed one ofter the other, # # so all workloads are executed before proceeding on to the second iteration. # # This is the default if no order is explicitly specified. # # If multiple sections were specified, this will also split them up, so that specs # # in the same section are further apart in the execution order. # # by_section: Same as "by_iteration", but runn specs from the same section one after the other # # by_spec: All iterations of the first spec are executed before moving on to the next # # spec. This may also be specified as ``"classic"``, as this was the way # # workloads were executed in earlier versions of WA. 
# # random: Randomisizes the order in which specs run. # execution_order = 'by_iteration' # This indicates when a job will be re-run. # Possible values: # OK: This iteration has completed and no errors have been detected # PARTIAL: One or more instruments have failed (the iteration may still be running). # FAILED: The workload itself has failed. # ABORTED: The user interupted the workload # # If set to an empty list, a job will not be re-run ever. retry_on_status = ['FAILED', 'PARTIAL'] # How many times a job will be re-run before giving up max_retries = 3 # If WA should delete its files from the device after the run is completed clean_up = False #################################################################################################### ######################################### Device Settings ########################################## #################################################################################################### # Specify the device you want to run workload automation on. This must be a # # string with the ID of the device. Common options are 'generic_android' and # # 'generic_linux'. Run ``wa list devices`` to see all available options. # # # device = 'generic_android' # Configuration options that will be passed onto the device. These are obviously device-specific, # # so check the documentation for the particular device to find out which options and values are # # valid. The settings listed below are common to all devices # # # device_config = dict( # The name used by adb to identify the device. Use "adb devices" in bash to list # the devices currently seen by adb. #adb_name='10.109.173.2:5555', # The directory on the device that WA will use to push files to #working_directory='/sdcard/wa-working', # This specifies the device's CPU cores. The order must match how they # appear in cpufreq. The example below is for TC2. # core_names = ['a7', 'a7', 'a7', 'a15', 'a15'] # Specifies cluster mapping for the device's cores. 
# core_clusters = [0, 0, 0, 1, 1] ) #################################################################################################### ################################### Instrumentation Configuration #################################### #################################################################################################### # This defines the additionnal instrumentation that will be enabled during workload execution, # # which in turn determines what additional data (such as /proc/interrupts content or Streamline # # traces) will be available in the results directory. # # # instrumentation = [ # Records the time it took to run the workload 'execution_time', # Collects /proc/interrupts before and after execution and does a diff. 'interrupts', # Collects the contents of/sys/devices/system/cpu before and after execution and does a diff. 'cpufreq', # Gets energy usage from the workload form HWMON devices # NOTE: the hardware needs to have the right sensors in order for this to work #'hwmon', # Run perf in the background during workload execution and then collect the results. perf is a # standard Linux performance analysis tool. #'perf', # Collect Streamline traces during workload execution. Streamline is part of DS-5 #'streamline', # Collects traces by interacting with Ftrace Linux kernel internal tracer #'trace-cmd', # Obtains the power consumption of the target device's core measured by National Instruments # Data Acquisition(DAQ) device. #'daq', # Collects CCI counter data. #'cci_pmu_logger', # Collects FPS (Frames Per Second) and related metrics (such as jank) from # the View of the workload (Note: only a single View per workload is # supported at the moment, so this is mainly useful for games). 
#'fps', ] #################################################################################################### ################################# Result Processors Configuration ################################## #################################################################################################### # Specifies how results will be processed and presented. # # # result_processors = [ # Creates a status.txt that provides a summary status for the run 'status', # Creates a results.txt file for each iteration that lists all collected metrics # in "name = value (units)" format 'standard', # Creates a results.csv that contains metrics for all iterations of all workloads # in the .csv format. 'csv', # Creates a summary.csv that contains summary metrics for all iterations of all # all in the .csv format. Summary metrics are defined on per-worklod basis # are typically things like overall scores. The contents of summary.csv are # always a subset of the contents of results.csv (if it is generated). #'summary_csv', # Creates a results.csv that contains metrics for all iterations of all workloads # in the JSON format #'json', # Write results to an sqlite3 database. By default, a new database will be # generated for each run, however it is possible to specify a path to an # existing DB file (see result processor configuration below), in which # case results from multiple runs may be stored in the one file. #'sqlite', ] #################################################################################################### ################################### Logging output Configuration ################################### #################################################################################################### # Specify the format of logging messages. 
The format uses the old formatting syntax: # # # # http://docs.python.org/2/library/stdtypes.html#string-formatting-operations # # # # The attributes that can be used in formats are listested here: # # # # http://docs.python.org/2/library/logging.html#logrecord-attributes # # # logging = { # Log file format 'file format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s', # Verbose console output format 'verbose format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s', # Regular console output format 'regular format': '%(levelname)-8s %(message)s', # Colouring the console output 'colour_enabled': True, } #################################################################################################### #################################### Instruments Configuration ##################################### #################################################################################################### # Instrumentation Configuration is related to specific instrument's settings. Some of the # # instrumentations require specific settings in order for them to work. These settings are # # specified here. # # Note that these settings only take effect if the corresponding instrument is # enabled above. #################################################################################################### ######################################## perf configuration ######################################## # The hardware events such as instructions executed, cache-misses suffered, or branches # mispredicted to be reported by perf. Events can be obtained from the device by tpying # 'perf list'. 
#perf_events = ['migrations', 'cs'] # The perf options which can be obtained from man page for perf-record #perf_options = '-a -i' #################################################################################################### ####################################### hwmon configuration ######################################## # The kinds of sensors hwmon instrument will look for #hwmon_sensors = ['energy', 'temp'] #################################################################################################### ###################################### trace-cmd configuration ##################################### # trace-cmd events to be traced. The events can be found by rooting on the device then type # 'trace-cmd list -e' #trace_events = ['power*'] #################################################################################################### ######################################### DAQ configuration ######################################## # The host address of the machine that runs the daq Server which the instrument communicates with #daq_server_host = '10.1.17.56' # The port number for daq Server in which daq instrument communicates with #daq_server_port = 56788 # The values of resistors 1 and 2 (in Ohms) across which the voltages are measured #daq_resistor_values = [0.002, 0.002] #################################################################################################### ################################### cci_pmu_logger configuration ################################### # The events to be counted by PMU # NOTE: The number of events must not exceed the number of counters available (which is 4 for CCI-400) #cci_pmu_events = ['0x63', '0x83'] # The name of the events which will be used when reporting PMU counts #cci_pmu_event_labels = ['event_0x63', 'event_0x83'] # The period (in jiffies) between counter reads #cci_pmu_period = 15 #################################################################################################### 
################################### fps configuration ############################################## # Data points below this FPS will dropped as not constituting "real" gameplay. The assumption # being that while actually running, the FPS in the game will not drop below X frames per second, # except on loading screens, menus, etc, which should not contribute to FPS calculation. #fps_drop_threshold=5 # If set to True, this will keep the raw dumpsys output in the results directory (this is maily # used for debugging). Note: frames.csv with collected frames data will always be generated # regardless of this setting. #fps_keep_raw=False #################################################################################################### ################################# Result Processor Configuration ################################### #################################################################################################### # Specifies an alternative database to store results in. If the file does not # exist, it will be created (the directiory of the file must exist however). If # the file does exist, the results will be added to the existing data set (each # run as a UUID, so results won't clash even if identical agendas were used). # Note that in order for this to work, the version of the schema used to generate # the DB file must match that of the schema used for the current run. Please # see "What's new" secition in WA docs to check if the schema has changed in # recent releases of WA. #sqlite_database = '/work/results/myresults.sqlite' # If the file specified by sqlite_database exists, setting this to True will # cause that file to be overwritten rather than updated -- existing results in # the file will be lost. 
#sqlite_overwrite = False # distribution: internal #################################################################################################### #################################### Resource Getter configuration ################################# #################################################################################################### # The location on your system where /arm/scratch is mounted. Used by # Scratch resource getter. #scratch_mount_point = '/arm/scratch' # end distribution
"""
Default config for Workload Automation.

DO NOT MODIFY this file. This file gets copied to
~/.workload_automation/config.py on initial run of run_workloads.
Add your configuration to that file instead.
"""
# When the target device gets rebooted between jobs.
reboot_policy = 'as_needed'
# Order in which workload iterations are executed.
execution_order = 'by_iteration'
# Job statuses that trigger a re-run.
retry_on_status = ['FAILED', 'PARTIAL']
# Maximum number of re-run attempts per job.
max_retries = 3
# Whether WA removes its files from the device after the run.
clean_up = False
# Target device plugin and its (empty by default) settings.
device = 'generic_android'
device_config = {}
# Instruments enabled during workload execution.
instrumentation = ['execution_time', 'interrupts', 'cpufreq']
# Result processors applied to the collected metrics.
result_processors = ['status', 'standard', 'csv']
# Log message formats for file and console output.
logging = {
    'file format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
    'verbose format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
    'regular format': '%(levelname)-8s %(message)s',
    'colour_enabled': True,
}
class QuoteInfo:
    """A single quote tick: symbol, venue, timestamps, price and volume."""

    def __init__(self):
        self.symbol = str()
        self.exchange = str()
        self.date = None
        self.time = None
        self.local_time = None
        self.price = float()
        self.volume = int()
        self.changepct = float()

    def __repr__(self):
        # Comma-terminated field dump; change percentage carries a '%' suffix.
        head = (self.symbol, self.exchange, self.time, self.price, self.volume)
        rendered = ''.join(str(field) + ',' for field in head)
        return rendered + str(self.changepct) + '%,'


class QuoteArray:
    """Column-oriented accumulation of QuoteInfo ticks for one symbol."""

    def __init__(self):
        self.symbol = str()
        self.exchange = str()
        self.date = None
        self.time_arr = []
        self.price_arr = []
        self.volume_arr = []

    def append(self, q):
        """Absorb one QuoteInfo tick into the per-field arrays."""
        self.symbol, self.exchange, self.date = q.symbol, q.exchange, q.date
        self.price_arr.append(q.price)
        self.time_arr.append(q.time)
        self.volume_arr.append(q.volume)


class MarketDepth:
    """Order-book snapshot: bid/ask price and quantity queues."""

    def __init__(self):
        self.symbol = str()
        self.exchange = str()
        self.date = None
        self.time = None
        self.local_time = None
        self.bid_q = []
        self.bid_qty_q = []
        self.ask_q = []
        self.ask_qty_q = []

    def __repr__(self):
        parts = [self.symbol + ',', self.exchange + ',', str(self.time) + ',']
        for queue in (self.bid_q, self.bid_qty_q, self.ask_q, self.ask_qty_q):
            parts.extend(str(level) + ',' for level in queue)
        return ''.join(parts)


class MarketDepthArray:
    """Column-oriented accumulation of MarketDepth snapshots."""

    def __init__(self):
        self.symbol = str()
        self.exchange = str()
        self.date = None
        self.time_arr = []
        self.bid_q_arr = []
        self.bid_qty_q_arr = []
        self.ask_q_arr = []
        self.ask_qty_q_arr = []
class Quoteinfo:
    """A single quote tick: symbol, venue, timestamps, price and volume."""

    def __init__(self):
        self.symbol = str()
        self.exchange = str()
        self.date = None
        self.time = None
        self.local_time = None
        self.price = float()
        self.volume = int()
        self.changepct = float()

    def __repr__(self):
        # Comma-terminated field dump; change percentage carries a '%' suffix.
        head = (self.symbol, self.exchange, self.time, self.price, self.volume)
        rendered = ''.join(str(field) + ',' for field in head)
        return rendered + str(self.changepct) + '%,'


class Quotearray:
    """Column-oriented accumulation of Quoteinfo ticks for one symbol."""

    def __init__(self):
        self.symbol = str()
        self.exchange = str()
        self.date = None
        self.time_arr = []
        self.price_arr = []
        self.volume_arr = []

    def append(self, q):
        """Absorb one Quoteinfo tick into the per-field arrays."""
        self.symbol, self.exchange, self.date = q.symbol, q.exchange, q.date
        self.price_arr.append(q.price)
        self.time_arr.append(q.time)
        self.volume_arr.append(q.volume)


class Marketdepth:
    """Order-book snapshot: bid/ask price and quantity queues."""

    def __init__(self):
        self.symbol = str()
        self.exchange = str()
        self.date = None
        self.time = None
        self.local_time = None
        self.bid_q = []
        self.bid_qty_q = []
        self.ask_q = []
        self.ask_qty_q = []

    def __repr__(self):
        parts = [self.symbol + ',', self.exchange + ',', str(self.time) + ',']
        for queue in (self.bid_q, self.bid_qty_q, self.ask_q, self.ask_qty_q):
            parts.extend(str(level) + ',' for level in queue)
        return ''.join(parts)


class Marketdeptharray:
    """Column-oriented accumulation of Marketdepth snapshots."""

    def __init__(self):
        self.symbol = str()
        self.exchange = str()
        self.date = None
        self.time_arr = []
        self.bid_q_arr = []
        self.bid_qty_q_arr = []
        self.ask_q_arr = []
        self.ask_qty_q_arr = []
class City:
    """A named geographic point with latitude and longitude."""

    def __init__(self, name, lat, lon):
        self.name = name
        self.lat = lat
        self.lon = lon
class City:
    """A named geographic point with latitude and longitude."""

    def __init__(self, name, lat, lon):
        self.name, self.lat, self.lon = name, lat, lon
class InvalidInterface(Exception):
    """Raised when an interface specification is invalid."""


class EmptyInterface(InvalidInterface):
    """Raised when an interface specification is empty."""


class NotAFileError(Exception):
    """Raised when a path exists but is not a regular file."""


class MissingFileError(Exception):
    """Raised when an expected file does not exist."""
class Invalidinterface(Exception):
    """Raised when an interface specification is invalid."""
    pass


class Emptyinterface(Invalidinterface):
    """Raised when an interface specification is empty.

    Fix: the original inherited from `InvalidInterface`, which is not
    defined in this module (the base class here is `Invalidinterface`),
    causing a NameError at import time.
    """
    pass


class Notafileerror(Exception):
    """Raised when a path exists but is not a regular file."""
    pass


class Missingfileerror(Exception):
    """Raised when an expected file does not exist."""
    pass
'''
Given a set of distinct integers, nums, return all possible subsets (the power set).
'''


class Solution(object):
    def subsets(self, nums):
        """
        :type nums: List[int]
        :rtype: List[List[int]]

        Each new number doubles the power set: every subset collected so
        far is re-emitted with the number appended.
        """
        power_set = [[]]
        for value in nums:
            power_set = power_set + [subset + [value] for subset in power_set]
        return power_set
"""
Given a set of distinct integers, nums, return all possible subsets (the power set).
"""


class Solution(object):
    def subsets(self, nums):
        """
        :type nums: List[int]
        :rtype: List[List[int]]

        Each new number doubles the power set: every subset collected so
        far is re-emitted with the number appended.
        """
        power_set = [[]]
        for value in nums:
            power_set = power_set + [subset + [value] for subset in power_set]
        return power_set
#! /usr/bin/env python3
# Demonstrates list mutation: pop the last element, then append a new one.
a = [1, 4, 3, 53, 2]
print('list :', a)
last = a.pop()  # detach and show the trailing element
print(last)
print('pop element from list :', a)
a += [21]  # put a new value on the end
print('adding element in list :', a)
# Demonstrates list mutation: pop the last element, then append a new one.
a = [1, 4, 3, 53, 2]
print('list :', a)
last = a.pop()  # detach and show the trailing element
print(last)
print('pop element from list :', a)
a += [21]  # put a new value on the end
print('adding element in list :', a)
""" This file contains ASCII art used in the Birthday Wisher program """ logo = """ ,-----. ,--. ,--. ,--. ,--. ,--. ,--.,--. ,--. | |) /_ `--',--.--.,-' '-.| ,---. ,-| | ,--,--.,--. ,--. | | | |`--' ,---. | ,---. ,---. ,--.--. | .-. \,--.| .--''-. .-'| .-. |' .-. |' ,-. | \ ' / | |.'.| |,--.( .-' | .-. || .-. :| .--' | '--' /| || | | | | | | |\ `-' |\ '-' | \ ' | ,'. || |.-' `)| | | |\ --.| | `------' `--'`--' `--' `--' `--' `---' `--`--'.-' / '--' '--'`--'`----' `--' `--' `----'`--' `---' """
""" This file contains ASCII art used in the Birthday Wisher program """ logo = "\n \n,-----. ,--. ,--. ,--. ,--. ,--. ,--.,--. ,--. \n| |) /_ `--',--.--.,-' '-.| ,---. ,-| | ,--,--.,--. ,--. | | | |`--' ,---. | ,---. ,---. ,--.--. \n| .-. \\,--.| .--''-. .-'| .-. |' .-. |' ,-. | \\ ' / | |.'.| |,--.( .-' | .-. || .-. :| .--' \n| '--' /| || | | | | | | |\\ `-' |\\ '-' | \\ ' | ,'. || |.-' `)| | | |\\ --.| | \n`------' `--'`--' `--' `--' `--' `---' `--`--'.-' / '--' '--'`--'`----' `--' `--' `----'`--' \n `---' \n"
# Distinct powers (Project Euler 29)


def koliko_potenc():
    """Return the list of a**b for all a, b in 2..100 (with duplicates)."""
    sez = []
    for a in range(2, 101):
        for b in range(2, 101):
            sez.append(a ** b)
    return sez


def izloci_iste(sez):
    """Return the number of distinct values in `sez`.

    Fix: the original counted duplicates with `sez[i] in sez[:i]`, an
    accidental O(n^2) scan over ~9800 big integers; a set yields the same
    count in a single O(n) pass.
    """
    return len(set(sez))


print(izloci_iste(koliko_potenc()))
def koliko_potenc():
    """Return the list of a**b for all a, b in 2..100 (with duplicates)."""
    sez = []
    for a in range(2, 101):
        for b in range(2, 101):
            sez.append(a ** b)
    return sez


def izloci_iste(sez):
    """Return the number of distinct values in `sez`.

    Fix: the original counted duplicates with `sez[i] in sez[:i]`, an
    accidental O(n^2) scan over ~9800 big integers; a set yields the same
    count in a single O(n) pass.
    """
    return len(set(sez))


print(izloci_iste(koliko_potenc()))
# MIT 6.006 Introduction to Algorithms, Spring 2020 # see: https://www.youtube.com/watch?v=r4-cftqTcdI&list=PLUl4u3cNGP63EdVPNLG3ToM6LaEUuStEY&index=42 def max_score_prefix(in_list: list): dp = [0] * (len(in_list) + 1) in_list.insert(0, 1) for i in range(1, len(in_list)): dp[i - 1] = max(dp[i - 2], dp[i - 2] + in_list[i], dp[i - 3] + in_list[i] * in_list[i - 1]) print(dp[-2]) def max_score_suffix(in_list: list): dp = [0] * (len(in_list) + 2) in_list.append(1) for i in reversed(range(len(in_list) - 1)): dp[i] = max(dp[i + 1], dp[i + 1] + in_list[i], dp[i + 2] + in_list[i] * in_list[i + 1]) print(dp[0]) if __name__ == '__main__': bowling_pins = [1, 1, 9, 9, 2, -5, -5] max_score_suffix(bowling_pins)
def max_score_prefix(in_list: list):
    """Bowling-pin DP over prefixes: each pin may be skipped, taken alone,
    or multiplied with its left neighbour. Prints the best total score.

    NOTE(review): mutates `in_list` (inserts a sentinel 1 at the front) and
    relies on Python negative indexing — for small i, dp[i - 2] / dp[i - 3]
    read the still-zero tail of dp. Presumably intentional; confirm.
    """
    dp = [0] * (len(in_list) + 1)
    in_list.insert(0, 1)
    for i in range(1, len(in_list)):
        # Best of: skip pin i, take pin i alone, or pair pin i with pin i-1.
        dp[i - 1] = max(dp[i - 2], dp[i - 2] + in_list[i], dp[i - 3] + in_list[i] * in_list[i - 1])
    print(dp[-2])


def max_score_suffix(in_list: list):
    """Same bowling-pin game as max_score_prefix, scanned right-to-left over
    suffixes. Prints the best total score.

    NOTE(review): mutates `in_list` by appending a sentinel 1.
    """
    dp = [0] * (len(in_list) + 2)
    in_list.append(1)
    for i in reversed(range(len(in_list) - 1)):
        # Best of: skip pin i, take pin i alone, or pair pin i with pin i+1.
        dp[i] = max(dp[i + 1], dp[i + 1] + in_list[i], dp[i + 2] + in_list[i] * in_list[i + 1])
    print(dp[0])


if __name__ == '__main__':
    bowling_pins = [1, 1, 9, 9, 2, -5, -5]
    max_score_suffix(bowling_pins)
def gen_perms_helper(array, current, subsets, index):
    """Recursively collect every subset of `array`: at each position,
    branch first on excluding, then on including that element."""
    if index >= len(array):
        subsets.append(current)
        return
    # Branch 1: leave array[index] out.
    gen_perms_helper(array, current.copy(), subsets, index + 1)
    # Branch 2: take array[index].
    current.append(array[index])
    gen_perms_helper(array, current.copy(), subsets, index + 1)


def gen_perms(array):
    """Return all subsets of `array` (exclude-before-include ordering)."""
    collected = []
    gen_perms_helper(array, [], collected, 0)
    return collected


print(gen_perms([1, 2, 3]))
def gen_perms_helper(array, current, subsets, index):
    """Recursively collect every subset of `array`: at each position,
    branch first on excluding, then on including that element."""
    if index >= len(array):
        subsets.append(current)
        return
    # Branch 1: leave array[index] out.
    gen_perms_helper(array, current.copy(), subsets, index + 1)
    # Branch 2: take array[index].
    current.append(array[index])
    gen_perms_helper(array, current.copy(), subsets, index + 1)


def gen_perms(array):
    """Return all subsets of `array` (exclude-before-include ordering)."""
    collected = []
    gen_perms_helper(array, [], collected, 0)
    return collected


print(gen_perms([1, 2, 3]))
class WireMap:
    """Traces a wire's path on a grid, recording the running step count at
    each visited cell (later visits overwrite earlier ones)."""

    def __init__(self):
        self.wire_map = {}  # (x, y) -> step count when the cell was reached
        self.x = 0
        self.y = 0
        self.step_counter = 0

    def move(self, steps, x_dir, y_dir):
        """Walk `steps` cells in direction (x_dir, y_dir), marking each."""
        for _ in range(steps):
            self.x += x_dir
            self.y += y_dir
            self.step_counter += 1
            self.wire_map[(self.x, self.y)] = self.step_counter

    def move_up(self, steps):
        return self.move(steps, 0, -1)

    def move_down(self, steps):
        return self.move(steps, 0, 1)

    def move_left(self, steps):
        return self.move(steps, -1, 0)

    def move_right(self, steps):
        return self.move(steps, 1, 0)

    def mark_wires(self, movements):
        """Apply a list of 'R8'/'L4'/'U2'/'D3' commands to this map."""
        handlers = {'R': self.move_right, 'L': self.move_left,
                    'U': self.move_up}
        for command in movements:
            # Anything other than R/L/U moves down, matching the original
            # if/elif/else chain.
            handlers.get(command[0], self.move_down)(int(command[1:]))


maps = [line.strip().split(",") for line in open("Day3.txt")]
map1 = WireMap()
map1.mark_wires(maps[0])
map2 = WireMap()
map2.mark_wires(maps[1])

keys_intersection = set(map1.wire_map.keys()) & set(map2.wire_map.keys())
result = [map1.wire_map[key] + map2.wire_map[key] for key in keys_intersection]
print(min(result))
class Wiremap:
    """Traces a wire's path on a grid, recording the running step count at
    each visited cell (later visits overwrite earlier ones)."""

    def __init__(self):
        self.wire_map = {}  # (x, y) -> step count when the cell was reached
        self.x = 0
        self.y = 0
        self.step_counter = 0

    def move(self, steps, x_dir, y_dir):
        """Walk `steps` cells in direction (x_dir, y_dir), marking each."""
        while steps > 0:
            self.x += x_dir
            self.y += y_dir
            self.step_counter += 1
            self.wire_map[self.x, self.y] = self.step_counter
            steps -= 1

    def move_up(self, steps):
        return self.move(steps, 0, -1)

    def move_down(self, steps):
        return self.move(steps, 0, 1)

    def move_left(self, steps):
        return self.move(steps, -1, 0)

    def move_right(self, steps):
        return self.move(steps, 1, 0)

    def mark_wires(self, movements):
        """Apply a list of 'R8'/'L4'/'U2'/'D3' commands to this map."""
        for command in movements:
            direction = command[0]
            s = int(command[1:])
            if direction == 'R':
                self.move_right(s)
            elif direction == 'L':
                self.move_left(s)
            elif direction == 'U':
                self.move_up(s)
            else:
                self.move_down(s)


maps = [line.strip().split(',') for line in open('Day3.txt')]
# Fix: the original instantiated undefined `wire_map()` (NameError); the
# class defined above is `Wiremap`.
map1 = Wiremap()
map1.mark_wires(maps[0])
map2 = Wiremap()
map2.mark_wires(maps[1])
keys1 = set(map1.wire_map.keys())
keys2 = set(map2.wire_map.keys())
keys_intersection = keys1 & keys2
result = []
for key in keys_intersection:
    result.append(map1.wire_map[key] + map2.wire_map[key])
print(min(result))
class Solution:
    def longestPalindrome(self, s: str) -> str:
        """Return the longest palindromic substring of `s` by expanding
        around every single- and double-character center."""
        best = ""
        for center in range(len(s)):
            odd = self.lp(s, center, center)
            even = self.lp(s, center, center + 1)
            best = max(best, odd, even, key=len)
        return best

    def lp(self, s, l, r):
        """Expand outwards from s[l..r] while the span stays a palindrome."""
        while l >= 0 and r < len(s) and s[l] == s[r]:
            l -= 1
            r += 1
        return s[l + 1:r]
class Solution:
    def longest_palindrome(self, s: str) -> str:
        """Return the longest palindromic substring of `s` by expanding
        around every single- and double-character center."""
        best = ''
        for center in range(len(s)):
            odd = self.lp(s, center, center)
            even = self.lp(s, center, center + 1)
            best = max(best, odd, even, key=len)
        return best

    def lp(self, s, l, r):
        """Expand outwards from s[l..r] while the span stays a palindrome."""
        while l >= 0 and r < len(s) and s[l] == s[r]:
            l -= 1
            r += 1
        return s[l + 1:r]
#!/usr/bin/env python3
# -*- coding: utf-8 -*-


class Priority(object):
    """Pipeline component that tags sentences containing priority cues
    (e.g. "important", "keep in mind") as PRIORITY opinion spans."""

    def __init__(self, nlp, object):
        self.object = object
        self.matcher = object.matcher.Matcher(nlp.vocab)
        # Single-token importance adjectives plus multi-token reminder
        # phrases; every token is matched on its lowercased text.
        cue_patterns = [
            [{'LOWER': 'important'}],
            [{'LOWER': 'crucial'}],
            [{'LOWER': 'key'}],
            [{'LOWER': 'essential'}],
            [{'LOWER': 'critical'}],
            [{'LOWER': 'fundamental'}],
            [{'LOWER': 'key'}],
            [{'LOWER': 'major'}],
            [{'LOWER': 'vital'}],
            [{'LOWER': 'first'}, {'LOWER': 'and'}, {'LOWER': 'foremost'}],
            [{'LOWER': 'now', 'OP': '?'}, {'LOWER': 'remember'}],
            [{'LOWER': 'keep'}, {'LOWER': 'in'}, {'LOWER': 'mind'}],
            [{'LOWER': 'do'}, {'LOWER': 'not'}, {'LOWER': 'forget'}],
            [{'LOWER': 'let'}, {'LOWER': 'us'}, {'LOWER': 'not'}, {'LOWER': 'forget'}],
            [{'LOWER': 'let'}, {'LOWER': 'us'}, {'LOWER': 'keep'}, {'LOWER': 'in'}, {'LOWER': 'mind'}],
            [{'LOWER': 'let'}, {'LOWER': 'us'}, {'LOWER': 'remember'}],
        ]
        self.matcher.add("Priority", None, *cue_patterns)

    def __call__(self, doc):
        """Mark the whole sentence around each cue match as PRIORITY."""
        for match_id, start, end in self.matcher(doc):
            sentence = self.object.tokens.Span(doc, start, end).sent
            opinion = self.object.tokens.Span(doc, sentence.start,
                                              sentence.end, label="PRIORITY")
            doc._.opinion.append(opinion)
        return doc
class Priority(object):
    """Pipeline component that tags sentences containing priority cues
    (e.g. "important", "keep in mind") as PRIORITY opinion spans."""

    def __init__(self, nlp, object):
        self.object = object
        self.matcher = object.matcher.Matcher(nlp.vocab)
        # Single-token importance adjectives plus multi-token reminder
        # phrases; every token is matched on its lowercased text.
        cue_patterns = [
            [{'LOWER': 'important'}],
            [{'LOWER': 'crucial'}],
            [{'LOWER': 'key'}],
            [{'LOWER': 'essential'}],
            [{'LOWER': 'critical'}],
            [{'LOWER': 'fundamental'}],
            [{'LOWER': 'key'}],
            [{'LOWER': 'major'}],
            [{'LOWER': 'vital'}],
            [{'LOWER': 'first'}, {'LOWER': 'and'}, {'LOWER': 'foremost'}],
            [{'LOWER': 'now', 'OP': '?'}, {'LOWER': 'remember'}],
            [{'LOWER': 'keep'}, {'LOWER': 'in'}, {'LOWER': 'mind'}],
            [{'LOWER': 'do'}, {'LOWER': 'not'}, {'LOWER': 'forget'}],
            [{'LOWER': 'let'}, {'LOWER': 'us'}, {'LOWER': 'not'}, {'LOWER': 'forget'}],
            [{'LOWER': 'let'}, {'LOWER': 'us'}, {'LOWER': 'keep'}, {'LOWER': 'in'}, {'LOWER': 'mind'}],
            [{'LOWER': 'let'}, {'LOWER': 'us'}, {'LOWER': 'remember'}],
        ]
        self.matcher.add('Priority', None, *cue_patterns)

    def __call__(self, doc):
        """Mark the whole sentence around each cue match as PRIORITY."""
        for match_id, start, end in self.matcher(doc):
            sentence = self.object.tokens.Span(doc, start, end).sent
            opinion = self.object.tokens.Span(doc, sentence.start,
                                              sentence.end, label='PRIORITY')
            doc._.opinion.append(opinion)
        return doc
class TileData(object):
    """Identifies one map tile image by filename and z/x/y coordinates."""

    def __init__(self, filename, z, x, y) -> None:
        self.filename, self.z, self.x, self.y = filename, z, x, y
class Tiledata(object):
    """Identifies one map tile image by filename and z/x/y coordinates."""

    def __init__(self, filename, z, x, y) -> None:
        self.filename, self.z, self.x, self.y = filename, z, x, y
#! /usr/bin/env python
# coding: utf-8


class Singleton(object):
    """Classic singleton: __new__ always hands back the one cached instance."""

    _instance = None

    def __new__(cls, *args, **kwargs):
        # Create the instance lazily, on the first construction only.
        if cls._instance is None:
            cls._instance = object.__new__(cls)
        return cls._instance

    @classmethod
    def get_instance(cls):
        """Return the shared instance (creating it if necessary)."""
        return cls()
class Singleton(object):
    """Classic singleton: __new__ always hands back the one cached instance."""

    _instance = None

    def __new__(cls, *args, **kwargs):
        # Create the instance lazily, on the first construction only.
        if cls._instance is None:
            cls._instance = object.__new__(cls)
        return cls._instance

    @classmethod
    def get_instance(cls):
        """Return the shared instance (creating it if necessary)."""
        return cls()
# Read a line mixing letters and digits: even-positioned digits say how
# many characters to take, odd-positioned digits how many to skip next.
line_input = list(input())
num_list = [int(ch) for ch in line_input if ch.isnumeric()]
words = [ch for ch in line_input if not ch.isnumeric()]
take = num_list[0::2]
skip = num_list[1::2]

result = []
for i in range(len(take)):
    result.append(words[:take[i]])
    if skip[i] > 0:
        words = words[take[i] + skip[i]:]

for chunk in result:
    for ch in chunk:
        print(ch, end='')
# Read a line mixing letters and digits: even-positioned digits say how
# many characters to take, odd-positioned digits how many to skip next.
line_input = list(input())
num_list = [int(ch) for ch in line_input if ch.isnumeric()]
words = [ch for ch in line_input if not ch.isnumeric()]
take = num_list[0::2]
skip = num_list[1::2]

result = []
for i in range(len(take)):
    result.append(words[:take[i]])
    if skip[i] > 0:
        words = words[take[i] + skip[i]:]

for chunk in result:
    for ch in chunk:
        print(ch, end='')
class Sampleinfor:
    """Per-sample inputs: sample name, data file, target region, and
    optional hotspot positions."""

    def __init__(self, samplename, datafile, fregion, hotspots=None):
        # Fix: the original default `hotspots=list()` was a single list
        # shared by every instance constructed without hotspots, so a
        # mutation through one instance leaked into all the others.
        self.samplename = samplename
        self.datafile = datafile
        self.hotspots = [] if hotspots is None else hotspots
        self.fregion = fregion
class Sampleinfor:
    """Per-sample inputs: sample name, data file, target region, and
    optional hotspot positions."""

    def __init__(self, samplename, datafile, fregion, hotspots=None):
        # Fix: the original default `hotspots=list()` was a single list
        # shared by every instance constructed without hotspots, so a
        # mutation through one instance leaked into all the others.
        self.samplename = samplename
        self.datafile = datafile
        self.hotspots = [] if hotspots is None else hotspots
        self.fregion = fregion
a = 1
print(not isinstance(a, str))  # True: a is an int

if __name__ == "__main__":
    # Walk the list in pages of `limit` items, printing each slice.
    codes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    offset = 0
    limit = 3
    total = len(codes)
    while offset < total:
        page = codes[offset:offset + limit]
        print("offet:{} limit:{} partial codes:{}".format(offset, limit, page))
        offset += limit
a = 1
print(not isinstance(a, str))  # True: a is an int

if __name__ == '__main__':
    # Walk the list in pages of `limit` items, printing each slice.
    codes = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
    offset = 0
    limit = 3
    total = len(codes)
    while offset < total:
        page = codes[offset:offset + limit]
        print('offet:{} limit:{} partial codes:{}'.format(offset, limit, page))
        offset += limit
def func():
    """Print the sum of the values that occur exactly once in the list."""
    nums = [1, 3, 4, 4, 3]
    # Fix: the original accumulated into a local named `sum`, shadowing
    # the builtin; iterate the distinct values and keep the singletons.
    total = sum(v for v in set(nums) if nums.count(v) == 1)
    print(total)


func()
def func():
    """Print the sum of the values that occur exactly once in the list."""
    nums = [1, 3, 4, 4, 3]
    # Fix: the original accumulated into a local named `sum`, shadowing
    # the builtin; iterate the distinct values and keep the singletons.
    total = sum(v for v in set(nums) if nums.count(v) == 1)
    print(total)


func()
def test_register_function(fresh_db):
    """A function registered via the decorator is callable from SQL."""

    @fresh_db.register_function
    def reverse_string(s):
        return "".join(reversed(s))

    reversed_value = fresh_db.execute('select reverse_string("hello")').fetchone()[0]
    assert reversed_value == "olleh"


def test_register_function_multiple_arguments(fresh_db):
    """Registered functions may take several arguments."""

    @fresh_db.register_function
    def a_times_b_plus_c(a, b, c):
        return a * b + c

    value = fresh_db.execute("select a_times_b_plus_c(2, 3, 4)").fetchone()[0]
    assert value == 10
def test_register_function(fresh_db):
    """A function registered via the decorator is callable from SQL."""

    @fresh_db.register_function
    def reverse_string(s):
        return ''.join(reversed(s))

    reversed_value = fresh_db.execute('select reverse_string("hello")').fetchone()[0]
    assert reversed_value == 'olleh'


def test_register_function_multiple_arguments(fresh_db):
    """Registered functions may take several arguments."""

    @fresh_db.register_function
    def a_times_b_plus_c(a, b, c):
        return a * b + c

    value = fresh_db.execute('select a_times_b_plus_c(2, 3, 4)').fetchone()[0]
    assert value == 10
# Assume s is a string of lower case characters.
# Print the longest substring of s in which the letters occur in
# alphabetical order; in case of ties, print the first such substring.
# e.g. s = 'azcbobobegghakl' -> 'beggh'; s = 'abcbcd' -> 'abc'.


def longest_alpha_substring(s):
    """Return the first longest substring of `s` whose letters are
    non-decreasing; '' for an empty input.

    Fixes over the original script: removed the dead `x += 1` inside the
    for loop (a no-op — `for` rebinds x each iteration), and guarded the
    empty-string case (the original crashed on `s[0]`).
    """
    if not s:
        return ''
    best = current = s[0]
    for ch in s[1:]:
        # Extend the current run while it stays in alphabetical order,
        # otherwise start a new run at this character.
        current = current + ch if ch >= current[-1] else ch
        if len(current) > len(best):
            best = current
    return best


s = 'abcdcd'
long = longest_alpha_substring(s)
print('Longest substring in alphabetical order is: ' + long)
def longest_alpha_substring(s):
    """Return the first longest substring of `s` whose letters are
    non-decreasing; '' for an empty input.

    Fixes over the original script: it assigned `max_len` but compared
    against undefined `maxLen` (NameError on the first extended run); it
    also carried a dead `x += 1` and crashed on an empty string.
    """
    if not s:
        return ''
    best = current = s[0]
    for ch in s[1:]:
        # Extend the current run while it stays in alphabetical order,
        # otherwise start a new run at this character.
        current = current + ch if ch >= current[-1] else ch
        if len(current) > len(best):
            best = current
    return best


s = 'abcdcd'
long = longest_alpha_substring(s)
print('Longest substring in alphabetical order is: ' + long)
#!/usr/bin/env python
"""A module with the database time test mixin."""


class DatabaseTimeTestMixin(object):
    """A mixin for testing time-related methods of database implementations."""

    def testNowPositive(self):
        """Now() must yield a non-negative timestamp."""
        self.assertGreaterEqual(self.db.Now(), 0)

    def testNowMonotnic(self):
        """Consecutive Now() calls must be non-decreasing."""
        first = self.db.Now()
        second = self.db.Now()
        third = self.db.Now()
        self.assertLessEqual(first, second)
        self.assertLessEqual(second, third)
"""A module with the database time test mixin."""


class Databasetimetestmixin(object):
    """A mixin for testing time-related methods of database implementations."""

    def test_now_positive(self):
        """Now() must yield a non-negative timestamp."""
        self.assertGreaterEqual(self.db.Now(), 0)

    def test_now_monotnic(self):
        """Consecutive Now() calls must be non-decreasing."""
        first = self.db.Now()
        second = self.db.Now()
        third = self.db.Now()
        self.assertLessEqual(first, second)
        self.assertLessEqual(second, third)
class Solution:
    def combinationSum2(self, candidates, target):
        """Return all unique combinations of `candidates` summing to
        `target`; each candidate is used at most once, and duplicate
        input values never yield duplicate combinations."""
        if not candidates or not target:
            return []
        pool = sorted(candidates)
        total = len(pool)
        found = []

        def explore(start, chosen, remaining):
            if remaining == 0:
                found.append(chosen)
                return
            for idx in range(start, total):
                value = pool[idx]
                # Skip a duplicate value at the same recursion depth.
                if idx > start and value == pool[idx - 1]:
                    continue
                # Sorted pool: once a value overshoots, all later ones do.
                if value > remaining:
                    break
                explore(idx + 1, chosen + [value], remaining - value)

        explore(0, [], target)
        return found
class Solution:
    def combination_sum2(self, candidates, target):
        """Return all unique combinations of `candidates` summing to
        `target`; each candidate is used at most once, and duplicate
        input values never yield duplicate combinations.
        """
        if not candidates or not target:
            return []
        result = []
        cand = sorted(candidates)
        # Fix: the original bound `n = len(candidates)` but the DFS read
        # undefined `N`, raising NameError on every non-trivial call.
        n = len(cand)

        def dfs(remainder, curr_combo, start):
            if remainder == 0:
                result.append(curr_combo)
                return
            for i in range(start, n):
                curr = cand[i]
                # Skip a duplicate value at the same recursion depth.
                if i > start and curr == cand[i - 1]:
                    continue
                # Sorted candidates: once a value overshoots, all later do.
                if curr > remainder:
                    break
                dfs(remainder - curr, curr_combo + [curr], i + 1)

        dfs(target, [], 0)
        return result
# Generates gallery.md by filling the gallery template with an anchor,
# links and an image for every Bokeh glyph example.
template_file = 'gallery-template.md'
output_filename = 'gallery.md'
base_filename = 'http://alanpryorjr.com/visualizations/'
base_image_filename = '../visualizations/'
key_base = "bokeh_glyphs_{}"

glyph_names = ['annular_wedge', 'annulus', 'arc', 'asterisk', 'bezier',
               'circle', 'circle_cross', 'circle_x', 'cross', 'diamond',
               'diamond_cross', 'ellipse', 'hbar', 'image', 'image_rgba',
               'image_url', 'inverted_triangle', 'line', 'oval', 'patch',
               'patches', 'quad', 'quadratic', 'ray', 'square',
               'square_cross', 'square_x', 'segment', 'text', 'triangle',
               'vbar', 'wedge', 'x']

glyph_format_dict = dict(
    glyph_base_code_filename=base_filename + 'bokeh/glyphs/',
    glyph_base_html_filename=base_filename + 'bokeh/figures/',
    glyph_base_image_filename=base_image_filename + 'bokeh/figures/')

filler_template = """
<a name="bokeh-glyphs-{glyph_name}"></a>
#### {Glyph_name} ([Interactive]({glyph_base_html_filename}{glyph_name})) [(code)]({glyph_base_code_filename}{glyph_name}/{glyph_name})
![{Glyph_name}]({glyph_img_file})
"""

format_dict = {}
for name in glyph_names:
    # Human-readable title, e.g. 'annular_wedge' -> 'Annular Wedge'.
    glyph_format_dict['Glyph_name'] = ' '.join(part.capitalize() for part in name.split('_'))
    glyph_format_dict['glyph_name'] = name
    glyph_format_dict['glyph_img_file'] = glyph_format_dict['glyph_base_image_filename'] + name + '.png'
    format_dict[key_base.format(name)] = filler_template.format(**glyph_format_dict)

with open(template_file, 'r') as fi, open(output_filename, 'w') as fo:
    fo.write(fi.read().format(**format_dict).replace('<!DOCTYPE html>', ''))
# Generates gallery.md by filling the gallery template with an anchor,
# links and an image for every Bokeh glyph example.
template_file = 'gallery-template.md'
output_filename = 'gallery.md'
base_filename = 'http://alanpryorjr.com/visualizations/'
base_image_filename = '../visualizations/'
key_base = 'bokeh_glyphs_{}'

glyph_names = ['annular_wedge', 'annulus', 'arc', 'asterisk', 'bezier',
               'circle', 'circle_cross', 'circle_x', 'cross', 'diamond',
               'diamond_cross', 'ellipse', 'hbar', 'image', 'image_rgba',
               'image_url', 'inverted_triangle', 'line', 'oval', 'patch',
               'patches', 'quad', 'quadratic', 'ray', 'square',
               'square_cross', 'square_x', 'segment', 'text', 'triangle',
               'vbar', 'wedge', 'x']

glyph_format_dict = dict(
    glyph_base_code_filename=base_filename + 'bokeh/glyphs/',
    glyph_base_html_filename=base_filename + 'bokeh/figures/',
    glyph_base_image_filename=base_image_filename + 'bokeh/figures/')

filler_template = """
<a name="bokeh-glyphs-{glyph_name}"></a>
#### {Glyph_name} ([Interactive]({glyph_base_html_filename}{glyph_name})) [(code)]({glyph_base_code_filename}{glyph_name}/{glyph_name})
![{Glyph_name}]({glyph_img_file})
"""

format_dict = {}
for name in glyph_names:
    # Human-readable title, e.g. 'annular_wedge' -> 'Annular Wedge'.
    glyph_format_dict['Glyph_name'] = ' '.join(part.capitalize() for part in name.split('_'))
    glyph_format_dict['glyph_name'] = name
    glyph_format_dict['glyph_img_file'] = glyph_format_dict['glyph_base_image_filename'] + name + '.png'
    format_dict[key_base.format(name)] = filler_template.format(**glyph_format_dict)

with open(template_file, 'r') as fi, open(output_filename, 'w') as fo:
    fo.write(fi.read().format(**format_dict).replace('<!DOCTYPE html>', ''))
class BaseReporter(object):
    """Delegate class to provider progress reporting for the resolver.

    All methods are no-op hooks: subclass and override the ones of
    interest. The resolver invokes them at the corresponding points of
    the resolution process.
    """

    def starting(self):
        """Called before the resolution actually starts."""

    def starting_round(self, index):
        """Called before each round of resolution starts.

        The index is zero-based.
        """

    def ending_round(self, index, state):
        """Called before each round of resolution ends.

        This is NOT called if the resolution ends at this round. Use
        `ending` if you want to report finalization. The index is
        zero-based.
        """

    def ending(self, state):
        """Called before the resolution ends successfully."""

    def adding_requirement(self, requirement):
        """Called when the resolver adds a new requirement into the
        resolve criteria.
        """

    def backtracking(self, candidate):
        """Called when the resolver rejects a candidate during
        backtracking.
        """

    def pinning(self, candidate):
        """Called when adding a candidate to the potential solution."""
# NOTE(review): class name breaks PascalCase (an identically-shaped class is
# named ``BaseReporter`` elsewhere); renaming would change the public
# interface, so it is left as-is here.
class Basereporter(object):
    """Delegate class to provide progress reporting for the resolver.

    Every hook is a no-op; subclass and override the events of interest.
    """

    def starting(self):
        """Called before the resolution actually starts.
        """

    def starting_round(self, index):
        """Called before each round of resolution starts.

        The index is zero-based.
        """

    def ending_round(self, index, state):
        """Called before each round of resolution ends.

        This is NOT called if the resolution ends at this round. Use `ending`
        if you want to report finalization. The index is zero-based.
        """

    def ending(self, state):
        """Called before the resolution ends successfully.
        """

    def adding_requirement(self, requirement):
        """Called when the resolver adds a new requirement into the resolve criteria.
        """

    def backtracking(self, candidate):
        """Called when the resolver rejects a candidate during backtracking.
        """

    def pinning(self, candidate):
        """Called when adding a candidate to the potential solution.
        """
'''
Compute 2**10 by repeated doubling, printing each intermediate power.

NOTE(review): the original header read "2^100", but the loop only runs
while n <= 10, so the final value is 2**10 == 1024 -- confirm which
exponent was intended.
'''
n = 1
result = 1
while n <= 10:
    # double the running result and print the new power of two
    result = result * 2
    print(result)
    n = n + 1
print('The final is', result)
""" 2^100 """ n = 1 result = 1 while n <= 10: result = result * 2 print(result) n = n + 1 print('The final is', result)
# function to create a list (marks) with the content of every line; every item
# corresponds to the line of the plan with the same index
def scan(vp):
    """Classify every line of the substitution plan *vp*.

    Returns a list ``marks`` parallel to *vp* where ``marks[i]`` is one of
    ``"new_day"``, ``"pressure"``, ``"text"``, ``"class_name"``, ``"blank"``
    or a class-name string extracted from an earlier ``class_name`` line.

    NOTE(review): the literal pipe/space patterns below encode the plan's
    fixed-width layout -- confirm against a sample plan file.
    """
    marks = []
    for line in vp:
        # positions of the last seen start and end of the general-information block
        text_start = 0
        text_end = 0
        for idx, mark in enumerate(marks):
            if mark == "text_end":
                text_end = idx
            elif mark == "text_start":
                text_start = idx
        # line carrying the day the plan is made for
        if "Ausfertigung" in line:
            marks.append("new_day")
            continue
        # line carrying the pressure on the schedule
        if " | / " in line:
            marks.append("pressure")
            continue
        # blank lines with no information
        if line == " |":
            # to prevent index errors this part only runs once at least
            # 3 lines are marked
            if len(marks) > 2:
                # two consecutive blank lines: the line before them is the last
                # line of the general-information block; text_start > text_end
                # holds while that block's end has not been found yet
                if line == " |" and marks[-1] == "blank" and text_start > text_end:
                    marks[-2] = "text_end"
            marks.append("blank")
            continue
        # when there are at least 3 lines marked
        if len(marks) >= 3:
            # two lines after the "pressure" line the general-information
            # block starts
            if marks[-2] == "pressure":
                marks.append("text_start")
                continue
            # current line is inside the general-information block
            if text_start > text_end:
                marks.append("text")
                continue
        # searching for single-class information
        if " | " in line:
            # when the class name could not be found, the line takes its place
            group = str(line)
            for idx, mark in enumerate(marks):
                if mark == "class_name":
                    # extracting class names from the marked line
                    group = vp[idx][7:].replace(":", "")
            marks.append(group)
            continue
        if " |" in line:
            marks.append("class_name")
            continue
        # default (something is not right)
        marks.append("blank")
    # postprocessing: replace text_start and text_end marks with plain text
    # marks; every possible mark at this point:
    # text; new_day; pressure; blank; class_name; [class name]
    for idx, mark in enumerate(marks):
        if mark == "text_start" or mark == "text_end":
            marks[idx] = "text"
    return marks


# return dictionary with intel for this day
def get_intel(day, groups):
    """Extract the marked intel from one day's plan lines.

    *day* is the list of plan lines, *groups* the class names of interest.
    Fixed slice offsets strip each line's layout prefix.
    """
    intel = {
        # new day info
        "new_day": "",
        # pressure on schedule
        "pressure": "",
        # info text
        "text": [],
        # list with important information for selected groups
        "groups_intel": []
    }
    # scan plan
    marks = scan(day)
    # going through every line of the day
    for idx, line in enumerate(day):
        # extract intel from already marked lines
        if marks[idx] == "new_day":
            intel["new_day"] = day[idx][8:]
        elif marks[idx] == "pressure":
            intel["pressure"] = day[idx][12:]
        elif marks[idx] == "text":
            intel["text"].append(day[idx][7:])
        elif marks[idx] in groups:
            intel["groups_intel"].append(day[idx][9:])
    return intel
def scan(vp):
    """Classify every line of the substitution plan *vp*.

    Returns a list ``marks`` parallel to *vp* where ``marks[i]`` is one of
    ``'new_day'``, ``'pressure'``, ``'text'``, ``'class_name'``, ``'blank'``
    or a class-name string extracted from an earlier ``class_name`` line.

    NOTE(review): the literal pipe/space patterns below encode the plan's
    fixed-width layout -- confirm against a sample plan file.
    """
    marks = []
    for line in vp:
        # locate the most recent start/end marks of the general-info block
        text_start = 0
        text_end = 0
        for (idx, mark) in enumerate(marks):
            if mark == 'text_end':
                text_end = idx
            elif mark == 'text_start':
                text_start = idx
        # header line naming the day the plan is issued for
        if 'Ausfertigung' in line:
            marks.append('new_day')
            continue
        # line carrying the pressure on the schedule
        if ' | / ' in line:
            marks.append('pressure')
            continue
        # blank line with no information
        if line == ' |':
            # guard: only look back once at least 3 lines are marked
            if len(marks) > 2:
                # two consecutive blanks while the general-info block is still
                # open (text_start > text_end): the line before them ends it
                if line == ' |' and marks[-1] == 'blank' and (text_start > text_end):
                    marks[-2] = 'text_end'
            marks.append('blank')
            continue
        if len(marks) >= 3:
            # the general-info block starts two lines after the pressure line
            if marks[-2] == 'pressure':
                marks.append('text_start')
                continue
            # still inside the general-info block
            if text_start > text_end:
                marks.append('text')
                continue
        # single-class information line
        if ' | ' in line:
            # fall back to the raw line if no class name was seen yet
            group = str(line)
            for (idx, mark) in enumerate(marks):
                if mark == 'class_name':
                    # extract the class name from the marked header line
                    group = vp[idx][7:].replace(':', '')
            marks.append(group)
            continue
        if ' |' in line:
            marks.append('class_name')
            continue
        # default: unrecognised line
        marks.append('blank')
    # postprocessing: collapse text_start/text_end into plain 'text' marks
    for (idx, mark) in enumerate(marks):
        if mark == 'text_start' or mark == 'text_end':
            marks[idx] = 'text'
    return marks

def get_intel(day, groups):
    """Extract the marked intel from one day's plan lines.

    *day* is the list of plan lines, *groups* the class names of interest.
    Fixed slice offsets strip each line's layout prefix.
    """
    intel = {'new_day': '', 'pressure': '', 'text': [], 'groups_intel': []}
    marks = scan(day)
    for (idx, line) in enumerate(day):
        if marks[idx] == 'new_day':
            intel['new_day'] = day[idx][8:]
        elif marks[idx] == 'pressure':
            intel['pressure'] = day[idx][12:]
        elif marks[idx] == 'text':
            intel['text'].append(day[idx][7:])
        elif marks[idx] in groups:
            intel['groups_intel'].append(day[idx][9:])
    return intel
# Package version metadata for WolfBolin.
VERSION = (0, 1, 3)
__version__ = '{}.{}.{}'.format(*VERSION)
# Package version metadata.
# Fix: the tuple was assigned to lowercase ``version`` while the
# ``__version__`` string referenced ``VERSION``, so importing this module
# raised NameError. Bind the constant under the name actually used.
VERSION = (0, 1, 3)
__version__ = '.'.join(map(str, VERSION))
def main():
    """Print the sum of the demo operands 1 and 3."""
    total = add(1, 3)
    print(total)


def add(a: int, b: int):
    """Return the sum of *a* and *b*."""
    return a + b
def main():
    """Demo entry point: print the result of ``add(1, 3)``."""
    result = add(1, 3)
    print(result)


def add(a: int, b: int):
    """Add *a* and *b* and return the total."""
    total = a + b
    return total
__version__ = "0.29.9" __db_version__ = 7 __author__ = "Kristian Larsson, Lukas Garberg" __author_email__ = "kll@tele2.net, lukas@spritelink.net" __copyright__ = "Copyright 2011-2014, Kristian Larsson, Lukas Garberg" __license__ = "MIT" __status__ = "Development" __url__ = "http://SpriteLink.github.com/NIPAP"
# Distribution metadata for the NIPAP package.
__version__ = '0.29.9'  # package release version
__db_version__ = 7  # expected database schema version
__author__ = 'Kristian Larsson, Lukas Garberg'
__author_email__ = 'kll@tele2.net, lukas@spritelink.net'
__copyright__ = 'Copyright 2011-2014, Kristian Larsson, Lukas Garberg'
__license__ = 'MIT'
__status__ = 'Development'
__url__ = 'http://SpriteLink.github.com/NIPAP'
# Column order for the pet-listing CSV export; consumers rely on this exact
# ordering. NOTE(review): presumably matches an external rescue/adoption feed
# specification -- confirm against the downstream consumer.
CSV_HEADERS = [
    "externalID",
    "status",
    "internalID",
    "rescueID",
    "name",
    "type",
    "priBreed",
    "secBreed",
    "mix",
    "sex",
    "okwithdogs",
    "okwithcats",
    "okwithkids",
    "declawed",
    "housebroken",
    "age",
    "specialNeeds",
    "altered",
    "size",
    "uptodate",
    "color",
    "pattern",
    "coatLength",
    "courtesy",
    "dsc",
    "found",
    "foundDate",
    "foundZipcode",
    "photo1",
    "photo2",
    "photo3",
    "photo4",
    "videoUrl",
]
# Column order for the pet-listing CSV export; consumers rely on this exact
# ordering. NOTE(review): module-level constant should conventionally be
# UPPER_SNAKE_CASE (``CSV_HEADERS``), but renaming would break importers.
csv_headers = ['externalID', 'status', 'internalID', 'rescueID', 'name', 'type', 'priBreed', 'secBreed', 'mix', 'sex', 'okwithdogs', 'okwithcats', 'okwithkids', 'declawed', 'housebroken', 'age', 'specialNeeds', 'altered', 'size', 'uptodate', 'color', 'pattern', 'coatLength', 'courtesy', 'dsc', 'found', 'foundDate', 'foundZipcode', 'photo1', 'photo2', 'photo3', 'photo4', 'videoUrl']
load(
    "@io_bazel_rules_scala//scala:providers.bzl",
    _DepsInfo = "DepsInfo",
    _ScalacProvider = "ScalacProvider",
)

# Resolve the effective strict-deps mode: "direct" dependency tracking
# disables strict deps; "default" maps to "error" only for transitive mode.
def _compute_strict_deps_mode(input_strict_deps_mode, dependency_mode):
    if dependency_mode == "direct":
        return "off"
    if input_strict_deps_mode == "default":
        if dependency_mode == "transitive":
            return "error"
        else:
            return "off"
    return input_strict_deps_mode

# Resolve the effective dependency-tracking method: "default" becomes
# "high-level" for direct mode and "ast" otherwise.
def _compute_dependency_tracking_method(
        dependency_mode,
        input_dependency_tracking_method):
    if input_dependency_tracking_method == "default":
        if dependency_mode == "direct":
            return "high-level"
        else:
            return "ast"
    return input_dependency_tracking_method

# Rule implementation: normalize the dependency-related attributes and expose
# them (plus compiler flags and providers) as a ToolchainInfo.
def _scala_toolchain_impl(ctx):
    dependency_mode = ctx.attr.dependency_mode
    strict_deps_mode = _compute_strict_deps_mode(
        ctx.attr.strict_deps_mode,
        dependency_mode,
    )
    unused_dependency_checker_mode = ctx.attr.unused_dependency_checker_mode
    dependency_tracking_method = _compute_dependency_tracking_method(
        dependency_mode,
        ctx.attr.dependency_tracking_method,
    )

    # Final quality checks to possibly detect buggy code above
    if dependency_mode not in ("direct", "plus-one", "transitive"):
        fail("Internal error: invalid dependency_mode " + dependency_mode)

    if strict_deps_mode not in ("off", "warn", "error"):
        fail("Internal error: invalid strict_deps_mode " + strict_deps_mode)

    if dependency_tracking_method not in ("ast", "high-level"):
        fail("Internal error: invalid dependency_tracking_method " + dependency_tracking_method)

    enable_diagnostics_report = ctx.attr.enable_diagnostics_report

    toolchain = platform_common.ToolchainInfo(
        scalacopts = ctx.attr.scalacopts,
        dep_providers = ctx.attr.dep_providers,
        dependency_mode = dependency_mode,
        strict_deps_mode = strict_deps_mode,
        unused_dependency_checker_mode = unused_dependency_checker_mode,
        dependency_tracking_method = dependency_tracking_method,
        enable_code_coverage_aspect = ctx.attr.enable_code_coverage_aspect,
        scalac_jvm_flags = ctx.attr.scalac_jvm_flags,
        scala_test_jvm_flags = ctx.attr.scala_test_jvm_flags,
        enable_diagnostics_report = enable_diagnostics_report,
    )
    return [toolchain]

# Toolchain rule wiring user-facing attributes to the implementation above.
scala_toolchain = rule(
    _scala_toolchain_impl,
    attrs = {
        "scalacopts": attr.string_list(),
        "dep_providers": attr.label_list(
            default = [
                "@io_bazel_rules_scala//scala:scala_xml_provider",
                "@io_bazel_rules_scala//scala:parser_combinators_provider",
                "@io_bazel_rules_scala//scala:scala_compile_classpath_provider",
                "@io_bazel_rules_scala//scala:scala_library_classpath_provider",
                "@io_bazel_rules_scala//scala:scala_macro_classpath_provider",
            ],
            providers = [_DepsInfo],
        ),
        "dependency_mode": attr.string(
            default = "direct",
            values = ["direct", "plus-one", "transitive"],
        ),
        "strict_deps_mode": attr.string(
            default = "default",
            values = ["off", "warn", "error", "default"],
        ),
        "unused_dependency_checker_mode": attr.string(
            default = "off",
            values = ["off", "warn", "error"],
        ),
        "dependency_tracking_method": attr.string(
            default = "default",
            values = ["ast", "high-level", "default"],
        ),
        "enable_code_coverage_aspect": attr.string(
            default = "off",
            values = ["off", "on"],
        ),
        "scalac_jvm_flags": attr.string_list(),
        "scala_test_jvm_flags": attr.string_list(),
        "enable_diagnostics_report": attr.bool(
            doc = "Enable the output of structured diagnostics through the BEP",
        ),
    },
    fragments = ["java"],
)
load('@io_bazel_rules_scala//scala:providers.bzl', _DepsInfo='DepsInfo', _ScalacProvider='ScalacProvider')

# Resolve the effective strict-deps mode: 'direct' dependency tracking
# disables strict deps; 'default' maps to 'error' only for transitive mode.
def _compute_strict_deps_mode(input_strict_deps_mode, dependency_mode):
    if dependency_mode == 'direct':
        return 'off'
    if input_strict_deps_mode == 'default':
        if dependency_mode == 'transitive':
            return 'error'
        else:
            return 'off'
    return input_strict_deps_mode

# Resolve the effective dependency-tracking method: 'default' becomes
# 'high-level' for direct mode and 'ast' otherwise.
def _compute_dependency_tracking_method(dependency_mode, input_dependency_tracking_method):
    if input_dependency_tracking_method == 'default':
        if dependency_mode == 'direct':
            return 'high-level'
        else:
            return 'ast'
    return input_dependency_tracking_method

# Rule implementation: normalize the dependency-related attributes, sanity
# check them, and expose everything as a ToolchainInfo provider.
def _scala_toolchain_impl(ctx):
    dependency_mode = ctx.attr.dependency_mode
    strict_deps_mode = _compute_strict_deps_mode(ctx.attr.strict_deps_mode, dependency_mode)
    unused_dependency_checker_mode = ctx.attr.unused_dependency_checker_mode
    dependency_tracking_method = _compute_dependency_tracking_method(dependency_mode, ctx.attr.dependency_tracking_method)
    # Final quality checks to possibly detect buggy code above
    if dependency_mode not in ('direct', 'plus-one', 'transitive'):
        fail('Internal error: invalid dependency_mode ' + dependency_mode)
    if strict_deps_mode not in ('off', 'warn', 'error'):
        fail('Internal error: invalid strict_deps_mode ' + strict_deps_mode)
    if dependency_tracking_method not in ('ast', 'high-level'):
        fail('Internal error: invalid dependency_tracking_method ' + dependency_tracking_method)
    enable_diagnostics_report = ctx.attr.enable_diagnostics_report
    toolchain = platform_common.ToolchainInfo(scalacopts=ctx.attr.scalacopts, dep_providers=ctx.attr.dep_providers, dependency_mode=dependency_mode, strict_deps_mode=strict_deps_mode, unused_dependency_checker_mode=unused_dependency_checker_mode, dependency_tracking_method=dependency_tracking_method, enable_code_coverage_aspect=ctx.attr.enable_code_coverage_aspect, scalac_jvm_flags=ctx.attr.scalac_jvm_flags, scala_test_jvm_flags=ctx.attr.scala_test_jvm_flags, enable_diagnostics_report=enable_diagnostics_report)
    return [toolchain]

# Toolchain rule wiring user-facing attributes to the implementation above.
scala_toolchain = rule(_scala_toolchain_impl, attrs={'scalacopts': attr.string_list(), 'dep_providers': attr.label_list(default=['@io_bazel_rules_scala//scala:scala_xml_provider', '@io_bazel_rules_scala//scala:parser_combinators_provider', '@io_bazel_rules_scala//scala:scala_compile_classpath_provider', '@io_bazel_rules_scala//scala:scala_library_classpath_provider', '@io_bazel_rules_scala//scala:scala_macro_classpath_provider'], providers=[_DepsInfo]), 'dependency_mode': attr.string(default='direct', values=['direct', 'plus-one', 'transitive']), 'strict_deps_mode': attr.string(default='default', values=['off', 'warn', 'error', 'default']), 'unused_dependency_checker_mode': attr.string(default='off', values=['off', 'warn', 'error']), 'dependency_tracking_method': attr.string(default='default', values=['ast', 'high-level', 'default']), 'enable_code_coverage_aspect': attr.string(default='off', values=['off', 'on']), 'scalac_jvm_flags': attr.string_list(), 'scala_test_jvm_flags': attr.string_list(), 'enable_diagnostics_report': attr.bool(doc='Enable the output of structured diagnostics through the BEP')}, fragments=['java'])
# Automatically generated # pylint: disable=all get = [{'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 4.0, 'DefaultVCpus': 48, 'SizeInMiB': 393216, 'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'z1d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 
'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 4.0}, 'VCpuInfo': {'DefaultVCpus': 48}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 131072, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 131072}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 
'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 131072, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 131072}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 262144, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 
'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 262144}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 262144, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 262144}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 
'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 524288, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 524288, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 
'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.4, 'DefaultVCpus': 72, 'SizeInMiB': 196608, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5n.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.4}, 'VCpuInfo': {'DefaultVCpus': 
72}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 72, 'SizeInMiB': 524288, 'TotalSizeInGB': 15200, 'Disks': [{'SizeInGB': 1900, 'Count': 8, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'i3.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 72}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 15200, 'Disks': [{'SizeInGB': 1900, 'Count': 8, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': 
False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6, 'DefaultVCpus': 96, 'SizeInMiB': 196608, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6, 'DefaultVCpus': 96, 'SizeInMiB': 196608, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': 
['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'Gpus': [{'Name': 'T4', 'Manufacturer': 'NVIDIA', 'Count': 8, 'MemoryInfo': {'SizeInMiB': 16384}}], 'TotalGpuMemoryInMiB': 131072, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'g4dn.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 
'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'GpuInfo': {'Gpus': [{'Name': 'T4', 'Manufacturer': 'NVIDIA', 'Count': 8, 'MemoryInfo': {'SizeInMiB': 16384}}], 'TotalGpuMemoryInMiB': 131072}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'TotalSizeInGB': 60000, 'Disks': [{'SizeInGB': 7500, 'Count': 8, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'i3en.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 60000, 'Disks': [{'SizeInGB': 7500, 'Count': 8, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 
'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm5d.metal', 
'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 
'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r5d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}]  # noqa: E501


def get_instances_list() -> list:
    '''Return the module-level ``get`` list of EC2 instance-type
    descriptions; every entry in the data above has ``'BareMetal': True``.

    NOTE(review): this returns the shared module-level list itself, not a
    copy, so callers that mutate the result mutate ``get`` for everyone --
    confirm that is intended.
    NOTE(review): this module appears to bind ``get`` and
    ``get_instances_list`` a second time further down; this earlier pair is
    shadowed by the later one -- presumably an accidental duplication,
    verify before relying on either copy.
    '''
    # pylint: disable=all
    return get
# Machine-generated snapshot of AWS EC2 ``DescribeInstanceTypes`` output,
# restricted to bare-metal instance types (every entry carries
# ``'BareMetal': True``).  Each entry mirrors the ``InstanceTypeInfo``
# structure of the API response.
# NOTE(review): the leading keys of each entry (e.g.
# 'SupportedArchitectures', 'SustainedClockSpeedInGhz', 'DefaultVCpus',
# 'SizeInMiB') duplicate values that also appear nested under
# 'ProcessorInfo' / 'VCpuInfo' / 'MemoryInfo' etc. -- presumably a
# flattened convenience view; confirm which form consumers read.
# NOTE(review): ``get`` shadows an identical earlier binding in this
# module -- likely accidental duplication; verify.
get = [{'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 16, 'SizeInMiB': 32768, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'a1.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 16}, 'MemoryInfo': {'SizeInMiB': 32768}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': 'Up to 10 Gigabit', 'MaximumNetworkInterfaces': 8, 'Ipv4AddressesPerInterface': 30, 'Ipv6AddressesPerInterface': 30, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 4.0, 'DefaultVCpus': 48, 'SizeInMiB': 393216, 'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'z1d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'],
'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 4.0}, 'VCpuInfo': {'DefaultVCpus': 48}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 131072, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 131072}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']},
'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 131072, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 131072}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 262144, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required',
'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 262144}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 262144, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 262144}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport':
'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 524288, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r6g.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 64, 'SizeInMiB': 524288, 'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported',
'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r6gd.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['arm64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 64}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3800, 'Disks': [{'SizeInGB': 1900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.4, 'DefaultVCpus': 72, 'SizeInMiB': 196608, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5n.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.4}, 'VCpuInfo': {'DefaultVCpus': 72}, 'MemoryInfo': {'SizeInMiB':
196608}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.3, 'DefaultVCpus': 72, 'SizeInMiB': 524288, 'TotalSizeInGB': 15200, 'Disks': [{'SizeInGB': 1900, 'Count': 8, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'i3.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.3}, 'VCpuInfo': {'DefaultVCpus': 72}, 'MemoryInfo': {'SizeInMiB': 524288}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 15200, 'Disks': [{'SizeInGB': 1900, 'Count': 8, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported':
True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6, 'DefaultVCpus': 96, 'SizeInMiB': 196608, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6, 'DefaultVCpus': 96, 'SizeInMiB': 196608, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'c5d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'],
'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.6}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 196608}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': False, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.5, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'Gpus': [{'Name': 'T4', 'Manufacturer': 'NVIDIA', 'Count': 8, 'MemoryInfo': {'SizeInMiB': 16384}}], 'TotalGpuMemoryInMiB': 131072, 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'g4dn.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 2.5}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 1800, 'Disks': [{'SizeInGB': 900, 'Count': 2, 'Type': 'ssd'}]}, 'EbsInfo':
{'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'GpuInfo': {'Gpus': [{'Name': 'T4', 'Manufacturer': 'NVIDIA', 'Count': 8, 'MemoryInfo': {'SizeInMiB': 16384}}], 'TotalGpuMemoryInMiB': 131072}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'TotalSizeInGB': 60000, 'Disks': [{'SizeInGB': 7500, 'Count': 8, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'i3en.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 60000, 'Disks': [{'SizeInGB': 7500, 'Count': 8, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '100 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']},
'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 393216, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'm5d.metal', 'CurrentGeneration':
True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 393216}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r5.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': False, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True,
'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': True}, {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1, 'DefaultVCpus': 96, 'SizeInMiB': 786432, 'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}], 'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported', 'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required', 'SupportedStrategies': ['cluster', 'partition', 'spread'], 'InstanceType': 'r5d.metal', 'CurrentGeneration': True, 'FreeTierEligible': False, 'SupportedUsageClasses': ['on-demand', 'spot'], 'SupportedRootDeviceTypes': ['ebs'], 'BareMetal': True, 'ProcessorInfo': {'SupportedArchitectures': ['x86_64'], 'SustainedClockSpeedInGhz': 3.1}, 'VCpuInfo': {'DefaultVCpus': 96}, 'MemoryInfo': {'SizeInMiB': 786432}, 'InstanceStorageSupported': True, 'InstanceStorageInfo': {'TotalSizeInGB': 3600, 'Disks': [{'SizeInGB': 900, 'Count': 4, 'Type': 'ssd'}]}, 'EbsInfo': {'EbsOptimizedSupport': 'default', 'EncryptionSupport': 'supported'}, 'NetworkInfo': {'NetworkPerformance': '25 Gigabit', 'MaximumNetworkInterfaces': 15, 'Ipv4AddressesPerInterface': 50, 'Ipv6AddressesPerInterface': 50, 'Ipv6Supported': True, 'EnaSupport': 'required'}, 'PlacementGroupInfo': {'SupportedStrategies': ['cluster', 'partition', 'spread']}, 'HibernationSupported': False, 'BurstablePerformanceSupported': False, 'DedicatedHostsSupported': True, 'AutoRecoverySupported': False}]


def get_instances_list() -> list:
    """Return the module-level ``get`` list of EC2 bare-metal
    instance-type descriptions (each entry has ``'BareMetal': True``).

    NOTE(review): returns the shared module-level list itself, not a
    copy -- callers that mutate the result mutate ``get`` for all other
    callers; confirm that is intended.
    """
    return get
class StringWriter(TextWriter, IDisposable): """ Implements a System.IO.TextWriter for writing information to a string. The information is stored in an underlying System.Text.StringBuilder. StringWriter() StringWriter(formatProvider: IFormatProvider) StringWriter(sb: StringBuilder) StringWriter(sb: StringBuilder,formatProvider: IFormatProvider) """ def Close(self): """ Close(self: StringWriter) Closes the current System.IO.StringWriter and the underlying stream. """ pass def Dispose(self): """ Dispose(self: StringWriter,disposing: bool) Releases the unmanaged resources used by the System.IO.StringWriter and optionally releases the managed resources. disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources. """ pass def FlushAsync(self): """ FlushAsync(self: StringWriter) -> Task """ pass def GetStringBuilder(self): """ GetStringBuilder(self: StringWriter) -> StringBuilder Returns the underlying System.Text.StringBuilder. Returns: The underlying StringBuilder. """ pass def MemberwiseClone(self, *args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. 
""" pass def ToString(self): """ ToString(self: StringWriter) -> str Returns a string containing the characters written to the current StringWriter so far. Returns: The string containing the characters written to the current StringWriter. """ pass def Write(self, *__args): """ Write(self: StringWriter,value: str) Writes a string to this instance of the StringWriter. value: The string to write. Write(self: StringWriter,buffer: Array[Char],index: int,count: int) Writes the specified region of a character array to this instance of the StringWriter. buffer: The character array to read data from. index: The index at which to begin reading from buffer. count: The maximum number of characters to write. Write(self: StringWriter,value: Char) Writes a character to this instance of the StringWriter. value: The character to write. """ pass def WriteAsync(self, *__args): """ WriteAsync(self: StringWriter,buffer: Array[Char],index: int,count: int) -> Task WriteAsync(self: StringWriter,value: str) -> Task WriteAsync(self: StringWriter,value: Char) -> Task """ pass def WriteLineAsync(self, *__args): """ WriteLineAsync(self: StringWriter,buffer: Array[Char],index: int,count: int) -> Task WriteLineAsync(self: StringWriter,value: str) -> Task WriteLineAsync(self: StringWriter,value: Char) -> Task """ pass def __enter__(self, *args): """ __enter__(self: IDisposable) -> object Provides the implementation of __enter__ for objects which implement IDisposable. """ pass def __exit__(self, *args): """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) Provides the implementation of __exit__ for objects which implement IDisposable. """ pass def __init__(self, *args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self, *__args): """ __new__(cls: type) __new__(cls: type,formatProvider: IFormatProvider) __new__(cls: type,sb: StringBuilder) __new__(cls: type,sb: StringBuilder,formatProvider: IFormatProvider) """ pass def __reduce_ex__(self, *args): pass def __str__(self, *args): pass Encoding = property(lambda self: object(), lambda self, v: None, lambda self: None) """Gets the System.Text.Encoding in which the output is written. Get: Encoding(self: StringWriter) -> Encoding """ CoreNewLine = None
class Stringwriter(TextWriter, IDisposable): """ Implements a System.IO.TextWriter for writing information to a string. The information is stored in an underlying System.Text.StringBuilder. StringWriter() StringWriter(formatProvider: IFormatProvider) StringWriter(sb: StringBuilder) StringWriter(sb: StringBuilder,formatProvider: IFormatProvider) """ def close(self): """ Close(self: StringWriter) Closes the current System.IO.StringWriter and the underlying stream. """ pass def dispose(self): """ Dispose(self: StringWriter,disposing: bool) Releases the unmanaged resources used by the System.IO.StringWriter and optionally releases the managed resources. disposing: true to release both managed and unmanaged resources; false to release only unmanaged resources. """ pass def flush_async(self): """ FlushAsync(self: StringWriter) -> Task """ pass def get_string_builder(self): """ GetStringBuilder(self: StringWriter) -> StringBuilder Returns the underlying System.Text.StringBuilder. Returns: The underlying StringBuilder. """ pass def memberwise_clone(self, *args): """ MemberwiseClone(self: MarshalByRefObject,cloneIdentity: bool) -> MarshalByRefObject Creates a shallow copy of the current System.MarshalByRefObject object. cloneIdentity: false to delete the current System.MarshalByRefObject object's identity,which will cause the object to be assigned a new identity when it is marshaled across a remoting boundary. A value of false is usually appropriate. true to copy the current System.MarshalByRefObject object's identity to its clone,which will cause remoting client calls to be routed to the remote server object. Returns: A shallow copy of the current System.MarshalByRefObject object. MemberwiseClone(self: object) -> object Creates a shallow copy of the current System.Object. Returns: A shallow copy of the current System.Object. 
""" pass def to_string(self): """ ToString(self: StringWriter) -> str Returns a string containing the characters written to the current StringWriter so far. Returns: The string containing the characters written to the current StringWriter. """ pass def write(self, *__args): """ Write(self: StringWriter,value: str) Writes a string to this instance of the StringWriter. value: The string to write. Write(self: StringWriter,buffer: Array[Char],index: int,count: int) Writes the specified region of a character array to this instance of the StringWriter. buffer: The character array to read data from. index: The index at which to begin reading from buffer. count: The maximum number of characters to write. Write(self: StringWriter,value: Char) Writes a character to this instance of the StringWriter. value: The character to write. """ pass def write_async(self, *__args): """ WriteAsync(self: StringWriter,buffer: Array[Char],index: int,count: int) -> Task WriteAsync(self: StringWriter,value: str) -> Task WriteAsync(self: StringWriter,value: Char) -> Task """ pass def write_line_async(self, *__args): """ WriteLineAsync(self: StringWriter,buffer: Array[Char],index: int,count: int) -> Task WriteLineAsync(self: StringWriter,value: str) -> Task WriteLineAsync(self: StringWriter,value: Char) -> Task """ pass def __enter__(self, *args): """ __enter__(self: IDisposable) -> object Provides the implementation of __enter__ for objects which implement IDisposable. """ pass def __exit__(self, *args): """ __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) Provides the implementation of __exit__ for objects which implement IDisposable. """ pass def __init__(self, *args): """ x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) 
initializes x; see x.__class__.__doc__ for signature """ pass @staticmethod def __new__(self, *__args): """ __new__(cls: type) __new__(cls: type,formatProvider: IFormatProvider) __new__(cls: type,sb: StringBuilder) __new__(cls: type,sb: StringBuilder,formatProvider: IFormatProvider) """ pass def __reduce_ex__(self, *args): pass def __str__(self, *args): pass encoding = property(lambda self: object(), lambda self, v: None, lambda self: None) 'Gets the System.Text.Encoding in which the output is written.\n\n\n\nGet: Encoding(self: StringWriter) -> Encoding\n\n\n\n' core_new_line = None
# 8. With two given lists [1,3,6,78,35,55] and [12,24,35,24,88,120,155], # write a program to make a list whose elements are intersection of the above given lists. listA = [1, 3, 6, 78, 35, 55] listB = [12, 24, 35, 24, 88, 120, 155] setA = set(listA) setB = set(listB) insersaction_of_AB = setA.intersection(setB) # A&B listA_insersact_B = list(insersaction_of_AB) print(listA_insersact_B)
list_a = [1, 3, 6, 78, 35, 55] list_b = [12, 24, 35, 24, 88, 120, 155] set_a = set(listA) set_b = set(listB) insersaction_of_ab = setA.intersection(setB) list_a_insersact_b = list(insersaction_of_AB) print(listA_insersact_B)
#encoding:utf-8 subreddit = 'WikiLeaks' t_channel = '@r_WikiLeaks' def send_post(submission, r2t): return r2t.send_simple(submission)
subreddit = 'WikiLeaks' t_channel = '@r_WikiLeaks' def send_post(submission, r2t): return r2t.send_simple(submission)
# -*- coding: utf-8 -*- """ Created on Fri Jun 26 15:11:02 2020 @author: dhaar01 """ n = int(input()) s = set() for i in range(n): s.add(input()) print(len(s))
""" Created on Fri Jun 26 15:11:02 2020 @author: dhaar01 """ n = int(input()) s = set() for i in range(n): s.add(input()) print(len(s))
""" Given a string, find the rank of the string amongst its permutations sorted lexicographically. Assume that no characters are repeated. Example : Input : 'acb' Output : 2 The order permutations with letters 'a', 'c', and 'b' : abc acb bac bca cab cba The answer might not fit in an integer, so return your answer % 1000003 """ class Solution: # Return the rank of given string from sorted permutation of that string def rankPermutation(self, string): n, rank, i = len(string), 1, 0 total_permutaion = self.fact(n) # Total number of permutation while i < n: total_permutaion = total_permutaion//(n-i) count = self.smallComb(string, i, n-1) rank = rank + count*total_permutaion i+= 1 return rank % 1000003 # Counts the number of small element from string[start] in right def smallComb(self, string, start, end): count, i = 0, start+1 while i <= end: if string[i] < string[start]: count += 1 i+=1 return count # Counts factorial of a number k def fact(self, k): f, i = 1, 1 while i <k+1: f *= i i+=1 return f # Space : O(n) # Time: O(n*n) def method_02(self, string): arr, n = list(string), len(string) sorted_arr = sorted(arr) rank, i, j = 1, 0, 0 while i < n and j < len(sorted_arr): if sorted_arr[i] != arr[j]: rank += self.fact(len(sorted_arr)-1) i+= 1 if sorted_arr[i] == arr[j]: del sorted_arr[i] j+= 1 i = 0 return rank%1000003 s = Solution() print(s.rankPermutation("VIEW")) print(s.method_02("VIEW"))
""" Given a string, find the rank of the string amongst its permutations sorted lexicographically. Assume that no characters are repeated. Example : Input : 'acb' Output : 2 The order permutations with letters 'a', 'c', and 'b' : abc acb bac bca cab cba The answer might not fit in an integer, so return your answer % 1000003 """ class Solution: def rank_permutation(self, string): (n, rank, i) = (len(string), 1, 0) total_permutaion = self.fact(n) while i < n: total_permutaion = total_permutaion // (n - i) count = self.smallComb(string, i, n - 1) rank = rank + count * total_permutaion i += 1 return rank % 1000003 def small_comb(self, string, start, end): (count, i) = (0, start + 1) while i <= end: if string[i] < string[start]: count += 1 i += 1 return count def fact(self, k): (f, i) = (1, 1) while i < k + 1: f *= i i += 1 return f def method_02(self, string): (arr, n) = (list(string), len(string)) sorted_arr = sorted(arr) (rank, i, j) = (1, 0, 0) while i < n and j < len(sorted_arr): if sorted_arr[i] != arr[j]: rank += self.fact(len(sorted_arr) - 1) i += 1 if sorted_arr[i] == arr[j]: del sorted_arr[i] j += 1 i = 0 return rank % 1000003 s = solution() print(s.rankPermutation('VIEW')) print(s.method_02('VIEW'))
# # PySNMP MIB module WWP-LEOS-PING-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/WWP-LEOS-PING-MIB # Produced by pysmi-0.3.4 at Wed May 1 15:38:13 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # ObjectIdentifier, Integer, OctetString = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "Integer", "OctetString") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueRangeConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueRangeConstraint", "ValueSizeConstraint") AddressFamilyNumbers, = mibBuilder.importSymbols("IANA-ADDRESS-FAMILY-NUMBERS-MIB", "AddressFamilyNumbers") InetAddressType, = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType") NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance") Integer32, NotificationType, ModuleIdentity, Gauge32, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, Counter32, iso, ObjectIdentity, TimeTicks, MibIdentifier, Counter64, Unsigned32, IpAddress = mibBuilder.importSymbols("SNMPv2-SMI", "Integer32", "NotificationType", "ModuleIdentity", "Gauge32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "Counter32", "iso", "ObjectIdentity", "TimeTicks", "MibIdentifier", "Counter64", "Unsigned32", "IpAddress") DisplayString, TruthValue, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TruthValue", "TextualConvention") wwpModulesLeos, = mibBuilder.importSymbols("WWP-SMI", "wwpModulesLeos") wwpLeosPingMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19)) wwpLeosPingMIB.setRevisions(('2012-04-02 00:00', '2001-07-03 12:57',)) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 
4, 0): if mibBuilder.loadTexts: wwpLeosPingMIB.setRevisionsDescriptions(('Add wwpLeosPingInetAddrType to support IP protocol version independent Inet addressing.', 'Initial Creation',)) if mibBuilder.loadTexts: wwpLeosPingMIB.setLastUpdated('201204020000Z') if mibBuilder.loadTexts: wwpLeosPingMIB.setOrganization('Ciena, Inc') if mibBuilder.loadTexts: wwpLeosPingMIB.setContactInfo(' Mib Meister 115 North Sullivan Road Spokane Valley, WA 99037 USA Phone: +1 509 242 9000 Email: support@ciena.com') if mibBuilder.loadTexts: wwpLeosPingMIB.setDescription('The MIB for WWP Ping') class PingFailCause(TextualConvention, Integer32): description = 'The cause of the last ping failure.' status = 'current' subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)) namedValues = NamedValues(("unknownHost", 1), ("socketError", 2), ("bindError", 3), ("connectError", 4), ("missingHost", 5), ("asyncError", 6), ("nonBlockError", 7), ("mcastError", 8), ("ttlError", 9), ("mcastTtlError", 10), ("outputError", 11), ("unreachableError", 12), ("isAlive", 13), ("txRx", 14), ("commandCompleted", 15), ("noStatus", 16), ("sendRecvMismatch", 17)) class PingState(TextualConvention, Integer32): description = 'The state of the last ping request.' 
status = 'current' subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4)) namedValues = NamedValues(("idle", 1), ("pinging", 2), ("pingComplete", 3), ("failed", 4)) wwpLeosPingMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1)) wwpLeosPingDelay = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100000))).setMaxAccess("readwrite") if mibBuilder.loadTexts: wwpLeosPingDelay.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingDelay.setDescription('The object specifies the minimum amount of time to wait before sending the next packet in a sequence after receiving a response or declaring a timeout for a previous packet.') wwpLeosPingPacketSize = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 1464)).clone(56)).setMaxAccess("readwrite") if mibBuilder.loadTexts: wwpLeosPingPacketSize.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingPacketSize.setDescription('The size of the ping packets to send to the target.') wwpLeosPingActivate = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 3), TruthValue()).setMaxAccess("readwrite") if mibBuilder.loadTexts: wwpLeosPingActivate.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingActivate.setDescription("Ping can be activated by setting this object to true. Once the ping operation is completed, the object is set to 'false'. This object can be set to 'false' by the Management Station to stop the ping.") wwpLeosPingAddrType = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 4), AddressFamilyNumbers()).setMaxAccess("readonly") if mibBuilder.loadTexts: wwpLeosPingAddrType.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingAddrType.setDescription('The address type associated with wwpLeosPingAddr. 
With the new wwpLeosPingInetAddrType being introduced to support RFC 4001, this OID will only be used when wwpLeosPingAddr is a host name or an IPv4 address. Otherwise, it will be set to other(0).') wwpLeosPingAddr = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 5), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: wwpLeosPingAddr.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingAddr.setDescription('The host name or IP address of the device to be pinged. wwpLeosPingAddrType determines if address is host name or IP address.') wwpLeosPingPacketCount = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100000))).setMaxAccess("readwrite") if mibBuilder.loadTexts: wwpLeosPingPacketCount.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingPacketCount.setDescription('Specifies the number of ICMP requests to send to the target.') wwpLeosPingPacketTimeout = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100000))).setMaxAccess("readwrite") if mibBuilder.loadTexts: wwpLeosPingPacketTimeout.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingPacketTimeout.setDescription("Specifies the amount of time to wait for a response to a transmitted packet before declaring the packet 'dropped'.") wwpLeosPingSentPackets = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 8), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wwpLeosPingSentPackets.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingSentPackets.setDescription('The number of ping packets that have been sent to the target.') wwpLeosPingReceivedPackets = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 9), Counter32()).setMaxAccess("readonly") if mibBuilder.loadTexts: wwpLeosPingReceivedPackets.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingReceivedPackets.setDescription('The number of ping packets that have been received from the target.') 
wwpLeosPingFailCause = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 10), PingFailCause()).setMaxAccess("readonly") if mibBuilder.loadTexts: wwpLeosPingFailCause.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingFailCause.setDescription('The result of the ping.') wwpLeosPingState = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 11), PingState().clone('idle')).setMaxAccess("readonly") if mibBuilder.loadTexts: wwpLeosPingState.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingState.setDescription('The state of the ping process. The possible states include pinging, idle, complete or failed.') wwpLeosPingUntilStopped = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 12), TruthValue().clone('false')).setMaxAccess("readwrite") if mibBuilder.loadTexts: wwpLeosPingUntilStopped.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingUntilStopped.setDescription("Setting this object to true prior to wwpLeosPingActivate will cause the device to ping the specified host until wwpLeosPingActivate is set to false. The object cannot be modified once the ping is active. The object returns to 'false' once the ping is halted.") wwpLeosPingInetAddrType = MibScalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 13), InetAddressType()).setMaxAccess("readonly") if mibBuilder.loadTexts: wwpLeosPingInetAddrType.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingInetAddrType.setDescription('The Inet address type associated with wwpLeosPingAddr. 
When set to: ipv4 : wwpLeosPingAddr should be compliant with InetAddressIPv4 from RFC 4001 ipv6 : wwpLeosPingAddr should be compliant with InetAddressIPv6 from RFC 4001.') mibBuilder.exportSymbols("WWP-LEOS-PING-MIB", wwpLeosPingMIB=wwpLeosPingMIB, wwpLeosPingDelay=wwpLeosPingDelay, wwpLeosPingPacketTimeout=wwpLeosPingPacketTimeout, wwpLeosPingPacketSize=wwpLeosPingPacketSize, wwpLeosPingFailCause=wwpLeosPingFailCause, wwpLeosPingSentPackets=wwpLeosPingSentPackets, PingState=PingState, wwpLeosPingPacketCount=wwpLeosPingPacketCount, wwpLeosPingState=wwpLeosPingState, wwpLeosPingMIBObjects=wwpLeosPingMIBObjects, wwpLeosPingInetAddrType=wwpLeosPingInetAddrType, PingFailCause=PingFailCause, wwpLeosPingReceivedPackets=wwpLeosPingReceivedPackets, PYSNMP_MODULE_ID=wwpLeosPingMIB, wwpLeosPingAddrType=wwpLeosPingAddrType, wwpLeosPingUntilStopped=wwpLeosPingUntilStopped, wwpLeosPingActivate=wwpLeosPingActivate, wwpLeosPingAddr=wwpLeosPingAddr)
(object_identifier, integer, octet_string) = mibBuilder.importSymbols('ASN1', 'ObjectIdentifier', 'Integer', 'OctetString') (named_values,) = mibBuilder.importSymbols('ASN1-ENUMERATION', 'NamedValues') (single_value_constraint, constraints_union, constraints_intersection, value_range_constraint, value_size_constraint) = mibBuilder.importSymbols('ASN1-REFINEMENT', 'SingleValueConstraint', 'ConstraintsUnion', 'ConstraintsIntersection', 'ValueRangeConstraint', 'ValueSizeConstraint') (address_family_numbers,) = mibBuilder.importSymbols('IANA-ADDRESS-FAMILY-NUMBERS-MIB', 'AddressFamilyNumbers') (inet_address_type,) = mibBuilder.importSymbols('INET-ADDRESS-MIB', 'InetAddressType') (notification_group, module_compliance) = mibBuilder.importSymbols('SNMPv2-CONF', 'NotificationGroup', 'ModuleCompliance') (integer32, notification_type, module_identity, gauge32, mib_scalar, mib_table, mib_table_row, mib_table_column, bits, counter32, iso, object_identity, time_ticks, mib_identifier, counter64, unsigned32, ip_address) = mibBuilder.importSymbols('SNMPv2-SMI', 'Integer32', 'NotificationType', 'ModuleIdentity', 'Gauge32', 'MibScalar', 'MibTable', 'MibTableRow', 'MibTableColumn', 'Bits', 'Counter32', 'iso', 'ObjectIdentity', 'TimeTicks', 'MibIdentifier', 'Counter64', 'Unsigned32', 'IpAddress') (display_string, truth_value, textual_convention) = mibBuilder.importSymbols('SNMPv2-TC', 'DisplayString', 'TruthValue', 'TextualConvention') (wwp_modules_leos,) = mibBuilder.importSymbols('WWP-SMI', 'wwpModulesLeos') wwp_leos_ping_mib = module_identity((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19)) wwpLeosPingMIB.setRevisions(('2012-04-02 00:00', '2001-07-03 12:57')) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): if mibBuilder.loadTexts: wwpLeosPingMIB.setRevisionsDescriptions(('Add wwpLeosPingInetAddrType to support IP protocol version independent Inet addressing.', 'Initial Creation')) if mibBuilder.loadTexts: wwpLeosPingMIB.setLastUpdated('201204020000Z') if mibBuilder.loadTexts: 
wwpLeosPingMIB.setOrganization('Ciena, Inc') if mibBuilder.loadTexts: wwpLeosPingMIB.setContactInfo(' Mib Meister 115 North Sullivan Road Spokane Valley, WA 99037 USA Phone: +1 509 242 9000 Email: support@ciena.com') if mibBuilder.loadTexts: wwpLeosPingMIB.setDescription('The MIB for WWP Ping') class Pingfailcause(TextualConvention, Integer32): description = 'The cause of the last ping failure.' status = 'current' subtype_spec = Integer32.subtypeSpec + constraints_union(single_value_constraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17)) named_values = named_values(('unknownHost', 1), ('socketError', 2), ('bindError', 3), ('connectError', 4), ('missingHost', 5), ('asyncError', 6), ('nonBlockError', 7), ('mcastError', 8), ('ttlError', 9), ('mcastTtlError', 10), ('outputError', 11), ('unreachableError', 12), ('isAlive', 13), ('txRx', 14), ('commandCompleted', 15), ('noStatus', 16), ('sendRecvMismatch', 17)) class Pingstate(TextualConvention, Integer32): description = 'The state of the last ping request.' 
status = 'current' subtype_spec = Integer32.subtypeSpec + constraints_union(single_value_constraint(1, 2, 3, 4)) named_values = named_values(('idle', 1), ('pinging', 2), ('pingComplete', 3), ('failed', 4)) wwp_leos_ping_mib_objects = mib_identifier((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1)) wwp_leos_ping_delay = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 1), integer32().subtype(subtypeSpec=value_range_constraint(0, 100000))).setMaxAccess('readwrite') if mibBuilder.loadTexts: wwpLeosPingDelay.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingDelay.setDescription('The object specifies the minimum amount of time to wait before sending the next packet in a sequence after receiving a response or declaring a timeout for a previous packet.') wwp_leos_ping_packet_size = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 2), integer32().subtype(subtypeSpec=value_range_constraint(1, 1464)).clone(56)).setMaxAccess('readwrite') if mibBuilder.loadTexts: wwpLeosPingPacketSize.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingPacketSize.setDescription('The size of the ping packets to send to the target.') wwp_leos_ping_activate = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 3), truth_value()).setMaxAccess('readwrite') if mibBuilder.loadTexts: wwpLeosPingActivate.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingActivate.setDescription("Ping can be activated by setting this object to true. Once the ping operation is completed, the object is set to 'false'. This object can be set to 'false' by the Management Station to stop the ping.") wwp_leos_ping_addr_type = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 4), address_family_numbers()).setMaxAccess('readonly') if mibBuilder.loadTexts: wwpLeosPingAddrType.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingAddrType.setDescription('The address type associated with wwpLeosPingAddr. 
With the new wwpLeosPingInetAddrType being introduced to support RFC 4001, this OID will only be used when wwpLeosPingAddr is a host name or an IPv4 address. Otherwise, it will be set to other(0).') wwp_leos_ping_addr = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 5), display_string()).setMaxAccess('readwrite') if mibBuilder.loadTexts: wwpLeosPingAddr.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingAddr.setDescription('The host name or IP address of the device to be pinged. wwpLeosPingAddrType determines if address is host name or IP address.') wwp_leos_ping_packet_count = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 6), integer32().subtype(subtypeSpec=value_range_constraint(0, 100000))).setMaxAccess('readwrite') if mibBuilder.loadTexts: wwpLeosPingPacketCount.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingPacketCount.setDescription('Specifies the number of ICMP requests to send to the target.') wwp_leos_ping_packet_timeout = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 7), integer32().subtype(subtypeSpec=value_range_constraint(0, 100000))).setMaxAccess('readwrite') if mibBuilder.loadTexts: wwpLeosPingPacketTimeout.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingPacketTimeout.setDescription("Specifies the amount of time to wait for a response to a transmitted packet before declaring the packet 'dropped'.") wwp_leos_ping_sent_packets = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 8), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wwpLeosPingSentPackets.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingSentPackets.setDescription('The number of ping packets that have been sent to the target.') wwp_leos_ping_received_packets = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 9), counter32()).setMaxAccess('readonly') if mibBuilder.loadTexts: wwpLeosPingReceivedPackets.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingReceivedPackets.setDescription('The number of ping packets that have been 
received from the target.') wwp_leos_ping_fail_cause = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 10), ping_fail_cause()).setMaxAccess('readonly') if mibBuilder.loadTexts: wwpLeosPingFailCause.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingFailCause.setDescription('The result of the ping.') wwp_leos_ping_state = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 11), ping_state().clone('idle')).setMaxAccess('readonly') if mibBuilder.loadTexts: wwpLeosPingState.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingState.setDescription('The state of the ping process. The possible states include pinging, idle, complete or failed.') wwp_leos_ping_until_stopped = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 12), truth_value().clone('false')).setMaxAccess('readwrite') if mibBuilder.loadTexts: wwpLeosPingUntilStopped.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingUntilStopped.setDescription("Setting this object to true prior to wwpLeosPingActivate will cause the device to ping the specified host until wwpLeosPingActivate is set to false. The object cannot be modified once the ping is active. The object returns to 'false' once the ping is halted.") wwp_leos_ping_inet_addr_type = mib_scalar((1, 3, 6, 1, 4, 1, 6141, 2, 60, 19, 1, 13), inet_address_type()).setMaxAccess('readonly') if mibBuilder.loadTexts: wwpLeosPingInetAddrType.setStatus('current') if mibBuilder.loadTexts: wwpLeosPingInetAddrType.setDescription('The Inet address type associated with wwpLeosPingAddr. 
When set to: ipv4 : wwpLeosPingAddr should be compliant with InetAddressIPv4 from RFC 4001 ipv6 : wwpLeosPingAddr should be compliant with InetAddressIPv6 from RFC 4001.') mibBuilder.exportSymbols('WWP-LEOS-PING-MIB', wwpLeosPingMIB=wwpLeosPingMIB, wwpLeosPingDelay=wwpLeosPingDelay, wwpLeosPingPacketTimeout=wwpLeosPingPacketTimeout, wwpLeosPingPacketSize=wwpLeosPingPacketSize, wwpLeosPingFailCause=wwpLeosPingFailCause, wwpLeosPingSentPackets=wwpLeosPingSentPackets, PingState=PingState, wwpLeosPingPacketCount=wwpLeosPingPacketCount, wwpLeosPingState=wwpLeosPingState, wwpLeosPingMIBObjects=wwpLeosPingMIBObjects, wwpLeosPingInetAddrType=wwpLeosPingInetAddrType, PingFailCause=PingFailCause, wwpLeosPingReceivedPackets=wwpLeosPingReceivedPackets, PYSNMP_MODULE_ID=wwpLeosPingMIB, wwpLeosPingAddrType=wwpLeosPingAddrType, wwpLeosPingUntilStopped=wwpLeosPingUntilStopped, wwpLeosPingActivate=wwpLeosPingActivate, wwpLeosPingAddr=wwpLeosPingAddr)
# Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. { 'variables': { 'chromium_code': 1, 'chromium_root': '<(DEPTH)/third_party/chromium/src', }, 'targets': [ { 'target_name': 'instaweb_util', 'type': '<(library)', 'dependencies': [ '<(DEPTH)/base/base.gyp:base', ], 'include_dirs': [ '<(DEPTH)', ], 'export_dependent_settings': [ '<(DEPTH)/base/base.gyp:base', ], 'sources': [ # TODO(mdsteele): Add sources here as we need them. 'instaweb/util/function.cc', ], }, { 'target_name': 'spdy', 'type': '<(library)', 'dependencies': [ '<(DEPTH)/base/base.gyp:base', '<(DEPTH)/third_party/zlib/zlib.gyp:zlib', ], 'export_dependent_settings': [ '<(DEPTH)/base/base.gyp:base', ], 'include_dirs': [ '<(DEPTH)', '<(chromium_root)', ], 'sources': [ '<(chromium_root)/net/spdy/buffered_spdy_framer.cc', '<(chromium_root)/net/spdy/spdy_frame_builder.cc', '<(chromium_root)/net/spdy/spdy_frame_reader.cc', '<(chromium_root)/net/spdy/spdy_framer.cc', ], }, ], }
{'variables': {'chromium_code': 1, 'chromium_root': '<(DEPTH)/third_party/chromium/src'}, 'targets': [{'target_name': 'instaweb_util', 'type': '<(library)', 'dependencies': ['<(DEPTH)/base/base.gyp:base'], 'include_dirs': ['<(DEPTH)'], 'export_dependent_settings': ['<(DEPTH)/base/base.gyp:base'], 'sources': ['instaweb/util/function.cc']}, {'target_name': 'spdy', 'type': '<(library)', 'dependencies': ['<(DEPTH)/base/base.gyp:base', '<(DEPTH)/third_party/zlib/zlib.gyp:zlib'], 'export_dependent_settings': ['<(DEPTH)/base/base.gyp:base'], 'include_dirs': ['<(DEPTH)', '<(chromium_root)'], 'sources': ['<(chromium_root)/net/spdy/buffered_spdy_framer.cc', '<(chromium_root)/net/spdy/spdy_frame_builder.cc', '<(chromium_root)/net/spdy/spdy_frame_reader.cc', '<(chromium_root)/net/spdy/spdy_framer.cc']}]}
# AUTOGENERATED BY NBDEV! DO NOT EDIT! __all__ = ["index", "modules", "custom_doc_links", "git_url"] index = {"CoreClass": "00_core.ipynb", "TSDataFrame": "01_TSDataFrame.ipynb"} modules = ["core.py", "TSDataFrame.py"] doc_url = "https://alvaroof.github.io/nbdevtest/" git_url = "https://github.com/alvaroof/nbdevtest/tree/master/" def custom_doc_links(name): return None
__all__ = ['index', 'modules', 'custom_doc_links', 'git_url'] index = {'CoreClass': '00_core.ipynb', 'TSDataFrame': '01_TSDataFrame.ipynb'} modules = ['core.py', 'TSDataFrame.py'] doc_url = 'https://alvaroof.github.io/nbdevtest/' git_url = 'https://github.com/alvaroof/nbdevtest/tree/master/' def custom_doc_links(name): return None
# Time: O(n) # Space: O(1) class ListNode(object): def __init__(self, x): self.val = x self.next = None def __repr__(self): if self: return "{} -> {}".format(self.val, self.next) class Solution(object): # @param a ListNode # @return a ListNode def swapPairs(self, head): dummy = ListNode(0) dummy.next = head current = dummy while current.next and current.next.next: next_one, next_two, next_three = current.next, current.next.next, current.next.next.next current.next = next_two next_two.next = next_one next_one.next = next_three current = next_one return dummy.next
class Listnode(object):
    """Singly linked list node."""

    def __init__(self, x):
        self.val = x
        self.next = None

    def __repr__(self):
        if self:
            return '{} -> {}'.format(self.val, self.next)


class Solution(object):

    def swap_pairs(self, head):
        """Swap every two adjacent nodes and return the new head.

        FIX: the dummy head was constructed with `list_node(0)`, an
        undefined name that raised NameError on every call; the class is
        named `Listnode`.
        """
        dummy = Listnode(0)
        dummy.next = head
        current = dummy
        while current.next and current.next.next:
            (next_one, next_two, next_three) = (current.next, current.next.next, current.next.next.next)
            current.next = next_two
            next_two.next = next_one
            next_one.next = next_three
            current = next_one
        return dummy.next
class JobSummary(object):
    """Job summary data structure from the job format in couchdb."""

    def __init__(self, jobStatus=None):
        # Counter template mirroring the couchdb job-status document layout.
        # FIX: the original literal defined the "submitted" key twice
        # ({"first", "retry"} and then {"pending", "running"}); Python keeps
        # only the last duplicate, so getSubmitted() raised KeyError.  The
        # four counters are merged into a single dict here.
        self.jobStatus = {
            "success": 0,
            "canceled": 0,
            "transition": 0,
            "queued": {"first": 0, "retry": 0},
            "submitted": {"first": 0, "retry": 0, "pending": 0, "running": 0},
            "failure": {"create": 0, "submit": 0, "exception": 0},
            "cooloff": {"create": 0, "submit": 0, "job": 0},
            "paused": {"create": 0, "submit": 0, "job": 0},
        }
        if jobStatus is not None:
            self.addJobStatusInfo(jobStatus)

    def addJobStatusInfo(self, jobStatus):
        """Accumulate the counters of *jobStatus* into this summary.

        Only keys already present in the template are read; unknown keys in
        *jobStatus* are ignored.
        """
        # TODO need to validate the structure.
        for key, value in self.jobStatus.items():
            if isinstance(value, int):
                self.jobStatus[key] += jobStatus.get(key, 0)
            elif isinstance(value, dict):
                for secondKey in value:
                    if key in jobStatus and secondKey in jobStatus[key]:
                        self.jobStatus[key][secondKey] += jobStatus[key][secondKey]

    def addJobSummary(self, jobSummary):
        """Merge another JobSummary into this one."""
        self.addJobStatusInfo(jobSummary.jobStatus)

    def getTotalJobs(self):
        """Total of all terminal and in-flight job counters."""
        return (self.getSuccess() + self.jobStatus["canceled"] +
                self.jobStatus["transition"] + self.getFailure() +
                self.getCooloff() + self.getPaused() + self.getQueued() +
                self.getRunning() + self.getPending())

    def getSuccess(self):
        return self.jobStatus["success"]

    def getFailure(self):
        return (self.jobStatus["failure"]["create"] +
                self.jobStatus["failure"]["submit"] +
                self.jobStatus["failure"]["exception"])

    def getCompleted(self):
        return self.getSuccess() + self.getFailure()

    def getSubmitted(self):
        # Works again now that "first"/"retry" survive in the template (see __init__).
        return (self.jobStatus["submitted"]["first"] +
                self.jobStatus["submitted"]["retry"])

    def getRunning(self):
        return self.jobStatus["submitted"]["running"]

    def getPending(self):
        return self.jobStatus["submitted"]["pending"]

    def getCooloff(self):
        return (self.jobStatus["cooloff"]["create"] +
                self.jobStatus["cooloff"]["submit"] +
                self.jobStatus["cooloff"]["job"])

    def getPaused(self):
        return (self.jobStatus["paused"]["create"] +
                self.jobStatus["paused"]["submit"] +
                self.jobStatus["paused"]["job"])

    def getQueued(self):
        return (self.jobStatus["queued"]["first"] +
                self.jobStatus["queued"]["retry"])

    def getJSONStatus(self):
        # NOTE: 'sucess' is a misspelling, but consumers may key on it, so it
        # is kept for backward compatibility.
        return {'sucess': self.getSuccess(),
                'failure': self.getFailure(),
                'cooloff': self.getCooloff(),
                'running': self.getRunning(),
                'queued': self.getQueued(),
                'pending': self.getPending(),
                'paused': self.getPaused(),
                'created': self.getTotalJobs()}


class ProgressSummary(object):
    """Accumulates totalLumis/events/size progress counters."""

    def __init__(self, progressReport=None):
        self.progress = {"totalLumis": 0, "events": 0, "size": 0}
        if progressReport is not None:
            self.addProgressReport(progressReport)

    def addProgressReport(self, progressReport):
        # TODO need to validate the structure.
        for key in self.progress:
            self.progress[key] += progressReport.get(key, 0)

    def getReport(self):
        return self.progress


class TaskInfo(object):
    """Per-task job summary for one request."""

    def __init__(self, requestName, taskName, data):
        self.requestName = requestName
        self.taskName = taskName
        self.taskType = data.get('jobtype', "N/A")
        self.jobSummary = JobSummary(data.get('status', {}))

    def addTaskInfo(self, taskInfo):
        """Merge another TaskInfo for the same request/task.

        Raises Exception when request or task names differ.
        """
        if not (self.requestName == taskInfo.requestName and
                self.taskName == taskInfo.taskName):
            msg = "%s: %s, %s: %s, %s: %s" % (self.requestName, taskInfo.requestName,
                                              self.taskName, taskInfo.taskName,
                                              self.taskType, taskInfo.taskType)
            raise Exception("task doesn't match %s" % msg)
        self.jobSummary.addJobSummary(taskInfo.jobSummary)

    def getRequestName(self):
        return self.requestName

    def getTaskName(self):
        return self.taskName

    def getTaskType(self):
        return self.taskType

    def getJobSummary(self):
        return self.jobSummary

    def isTaskCompleted(self):
        """A task is complete when it has jobs and all of them terminated."""
        totalJobs = self.jobSummary.getTotalJobs()
        completedJobs = self.jobSummary.getCompleted()
        return totalJobs != 0 and totalJobs == completedJobs


class RequestInfo(object):
    """Aggregated job/task information for a single request."""

    def __init__(self, data):
        """
        data structure is {'request_name1': {'agent_url1': {'status' }
        """
        self.setData(data)

    def setData(self, data):
        # If RequestName doesn't exist, try legacy format (workflow).
        if 'RequestName' in data:
            self.requestName = data['RequestName']
        else:
            self.requestName = data['workflow']
        self.data = data
        self.jobSummaryByAgent = {}
        self.tasks = {}
        self.tasksByAgent = {}
        self.jobSummary = JobSummary()
        if 'AgentJobInfo' in data:
            for agentUrl, agentRequestInfo in data['AgentJobInfo'].items():
                self.jobSummary.addJobStatusInfo(agentRequestInfo.get('status', {}))
                self.jobSummaryByAgent[agentUrl] = JobSummary(agentRequestInfo.get('status', {}))
                if 'tasks' in agentRequestInfo:
                    self.tasksByAgent[agentUrl] = {}
                    # FIX(cleanliness): the inner loop used to rebind `data`,
                    # shadowing the method argument; renamed to taskData.
                    for taskName, taskData in agentRequestInfo['tasks'].items():
                        if taskName not in self.tasks:
                            self.tasks[taskName] = TaskInfo(self.requestName, taskName, taskData)
                        else:
                            self.tasks[taskName].addTaskInfo(TaskInfo(self.requestName, taskName, taskData))
                        # only one task by one agent - don't need to combine
                        self.tasksByAgent[agentUrl][taskName] = TaskInfo(self.requestName, taskName, taskData)

    def getJobSummary(self):
        return self.jobSummary

    def getJobSummaryByAgent(self, agentUrl=None):
        if agentUrl:
            return self.jobSummaryByAgent[agentUrl]
        return self.jobSummaryByAgent

    def getTasksByAgent(self, agentUrl=None):
        if agentUrl:
            return self.tasksByAgent[agentUrl]
        return self.tasksByAgent

    def getTasks(self):
        return self.tasks

    def getTotalTopLevelJobs(self):
        return self.data.get("total_jobs", "N/A")

    def getTotalTopLevelJobsInWMBS(self):
        inWMBS = 0
        if "AgentJobInfo" in self.data:
            for agentRequestInfo in self.data["AgentJobInfo"].values():
                inWMBS += agentRequestInfo['status'].get('inWMBS', 0)
        return inWMBS

    def getTotalInputLumis(self):
        return self.data.get("input_lumis", "N/A")

    def getTotalInputEvents(self):
        return self.data.get("input_events", "N/A")

    def getProgressSummaryByOutputDataset(self):
        """
        check sampleResult.json for datastructure
        """
        datasets = {}
        if "AgentJobInfo" not in self.data:
            # there is no report yet (no agent has reported)
            return datasets
        for agentRequestInfo in self.data["AgentJobInfo"].values():
            tasks = agentRequestInfo.get("tasks", [])
            for task in tasks:
                for site in tasks[task].get("sites", []):
                    for outputDS in tasks[task]["sites"][site].get("dataset", {}):
                        # TODO: need update the record instead of replacing.
                        datasets.setdefault(outputDS, ProgressSummary())
                        datasets[outputDS].addProgressReport(tasks[task]["sites"][site]["dataset"][outputDS])
        return datasets

    def filterRequest(self, conditionFunc):
        return conditionFunc(self.data)

    def getRequestTransition(self):
        return self.data["request_status"]

    def getRequestStatus(self, timeFlag=False):
        if timeFlag:
            return self.data["request_status"][-1]
        return self.data["request_status"][-1]['status']

    def isWorkflowFinished(self):
        """
        check whether workflow is completed including LogCollect and CleanUp tasks
        TODO: If the parent task all failed and next task are not created at all,
        It can't detect complete status.
        If the one of the task doesn't contain any jobs, it will return False
        """
        if len(self.tasks) == 0:
            return False
        for taskInfo in self.tasks.values():
            if not taskInfo.isTaskCompleted():
                return False
        return True


class RequestInfoCollection(object):
    """Collection of RequestInfo objects keyed by request name."""

    def __init__(self, data):
        self.collection = {}
        self.setData(data)

    def setData(self, data):
        for requestName, requestInfo in data.items():
            self.collection[requestName] = RequestInfo(requestInfo)

    def getData(self):
        return self.collection

    def filterRequests(self, conditionFunc):
        """Return the sub-dict of requests for which conditionFunc(data) is truthy."""
        filtered = {}
        for name, reqInfo in self.collection.items():
            if reqInfo.filterRequest(conditionFunc):
                filtered[name] = reqInfo
        return filtered

    def getJSONData(self):
        result = {}
        for requestInfo in self.collection.values():
            result[requestInfo.requestName] = {}
            for agentUrl, jobSummary in requestInfo.getJobSummaryByAgent().items():
                result[requestInfo.requestName][agentUrl] = jobSummary.getJSONStatus()
        return result
class Jobsummary(object):
    """Job summary data structure from the job format in couchdb."""

    def __init__(self, jobStatus=None):
        # FIX: "submitted" was declared twice in the literal (only the last
        # duplicate survives), which broke get_submitted(); the four counters
        # are merged into one dict.
        self.jobStatus = {
            'success': 0,
            'canceled': 0,
            'transition': 0,
            'queued': {'first': 0, 'retry': 0},
            'submitted': {'first': 0, 'retry': 0, 'pending': 0, 'running': 0},
            'failure': {'create': 0, 'submit': 0, 'exception': 0},
            'cooloff': {'create': 0, 'submit': 0, 'job': 0},
            'paused': {'create': 0, 'submit': 0, 'job': 0},
        }
        if jobStatus is not None:
            # FIX: called self.addJobStatusInfo, which no longer exists after
            # the snake_case rename.
            self.add_job_status_info(jobStatus)

    def add_job_status_info(self, jobStatus):
        """Accumulate the counters of *jobStatus* into this summary."""
        for key, value in self.jobStatus.items():
            if isinstance(value, int):
                self.jobStatus[key] += jobStatus.get(key, 0)
            elif isinstance(value, dict):
                # FIX: loop unpacked second_key but the body referenced the
                # undefined camelCase name secondKey (NameError).
                for second_key in value:
                    if key in jobStatus and second_key in jobStatus[key]:
                        self.jobStatus[key][second_key] += jobStatus[key][second_key]

    def add_job_summary(self, jobSummary):
        """Merge another Jobsummary into this one."""
        self.add_job_status_info(jobSummary.jobStatus)

    def get_total_jobs(self):
        """Total of all terminal and in-flight job counters."""
        # FIX: all internal calls below used the pre-rename camelCase
        # method names (getSuccess, getFailure, ...), which do not exist.
        return (self.get_success() + self.jobStatus['canceled'] +
                self.jobStatus['transition'] + self.get_failure() +
                self.get_cooloff() + self.get_paused() + self.get_queued() +
                self.get_running() + self.get_pending())

    def get_success(self):
        return self.jobStatus['success']

    def get_failure(self):
        return (self.jobStatus['failure']['create'] +
                self.jobStatus['failure']['submit'] +
                self.jobStatus['failure']['exception'])

    def get_completed(self):
        return self.get_success() + self.get_failure()

    def get_submitted(self):
        return (self.jobStatus['submitted']['first'] +
                self.jobStatus['submitted']['retry'])

    def get_running(self):
        return self.jobStatus['submitted']['running']

    def get_pending(self):
        return self.jobStatus['submitted']['pending']

    def get_cooloff(self):
        return (self.jobStatus['cooloff']['create'] +
                self.jobStatus['cooloff']['submit'] +
                self.jobStatus['cooloff']['job'])

    def get_paused(self):
        return (self.jobStatus['paused']['create'] +
                self.jobStatus['paused']['submit'] +
                self.jobStatus['paused']['job'])

    def get_queued(self):
        return (self.jobStatus['queued']['first'] +
                self.jobStatus['queued']['retry'])

    def get_json_status(self):
        # NOTE: 'sucess' is a misspelling kept for backward compatibility with
        # existing consumers of this dict.
        return {'sucess': self.get_success(),
                'failure': self.get_failure(),
                'cooloff': self.get_cooloff(),
                'running': self.get_running(),
                'queued': self.get_queued(),
                'pending': self.get_pending(),
                'paused': self.get_paused(),
                'created': self.get_total_jobs()}


class Progresssummary(object):
    """Accumulates totalLumis/events/size progress counters."""

    def __init__(self, progressReport=None):
        self.progress = {'totalLumis': 0, 'events': 0, 'size': 0}
        if progressReport is not None:
            # FIX: was self.addProgressReport (pre-rename name).
            self.add_progress_report(progressReport)

    def add_progress_report(self, progressReport):
        for key in self.progress:
            self.progress[key] += progressReport.get(key, 0)

    def get_report(self):
        return self.progress


class Taskinfo(object):
    """Per-task job summary for one request."""

    def __init__(self, requestName, taskName, data):
        self.requestName = requestName
        self.taskName = taskName
        self.taskType = data.get('jobtype', 'N/A')
        # FIX: was job_summary(...) — an undefined name; construct Jobsummary.
        self.jobSummary = Jobsummary(data.get('status', {}))

    def add_task_info(self, taskInfo):
        """Merge another Taskinfo for the same request/task.

        Raises Exception when request or task names differ.
        """
        if not (self.requestName == taskInfo.requestName and
                self.taskName == taskInfo.taskName):
            msg = '%s: %s, %s: %s, %s: %s' % (self.requestName, taskInfo.requestName,
                                              self.taskName, taskInfo.taskName,
                                              self.taskType, taskInfo.taskType)
            # FIX: was `raise exception(...)` — undefined; use builtin Exception.
            raise Exception("task doesn't match %s" % msg)
        self.jobSummary.add_job_summary(taskInfo.jobSummary)

    def get_request_name(self):
        return self.requestName

    def get_task_name(self):
        return self.taskName

    def get_task_type(self):
        return self.taskType

    def get_job_summary(self):
        return self.jobSummary

    def is_task_completed(self):
        """A task is complete when it has jobs and all of them terminated."""
        # FIX: the return statement referenced the camelCase totalJobs /
        # completedJobs while the locals were snake_case.
        total_jobs = self.jobSummary.get_total_jobs()
        completed_jobs = self.jobSummary.get_completed()
        return total_jobs != 0 and total_jobs == completed_jobs


class Requestinfo(object):
    """Aggregated job/task information for a single request."""

    def __init__(self, data):
        """
        data structure is {'request_name1': {'agent_url1': {'status' }
        """
        # FIX: was self.setData (pre-rename name).
        self.set_data(data)

    def set_data(self, data):
        if 'RequestName' in data:
            self.requestName = data['RequestName']
        else:
            self.requestName = data['workflow']
        self.data = data
        self.jobSummaryByAgent = {}
        self.tasks = {}
        self.tasksByAgent = {}
        self.jobSummary = Jobsummary()
        if 'AgentJobInfo' in data:
            # FIX: the loop unpacked snake_case names but the body referenced
            # the old camelCase names (agentRequestInfo, agentUrl, taskName)
            # and undefined constructors (job_summary, task_info).
            for agent_url, agent_request_info in data['AgentJobInfo'].items():
                self.jobSummary.add_job_status_info(agent_request_info.get('status', {}))
                self.jobSummaryByAgent[agent_url] = Jobsummary(agent_request_info.get('status', {}))
                if 'tasks' in agent_request_info:
                    self.tasksByAgent[agent_url] = {}
                    for task_name, task_data in agent_request_info['tasks'].items():
                        if task_name not in self.tasks:
                            self.tasks[task_name] = Taskinfo(self.requestName, task_name, task_data)
                        else:
                            self.tasks[task_name].add_task_info(Taskinfo(self.requestName, task_name, task_data))
                        # only one task by one agent - don't need to combine
                        self.tasksByAgent[agent_url][task_name] = Taskinfo(self.requestName, task_name, task_data)

    def get_job_summary(self):
        return self.jobSummary

    def get_job_summary_by_agent(self, agentUrl=None):
        if agentUrl:
            return self.jobSummaryByAgent[agentUrl]
        return self.jobSummaryByAgent

    def get_tasks_by_agent(self, agentUrl=None):
        if agentUrl:
            return self.tasksByAgent[agentUrl]
        return self.tasksByAgent

    def get_tasks(self):
        return self.tasks

    def get_total_top_level_jobs(self):
        return self.data.get('total_jobs', 'N/A')

    def get_total_top_level_jobs_in_wmbs(self):
        # FIX: mixed in_wmbs/inWMBS and agent_request_info/agentRequestInfo.
        in_wmbs = 0
        if 'AgentJobInfo' in self.data:
            for agent_request_info in self.data['AgentJobInfo'].values():
                in_wmbs += agent_request_info['status'].get('inWMBS', 0)
        return in_wmbs

    def get_total_input_lumis(self):
        return self.data.get('input_lumis', 'N/A')

    def get_total_input_events(self):
        return self.data.get('input_events', 'N/A')

    def get_progress_summary_by_output_dataset(self):
        """
        check sampleResult.json for datastructure
        """
        datasets = {}
        if 'AgentJobInfo' not in self.data:
            # there is no report yet (no agent has reported)
            return datasets
        # FIX: body referenced agentRequestInfo/outputDS and the undefined
        # progress_summary constructor.
        for agent_request_info in self.data['AgentJobInfo'].values():
            tasks = agent_request_info.get('tasks', [])
            for task in tasks:
                for site in tasks[task].get('sites', []):
                    for output_ds in tasks[task]['sites'][site].get('dataset', {}):
                        datasets.setdefault(output_ds, Progresssummary())
                        datasets[output_ds].add_progress_report(tasks[task]['sites'][site]['dataset'][output_ds])
        return datasets

    def filter_request(self, conditionFunc):
        # FIX: body called the undefined condition_func instead of the parameter.
        return conditionFunc(self.data)

    def get_request_transition(self):
        return self.data['request_status']

    def get_request_status(self, timeFlag=False):
        if timeFlag:
            return self.data['request_status'][-1]
        return self.data['request_status'][-1]['status']

    def is_workflow_finished(self):
        """
        check whether workflow is completed including LogCollect and CleanUp tasks
        TODO: If the parent task all failed and next task are not created at all,
        It can't detect complete status.
        If the one of the task doesn't contain any jobs, it will return False
        """
        if len(self.tasks) == 0:
            return False
        for task_info in self.tasks.values():
            # FIX: used taskInfo.isTaskCompleted() — wrong local name and
            # pre-rename method name.
            if not task_info.is_task_completed():
                return False
        return True


class Requestinfocollection(object):
    """Collection of Requestinfo objects keyed by request name."""

    def __init__(self, data):
        self.collection = {}
        # FIX: was self.setData (pre-rename name).
        self.set_data(data)

    def set_data(self, data):
        # FIX: body referenced requestName/requestInfo and the undefined
        # request_info constructor.
        for request_name, request_info in data.items():
            self.collection[request_name] = Requestinfo(request_info)

    def get_data(self):
        return self.collection

    def filter_requests(self, conditionFunc):
        """Return the sub-dict of requests for which conditionFunc(data) is truthy."""
        filtered = {}
        for name, req_info in self.collection.items():
            # FIX: used reqInfo.filterRequest — wrong local and method names.
            if req_info.filter_request(conditionFunc):
                filtered[name] = req_info
        return filtered

    def get_json_data(self):
        result = {}
        # FIX: loop variables and method names were all pre-rename camelCase.
        for request_info in self.collection.values():
            result[request_info.requestName] = {}
            for agent_url, job_summary in request_info.get_job_summary_by_agent().items():
                result[request_info.requestName][agent_url] = job_summary.get_json_status()
        return result
# Read two integers N and K from one line of stdin; print N - K when K
# exceeds 1, otherwise print 0.
N, K = (int(token) for token in input().split())
print(N - K if K > 1 else 0)
# Read two integers from one line of stdin.
(n, k) = map(int, input().split())
# FIX: the branch below referenced the undefined upper-case names K and N
# (NameError at runtime); use the unpacked lower-case names.
if k > 1:
    diff = n - k
    print(diff)
else:
    print(0)
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.

load("@bazel_skylib//lib:shell.bzl", "shell")
load("//antlir/bzl:oss_shim.bzl", "buck_genrule")
load("//antlir/bzl:shape.bzl", "shape")
load(":flavor_helpers.bzl", "flavor_helpers")
load(":gpt.shape.bzl", "gpt_partition_t", "gpt_t")
load(":image_utils.bzl", "image_utils")

# Build a gpt_partition_t shape describing one partition of a GPT disk image.
# `package` is the partition payload; is_esp / is_bios_boot mark the EFI
# system partition and the BIOS boot partition respectively.
def image_gpt_partition(package, is_esp = False, is_bios_boot = False, name = None):
    return shape.new(
        gpt_partition_t,
        package = package,
        is_esp = is_esp,
        is_bios_boot = is_bios_boot,
        name = name,
    )

# Emit a buck_genrule that assembles a GPT disk image named `name` from
# `table` (a list of image_gpt_partition() shapes), running //antlir:gpt
# inside the given (or default) build appliance.
def image_gpt(
        name,
        table,
        disk_guid = None,
        visibility = None,
        build_appliance = None):
    visibility = visibility or []
    # Fall back to the repo-default flavor's build appliance when none is given.
    build_appliance = build_appliance or flavor_helpers.default_flavor_build_appliance

    gpt = shape.new(gpt_t, name = name, table = table, disk_guid = disk_guid)

    buck_genrule(
        name = name,
        bash = image_utils.wrap_bash_build_in_common_boilerplate(
            self_dependency = "//antlir/bzl:image_gpt",
            # The gpt shape is serialized to JSON and passed on the CLI.
            bash = '''
            $(exe //antlir:gpt) \
              --output-path "$OUT" \
              --gpt {opts_quoted} \
              --build-appliance $(query_outputs {build_appliance}) \
            '''.format(
                opts_quoted = shell.quote(shape.do_not_cache_me_json(gpt)),
                build_appliance = build_appliance,
            ),
            rule_type = "image_gpt",
            target_name = name,
        ),
        # NOTE: shape.do_not_cache_me_json implies this output must not be
        # cached (see `cacheable = False` below).
        cacheable = False,
        executable = True,
        visibility = visibility,
        antlir_rule = "user-internal",
    )
load('@bazel_skylib//lib:shell.bzl', 'shell')
load('//antlir/bzl:oss_shim.bzl', 'buck_genrule')
load('//antlir/bzl:shape.bzl', 'shape')
load(':flavor_helpers.bzl', 'flavor_helpers')
load(':gpt.shape.bzl', 'gpt_partition_t', 'gpt_t')
load(':image_utils.bzl', 'image_utils')

# Build a gpt_partition_t shape describing one partition of a GPT disk image.
def image_gpt_partition(package, is_esp=False, is_bios_boot=False, name=None):
    return shape.new(gpt_partition_t, package=package, is_esp=is_esp, is_bios_boot=is_bios_boot, name=name)

# Emit a buck_genrule that assembles a GPT disk image named `name` from
# `table` (a list of image_gpt_partition() shapes), running //antlir:gpt
# inside the given (or default) build appliance.  The serialized gpt shape
# is passed to the binary on the command line, so the rule is marked
# cacheable=False (see shape.do_not_cache_me_json).
def image_gpt(name, table, disk_guid=None, visibility=None, build_appliance=None):
    visibility = visibility or []
    # Fall back to the repo-default flavor's build appliance when none is given.
    build_appliance = build_appliance or flavor_helpers.default_flavor_build_appliance
    gpt = shape.new(gpt_t, name=name, table=table, disk_guid=disk_guid)
    buck_genrule(name=name, bash=image_utils.wrap_bash_build_in_common_boilerplate(self_dependency='//antlir/bzl:image_gpt', bash='\n            $(exe //antlir:gpt) --output-path "$OUT" --gpt {opts_quoted} --build-appliance $(query_outputs {build_appliance}) '.format(opts_quoted=shell.quote(shape.do_not_cache_me_json(gpt)), build_appliance=build_appliance), rule_type='image_gpt', target_name=name), cacheable=False, executable=True, visibility=visibility, antlir_rule='user-internal')
def part1(data):
    """Score the winning deck of an ordinary game of Combat."""
    return score(combat(*parse(data)))


def parse(data):
    """Split the joined input lines into one list of card values per player."""
    players = []
    for player, block in enumerate('\n'.join(data).split('\n\n'), 1):
        players.append([int(line.strip()) for line in block.split('\n')[1:]])
    return players


def combat(a, b):
    """Play regular Combat until one deck empties; return the winning deck."""
    while a and b:
        top_a, top_b = a.pop(0), b.pop(0)
        if top_a > top_b:
            a += [top_a, top_b]
        else:
            b += [top_b, top_a]
    return a or b


def score(cards):
    """Bottom card is worth 1, next 2, ... sum of worth * value."""
    return sum(pos * card for pos, card in enumerate(reversed(cards), 1))


def part2(data):
    """Score the winning deck of Recursive Combat."""
    return score(recursive_combat(*parse(data))[1])


def key(a, b):
    """String key for a pair of decks (legacy helper, unused)."""
    return ','.join(str(v) for v in a) + '#' + ','.join(str(v) for v in b)


def recursive_combat(a, b):
    """Play Recursive Combat; return (winning player number, winning deck)."""
    seen = set()
    while a and b:
        state = (tuple(a), tuple(b))
        if state in seen:
            # Repeated configuration: player 1 wins the game outright.
            return 1, a
        seen.add(state)
        top_a, top_b = a.pop(0), b.pop(0)
        if len(a) >= top_a and len(b) >= top_b:
            # Recurse on copies of the next top_a / top_b cards.
            round_winner = recursive_combat(a[:top_a].copy(), b[:top_b].copy())[0]
        else:
            round_winner = 1 if top_a > top_b else 2
        if round_winner == 1:
            a += [top_a, top_b]
        else:
            b += [top_b, top_a]
    return (1, a) if len(a) > len(b) else (2, b)
def part1(data):
    """Part 1: ordinary Combat; returns the winner's score."""
    decks = parse(data)
    return score(combat(decks[0], decks[1]))


def parse(data):
    """Parse the puzzle input into a list of decks (lists of ints)."""
    blocks = '\n'.join(data).split('\n\n')
    return [[int(s.strip()) for s in blk.split('\n')[1:]] for blk in blocks]


def combat(a, b):
    """Play one round of Combat recursively until a deck is empty."""
    if not a:
        return b
    if not b:
        return a
    x, y = a.pop(0), b.pop(0)
    return combat(a + [x, y], b) if x > y else combat(a, b + [y, x])


def score(cards):
    """Bottom card is worth 1, next 2, ...; return the weighted sum."""
    total = 0
    for worth, card in enumerate(reversed(cards), 1):
        total += worth * card
    return total


def part2(data):
    """Part 2: Recursive Combat; returns the winner's score."""
    return score(recursive_combat(*parse(data))[1])


def key(a, b):
    """String key for a pair of decks (legacy helper, unused)."""
    left = ','.join([str(i) for i in a])
    right = ','.join([str(i) for i in b])
    return left + '#' + right


def recursive_combat(a, b):
    """Play Recursive Combat; return (winning player number, winning deck)."""
    seen = set()
    while a and b:
        snapshot = (tuple(a), tuple(b))
        if snapshot in seen:
            # Loop-prevention rule: player 1 wins on a repeated state.
            return (1, a)
        seen.add(snapshot)
        x = a.pop(0)
        y = b.pop(0)
        if len(a) >= x and len(b) >= y:
            sub_winner, _ = recursive_combat(a[:x].copy(), b[:y].copy())
            player_one_takes = sub_winner == 1
        else:
            player_one_takes = x > y
        if player_one_takes:
            a.extend([x, y])
        else:
            b.extend([y, x])
    return (1, a) if len(a) > len(b) else (2, b)
# Multi-line mail template; placeholders are filled in with str.format.
template = """
From: <{from_email}>
To: <{to_email}>
Subject: {subject}
{message}"""

print(template.format(
    subject="You have mail!",
    from_email="a@example.com",
    to_email="b@example.com",
    message="Here's some mail for you.  Hope you enjoy the message!",
))
# Mail template assembled from adjacent string literals; placeholders are
# filled in with str.format.
template = (
    "\n"
    "From: <{from_email}>\n"
    "To: <{to_email}>\n"
    "Subject: {subject}\n"
    "{message}"
)

print(
    template.format(
        from_email="a@example.com",
        to_email="b@example.com",
        subject="You have mail!",
        message="Here's some mail for you. Hope you enjoy the message!",
    )
)
""" bitproto.grammars ~~~~~~~~~~~~~~~~~ Grammar rules. """ # fmt: off r_optional_semicolon = """ optional_semicolon : ';' | """ r_start = """ start : open_global_scope global_scope close_global_scope """ r_open_global_scope = """ open_global_scope : """ r_close_global_scope = """ close_global_scope : """ r_global_scope = """ global_scope : global_scope_definitions """ r_global_scope_definitions = """ global_scope_definitions : global_scope_definition_unit global_scope_definitions | global_scope_definition_unit | """ r_global_scope_definition_unit = """ global_scope_definition_unit : import | option | alias | const | enum | message | proto | comment | newline """ r_proto = """ proto : PROTO IDENTIFIER optional_semicolon """ r_comment = """ comment : COMMENT NEWLINE """ r_newline = """ newline : NEWLINE """ r_import = """ import : IMPORT STRING_LITERAL optional_semicolon | IMPORT IDENTIFIER STRING_LITERAL optional_semicolon """ r_option = """ option : OPTION dotted_identifier '=' option_value optional_semicolon """ r_option_value = """ option_value : boolean_literal | integer_literal | string_literal """ r_alias = """ alias : TYPE IDENTIFIER '=' type optional_semicolon | TYPEDEF type IDENTIFIER optional_semicolon """ r_const = """ const : CONST IDENTIFIER '=' const_value optional_semicolon """ r_const_value = """ const_value : boolean_literal | string_literal | constant_reference | calculation_expression """ r_calculation_expression = """ calculation_expression : calculation_expression_plus | calculation_expression_minus | calculation_expression_times | calculation_expression_divide | calculation_expression_group | integer_literal | constant_reference_for_calculation """ r_calculation_expression_plus = """ calculation_expression_plus : calculation_expression PLUS calculation_expression """ r_calculation_expression_minus = """ calculation_expression_minus : calculation_expression MINUS calculation_expression """ r_calculation_expression_times = """ 
calculation_expression_times : calculation_expression TIMES calculation_expression """ r_calculation_expression_divide = """ calculation_expression_divide : calculation_expression DIVIDE calculation_expression """ r_calculation_expression_group = """ calculation_expression_group : '(' calculation_expression ')' """ r_constant_reference_for_calculation = """ constant_reference_for_calculation : constant_reference """ r_constant_reference = """ constant_reference : dotted_identifier """ r_type = """ type : single_type | array_type """ r_single_type = """ single_type : base_type | type_reference """ r_base_type = """ base_type : BOOL_TYPE | UINT_TYPE | INT_TYPE | BYTE_TYPE """ r_type_reference = """ type_reference : dotted_identifier """ r_optional_extensible_flag = """ optional_extensible_flag : "'" | """ r_array_type = """ array_type : single_type '[' array_capacity ']' optional_extensible_flag """ r_array_capacity = """ array_capacity : INT_LITERAL | constant_reference_for_array_capacity """ r_constant_reference_for_array_capacity = """ constant_reference_for_array_capacity : constant_reference """ r_enum = """ enum : open_enum_scope enum_scope close_enum_scope """ r_open_enum_scope = """ open_enum_scope : ENUM IDENTIFIER ':' UINT_TYPE '{' """ r_enum_scope = """ enum_scope : enum_items """ r_close_enum_scope = """ close_enum_scope : '}' """ r_enum_items = """ enum_items : enum_item enum_items | enum_item | """ r_enum_item = """ enum_item : enum_field | enum_item_unsupported | comment | newline """ r_enum_item_unsupported = """ enum_item_unsupported : alias | const | proto | import | option | enum | message | message_field """ r_enum_field = """ enum_field : IDENTIFIER '=' integer_literal optional_semicolon """ r_message = """ message : open_message_scope message_scope close_message_scope """ r_open_message_scope = """ open_message_scope : MESSAGE IDENTIFIER optional_extensible_flag '{' """ r_close_message_scope = """ close_message_scope : '}' """ r_message_scope = 
""" message_scope : message_items """ r_message_items = """ message_items : message_item message_items | message_item | """ r_message_item = """ message_item : option | enum | message_field | message | message_item_unsupported | comment | newline """ r_message_item_unsupported = """ message_item_unsupported : alias | const | proto | import """ r_message_field = """ message_field : type message_field_name '=' INT_LITERAL optional_semicolon """ # https://github.com/hit9/bitproto/issues/39 # Allow some keywords to be message names. r_message_field_name = """ message_field_name : IDENTIFIER | TYPE """ r_boolean_literal = """ boolean_literal : BOOL_LITERAL """ r_integer_literal = """ integer_literal : INT_LITERAL | HEX_LITERAL """ r_string_literal = """ string_literal : STRING_LITERAL """ r_dotted_identifier = """ dotted_identifier : IDENTIFIER '.' dotted_identifier | IDENTIFIER """ # fmt: on
""" bitproto.grammars ~~~~~~~~~~~~~~~~~ Grammar rules. """ r_optional_semicolon = "\noptional_semicolon : ';'\n |\n" r_start = '\nstart : open_global_scope global_scope close_global_scope\n' r_open_global_scope = '\nopen_global_scope :\n' r_close_global_scope = '\nclose_global_scope :\n' r_global_scope = '\nglobal_scope : global_scope_definitions\n' r_global_scope_definitions = '\nglobal_scope_definitions : global_scope_definition_unit global_scope_definitions\n | global_scope_definition_unit\n |\n' r_global_scope_definition_unit = '\nglobal_scope_definition_unit : import\n | option\n | alias\n | const\n | enum\n | message\n | proto\n | comment\n | newline\n' r_proto = '\nproto : PROTO IDENTIFIER optional_semicolon\n' r_comment = '\ncomment : COMMENT NEWLINE\n' r_newline = '\nnewline : NEWLINE\n' r_import = '\nimport : IMPORT STRING_LITERAL optional_semicolon\n | IMPORT IDENTIFIER STRING_LITERAL optional_semicolon\n' r_option = "\noption : OPTION dotted_identifier '=' option_value optional_semicolon\n" r_option_value = '\noption_value : boolean_literal\n | integer_literal\n | string_literal\n' r_alias = "\nalias : TYPE IDENTIFIER '=' type optional_semicolon\n | TYPEDEF type IDENTIFIER optional_semicolon\n" r_const = "\nconst : CONST IDENTIFIER '=' const_value optional_semicolon\n" r_const_value = '\nconst_value : boolean_literal\n | string_literal\n | constant_reference\n | calculation_expression\n' r_calculation_expression = '\ncalculation_expression : calculation_expression_plus\n | calculation_expression_minus\n | calculation_expression_times\n | calculation_expression_divide\n | calculation_expression_group\n | integer_literal\n | constant_reference_for_calculation\n' r_calculation_expression_plus = '\ncalculation_expression_plus : calculation_expression PLUS calculation_expression\n' r_calculation_expression_minus = '\ncalculation_expression_minus : calculation_expression MINUS calculation_expression\n' r_calculation_expression_times = 
'\ncalculation_expression_times : calculation_expression TIMES calculation_expression\n' r_calculation_expression_divide = '\ncalculation_expression_divide : calculation_expression DIVIDE calculation_expression\n' r_calculation_expression_group = "\ncalculation_expression_group : '(' calculation_expression ')'\n" r_constant_reference_for_calculation = '\nconstant_reference_for_calculation : constant_reference\n' r_constant_reference = '\nconstant_reference : dotted_identifier\n' r_type = '\ntype : single_type\n | array_type\n' r_single_type = '\nsingle_type : base_type\n | type_reference\n' r_base_type = '\nbase_type : BOOL_TYPE\n | UINT_TYPE\n | INT_TYPE\n | BYTE_TYPE\n' r_type_reference = '\ntype_reference : dotted_identifier\n' r_optional_extensible_flag = '\noptional_extensible_flag : "\'"\n |\n' r_array_type = "\narray_type : single_type '[' array_capacity ']' optional_extensible_flag\n" r_array_capacity = '\narray_capacity : INT_LITERAL\n | constant_reference_for_array_capacity\n' r_constant_reference_for_array_capacity = '\nconstant_reference_for_array_capacity : constant_reference\n' r_enum = '\nenum : open_enum_scope enum_scope close_enum_scope\n' r_open_enum_scope = "\nopen_enum_scope : ENUM IDENTIFIER ':' UINT_TYPE '{'\n" r_enum_scope = '\nenum_scope : enum_items\n' r_close_enum_scope = "\nclose_enum_scope : '}'\n" r_enum_items = '\nenum_items : enum_item enum_items\n | enum_item\n |\n' r_enum_item = '\nenum_item : enum_field\n | enum_item_unsupported\n | comment\n | newline\n' r_enum_item_unsupported = '\nenum_item_unsupported : alias\n | const\n | proto\n | import\n | option\n | enum\n | message\n | message_field\n' r_enum_field = "\nenum_field : IDENTIFIER '=' integer_literal optional_semicolon\n" r_message = '\nmessage : open_message_scope message_scope close_message_scope\n' r_open_message_scope = "\nopen_message_scope : MESSAGE IDENTIFIER optional_extensible_flag '{'\n" r_close_message_scope = "\nclose_message_scope : '}'\n" r_message_scope = 
'\nmessage_scope : message_items\n' r_message_items = '\nmessage_items : message_item message_items\n | message_item\n |\n' r_message_item = '\nmessage_item : option\n | enum\n | message_field\n | message\n | message_item_unsupported\n | comment\n | newline\n' r_message_item_unsupported = '\nmessage_item_unsupported : alias\n | const\n | proto\n | import\n' r_message_field = "\nmessage_field : type message_field_name '=' INT_LITERAL optional_semicolon\n" r_message_field_name = '\nmessage_field_name : IDENTIFIER\n | TYPE\n' r_boolean_literal = '\nboolean_literal : BOOL_LITERAL\n' r_integer_literal = '\ninteger_literal : INT_LITERAL\n | HEX_LITERAL\n' r_string_literal = '\nstring_literal : STRING_LITERAL\n' r_dotted_identifier = "\ndotted_identifier : IDENTIFIER '.' dotted_identifier\n | IDENTIFIER\n"
# -*- coding: utf-8 -*-
"""
@author: krakowiakpawel9@gmail.com
@site: e-smartdata.org
"""

# An empty list built via the list() constructor.
empty_list = list()
print(empty_list)

# %%
# Lists are mutable: elements can be replaced in place.
techs = ['python', 'java', 'c++', 'go', 'sql']
techs[0] = 'python 3.7'
print(techs)

# %%
numbers = [3, 5, 3, 5, 23]
print(numbers)
print(type(numbers))

# %%
# A list may mix types freely.
mixed = ['python', 3.7, 4, True]
print(mixed)

# %%
empty = []
nested = [[1, 2, [3, 'sql']], ['python', 'java', 'go'], 3]

# %%
first = ['mleko', 'ziemniaki', 'makaron']
second = ['woda', 'jajka']
bucket = [first, second]

# %%
len(bucket)

# %%
# += extends a list in place with another list's elements.
techs = ['python', 'java', 'c++', 'go', 'sql']
techs += ['javascript']
print(techs)

# %%
print(dir(list))
""" @author: krakowiakpawel9@gmail.com @site: e-smartdata.org """ empty_list = list() print(empty_list) techs = ['python', 'java', 'c++', 'go', 'sql'] techs[0] = 'python 3.7' print(techs) numbers = [3, 5, 3, 5, 23] print(numbers) print(type(numbers)) mixed = ['python', 3.7, 4, True] print(mixed) empty = [] nested = [[1, 2, [3, 'sql']], ['python', 'java', 'go'], 3] first = ['mleko', 'ziemniaki', 'makaron'] second = ['woda', 'jajka'] bucket = [first, second] len(bucket) techs = ['python', 'java', 'c++', 'go', 'sql'] techs += ['javascript'] print(techs) print(dir(list))
"""modelinfo_cfg - TEPPr configuration This is mutable object. * one teppr cfg points to only one dataset * one teppr cfg can have many trainings and respective evaluations, reportings and tracks which of the training is published * The sequence of trainings can be parallel schedule or sequence schedule. * In case it is sequence schedule, the linkage between the sequence provides the insight on hyper tunning * Two differnt teppr cfg can be inter-related from the perspective of transfer learning i.e. base model that that is being used as the starting point. teppr workflow steps # step-1 a) point to a AI Dataset b) create training experiment * a) and b) can be done to create the batch workfload c) run training * training can run for all the workfload # step-2 a) create evaluation strategy b) run the evaluation #step-3 a) generate training and evaluation reports b) publish the model to AI port """ ## deprecated, and only for reference # teppr cfg tepprcfg = { "created_on": None ,"modified_on": None ,"aids_dbname": None ,"aids_id": None ,"timestamp": None ,"log_dir": "logs/<dbname>" # ,"dnnarch": None # ,"framework_type": None ,"train_mode": "training" ,"test_mode": "inference" ,"allowed_file_type":['.txt','.csv','.yml','.json'] ,"allowed_image_type":['.pdf','.png','.jpg','.jpeg','.gif'] ,"allowed_video_type":['.mp4'] ,"data": None ,"stats": None ,"summary": None ,"train":[] ,"evaluate": [] ,"predict": [] ,"publish": [] ,"report": [] } ## ARCH CFG traincfg = { "MODE": "training" ,"DEVICE": "/gpu:0" ## /cpu:0 or /gpu:0 ,"WEIGHTS": None ,"MODEL_INFO": "mask_rcnn-matterport-coco-1.yml" ,"LOAD_WEIGHTS":{ "BY_NAME": True ,"EXCLUDE": ['mrcnn_class_logits', 'mrcnn_bbox_fc', 'mrcnn_bbox', 'mrcnn_mask'] } ,"SCHEDULES":[ { "EPOCHS": 40 ,"LAYERS": "heads" ,"LEARNING_RATE": 0.001 } ,{ "EPOCHS": 120 ,"LAYERS": "4+" ,"LEARNING_RATE": 0.001 } ,{ "EPOCHS": 160 ,"LAYERS": "all" ,"LEARNING_RATE": 0.0001 } ] ,"CONFIG":{} } evaluatecfg = { "SAVE_VIZ_AND_JSON": True ,"MODE": "inference" 
,"DEVICE": "/gpu:0" ## /cpu:0 or /gpu:0 ,"WEIGHTS": None ,"MODEL_INFO": "mask_rcnn-vidteq-tsdr-1.yml" ,"LOAD_WEIGHTS":{ "BY_NAME": True ,"EXCLUDE": ['mrcnn_class_logits', 'mrcnn_bbox_fc', 'mrcnn_bbox', 'mrcnn_mask'] } ,"CONFIG":{ "DETECTION_MIN_CONFIDENCE": 0.9 ,"GPU_COUNT": 1 ,"IMAGES_PER_GPU": 1 ,"IMAGE_MIN_DIM": 720 ,"IMAGE_MAX_DIM": 1280 } } predictcfg = { "SAVE_VIZ_AND_JSON": True ,"MODE": "inference" ,"DEVICE": "/gpu:0" ## /cpu:0 or /gpu:0 ,"WEIGHTS": None ,"MODEL_INFO": "mask_rcnn-vidteq-tsdr-1.yml" ,"LOAD_WEIGHTS":{ "BY_NAME": True ,"EXCLUDE": ['mrcnn_class_logits', 'mrcnn_bbox_fc', 'mrcnn_bbox', 'mrcnn_mask'] } ,"CONFIG":{ "DETECTION_MIN_CONFIDENCE": 0.9 ,"GPU_COUNT": 1 ,"IMAGES_PER_GPU": 1 ,"IMAGE_MIN_DIM": 720 ,"IMAGE_MAX_DIM": 1280 } } ## publish_cfg is model info, whiv is linked to at least one item or more then one in the teppr cfg publishcfg = { "DNNARCH": None ,"FRAMEWORK_TYPE": None ,"ID": "tsdr" ,"PROBLEM_ID": "tsdr_segmentation" ,"ORG_NAME": "vidteq" ,"REL_NUM": None ,"CONFIG": {} ,"NAME": "tsdr" ,"DATASET": None ,"WEIGHTS_PATH": None ,"WEIGHTS": "ORG_NAME/ID/REL_NUM/DNNARCH" ,"PROTOTXT": None ,"NUM_CLASSES": None ,"CLASSINFO": [] ,"CLASSES": [] ,"DESCRIPTION": None ,"TIMESTAMP": None }
"""modelinfo_cfg - TEPPr configuration This is mutable object. * one teppr cfg points to only one dataset * one teppr cfg can have many trainings and respective evaluations, reportings and tracks which of the training is published * The sequence of trainings can be parallel schedule or sequence schedule. * In case it is sequence schedule, the linkage between the sequence provides the insight on hyper tunning * Two differnt teppr cfg can be inter-related from the perspective of transfer learning i.e. base model that that is being used as the starting point. teppr workflow steps # step-1 a) point to a AI Dataset b) create training experiment * a) and b) can be done to create the batch workfload c) run training * training can run for all the workfload # step-2 a) create evaluation strategy b) run the evaluation #step-3 a) generate training and evaluation reports b) publish the model to AI port """ tepprcfg = {'created_on': None, 'modified_on': None, 'aids_dbname': None, 'aids_id': None, 'timestamp': None, 'log_dir': 'logs/<dbname>', 'train_mode': 'training', 'test_mode': 'inference', 'allowed_file_type': ['.txt', '.csv', '.yml', '.json'], 'allowed_image_type': ['.pdf', '.png', '.jpg', '.jpeg', '.gif'], 'allowed_video_type': ['.mp4'], 'data': None, 'stats': None, 'summary': None, 'train': [], 'evaluate': [], 'predict': [], 'publish': [], 'report': []} traincfg = {'MODE': 'training', 'DEVICE': '/gpu:0', 'WEIGHTS': None, 'MODEL_INFO': 'mask_rcnn-matterport-coco-1.yml', 'LOAD_WEIGHTS': {'BY_NAME': True, 'EXCLUDE': ['mrcnn_class_logits', 'mrcnn_bbox_fc', 'mrcnn_bbox', 'mrcnn_mask']}, 'SCHEDULES': [{'EPOCHS': 40, 'LAYERS': 'heads', 'LEARNING_RATE': 0.001}, {'EPOCHS': 120, 'LAYERS': '4+', 'LEARNING_RATE': 0.001}, {'EPOCHS': 160, 'LAYERS': 'all', 'LEARNING_RATE': 0.0001}], 'CONFIG': {}} evaluatecfg = {'SAVE_VIZ_AND_JSON': True, 'MODE': 'inference', 'DEVICE': '/gpu:0', 'WEIGHTS': None, 'MODEL_INFO': 'mask_rcnn-vidteq-tsdr-1.yml', 'LOAD_WEIGHTS': {'BY_NAME': True, 'EXCLUDE': 
['mrcnn_class_logits', 'mrcnn_bbox_fc', 'mrcnn_bbox', 'mrcnn_mask']}, 'CONFIG': {'DETECTION_MIN_CONFIDENCE': 0.9, 'GPU_COUNT': 1, 'IMAGES_PER_GPU': 1, 'IMAGE_MIN_DIM': 720, 'IMAGE_MAX_DIM': 1280}} predictcfg = {'SAVE_VIZ_AND_JSON': True, 'MODE': 'inference', 'DEVICE': '/gpu:0', 'WEIGHTS': None, 'MODEL_INFO': 'mask_rcnn-vidteq-tsdr-1.yml', 'LOAD_WEIGHTS': {'BY_NAME': True, 'EXCLUDE': ['mrcnn_class_logits', 'mrcnn_bbox_fc', 'mrcnn_bbox', 'mrcnn_mask']}, 'CONFIG': {'DETECTION_MIN_CONFIDENCE': 0.9, 'GPU_COUNT': 1, 'IMAGES_PER_GPU': 1, 'IMAGE_MIN_DIM': 720, 'IMAGE_MAX_DIM': 1280}} publishcfg = {'DNNARCH': None, 'FRAMEWORK_TYPE': None, 'ID': 'tsdr', 'PROBLEM_ID': 'tsdr_segmentation', 'ORG_NAME': 'vidteq', 'REL_NUM': None, 'CONFIG': {}, 'NAME': 'tsdr', 'DATASET': None, 'WEIGHTS_PATH': None, 'WEIGHTS': 'ORG_NAME/ID/REL_NUM/DNNARCH', 'PROTOTXT': None, 'NUM_CLASSES': None, 'CLASSINFO': [], 'CLASSES': [], 'DESCRIPTION': None, 'TIMESTAMP': None}
input = """ #maxint = 10. total_score(S,X) :- t, S==9, X = #max{ Y :score(Y) }. total_score1(S,X) :- t, S=9, X = #max{ Y :score(Y) }. """ output = """ {} """
# ASP/DLV test fixture (escaped-string variant): a program using the
# #max aggregate and the expected answer set (an empty model).
# NOTE(review): `input` and `output` shadow builtins; presumably the
# test harness looks these names up — confirm before renaming.
input = '\n#maxint = 10.\n\ntotal_score(S,X) :- t, S==9, X = #max{ Y :score(Y) }.\ntotal_score1(S,X) :- t, S=9, X = #max{ Y :score(Y) }.\n\t\n'
# Expected solver output: one empty answer set.
output = '\n{}\n'
# 2021 June 13 13:48 - 14:06
# LeetCode 476 — Number Complement.
# The complement of, say, 10101 is 01010; their sum is 11111, i.e.
# 2**bits - 1. So the crux is knowing the bit width of the input.

class Solution:
    def findComplement(self, num: int) -> int:
        """Count num's bits, build an all-ones mask of that width,
        and subtract num from it."""
        width = 0
        original = num
        while num:
            num >>= 1
            width += 1
        return (1 << width) - 1 - original


# Alternatively, flip the number bit by bit — comparable in speed.
class Solution1:
    def findComplement(self, num: int) -> int:
        """Accumulate the flipped bits one position at a time."""
        result = 0
        shift = 0
        while num:
            result |= (1 - (num & 1)) << shift
            shift += 1
            num >>= 1
        return result


# The bit-smearing trick: or-ing num with num >> 1, >> 2, >> 4, >> 8,
# >> 16 propagates the top set bit rightwards, so a 32-bit value ends
# up with *all* bits below its leading bit set. Subtracting the
# original value then yields the complement directly.
class Solution2:
    def findComplement(self, num: int) -> int:
        """Smear the leading bit into an all-ones mask, then subtract."""
        mask = num
        for shift in (1, 2, 4, 8, 16):
            mask |= mask >> shift
        return mask - num


if __name__ == "__main__":
    print(Solution2().findComplement(5))
    print(Solution2().findComplement(1))
class Solution:
    """Complement via bit counting: (2**bits - 1) - num."""

    def find_complement(self, num: int) -> int:
        cnt = 0
        orig = num
        while num != 0:
            num >>= 1
            cnt += 1
        # An all-ones mask of the same bit width, minus the original.
        return (1 << cnt) - 1 - orig


class Solution1:
    """Complement by flipping the input bit by bit."""

    def find_complement(self, num: int) -> int:
        ans = 0
        bit = 0
        while num != 0:
            # XOR with 1 flips the low bit; place it at position `bit`.
            ans += (num & 1 ^ 1) << bit
            bit += 1
            num >>= 1
        return ans


class Solution2:
    """Complement via bit smearing: or-ing num with its right shifts
    propagates the leading bit into an all-ones mask of the same width;
    subtracting the original value yields the complement."""

    def find_complement(self, num: int) -> int:
        orig = num
        num |= num >> 1
        num |= num >> 2
        num |= num >> 4
        num |= num >> 8
        num |= num >> 16
        return num - orig


if __name__ == '__main__':
    # BUG FIX: previously called the undefined name `solution2` and the
    # stale camelCase method `findComplement`; the classes above define
    # `find_complement`.
    print(Solution2().find_complement(5))
    print(Solution2().find_complement(1))
# Observer pattern: observer objects register with a subject and receive
# update() callbacks whenever the subject's state changes.


class Subject:
    """What is observed; owns the observer registry."""

    def __init__(self):
        self._observers = []

    def notify(self, modifier=None):
        """Push the current state to every observer except *modifier*
        (typically the one that triggered the change)."""
        for obs in self._observers:
            if modifier != obs:
                obs.update(self)

    def attach(self, observer):
        """Register *observer*; duplicates are silently ignored."""
        if observer in self._observers:
            return
        self._observers.append(observer)

    def detach(self, observer):
        """Unregister *observer*; unknown observers are a no-op."""
        if observer in self._observers:
            self._observers.remove(observer)


class Data(Subject):
    """Concrete subject: assigning .data notifies every observer."""

    def __init__(self, name=''):
        super().__init__()
        self.name = name
        self._data = 0

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, value):
        self._data = value
        self.notify()


class HexViewer:
    """Observer rendering the subject's data in hexadecimal."""

    def update(self, subject):
        print('HexViewer: Subject {} has data 0x{:x}'.format(
            subject.name, subject.data))


class OctalViewer:
    """Observer rendering the subject's data in octal."""

    def update(self, subject):
        print('OctalViewer: Subject' + str(subject.name) + 'has data ' +
              str(oct(subject.data)))


class DecimalViewer:
    """Observer rendering the subject's data in decimal."""

    def update(self, subject):
        print('DecimalViewer: Subject % s has data % d' %
              (subject.name, subject.data))


if __name__ == "__main__":
    obj1 = Data('Data 1')
    obj2 = Data('Data 2')
    viewers = [DecimalViewer(), HexViewer(), OctalViewer()]
    for subject in (obj1, obj2):
        for viewer in viewers:
            subject.attach(viewer)
    obj1.data = 10
    obj2.data = 15
class Subject:
    """What is observed; owns the observer registry."""

    def __init__(self):
        self._observers = []

    def notify(self, modifier=None):
        # Push the new state to everyone except the observer that
        # triggered the change (the optional *modifier*).
        for observer in self._observers:
            if modifier != observer:
                observer.update(self)

    def attach(self, observer):
        # Register once; duplicates are silently ignored.
        if observer not in self._observers:
            self._observers.append(observer)

    def detach(self, observer):
        # Unregistering an unknown observer is a no-op.
        try:
            self._observers.remove(observer)
        except ValueError:
            pass


class Data(Subject):
    """Concrete subject: assigning .data notifies every observer."""

    def __init__(self, name=''):
        Subject.__init__(self)
        self.name = name
        self._data = 0

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, value):
        self._data = value
        self.notify()


class Hexviewer:
    """Observer rendering the subject's data in hexadecimal."""

    def update(self, subject):
        print('HexViewer: Subject {} has data 0x{:x}'.format(subject.name, subject.data))


class Octalviewer:
    """Observer rendering the subject's data in octal."""

    def update(self, subject):
        print('OctalViewer: Subject' + str(subject.name) + 'has data ' + str(oct(subject.data)))


class Decimalviewer:
    """Observer rendering the subject's data in decimal."""

    def update(self, subject):
        print('DecimalViewer: Subject % s has data % d' % (subject.name, subject.data))


if __name__ == '__main__':
    # BUG FIX: previously instantiated the undefined lowercase names
    # `data`, `decimal_viewer`, `hex_viewer`, `octal_viewer`; use the
    # classes defined above instead.
    obj1 = Data('Data 1')
    obj2 = Data('Data 2')
    view1 = Decimalviewer()
    view2 = Hexviewer()
    view3 = Octalviewer()
    obj1.attach(view1)
    obj1.attach(view2)
    obj1.attach(view3)
    obj2.attach(view1)
    obj2.attach(view2)
    obj2.attach(view3)
    obj1.data = 10
    obj2.data = 15
class InterfaceError(Exception):
    """Error that carries an extra human-readable message.

    When *human_message* is omitted, the technical *message* doubles as
    the user-facing text.
    """

    def __init__(self, message, human_message=None):
        super().__init__(message)
        self.human_message = message if human_message is None else human_message
class Interfaceerror(Exception):
    """Exception with a separate user-facing text.

    Falls back to the technical *message* when no *human_message* is
    supplied.
    """

    def __init__(self, message, human_message=None):
        super().__init__(message)
        # Prefer the explicit human-readable text; otherwise reuse the
        # technical message.
        self.human_message = human_message if human_message is not None else message
class Solution:
    """LeetCode 443 — String Compression (in-place run-length)."""

    def compress(self, chars: List[str]) -> int:
        """Collapse each run of equal characters to `char` + decimal
        count (count written only when > 1), mutating *chars* in place,
        and return the compressed length."""
        pos = 0
        while pos < len(chars) - 1:
            run_len = 1
            nxt = pos + 1
            # Delete duplicates of chars[pos]; after each delete, nxt
            # points at the next candidate right behind the run head.
            while pos < len(chars) - 1 and chars[nxt] == chars[pos]:
                del chars[nxt]
                run_len += 1
            if run_len > 1:
                # Insert the count digits right after the run head.
                for digit in str(run_len):
                    chars.insert(nxt, digit)
                    nxt += 1
            pos = nxt
        return len(chars)
class Solution:
    def compress(self, chars: List[str]) -> int:
        """Run-length compress *chars* in place and return its length.

        Every run of identical characters is reduced to the character
        followed by its decimal count; single characters keep no count.
        """
        i = 0
        while i < len(chars) - 1:
            count, j = 1, i + 1
            # Remove the run's duplicates; j stays just past the head.
            while i < len(chars) - 1 and chars[j] == chars[i]:
                del chars[j]
                count += 1
            if count != 1:
                for ch in str(count):
                    chars.insert(j, ch)
                    j += 1
            i = j
        return len(chars)
class RunnerException(Exception):
    """Base class for runner errors; keeps the message for formatting
    by subclasses."""

    def __init__(self, message=''):
        super().__init__()
        self.message = message


class ArgumentError(RunnerException):
    """Raised when the runner receives an invalid argument."""

    def __str__(self):
        # Prefix the stored message with the error kind.
        return 'ArgumentError: %s' % self.message
class Runnerexception(Exception):
    """Base class for runner errors; keeps the message for formatting
    by subclasses."""

    def __init__(self, message=''):
        super().__init__()
        self.message = message


class Argumenterror(Runnerexception):
    """Raised when the runner receives an invalid argument.

    BUG FIX: the base class was previously referenced by the undefined
    name `RunnerException`; the class defined above is `Runnerexception`,
    so importing this module raised NameError.
    """

    def __str__(self):
        return 'ArgumentError: %s' % self.message
"""StompListener: base class for a listener which will be invoked upon message arrival """ class StompListener(object): """StompListener: base class for a listener which will be invoked upon message arrival """ def on_message(self, frame): """Called by the STOMP receiver thread upon message arrival. Parameters ---------- frame: webstompy.StompFrame The frame containing the headers and the message """ pass
"""StompListener: base class for a listener which will be invoked upon message arrival """ class Stomplistener(object): """StompListener: base class for a listener which will be invoked upon message arrival """ def on_message(self, frame): """Called by the STOMP receiver thread upon message arrival. Parameters ---------- frame: webstompy.StompFrame The frame containing the headers and the message """ pass
words = "Life is short" def lazy_print(text): return lambda: print(text) task = lazy_print(words) task()
words = 'Life is short'


def lazy_print(text):
    """Wrap *text* in a thunk: printing is deferred until the returned
    zero-argument callable is invoked."""
    return lambda: print(text)


task = lazy_print(words)
task()