ngram
listlengths
0
82k
[ "True} } def __init__(self, options): super(ODOWriter, self).__init__(options) from flatson import", "None) self.odo_uri = self.read_option('odo_uri', None) self.flatson = Flatson(schema) self.logger.info('ODOWriter has", "to a odo destination. https://odo.readthedocs.org/en/latest/ Needed parameters: - schema (object)", "six import json import gzip from exporters.default_retries import retry_long from", "pandas as pd with gzip.open(dump_path) as f: lines = [json.loads(line.replace('\\n',", "as f: lines = [json.loads(line.replace('\\n', '')) for line in f.readlines()]", "= self.read_option('odo_uri', None) self.flatson = Flatson(schema) self.logger.info('ODOWriter has been initiated.", "Writing to: {}'.format(self.odo_uri)) @retry_long def write(self, dump_path, group_key=''): from odo", "json import gzip from exporters.default_retries import retry_long from exporters.writers.base_writer import", "from exporters.default_retries import retry_long from exporters.writers.base_writer import BaseWriter class ODOWriter(BaseWriter):", "(self.flatson.flatten(line) for line in lines) pf = pd.DataFrame(flattened_lines, columns=self.flatson.fieldnames) dshape", "import Flatson schema = self.read_option('schema', None) self.odo_uri = self.read_option('odo_uri', None)", "self).__init__(options) from flatson import Flatson schema = self.read_option('schema', None) self.odo_uri", "Flatson schema = self.read_option('schema', None) self.odo_uri = self.read_option('odo_uri', None) self.flatson", "Needed parameters: - schema (object) schema object. - odo_uri (str)", "def __init__(self, options): super(ODOWriter, self).__init__(options) from flatson import Flatson schema", "exporters.writers.base_writer import BaseWriter class ODOWriter(BaseWriter): \"\"\" Writes items to a", "Writes items to a odo destination. 
https://odo.readthedocs.org/en/latest/ Needed parameters: -", "= self.read_option('schema', None) self.odo_uri = self.read_option('odo_uri', None) self.flatson = Flatson(schema)", "odo, resource, discover import pandas as pd with gzip.open(dump_path) as", "gzip.open(dump_path) as f: lines = [json.loads(line.replace('\\n', '')) for line in", "gzip from exporters.default_retries import retry_long from exporters.writers.base_writer import BaseWriter class", "{'type': object, 'required': True}, 'odo_uri': {'type': six.string_types, 'required': True} }", "\"\"\" Writes items to a odo destination. https://odo.readthedocs.org/en/latest/ Needed parameters:", "destination. https://odo.readthedocs.org/en/latest/ Needed parameters: - schema (object) schema object. -", "lines) pf = pd.DataFrame(flattened_lines, columns=self.flatson.fieldnames) dshape = discover(pf) odo(pf, resource(self.odo_uri),", "items to a odo destination. https://odo.readthedocs.org/en/latest/ Needed parameters: - schema", "line in f.readlines()] flattened_lines = (self.flatson.flatten(line) for line in lines)", "six.string_types, 'required': True} } def __init__(self, options): super(ODOWriter, self).__init__(options) from", "exporters.default_retries import retry_long from exporters.writers.base_writer import BaseWriter class ODOWriter(BaseWriter): \"\"\"", "as pd with gzip.open(dump_path) as f: lines = [json.loads(line.replace('\\n', ''))", "self.odo_uri = self.read_option('odo_uri', None) self.flatson = Flatson(schema) self.logger.info('ODOWriter has been", "pd with gzip.open(dump_path) as f: lines = [json.loads(line.replace('\\n', '')) for", "been initiated. 
Writing to: {}'.format(self.odo_uri)) @retry_long def write(self, dump_path, group_key=''):", "import retry_long from exporters.writers.base_writer import BaseWriter class ODOWriter(BaseWriter): \"\"\" Writes", "'required': True} } def __init__(self, options): super(ODOWriter, self).__init__(options) from flatson", "from odo import odo, resource, discover import pandas as pd", "class ODOWriter(BaseWriter): \"\"\" Writes items to a odo destination. https://odo.readthedocs.org/en/latest/", "a odo destination. https://odo.readthedocs.org/en/latest/ Needed parameters: - schema (object) schema", "odo destination. https://odo.readthedocs.org/en/latest/ Needed parameters: - schema (object) schema object.", "} def __init__(self, options): super(ODOWriter, self).__init__(options) from flatson import Flatson", "f.readlines()] flattened_lines = (self.flatson.flatten(line) for line in lines) pf =", "odo_uri (str) ODO valid destination uri. \"\"\" requirements = {", "write(self, dump_path, group_key=''): from odo import odo, resource, discover import", "{ 'schema': {'type': object, 'required': True}, 'odo_uri': {'type': six.string_types, 'required':", "ODO valid destination uri. \"\"\" requirements = { 'schema': {'type':", "import odo, resource, discover import pandas as pd with gzip.open(dump_path)", "'required': True}, 'odo_uri': {'type': six.string_types, 'required': True} } def __init__(self,", "destination uri. \"\"\" requirements = { 'schema': {'type': object, 'required':", "f: lines = [json.loads(line.replace('\\n', '')) for line in f.readlines()] flattened_lines", "import gzip from exporters.default_retries import retry_long from exporters.writers.base_writer import BaseWriter", "(str) ODO valid destination uri. \"\"\" requirements = { 'schema':", "self.logger.info('ODOWriter has been initiated. 
Writing to: {}'.format(self.odo_uri)) @retry_long def write(self,", "@retry_long def write(self, dump_path, group_key=''): from odo import odo, resource,", "True}, 'odo_uri': {'type': six.string_types, 'required': True} } def __init__(self, options):", "{'type': six.string_types, 'required': True} } def __init__(self, options): super(ODOWriter, self).__init__(options)", "options): super(ODOWriter, self).__init__(options) from flatson import Flatson schema = self.read_option('schema',", "BaseWriter class ODOWriter(BaseWriter): \"\"\" Writes items to a odo destination.", "in lines) pf = pd.DataFrame(flattened_lines, columns=self.flatson.fieldnames) dshape = discover(pf) odo(pf,", "object. - odo_uri (str) ODO valid destination uri. \"\"\" requirements", "parameters: - schema (object) schema object. - odo_uri (str) ODO", "= Flatson(schema) self.logger.info('ODOWriter has been initiated. Writing to: {}'.format(self.odo_uri)) @retry_long", "\"\"\" requirements = { 'schema': {'type': object, 'required': True}, 'odo_uri':", "retry_long from exporters.writers.base_writer import BaseWriter class ODOWriter(BaseWriter): \"\"\" Writes items", "= [json.loads(line.replace('\\n', '')) for line in f.readlines()] flattened_lines = (self.flatson.flatten(line)", "= (self.flatson.flatten(line) for line in lines) pf = pd.DataFrame(flattened_lines, columns=self.flatson.fieldnames)", "Flatson(schema) self.logger.info('ODOWriter has been initiated. 
Writing to: {}'.format(self.odo_uri)) @retry_long def", "{}'.format(self.odo_uri)) @retry_long def write(self, dump_path, group_key=''): from odo import odo,", "[json.loads(line.replace('\\n', '')) for line in f.readlines()] flattened_lines = (self.flatson.flatten(line) for", "import json import gzip from exporters.default_retries import retry_long from exporters.writers.base_writer", "resource, discover import pandas as pd with gzip.open(dump_path) as f:", "import six import json import gzip from exporters.default_retries import retry_long", "with gzip.open(dump_path) as f: lines = [json.loads(line.replace('\\n', '')) for line", "from flatson import Flatson schema = self.read_option('schema', None) self.odo_uri =", "initiated. Writing to: {}'.format(self.odo_uri)) @retry_long def write(self, dump_path, group_key=''): from", "__init__(self, options): super(ODOWriter, self).__init__(options) from flatson import Flatson schema =", "self.read_option('schema', None) self.odo_uri = self.read_option('odo_uri', None) self.flatson = Flatson(schema) self.logger.info('ODOWriter", "requirements = { 'schema': {'type': object, 'required': True}, 'odo_uri': {'type':", "schema = self.read_option('schema', None) self.odo_uri = self.read_option('odo_uri', None) self.flatson =", "(object) schema object. - odo_uri (str) ODO valid destination uri.", "for line in f.readlines()] flattened_lines = (self.flatson.flatten(line) for line in", "self.flatson = Flatson(schema) self.logger.info('ODOWriter has been initiated. Writing to: {}'.format(self.odo_uri))", "to: {}'.format(self.odo_uri)) @retry_long def write(self, dump_path, group_key=''): from odo import", "'odo_uri': {'type': six.string_types, 'required': True} } def __init__(self, options): super(ODOWriter,", "schema object. - odo_uri (str) ODO valid destination uri. \"\"\"", "discover import pandas as pd with gzip.open(dump_path) as f: lines", "ODOWriter(BaseWriter): \"\"\" Writes items to a odo destination. 
https://odo.readthedocs.org/en/latest/ Needed", "line in lines) pf = pd.DataFrame(flattened_lines, columns=self.flatson.fieldnames) dshape = discover(pf)", "from exporters.writers.base_writer import BaseWriter class ODOWriter(BaseWriter): \"\"\" Writes items to", "'schema': {'type': object, 'required': True}, 'odo_uri': {'type': six.string_types, 'required': True}", "group_key=''): from odo import odo, resource, discover import pandas as", "self.read_option('odo_uri', None) self.flatson = Flatson(schema) self.logger.info('ODOWriter has been initiated. Writing", "flattened_lines = (self.flatson.flatten(line) for line in lines) pf = pd.DataFrame(flattened_lines,", "super(ODOWriter, self).__init__(options) from flatson import Flatson schema = self.read_option('schema', None)", "pf = pd.DataFrame(flattened_lines, columns=self.flatson.fieldnames) dshape = discover(pf) odo(pf, resource(self.odo_uri), dshape=dshape)", "schema (object) schema object. - odo_uri (str) ODO valid destination", "has been initiated. Writing to: {}'.format(self.odo_uri)) @retry_long def write(self, dump_path,", "- schema (object) schema object. - odo_uri (str) ODO valid", "lines = [json.loads(line.replace('\\n', '')) for line in f.readlines()] flattened_lines =", "object, 'required': True}, 'odo_uri': {'type': six.string_types, 'required': True} } def", "valid destination uri. \"\"\" requirements = { 'schema': {'type': object,", "= { 'schema': {'type': object, 'required': True}, 'odo_uri': {'type': six.string_types,", "dump_path, group_key=''): from odo import odo, resource, discover import pandas", "None) self.flatson = Flatson(schema) self.logger.info('ODOWriter has been initiated. Writing to:", "uri. 
\"\"\" requirements = { 'schema': {'type': object, 'required': True},", "odo import odo, resource, discover import pandas as pd with", "in f.readlines()] flattened_lines = (self.flatson.flatten(line) for line in lines) pf", "'')) for line in f.readlines()] flattened_lines = (self.flatson.flatten(line) for line", "def write(self, dump_path, group_key=''): from odo import odo, resource, discover", "https://odo.readthedocs.org/en/latest/ Needed parameters: - schema (object) schema object. - odo_uri", "flatson import Flatson schema = self.read_option('schema', None) self.odo_uri = self.read_option('odo_uri',", "import pandas as pd with gzip.open(dump_path) as f: lines =", "- odo_uri (str) ODO valid destination uri. \"\"\" requirements =", "for line in lines) pf = pd.DataFrame(flattened_lines, columns=self.flatson.fieldnames) dshape =", "import BaseWriter class ODOWriter(BaseWriter): \"\"\" Writes items to a odo" ]
[ "via: >>> from x7.shell import *; maketests('x7.sample.needs_tests') \"\"\" def needs_a_test(a,", ">>> from x7.shell import *; maketests('x7.sample.needs_tests') \"\"\" def needs_a_test(a, b):", "<reponame>gribbg/x7-geom \"\"\" Simple file to validate that maketests is working.", "from x7.shell import *; maketests('x7.sample.needs_tests') \"\"\" def needs_a_test(a, b): return", "maketests via: >>> from x7.shell import *; maketests('x7.sample.needs_tests') \"\"\" def", "maketests is working. Call maketests via: >>> from x7.shell import", "file to validate that maketests is working. Call maketests via:", "x7.shell import *; maketests('x7.sample.needs_tests') \"\"\" def needs_a_test(a, b): return a+b", "Simple file to validate that maketests is working. Call maketests", "that maketests is working. Call maketests via: >>> from x7.shell", "Call maketests via: >>> from x7.shell import *; maketests('x7.sample.needs_tests') \"\"\"", "working. Call maketests via: >>> from x7.shell import *; maketests('x7.sample.needs_tests')", "validate that maketests is working. Call maketests via: >>> from", "is working. Call maketests via: >>> from x7.shell import *;", "to validate that maketests is working. Call maketests via: >>>", "\"\"\" Simple file to validate that maketests is working. Call" ]
[ "else: IV = bytes(IV0s[nz]) hash.update(IV) # needs to be copied", "to copy object otherwise the originally pointed version gets updated!", "IV0s = [658678, 6785697, 254376, 67856, 1432543, 786, 124345, 5443654]", "file the algorithm here does the full padding so the", "every hash output is exactly used 2 times if the", "are STORTED # and the entire series of initial hashes", "counter last_counter = counter # determine the highest non-zero bit", "known initialisation vectors (IV) in a manner that the same", "# KEY DATA STRUCTURES' INTERPRETATION # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IV0s = [658678,", "the algorithms presented here, although # in this file the", "== 1: IV = bytes(IV1s[i]) else: IV = bytes(IV0s[i]) hash.update(IV)", "takes around a # 100 measurements # this concept is", "with a # correspong IV of the sets 0 and", "<EMAIL> Last updated: 29.01.2021 ''' # the concept is to", "LSB # initialize hash = hashlib.sha3_512() # looping from MSB", "# addressing \"MSB\" of IVs at first, \"LSB\" at last!", "1: IV = bytes(IV1s[i]) else: IV = bytes(IV0s[i]) hash.update(IV) print(hash.hexdigest())", "# the concept is to generate a side channel resistant", "index 0 of hash_copies changes the most frequently ie. according", "and the data comes only afterwards... import hashlib # KEY", "with the secret key then with a # correspong IV", "Last updated: 29.01.2021 ''' # the concept is to generate", "only fewer values need to be recomputed, those whose corresponding", "if (counter>>i) & 1 == 1: IV = bytes(IV1s[i]) else:", "a manner that the same input # is not hashed", "based on # one secret key and several openly known", "hash_copies = [None for i in range(len(IV0s))] # LSB ...", "# the working solution is going to based on the", "then with a # correspong IV of the sets 0", "... MSB hash_copies = [None for i in range(len(IV0s))] #", "for i in range(len(IV0s)-1, -1, -1): if (counter>>i) & 1", "addressing \"MSB\" of IVs at first, \"LSB\" at last! 
IV", "to generate a side channel resistant initialisation of the hashing", "# 100 measurements # this concept is achieved by taking", "comes only afterwards... import hashlib # KEY DATA STRUCTURES' INTERPRETATION", "have changed, down until LSB # initialize hash = hashlib.sha3_512()", "this way every hash output is exactly used 2 times", "ON THE NATURE OF BINARY INCREMENTATION: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # only", "is fully filled with IVs and the data comes only", "!= 0: # nz=0 is the initial condition, nothing needs", "starting at MSB - # is 0 or 1; this", "last_counter ^ counter last_counter = counter # determine the highest", "-= 1 if (counter>>nz) & 1 == 1: IV =", "whose corresponding counter bit didn't switch # have to copy", "in a manner that the same input # is not", "\"IV1s\" and compute a series of hashes starting with the", "is going to based on the algorithms presented here, although", "nz=0 is the initial condition, nothing needs to be done", "only afterwards... import hashlib # KEY DATA STRUCTURES' INTERPRETATION #", "= [2565, 256658, 985, 218996, 255, 685652, 28552, 3256565] #", "counter's corresponding bit - starting at MSB - # is", "more than two times, which is hopefully not sufficient for", "= hash.copy() # compute last_counter = 0 for counter in", "for i in range(len(IV0s))] # LSB ... 
MSB # counter", "0 while IV_mask > 0: IV_mask >>= 1 nz +=", "generate a side channel resistant initialisation of the hashing function", "fully recomputed only such whose corresponding # counter bits has", "a side channel resistant initialisation of the hashing function based", "compute only the remaining hashes while nz != 0: #", "here does the full padding so the results won't equal", "1 # initialize hash to the last value whose corresponding", "whose corresponding # counter bits has changed and all the", "<EMAIL> <EMAIL> Last updated: 29.01.2021 ''' # the concept is", "i in range(len(IV0s)-1, -1, -1): # addressing \"MSB\" of IVs", "# this concept is achieved by taking a counter of", "range(11): IV_mask = last_counter ^ counter last_counter = counter #", "# the CHI function in a practically noiseless computer simulation", "correspong IV of the sets 0 and 1 based on", "several openly known initialisation vectors (IV) in a manner that", "entire series of initial hashes are NOT fully recomputed only", "the intermediate values are STORTED # and the entire series", "where the rate is fully filled with IVs and the", "INDIVIDUALLY # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ for counter in range(11): hash = hashlib.sha3_512()", "most frequently ie. according to counter's LSB hash_copies[i] = hash.copy()", "copies' entire table after each computation #for hashes in hash_copies:", "intermediate values are STORTED # and the entire series of", "based on whether the counter's corresponding bit - starting at", "29.01.2021 ''' # the concept is to generate a side", "to the last value whose corresponding counter bit didn't switch", "# LSB ... 
MSB hash_copies = [None for i in", "on # the CHI function in a practically noiseless computer", "down to the LSB of the counter # the working", "by taking a counter of a certain bitlength, and twice", "0: # nz=0 is the initial condition, nothing needs to", "(counter>>i) & 1 == 1: IV = bytes(IV1s[i]) else: IV", "hash copies' entire table after each computation #for hashes in", "IVs and the data comes only afterwards... import hashlib #", "initialize hash to the last value whose corresponding counter bit", "of consecutive measurements for a successful attack on # the", "= hashlib.sha3_512() # looping from MSB to LSB for i", "[None for i in range(len(IV0s))] # LSB ... MSB #", "# counter # MSB ... LSB # COMPUTING HASHES FOR", "certain bitlength, and twice as many IVs as bits in", "a practically noiseless computer simulation (see \"chi_cpa.py\") takes around a", "and \"IV1s\" and compute a series of hashes starting with", "-1): # addressing \"MSB\" of IVs at first, \"LSB\" at", "hash.update(IV) # index 0 of hash_copies changes the most frequently", "IV = bytes(IV0s[i]) hash.update(IV) print(hash.hexdigest()) print() # COMPUTING HASHES BASED", "looping from MSB to LSB in counter too for i", "the CHI function in a practically noiseless computer simulation (see", "key then with a # correspong IV of the sets", "until LSB # initialize hash = hashlib.sha3_512() # looping from", "0 means there was no change nz = 0 while", "[658678, 6785697, 254376, 67856, 1432543, 786, 124345, 5443654] IV1s =", "255, 685652, 28552, 3256565] # LSB ... MSB hash_copies =", "# LSB ... MSB # counter # MSB ... 
LSB", "only such whose corresponding # counter bits has changed and", "== 1: IV = bytes(IV1s[nz]) else: IV = bytes(IV0s[nz]) hash.update(IV)", "hash.update(IV) print(hash.hexdigest()) print() # COMPUTING HASHES BASED ON THE NATURE", "corresponding counter bit didn't switch # have to copy object", "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IV0s = [658678, 6785697, 254376, 67856, 1432543, 786, 124345,", "to based on the algorithms presented here, although # in", "# bits have changed, down until LSB # initialize hash", "again because of object orientation hash_copies[nz] = hash.copy() # showing", "the secret key then with a # correspong IV of", "range(len(IV0s)-1, -1, -1): if (counter>>i) & 1 == 1: IV", "hashes are NOT fully recomputed only such whose corresponding #", "algorithms presented here, although # in this file the algorithm", "INCREMENTATION: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # only fewer values need to be", "changed, down until LSB # initialize hash = hashlib.sha3_512() #", "while nz != 0: # nz=0 is the initial condition,", "corresponding # counter bits has changed and all the next", "counter of a certain bitlength, and twice as many IVs", "originally pointed version gets updated! 
hash = hash_copies[nz].copy() # LSB", "print() # COMPUTING HASHES BASED ON THE NATURE OF BINARY", "down until LSB # initialize hash = hashlib.sha3_512() # looping", "hash.copy() # showing the hash copies' entire table after each", "# have to copy object otherwise the originally pointed version", "hash.update(IV) # needs to be copied again because of object", ">>= 1 nz += 1 # initialize hash to the", "a counter of a certain bitlength, and twice as many", "or 1; this way every hash output is exactly used", "practically noiseless computer simulation (see \"chi_cpa.py\") takes around a #", "of object orientation hash_copies[nz] = hash.copy() # showing the hash", "fewer values need to be recomputed, those whose corresponding #", "the initial condition, nothing needs to be done nz -=", "EVERY COUNTER VALUE INDIVIDUALLY # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ for counter in range(11):", "1; this way every hash output is exactly used 2", "does the full padding so the results won't equal to", "LSB # COMPUTING HASHES FOR EVERY COUNTER VALUE INDIVIDUALLY #", "IV_mask, LSB is 1, 0 means there was no change", "for a successful attack on # the CHI function in", "orientation hash_copies[nz] = hash.copy() # showing the hash copies' entire", "results won't equal to # a scheme where the rate", "^ counter last_counter = counter # determine the highest non-zero", "INTERPRETATION # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IV0s = [658678, 6785697, 254376, 67856, 1432543,", "padding so the results won't equal to # a scheme", "CHI function in a practically noiseless computer simulation (see \"chi_cpa.py\")", "is 0 or 1; this way every hash output is", "100 measurements # this concept is achieved by taking a", "input # is not hashed too more than two times,", "# correspong IV of the sets 0 and 1 based", "to # a scheme where the rate is fully filled", "and twice as many IVs as bits in # the", "values need to be recomputed, those whose corresponding # bits", "# 
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ for counter in range(11): hash = hashlib.sha3_512() #", "the counter: \"IV0s\" and \"IV1s\" and compute a series of", "there was no change nz = 0 while IV_mask >", "the remaining hashes while nz != 0: # nz=0 is", "124345, 5443654] IV1s = [2565, 256658, 985, 218996, 255, 685652,", "= counter # determine the highest non-zero bit of IV_mask,", "measurements based computations: the number of consecutive measurements for a", "# is 0 or 1; this way every hash output", "hash_copies[nz].copy() # LSB is index 0 # compute only the", "on whether the counter's corresponding bit - starting at MSB", "& 1 == 1: IV = bytes(IV1s[nz]) else: IV =", "IV_mask >>= 1 nz += 1 # initialize hash to", "range(len(IV0s)-1, -1, -1): # addressing \"MSB\" of IVs at first,", "1 nz += 1 # initialize hash to the last", "a # correspong IV of the sets 0 and 1", "counter bit didn't switch # have to copy object otherwise", "FOR EVERY COUNTER VALUE INDIVIDUALLY # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ for counter in", "last_counter = counter # determine the highest non-zero bit of", "be copied again because of object orientation hash_copies[nz] = hash.copy()", "need to be recomputed, those whose corresponding # bits have", "STORTED # and the entire series of initial hashes are", "last_counter = 0 for counter in range(11): IV_mask = last_counter", "# initialize hash to the last value whose corresponding counter", "the results won't equal to # a scheme where the", "resistant initialisation of the hashing function based on # one", "bytes(IV1s[nz]) else: IV = bytes(IV0s[nz]) hash.update(IV) # needs to be", "function in a practically noiseless computer simulation (see \"chi_cpa.py\") takes", "= hashlib.sha3_512() # looping from MSB to LSB in counter", "# COMPUTING HASHES BASED ON THE NATURE OF BINARY INCREMENTATION:", "# compute last_counter = 0 for counter in range(11): IV_mask", "corresponding # bits have changed, down until LSB # 
initialize", "values are STORTED # and the entire series of initial", "BASED ON THE NATURE OF BINARY INCREMENTATION: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #", "whose corresponding # bits have changed, down until LSB #", "# nz=0 is the initial condition, nothing needs to be", "to the LSB of the counter # the working solution", "is achieved by taking a counter of a certain bitlength,", "5443654] IV1s = [2565, 256658, 985, 218996, 255, 685652, 28552,", "means there was no change nz = 0 while IV_mask", "NATURE OF BINARY INCREMENTATION: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # only fewer values", "= bytes(IV1s[nz]) else: IV = bytes(IV0s[nz]) hash.update(IV) # needs to", "and compute a series of hashes starting with the secret", "counter's LSB hash_copies[i] = hash.copy() # compute last_counter = 0", "28552, 3256565] # LSB ... MSB hash_copies = [None for", "after each computation #for hashes in hash_copies: # print(hashes.hexdigest()) print(hash_copies[0].hexdigest())", "hashes while nz != 0: # nz=0 is the initial", "bit didn't switch # have to copy object otherwise the", "index 0 # compute only the remaining hashes while nz", "data comes only afterwards... import hashlib # KEY DATA STRUCTURES'", "MSB - # is 0 or 1; this way every", "consecutive measurements for a successful attack on # the CHI", "bit of IV_mask, LSB is 1, 0 means there was", "IVs at first, \"LSB\" at last! 
IV = bytes(IV0s[i]) hash.update(IV)", "of a certain bitlength, and twice as many IVs as", "the entire series of initial hashes are NOT fully recomputed", "too more than two times, which is hopefully not sufficient", "copied again because of object orientation hash_copies[nz] = hash.copy() #", "by: <NAME> <EMAIL> <EMAIL> Last updated: 29.01.2021 ''' # the", "and 1 based on whether the counter's corresponding bit -", "# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # only fewer values need to be recomputed,", "-1, -1): # addressing \"MSB\" of IVs at first, \"LSB\"", "LSB is 1, 0 means there was no change nz", "series of initial hashes are NOT fully recomputed only such", "manner that the same input # is not hashed too", "\"MSB\" of IVs at first, \"LSB\" at last! IV =", "twice as many IVs as bits in # the counter:", "# compute only the remaining hashes while nz != 0:", "a series of hashes starting with the secret key then", "hashes starting with the secret key then with a #", "hashlib.sha3_512() # looping from MSB to LSB for i in", "filled with IVs and the data comes only afterwards... import", "0 and 1 based on whether the counter's corresponding bit", "one secret key and several openly known initialisation vectors (IV)", "vectors (IV) in a manner that the same input #", "counter: \"IV0s\" and \"IV1s\" and compute a series of hashes", "needs to be copied again because of object orientation hash_copies[nz]", "side channel # measurements based computations: the number of consecutive", "of hashes starting with the secret key then with a", "initialisation of the hashing function based on # one secret", "version gets updated! 
hash = hash_copies[nz].copy() # LSB is index", "so the results won't equal to # a scheme where", "is hopefully not sufficient for side channel # measurements based", "IV = bytes(IV1s[i]) else: IV = bytes(IV0s[i]) hash.update(IV) print(hash.hexdigest()) print()", "Written by: <NAME> <EMAIL> <EMAIL> Last updated: 29.01.2021 ''' #", "solution is going to based on the algorithms presented here,", "those whose corresponding # bits have changed, down until LSB", "be recomputed, those whose corresponding # bits have changed, down", "looping from MSB to LSB for i in range(len(IV0s)-1, -1,", "= [None for i in range(len(IV0s))] # LSB ... MSB", "working solution is going to based on the algorithms presented", "hash_copies[nz] = hash.copy() # showing the hash copies' entire table", "= 0 for counter in range(11): IV_mask = last_counter ^", "noiseless computer simulation (see \"chi_cpa.py\") takes around a # 100", "value whose corresponding counter bit didn't switch # have to", "secret key then with a # correspong IV of the", "was no change nz = 0 while IV_mask > 0:", "bytes(IV0s[i]) hash.update(IV) # index 0 of hash_copies changes the most", "the same input # is not hashed too more than", "nz != 0: # nz=0 is the initial condition, nothing", "STRUCTURES' INTERPRETATION # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IV0s = [658678, 6785697, 254376, 67856,", "here, although # in this file the algorithm here does", "BINARY INCREMENTATION: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # only fewer values need to", "side channel resistant initialisation of the hashing function based on", "MSB ... LSB # COMPUTING HASHES FOR EVERY COUNTER VALUE", "IV_mask = last_counter ^ counter last_counter = counter # determine", "starting with the secret key then with a # correspong", "initial hashes are NOT fully recomputed only such whose corresponding", "0 # compute only the remaining hashes while nz !=", "at last! 
IV = bytes(IV0s[i]) hash.update(IV) # index 0 of", "hash = hash_copies[nz].copy() # LSB is index 0 # compute", "bytes(IV0s[nz]) hash.update(IV) # needs to be copied again because of", "too for i in range(len(IV0s)-1, -1, -1): if (counter>>i) &", "measurements # this concept is achieved by taking a counter", "# looping from MSB to LSB in counter too for", "recomputed, those whose corresponding # bits have changed, down until", "to be done nz -= 1 if (counter>>nz) & 1", "according to counter's LSB hash_copies[i] = hash.copy() # compute last_counter", "times, which is hopefully not sufficient for side channel #", "at first, \"LSB\" at last! IV = bytes(IV0s[i]) hash.update(IV) #", "based on the algorithms presented here, although # in this", "bytes(IV1s[i]) else: IV = bytes(IV0s[i]) hash.update(IV) print(hash.hexdigest()) print() # COMPUTING", "the sets 0 and 1 based on whether the counter's", "# measurements based computations: the number of consecutive measurements for", "of IV_mask, LSB is 1, 0 means there was no", "# in this file the algorithm here does the full", "+= 1 # initialize hash to the last value whose", "all the next levels too down to the LSB of", "used 2 times if the intermediate values are STORTED #", "computer simulation (see \"chi_cpa.py\") takes around a # 100 measurements", "many IVs as bits in # the counter: \"IV0s\" and", "IV = bytes(IV0s[i]) hash.update(IV) # index 0 of hash_copies changes", "on # one secret key and several openly known initialisation", "first, \"LSB\" at last! 
IV = bytes(IV0s[i]) hash.update(IV) # index", "if (counter>>nz) & 1 == 1: IV = bytes(IV1s[nz]) else:", "IV of the sets 0 and 1 based on whether", "equal to # a scheme where the rate is fully", "hash = hashlib.sha3_512() # looping from MSB to LSB for", "the number of consecutive measurements for a successful attack on", "1 based on whether the counter's corresponding bit - starting", "67856, 1432543, 786, 124345, 5443654] IV1s = [2565, 256658, 985,", "the counter's corresponding bit - starting at MSB - #", "this concept is achieved by taking a counter of a", "print(hash.hexdigest()) print() # COMPUTING HASHES BASED ON THE NATURE OF", "in a practically noiseless computer simulation (see \"chi_cpa.py\") takes around", "switch # have to copy object otherwise the originally pointed", "changed and all the next levels too down to the", "only the remaining hashes while nz != 0: # nz=0", "256658, 985, 218996, 255, 685652, 28552, 3256565] # LSB ...", "from MSB to LSB in counter too for i in", "in range(11): IV_mask = last_counter ^ counter last_counter = counter", "bits in # the counter: \"IV0s\" and \"IV1s\" and compute", "-1, -1): if (counter>>i) & 1 == 1: IV =", "afterwards... import hashlib # KEY DATA STRUCTURES' INTERPRETATION # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", "# counter bits has changed and all the next levels", "scheme where the rate is fully filled with IVs and", "MSB hash_copies = [None for i in range(len(IV0s))] # LSB", "pointed version gets updated! hash = hash_copies[nz].copy() # LSB is", "# MSB ... LSB # COMPUTING HASHES FOR EVERY COUNTER", "compute a series of hashes starting with the secret key", "= bytes(IV1s[i]) else: IV = bytes(IV0s[i]) hash.update(IV) print(hash.hexdigest()) print() #", "which is hopefully not sufficient for side channel # measurements", "initialize hash = hashlib.sha3_512() # looping from MSB to LSB", "i in range(len(IV0s))] # LSB ... 
MSB # counter #", "is the initial condition, nothing needs to be done nz", "to LSB for i in range(len(IV0s)-1, -1, -1): # addressing", "\"chi_cpa.py\") takes around a # 100 measurements # this concept", "counter bits has changed and all the next levels too", "= hash_copies[nz].copy() # LSB is index 0 # compute only", "IV_mask > 0: IV_mask >>= 1 nz += 1 #", "as many IVs as bits in # the counter: \"IV0s\"", "two times, which is hopefully not sufficient for side channel", "is to generate a side channel resistant initialisation of the", "series of hashes starting with the secret key then with", "the data comes only afterwards... import hashlib # KEY DATA", "in range(len(IV0s)-1, -1, -1): if (counter>>i) & 1 == 1:", "# looping from MSB to LSB for i in range(len(IV0s)-1,", "such whose corresponding # counter bits has changed and all", "hash_copies changes the most frequently ie. according to counter's LSB", "object orientation hash_copies[nz] = hash.copy() # showing the hash copies'", "nz = 0 while IV_mask > 0: IV_mask >>= 1", "the full padding so the results won't equal to #", "won't equal to # a scheme where the rate is", "output is exactly used 2 times if the intermediate values", "# determine the highest non-zero bit of IV_mask, LSB is", "hash = hashlib.sha3_512() # looping from MSB to LSB in", "in range(len(IV0s))] # LSB ... MSB # counter # MSB", "hash_copies[i] = hash.copy() # compute last_counter = 0 for counter", "a successful attack on # the CHI function in a", "\"LSB\" at last! IV = bytes(IV0s[i]) hash.update(IV) # index 0", "counter in range(11): IV_mask = last_counter ^ counter last_counter =", "IV = bytes(IV0s[nz]) hash.update(IV) # needs to be copied again", "IV1s = [2565, 256658, 985, 218996, 255, 685652, 28552, 3256565]", "COMPUTING HASHES BASED ON THE NATURE OF BINARY INCREMENTATION: #", "# is not hashed too more than two times, which", "MSB # counter # MSB ... 
LSB # COMPUTING HASHES", "# showing the hash copies' entire table after each computation", "counter in range(11): hash = hashlib.sha3_512() # looping from MSB", "and the entire series of initial hashes are NOT fully", "1 if (counter>>nz) & 1 == 1: IV = bytes(IV1s[nz])", "while IV_mask > 0: IV_mask >>= 1 nz += 1", "key and several openly known initialisation vectors (IV) in a", "of hash_copies changes the most frequently ie. according to counter's", "hashlib # KEY DATA STRUCTURES' INTERPRETATION # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IV0s =", "i in range(len(IV0s)-1, -1, -1): if (counter>>i) & 1 ==", "786, 124345, 5443654] IV1s = [2565, 256658, 985, 218996, 255,", "algorithm here does the full padding so the results won't", "times if the intermediate values are STORTED # and the", "218996, 255, 685652, 28552, 3256565] # LSB ... MSB hash_copies", "showing the hash copies' entire table after each computation #for", "from MSB to LSB for i in range(len(IV0s)-1, -1, -1):", "bytes(IV0s[i]) hash.update(IV) print(hash.hexdigest()) print() # COMPUTING HASHES BASED ON THE", "because of object orientation hash_copies[nz] = hash.copy() # showing the", "no change nz = 0 while IV_mask > 0: IV_mask", "to be recomputed, those whose corresponding # bits have changed,", "= last_counter ^ counter last_counter = counter # determine the", "of IVs at first, \"LSB\" at last! IV = bytes(IV0s[i])", "985, 218996, 255, 685652, 28552, 3256565] # LSB ... MSB", "rate is fully filled with IVs and the data comes", "not hashed too more than two times, which is hopefully", "VALUE INDIVIDUALLY # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ for counter in range(11): hash =", "# initialize hash = hashlib.sha3_512() # looping from MSB to", "nz += 1 # initialize hash to the last value", "counter # MSB ... 
LSB # COMPUTING HASHES FOR EVERY", "[2565, 256658, 985, 218996, 255, 685652, 28552, 3256565] # LSB", "a certain bitlength, and twice as many IVs as bits", "hash output is exactly used 2 times if the intermediate", "1 == 1: IV = bytes(IV1s[i]) else: IV = bytes(IV0s[i])", "the last value whose corresponding counter bit didn't switch #", "1: IV = bytes(IV1s[nz]) else: IV = bytes(IV0s[nz]) hash.update(IV) #", "is index 0 # compute only the remaining hashes while", "- starting at MSB - # is 0 or 1;", "for counter in range(11): IV_mask = last_counter ^ counter last_counter", "levels too down to the LSB of the counter #", "# only fewer values need to be recomputed, those whose", "that the same input # is not hashed too more", "= [658678, 6785697, 254376, 67856, 1432543, 786, 124345, 5443654] IV1s", "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ for counter in range(11): hash = hashlib.sha3_512() # looping", "the concept is to generate a side channel resistant initialisation", "determine the highest non-zero bit of IV_mask, LSB is 1,", "LSB ... MSB # counter # MSB ... LSB #", "be done nz -= 1 if (counter>>nz) & 1 ==", "as bits in # the counter: \"IV0s\" and \"IV1s\" and", "LSB for i in range(len(IV0s)-1, -1, -1): # addressing \"MSB\"", "for i in range(len(IV0s)-1, -1, -1): # addressing \"MSB\" of", "to LSB in counter too for i in range(len(IV0s)-1, -1,", "MSB to LSB for i in range(len(IV0s)-1, -1, -1): #", "in # the counter: \"IV0s\" and \"IV1s\" and compute a", "# one secret key and several openly known initialisation vectors", "change nz = 0 while IV_mask > 0: IV_mask >>=", "in range(11): hash = hashlib.sha3_512() # looping from MSB to", "... 
LSB # COMPUTING HASHES FOR EVERY COUNTER VALUE INDIVIDUALLY", "the working solution is going to based on the algorithms", "in this file the algorithm here does the full padding", "around a # 100 measurements # this concept is achieved", "didn't switch # have to copy object otherwise the originally", "in range(len(IV0s)-1, -1, -1): # addressing \"MSB\" of IVs at", "bit - starting at MSB - # is 0 or", "# and the entire series of initial hashes are NOT", "LSB in counter too for i in range(len(IV0s)-1, -1, -1):", "-1): if (counter>>i) & 1 == 1: IV = bytes(IV1s[i])", "else: IV = bytes(IV0s[i]) hash.update(IV) print(hash.hexdigest()) print() # COMPUTING HASHES", "have to copy object otherwise the originally pointed version gets", "range(len(IV0s))] # LSB ... MSB # counter # MSB ...", "changes the most frequently ie. according to counter's LSB hash_copies[i]", "secret key and several openly known initialisation vectors (IV) in", "# COMPUTING HASHES FOR EVERY COUNTER VALUE INDIVIDUALLY # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", "number of consecutive measurements for a successful attack on #", "for counter in range(11): hash = hashlib.sha3_512() # looping from", "is exactly used 2 times if the intermediate values are", "# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IV0s = [658678, 6785697, 254376, 67856, 1432543, 786,", "at MSB - # is 0 or 1; this way", "exactly used 2 times if the intermediate values are STORTED", "next levels too down to the LSB of the counter", "HASHES FOR EVERY COUNTER VALUE INDIVIDUALLY # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ for counter", "IVs as bits in # the counter: \"IV0s\" and \"IV1s\"", "frequently ie. according to counter's LSB hash_copies[i] = hash.copy() #", "= bytes(IV0s[nz]) hash.update(IV) # needs to be copied again because", "if the intermediate values are STORTED # and the entire", "otherwise the originally pointed version gets updated! 
hash = hash_copies[nz].copy()", "in counter too for i in range(len(IV0s)-1, -1, -1): if", "the next levels too down to the LSB of the", "needs to be done nz -= 1 if (counter>>nz) &", "channel resistant initialisation of the hashing function based on #", "counter # the working solution is going to based on", "sets 0 and 1 based on whether the counter's corresponding", "presented here, although # in this file the algorithm here", "the highest non-zero bit of IV_mask, LSB is 1, 0", "= bytes(IV0s[i]) hash.update(IV) # index 0 of hash_copies changes the", "0 or 1; this way every hash output is exactly", "nz -= 1 if (counter>>nz) & 1 == 1: IV", "computations: the number of consecutive measurements for a successful attack", "the rate is fully filled with IVs and the data", "685652, 28552, 3256565] # LSB ... MSB hash_copies = [None", "nothing needs to be done nz -= 1 if (counter>>nz)", "# LSB is index 0 # compute only the remaining", "COUNTER VALUE INDIVIDUALLY # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ for counter in range(11): hash", "= hash.copy() # showing the hash copies' entire table after", "0 for counter in range(11): IV_mask = last_counter ^ counter", "achieved by taking a counter of a certain bitlength, and", "function based on # one secret key and several openly", "IV = bytes(IV1s[nz]) else: IV = bytes(IV0s[nz]) hash.update(IV) # needs", "bitlength, and twice as many IVs as bits in #", "of the sets 0 and 1 based on whether the", "254376, 67856, 1432543, 786, 124345, 5443654] IV1s = [2565, 256658,", "based computations: the number of consecutive measurements for a successful", "updated: 29.01.2021 ''' # the concept is to generate a", "# needs to be copied again because of object orientation", "not sufficient for side channel # measurements based computations: the", "updated! 
hash = hash_copies[nz].copy() # LSB is index 0 #", "counter # determine the highest non-zero bit of IV_mask, LSB", "concept is achieved by taking a counter of a certain", "taking a counter of a certain bitlength, and twice as", "fully filled with IVs and the data comes only afterwards...", "of the counter # the working solution is going to", "remaining hashes while nz != 0: # nz=0 is the", "to counter's LSB hash_copies[i] = hash.copy() # compute last_counter =", "of the hashing function based on # one secret key", "compute last_counter = 0 for counter in range(11): IV_mask =", "too down to the LSB of the counter # the", "last! IV = bytes(IV0s[i]) hash.update(IV) # index 0 of hash_copies", "simulation (see \"chi_cpa.py\") takes around a # 100 measurements #", "sufficient for side channel # measurements based computations: the number", "HASHES BASED ON THE NATURE OF BINARY INCREMENTATION: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", "hashing function based on # one secret key and several", "of initial hashes are NOT fully recomputed only such whose", "import hashlib # KEY DATA STRUCTURES' INTERPRETATION # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IV0s", "measurements for a successful attack on # the CHI function", "hash.copy() # compute last_counter = 0 for counter in range(11):", "with IVs and the data comes only afterwards... import hashlib", "# the counter: \"IV0s\" and \"IV1s\" and compute a series", "is not hashed too more than two times, which is", "hopefully not sufficient for side channel # measurements based computations:", "0: IV_mask >>= 1 nz += 1 # initialize hash", "KEY DATA STRUCTURES' INTERPRETATION # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IV0s = [658678, 6785697,", "this file the algorithm here does the full padding so", "condition, nothing needs to be done nz -= 1 if", "(IV) in a manner that the same input # is", "- # is 0 or 1; this way every hash", "gets updated! 
hash = hash_copies[nz].copy() # LSB is index 0", "is 1, 0 means there was no change nz =", "MSB to LSB in counter too for i in range(len(IV0s)-1,", "> 0: IV_mask >>= 1 nz += 1 # initialize", "''' Written by: <NAME> <EMAIL> <EMAIL> Last updated: 29.01.2021 '''", "ie. according to counter's LSB hash_copies[i] = hash.copy() # compute", "hash to the last value whose corresponding counter bit didn't", "and all the next levels too down to the LSB", "hashlib.sha3_512() # looping from MSB to LSB in counter too", "\"IV0s\" and \"IV1s\" and compute a series of hashes starting", "successful attack on # the CHI function in a practically", "0 of hash_copies changes the most frequently ie. according to", "1, 0 means there was no change nz = 0", "channel # measurements based computations: the number of consecutive measurements", "(counter>>nz) & 1 == 1: IV = bytes(IV1s[nz]) else: IV", "the most frequently ie. according to counter's LSB hash_copies[i] =", "OF BINARY INCREMENTATION: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # only fewer values need", "the counter # the working solution is going to based", "2 times if the intermediate values are STORTED # and", "entire table after each computation #for hashes in hash_copies: #", "(see \"chi_cpa.py\") takes around a # 100 measurements # this", "full padding so the results won't equal to # a", "concept is to generate a side channel resistant initialisation of", "3256565] # LSB ... 
MSB hash_copies = [None for i", "the algorithm here does the full padding so the results", "the LSB of the counter # the working solution is", "openly known initialisation vectors (IV) in a manner that the", "LSB hash_copies[i] = hash.copy() # compute last_counter = 0 for", "LSB is index 0 # compute only the remaining hashes", "COMPUTING HASHES FOR EVERY COUNTER VALUE INDIVIDUALLY # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ for", "counter too for i in range(len(IV0s)-1, -1, -1): if (counter>>i)", "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # only fewer values need to be recomputed, those", "copy object otherwise the originally pointed version gets updated! hash", "to be copied again because of object orientation hash_copies[nz] =", "for side channel # measurements based computations: the number of", "DATA STRUCTURES' INTERPRETATION # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ IV0s = [658678, 6785697, 254376,", "the hash copies' entire table after each computation #for hashes", "has changed and all the next levels too down to", "a scheme where the rate is fully filled with IVs", "1432543, 786, 124345, 5443654] IV1s = [2565, 256658, 985, 218996,", "table after each computation #for hashes in hash_copies: # print(hashes.hexdigest())", "initial condition, nothing needs to be done nz -= 1", "last value whose corresponding counter bit didn't switch # have", "highest non-zero bit of IV_mask, LSB is 1, 0 means", "are NOT fully recomputed only such whose corresponding # counter", "LSB of the counter # the working solution is going", "6785697, 254376, 67856, 1432543, 786, 124345, 5443654] IV1s = [2565,", "NOT fully recomputed only such whose corresponding # counter bits", "a # 100 measurements # this concept is achieved by", "LSB ... 
MSB hash_copies = [None for i in range(len(IV0s))]", "whether the counter's corresponding bit - starting at MSB -", "range(11): hash = hashlib.sha3_512() # looping from MSB to LSB", "1 == 1: IV = bytes(IV1s[nz]) else: IV = bytes(IV0s[nz])", "''' # the concept is to generate a side channel", "going to based on the algorithms presented here, although #", "hashed too more than two times, which is hopefully not", "the originally pointed version gets updated! hash = hash_copies[nz].copy() #", "THE NATURE OF BINARY INCREMENTATION: # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # only fewer", "corresponding bit - starting at MSB - # is 0", "non-zero bit of IV_mask, LSB is 1, 0 means there", "attack on # the CHI function in a practically noiseless", "although # in this file the algorithm here does the", "bits has changed and all the next levels too down", "done nz -= 1 if (counter>>nz) & 1 == 1:", "# index 0 of hash_copies changes the most frequently ie.", "& 1 == 1: IV = bytes(IV1s[i]) else: IV =", "same input # is not hashed too more than two", "than two times, which is hopefully not sufficient for side", "<NAME> <EMAIL> <EMAIL> Last updated: 29.01.2021 ''' # the concept", "and several openly known initialisation vectors (IV) in a manner", "way every hash output is exactly used 2 times if", "initialisation vectors (IV) in a manner that the same input", "bits have changed, down until LSB # initialize hash =", "on the algorithms presented here, although # in this file", "... MSB # counter # MSB ... LSB # COMPUTING", "the hashing function based on # one secret key and", "= 0 while IV_mask > 0: IV_mask >>= 1 nz", "recomputed only such whose corresponding # counter bits has changed", "# a scheme where the rate is fully filled with", "object otherwise the originally pointed version gets updated! hash =", "= bytes(IV0s[i]) hash.update(IV) print(hash.hexdigest()) print() # COMPUTING HASHES BASED ON" ]
[ "list, goal_node_id: int): \"\"\" Creates an ordered route list from", "failed, add the last node that # passed to the", "c in reversed_stack: if c.to_node.id == next_node: r.append(c.to_node.id) r.append(c.from_node.id) next_node", ":param route_stack: All routes found until goal :param goal_node: int", "so return if len(p) == 2: return p # Compile", "are in the input path # We start at 2,", "A ordered list from start to goal \"\"\" r =", "used by the search algorithm, or when needed :) Pretty", "1 # We’ve reached the end of the input path,", "output and return it output += p[len(p)-1] return output def", "to traverse to the goal. :param route_stack: All routes found", "< len(p)-1: # Do the ray cast if not ray_clear(output[len(output)-1],", "then # we can’t smooth it, so return if len(p)", "int): \"\"\" Creates an ordered route list from start to", "All routes found until goal :param goal_node: int ID of", "(I hope) For more information see the examples and tests", "i = 2 # Loop until we find the last", "# we can’t smooth it, so return if len(p) ==", "len(p) == 2: return p # Compile an output path", "<NAME> A simple bedirectional graph with A* and breadth-first pathfinding.", "next_node = goal_node_id reversed_stack = reversed(route_stack) for c in reversed_stack:", "track of where we are in the input path #", "until goal :param goal_node: int ID of the goal node", "when needed :) Pretty self explainatory (I hope) For more", "path, add the # end node to the output and", "two adjacent # nodes will pass the ray cast i", "p[len(p)-1] return output def clean_route_list(route_stack: list, goal_node_id: int): \"\"\" Creates", "from start to goal \"\"\" r = [] next_node =", "the input path, add the # end node to the", "goal_node_id: int): \"\"\" Creates an ordered route list from start", "goal. 
:param route_stack: All routes found until goal :param goal_node:", "the path is only two nodes long, then # we", "self explainatory (I hope) For more information see the examples", "not ray_clear(output[len(output)-1], p[i]): # The ray text failed, add the", "output def clean_route_list(route_stack: list, goal_node_id: int): \"\"\" Creates an ordered", "we are in the input path # We start at", "Utils are either used by the search algorithm, or when", "= reversed(route_stack) for c in reversed_stack: if c.to_node.id == next_node:", "the goal node :return: list A ordered list from start", "the ray cast i = 2 # Loop until we", "return p # Compile an output path output = [p[0]]", "+= p[i-1] # Consider the next node i += 1", "search algorithm, or when needed :) Pretty self explainatory (I", "# Keep track of where we are in the input", "end of the input path, add the # end node", ":param goal_node: int ID of the goal node :return: list", "the input path # We start at 2, because we", "the output list output += p[i-1] # Consider the next", "goal_node: int ID of the goal node :return: list A", "explainatory (I hope) For more information see the examples and", "ray cast if not ray_clear(output[len(output)-1], p[i]): # The ray text", "needed :) Pretty self explainatory (I hope) For more information", "it output += p[len(p)-1] return output def clean_route_list(route_stack: list, goal_node_id:", "to the output list output += p[i-1] # Consider the", "hope) For more information see the examples and tests folder", "list output += p[i-1] # Consider the next node i", "return it output += p[len(p)-1] return output def clean_route_list(route_stack: list,", "examples and tests folder \"\"\" def smooth_path(p): # If the", "= [] next_node = goal_node_id reversed_stack = reversed(route_stack) for c", "# We’ve reached the end of the input path, add", "reversed_stack: if c.to_node.id == next_node: r.append(c.to_node.id) r.append(c.from_node.id) next_node = c.from_node.id", "ray cast i = 2 
# Loop until we find", "text failed, add the last node that # passed to", "the search algorithm, or when needed :) Pretty self explainatory", "path # We start at 2, because we assume two", "2: return p # Compile an output path output =", "add the last node that # passed to the output", "all node ids needed to traverse to the goal. :param", "the output and return it output += p[len(p)-1] return output", "the goal. :param route_stack: All routes found until goal :param", "pass the ray cast i = 2 # Loop until", "of the goal node :return: list A ordered list from", "r = [] next_node = goal_node_id reversed_stack = reversed(route_stack) for", "output += p[len(p)-1] return output def clean_route_list(route_stack: list, goal_node_id: int):", "with A* and breadth-first pathfinding. Utils are either used by", "input path # We start at 2, because we assume", "more information see the examples and tests folder \"\"\" def", "input while i < len(p)-1: # Do the ray cast", "smooth_path(p): # If the path is only two nodes long,", "while i < len(p)-1: # Do the ray cast if", "end node to the output and return it output +=", "route_stack: All routes found until goal :param goal_node: int ID", "Consider the next node i += 1 # We’ve reached", "= goal_node_id reversed_stack = reversed(route_stack) for c in reversed_stack: if", "path output = [p[0]] # Keep track of where we", "[] next_node = goal_node_id reversed_stack = reversed(route_stack) for c in", "the last node that # passed to the output list", "+= 1 # We’ve reached the end of the input", ":) Pretty self explainatory (I hope) For more information see", "# nodes will pass the ray cast i = 2", "only two nodes long, then # we can’t smooth it,", "traverse to the goal. 
:param route_stack: All routes found until", "# We start at 2, because we assume two adjacent", "last item in the input while i < len(p)-1: #", "Creates an ordered route list from start to finish with", "see the examples and tests folder \"\"\" def smooth_path(p): #", "~~~~~~~~~~~~~~~ <NAME> A simple bedirectional graph with A* and breadth-first", "are either used by the search algorithm, or when needed", "# Compile an output path output = [p[0]] # Keep", "at 2, because we assume two adjacent # nodes will", "if not ray_clear(output[len(output)-1], p[i]): # The ray text failed, add", "# If the path is only two nodes long, then", "the input while i < len(p)-1: # Do the ray", "ids needed to traverse to the goal. :param route_stack: All", "node :return: list A ordered list from start to goal", "A simple bedirectional graph with A* and breadth-first pathfinding. Utils", "p[i-1] # Consider the next node i += 1 #", "p[i]): # The ray text failed, add the last node", "found until goal :param goal_node: int ID of the goal", "return output def clean_route_list(route_stack: list, goal_node_id: int): \"\"\" Creates an", "output path output = [p[0]] # Keep track of where", "adjacent # nodes will pass the ray cast i =", "\"\"\" def smooth_path(p): # If the path is only two", "goal :param goal_node: int ID of the goal node :return:", "next node i += 1 # We’ve reached the end", "it, so return if len(p) == 2: return p #", "information see the examples and tests folder \"\"\" def smooth_path(p):", "We start at 2, because we assume two adjacent #", "cast if not ray_clear(output[len(output)-1], p[i]): # The ray text failed,", "to finish with all node ids needed to traverse to", "# Loop until we find the last item in the", "can’t smooth it, so return if len(p) == 2: return", "# Consider the next node i += 1 # We’ve", "the next node i += 1 # We’ve reached the", "goal_node_id reversed_stack = reversed(route_stack) for c in reversed_stack: if c.to_node.id", "the examples and tests folder 
\"\"\" def smooth_path(p): # If", "smooth it, so return if len(p) == 2: return p", "the # end node to the output and return it", "either used by the search algorithm, or when needed :)", "output = [p[0]] # Keep track of where we are", "item in the input while i < len(p)-1: # Do", "\"\"\" r = [] next_node = goal_node_id reversed_stack = reversed(route_stack)", "+= p[len(p)-1] return output def clean_route_list(route_stack: list, goal_node_id: int): \"\"\"", "start at 2, because we assume two adjacent # nodes", "Keep track of where we are in the input path", "finish with all node ids needed to traverse to the", "reversed_stack = reversed(route_stack) for c in reversed_stack: if c.to_node.id ==", "folder \"\"\" def smooth_path(p): # If the path is only", "the ray cast if not ray_clear(output[len(output)-1], p[i]): # The ray", "ordered list from start to goal \"\"\" r = []", "i += 1 # We’ve reached the end of the", "where we are in the input path # We start", "Pretty self explainatory (I hope) For more information see the", "and return it output += p[len(p)-1] return output def clean_route_list(route_stack:", "return if len(p) == 2: return p # Compile an", "Loop until we find the last item in the input", "needed to traverse to the goal. :param route_stack: All routes", "# Do the ray cast if not ray_clear(output[len(output)-1], p[i]): #", "node ids needed to traverse to the goal. :param route_stack:", "node i += 1 # We’ve reached the end of", "node that # passed to the output list output +=", "= 2 # Loop until we find the last item", "bedirectional graph with A* and breadth-first pathfinding. Utils are either", "int ID of the goal node :return: list A ordered", "breadth-first pathfinding. 
Utils are either used by the search algorithm,", "goal \"\"\" r = [] next_node = goal_node_id reversed_stack =", "nodes long, then # we can’t smooth it, so return", "Do the ray cast if not ray_clear(output[len(output)-1], p[i]): # The", "to the output and return it output += p[len(p)-1] return", "assume two adjacent # nodes will pass the ray cast", "until we find the last item in the input while", "list from start to finish with all node ids needed", "2, because we assume two adjacent # nodes will pass", "because we assume two adjacent # nodes will pass the", "goal node :return: list A ordered list from start to", "start to goal \"\"\" r = [] next_node = goal_node_id", "the last item in the input while i < len(p)-1:", "pathfinding. Utils are either used by the search algorithm, or", "or when needed :) Pretty self explainatory (I hope) For", "we find the last item in the input while i", "# The ray text failed, add the last node that", "tests folder \"\"\" def smooth_path(p): # If the path is", "algorithm, or when needed :) Pretty self explainatory (I hope)", "def smooth_path(p): # If the path is only two nodes", "in the input path # We start at 2, because", "p # Compile an output path output = [p[0]] #", "output += p[i-1] # Consider the next node i +=", "the end of the input path, add the # end", "route list from start to finish with all node ids", "ordered route list from start to finish with all node", "input path, add the # end node to the output", "if len(p) == 2: return p # Compile an output", "Compile an output path output = [p[0]] # Keep track", "cast i = 2 # Loop until we find the", "nodes will pass the ray cast i = 2 #", "node to the output and return it output += p[len(p)-1]", "== 2: return p # Compile an output path output", "output list output += p[i-1] # Consider the next node", "def clean_route_list(route_stack: list, goal_node_id: int): \"\"\" Creates an ordered route", "add the # end node to the output and return", "an ordered route list from start 
to finish with all", "reached the end of the input path, add the #", "routes found until goal :param goal_node: int ID of the", "list from start to goal \"\"\" r = [] next_node", "with all node ids needed to traverse to the goal.", "c.to_node.id == next_node: r.append(c.to_node.id) r.append(c.from_node.id) next_node = c.from_node.id return list(set(r))", "in reversed_stack: if c.to_node.id == next_node: r.append(c.to_node.id) r.append(c.from_node.id) next_node =", "last node that # passed to the output list output", "\"\"\" graphstar.utils ~~~~~~~~~~~~~~~ <NAME> A simple bedirectional graph with A*", "We’ve reached the end of the input path, add the", "from start to finish with all node ids needed to", "list A ordered list from start to goal \"\"\" r", "graphstar.utils ~~~~~~~~~~~~~~~ <NAME> A simple bedirectional graph with A* and", "in the input while i < len(p)-1: # Do the", "we can’t smooth it, so return if len(p) == 2:", "we assume two adjacent # nodes will pass the ray", "len(p)-1: # Do the ray cast if not ray_clear(output[len(output)-1], p[i]):", "for c in reversed_stack: if c.to_node.id == next_node: r.append(c.to_node.id) r.append(c.from_node.id)", "is only two nodes long, then # we can’t smooth", "reversed(route_stack) for c in reversed_stack: if c.to_node.id == next_node: r.append(c.to_node.id)", "long, then # we can’t smooth it, so return if", "= [p[0]] # Keep track of where we are in", "and breadth-first pathfinding. Utils are either used by the search", "of the input path, add the # end node to", "\"\"\" Creates an ordered route list from start to finish", "by the search algorithm, or when needed :) Pretty self", "clean_route_list(route_stack: list, goal_node_id: int): \"\"\" Creates an ordered route list", "start to finish with all node ids needed to traverse", "A* and breadth-first pathfinding. 
Utils are either used by the", "# end node to the output and return it output", "passed to the output list output += p[i-1] # Consider", "# passed to the output list output += p[i-1] #", "simple bedirectional graph with A* and breadth-first pathfinding. Utils are", "graph with A* and breadth-first pathfinding. Utils are either used", "that # passed to the output list output += p[i-1]", "to goal \"\"\" r = [] next_node = goal_node_id reversed_stack", "path is only two nodes long, then # we can’t", "two nodes long, then # we can’t smooth it, so", "an output path output = [p[0]] # Keep track of", "will pass the ray cast i = 2 # Loop", "2 # Loop until we find the last item in", ":return: list A ordered list from start to goal \"\"\"", "ID of the goal node :return: list A ordered list", "For more information see the examples and tests folder \"\"\"", "The ray text failed, add the last node that #", "and tests folder \"\"\" def smooth_path(p): # If the path", "If the path is only two nodes long, then #", "of where we are in the input path # We", "to the goal. :param route_stack: All routes found until goal", "ray_clear(output[len(output)-1], p[i]): # The ray text failed, add the last", "[p[0]] # Keep track of where we are in the", "i < len(p)-1: # Do the ray cast if not", "if c.to_node.id == next_node: r.append(c.to_node.id) r.append(c.from_node.id) next_node = c.from_node.id return", "find the last item in the input while i <", "ray text failed, add the last node that # passed" ]
[ "<filename>design_patterns/pubsub/simple_events/__init__.py class Event: def __init__(self): self.handlers = set() def subscribe(self,", "= set() def subscribe(self, func): self.handlers.add(func) def unsubscribe(self, func): self.handlers.remove(func)", "func): self.handlers.add(func) def unsubscribe(self, func): self.handlers.remove(func) def emit(self, *args): for", "def unsubscribe(self, func): self.handlers.remove(func) def emit(self, *args): for func in", "self.handlers.add(func) def unsubscribe(self, func): self.handlers.remove(func) def emit(self, *args): for func", "subscribe(self, func): self.handlers.add(func) def unsubscribe(self, func): self.handlers.remove(func) def emit(self, *args):", "Event: def __init__(self): self.handlers = set() def subscribe(self, func): self.handlers.add(func)", "class Event: def __init__(self): self.handlers = set() def subscribe(self, func):", "set() def subscribe(self, func): self.handlers.add(func) def unsubscribe(self, func): self.handlers.remove(func) def", "__init__(self): self.handlers = set() def subscribe(self, func): self.handlers.add(func) def unsubscribe(self,", "self.handlers = set() def subscribe(self, func): self.handlers.add(func) def unsubscribe(self, func):", "def subscribe(self, func): self.handlers.add(func) def unsubscribe(self, func): self.handlers.remove(func) def emit(self,", "func): self.handlers.remove(func) def emit(self, *args): for func in self.handlers: func(*args)", "def __init__(self): self.handlers = set() def subscribe(self, func): self.handlers.add(func) def", "unsubscribe(self, func): self.handlers.remove(func) def emit(self, *args): for func in self.handlers:" ]
[ "\"\"\" Check and return the type of an environment variable.", "environment variable. supported types: None Integer String @param env_var_name: environment", "type of an environment variable. supported types: None Integer String", "\"\"\" try: val = os.getenv(env_var_name) if val is None: return", "def check_env(env_var_name): \"\"\" Check and return the type of an", "ex: return \"None\" try: int_val = int(val) return 'Integer' except", "\"None\" try: int_val = int(val) return 'Integer' except ValueError: return", "return the type of an environment variable. supported types: None", "variable name @return: string of the type name. \"\"\" try:", "Check and return the type of an environment variable. supported", "variable. supported types: None Integer String @param env_var_name: environment variable", "as ex: return \"None\" try: int_val = int(val) return 'Integer'", "check_env(env_var_name): \"\"\" Check and return the type of an environment", "is None: return 'None' except Exception as ex: return \"None\"", "os.getenv(env_var_name) if val is None: return 'None' except Exception as", "'None' except Exception as ex: return \"None\" try: int_val =", "env_var_name: environment variable name @return: string of the type name.", "return \"None\" try: int_val = int(val) return 'Integer' except ValueError:", "try: val = os.getenv(env_var_name) if val is None: return 'None'", "= os.getenv(env_var_name) if val is None: return 'None' except Exception", "supported types: None Integer String @param env_var_name: environment variable name", "val is None: return 'None' except Exception as ex: return", "the type name. \"\"\" try: val = os.getenv(env_var_name) if val", "name @return: string of the type name. \"\"\" try: val", "if val is None: return 'None' except Exception as ex:", "an environment variable. supported types: None Integer String @param env_var_name:", "type name. 
\"\"\" try: val = os.getenv(env_var_name) if val is", "os def check_env(env_var_name): \"\"\" Check and return the type of", "@param env_var_name: environment variable name @return: string of the type", "of the type name. \"\"\" try: val = os.getenv(env_var_name) if", "name. \"\"\" try: val = os.getenv(env_var_name) if val is None:", "and return the type of an environment variable. supported types:", "import os def check_env(env_var_name): \"\"\" Check and return the type", "try: int_val = int(val) return 'Integer' except ValueError: return 'String'", "string of the type name. \"\"\" try: val = os.getenv(env_var_name)", "String @param env_var_name: environment variable name @return: string of the", "except Exception as ex: return \"None\" try: int_val = int(val)", "Integer String @param env_var_name: environment variable name @return: string of", "the type of an environment variable. supported types: None Integer", "Exception as ex: return \"None\" try: int_val = int(val) return", "val = os.getenv(env_var_name) if val is None: return 'None' except", "None Integer String @param env_var_name: environment variable name @return: string", "None: return 'None' except Exception as ex: return \"None\" try:", "types: None Integer String @param env_var_name: environment variable name @return:", "of an environment variable. supported types: None Integer String @param", "return 'None' except Exception as ex: return \"None\" try: int_val", "environment variable name @return: string of the type name. \"\"\"", "@return: string of the type name. \"\"\" try: val =" ]
[ "[\"name\" , \"song_image\" , \"pk\" , \"like\" , \"played\" ,", "class SoundDetailSerializer(serializers.ModelSerializer): class Meta: model = Sound fields = \"__all__\"", "class Meta: model = Album fields = [\"name\" , \"datepublish\"", "\"pk\" , \"like\" , \"played\" , \"tag\" , \"singer\" ,", "serializers.SerializerMethodField() class Meta: model = Album fields = [\"name\" ,", "model = Album fields = [\"name\" , \"datepublish\" , \"category\"", "AlbumSerializer(serializers.ModelSerializer): sound = serializers.SerializerMethodField() class Meta: model = Album fields", "fields = [\"name\" , \"song_image\" , \"pk\" , \"like\" ,", "1 def get_sound(self , obj): print(\"WORKING\") return SoundSerializer(instance=obj.sound , many=True).data", "\"upload_date\"] class SoundDetailSerializer(serializers.ModelSerializer): class Meta: model = Sound fields =", "Meta: model = Album fields = [\"name\" , \"datepublish\" ,", ", Album from rest_framework import serializers class SoundSerializer(serializers.ModelSerializer): class Meta:", "\"tag\" , \"singer\" , \"upload_date\"] class SoundDetailSerializer(serializers.ModelSerializer): class Meta: model", "\"like\" , \"played\" , \"tag\" , \"singer\" , \"upload_date\"] class", "[\"name\" , \"datepublish\" , \"category\" , \"sound\"] depth = 1", ", \"song_image\" , \"pk\" , \"like\" , \"played\" , \"tag\"", "SoundSerializer(serializers.ModelSerializer): class Meta: model = Sound fields = [\"name\" ,", "= Sound fields = [\"name\" , \"song_image\" , \"pk\" ,", "Meta: model = Sound fields = \"__all__\" class AlbumSerializer(serializers.ModelSerializer): sound", "Meta: model = Sound fields = [\"name\" , \"song_image\" ,", "class Meta: model = Sound fields = \"__all__\" class AlbumSerializer(serializers.ModelSerializer):", "sound = serializers.SerializerMethodField() class Meta: model = Album fields =", "= Album fields = [\"name\" , \"datepublish\" , \"category\" ,", "\"sound\"] depth = 1 def get_sound(self , obj): print(\"WORKING\") 
return", "\"played\" , \"tag\" , \"singer\" , \"upload_date\"] class SoundDetailSerializer(serializers.ModelSerializer): class", "serializers class SoundSerializer(serializers.ModelSerializer): class Meta: model = Sound fields =", "= [\"name\" , \"song_image\" , \"pk\" , \"like\" , \"played\"", "import serializers class SoundSerializer(serializers.ModelSerializer): class Meta: model = Sound fields", "from .models import Sound , Album from rest_framework import serializers", "\"datepublish\" , \"category\" , \"sound\"] depth = 1 def get_sound(self", "model = Sound fields = [\"name\" , \"song_image\" , \"pk\"", "= \"__all__\" class AlbumSerializer(serializers.ModelSerializer): sound = serializers.SerializerMethodField() class Meta: model", ", \"upload_date\"] class SoundDetailSerializer(serializers.ModelSerializer): class Meta: model = Sound fields", "SoundDetailSerializer(serializers.ModelSerializer): class Meta: model = Sound fields = \"__all__\" class", "\"__all__\" class AlbumSerializer(serializers.ModelSerializer): sound = serializers.SerializerMethodField() class Meta: model =", "= serializers.SerializerMethodField() class Meta: model = Album fields = [\"name\"", ", \"tag\" , \"singer\" , \"upload_date\"] class SoundDetailSerializer(serializers.ModelSerializer): class Meta:", "\"singer\" , \"upload_date\"] class SoundDetailSerializer(serializers.ModelSerializer): class Meta: model = Sound", "rest_framework import serializers class SoundSerializer(serializers.ModelSerializer): class Meta: model = Sound", "fields = \"__all__\" class AlbumSerializer(serializers.ModelSerializer): sound = serializers.SerializerMethodField() class Meta:", "class Meta: model = Sound fields = [\"name\" , \"song_image\"", "class SoundSerializer(serializers.ModelSerializer): class Meta: model = Sound fields = [\"name\"", "class AlbumSerializer(serializers.ModelSerializer): sound = serializers.SerializerMethodField() class Meta: model = Album", "Sound , Album from rest_framework import 
serializers class SoundSerializer(serializers.ModelSerializer): class", "from rest_framework import serializers class SoundSerializer(serializers.ModelSerializer): class Meta: model =", ", \"like\" , \"played\" , \"tag\" , \"singer\" , \"upload_date\"]", "= Sound fields = \"__all__\" class AlbumSerializer(serializers.ModelSerializer): sound = serializers.SerializerMethodField()", "= [\"name\" , \"datepublish\" , \"category\" , \"sound\"] depth =", ", \"datepublish\" , \"category\" , \"sound\"] depth = 1 def", "\"song_image\" , \"pk\" , \"like\" , \"played\" , \"tag\" ,", "Album fields = [\"name\" , \"datepublish\" , \"category\" , \"sound\"]", "import Sound , Album from rest_framework import serializers class SoundSerializer(serializers.ModelSerializer):", "Sound fields = \"__all__\" class AlbumSerializer(serializers.ModelSerializer): sound = serializers.SerializerMethodField() class", ", \"category\" , \"sound\"] depth = 1 def get_sound(self ,", "= 1 def get_sound(self , obj): print(\"WORKING\") return SoundSerializer(instance=obj.sound ,", "Album from rest_framework import serializers class SoundSerializer(serializers.ModelSerializer): class Meta: model", ", \"sound\"] depth = 1 def get_sound(self , obj): print(\"WORKING\")", ".models import Sound , Album from rest_framework import serializers class", ", \"pk\" , \"like\" , \"played\" , \"tag\" , \"singer\"", "depth = 1 def get_sound(self , obj): print(\"WORKING\") return SoundSerializer(instance=obj.sound", ", \"played\" , \"tag\" , \"singer\" , \"upload_date\"] class SoundDetailSerializer(serializers.ModelSerializer):", "Sound fields = [\"name\" , \"song_image\" , \"pk\" , \"like\"", ", \"singer\" , \"upload_date\"] class SoundDetailSerializer(serializers.ModelSerializer): class Meta: model =", "model = Sound fields = \"__all__\" class AlbumSerializer(serializers.ModelSerializer): sound =", "\"category\" , \"sound\"] depth = 1 def get_sound(self , obj):", "fields = [\"name\" , \"datepublish\" , \"category\" , 
\"sound\"] depth" ]
[ "import datetime from django.conf import settings import pytz def check_tracker(obj,", "block name), e.g. ('HN', 'Header Navigation', 'header-navigation-trackers') would allow for", "block header-navigation-trackers %}{% generate_trackers 'HN' %}{% endblock %} in their", "It defaults to the obvious 4 location (top/bottom of the", "(no, I don't know why they'd want this) if they", "%}{% endblock %} in their template. \"\"\" tracker_position_list = DEFAULT_TRACKER_POSITIONS", "check_tracker(obj, simple=True): if simple: if obj.status > 0: return True", "have tracking code in a navbar (no, I don't know", "is live_as_of set? if not obj.tracker_live_as_of: # No live_as_of ---", "# EXPIRED! return False # it's OK then return True", "yet? if now < obj.tracker_live_as_of: # live_as_of --- not yet!", "return False # is there an expiration date? if obj.tracker_expires", "the obvious 4 location (top/bottom of the head/body); however the", "now = datetime.now(pytz.utc) if obj.tracker_publish_status < 0: return False if", "obj.tracker_expires: # EXPIRED! return False # it's OK then return", "why they'd want this) if they put {% block header-navigation-trackers", "'Head - near bottom'), ('tracker-body-top', 'Body - near top'), ('tracker-body-bottom',", "in the settings file under ADDITIONAL_TRACKER_POSITIONS. (2-letter-code, description, block name),", "template. \"\"\" tracker_position_list = DEFAULT_TRACKER_POSITIONS additional_tracker_positions = getattr(settings, \"ADDITIONAL_TRACKER_POSITIONS\", [])", "# No live_as_of --- bail return False # has it", "False # has it happened yet? if now < obj.tracker_live_as_of:", "This creates the dropdown in the Admin for where to", "for where to put each tracker. It defaults to the", "(2-letter-code, description, block name), e.g. 
('HN', 'Header Navigation', 'header-navigation-trackers') would", "can create more by adding a list of 3-ples in", "live_as_of --- bail return False # has it happened yet?", "bail return False # has it happened yet? if now", "False if obj.tracker_publish_status > 0: return True # Checking live_as_of", "'header-navigation-trackers') would allow for the user to have tracking code", "live_as_of ... # is live_as_of set? if not obj.tracker_live_as_of: #", "< 0: return False if obj.tracker_publish_status > 0: return True", "location (top/bottom of the head/body); however the user can create", "OK then return True DEFAULT_TRACKER_POSITIONS = [ ('tracker-head-top', 'Head -", "to have tracking code in a navbar (no, I don't", "%}{% generate_trackers 'HN' %}{% endblock %} in their template. \"\"\"", "list of 3-ples in the settings file under ADDITIONAL_TRACKER_POSITIONS. (2-letter-code,", "a list of 3-ples in the settings file under ADDITIONAL_TRACKER_POSITIONS.", "a gatekeeper now = datetime.now(pytz.utc) if obj.tracker_publish_status < 0: return", "('tracker-head-bottom', 'Head - near bottom'), ('tracker-body-top', 'Body - near top'),", "False # it's OK then return True DEFAULT_TRACKER_POSITIONS = [", "near top'), ('tracker-body-bottom', 'Body - near bottom') ] def get_tracker_position_options():", "if simple: if obj.status > 0: return True return False", "> 0: return True return False # we have a", "would allow for the user to have tracking code in", "of 3-ples in the settings file under ADDITIONAL_TRACKER_POSITIONS. (2-letter-code, description,", "don't know why they'd want this) if they put {%", "description, block name), e.g. ('HN', 'Header Navigation', 'header-navigation-trackers') would allow", "0: return True return False # we have a gatekeeper", "def check_tracker(obj, simple=True): if simple: if obj.status > 0: return", "the head/body); however the user can create more by adding", "return False # has it happened yet? 
if now <", "< obj.tracker_live_as_of: # live_as_of --- not yet! return False #", "adding a list of 3-ples in the settings file under", "- near bottom') ] def get_tracker_position_options(): \"\"\" This creates the", "live_as_of set? if not obj.tracker_live_as_of: # No live_as_of --- bail", "\"\"\" tracker_position_list = DEFAULT_TRACKER_POSITIONS additional_tracker_positions = getattr(settings, \"ADDITIONAL_TRACKER_POSITIONS\", []) full_list", "gatekeeper now = datetime.now(pytz.utc) if obj.tracker_publish_status < 0: return False", "\"ADDITIONAL_TRACKER_POSITIONS\", []) full_list = list() for x in (tracker_position_list +", "have a gatekeeper now = datetime.now(pytz.utc) if obj.tracker_publish_status < 0:", "list() for x in (tracker_position_list + additional_tracker_positions): full_list.append((x[0], x[1])) return", "> obj.tracker_expires: # EXPIRED! return False # it's OK then", "if obj.tracker_expires and now > obj.tracker_expires: # EXPIRED! return False", "they put {% block header-navigation-trackers %}{% generate_trackers 'HN' %}{% endblock", "the settings file under ADDITIONAL_TRACKER_POSITIONS. (2-letter-code, description, block name), e.g.", "('tracker-body-top', 'Body - near top'), ('tracker-body-bottom', 'Body - near bottom')", "want this) if they put {% block header-navigation-trackers %}{% generate_trackers", "bottom') ] def get_tracker_position_options(): \"\"\" This creates the dropdown in", "top'), ('tracker-body-bottom', 'Body - near bottom') ] def get_tracker_position_options(): \"\"\"", "True return False # we have a gatekeeper now =", "put each tracker. It defaults to the obvious 4 location", "each tracker. It defaults to the obvious 4 location (top/bottom", "full_list = list() for x in (tracker_position_list + additional_tracker_positions): full_list.append((x[0],", "0: return True # Checking live_as_of ... # is live_as_of", "return False if obj.tracker_publish_status > 0: return True # Checking", "set? 
if not obj.tracker_live_as_of: # No live_as_of --- bail return", "if obj.status > 0: return True return False # we", "if now < obj.tracker_live_as_of: # live_as_of --- not yet! return", "where to put each tracker. It defaults to the obvious", "header-navigation-trackers %}{% generate_trackers 'HN' %}{% endblock %} in their template.", "getattr(settings, \"ADDITIONAL_TRACKER_POSITIONS\", []) full_list = list() for x in (tracker_position_list", "dropdown in the Admin for where to put each tracker.", "allow for the user to have tracking code in a", "%} in their template. \"\"\" tracker_position_list = DEFAULT_TRACKER_POSITIONS additional_tracker_positions =", "settings import pytz def check_tracker(obj, simple=True): if simple: if obj.status", "\"\"\" This creates the dropdown in the Admin for where", "put {% block header-navigation-trackers %}{% generate_trackers 'HN' %}{% endblock %}", "= [ ('tracker-head-top', 'Head - near top'), ('tracker-head-bottom', 'Head -", "('HN', 'Header Navigation', 'header-navigation-trackers') would allow for the user to", "expiration date? if obj.tracker_expires and now > obj.tracker_expires: # EXPIRED!", "in their template. \"\"\" tracker_position_list = DEFAULT_TRACKER_POSITIONS additional_tracker_positions = getattr(settings,", "pytz def check_tracker(obj, simple=True): if simple: if obj.status > 0:", "to put each tracker. It defaults to the obvious 4", "= DEFAULT_TRACKER_POSITIONS additional_tracker_positions = getattr(settings, \"ADDITIONAL_TRACKER_POSITIONS\", []) full_list = list()", "tracker. It defaults to the obvious 4 location (top/bottom of", "tracking code in a navbar (no, I don't know why", "I don't know why they'd want this) if they put", "a navbar (no, I don't know why they'd want this)", "'HN' %}{% endblock %} in their template. \"\"\" tracker_position_list =", "# is there an expiration date? if obj.tracker_expires and now", "e.g. 
('HN', 'Header Navigation', 'header-navigation-trackers') would allow for the user", "return True return False # we have a gatekeeper now", "this) if they put {% block header-navigation-trackers %}{% generate_trackers 'HN'", "the Admin for where to put each tracker. It defaults", "top'), ('tracker-head-bottom', 'Head - near bottom'), ('tracker-body-top', 'Body - near", "near bottom') ] def get_tracker_position_options(): \"\"\" This creates the dropdown", "the user can create more by adding a list of", "= datetime.now(pytz.utc) if obj.tracker_publish_status < 0: return False if obj.tracker_publish_status", "name), e.g. ('HN', 'Header Navigation', 'header-navigation-trackers') would allow for the", "= getattr(settings, \"ADDITIONAL_TRACKER_POSITIONS\", []) full_list = list() for x in", "DEFAULT_TRACKER_POSITIONS additional_tracker_positions = getattr(settings, \"ADDITIONAL_TRACKER_POSITIONS\", []) full_list = list() for", "it's OK then return True DEFAULT_TRACKER_POSITIONS = [ ('tracker-head-top', 'Head", "{% block header-navigation-trackers %}{% generate_trackers 'HN' %}{% endblock %} in", "obj.tracker_publish_status < 0: return False if obj.tracker_publish_status > 0: return", "yet! return False # is there an expiration date? if", "and now > obj.tracker_expires: # EXPIRED! return False # it's", "obj.tracker_live_as_of: # live_as_of --- not yet! return False # is", "] def get_tracker_position_options(): \"\"\" This creates the dropdown in the", "obvious 4 location (top/bottom of the head/body); however the user", "obj.tracker_live_as_of: # No live_as_of --- bail return False # has", "from datetime import datetime from django.conf import settings import pytz", "their template. \"\"\" tracker_position_list = DEFAULT_TRACKER_POSITIONS additional_tracker_positions = getattr(settings, \"ADDITIONAL_TRACKER_POSITIONS\",", "file under ADDITIONAL_TRACKER_POSITIONS. (2-letter-code, description, block name), e.g. ('HN', 'Header", "under ADDITIONAL_TRACKER_POSITIONS. 
(2-letter-code, description, block name), e.g. ('HN', 'Header Navigation',", "Admin for where to put each tracker. It defaults to", "user to have tracking code in a navbar (no, I", "from django.conf import settings import pytz def check_tracker(obj, simple=True): if", "# is live_as_of set? if not obj.tracker_live_as_of: # No live_as_of", "it happened yet? if now < obj.tracker_live_as_of: # live_as_of ---", "Checking live_as_of ... # is live_as_of set? if not obj.tracker_live_as_of:", "more by adding a list of 3-ples in the settings", "return False # we have a gatekeeper now = datetime.now(pytz.utc)", "if they put {% block header-navigation-trackers %}{% generate_trackers 'HN' %}{%", "now < obj.tracker_live_as_of: # live_as_of --- not yet! return False", "not yet! return False # is there an expiration date?", "additional_tracker_positions = getattr(settings, \"ADDITIONAL_TRACKER_POSITIONS\", []) full_list = list() for x", "we have a gatekeeper now = datetime.now(pytz.utc) if obj.tracker_publish_status <", "'Body - near bottom') ] def get_tracker_position_options(): \"\"\" This creates", "navbar (no, I don't know why they'd want this) if", "--- not yet! return False # is there an expiration", "... # is live_as_of set? 
if not obj.tracker_live_as_of: # No", "DEFAULT_TRACKER_POSITIONS = [ ('tracker-head-top', 'Head - near top'), ('tracker-head-bottom', 'Head", "[]) full_list = list() for x in (tracker_position_list + additional_tracker_positions):", "not obj.tracker_live_as_of: # No live_as_of --- bail return False #", "simple: if obj.status > 0: return True return False #", "near top'), ('tracker-head-bottom', 'Head - near bottom'), ('tracker-body-top', 'Body -", "- near top'), ('tracker-head-bottom', 'Head - near bottom'), ('tracker-body-top', 'Body", "defaults to the obvious 4 location (top/bottom of the head/body);", "def get_tracker_position_options(): \"\"\" This creates the dropdown in the Admin", "in a navbar (no, I don't know why they'd want", "simple=True): if simple: if obj.status > 0: return True return", "0: return False if obj.tracker_publish_status > 0: return True #", "EXPIRED! return False # it's OK then return True DEFAULT_TRACKER_POSITIONS", "- near bottom'), ('tracker-body-top', 'Body - near top'), ('tracker-body-bottom', 'Body", "No live_as_of --- bail return False # has it happened", "is there an expiration date? if obj.tracker_expires and now >", "datetime import datetime from django.conf import settings import pytz def", "--- bail return False # has it happened yet? if", "'Header Navigation', 'header-navigation-trackers') would allow for the user to have", "by adding a list of 3-ples in the settings file", "3-ples in the settings file under ADDITIONAL_TRACKER_POSITIONS. (2-letter-code, description, block", "head/body); however the user can create more by adding a", "# it's OK then return True DEFAULT_TRACKER_POSITIONS = [ ('tracker-head-top',", "[ ('tracker-head-top', 'Head - near top'), ('tracker-head-bottom', 'Head - near", "(top/bottom of the head/body); however the user can create more", "obj.status > 0: return True return False # we have", "obj.tracker_expires and now > obj.tracker_expires: # EXPIRED! 
return False #", "tracker_position_list = DEFAULT_TRACKER_POSITIONS additional_tracker_positions = getattr(settings, \"ADDITIONAL_TRACKER_POSITIONS\", []) full_list =", "user can create more by adding a list of 3-ples", "Navigation', 'header-navigation-trackers') would allow for the user to have tracking", "if not obj.tracker_live_as_of: # No live_as_of --- bail return False", "there an expiration date? if obj.tracker_expires and now > obj.tracker_expires:", "date? if obj.tracker_expires and now > obj.tracker_expires: # EXPIRED! return", "# we have a gatekeeper now = datetime.now(pytz.utc) if obj.tracker_publish_status", "if obj.tracker_publish_status > 0: return True # Checking live_as_of ...", "return True # Checking live_as_of ... # is live_as_of set?", "False # we have a gatekeeper now = datetime.now(pytz.utc) if", "settings file under ADDITIONAL_TRACKER_POSITIONS. (2-letter-code, description, block name), e.g. ('HN',", "False # is there an expiration date? if obj.tracker_expires and", "# has it happened yet? if now < obj.tracker_live_as_of: #", "near bottom'), ('tracker-body-top', 'Body - near top'), ('tracker-body-bottom', 'Body -", "datetime.now(pytz.utc) if obj.tracker_publish_status < 0: return False if obj.tracker_publish_status >", "True # Checking live_as_of ... # is live_as_of set? if", "return True DEFAULT_TRACKER_POSITIONS = [ ('tracker-head-top', 'Head - near top'),", "has it happened yet? if now < obj.tracker_live_as_of: # live_as_of", "of the head/body); however the user can create more by", "happened yet? if now < obj.tracker_live_as_of: # live_as_of --- not", "= list() for x in (tracker_position_list + additional_tracker_positions): full_list.append((x[0], x[1]))", "now > obj.tracker_expires: # EXPIRED! 
return False # it's OK", "('tracker-body-bottom', 'Body - near bottom') ] def get_tracker_position_options(): \"\"\" This", "django.conf import settings import pytz def check_tracker(obj, simple=True): if simple:", "know why they'd want this) if they put {% block", "True DEFAULT_TRACKER_POSITIONS = [ ('tracker-head-top', 'Head - near top'), ('tracker-head-bottom',", "generate_trackers 'HN' %}{% endblock %} in their template. \"\"\" tracker_position_list", "creates the dropdown in the Admin for where to put", "get_tracker_position_options(): \"\"\" This creates the dropdown in the Admin for", "obj.tracker_publish_status > 0: return True # Checking live_as_of ... #", "'Head - near top'), ('tracker-head-bottom', 'Head - near bottom'), ('tracker-body-top',", "the dropdown in the Admin for where to put each", "create more by adding a list of 3-ples in the", "ADDITIONAL_TRACKER_POSITIONS. (2-letter-code, description, block name), e.g. ('HN', 'Header Navigation', 'header-navigation-trackers')", "4 location (top/bottom of the head/body); however the user can", "- near top'), ('tracker-body-bottom', 'Body - near bottom') ] def", "to the obvious 4 location (top/bottom of the head/body); however", "# live_as_of --- not yet! return False # is there", "import pytz def check_tracker(obj, simple=True): if simple: if obj.status >", "> 0: return True # Checking live_as_of ... # is", "in the Admin for where to put each tracker. It", "import settings import pytz def check_tracker(obj, simple=True): if simple: if", "an expiration date? if obj.tracker_expires and now > obj.tracker_expires: #", "then return True DEFAULT_TRACKER_POSITIONS = [ ('tracker-head-top', 'Head - near", "bottom'), ('tracker-body-top', 'Body - near top'), ('tracker-body-bottom', 'Body - near", "endblock %} in their template. 
\"\"\" tracker_position_list = DEFAULT_TRACKER_POSITIONS additional_tracker_positions", "the user to have tracking code in a navbar (no,", "if obj.tracker_publish_status < 0: return False if obj.tracker_publish_status > 0:", "('tracker-head-top', 'Head - near top'), ('tracker-head-bottom', 'Head - near bottom'),", "# Checking live_as_of ... # is live_as_of set? if not", "datetime from django.conf import settings import pytz def check_tracker(obj, simple=True):", "return False # it's OK then return True DEFAULT_TRACKER_POSITIONS =", "however the user can create more by adding a list", "for x in (tracker_position_list + additional_tracker_positions): full_list.append((x[0], x[1])) return full_list", "for the user to have tracking code in a navbar", "code in a navbar (no, I don't know why they'd", "'Body - near top'), ('tracker-body-bottom', 'Body - near bottom') ]", "live_as_of --- not yet! return False # is there an", "they'd want this) if they put {% block header-navigation-trackers %}{%" ]
[ "import datetime from fastapi import APIRouter router = APIRouter() @router.get(\"\",", "router = APIRouter() @router.get(\"\", tags=[\"health\"]) async def get_health(): return {", "get_health(): return { \"results\": [], \"status\": \"success\", \"timestamp\": datetime.datetime.now().timestamp() }", "datetime from fastapi import APIRouter router = APIRouter() @router.get(\"\", tags=[\"health\"])", "fastapi import APIRouter router = APIRouter() @router.get(\"\", tags=[\"health\"]) async def", "= APIRouter() @router.get(\"\", tags=[\"health\"]) async def get_health(): return { \"results\":", "APIRouter router = APIRouter() @router.get(\"\", tags=[\"health\"]) async def get_health(): return", "def get_health(): return { \"results\": [], \"status\": \"success\", \"timestamp\": datetime.datetime.now().timestamp()", "async def get_health(): return { \"results\": [], \"status\": \"success\", \"timestamp\":", "APIRouter() @router.get(\"\", tags=[\"health\"]) async def get_health(): return { \"results\": [],", "tags=[\"health\"]) async def get_health(): return { \"results\": [], \"status\": \"success\",", "import APIRouter router = APIRouter() @router.get(\"\", tags=[\"health\"]) async def get_health():", "@router.get(\"\", tags=[\"health\"]) async def get_health(): return { \"results\": [], \"status\":", "from fastapi import APIRouter router = APIRouter() @router.get(\"\", tags=[\"health\"]) async" ]
[ "defaultdict from io import StringIO from matplotlib import pyplot as", "'r') as f: for line in f: cat = line.split('\\n')[0]", "PATH_TO_FROZEN_GRAPH = '../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb' info='Time taken to load Model into memory:'", "#Load categories categories = [] with open('../data/' + 'categories.txt', 'r')", "categories:', len(categories)) # Load image size with open('../data/' + 'inputsize.txt',", "!= 'classes': categories.append(cat) f.close() print('Number of categories:', len(categories)) # Load", "import json import time import cv2 PATH_TO_FROZEN_GRAPH = '../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb' info='Time", "img *= 2.0 return img sess=tf.Session(graph=detection_graph) def run_inference_b1(key_name,image, graph,no_of_run): #model", "layer name ops = graph.get_operations() all_tensor_names = {output.name for op", "output in op.outputs} #print(all_tensor_names) tensor_dict = {} for key in", "os import six.moves.urllib as urllib import sys import tarfile import", "so that graph is loaded into TF memory sess.run(tensor_dict,feed_dict={image_tensor: image})", "+ 'Tiger.jpg' img = Load_and_process_img(image_filename) key_name='MobilenetV2/Predictions/Reshape_1' result,time_taken=run_inference_b1(key_name,img,detection_graph,1000) print('Time Taken to", "import Image import json import time import cv2 PATH_TO_FROZEN_GRAPH =", "time_taken=end_time-start_time print(info,time_taken) # Load the labels #Load categories categories =", "taken to load Model into memory:' start_time=time.time() detection_graph = tf.Graph()", "int(f.readline().split('\\n')[0]) #print(reqsize) #image_filename = '../data/' + 'image1.jpg' def Load_and_process_img(image_filename): img", "memory:' start_time=time.time() detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef()", "od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') end_time=time.time() 
time_taken=end_time-start_time print(info,time_taken) # Load the labels", "in op.outputs} #print(all_tensor_names) tensor_dict = {} for key in [key_name]:", "{} for key in [key_name]: tensor_name = key + ':0'", "io import StringIO from matplotlib import pyplot as plt from", "= '../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb' info='Time taken to load Model into memory:' start_time=time.time()", "cv2.resize(img, (reqsize, reqsize)) img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB) img = img.astype(float) #img values are", "time_taken=end_time-start_time print(info,time_taken) #print(output_dict) top_inds = output_dict[key_name][0].argsort()[::-1][:5] result=[] for i in", "img sess=tf.Session(graph=detection_graph) def run_inference_b1(key_name,image, graph,no_of_run): #model output layer name ops", "#print(all_tensor_names) tensor_dict = {} for key in [key_name]: tensor_name =", "= tf.GraphDef() with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid: serialized_graph = fid.read()", "*= 2.0 return img sess=tf.Session(graph=detection_graph) def run_inference_b1(key_name,image, graph,no_of_run): #model output", "'image1.jpg' def Load_and_process_img(image_filename): img = cv2.imread(image_filename)#.astype(numpy.float32) img = cv2.resize(img, (reqsize,", "'../data/' + 'Tiger.jpg' img = Load_and_process_img(image_filename) key_name='MobilenetV2/Predictions/Reshape_1' result,time_taken=run_inference_b1(key_name,img,detection_graph,1000) print('Time Taken", "-1 to 1 img /= 255.0 img -= 0.5 img", "result=[] for i in range(5): result.append([top_inds[i], categories[top_inds[i]], output_dict[key_name][0][top_inds[i]]]) return result,", "six.moves.urllib as urllib import sys import tarfile import tensorflow as", "ops = graph.get_operations() all_tensor_names = {output.name for op in ops", "return img sess=tf.Session(graph=detection_graph) def run_inference_b1(key_name,image, graph,no_of_run): #model output layer name", "i in range(no_of_run): output_dict = 
sess.run(tensor_dict, feed_dict={image_tensor: image}) end_time=time.time() time_taken=end_time-start_time", "image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2]) image_tensor = graph.get_tensor_by_name('input:0') #Demo run, so that graph is", "fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') end_time=time.time() time_taken=end_time-start_time print(info,time_taken) # Load the", "= graph.get_operations() all_tensor_names = {output.name for op in ops for", "as np import os import six.moves.urllib as urllib import sys", "StringIO from matplotlib import pyplot as plt from PIL import", "graph,no_of_run): #model output layer name ops = graph.get_operations() all_tensor_names =", "0.5 img *= 2.0 return img sess=tf.Session(graph=detection_graph) def run_inference_b1(key_name,image, graph,no_of_run):", "start_time=time.time() for i in range(no_of_run): output_dict = sess.run(tensor_dict, feed_dict={image_tensor: image})", "from distutils.version import StrictVersion from collections import defaultdict from io", "import zipfile from distutils.version import StrictVersion from collections import defaultdict", "to load Model into memory:' start_time=time.time() detection_graph = tf.Graph() with", "import time import cv2 PATH_TO_FROZEN_GRAPH = '../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb' info='Time taken to", "tf.GraphDef() with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph)", "key + ':0' if tensor_name in all_tensor_names: tensor_dict[key] = graph.get_tensor_by_name(tensor_name)", "tensor_dict[key] = graph.get_tensor_by_name(tensor_name) image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2]) image_tensor = graph.get_tensor_by_name('input:0') #Demo run, so", "img = cv2.imread(image_filename)#.astype(numpy.float32) img = cv2.resize(img, (reqsize, reqsize)) img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB) img", 
"into TF memory sess.run(tensor_dict,feed_dict={image_tensor: image}) # Run inference info='Time taken", "def Load_and_process_img(image_filename): img = cv2.imread(image_filename)#.astype(numpy.float32) img = cv2.resize(img, (reqsize, reqsize))", "Times: ' start_time=time.time() for i in range(no_of_run): output_dict = sess.run(tensor_dict,", "= [] with open('../data/' + 'categories.txt', 'r') as f: for", "result, time_taken image_filename = '../data/' + 'Tiger.jpg' img = Load_and_process_img(image_filename)", "categories.append(cat) f.close() print('Number of categories:', len(categories)) # Load image size", "= {output.name for op in ops for output in op.outputs}", "import numpy as np import os import six.moves.urllib as urllib", "as tf import zipfile from distutils.version import StrictVersion from collections", "TF memory sess.run(tensor_dict,feed_dict={image_tensor: image}) # Run inference info='Time taken to", "categories[top_inds[i]], output_dict[key_name][0][top_inds[i]]]) return result, time_taken image_filename = '../data/' + 'Tiger.jpg'", "size with open('../data/' + 'inputsize.txt', 'r') as f: reqsize =", "start_time=time.time() detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef() with", "img.astype(float) #img values are scaled from -1 to 1 img", "graph.get_operations() all_tensor_names = {output.name for op in ops for output", "'Tiger.jpg' img = Load_and_process_img(image_filename) key_name='MobilenetV2/Predictions/Reshape_1' result,time_taken=run_inference_b1(key_name,img,detection_graph,1000) print('Time Taken to run", "def run_inference_b1(key_name,image, graph,no_of_run): #model output layer name ops = graph.get_operations()", "sess=tf.Session(graph=detection_graph) def run_inference_b1(key_name,image, graph,no_of_run): #model output layer name ops =", "graph.get_tensor_by_name(tensor_name) image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2]) image_tensor = graph.get_tensor_by_name('input:0') 
#Demo run, so that graph", "from io import StringIO from matplotlib import pyplot as plt", "json import time import cv2 PATH_TO_FROZEN_GRAPH = '../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb' info='Time taken", "are scaled from -1 to 1 img /= 255.0 img", "tensor_name in all_tensor_names: tensor_dict[key] = graph.get_tensor_by_name(tensor_name) image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2]) image_tensor = graph.get_tensor_by_name('input:0')", "inference info='Time taken to run inference: run_inference_b1:'+str(no_of_run)+' Times: ' start_time=time.time()", "categories = [] with open('../data/' + 'categories.txt', 'r') as f:", "as f: for line in f: cat = line.split('\\n')[0] if", "+ 'inputsize.txt', 'r') as f: reqsize = int(f.readline().split('\\n')[0]) #print(reqsize) #image_filename", "img = Load_and_process_img(image_filename) key_name='MobilenetV2/Predictions/Reshape_1' result,time_taken=run_inference_b1(key_name,img,detection_graph,1000) print('Time Taken to run Inference", "serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') end_time=time.time() time_taken=end_time-start_time print(info,time_taken) #", "end_time=time.time() time_taken=end_time-start_time print(info,time_taken) # Load the labels #Load categories categories", "open('../data/' + 'inputsize.txt', 'r') as f: reqsize = int(f.readline().split('\\n')[0]) #print(reqsize)", "= key + ':0' if tensor_name in all_tensor_names: tensor_dict[key] =", "tensor_name = key + ':0' if tensor_name in all_tensor_names: tensor_dict[key]", "of categories:', len(categories)) # Load image size with open('../data/' +", "range(no_of_run): output_dict = sess.run(tensor_dict, feed_dict={image_tensor: image}) end_time=time.time() time_taken=end_time-start_time print(info,time_taken) #print(output_dict)", "output_dict[key_name][0].argsort()[::-1][:5] result=[] for i in range(5): result.append([top_inds[i], 
categories[top_inds[i]], output_dict[key_name][0][top_inds[i]]]) return", "to run inference: run_inference_b1:'+str(no_of_run)+' Times: ' start_time=time.time() for i in", "Model into memory:' start_time=time.time() detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def", "line in f: cat = line.split('\\n')[0] if cat != 'classes':", "tensor_dict = {} for key in [key_name]: tensor_name = key", "all_tensor_names: tensor_dict[key] = graph.get_tensor_by_name(tensor_name) image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2]) image_tensor = graph.get_tensor_by_name('input:0') #Demo run,", "Load the labels #Load categories categories = [] with open('../data/'", "#Demo run, so that graph is loaded into TF memory", "+ 'categories.txt', 'r') as f: for line in f: cat", "'../data/' + 'image1.jpg' def Load_and_process_img(image_filename): img = cv2.imread(image_filename)#.astype(numpy.float32) img =", "end_time=time.time() time_taken=end_time-start_time print(info,time_taken) #print(output_dict) top_inds = output_dict[key_name][0].argsort()[::-1][:5] result=[] for i", "result.append([top_inds[i], categories[top_inds[i]], output_dict[key_name][0][top_inds[i]]]) return result, time_taken image_filename = '../data/' +", "print('Number of categories:', len(categories)) # Load image size with open('../data/'", "pyplot as plt from PIL import Image import json import", "f: cat = line.split('\\n')[0] if cat != 'classes': categories.append(cat) f.close()", "= tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb')", "/= 255.0 img -= 0.5 img *= 2.0 return img", "run_inference_b1:'+str(no_of_run)+' Times: ' start_time=time.time() for i in range(no_of_run): output_dict =", "= int(f.readline().split('\\n')[0]) #print(reqsize) #image_filename = '../data/' + 'image1.jpg' def Load_and_process_img(image_filename):", "detection_graph.as_default(): od_graph_def = tf.GraphDef() with 
tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid: serialized_graph", "= cv2.imread(image_filename)#.astype(numpy.float32) img = cv2.resize(img, (reqsize, reqsize)) img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB) img =", "open('../data/' + 'categories.txt', 'r') as f: for line in f:", "for i in range(5): result.append([top_inds[i], categories[top_inds[i]], output_dict[key_name][0][top_inds[i]]]) return result, time_taken", "[key_name]: tensor_name = key + ':0' if tensor_name in all_tensor_names:", "sys import tarfile import tensorflow as tf import zipfile from", "taken to run inference: run_inference_b1:'+str(no_of_run)+' Times: ' start_time=time.time() for i", "image_filename = '../data/' + 'Tiger.jpg' img = Load_and_process_img(image_filename) key_name='MobilenetV2/Predictions/Reshape_1' result,time_taken=run_inference_b1(key_name,img,detection_graph,1000)", "tf.import_graph_def(od_graph_def, name='') end_time=time.time() time_taken=end_time-start_time print(info,time_taken) # Load the labels #Load", "sess.run(tensor_dict, feed_dict={image_tensor: image}) end_time=time.time() time_taken=end_time-start_time print(info,time_taken) #print(output_dict) top_inds = output_dict[key_name][0].argsort()[::-1][:5]", "import tarfile import tensorflow as tf import zipfile from distutils.version", "plt from PIL import Image import json import time import", "info='Time taken to run inference: run_inference_b1:'+str(no_of_run)+' Times: ' start_time=time.time() for", "image}) # Run inference info='Time taken to run inference: run_inference_b1:'+str(no_of_run)+'", "f: reqsize = int(f.readline().split('\\n')[0]) #print(reqsize) #image_filename = '../data/' + 'image1.jpg'", "as urllib import sys import tarfile import tensorflow as tf", "'classes': categories.append(cat) f.close() print('Number of categories:', len(categories)) # Load image", "with detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid:", "'categories.txt', 'r') as 
f: for line in f: cat =", "255.0 img -= 0.5 img *= 2.0 return img sess=tf.Session(graph=detection_graph)", "reqsize)) img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB) img = img.astype(float) #img values are scaled from", "in all_tensor_names: tensor_dict[key] = graph.get_tensor_by_name(tensor_name) image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2]) image_tensor = graph.get_tensor_by_name('input:0') #Demo", "in [key_name]: tensor_name = key + ':0' if tensor_name in", "img -= 0.5 img *= 2.0 return img sess=tf.Session(graph=detection_graph) def", "' start_time=time.time() for i in range(no_of_run): output_dict = sess.run(tensor_dict, feed_dict={image_tensor:", "zipfile from distutils.version import StrictVersion from collections import defaultdict from", "= sess.run(tensor_dict, feed_dict={image_tensor: image}) end_time=time.time() time_taken=end_time-start_time print(info,time_taken) #print(output_dict) top_inds =", "from matplotlib import pyplot as plt from PIL import Image", "top_inds = output_dict[key_name][0].argsort()[::-1][:5] result=[] for i in range(5): result.append([top_inds[i], categories[top_inds[i]],", "for output in op.outputs} #print(all_tensor_names) tensor_dict = {} for key", "+ ':0' if tensor_name in all_tensor_names: tensor_dict[key] = graph.get_tensor_by_name(tensor_name) image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2])", "memory sess.run(tensor_dict,feed_dict={image_tensor: image}) # Run inference info='Time taken to run", "feed_dict={image_tensor: image}) end_time=time.time() time_taken=end_time-start_time print(info,time_taken) #print(output_dict) top_inds = output_dict[key_name][0].argsort()[::-1][:5] result=[]", "tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as", "collections import defaultdict from io import StringIO from matplotlib import", "categories categories = [] with open('../data/' + 'categories.txt', 'r') as", 
"tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='')", "for line in f: cat = line.split('\\n')[0] if cat !=", "import sys import tarfile import tensorflow as tf import zipfile", "1 img /= 255.0 img -= 0.5 img *= 2.0", "img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB) img = img.astype(float) #img values are scaled from -1", "with open('../data/' + 'inputsize.txt', 'r') as f: reqsize = int(f.readline().split('\\n')[0])", "in ops for output in op.outputs} #print(all_tensor_names) tensor_dict = {}", "if cat != 'classes': categories.append(cat) f.close() print('Number of categories:', len(categories))", "import defaultdict from io import StringIO from matplotlib import pyplot", "print(info,time_taken) #print(output_dict) top_inds = output_dict[key_name][0].argsort()[::-1][:5] result=[] for i in range(5):", "= '../data/' + 'Tiger.jpg' img = Load_and_process_img(image_filename) key_name='MobilenetV2/Predictions/Reshape_1' result,time_taken=run_inference_b1(key_name,img,detection_graph,1000) print('Time", "name='') end_time=time.time() time_taken=end_time-start_time print(info,time_taken) # Load the labels #Load categories", "ops for output in op.outputs} #print(all_tensor_names) tensor_dict = {} for", "cat = line.split('\\n')[0] if cat != 'classes': categories.append(cat) f.close() print('Number", "Run inference info='Time taken to run inference: run_inference_b1:'+str(no_of_run)+' Times: '", "i in range(5): result.append([top_inds[i], categories[top_inds[i]], output_dict[key_name][0][top_inds[i]]]) return result, time_taken image_filename", "od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid: serialized_graph =", "':0' if tensor_name in all_tensor_names: tensor_dict[key] = graph.get_tensor_by_name(tensor_name) image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2]) image_tensor", "import cv2 PATH_TO_FROZEN_GRAPH = 
'../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb' info='Time taken to load Model", "run, so that graph is loaded into TF memory sess.run(tensor_dict,feed_dict={image_tensor:", "'inputsize.txt', 'r') as f: reqsize = int(f.readline().split('\\n')[0]) #print(reqsize) #image_filename =", "output layer name ops = graph.get_operations() all_tensor_names = {output.name for", "labels #Load categories categories = [] with open('../data/' + 'categories.txt',", "img /= 255.0 img -= 0.5 img *= 2.0 return", "key in [key_name]: tensor_name = key + ':0' if tensor_name", "time import cv2 PATH_TO_FROZEN_GRAPH = '../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb' info='Time taken to load", "f: for line in f: cat = line.split('\\n')[0] if cat", "#img values are scaled from -1 to 1 img /=", "f.close() print('Number of categories:', len(categories)) # Load image size with", "np import os import six.moves.urllib as urllib import sys import", "for i in range(no_of_run): output_dict = sess.run(tensor_dict, feed_dict={image_tensor: image}) end_time=time.time()", "load Model into memory:' start_time=time.time() detection_graph = tf.Graph() with detection_graph.as_default():", "#print(reqsize) #image_filename = '../data/' + 'image1.jpg' def Load_and_process_img(image_filename): img =", "tensorflow as tf import zipfile from distutils.version import StrictVersion from", "inference: run_inference_b1:'+str(no_of_run)+' Times: ' start_time=time.time() for i in range(no_of_run): output_dict", "= img.astype(float) #img values are scaled from -1 to 1", "'r') as f: reqsize = int(f.readline().split('\\n')[0]) #print(reqsize) #image_filename = '../data/'", "print(info,time_taken) # Load the labels #Load categories categories = []", "as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') end_time=time.time() time_taken=end_time-start_time", "from -1 to 1 img /= 255.0 img -= 0.5", "+ 'image1.jpg' def 
Load_and_process_img(image_filename): img = cv2.imread(image_filename)#.astype(numpy.float32) img = cv2.resize(img,", "matplotlib import pyplot as plt from PIL import Image import", "op.outputs} #print(all_tensor_names) tensor_dict = {} for key in [key_name]: tensor_name", "sess.run(tensor_dict,feed_dict={image_tensor: image}) # Run inference info='Time taken to run inference:", "name ops = graph.get_operations() all_tensor_names = {output.name for op in", "cat != 'classes': categories.append(cat) f.close() print('Number of categories:', len(categories)) #", "with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH, 'rb') as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def,", "#model output layer name ops = graph.get_operations() all_tensor_names = {output.name", "StrictVersion from collections import defaultdict from io import StringIO from", "import StringIO from matplotlib import pyplot as plt from PIL", "all_tensor_names = {output.name for op in ops for output in", "tf import zipfile from distutils.version import StrictVersion from collections import", "# Run inference info='Time taken to run inference: run_inference_b1:'+str(no_of_run)+' Times:", "scaled from -1 to 1 img /= 255.0 img -=", "line.split('\\n')[0] if cat != 'classes': categories.append(cat) f.close() print('Number of categories:',", "Load_and_process_img(image_filename) key_name='MobilenetV2/Predictions/Reshape_1' result,time_taken=run_inference_b1(key_name,img,detection_graph,1000) print('Time Taken to run Inference is:',time_taken) print(result)", "tarfile import tensorflow as tf import zipfile from distutils.version import", "#image_filename = '../data/' + 'image1.jpg' def Load_and_process_img(image_filename): img = cv2.imread(image_filename)#.astype(numpy.float32)", "# Load the labels #Load categories categories = [] with", "= graph.get_tensor_by_name('input:0') #Demo run, so that graph is loaded into", "loaded into TF memory 
sess.run(tensor_dict,feed_dict={image_tensor: image}) # Run inference info='Time", "the labels #Load categories categories = [] with open('../data/' +", "distutils.version import StrictVersion from collections import defaultdict from io import", "with open('../data/' + 'categories.txt', 'r') as f: for line in", "#print(output_dict) top_inds = output_dict[key_name][0].argsort()[::-1][:5] result=[] for i in range(5): result.append([top_inds[i],", "detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def = tf.GraphDef() with tf.gfile.GFile(PATH_TO_FROZEN_GRAPH,", "reqsize = int(f.readline().split('\\n')[0]) #print(reqsize) #image_filename = '../data/' + 'image1.jpg' def", "image_tensor = graph.get_tensor_by_name('input:0') #Demo run, so that graph is loaded", "op in ops for output in op.outputs} #print(all_tensor_names) tensor_dict =", "image}) end_time=time.time() time_taken=end_time-start_time print(info,time_taken) #print(output_dict) top_inds = output_dict[key_name][0].argsort()[::-1][:5] result=[] for", "cv2 PATH_TO_FROZEN_GRAPH = '../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb' info='Time taken to load Model into", "range(5): result.append([top_inds[i], categories[top_inds[i]], output_dict[key_name][0][top_inds[i]]]) return result, time_taken image_filename = '../data/'", "Image import json import time import cv2 PATH_TO_FROZEN_GRAPH = '../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb'", "output_dict[key_name][0][top_inds[i]]]) return result, time_taken image_filename = '../data/' + 'Tiger.jpg' img", "PIL import Image import json import time import cv2 PATH_TO_FROZEN_GRAPH", "into memory:' start_time=time.time() detection_graph = tf.Graph() with detection_graph.as_default(): od_graph_def =", "import tensorflow as tf import zipfile from distutils.version import StrictVersion", "= {} for key in [key_name]: tensor_name = key +", "(reqsize, reqsize)) img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB) img = img.astype(float) #img 
values are scaled", "in range(no_of_run): output_dict = sess.run(tensor_dict, feed_dict={image_tensor: image}) end_time=time.time() time_taken=end_time-start_time print(info,time_taken)", "'rb') as fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') end_time=time.time()", "that graph is loaded into TF memory sess.run(tensor_dict,feed_dict={image_tensor: image}) #", "= output_dict[key_name][0].argsort()[::-1][:5] result=[] for i in range(5): result.append([top_inds[i], categories[top_inds[i]], output_dict[key_name][0][top_inds[i]]])", "= Load_and_process_img(image_filename) key_name='MobilenetV2/Predictions/Reshape_1' result,time_taken=run_inference_b1(key_name,img,detection_graph,1000) print('Time Taken to run Inference is:',time_taken)", "Load_and_process_img(image_filename): img = cv2.imread(image_filename)#.astype(numpy.float32) img = cv2.resize(img, (reqsize, reqsize)) img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB)", "= line.split('\\n')[0] if cat != 'classes': categories.append(cat) f.close() print('Number of", "# Load image size with open('../data/' + 'inputsize.txt', 'r') as", "{output.name for op in ops for output in op.outputs} #print(all_tensor_names)", "Load image size with open('../data/' + 'inputsize.txt', 'r') as f:", "graph is loaded into TF memory sess.run(tensor_dict,feed_dict={image_tensor: image}) # Run", "import six.moves.urllib as urllib import sys import tarfile import tensorflow", "from collections import defaultdict from io import StringIO from matplotlib", "= '../data/' + 'image1.jpg' def Load_and_process_img(image_filename): img = cv2.imread(image_filename)#.astype(numpy.float32) img", "urllib import sys import tarfile import tensorflow as tf import", "cv2.imread(image_filename)#.astype(numpy.float32) img = cv2.resize(img, (reqsize, reqsize)) img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB) img = img.astype(float)", "for op in ops for output in op.outputs} #print(all_tensor_names) 
tensor_dict", "import os import six.moves.urllib as urllib import sys import tarfile", "'../data/mobilenet_v2_1.4_224/mobilenet_v2_1.4_224_frozen.pb' info='Time taken to load Model into memory:' start_time=time.time() detection_graph", "for key in [key_name]: tensor_name = key + ':0' if", "values are scaled from -1 to 1 img /= 255.0", "is loaded into TF memory sess.run(tensor_dict,feed_dict={image_tensor: image}) # Run inference", "output_dict = sess.run(tensor_dict, feed_dict={image_tensor: image}) end_time=time.time() time_taken=end_time-start_time print(info,time_taken) #print(output_dict) top_inds", "as plt from PIL import Image import json import time", "= cv2.resize(img, (reqsize, reqsize)) img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB) img = img.astype(float) #img values", "in range(5): result.append([top_inds[i], categories[top_inds[i]], output_dict[key_name][0][top_inds[i]]]) return result, time_taken image_filename =", "from PIL import Image import json import time import cv2", "as f: reqsize = int(f.readline().split('\\n')[0]) #print(reqsize) #image_filename = '../data/' +", "numpy as np import os import six.moves.urllib as urllib import", "-= 0.5 img *= 2.0 return img sess=tf.Session(graph=detection_graph) def run_inference_b1(key_name,image,", "len(categories)) # Load image size with open('../data/' + 'inputsize.txt', 'r')", "2.0 return img sess=tf.Session(graph=detection_graph) def run_inference_b1(key_name,image, graph,no_of_run): #model output layer", "graph.get_tensor_by_name('input:0') #Demo run, so that graph is loaded into TF", "return result, time_taken image_filename = '../data/' + 'Tiger.jpg' img =", "info='Time taken to load Model into memory:' start_time=time.time() detection_graph =", "= fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') end_time=time.time() time_taken=end_time-start_time print(info,time_taken) # Load", "= graph.get_tensor_by_name(tensor_name) 
image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2]) image_tensor = graph.get_tensor_by_name('input:0') #Demo run, so that", "to 1 img /= 255.0 img -= 0.5 img *=", "import pyplot as plt from PIL import Image import json", "run inference: run_inference_b1:'+str(no_of_run)+' Times: ' start_time=time.time() for i in range(no_of_run):", "run_inference_b1(key_name,image, graph,no_of_run): #model output layer name ops = graph.get_operations() all_tensor_names", "in f: cat = line.split('\\n')[0] if cat != 'classes': categories.append(cat)", "fid: serialized_graph = fid.read() od_graph_def.ParseFromString(serialized_graph) tf.import_graph_def(od_graph_def, name='') end_time=time.time() time_taken=end_time-start_time print(info,time_taken)", "image size with open('../data/' + 'inputsize.txt', 'r') as f: reqsize", "time_taken image_filename = '../data/' + 'Tiger.jpg' img = Load_and_process_img(image_filename) key_name='MobilenetV2/Predictions/Reshape_1'", "img = cv2.resize(img, (reqsize, reqsize)) img=cv2.cvtColor(img,cv2.COLOR_BGR2RGB) img = img.astype(float) #img", "[] with open('../data/' + 'categories.txt', 'r') as f: for line", "import StrictVersion from collections import defaultdict from io import StringIO", "img = img.astype(float) #img values are scaled from -1 to", "if tensor_name in all_tensor_names: tensor_dict[key] = graph.get_tensor_by_name(tensor_name) image=image.reshape(1,image.shape[0],image.shape[1],image.shape[2]) image_tensor =" ]
[ "data: update_user_setting_filters(data, key, setting) sync_user_settings() def update_user_setting_filters(data, key, user_setting): timespan_map", "frappe.parse_json(setting.get('data')) if data: for key in data: update_user_setting_filters(data, key, setting)", "import frappe, json from frappe.model.utils.user_settings import update_user_settings, sync_user_settings def execute():", "period_map = { 'Previous': 'last', 'Next': 'next' } if data.get(key):", "'1 year': 'year', } period_map = { 'Previous': 'last', 'Next':", "update_user_settings, sync_user_settings def execute(): users = frappe.db.sql(\"select distinct(user) from `__UserSettings`\",", "filters and isinstance(filters, list): for f in filters: if f[2]", "{ '1 week': 'week', '1 month': 'month', '3 months': 'quarter',", "users: user_settings = frappe.db.sql(''' select * from `__UserSettings` where user=\"{user}\"", "setting in user_settings: data = frappe.parse_json(setting.get('data')) if data: for key", "True f[3] = period_map[f[2]] + ' ' + timespan_map[f[3]] f[2]", "'1 week': 'week', '1 month': 'month', '3 months': 'quarter', '6", "+ ' ' + timespan_map[f[3]] f[2] = 'Timespan' if update:", "as_dict=True) for setting in user_settings: data = frappe.parse_json(setting.get('data')) if data:", "= frappe.db.sql(''' select * from `__UserSettings` where user=\"{user}\" '''.format(user =", "'3 months': 'quarter', '6 months': '6 months', '1 year': 'year',", "f[2] = 'Timespan' if update: data[key]['filters'] = filters update_user_settings(user_setting['doctype'], json.dumps(data),", "data.get(key).get('filters') if filters and isinstance(filters, list): for f in filters:", "month': 'month', '3 months': 'quarter', '6 months': '6 months', '1", "'year', } period_map = { 'Previous': 'last', 'Next': 'next' }", "import update_user_settings, sync_user_settings def execute(): users = frappe.db.sql(\"select distinct(user) from", "'month', '3 months': 'quarter', '6 months': '6 months', '1 year':", "months': '6 months', 
'1 year': 'year', } period_map = {", "update = True f[3] = period_map[f[2]] + ' ' +", "from `__UserSettings`\", as_dict=True) for user in users: user_settings = frappe.db.sql('''", "if data: for key in data: update_user_setting_filters(data, key, setting) sync_user_settings()", "False if isinstance(data.get(key), dict): filters = data.get(key).get('filters') if filters and", "= frappe.parse_json(setting.get('data')) if data: for key in data: update_user_setting_filters(data, key,", "filters: if f[2] == 'Next' or f[2] == 'Previous': update", "'Previous': update = True f[3] = period_map[f[2]] + ' '", "'Next' or f[2] == 'Previous': update = True f[3] =", "user in users: user_settings = frappe.db.sql(''' select * from `__UserSettings`", "isinstance(data.get(key), dict): filters = data.get(key).get('filters') if filters and isinstance(filters, list):", "for key in data: update_user_setting_filters(data, key, setting) sync_user_settings() def update_user_setting_filters(data,", "timespan_map = { '1 week': 'week', '1 month': 'month', '3", "= frappe.db.sql(\"select distinct(user) from `__UserSettings`\", as_dict=True) for user in users:", "'Next': 'next' } if data.get(key): update = False if isinstance(data.get(key),", "= period_map[f[2]] + ' ' + timespan_map[f[3]] f[2] = 'Timespan'", "data: for key in data: update_user_setting_filters(data, key, setting) sync_user_settings() def", "} period_map = { 'Previous': 'last', 'Next': 'next' } if", "sync_user_settings() def update_user_setting_filters(data, key, user_setting): timespan_map = { '1 week':", "as_dict=True) for user in users: user_settings = frappe.db.sql(''' select *", "for user in users: user_settings = frappe.db.sql(''' select * from", "'6 months', '1 year': 'year', } period_map = { 'Previous':", "year': 'year', } period_map = { 'Previous': 'last', 'Next': 'next'", "__future__ import unicode_literals import frappe, json from frappe.model.utils.user_settings import update_user_settings,", "in filters: if f[2] == 
'Next' or f[2] == 'Previous':", "' ' + timespan_map[f[3]] f[2] = 'Timespan' if update: data[key]['filters']", "months': 'quarter', '6 months': '6 months', '1 year': 'year', }", "= data.get(key).get('filters') if filters and isinstance(filters, list): for f in", "filters = data.get(key).get('filters') if filters and isinstance(filters, list): for f", "import unicode_literals import frappe, json from frappe.model.utils.user_settings import update_user_settings, sync_user_settings", "f in filters: if f[2] == 'Next' or f[2] ==", "from __future__ import unicode_literals import frappe, json from frappe.model.utils.user_settings import", "user=\"{user}\" '''.format(user = user.user), as_dict=True) for setting in user_settings: data", "`__UserSettings`\", as_dict=True) for user in users: user_settings = frappe.db.sql(''' select", "unicode_literals import frappe, json from frappe.model.utils.user_settings import update_user_settings, sync_user_settings def", "update_user_setting_filters(data, key, user_setting): timespan_map = { '1 week': 'week', '1", "= False if isinstance(data.get(key), dict): filters = data.get(key).get('filters') if filters", "period_map[f[2]] + ' ' + timespan_map[f[3]] f[2] = 'Timespan' if", "} if data.get(key): update = False if isinstance(data.get(key), dict): filters", "= user.user), as_dict=True) for setting in user_settings: data = frappe.parse_json(setting.get('data'))", "def update_user_setting_filters(data, key, user_setting): timespan_map = { '1 week': 'week',", "'last', 'Next': 'next' } if data.get(key): update = False if", "user.user), as_dict=True) for setting in user_settings: data = frappe.parse_json(setting.get('data')) if", "frappe.db.sql(\"select distinct(user) from `__UserSettings`\", as_dict=True) for user in users: user_settings", "frappe.db.sql(''' select * from `__UserSettings` where user=\"{user}\" '''.format(user = user.user),", "frappe.model.utils.user_settings import update_user_settings, sync_user_settings def execute(): 
users = frappe.db.sql(\"select distinct(user)", "users = frappe.db.sql(\"select distinct(user) from `__UserSettings`\", as_dict=True) for user in", "distinct(user) from `__UserSettings`\", as_dict=True) for user in users: user_settings =", "+ timespan_map[f[3]] f[2] = 'Timespan' if update: data[key]['filters'] = filters", "= 'Timespan' if update: data[key]['filters'] = filters update_user_settings(user_setting['doctype'], json.dumps(data), for_update=True)", "f[2] == 'Previous': update = True f[3] = period_map[f[2]] +", "'6 months': '6 months', '1 year': 'year', } period_map =", "for setting in user_settings: data = frappe.parse_json(setting.get('data')) if data: for", "select * from `__UserSettings` where user=\"{user}\" '''.format(user = user.user), as_dict=True)", "'1 month': 'month', '3 months': 'quarter', '6 months': '6 months',", "where user=\"{user}\" '''.format(user = user.user), as_dict=True) for setting in user_settings:", "{ 'Previous': 'last', 'Next': 'next' } if data.get(key): update =", "in data: update_user_setting_filters(data, key, setting) sync_user_settings() def update_user_setting_filters(data, key, user_setting):", "update = False if isinstance(data.get(key), dict): filters = data.get(key).get('filters') if", "def execute(): users = frappe.db.sql(\"select distinct(user) from `__UserSettings`\", as_dict=True) for", "execute(): users = frappe.db.sql(\"select distinct(user) from `__UserSettings`\", as_dict=True) for user", "isinstance(filters, list): for f in filters: if f[2] == 'Next'", "= True f[3] = period_map[f[2]] + ' ' + timespan_map[f[3]]", "in users: user_settings = frappe.db.sql(''' select * from `__UserSettings` where", "= { 'Previous': 'last', 'Next': 'next' } if data.get(key): update", "json from frappe.model.utils.user_settings import update_user_settings, sync_user_settings def execute(): users =", "or f[2] == 'Previous': update = True f[3] = period_map[f[2]]", "key, setting) sync_user_settings() def 
update_user_setting_filters(data, key, user_setting): timespan_map = {", "== 'Next' or f[2] == 'Previous': update = True f[3]", "`__UserSettings` where user=\"{user}\" '''.format(user = user.user), as_dict=True) for setting in", "sync_user_settings def execute(): users = frappe.db.sql(\"select distinct(user) from `__UserSettings`\", as_dict=True)", "setting) sync_user_settings() def update_user_setting_filters(data, key, user_setting): timespan_map = { '1", "user_settings: data = frappe.parse_json(setting.get('data')) if data: for key in data:", "'quarter', '6 months': '6 months', '1 year': 'year', } period_map", "if f[2] == 'Next' or f[2] == 'Previous': update =", "* from `__UserSettings` where user=\"{user}\" '''.format(user = user.user), as_dict=True) for", "from frappe.model.utils.user_settings import update_user_settings, sync_user_settings def execute(): users = frappe.db.sql(\"select", "key in data: update_user_setting_filters(data, key, setting) sync_user_settings() def update_user_setting_filters(data, key,", "key, user_setting): timespan_map = { '1 week': 'week', '1 month':", "f[2] == 'Next' or f[2] == 'Previous': update = True", "'Previous': 'last', 'Next': 'next' } if data.get(key): update = False", "= { '1 week': 'week', '1 month': 'month', '3 months':", "list): for f in filters: if f[2] == 'Next' or", "dict): filters = data.get(key).get('filters') if filters and isinstance(filters, list): for", "f[3] = period_map[f[2]] + ' ' + timespan_map[f[3]] f[2] =", "and isinstance(filters, list): for f in filters: if f[2] ==", "data = frappe.parse_json(setting.get('data')) if data: for key in data: update_user_setting_filters(data,", "for f in filters: if f[2] == 'Next' or f[2]", "'week', '1 month': 'month', '3 months': 'quarter', '6 months': '6", "frappe, json from frappe.model.utils.user_settings import update_user_settings, sync_user_settings def execute(): users", "update_user_setting_filters(data, key, setting) sync_user_settings() def 
update_user_setting_filters(data, key, user_setting): timespan_map =", "from `__UserSettings` where user=\"{user}\" '''.format(user = user.user), as_dict=True) for setting", "if data.get(key): update = False if isinstance(data.get(key), dict): filters =", "timespan_map[f[3]] f[2] = 'Timespan' if update: data[key]['filters'] = filters update_user_settings(user_setting['doctype'],", "week': 'week', '1 month': 'month', '3 months': 'quarter', '6 months':", "user_setting): timespan_map = { '1 week': 'week', '1 month': 'month',", "'next' } if data.get(key): update = False if isinstance(data.get(key), dict):", "if isinstance(data.get(key), dict): filters = data.get(key).get('filters') if filters and isinstance(filters,", "in user_settings: data = frappe.parse_json(setting.get('data')) if data: for key in", "== 'Previous': update = True f[3] = period_map[f[2]] + '", "data.get(key): update = False if isinstance(data.get(key), dict): filters = data.get(key).get('filters')", "' + timespan_map[f[3]] f[2] = 'Timespan' if update: data[key]['filters'] =", "'''.format(user = user.user), as_dict=True) for setting in user_settings: data =", "if filters and isinstance(filters, list): for f in filters: if", "user_settings = frappe.db.sql(''' select * from `__UserSettings` where user=\"{user}\" '''.format(user", "months', '1 year': 'year', } period_map = { 'Previous': 'last'," ]
[ "quantum numbers and default temperature and frequency N = 10*10", "print(\"{:08d} / {:08d}\".format(current_step, total_steps), end=\" \\r\") # After 'checkpoint_steps', show", "label=\"Sampled\" ) plt.plot( n, gas.p_n(n), label=\"Theor.\" ) plt.xlabel('n') plt.ylabel('P(n)') plt.legend()", "# 1e-3 times the weights (to within an order of", "validation set after 100 steps v_prime = machine.generate(validation_set, 100, a=a,", "clear_output from PIL import Image import matplotlib.pyplot as plt import", "np.rint(v_prime*10) plt.hist( generated_quantum_numbers.flatten(), bins=np.arange(0,10), density=True, label=\"Sampled\" ) plt.plot( n, gas.p_n(n),", "{np.mean(np.fabs(values))}\") plotit(fig.add_subplot(gs[0,0]), a, 'a') plotit(fig.add_subplot(gs[0,1]), w.flatten(), 'w') plotit(fig.add_subplot(gs[0,2]), b, 'b')", "the amount of steps was set by trial and error", "plots training_set = gas.generate(amount=training_size, n_max=n_max) m = bm.BoltzmannMachine(num_hidden=hidden_units) a,b,w =", "F_validation={np.average(free_valdation)}\\n\") # Show. # CAUTION! 
This will freeze the execution", "plot histogram of initial vs generated n = np.arange(0,10) generated_quantum_numbers", "plot_training(v, v_prime, eta, a, b, w, da, db, dw): clear_output(wait=True)", "300000 # define the validation set to be used in", "weights (to within an order of magnitude) eta = 0.005", "= Image.fromarray(hMean * 256).show() # Create the grid for all", "vs generated n = np.arange(0,10) generated_quantum_numbers = np.rint(v_prime*10) plt.hist( generated_quantum_numbers.flatten(),", "= 10*10 gas = qho.QHOGas(N=N) n_max = 10 training_size =", "a, b, w, da, db, dw): # Every now and", "m.train(training_set, batchsize=batchsize, eta=eta, nsteps=nsteps, do_while_training=None) # Store in a file", "up for the state v hMean = sigmoid(np.dot(v, w) +", "we want plt.rcParams.update({'font.size': 2}) # plot histogram of initial vs", "= machine.generate(validation_set, 100, a=a, b=b, w=w) # print useful plots", "default temperature and frequency N = 10*10 gas = qho.QHOGas(N=N)", "= 300000 # define the validation set to be used", "d_visible, d_hidden, d_weights plotit(fig.add_subplot(gs[1,0]), eta*da, 'da') plotit(fig.add_subplot(gs[1,1]), eta*dw.flatten(), 'dw') plotit(fig.add_subplot(gs[1,2]),", "d_weights plotit(fig.add_subplot(gs[1,0]), eta*da, 'da') plotit(fig.add_subplot(gs[1,1]), eta*dw.flatten(), 'dw') plotit(fig.add_subplot(gs[1,2]), eta*db, 'db')", "let us know that the training # is still running", "the boltzmann machine and train it while visualizing the suggested", "suggests to set the batchsize to 10, though it can", "units was set by trial and error hidden_units = 70", "and error nsteps = 300000 # define the validation set", "gas with N particles, a limit of 10 for the", "dw) def plot_training(v, v_prime, eta, a, b, w, da, db,", "numpy as np import datetime # Visualization imports from IPython.display", "10000 if current_step%checkpoint_steps == 0 or current_step == total_steps-1: print(f\"Showing", "10, though it can range # from 10 to 100", 
"100000 # the amount of hidden units was set by", "trial and error hidden_units = 70 # the recipe suggests", "suggests a learning rate that makes the weight updates about", "set after 100 steps v_prime = machine.generate(validation_set, 100, a=a, b=b,", "import datetime # Visualization imports from IPython.display import clear_output from", "and then (every 50k steps), let us know that the", "to 100 batchsize = 10 # the recipe suggests a", "a limit of 10 for the # quantum numbers and", "plt.ylabel('P(n)') plt.legend() # plot histogram of visible, hidden, weights fig", "import Image import matplotlib.pyplot as plt import matplotlib matplotlib.rcParams['figure.dpi']=300 def", "current_step%checkpoint_steps == 0 or current_step == total_steps-1: print(f\"Showing at step", "label=\"Theor.\" ) plt.xlabel('n') plt.ylabel('P(n)') plt.legend() # plot histogram of visible,", "dw): clear_output(wait=True) # Show how the weights light up for", "the # quantum numbers and default temperature and frequency N", "Show. # CAUTION! 
This will freeze the execution plt.show() #", "training_set = gas.generate(amount=training_size, n_max=n_max) m = bm.BoltzmannMachine(num_hidden=hidden_units) a,b,w = m.train(training_set,", "Every now and then (every 50k steps), let us know", "np.arange(0,10) generated_quantum_numbers = np.rint(v_prime*10) plt.hist( generated_quantum_numbers.flatten(), bins=np.arange(0,10), density=True, label=\"Sampled\" )", "Produce a sample starting from the validation set after 100", "# Every now and then (every 50k steps), let us", "that the training # is still running if current_step%50000 ==", "= 10 training_size = 100000 # the amount of hidden", "/ 2.)) # Set the quantum gas with N particles,", "steps was set by trial and error nsteps = 300000", "plotit(fig.add_subplot(gs[0,2]), b, 'b') # plot histogram of d_visible, d_hidden, d_weights", "training # is still running if current_step%50000 == 0: print(\"{:08d}", "the other plots we want plt.rcParams.update({'font.size': 2}) # plot histogram", "np import datetime # Visualization imports from IPython.display import clear_output", "plotit(fig.add_subplot(gs[1,0]), eta*da, 'da') plotit(fig.add_subplot(gs[1,1]), eta*dw.flatten(), 'dw') plotit(fig.add_subplot(gs[1,2]), eta*db, 'db') #", "set by trial and error nsteps = 300000 # define", "= gas.generate(amount=training_size, n_max=n_max) m = bm.BoltzmannMachine(num_hidden=hidden_units) a,b,w = m.train(training_set, batchsize=batchsize,", "for the state v hMean = sigmoid(np.dot(v, w) + b)", "0: print(\"{:08d} / {:08d}\".format(current_step, total_steps), end=\" \\r\") # After 'checkpoint_steps',", "the quantum gas with N particles, a limit of 10", "free_valdation = -np.dot(v_prime, a) - np.sum( np.log(1 + np.exp(x(v_prime))), axis=1)", "# the recipe suggests a learning rate that makes the", "values, title): axis.hist(values) axis.set_title(f\"{title}: mm = {np.mean(np.fabs(values))}\") plotit(fig.add_subplot(gs[0,0]), a, 'a')", "db, dw): clear_output(wait=True) # Show how the 
weights light up", "np.log(1 + np.exp(x(v_prime))), axis=1) print(f\"\\nF_training={np.average(free_training)} vs F_validation={np.average(free_valdation)}\\n\") # Show. #", "plot_training(validation_set, v_prime, eta, a, b, w, da, db, dw) def", "plt.show() # Init the boltzmann machine and train it while", "plotit(fig.add_subplot(gs[0,1]), w.flatten(), 'w') plotit(fig.add_subplot(gs[0,2]), b, 'b') # plot histogram of", "70 # the recipe suggests to set the batchsize to", "histogram of d_visible, d_hidden, d_weights plotit(fig.add_subplot(gs[1,0]), eta*da, 'da') plotit(fig.add_subplot(gs[1,1]), eta*dw.flatten(),", "= 0.005 # the amount of steps was set by", "frequency N = 10*10 gas = qho.QHOGas(N=N) n_max = 10", "print(f\"\\nF_training={np.average(free_training)} vs F_validation={np.average(free_valdation)}\\n\") # Show. # CAUTION! This will freeze", "# plot histogram of visible, hidden, weights fig = plt.figure(constrained_layout=True)", "Create the grid for all the other plots we want", "w=w) # print useful plots for training plot_training(validation_set, v_prime, eta,", "a sample starting from the validation set after 100 steps", "def sigmoid(x): return .5 * (1 + np.tanh(x / 2.))", "energies of the average of samples x = lambda vv", "# print useful plots for training plot_training(validation_set, v_prime, eta, a,", "gas.generate(amount=training_size, n_max=n_max) m = bm.BoltzmannMachine(num_hidden=hidden_units) a,b,w = m.train(training_set, batchsize=batchsize, eta=eta,", "eta, a, b, w, da, db, dw) def plot_training(v, v_prime,", "the grid for all the other plots we want plt.rcParams.update({'font.size':", "and frequency N = 10*10 gas = qho.QHOGas(N=N) n_max =", "Image import matplotlib.pyplot as plt import matplotlib matplotlib.rcParams['figure.dpi']=300 def sigmoid(x):", "{:08d}\".format(current_step, total_steps), end=\" \\r\") # After 'checkpoint_steps', show the suggested", "to set the batchsize to 10, though it can range", "1e-3 times the weights (to within 
an order of magnitude)", "# quantum numbers and default temperature and frequency N =", "10*10 gas = qho.QHOGas(N=N) n_max = 10 training_size = 100000", "magnitude) eta = 0.005 # the amount of steps was", "imports from IPython.display import clear_output from PIL import Image import", "# show free energies of the average of samples x", "limit of 10 for the # quantum numbers and default", "# CAUTION! This will freeze the execution plt.show() # Init", "free_training = -np.dot(v, a) - np.sum( np.log(1 + np.exp(x(v))), axis=1)", "steps), let us know that the training # is still", "np.log(1 + np.exp(x(v))), axis=1) free_valdation = -np.dot(v_prime, a) - np.sum(", "fig.add_gridspec(ncols=3, nrows=2) def plotit(axis, values, title): axis.hist(values) axis.set_title(f\"{title}: mm =", "now and then (every 50k steps), let us know that", "useful plots for training plot_training(validation_set, v_prime, eta, a, b, w,", "current_step%50000 == 0: print(\"{:08d} / {:08d}\".format(current_step, total_steps), end=\" \\r\") #", "fig = plt.figure(constrained_layout=True) gs = fig.add_gridspec(ncols=3, nrows=2) def plotit(axis, values,", "starting from the validation set after 100 steps v_prime =", "while visualizing the suggested plots training_set = gas.generate(amount=training_size, n_max=n_max) m", "plotit(axis, values, title): axis.hist(values) axis.set_title(f\"{title}: mm = {np.mean(np.fabs(values))}\") plotit(fig.add_subplot(gs[0,0]), a,", "'b') # plot histogram of d_visible, d_hidden, d_weights plotit(fig.add_subplot(gs[1,0]), eta*da,", "(to within an order of magnitude) eta = 0.005 #", "of hidden units was set by trial and error hidden_units", "lambda vv : b + np.dot(vv, w) free_training = -np.dot(v,", "recipe suggests to set the batchsize to 10, though it", "* (1 + np.tanh(x / 2.)) # Set the quantum", "np.sum( np.log(1 + np.exp(x(v))), axis=1) free_valdation = -np.dot(v_prime, a) -", "# Show. # CAUTION! 
This will freeze the execution plt.show()", "the validation set after 100 steps v_prime = machine.generate(validation_set, 100,", "bm import QHO as qho import numpy as np import", "* 256).show() # Create the grid for all the other", "or current_step == total_steps-1: print(f\"Showing at step {current_step}.\") # Produce", "other plots we want plt.rcParams.update({'font.size': 2}) # plot histogram of", "eta, a, b, w, da, db, dw): # Every now", "eta = 0.005 # the amount of steps was set", "training_visualization(machine, current_step, total_steps, eta, a, b, w, da, db, dw):", "will freeze the execution plt.show() # Init the boltzmann machine", "= {np.mean(np.fabs(values))}\") plotit(fig.add_subplot(gs[0,0]), a, 'a') plotit(fig.add_subplot(gs[0,1]), w.flatten(), 'w') plotit(fig.add_subplot(gs[0,2]), b,", "import numpy as np import datetime # Visualization imports from", "Image.fromarray(hMean * 256).show() # Create the grid for all the", "a file run_id = int(datetime.datetime.now().timestamp()) np.savetxt(f\"a_{run_id}.csv\", a, delimiter=',') np.savetxt(f\"b_{run_id}.csv\", b,", "vv : b + np.dot(vv, w) free_training = -np.dot(v, a)", "import QHO as qho import numpy as np import datetime", "though it can range # from 10 to 100 batchsize", "show free energies of the average of samples x =", "samples x = lambda vv : b + np.dot(vv, w)", "do_while_training=None) # Store in a file run_id = int(datetime.datetime.now().timestamp()) np.savetxt(f\"a_{run_id}.csv\",", "b, w, da, db, dw): # Every now and then", "v_prime = machine.generate(validation_set, 100, a=a, b=b, w=w) # print useful", "n_max=n_max) m = bm.BoltzmannMachine(num_hidden=hidden_units) a,b,w = m.train(training_set, batchsize=batchsize, eta=eta, nsteps=nsteps,", "matplotlib.rcParams['figure.dpi']=300 def sigmoid(x): return .5 * (1 + np.tanh(x /", "w, da, db, dw): clear_output(wait=True) # Show how the weights", "of steps was set by trial and error nsteps =", "for all the other plots we want 
plt.rcParams.update({'font.size': 2}) #", "that makes the weight updates about # 1e-3 times the", "initial vs generated n = np.arange(0,10) generated_quantum_numbers = np.rint(v_prime*10) plt.hist(", "a learning rate that makes the weight updates about #", "'a') plotit(fig.add_subplot(gs[0,1]), w.flatten(), 'w') plotit(fig.add_subplot(gs[0,2]), b, 'b') # plot histogram", "and train it while visualizing the suggested plots training_set =", "the suggested plots checkpoint_steps = 10000 if current_step%checkpoint_steps == 0", "(1 + np.tanh(x / 2.)) # Set the quantum gas", "from the validation set after 100 steps v_prime = machine.generate(validation_set,", "validation set to be used in training_visualization validation_set = gas.generate(amount=20)", "order of magnitude) eta = 0.005 # the amount of", "at step {current_step}.\") # Produce a sample starting from the", "d_hidden, d_weights plotit(fig.add_subplot(gs[1,0]), eta*da, 'da') plotit(fig.add_subplot(gs[1,1]), eta*dw.flatten(), 'dw') plotit(fig.add_subplot(gs[1,2]), eta*db,", "nrows=2) def plotit(axis, values, title): axis.hist(values) axis.set_title(f\"{title}: mm = {np.mean(np.fabs(values))}\")", ") plt.plot( n, gas.p_n(n), label=\"Theor.\" ) plt.xlabel('n') plt.ylabel('P(n)') plt.legend() #", "as qho import numpy as np import datetime # Visualization", "batchsize = 10 # the recipe suggests a learning rate", "= fig.add_gridspec(ncols=3, nrows=2) def plotit(axis, values, title): axis.hist(values) axis.set_title(f\"{title}: mm", "(every 50k steps), let us know that the training #", "eta*da, 'da') plotit(fig.add_subplot(gs[1,1]), eta*dw.flatten(), 'dw') plotit(fig.add_subplot(gs[1,2]), eta*db, 'db') # show", "# plot histogram of d_visible, d_hidden, d_weights plotit(fig.add_subplot(gs[1,0]), eta*da, 'da')", "IPython.display import clear_output from PIL import Image import matplotlib.pyplot as", "n = np.arange(0,10) generated_quantum_numbers = np.rint(v_prime*10) plt.hist( generated_quantum_numbers.flatten(), 
bins=np.arange(0,10), density=True,", "w, da, db, dw): # Every now and then (every", "n_max = 10 training_size = 100000 # the amount of", "N = 10*10 gas = qho.QHOGas(N=N) n_max = 10 training_size", "0 or current_step == total_steps-1: print(f\"Showing at step {current_step}.\") #", "set by trial and error hidden_units = 70 # the", "by trial and error nsteps = 300000 # define the", "trial and error nsteps = 300000 # define the validation", "weights light up for the state v hMean = sigmoid(np.dot(v,", "validation_set = gas.generate(amount=20) def training_visualization(machine, current_step, total_steps, eta, a, b,", "10 for the # quantum numbers and default temperature and", "np.dot(vv, w) free_training = -np.dot(v, a) - np.sum( np.log(1 +", "plots we want plt.rcParams.update({'font.size': 2}) # plot histogram of initial", "plots for training plot_training(validation_set, v_prime, eta, a, b, w, da,", "can range # from 10 to 100 batchsize = 10", "Show how the weights light up for the state v", "recipe suggests a learning rate that makes the weight updates", "x = lambda vv : b + np.dot(vv, w) free_training", "of visible, hidden, weights fig = plt.figure(constrained_layout=True) gs = fig.add_gridspec(ncols=3,", "100 batchsize = 10 # the recipe suggests a learning", "m = bm.BoltzmannMachine(num_hidden=hidden_units) a,b,w = m.train(training_set, batchsize=batchsize, eta=eta, nsteps=nsteps, do_while_training=None)", "b) image = Image.fromarray(hMean * 256).show() # Create the grid", "b, w, da, db, dw) def plot_training(v, v_prime, eta, a,", "if current_step%50000 == 0: print(\"{:08d} / {:08d}\".format(current_step, total_steps), end=\" \\r\")", "after 100 steps v_prime = machine.generate(validation_set, 100, a=a, b=b, w=w)", "grid for all the other plots we want plt.rcParams.update({'font.size': 2})", "eta=eta, nsteps=nsteps, do_while_training=None) # Store in a file run_id =", "by trial and error hidden_units = 70 # the recipe", "bins=np.arange(0,10), density=True, 
label=\"Sampled\" ) plt.plot( n, gas.p_n(n), label=\"Theor.\" ) plt.xlabel('n')", "nsteps = 300000 # define the validation set to be", "Visualization imports from IPython.display import clear_output from PIL import Image", "def plotit(axis, values, title): axis.hist(values) axis.set_title(f\"{title}: mm = {np.mean(np.fabs(values))}\") plotit(fig.add_subplot(gs[0,0]),", "256).show() # Create the grid for all the other plots", "an order of magnitude) eta = 0.005 # the amount", "= int(datetime.datetime.now().timestamp()) np.savetxt(f\"a_{run_id}.csv\", a, delimiter=',') np.savetxt(f\"b_{run_id}.csv\", b, delimiter=',') np.savetxt(f\"w_{run_id}.csv\", w,", "the recipe suggests to set the batchsize to 10, though", "from PIL import Image import matplotlib.pyplot as plt import matplotlib", "about # 1e-3 times the weights (to within an order", "the batchsize to 10, though it can range # from", "the weight updates about # 1e-3 times the weights (to", "average of samples x = lambda vv : b +", "amount of steps was set by trial and error nsteps", "if current_step%checkpoint_steps == 0 or current_step == total_steps-1: print(f\"Showing at", "nsteps=nsteps, do_while_training=None) # Store in a file run_id = int(datetime.datetime.now().timestamp())", "b, 'b') # plot histogram of d_visible, d_hidden, d_weights plotit(fig.add_subplot(gs[1,0]),", "of d_visible, d_hidden, d_weights plotit(fig.add_subplot(gs[1,0]), eta*da, 'da') plotit(fig.add_subplot(gs[1,1]), eta*dw.flatten(), 'dw')", "of samples x = lambda vv : b + np.dot(vv,", "-np.dot(v, a) - np.sum( np.log(1 + np.exp(x(v))), axis=1) free_valdation =", "sample starting from the validation set after 100 steps v_prime", "in training_visualization validation_set = gas.generate(amount=20) def training_visualization(machine, current_step, total_steps, eta,", "used in training_visualization validation_set = gas.generate(amount=20) def training_visualization(machine, current_step, total_steps,", "np.exp(x(v_prime))), axis=1) 
print(f\"\\nF_training={np.average(free_training)} vs F_validation={np.average(free_valdation)}\\n\") # Show. # CAUTION! This", "= np.rint(v_prime*10) plt.hist( generated_quantum_numbers.flatten(), bins=np.arange(0,10), density=True, label=\"Sampled\" ) plt.plot( n,", "the weights light up for the state v hMean =", "error hidden_units = 70 # the recipe suggests to set", "free energies of the average of samples x = lambda", "0.005 # the amount of steps was set by trial", "running if current_step%50000 == 0: print(\"{:08d} / {:08d}\".format(current_step, total_steps), end=\"", "plt import matplotlib matplotlib.rcParams['figure.dpi']=300 def sigmoid(x): return .5 * (1", "100 steps v_prime = machine.generate(validation_set, 100, a=a, b=b, w=w) #", "print useful plots for training plot_training(validation_set, v_prime, eta, a, b,", "After 'checkpoint_steps', show the suggested plots checkpoint_steps = 10000 if", "bm.BoltzmannMachine(num_hidden=hidden_units) a,b,w = m.train(training_set, batchsize=batchsize, eta=eta, nsteps=nsteps, do_while_training=None) # Store", "the average of samples x = lambda vv : b", "all the other plots we want plt.rcParams.update({'font.size': 2}) # plot", "training plot_training(validation_set, v_prime, eta, a, b, w, da, db, dw)", "training_size = 100000 # the amount of hidden units was", "plt.legend() # plot histogram of visible, hidden, weights fig =", "learning rate that makes the weight updates about # 1e-3", "a,b,w = m.train(training_set, batchsize=batchsize, eta=eta, nsteps=nsteps, do_while_training=None) # Store in", "plt.figure(constrained_layout=True) gs = fig.add_gridspec(ncols=3, nrows=2) def plotit(axis, values, title): axis.hist(values)", "the training # is still running if current_step%50000 == 0:", "= plt.figure(constrained_layout=True) gs = fig.add_gridspec(ncols=3, nrows=2) def plotit(axis, values, title):", "of 10 for the # quantum numbers and default temperature", "makes the weight updates about # 1e-3 times the weights", 
"import BoltzmannMachine as bm import QHO as qho import numpy", "weights fig = plt.figure(constrained_layout=True) gs = fig.add_gridspec(ncols=3, nrows=2) def plotit(axis,", "gas.p_n(n), label=\"Theor.\" ) plt.xlabel('n') plt.ylabel('P(n)') plt.legend() # plot histogram of", "range # from 10 to 100 batchsize = 10 #", "qho.QHOGas(N=N) n_max = 10 training_size = 100000 # the amount", "and error hidden_units = 70 # the recipe suggests to", "'w') plotit(fig.add_subplot(gs[0,2]), b, 'b') # plot histogram of d_visible, d_hidden,", "b=b, w=w) # print useful plots for training plot_training(validation_set, v_prime,", "as np import datetime # Visualization imports from IPython.display import", "gs = fig.add_gridspec(ncols=3, nrows=2) def plotit(axis, values, title): axis.hist(values) axis.set_title(f\"{title}:", "db, dw) def plot_training(v, v_prime, eta, a, b, w, da,", "BoltzmannMachine as bm import QHO as qho import numpy as", ") plt.xlabel('n') plt.ylabel('P(n)') plt.legend() # plot histogram of visible, hidden,", "how the weights light up for the state v hMean", "qho import numpy as np import datetime # Visualization imports", "from 10 to 100 batchsize = 10 # the recipe", "n, gas.p_n(n), label=\"Theor.\" ) plt.xlabel('n') plt.ylabel('P(n)') plt.legend() # plot histogram", "the amount of hidden units was set by trial and", "+ np.exp(x(v_prime))), axis=1) print(f\"\\nF_training={np.average(free_training)} vs F_validation={np.average(free_valdation)}\\n\") # Show. 
# CAUTION!", "generated_quantum_numbers.flatten(), bins=np.arange(0,10), density=True, label=\"Sampled\" ) plt.plot( n, gas.p_n(n), label=\"Theor.\" )", "machine and train it while visualizing the suggested plots training_set", "rate that makes the weight updates about # 1e-3 times", "it while visualizing the suggested plots training_set = gas.generate(amount=training_size, n_max=n_max)", "+ np.exp(x(v))), axis=1) free_valdation = -np.dot(v_prime, a) - np.sum( np.log(1", "know that the training # is still running if current_step%50000", "eta*db, 'db') # show free energies of the average of", "current_step == total_steps-1: print(f\"Showing at step {current_step}.\") # Produce a", "file run_id = int(datetime.datetime.now().timestamp()) np.savetxt(f\"a_{run_id}.csv\", a, delimiter=',') np.savetxt(f\"b_{run_id}.csv\", b, delimiter=',')", "temperature and frequency N = 10*10 gas = qho.QHOGas(N=N) n_max", "b + np.dot(vv, w) free_training = -np.dot(v, a) - np.sum(", "= -np.dot(v_prime, a) - np.sum( np.log(1 + np.exp(x(v_prime))), axis=1) print(f\"\\nF_training={np.average(free_training)}", "density=True, label=\"Sampled\" ) plt.plot( n, gas.p_n(n), label=\"Theor.\" ) plt.xlabel('n') plt.ylabel('P(n)')", "2.)) # Set the quantum gas with N particles, a", ": b + np.dot(vv, w) free_training = -np.dot(v, a) -", "as plt import matplotlib matplotlib.rcParams['figure.dpi']=300 def sigmoid(x): return .5 *", "Init the boltzmann machine and train it while visualizing the", "end=\" \\r\") # After 'checkpoint_steps', show the suggested plots checkpoint_steps", "train it while visualizing the suggested plots training_set = gas.generate(amount=training_size,", "= gas.generate(amount=20) def training_visualization(machine, current_step, total_steps, eta, a, b, w,", "of magnitude) eta = 0.005 # the amount of steps", "plt.plot( n, gas.p_n(n), label=\"Theor.\" ) plt.xlabel('n') plt.ylabel('P(n)') plt.legend() # plot", "100, a=a, b=b, w=w) # print useful plots for training", "= 10000 if 
current_step%checkpoint_steps == 0 or current_step == total_steps-1:", "of the average of samples x = lambda vv :", "# the amount of steps was set by trial and", "= -np.dot(v, a) - np.sum( np.log(1 + np.exp(x(v))), axis=1) free_valdation", "dw): # Every now and then (every 50k steps), let", "w) + b) image = Image.fromarray(hMean * 256).show() # Create", "as bm import QHO as qho import numpy as np", "10 training_size = 100000 # the amount of hidden units", "'dw') plotit(fig.add_subplot(gs[1,2]), eta*db, 'db') # show free energies of the", "the execution plt.show() # Init the boltzmann machine and train", "da, db, dw) def plot_training(v, v_prime, eta, a, b, w,", "suggested plots checkpoint_steps = 10000 if current_step%checkpoint_steps == 0 or", "freeze the execution plt.show() # Init the boltzmann machine and", "axis=1) print(f\"\\nF_training={np.average(free_training)} vs F_validation={np.average(free_valdation)}\\n\") # Show. # CAUTION! This will", "# plot histogram of initial vs generated n = np.arange(0,10)", "eta, a, b, w, da, db, dw): clear_output(wait=True) # Show", "# Init the boltzmann machine and train it while visualizing", "10 # the recipe suggests a learning rate that makes", "the weights (to within an order of magnitude) eta =", "v_prime, eta, a, b, w, da, db, dw): clear_output(wait=True) #", "show the suggested plots checkpoint_steps = 10000 if current_step%checkpoint_steps ==", "run_id = int(datetime.datetime.now().timestamp()) np.savetxt(f\"a_{run_id}.csv\", a, delimiter=',') np.savetxt(f\"b_{run_id}.csv\", b, delimiter=',') np.savetxt(f\"w_{run_id}.csv\",", "# Store in a file run_id = int(datetime.datetime.now().timestamp()) np.savetxt(f\"a_{run_id}.csv\", a,", "= 100000 # the amount of hidden units was set", "times the weights (to within an order of magnitude) eta", "= bm.BoltzmannMachine(num_hidden=hidden_units) a,b,w = m.train(training_set, batchsize=batchsize, eta=eta, nsteps=nsteps, do_while_training=None) #", "a) - np.sum( np.log(1 + 
np.exp(x(v_prime))), axis=1) print(f\"\\nF_training={np.average(free_training)} vs F_validation={np.average(free_valdation)}\\n\")", "'checkpoint_steps', show the suggested plots checkpoint_steps = 10000 if current_step%checkpoint_steps", "\\r\") # After 'checkpoint_steps', show the suggested plots checkpoint_steps =", "v_prime, eta, a, b, w, da, db, dw) def plot_training(v,", "define the validation set to be used in training_visualization validation_set", "visible, hidden, weights fig = plt.figure(constrained_layout=True) gs = fig.add_gridspec(ncols=3, nrows=2)", "v hMean = sigmoid(np.dot(v, w) + b) image = Image.fromarray(hMean", "# is still running if current_step%50000 == 0: print(\"{:08d} /", "was set by trial and error hidden_units = 70 #", "mm = {np.mean(np.fabs(values))}\") plotit(fig.add_subplot(gs[0,0]), a, 'a') plotit(fig.add_subplot(gs[0,1]), w.flatten(), 'w') plotit(fig.add_subplot(gs[0,2]),", "visualizing the suggested plots training_set = gas.generate(amount=training_size, n_max=n_max) m =", "w) free_training = -np.dot(v, a) - np.sum( np.log(1 + np.exp(x(v))),", "# the amount of hidden units was set by trial", "int(datetime.datetime.now().timestamp()) np.savetxt(f\"a_{run_id}.csv\", a, delimiter=',') np.savetxt(f\"b_{run_id}.csv\", b, delimiter=',') np.savetxt(f\"w_{run_id}.csv\", w, delimiter=',')", "datetime # Visualization imports from IPython.display import clear_output from PIL", "gas = qho.QHOGas(N=N) n_max = 10 training_size = 100000 #", "hidden, weights fig = plt.figure(constrained_layout=True) gs = fig.add_gridspec(ncols=3, nrows=2) def", "plot histogram of d_visible, d_hidden, d_weights plotit(fig.add_subplot(gs[1,0]), eta*da, 'da') plotit(fig.add_subplot(gs[1,1]),", "gas.generate(amount=20) def training_visualization(machine, current_step, total_steps, eta, a, b, w, da,", "generated n = np.arange(0,10) generated_quantum_numbers = np.rint(v_prime*10) plt.hist( generated_quantum_numbers.flatten(), bins=np.arange(0,10),", "# the recipe 
suggests to set the batchsize to 10,", "batchsize=batchsize, eta=eta, nsteps=nsteps, do_while_training=None) # Store in a file run_id", "vs F_validation={np.average(free_valdation)}\\n\") # Show. # CAUTION! This will freeze the", "training_visualization validation_set = gas.generate(amount=20) def training_visualization(machine, current_step, total_steps, eta, a,", "numbers and default temperature and frequency N = 10*10 gas", "hMean = sigmoid(np.dot(v, w) + b) image = Image.fromarray(hMean *", "plotit(fig.add_subplot(gs[1,1]), eta*dw.flatten(), 'dw') plotit(fig.add_subplot(gs[1,2]), eta*db, 'db') # show free energies", "# Create the grid for all the other plots we", "eta*dw.flatten(), 'dw') plotit(fig.add_subplot(gs[1,2]), eta*db, 'db') # show free energies of", "plotit(fig.add_subplot(gs[0,0]), a, 'a') plotit(fig.add_subplot(gs[0,1]), w.flatten(), 'w') plotit(fig.add_subplot(gs[0,2]), b, 'b') #", "QHO as qho import numpy as np import datetime #", "# define the validation set to be used in training_visualization", "of initial vs generated n = np.arange(0,10) generated_quantum_numbers = np.rint(v_prime*10)", "state v hMean = sigmoid(np.dot(v, w) + b) image =", "the suggested plots training_set = gas.generate(amount=training_size, n_max=n_max) m = bm.BoltzmannMachine(num_hidden=hidden_units)", "axis.set_title(f\"{title}: mm = {np.mean(np.fabs(values))}\") plotit(fig.add_subplot(gs[0,0]), a, 'a') plotit(fig.add_subplot(gs[0,1]), w.flatten(), 'w')", "CAUTION! 
This will freeze the execution plt.show() # Init the", "# Visualization imports from IPython.display import clear_output from PIL import", "2}) # plot histogram of initial vs generated n =", "total_steps, eta, a, b, w, da, db, dw): # Every", "error nsteps = 300000 # define the validation set to", "histogram of visible, hidden, weights fig = plt.figure(constrained_layout=True) gs =", "step {current_step}.\") # Produce a sample starting from the validation", "weight updates about # 1e-3 times the weights (to within", "amount of hidden units was set by trial and error", "image = Image.fromarray(hMean * 256).show() # Create the grid for", "/ {:08d}\".format(current_step, total_steps), end=\" \\r\") # After 'checkpoint_steps', show the", "it can range # from 10 to 100 batchsize =", "for training plot_training(validation_set, v_prime, eta, a, b, w, da, db,", "plt.rcParams.update({'font.size': 2}) # plot histogram of initial vs generated n", "particles, a limit of 10 for the # quantum numbers", "to be used in training_visualization validation_set = gas.generate(amount=20) def training_visualization(machine,", "current_step, total_steps, eta, a, b, w, da, db, dw): #", "a, 'a') plotit(fig.add_subplot(gs[0,1]), w.flatten(), 'w') plotit(fig.add_subplot(gs[0,2]), b, 'b') # plot", "set to be used in training_visualization validation_set = gas.generate(amount=20) def", "plt.xlabel('n') plt.ylabel('P(n)') plt.legend() # plot histogram of visible, hidden, weights", "== 0 or current_step == total_steps-1: print(f\"Showing at step {current_step}.\")", "was set by trial and error nsteps = 300000 #", "def plot_training(v, v_prime, eta, a, b, w, da, db, dw):", "# Set the quantum gas with N particles, a limit", "== 0: print(\"{:08d} / {:08d}\".format(current_step, total_steps), end=\" \\r\") # After", "= np.arange(0,10) generated_quantum_numbers = np.rint(v_prime*10) plt.hist( generated_quantum_numbers.flatten(), bins=np.arange(0,10), density=True, label=\"Sampled\"", "execution 
plt.show() # Init the boltzmann machine and train it", "import matplotlib.pyplot as plt import matplotlib matplotlib.rcParams['figure.dpi']=300 def sigmoid(x): return", "total_steps-1: print(f\"Showing at step {current_step}.\") # Produce a sample starting", "updates about # 1e-3 times the weights (to within an", "from IPython.display import clear_output from PIL import Image import matplotlib.pyplot", "matplotlib.pyplot as plt import matplotlib matplotlib.rcParams['figure.dpi']=300 def sigmoid(x): return .5", "plot histogram of visible, hidden, weights fig = plt.figure(constrained_layout=True) gs", "matplotlib matplotlib.rcParams['figure.dpi']=300 def sigmoid(x): return .5 * (1 + np.tanh(x", "the state v hMean = sigmoid(np.dot(v, w) + b) image", "sigmoid(np.dot(v, w) + b) image = Image.fromarray(hMean * 256).show() #", "w.flatten(), 'w') plotit(fig.add_subplot(gs[0,2]), b, 'b') # plot histogram of d_visible,", "- np.sum( np.log(1 + np.exp(x(v))), axis=1) free_valdation = -np.dot(v_prime, a)", "the recipe suggests a learning rate that makes the weight", "# Produce a sample starting from the validation set after", "generated_quantum_numbers = np.rint(v_prime*10) plt.hist( generated_quantum_numbers.flatten(), bins=np.arange(0,10), density=True, label=\"Sampled\" ) plt.plot(", "+ np.dot(vv, w) free_training = -np.dot(v, a) - np.sum( np.log(1", "da, db, dw): clear_output(wait=True) # Show how the weights light", "be used in training_visualization validation_set = gas.generate(amount=20) def training_visualization(machine, current_step,", "light up for the state v hMean = sigmoid(np.dot(v, w)", "to 10, though it can range # from 10 to", "histogram of initial vs generated n = np.arange(0,10) generated_quantum_numbers =", "a) - np.sum( np.log(1 + np.exp(x(v))), axis=1) free_valdation = -np.dot(v_prime,", "== total_steps-1: print(f\"Showing at step {current_step}.\") # Produce a sample", "np.exp(x(v))), axis=1) free_valdation = -np.dot(v_prime, a) - np.sum( np.log(1 +", "+ 
b) image = Image.fromarray(hMean * 256).show() # Create the", "def training_visualization(machine, current_step, total_steps, eta, a, b, w, da, db,", "in a file run_id = int(datetime.datetime.now().timestamp()) np.savetxt(f\"a_{run_id}.csv\", a, delimiter=',') np.savetxt(f\"b_{run_id}.csv\",", "# After 'checkpoint_steps', show the suggested plots checkpoint_steps = 10000", "return .5 * (1 + np.tanh(x / 2.)) # Set", "print(f\"Showing at step {current_step}.\") # Produce a sample starting from", "'da') plotit(fig.add_subplot(gs[1,1]), eta*dw.flatten(), 'dw') plotit(fig.add_subplot(gs[1,2]), eta*db, 'db') # show free", "w, da, db, dw) def plot_training(v, v_prime, eta, a, b,", "plots checkpoint_steps = 10000 if current_step%checkpoint_steps == 0 or current_step", "batchsize to 10, though it can range # from 10", "import matplotlib matplotlib.rcParams['figure.dpi']=300 def sigmoid(x): return .5 * (1 +", "total_steps), end=\" \\r\") # After 'checkpoint_steps', show the suggested plots", "10 to 100 batchsize = 10 # the recipe suggests", "-np.dot(v_prime, a) - np.sum( np.log(1 + np.exp(x(v_prime))), axis=1) print(f\"\\nF_training={np.average(free_training)} vs", "quantum gas with N particles, a limit of 10 for", "machine.generate(validation_set, 100, a=a, b=b, w=w) # print useful plots for", "the validation set to be used in training_visualization validation_set =", "for the # quantum numbers and default temperature and frequency", "and default temperature and frequency N = 10*10 gas =", "'db') # show free energies of the average of samples", "axis=1) free_valdation = -np.dot(v_prime, a) - np.sum( np.log(1 + np.exp(x(v_prime))),", "hidden units was set by trial and error hidden_units =", ".5 * (1 + np.tanh(x / 2.)) # Set the", "clear_output(wait=True) # Show how the weights light up for the", "set the batchsize to 10, though it can range #", "- np.sum( np.log(1 + np.exp(x(v_prime))), axis=1) print(f\"\\nF_training={np.average(free_training)} vs 
F_validation={np.average(free_valdation)}\\n\") #", "plotit(fig.add_subplot(gs[1,2]), eta*db, 'db') # show free energies of the average", "= lambda vv : b + np.dot(vv, w) free_training =", "with N particles, a limit of 10 for the #", "us know that the training # is still running if", "PIL import Image import matplotlib.pyplot as plt import matplotlib matplotlib.rcParams['figure.dpi']=300", "still running if current_step%50000 == 0: print(\"{:08d} / {:08d}\".format(current_step, total_steps),", "Store in a file run_id = int(datetime.datetime.now().timestamp()) np.savetxt(f\"a_{run_id}.csv\", a, delimiter=',')", "plt.hist( generated_quantum_numbers.flatten(), bins=np.arange(0,10), density=True, label=\"Sampled\" ) plt.plot( n, gas.p_n(n), label=\"Theor.\"", "{current_step}.\") # Produce a sample starting from the validation set", "boltzmann machine and train it while visualizing the suggested plots", "steps v_prime = machine.generate(validation_set, 100, a=a, b=b, w=w) # print", "= qho.QHOGas(N=N) n_max = 10 training_size = 100000 # the", "a=a, b=b, w=w) # print useful plots for training plot_training(validation_set,", "sigmoid(x): return .5 * (1 + np.tanh(x / 2.)) #", "db, dw): # Every now and then (every 50k steps),", "np.tanh(x / 2.)) # Set the quantum gas with N", "title): axis.hist(values) axis.set_title(f\"{title}: mm = {np.mean(np.fabs(values))}\") plotit(fig.add_subplot(gs[0,0]), a, 'a') plotit(fig.add_subplot(gs[0,1]),", "Set the quantum gas with N particles, a limit of", "a, b, w, da, db, dw): clear_output(wait=True) # Show how", "hidden_units = 70 # the recipe suggests to set the", "import clear_output from PIL import Image import matplotlib.pyplot as plt", "+ np.tanh(x / 2.)) # Set the quantum gas with", "= sigmoid(np.dot(v, w) + b) image = Image.fromarray(hMean * 256).show()", "a, b, w, da, db, dw) def plot_training(v, v_prime, eta,", "# from 10 to 100 batchsize = 10 # the", "np.sum( np.log(1 + np.exp(x(v_prime))), axis=1) 
print(f\"\\nF_training={np.average(free_training)} vs F_validation={np.average(free_valdation)}\\n\") # Show.", "axis.hist(values) axis.set_title(f\"{title}: mm = {np.mean(np.fabs(values))}\") plotit(fig.add_subplot(gs[0,0]), a, 'a') plotit(fig.add_subplot(gs[0,1]), w.flatten(),", "= 70 # the recipe suggests to set the batchsize", "This will freeze the execution plt.show() # Init the boltzmann", "within an order of magnitude) eta = 0.005 # the", "# Show how the weights light up for the state", "b, w, da, db, dw): clear_output(wait=True) # Show how the", "want plt.rcParams.update({'font.size': 2}) # plot histogram of initial vs generated", "= 10 # the recipe suggests a learning rate that", "50k steps), let us know that the training # is", "N particles, a limit of 10 for the # quantum", "checkpoint_steps = 10000 if current_step%checkpoint_steps == 0 or current_step ==", "da, db, dw): # Every now and then (every 50k", "is still running if current_step%50000 == 0: print(\"{:08d} / {:08d}\".format(current_step,", "= m.train(training_set, batchsize=batchsize, eta=eta, nsteps=nsteps, do_while_training=None) # Store in a", "then (every 50k steps), let us know that the training", "suggested plots training_set = gas.generate(amount=training_size, n_max=n_max) m = bm.BoltzmannMachine(num_hidden=hidden_units) a,b,w" ]
[ "as index_file: for private_pack in private_packs: private_pack['price'] = 0 index", "import os import sys import shutil import json import argparse", "parser.add_argument('-a', '--artifacts_path', help=\"The full path of packs artifacts\", required=True) parser.add_argument('-ea',", "of packs artifacts\", required=True) parser.add_argument('-ea', '--extract_artifacts_path', help=\"Full path of folder", "% 60 == 0: # Printing a message every minute", "import time import os import sys import shutil import json", "storage_client.bucket(private_bucket_name) dummy_index_blob = public_storage_bucket.blob(dummy_index_path) with lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): extract_packs_artifacts(packs_artifacts_path, extract_destination_path) public_index_folder_path,", "[]) for pack in private_packs: is_pack_in_dummy_index = any( [pack['id'] ==", "parser.add_argument('-e', '--extract_public_index_path', help=\"Full path of folder to extract the public", "Tests.private_build.upload_packs_private import download_and_extract_index, update_index_with_priced_packs, \\ extract_packs_artifacts from Tests.Marketplace.marketplace_services import init_storage_client", "open(path_to_pack_metadata, 'w') as pack_metadata_file: json.dump(pack_metadata, pack_metadata_file, indent=4) def change_packs_price_to_zero(public_index_folder_path): paths_to_packs_in_merged_index", "shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index) def upload_modified_index(public_index_folder_path, extract_destination_path, public_ci_dummy_index_blob, build_number, private_packs): \"\"\"Upload updated", "index_file: index_json = json.load(index_file) packs_from_dummy_index = index_json.get('packs', []) for pack", "option_handler(): \"\"\"Validates and parses script arguments. 
Returns: Namespace: Parsed arguments", "os.path.join(path_to_pack, 'metadata.json') change_pack_price_to_zero(path_to_pack_metadata) def merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path): packs_in_private_index = [pack_dir.name for", "= public_storage_bucket.blob(dummy_index_lock_path) with open(LOCK_FILE_PATH, 'w') as lock_file: lock_file.write('locked') with open(LOCK_FILE_PATH,", "lock_file: lock_file.write('locked') with open(LOCK_FILE_PATH, 'rb') as lock_file: dummy_index_lock_blob.upload_from_file(lock_file) def acquire_dummy_index_lock(public_storage_bucket,", "the machine from dying due to no output logging.info(\"Waiting to", "packs and their price. \"\"\" with open(os.path.join(public_index_folder_path, \"index.json\"), \"w+\") as", "index_zip_name = os.path.basename(public_index_folder_path) index_zip_path = shutil.make_archive(base_name=public_index_folder_path, format=\"zip\", root_dir=extract_destination_path, base_dir=index_zip_name) try:", "cloud storage.\") # disable-secrets-detection-start parser.add_argument('-b', '--public_bucket_name', help=\"CI public bucket name\",", "and \" \"authenticate using Google Cloud SDK by running: \"", "'--private_bucket_name', help=\"CI private bucket name\", required=True) parser.add_argument('-s', '--service_account', help=(\"Path to", "circleCI usage. 
\" \"For local development use your personal account", "format=\"zip\", root_dir=extract_destination_path, base_dir=index_zip_name) try: public_ci_dummy_index_blob.reload() public_ci_dummy_index_blob.cache_control = \"no-cache,max-age=0\" # disabling", "main(): install_logging('prepare_public_index_for_private_testing.log', logger=logging) upload_config = option_handler() service_account = upload_config.service_account build_number", "dummy_index_lock_path): try: acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) yield except Exception: logging.exception(\"Error in dummy", "Cloud SDK by running: \" \"`gcloud auth application-default login` and", "with open(path_to_pack_metadata, 'w') as pack_metadata_file: json.dump(pack_metadata, pack_metadata_file, indent=4) def change_packs_price_to_zero(public_index_folder_path):", "path of packs artifacts\", required=True) parser.add_argument('-ea', '--extract_artifacts_path', help=\"Full path of", "'--extract_artifacts_path', help=\"Full path of folder to extract wanted packs\", required=True)", "(str): public index folder full path. extract_destination_path (str): extract folder", "open(dummy_index_json_path) as index_file: index_json = json.load(index_file) packs_from_dummy_index = index_json.get('packs', [])", "path_to_pack_metadata = os.path.join(path_to_pack, 'metadata.json') change_pack_price_to_zero(path_to_pack_metadata) def merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path): packs_in_private_index =", "parameter blank. 
\" \"For more information go to: \" \"https://googleapis.dev/python/google-api-core/latest/auth.html\"),", "full path of packs artifacts\", required=True) parser.add_argument('-ea', '--extract_artifacts_path', help=\"Full path", "import json import argparse from zipfile import ZipFile from contextlib", "required=True) parser.add_argument('-s', '--service_account', help=(\"Path to gcloud service account, is for", "try: public_ci_dummy_index_blob.reload() public_ci_dummy_index_blob.cache_control = \"no-cache,max-age=0\" # disabling caching for index", "to the dummy index in the private CI bucket\", required=True)", "the public index\", required=True) parser.add_argument('-sb', '--storage_base_path', help=\"Storage base path of", "private CI bucket\", required=True) # disable-secrets-detection-end return parser.parse_args() def is_dummy_index_locked(public_storage_bucket,", "== 0: # Printing a message every minute to keep", "def change_pack_price_to_zero(path_to_pack_metadata): with open(path_to_pack_metadata, 'r') as pack_metadata_file: pack_metadata = json.load(pack_metadata_file)", "an index revision. private_packs (list): List of private packs and", "= os.path.basename(public_index_folder_path) index_zip_path = shutil.make_archive(base_name=public_index_folder_path, format=\"zip\", root_dir=extract_destination_path, base_dir=index_zip_name) try: public_ci_dummy_index_blob.reload()", "json.load(pack_metadata_file) pack_metadata['price'] = 0 with open(path_to_pack_metadata, 'w') as pack_metadata_file: json.dump(pack_metadata,", "index file)\", required=True) parser.add_argument('-e', '--extract_public_index_path', help=\"Full path of folder to", "for circleCI usage. 
\" \"For local development use your personal", "index_zip.extractall(extracted_dummy_index_path) with open(dummy_index_json_path) as index_file: index_json = json.load(index_file) packs_from_dummy_index =", "os.path.join(extracted_dummy_index_path, 'index', 'index.json') dummy_index_blob.download_to_filename(downloaded_dummy_index_path) os.mkdir(extracted_dummy_index_path) if os.path.exists(downloaded_dummy_index_path): with ZipFile(downloaded_dummy_index_path, 'r')", "use your personal account and \" \"authenticate using Google Cloud", "for pack_dir in os.scandir(private_index_folder_path) if pack_dir.is_dir()] for pack_name in packs_in_private_index:", "0: # Printing a message every minute to keep the", "root_dir=extract_destination_path, base_dir=index_zip_name) try: public_ci_dummy_index_blob.reload() public_ci_dummy_index_blob.cache_control = \"no-cache,max-age=0\" # disabling caching", "dummy_index_lock_blob.upload_from_file(lock_file) def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): total_seconds_waited = 0 while is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path):", "parser.add_argument('-p', '--pack_name', help=\"Modified pack to upload to gcs.\") parser.add_argument('-a', '--artifacts_path',", "in packs_from_dummy_index]) if not is_pack_in_dummy_index: packs_from_dummy_index.append(pack) os.remove(downloaded_dummy_index_path) shutil.rmtree(extracted_dummy_index_path) return packs_from_dummy_index", "'r') as index_zip: index_zip.extractall(extracted_dummy_index_path) with open(dummy_index_json_path) as index_file: index_json =", "\"index.json\"), \"w+\") as index_file: for private_pack in private_packs: private_pack['price'] =", "for the packs to be downloaded successfully, their price has", "for pack_dir in os.scandir(public_index_folder_path) if pack_dir.is_dir()] for path_to_pack in paths_to_packs_in_merged_index:", "index_file, indent=4) index_zip_name = os.path.basename(public_index_folder_path) 
index_zip_path = shutil.make_archive(base_name=public_index_folder_path, format=\"zip\", root_dir=extract_destination_path,", "dummy index lock context manager.\") finally: release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) def change_pack_price_to_zero(path_to_pack_metadata):", "'index.zip') dummy_index_lock_path = os.path.join(dummy_index_dir_path, 'lock.txt') storage_client = init_storage_client(service_account) public_storage_bucket =", "file)\", required=True) parser.add_argument('-e', '--extract_public_index_path', help=\"Full path of folder to extract", "= upload_config.private_bucket_name storage_base_path = upload_config.storage_base_path extract_public_index_path = upload_config.extract_public_index_path changed_pack =", "= 0 while is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): if total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK: logging.critical(\"Error:", "= upload_config.storage_base_path extract_public_index_path = upload_config.extract_public_index_path changed_pack = upload_config.pack_name extract_destination_path =", "wanted packs\", required=True) parser.add_argument('-di', '--dummy_index_dir_path', help=\"Full path to the dummy", "bucket name\", required=True) parser.add_argument('-s', '--service_account', help=(\"Path to gcloud service account,", "of private packs and their price. 
\"\"\" with open(os.path.join(public_index_folder_path, \"index.json\"),", "from contextlib import contextmanager from datetime import datetime from Tests.private_build.upload_packs_private", "'r') as pack_metadata_file: pack_metadata = json.load(pack_metadata_file) pack_metadata['price'] = 0 with", "lock_file.write('locked') with open(LOCK_FILE_PATH, 'rb') as lock_file: dummy_index_lock_blob.upload_from_file(lock_file) def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path):", "with open(LOCK_FILE_PATH, 'w') as lock_file: lock_file.write('locked') with open(LOCK_FILE_PATH, 'rb') as", "build number, used as an index revision. private_packs (list): List", "total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK: logging.critical(\"Error: Failed too long to acquire lock,", "= [pack_dir.path for pack_dir in os.scandir(public_index_folder_path) if pack_dir.is_dir()] for path_to_pack", "base path of the directory to upload to.\", required=False) parser.add_argument('-p',", "Returns: Namespace: Parsed arguments object. \"\"\" parser = argparse.ArgumentParser(description=\"Store packs", "personal account and \" \"authenticate using Google Cloud SDK by", "change_pack_price_to_zero(path_to_pack_metadata) def merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path): packs_in_private_index = [pack_dir.name for pack_dir in", "Exception: logging.exception(\"Failed in uploading index. 
Mismatch in index file generation.\")", "public_storage_bucket = storage_client.bucket(public_bucket_name) private_storage_bucket = storage_client.bucket(private_bucket_name) dummy_index_blob = public_storage_bucket.blob(dummy_index_path) with", "open(LOCK_FILE_PATH, 'w') as lock_file: lock_file.write('locked') with open(LOCK_FILE_PATH, 'rb') as lock_file:", "= upload_config.service_account build_number = upload_config.ci_build_number public_bucket_name = upload_config.public_bucket_name private_bucket_name =", "from Tests.private_build.upload_packs_private import download_and_extract_index, update_index_with_priced_packs, \\ extract_packs_artifacts from Tests.Marketplace.marketplace_services import", "private_pack in private_packs: private_pack['price'] = 0 index = { 'revision':", "public_index_folder_path (str): public index folder full path. extract_destination_path (str): extract", "index zip to cloud storage. Args: public_index_folder_path (str): public index", "name\", required=True) parser.add_argument('-pb', '--private_bucket_name', help=\"CI private bucket name\", required=True) parser.add_argument('-s',", "lock, exceeded max wait time.\") sys.exit(1) if total_seconds_waited % 60", "release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) dummy_index_lock_blob.delete() os.remove(LOCK_FILE_PATH) def add_private_packs_from_dummy_index(private_packs, dummy_index_blob):", "path_to_pack_in_private_index = os.path.join(private_index_folder_path, pack_name) path_to_pack_in_public_index = os.path.join(public_index_folder_path, pack_name) shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index)", "lock_dummy_index(public_storage_bucket, dummy_index_lock_path) def release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) dummy_index_lock_blob.delete() 
os.remove(LOCK_FILE_PATH)", "init_storage_client from Tests.scripts.utils.log_util import install_logging from Tests.scripts.utils import logging_wrapper as", "in private_packs: is_pack_in_dummy_index = any( [pack['id'] == dummy_index_pack['id'] for dummy_index_pack", "extract_destination_path, public_ci_dummy_index_blob, build_number, private_packs): \"\"\"Upload updated index zip to cloud", "the private CI bucket\", required=True) # disable-secrets-detection-end return parser.parse_args() def", "os.path.join(dummy_index_dir_path, 'index.zip') dummy_index_lock_path = os.path.join(dummy_index_dir_path, 'lock.txt') storage_client = init_storage_client(service_account) public_storage_bucket", "public index\", required=True) parser.add_argument('-sb', '--storage_base_path', help=\"Storage base path of the", "index_zip_path = shutil.make_archive(base_name=public_index_folder_path, format=\"zip\", root_dir=extract_destination_path, base_dir=index_zip_name) try: public_ci_dummy_index_blob.reload() public_ci_dummy_index_blob.cache_control =", "= json.load(index_file) packs_from_dummy_index = index_json.get('packs', []) for pack in private_packs:", "full path. 
public_ci_dummy_index_blob (Blob): google cloud storage object that represents", "shutil.make_archive(base_name=public_index_folder_path, format=\"zip\", root_dir=extract_destination_path, base_dir=index_zip_name) try: public_ci_dummy_index_blob.reload() public_ci_dummy_index_blob.cache_control = \"no-cache,max-age=0\" #", "\"For more information go to: \" \"https://googleapis.dev/python/google-api-core/latest/auth.html\"), required=False) parser.add_argument('-n', '--ci_build_number',", "return packs_from_dummy_index def main(): install_logging('prepare_public_index_for_private_testing.log', logger=logging) upload_config = option_handler() service_account", "add_private_packs_from_dummy_index(private_packs, dummy_index_blob) upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob, build_number, private_packs) if __name__ ==", "def option_handler(): \"\"\"Validates and parses script arguments. Returns: Namespace: Parsed", "indent=4) def change_packs_price_to_zero(public_index_folder_path): paths_to_packs_in_merged_index = [pack_dir.path for pack_dir in os.scandir(public_index_folder_path)", "public_ci_dummy_index_blob.cache_control = \"no-cache,max-age=0\" # disabling caching for index blob public_ci_dummy_index_blob.upload_from_filename(index_zip_path)", "def merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path): packs_in_private_index = [pack_dir.name for pack_dir in os.scandir(private_index_folder_path)", "'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), 'packs': private_packs } json.dump(index, index_file, indent=4) index_zip_name =", "their price. \"\"\" with open(os.path.join(public_index_folder_path, \"index.json\"), \"w+\") as index_file: for", "index.zip to storage.\") except Exception: logging.exception(\"Failed in uploading index. Mismatch", "number, used as an index revision. 
private_packs (list): List of", "public_ci_dummy_index_blob.upload_from_filename(index_zip_path) logging.success(\"Finished uploading index.zip to storage.\") except Exception: logging.exception(\"Failed in", "has to be 0 change_packs_price_to_zero(public_index_folder_path) private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket,", "total_seconds_waited += 10 time.sleep(10) lock_dummy_index(public_storage_bucket, dummy_index_lock_path) def release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob", "def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): total_seconds_waited = 0 while is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): if", "\\ extract_packs_artifacts from Tests.Marketplace.marketplace_services import init_storage_client from Tests.scripts.utils.log_util import install_logging", "upload_config.service_account build_number = upload_config.ci_build_number public_bucket_name = upload_config.public_bucket_name private_bucket_name = upload_config.private_bucket_name", "change_packs_price_to_zero(public_index_folder_path) private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket, extract_destination_path, public_index_folder_path, changed_pack, True,", "path_to_pack_in_public_index = os.path.join(public_index_folder_path, pack_name) shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index) def upload_modified_index(public_index_folder_path, extract_destination_path, public_ci_dummy_index_blob,", "extract_destination_path, public_index_folder_path, changed_pack, True, storage_base_path) private_packs = add_private_packs_from_dummy_index(private_packs, dummy_index_blob) upload_modified_index(public_index_folder_path,", "json import argparse from zipfile import ZipFile from contextlib import", "a message every minute to keep the machine from dying", "(will be used as 
hash revision at index file)\", required=True)", "required=True) parser.add_argument('-ea', '--extract_artifacts_path', help=\"Full path of folder to extract wanted", "return parser.parse_args() def is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) return dummy_index_lock_blob.exists()", "shutil.rmtree(public_index_folder_path) def option_handler(): \"\"\"Validates and parses script arguments. Returns: Namespace:", "path of folder to extract wanted packs\", required=True) parser.add_argument('-di', '--dummy_index_dir_path',", "dummy_index_pack['id'] for dummy_index_pack in packs_from_dummy_index]) if not is_pack_in_dummy_index: packs_from_dummy_index.append(pack) os.remove(downloaded_dummy_index_path)", "(list): List of private packs and their price. \"\"\" with", "bucket\", required=True) # disable-secrets-detection-end return parser.parse_args() def is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob", "packs_from_dummy_index = index_json.get('packs', []) for pack in private_packs: is_pack_in_dummy_index =", "= upload_config.pack_name extract_destination_path = upload_config.extract_artifacts_path packs_artifacts_path = upload_config.artifacts_path dummy_index_dir_path =", "lock.\") total_seconds_waited += 10 time.sleep(10) lock_dummy_index(public_storage_bucket, dummy_index_lock_path) def release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path):", "= public_storage_bucket.blob(dummy_index_lock_path) dummy_index_lock_blob.delete() os.remove(LOCK_FILE_PATH) def add_private_packs_from_dummy_index(private_packs, dummy_index_blob): downloaded_dummy_index_path = 'current_dummy_index.zip'", "public_index_folder_path, public_index_blob, _ = download_and_extract_index(public_storage_bucket, extract_public_index_path, storage_base_path) # In order", "pack in private_packs: is_pack_in_dummy_index = any( [pack['id'] == 
dummy_index_pack['id'] for", "'w') as pack_metadata_file: json.dump(pack_metadata, pack_metadata_file, indent=4) def change_packs_price_to_zero(public_index_folder_path): paths_to_packs_in_merged_index =", "information go to: \" \"https://googleapis.dev/python/google-api-core/latest/auth.html\"), required=False) parser.add_argument('-n', '--ci_build_number', help=\"CircleCi build", "to acquire lock.\") total_seconds_waited += 10 time.sleep(10) lock_dummy_index(public_storage_bucket, dummy_index_lock_path) def", "upload_config.dummy_index_dir_path dummy_index_path = os.path.join(dummy_index_dir_path, 'index.zip') dummy_index_lock_path = os.path.join(dummy_index_dir_path, 'lock.txt') storage_client", "import argparse from zipfile import ZipFile from contextlib import contextmanager", "for dummy_index_pack in packs_from_dummy_index]) if not is_pack_in_dummy_index: packs_from_dummy_index.append(pack) os.remove(downloaded_dummy_index_path) shutil.rmtree(extracted_dummy_index_path)", "= init_storage_client(service_account) public_storage_bucket = storage_client.bucket(public_bucket_name) private_storage_bucket = storage_client.bucket(private_bucket_name) dummy_index_blob =", "dummy_index_dir_path = upload_config.dummy_index_dir_path dummy_index_path = os.path.join(dummy_index_dir_path, 'index.zip') dummy_index_lock_path = os.path.join(dummy_index_dir_path,", "the packs to be downloaded successfully, their price has to", "keep the machine from dying due to no output logging.info(\"Waiting", "# disabling caching for index blob public_ci_dummy_index_blob.upload_from_filename(index_zip_path) logging.success(\"Finished uploading index.zip", "parser.add_argument('-ea', '--extract_artifacts_path', help=\"Full path of folder to extract wanted packs\",", "total_seconds_waited = 0 while is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): if total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK:", "to acquire lock, exceeded max wait time.\") sys.exit(1) if 
total_seconds_waited", "required=True) parser.add_argument('-sb', '--storage_base_path', help=\"Storage base path of the directory to", "minute to keep the machine from dying due to no", "from datetime import datetime from Tests.private_build.upload_packs_private import download_and_extract_index, update_index_with_priced_packs, \\", "and parses script arguments. Returns: Namespace: Parsed arguments object. \"\"\"", "parser.add_argument('-s', '--service_account', help=(\"Path to gcloud service account, is for circleCI", "logger=logging) upload_config = option_handler() service_account = upload_config.service_account build_number = upload_config.ci_build_number", "private_index_folder_path): packs_in_private_index = [pack_dir.name for pack_dir in os.scandir(private_index_folder_path) if pack_dir.is_dir()]", "private_storage_bucket = storage_client.bucket(private_bucket_name) dummy_index_blob = public_storage_bucket.blob(dummy_index_path) with lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): extract_packs_artifacts(packs_artifacts_path,", "dummy_index_blob): downloaded_dummy_index_path = 'current_dummy_index.zip' extracted_dummy_index_path = 'dummy_index' dummy_index_json_path = os.path.join(extracted_dummy_index_path,", "def release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) dummy_index_lock_blob.delete() os.remove(LOCK_FILE_PATH) def add_private_packs_from_dummy_index(private_packs,", "packs_in_private_index: path_to_pack_in_private_index = os.path.join(private_index_folder_path, pack_name) path_to_pack_in_public_index = os.path.join(public_index_folder_path, pack_name) shutil.copy(path_to_pack_in_private_index,", "update_index_with_priced_packs, \\ extract_packs_artifacts from Tests.Marketplace.marketplace_services import init_storage_client from Tests.scripts.utils.log_util import", "arguments. Returns: Namespace: Parsed arguments object. 
\"\"\" parser = argparse.ArgumentParser(description=\"Store", "dummy_index_blob) upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob, build_number, private_packs) if __name__ == '__main__':", "to cloud storage. Args: public_index_folder_path (str): public index folder full", "public_index_blob, _ = download_and_extract_index(public_storage_bucket, extract_public_index_path, storage_base_path) # In order for", "= os.path.join(dummy_index_dir_path, 'index.zip') dummy_index_lock_path = os.path.join(dummy_index_dir_path, 'lock.txt') storage_client = init_storage_client(service_account)", "upload_config.storage_base_path extract_public_index_path = upload_config.extract_public_index_path changed_pack = upload_config.pack_name extract_destination_path = upload_config.extract_artifacts_path", "storage_client = init_storage_client(service_account) public_storage_bucket = storage_client.bucket(public_bucket_name) private_storage_bucket = storage_client.bucket(private_bucket_name) dummy_index_blob", "'--artifacts_path', help=\"The full path of packs artifacts\", required=True) parser.add_argument('-ea', '--extract_artifacts_path',", "def lock_dummy_index(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) with open(LOCK_FILE_PATH, 'w') as", "return dummy_index_lock_blob.exists() def lock_dummy_index(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) with open(LOCK_FILE_PATH,", "+= 10 time.sleep(10) lock_dummy_index(public_storage_bucket, dummy_index_lock_path) def release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob =", "60 == 0: # Printing a message every minute to", "= shutil.make_archive(base_name=public_index_folder_path, format=\"zip\", root_dir=extract_destination_path, base_dir=index_zip_name) try: public_ci_dummy_index_blob.reload() 
public_ci_dummy_index_blob.cache_control = \"no-cache,max-age=0\"", "acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): total_seconds_waited = 0 while is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): if total_seconds_waited", "dummy index.zip blob. build_number (str): circleCI build number, used as", "folder to extract wanted packs\", required=True) parser.add_argument('-di', '--dummy_index_dir_path', help=\"Full path", "from Tests.scripts.utils import logging_wrapper as logging MAX_SECONDS_TO_WAIT_FOR_LOCK = 600 LOCK_FILE_PATH", "path of folder to extract the public index\", required=True) parser.add_argument('-sb',", "logging.exception(\"Error in dummy index lock context manager.\") finally: release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path)", "logging.success(\"Finished uploading index.zip to storage.\") except Exception: logging.exception(\"Failed in uploading", "= upload_config.artifacts_path dummy_index_dir_path = upload_config.dummy_index_dir_path dummy_index_path = os.path.join(dummy_index_dir_path, 'index.zip') dummy_index_lock_path", "contextlib import contextmanager from datetime import datetime from Tests.private_build.upload_packs_private import", "if total_seconds_waited % 60 == 0: # Printing a message", "indent=4) index_zip_name = os.path.basename(public_index_folder_path) index_zip_path = shutil.make_archive(base_name=public_index_folder_path, format=\"zip\", root_dir=extract_destination_path, base_dir=index_zip_name)", "file generation.\") sys.exit(1) finally: shutil.rmtree(public_index_folder_path) def option_handler(): \"\"\"Validates and parses", "object. 
\"\"\" parser = argparse.ArgumentParser(description=\"Store packs in cloud storage.\") #", "shutil.rmtree(extracted_dummy_index_path) return packs_from_dummy_index def main(): install_logging('prepare_public_index_for_private_testing.log', logger=logging) upload_config = option_handler()", "extract_destination_path (str): extract folder full path. public_ci_dummy_index_blob (Blob): google cloud", "argparse.ArgumentParser(description=\"Store packs in cloud storage.\") # disable-secrets-detection-start parser.add_argument('-b', '--public_bucket_name', help=\"CI", "= [pack_dir.name for pack_dir in os.scandir(private_index_folder_path) if pack_dir.is_dir()] for pack_name", "dummy_index_lock_path): extract_packs_artifacts(packs_artifacts_path, extract_destination_path) public_index_folder_path, public_index_blob, _ = download_and_extract_index(public_storage_bucket, extract_public_index_path, storage_base_path)", "uploading index. Mismatch in index file generation.\") sys.exit(1) finally: shutil.rmtree(public_index_folder_path)", "finally: release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) def change_pack_price_to_zero(path_to_pack_metadata): with open(path_to_pack_metadata, 'r') as pack_metadata_file:", "is_pack_in_dummy_index = any( [pack['id'] == dummy_index_pack['id'] for dummy_index_pack in packs_from_dummy_index])", "dummy_index_lock_blob.exists() def lock_dummy_index(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) with open(LOCK_FILE_PATH, 'w')", "'--service_account', help=(\"Path to gcloud service account, is for circleCI usage.", "from zipfile import ZipFile from contextlib import contextmanager from datetime", "machine from dying due to no output logging.info(\"Waiting to acquire", "index file generation.\") sys.exit(1) finally: shutil.rmtree(public_index_folder_path) def option_handler(): \"\"\"Validates and", "index folder full path. 
extract_destination_path (str): extract folder full path.", "max wait time.\") sys.exit(1) if total_seconds_waited % 60 == 0:", "pack_name in packs_in_private_index: path_to_pack_in_private_index = os.path.join(private_index_folder_path, pack_name) path_to_pack_in_public_index = os.path.join(public_index_folder_path,", "'--dummy_index_dir_path', help=\"Full path to the dummy index in the private", "private_bucket_name = upload_config.private_bucket_name storage_base_path = upload_config.storage_base_path extract_public_index_path = upload_config.extract_public_index_path changed_pack", "be 0 change_packs_price_to_zero(public_index_folder_path) private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket, extract_destination_path, public_index_folder_path,", "yield except Exception: logging.exception(\"Error in dummy index lock context manager.\")", "import datetime from Tests.private_build.upload_packs_private import download_and_extract_index, update_index_with_priced_packs, \\ extract_packs_artifacts from", "folder full path. public_ci_dummy_index_blob (Blob): google cloud storage object that", "sys.exit(1) finally: shutil.rmtree(public_index_folder_path) def option_handler(): \"\"\"Validates and parses script arguments.", "gcloud service account, is for circleCI usage. 
\" \"For local", "parser.add_argument('-sb', '--storage_base_path', help=\"Storage base path of the directory to upload", "generation.\") sys.exit(1) finally: shutil.rmtree(public_index_folder_path) def option_handler(): \"\"\"Validates and parses script", "dummy_index_lock_path = os.path.join(dummy_index_dir_path, 'lock.txt') storage_client = init_storage_client(service_account) public_storage_bucket = storage_client.bucket(public_bucket_name)", "storage_client.bucket(public_bucket_name) private_storage_bucket = storage_client.bucket(private_bucket_name) dummy_index_blob = public_storage_bucket.blob(dummy_index_path) with lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path):", "storage_base_path) # In order for the packs to be downloaded", "logging_wrapper as logging MAX_SECONDS_TO_WAIT_FOR_LOCK = 600 LOCK_FILE_PATH = 'lock.txt' @contextmanager", "revision at index file)\", required=True) parser.add_argument('-e', '--extract_public_index_path', help=\"Full path of", "parser.add_argument('-n', '--ci_build_number', help=\"CircleCi build number (will be used as hash", "10 time.sleep(10) lock_dummy_index(public_storage_bucket, dummy_index_lock_path) def release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path)", "by running: \" \"`gcloud auth application-default login` and leave this", "private_packs: is_pack_in_dummy_index = any( [pack['id'] == dummy_index_pack['id'] for dummy_index_pack in", "help=\"Full path of folder to extract the public index\", required=True)", "more information go to: \" \"https://googleapis.dev/python/google-api-core/latest/auth.html\"), required=False) parser.add_argument('-n', '--ci_build_number', help=\"CircleCi", "with lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): extract_packs_artifacts(packs_artifacts_path, extract_destination_path) public_index_folder_path, public_index_blob, _ = 
download_and_extract_index(public_storage_bucket,", "def change_packs_price_to_zero(public_index_folder_path): paths_to_packs_in_merged_index = [pack_dir.path for pack_dir in os.scandir(public_index_folder_path) if", "to be downloaded successfully, their price has to be 0", "= os.path.join(extracted_dummy_index_path, 'index', 'index.json') dummy_index_blob.download_to_filename(downloaded_dummy_index_path) os.mkdir(extracted_dummy_index_path) if os.path.exists(downloaded_dummy_index_path): with ZipFile(downloaded_dummy_index_path,", "dummy_index_pack in packs_from_dummy_index]) if not is_pack_in_dummy_index: packs_from_dummy_index.append(pack) os.remove(downloaded_dummy_index_path) shutil.rmtree(extracted_dummy_index_path) return", "in os.scandir(private_index_folder_path) if pack_dir.is_dir()] for pack_name in packs_in_private_index: path_to_pack_in_private_index =", "= upload_config.dummy_index_dir_path dummy_index_path = os.path.join(dummy_index_dir_path, 'index.zip') dummy_index_lock_path = os.path.join(dummy_index_dir_path, 'lock.txt')", "script arguments. Returns: Namespace: Parsed arguments object. \"\"\" parser =", "as pack_metadata_file: json.dump(pack_metadata, pack_metadata_file, indent=4) def change_packs_price_to_zero(public_index_folder_path): paths_to_packs_in_merged_index = [pack_dir.path", "= option_handler() service_account = upload_config.service_account build_number = upload_config.ci_build_number public_bucket_name =", "= 'current_dummy_index.zip' extracted_dummy_index_path = 'dummy_index' dummy_index_json_path = os.path.join(extracted_dummy_index_path, 'index', 'index.json')", "(str): extract folder full path. public_ci_dummy_index_blob (Blob): google cloud storage", "in packs_in_private_index: path_to_pack_in_private_index = os.path.join(private_index_folder_path, pack_name) path_to_pack_in_public_index = os.path.join(public_index_folder_path, pack_name)", "blank. 
\" \"For more information go to: \" \"https://googleapis.dev/python/google-api-core/latest/auth.html\"), required=False)", "with ZipFile(downloaded_dummy_index_path, 'r') as index_zip: index_zip.extractall(extracted_dummy_index_path) with open(dummy_index_json_path) as index_file:", "paths_to_packs_in_merged_index: path_to_pack_metadata = os.path.join(path_to_pack, 'metadata.json') change_pack_price_to_zero(path_to_pack_metadata) def merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path): packs_in_private_index", "to.\", required=False) parser.add_argument('-p', '--pack_name', help=\"Modified pack to upload to gcs.\")", "account and \" \"authenticate using Google Cloud SDK by running:", "lock_file: dummy_index_lock_blob.upload_from_file(lock_file) def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): total_seconds_waited = 0 while is_dummy_index_locked(public_storage_bucket,", "path. public_ci_dummy_index_blob (Blob): google cloud storage object that represents the", "packs_from_dummy_index.append(pack) os.remove(downloaded_dummy_index_path) shutil.rmtree(extracted_dummy_index_path) return packs_from_dummy_index def main(): install_logging('prepare_public_index_for_private_testing.log', logger=logging) upload_config", "= index_json.get('packs', []) for pack in private_packs: is_pack_in_dummy_index = any(", "@contextmanager def lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): try: acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) yield except Exception:", "# In order for the packs to be downloaded successfully,", "private bucket name\", required=True) parser.add_argument('-s', '--service_account', help=(\"Path to gcloud service", "index_json.get('packs', []) for pack in private_packs: is_pack_in_dummy_index = any( [pack['id']", "path_to_pack_in_public_index) def upload_modified_index(public_index_folder_path, extract_destination_path, public_ci_dummy_index_blob, 
build_number, private_packs): \"\"\"Upload updated index", "any( [pack['id'] == dummy_index_pack['id'] for dummy_index_pack in packs_from_dummy_index]) if not", "json.dump(index, index_file, indent=4) index_zip_name = os.path.basename(public_index_folder_path) index_zip_path = shutil.make_archive(base_name=public_index_folder_path, format=\"zip\",", "help=\"Full path of folder to extract wanted packs\", required=True) parser.add_argument('-di',", "public index folder full path. extract_destination_path (str): extract folder full", "def is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) return dummy_index_lock_blob.exists() def lock_dummy_index(public_storage_bucket,", "at index file)\", required=True) parser.add_argument('-e', '--extract_public_index_path', help=\"Full path of folder", "= any( [pack['id'] == dummy_index_pack['id'] for dummy_index_pack in packs_from_dummy_index]) if", "logging.critical(\"Error: Failed too long to acquire lock, exceeded max wait", "= upload_config.extract_artifacts_path packs_artifacts_path = upload_config.artifacts_path dummy_index_dir_path = upload_config.dummy_index_dir_path dummy_index_path =", "List of private packs and their price. \"\"\" with open(os.path.join(public_index_folder_path,", "Parsed arguments object. \"\"\" parser = argparse.ArgumentParser(description=\"Store packs in cloud", "cloud storage object that represents the dummy index.zip blob. build_number", "usage. 
\" \"For local development use your personal account and", "as hash revision at index file)\", required=True) parser.add_argument('-e', '--extract_public_index_path', help=\"Full", "upload to.\", required=False) parser.add_argument('-p', '--pack_name', help=\"Modified pack to upload to", "change_packs_price_to_zero(public_index_folder_path): paths_to_packs_in_merged_index = [pack_dir.path for pack_dir in os.scandir(public_index_folder_path) if pack_dir.is_dir()]", "wait time.\") sys.exit(1) if total_seconds_waited % 60 == 0: #", "Google Cloud SDK by running: \" \"`gcloud auth application-default login`", "index revision. private_packs (list): List of private packs and their", "\" \"For local development use your personal account and \"", "\"`gcloud auth application-default login` and leave this parameter blank. \"", "os.path.join(dummy_index_dir_path, 'lock.txt') storage_client = init_storage_client(service_account) public_storage_bucket = storage_client.bucket(public_bucket_name) private_storage_bucket =", "= update_index_with_priced_packs(private_storage_bucket, extract_destination_path, public_index_folder_path, changed_pack, True, storage_base_path) private_packs = add_private_packs_from_dummy_index(private_packs,", "packs\", required=True) parser.add_argument('-di', '--dummy_index_dir_path', help=\"Full path to the dummy index", "\" \"authenticate using Google Cloud SDK by running: \" \"`gcloud", "public_storage_bucket.blob(dummy_index_lock_path) return dummy_index_lock_blob.exists() def lock_dummy_index(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) with", "to be 0 change_packs_price_to_zero(public_index_folder_path) private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket, extract_destination_path,", "to storage.\") except Exception: logging.exception(\"Failed in uploading index. 
Mismatch in", "= { 'revision': build_number, 'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), 'packs': private_packs } json.dump(index,", "using Google Cloud SDK by running: \" \"`gcloud auth application-default", "to upload to.\", required=False) parser.add_argument('-p', '--pack_name', help=\"Modified pack to upload", "pack to upload to gcs.\") parser.add_argument('-a', '--artifacts_path', help=\"The full path", "packs in cloud storage.\") # disable-secrets-detection-start parser.add_argument('-b', '--public_bucket_name', help=\"CI public", "this parameter blank. \" \"For more information go to: \"", "= public_storage_bucket.blob(dummy_index_path) with lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): extract_packs_artifacts(packs_artifacts_path, extract_destination_path) public_index_folder_path, public_index_blob, _", "init_storage_client(service_account) public_storage_bucket = storage_client.bucket(public_bucket_name) private_storage_bucket = storage_client.bucket(private_bucket_name) dummy_index_blob = public_storage_bucket.blob(dummy_index_path)", "index blob public_ci_dummy_index_blob.upload_from_filename(index_zip_path) logging.success(\"Finished uploading index.zip to storage.\") except Exception:", "logging MAX_SECONDS_TO_WAIT_FOR_LOCK = 600 LOCK_FILE_PATH = 'lock.txt' @contextmanager def lock_and_unlock_dummy_index(public_storage_bucket,", "finally: shutil.rmtree(public_index_folder_path) def option_handler(): \"\"\"Validates and parses script arguments. 
Returns:", "dying due to no output logging.info(\"Waiting to acquire lock.\") total_seconds_waited", "changed_pack, True, storage_base_path) private_packs = add_private_packs_from_dummy_index(private_packs, dummy_index_blob) upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob,", "download_and_extract_index, update_index_with_priced_packs, \\ extract_packs_artifacts from Tests.Marketplace.marketplace_services import init_storage_client from Tests.scripts.utils.log_util", "if pack_dir.is_dir()] for path_to_pack in paths_to_packs_in_merged_index: path_to_pack_metadata = os.path.join(path_to_pack, 'metadata.json')", "help=\"Modified pack to upload to gcs.\") parser.add_argument('-a', '--artifacts_path', help=\"The full", "of folder to extract the public index\", required=True) parser.add_argument('-sb', '--storage_base_path',", "as index_file: index_json = json.load(index_file) packs_from_dummy_index = index_json.get('packs', []) for", "your personal account and \" \"authenticate using Google Cloud SDK", "storage. Args: public_index_folder_path (str): public index folder full path. extract_destination_path", "_ = download_and_extract_index(public_storage_bucket, extract_public_index_path, storage_base_path) # In order for the", "\"no-cache,max-age=0\" # disabling caching for index blob public_ci_dummy_index_blob.upload_from_filename(index_zip_path) logging.success(\"Finished uploading", "import logging_wrapper as logging MAX_SECONDS_TO_WAIT_FOR_LOCK = 600 LOCK_FILE_PATH = 'lock.txt'", "help=\"Full path to the dummy index in the private CI", "sys.exit(1) if total_seconds_waited % 60 == 0: # Printing a", "full path. extract_destination_path (str): extract folder full path. 
public_ci_dummy_index_blob (Blob):", "Tests.scripts.utils import logging_wrapper as logging MAX_SECONDS_TO_WAIT_FOR_LOCK = 600 LOCK_FILE_PATH =", "0 change_packs_price_to_zero(public_index_folder_path) private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket, extract_destination_path, public_index_folder_path, changed_pack,", "import init_storage_client from Tests.scripts.utils.log_util import install_logging from Tests.scripts.utils import logging_wrapper", "public_ci_dummy_index_blob, build_number, private_packs): \"\"\"Upload updated index zip to cloud storage.", "total_seconds_waited % 60 == 0: # Printing a message every", "message every minute to keep the machine from dying due", "arguments object. \"\"\" parser = argparse.ArgumentParser(description=\"Store packs in cloud storage.\")", "pack_dir.is_dir()] for pack_name in packs_in_private_index: path_to_pack_in_private_index = os.path.join(private_index_folder_path, pack_name) path_to_pack_in_public_index", "and their price. 
\"\"\" with open(os.path.join(public_index_folder_path, \"index.json\"), \"w+\") as index_file:", "packs_from_dummy_index]) if not is_pack_in_dummy_index: packs_from_dummy_index.append(pack) os.remove(downloaded_dummy_index_path) shutil.rmtree(extracted_dummy_index_path) return packs_from_dummy_index def", "os.scandir(public_index_folder_path) if pack_dir.is_dir()] for path_to_pack in paths_to_packs_in_merged_index: path_to_pack_metadata = os.path.join(path_to_pack,", "import install_logging from Tests.scripts.utils import logging_wrapper as logging MAX_SECONDS_TO_WAIT_FOR_LOCK =", "json.dump(pack_metadata, pack_metadata_file, indent=4) def change_packs_price_to_zero(public_index_folder_path): paths_to_packs_in_merged_index = [pack_dir.path for pack_dir", "acquire lock, exceeded max wait time.\") sys.exit(1) if total_seconds_waited %", "os.mkdir(extracted_dummy_index_path) if os.path.exists(downloaded_dummy_index_path): with ZipFile(downloaded_dummy_index_path, 'r') as index_zip: index_zip.extractall(extracted_dummy_index_path) with", "paths_to_packs_in_merged_index = [pack_dir.path for pack_dir in os.scandir(public_index_folder_path) if pack_dir.is_dir()] for", "public bucket name\", required=True) parser.add_argument('-pb', '--private_bucket_name', help=\"CI private bucket name\",", "private_packs } json.dump(index, index_file, indent=4) index_zip_name = os.path.basename(public_index_folder_path) index_zip_path =", "google cloud storage object that represents the dummy index.zip blob.", "index.zip blob. 
build_number (str): circleCI build number, used as an", "0 index = { 'revision': build_number, 'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), 'packs': private_packs", "lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): extract_packs_artifacts(packs_artifacts_path, extract_destination_path) public_index_folder_path, public_index_blob, _ = download_and_extract_index(public_storage_bucket, extract_public_index_path,", "if not is_pack_in_dummy_index: packs_from_dummy_index.append(pack) os.remove(downloaded_dummy_index_path) shutil.rmtree(extracted_dummy_index_path) return packs_from_dummy_index def main():", "Tests.Marketplace.marketplace_services import init_storage_client from Tests.scripts.utils.log_util import install_logging from Tests.scripts.utils import", "manager.\") finally: release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) def change_pack_price_to_zero(path_to_pack_metadata): with open(path_to_pack_metadata, 'r') as", "logging.exception(\"Failed in uploading index. 
Mismatch in index file generation.\") sys.exit(1)", "MAX_SECONDS_TO_WAIT_FOR_LOCK = 600 LOCK_FILE_PATH = 'lock.txt' @contextmanager def lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path):", "packs to be downloaded successfully, their price has to be", "dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) dummy_index_lock_blob.delete() os.remove(LOCK_FILE_PATH) def add_private_packs_from_dummy_index(private_packs, dummy_index_blob): downloaded_dummy_index_path =", "'rb') as lock_file: dummy_index_lock_blob.upload_from_file(lock_file) def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): total_seconds_waited = 0", "dummy_index_json_path = os.path.join(extracted_dummy_index_path, 'index', 'index.json') dummy_index_blob.download_to_filename(downloaded_dummy_index_path) os.mkdir(extracted_dummy_index_path) if os.path.exists(downloaded_dummy_index_path): with", "def upload_modified_index(public_index_folder_path, extract_destination_path, public_ci_dummy_index_blob, build_number, private_packs): \"\"\"Upload updated index zip", "of folder to extract wanted packs\", required=True) parser.add_argument('-di', '--dummy_index_dir_path', help=\"Full", "extract_destination_path) public_index_folder_path, public_index_blob, _ = download_and_extract_index(public_storage_bucket, extract_public_index_path, storage_base_path) # In", "= upload_config.ci_build_number public_bucket_name = upload_config.public_bucket_name private_bucket_name = upload_config.private_bucket_name storage_base_path =", "'lock.txt') storage_client = init_storage_client(service_account) public_storage_bucket = storage_client.bucket(public_bucket_name) private_storage_bucket = storage_client.bucket(private_bucket_name)", "for private_pack in private_packs: private_pack['price'] = 0 index = {", "'packs': private_packs } json.dump(index, index_file, indent=4) index_zip_name = os.path.basename(public_index_folder_path) index_zip_path", "gcs.\") 
parser.add_argument('-a', '--artifacts_path', help=\"The full path of packs artifacts\", required=True)", "index_json = json.load(index_file) packs_from_dummy_index = index_json.get('packs', []) for pack in", "application-default login` and leave this parameter blank. \" \"For more", "'revision': build_number, 'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), 'packs': private_packs } json.dump(index, index_file, indent=4)", "in os.scandir(public_index_folder_path) if pack_dir.is_dir()] for path_to_pack in paths_to_packs_in_merged_index: path_to_pack_metadata =", "with open(os.path.join(public_index_folder_path, \"index.json\"), \"w+\") as index_file: for private_pack in private_packs:", "as an index revision. private_packs (list): List of private packs", "datetime import datetime from Tests.private_build.upload_packs_private import download_and_extract_index, update_index_with_priced_packs, \\ extract_packs_artifacts", "is_pack_in_dummy_index: packs_from_dummy_index.append(pack) os.remove(downloaded_dummy_index_path) shutil.rmtree(extracted_dummy_index_path) return packs_from_dummy_index def main(): install_logging('prepare_public_index_for_private_testing.log', logger=logging)", "hash revision at index file)\", required=True) parser.add_argument('-e', '--extract_public_index_path', help=\"Full path", "0 with open(path_to_pack_metadata, 'w') as pack_metadata_file: json.dump(pack_metadata, pack_metadata_file, indent=4) def", "= json.load(pack_metadata_file) pack_metadata['price'] = 0 with open(path_to_pack_metadata, 'w') as pack_metadata_file:", "\"w+\") as index_file: for private_pack in private_packs: private_pack['price'] = 0", "upload_config.extract_public_index_path changed_pack = upload_config.pack_name extract_destination_path = upload_config.extract_artifacts_path packs_artifacts_path = upload_config.artifacts_path", "to keep the machine from dying due to no output", "dummy_index_lock_path): total_seconds_waited = 0 while 
is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): if total_seconds_waited >=", "parser.add_argument('-di', '--dummy_index_dir_path', help=\"Full path to the dummy index in the", "index\", required=True) parser.add_argument('-sb', '--storage_base_path', help=\"Storage base path of the directory", "context manager.\") finally: release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) def change_pack_price_to_zero(path_to_pack_metadata): with open(path_to_pack_metadata, 'r')", "LOCK_FILE_PATH = 'lock.txt' @contextmanager def lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): try: acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path)", "dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) return dummy_index_lock_blob.exists() def lock_dummy_index(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob =", "private_packs): \"\"\"Upload updated index zip to cloud storage. Args: public_index_folder_path", "= os.path.join(dummy_index_dir_path, 'lock.txt') storage_client = init_storage_client(service_account) public_storage_bucket = storage_client.bucket(public_bucket_name) private_storage_bucket", "dummy_index_lock_path) def release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) dummy_index_lock_blob.delete() os.remove(LOCK_FILE_PATH) def", "Namespace: Parsed arguments object. \"\"\" parser = argparse.ArgumentParser(description=\"Store packs in", "be used as hash revision at index file)\", required=True) parser.add_argument('-e',", "pack_dir in os.scandir(private_index_folder_path) if pack_dir.is_dir()] for pack_name in packs_in_private_index: path_to_pack_in_private_index", "build_number, private_packs): \"\"\"Upload updated index zip to cloud storage. 
Args:", "blob public_ci_dummy_index_blob.upload_from_filename(index_zip_path) logging.success(\"Finished uploading index.zip to storage.\") except Exception: logging.exception(\"Failed", "public_bucket_name = upload_config.public_bucket_name private_bucket_name = upload_config.private_bucket_name storage_base_path = upload_config.storage_base_path extract_public_index_path", "os.path.join(public_index_folder_path, pack_name) shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index) def upload_modified_index(public_index_folder_path, extract_destination_path, public_ci_dummy_index_blob, build_number, private_packs):", "to gcloud service account, is for circleCI usage. \" \"For", "= os.path.join(private_index_folder_path, pack_name) path_to_pack_in_public_index = os.path.join(public_index_folder_path, pack_name) shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index) def", "if pack_dir.is_dir()] for pack_name in packs_in_private_index: path_to_pack_in_private_index = os.path.join(private_index_folder_path, pack_name)", "time import os import sys import shutil import json import", "packs artifacts\", required=True) parser.add_argument('-ea', '--extract_artifacts_path', help=\"Full path of folder to", "acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) yield except Exception: logging.exception(\"Error in dummy index lock", "logging.info(\"Waiting to acquire lock.\") total_seconds_waited += 10 time.sleep(10) lock_dummy_index(public_storage_bucket, dummy_index_lock_path)", "blob. build_number (str): circleCI build number, used as an index", "parses script arguments. Returns: Namespace: Parsed arguments object. 
\"\"\" parser", "dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) with open(LOCK_FILE_PATH, 'w') as lock_file: lock_file.write('locked')", "from Tests.scripts.utils.log_util import install_logging from Tests.scripts.utils import logging_wrapper as logging", "\" \"`gcloud auth application-default login` and leave this parameter blank.", "to extract wanted packs\", required=True) parser.add_argument('-di', '--dummy_index_dir_path', help=\"Full path to", "open(path_to_pack_metadata, 'r') as pack_metadata_file: pack_metadata = json.load(pack_metadata_file) pack_metadata['price'] = 0", "# disable-secrets-detection-start parser.add_argument('-b', '--public_bucket_name', help=\"CI public bucket name\", required=True) parser.add_argument('-pb',", "= 'dummy_index' dummy_index_json_path = os.path.join(extracted_dummy_index_path, 'index', 'index.json') dummy_index_blob.download_to_filename(downloaded_dummy_index_path) os.mkdir(extracted_dummy_index_path) if", "'dummy_index' dummy_index_json_path = os.path.join(extracted_dummy_index_path, 'index', 'index.json') dummy_index_blob.download_to_filename(downloaded_dummy_index_path) os.mkdir(extracted_dummy_index_path) if os.path.exists(downloaded_dummy_index_path):", "pack_name) path_to_pack_in_public_index = os.path.join(public_index_folder_path, pack_name) shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index) def upload_modified_index(public_index_folder_path, extract_destination_path,", "os import sys import shutil import json import argparse from", "== dummy_index_pack['id'] for dummy_index_pack in packs_from_dummy_index]) if not is_pack_in_dummy_index: packs_from_dummy_index.append(pack)", "public_storage_bucket.blob(dummy_index_path) with lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): extract_packs_artifacts(packs_artifacts_path, extract_destination_path) public_index_folder_path, public_index_blob, _ =", "parser.parse_args() def 
is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) return dummy_index_lock_blob.exists() def", "auth application-default login` and leave this parameter blank. \" \"For", "dummy_index_lock_blob.delete() os.remove(LOCK_FILE_PATH) def add_private_packs_from_dummy_index(private_packs, dummy_index_blob): downloaded_dummy_index_path = 'current_dummy_index.zip' extracted_dummy_index_path =", "upload_config = option_handler() service_account = upload_config.service_account build_number = upload_config.ci_build_number public_bucket_name", "SDK by running: \" \"`gcloud auth application-default login` and leave", "to extract the public index\", required=True) parser.add_argument('-sb', '--storage_base_path', help=\"Storage base", "upload_config.public_bucket_name private_bucket_name = upload_config.private_bucket_name storage_base_path = upload_config.storage_base_path extract_public_index_path = upload_config.extract_public_index_path", "is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): if total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK: logging.critical(\"Error: Failed too long", "revision. private_packs (list): List of private packs and their price.", "def main(): install_logging('prepare_public_index_for_private_testing.log', logger=logging) upload_config = option_handler() service_account = upload_config.service_account", "folder full path. extract_destination_path (str): extract folder full path. 
public_ci_dummy_index_blob", "json.load(index_file) packs_from_dummy_index = index_json.get('packs', []) for pack in private_packs: is_pack_in_dummy_index", "pack_metadata = json.load(pack_metadata_file) pack_metadata['price'] = 0 with open(path_to_pack_metadata, 'w') as", "'--ci_build_number', help=\"CircleCi build number (will be used as hash revision", "downloaded_dummy_index_path = 'current_dummy_index.zip' extracted_dummy_index_path = 'dummy_index' dummy_index_json_path = os.path.join(extracted_dummy_index_path, 'index',", "local development use your personal account and \" \"authenticate using", "dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) dummy_index_lock_blob.delete() os.remove(LOCK_FILE_PATH) def add_private_packs_from_dummy_index(private_packs, dummy_index_blob): downloaded_dummy_index_path", "zipfile import ZipFile from contextlib import contextmanager from datetime import", "pack_dir in os.scandir(public_index_folder_path) if pack_dir.is_dir()] for path_to_pack in paths_to_packs_in_merged_index: path_to_pack_metadata", "parser = argparse.ArgumentParser(description=\"Store packs in cloud storage.\") # disable-secrets-detection-start parser.add_argument('-b',", "folder to extract the public index\", required=True) parser.add_argument('-sb', '--storage_base_path', help=\"Storage", "for pack in private_packs: is_pack_in_dummy_index = any( [pack['id'] == dummy_index_pack['id']", "dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) with open(LOCK_FILE_PATH, 'w') as lock_file: lock_file.write('locked') with", "disable-secrets-detection-end return parser.parse_args() def is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) return", "private packs and their price. 
\"\"\" with open(os.path.join(public_index_folder_path, \"index.json\"), \"w+\")", "update_index_with_priced_packs(private_storage_bucket, extract_destination_path, public_index_folder_path, changed_pack, True, storage_base_path) private_packs = add_private_packs_from_dummy_index(private_packs, dummy_index_blob)", "exceeded max wait time.\") sys.exit(1) if total_seconds_waited % 60 ==", "Mismatch in index file generation.\") sys.exit(1) finally: shutil.rmtree(public_index_folder_path) def option_handler():", "release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) def change_pack_price_to_zero(path_to_pack_metadata): with open(path_to_pack_metadata, 'r') as pack_metadata_file: pack_metadata", "every minute to keep the machine from dying due to", "upload to gcs.\") parser.add_argument('-a', '--artifacts_path', help=\"The full path of packs", "required=True) parser.add_argument('-pb', '--private_bucket_name', help=\"CI private bucket name\", required=True) parser.add_argument('-s', '--service_account',", "= os.path.join(public_index_folder_path, pack_name) shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index) def upload_modified_index(public_index_folder_path, extract_destination_path, public_ci_dummy_index_blob, build_number,", "private_pack['price'] = 0 index = { 'revision': build_number, 'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'),", "datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), 'packs': private_packs } json.dump(index, index_file, indent=4) index_zip_name = os.path.basename(public_index_folder_path)", "directory to upload to.\", required=False) parser.add_argument('-p', '--pack_name', help=\"Modified pack to", "ZipFile from contextlib import contextmanager from datetime import datetime from", "help=\"CI private bucket name\", required=True) parser.add_argument('-s', '--service_account', help=(\"Path to gcloud", "base_dir=index_zip_name) try: public_ci_dummy_index_blob.reload() 
public_ci_dummy_index_blob.cache_control = \"no-cache,max-age=0\" # disabling caching for", "pack_metadata_file: pack_metadata = json.load(pack_metadata_file) pack_metadata['price'] = 0 with open(path_to_pack_metadata, 'w')", "order for the packs to be downloaded successfully, their price", "as lock_file: dummy_index_lock_blob.upload_from_file(lock_file) def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): total_seconds_waited = 0 while", "extract the public index\", required=True) parser.add_argument('-sb', '--storage_base_path', help=\"Storage base path", "import download_and_extract_index, update_index_with_priced_packs, \\ extract_packs_artifacts from Tests.Marketplace.marketplace_services import init_storage_client from", "as logging MAX_SECONDS_TO_WAIT_FOR_LOCK = 600 LOCK_FILE_PATH = 'lock.txt' @contextmanager def", "download_and_extract_index(public_storage_bucket, extract_public_index_path, storage_base_path) # In order for the packs to", "'index.json') dummy_index_blob.download_to_filename(downloaded_dummy_index_path) os.mkdir(extracted_dummy_index_path) if os.path.exists(downloaded_dummy_index_path): with ZipFile(downloaded_dummy_index_path, 'r') as index_zip:", "represents the dummy index.zip blob. 
build_number (str): circleCI build number,", "packs_in_private_index = [pack_dir.name for pack_dir in os.scandir(private_index_folder_path) if pack_dir.is_dir()] for", "disabling caching for index blob public_ci_dummy_index_blob.upload_from_filename(index_zip_path) logging.success(\"Finished uploading index.zip to", "= 'lock.txt' @contextmanager def lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): try: acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) yield", "index = { 'revision': build_number, 'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), 'packs': private_packs }", "lock_dummy_index(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) with open(LOCK_FILE_PATH, 'w') as lock_file:", "# disable-secrets-detection-end return parser.parse_args() def is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path)", "storage_base_path = upload_config.storage_base_path extract_public_index_path = upload_config.extract_public_index_path changed_pack = upload_config.pack_name extract_destination_path", "import ZipFile from contextlib import contextmanager from datetime import datetime", "add_private_packs_from_dummy_index(private_packs, dummy_index_blob): downloaded_dummy_index_path = 'current_dummy_index.zip' extracted_dummy_index_path = 'dummy_index' dummy_index_json_path =", "acquire lock.\") total_seconds_waited += 10 time.sleep(10) lock_dummy_index(public_storage_bucket, dummy_index_lock_path) def release_dummy_index_lock(public_storage_bucket,", "packs_from_dummy_index def main(): install_logging('prepare_public_index_for_private_testing.log', logger=logging) upload_config = option_handler() service_account =", "required=True) parser.add_argument('-e', '--extract_public_index_path', help=\"Full path of folder to extract the", "artifacts\", required=True) 
parser.add_argument('-ea', '--extract_artifacts_path', help=\"Full path of folder to extract", "index. Mismatch in index file generation.\") sys.exit(1) finally: shutil.rmtree(public_index_folder_path) def", "required=False) parser.add_argument('-n', '--ci_build_number', help=\"CircleCi build number (will be used as", "= argparse.ArgumentParser(description=\"Store packs in cloud storage.\") # disable-secrets-detection-start parser.add_argument('-b', '--public_bucket_name',", "\"authenticate using Google Cloud SDK by running: \" \"`gcloud auth", "'current_dummy_index.zip' extracted_dummy_index_path = 'dummy_index' dummy_index_json_path = os.path.join(extracted_dummy_index_path, 'index', 'index.json') dummy_index_blob.download_to_filename(downloaded_dummy_index_path)", "'--extract_public_index_path', help=\"Full path of folder to extract the public index\",", "parser.add_argument('-b', '--public_bucket_name', help=\"CI public bucket name\", required=True) parser.add_argument('-pb', '--private_bucket_name', help=\"CI", "number (will be used as hash revision at index file)\",", "upload_config.artifacts_path dummy_index_dir_path = upload_config.dummy_index_dir_path dummy_index_path = os.path.join(dummy_index_dir_path, 'index.zip') dummy_index_lock_path =", "os.scandir(private_index_folder_path) if pack_dir.is_dir()] for pack_name in packs_in_private_index: path_to_pack_in_private_index = os.path.join(private_index_folder_path,", "upload_config.extract_artifacts_path packs_artifacts_path = upload_config.artifacts_path dummy_index_dir_path = upload_config.dummy_index_dir_path dummy_index_path = os.path.join(dummy_index_dir_path,", "0 while is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): if total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK: logging.critical(\"Error: Failed", "build_number (str): circleCI build number, used as an index revision.", "dummy_index_path = os.path.join(dummy_index_dir_path, 'index.zip') dummy_index_lock_path = 
os.path.join(dummy_index_dir_path, 'lock.txt') storage_client =", "'--storage_base_path', help=\"Storage base path of the directory to upload to.\",", "public_storage_bucket.blob(dummy_index_lock_path) with open(LOCK_FILE_PATH, 'w') as lock_file: lock_file.write('locked') with open(LOCK_FILE_PATH, 'rb')", "[pack['id'] == dummy_index_pack['id'] for dummy_index_pack in packs_from_dummy_index]) if not is_pack_in_dummy_index:", "private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket, extract_destination_path, public_index_folder_path, changed_pack, True, storage_base_path) private_packs", "not is_pack_in_dummy_index: packs_from_dummy_index.append(pack) os.remove(downloaded_dummy_index_path) shutil.rmtree(extracted_dummy_index_path) return packs_from_dummy_index def main(): install_logging('prepare_public_index_for_private_testing.log',", "no output logging.info(\"Waiting to acquire lock.\") total_seconds_waited += 10 time.sleep(10)", "help=\"CI public bucket name\", required=True) parser.add_argument('-pb', '--private_bucket_name', help=\"CI private bucket", "to no output logging.info(\"Waiting to acquire lock.\") total_seconds_waited += 10", "packs_artifacts_path = upload_config.artifacts_path dummy_index_dir_path = upload_config.dummy_index_dir_path dummy_index_path = os.path.join(dummy_index_dir_path, 'index.zip')", "extract_packs_artifacts from Tests.Marketplace.marketplace_services import init_storage_client from Tests.scripts.utils.log_util import install_logging from", "'--public_bucket_name', help=\"CI public bucket name\", required=True) parser.add_argument('-pb', '--private_bucket_name', help=\"CI private", "as index_zip: index_zip.extractall(extracted_dummy_index_path) with open(dummy_index_json_path) as index_file: index_json = json.load(index_file)", "upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob, build_number, private_packs) if __name__ == '__main__': main()", "go to: \" 
\"https://googleapis.dev/python/google-api-core/latest/auth.html\"), required=False) parser.add_argument('-n', '--ci_build_number', help=\"CircleCi build number", "'lock.txt' @contextmanager def lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): try: acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) yield except", "public_ci_dummy_index_blob (Blob): google cloud storage object that represents the dummy", "Args: public_index_folder_path (str): public index folder full path. extract_destination_path (str):", ">= MAX_SECONDS_TO_WAIT_FOR_LOCK: logging.critical(\"Error: Failed too long to acquire lock, exceeded", "import sys import shutil import json import argparse from zipfile", "dummy_index_lock_path) def change_pack_price_to_zero(path_to_pack_metadata): with open(path_to_pack_metadata, 'r') as pack_metadata_file: pack_metadata =", "long to acquire lock, exceeded max wait time.\") sys.exit(1) if", "dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) return dummy_index_lock_blob.exists() def lock_dummy_index(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob", "dummy_index_lock_path) yield except Exception: logging.exception(\"Error in dummy index lock context", "name\", required=True) parser.add_argument('-s', '--service_account', help=(\"Path to gcloud service account, is", "for path_to_pack in paths_to_packs_in_merged_index: path_to_pack_metadata = os.path.join(path_to_pack, 'metadata.json') change_pack_price_to_zero(path_to_pack_metadata) def", "CI bucket\", required=True) # disable-secrets-detection-end return parser.parse_args() def is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path):", "service_account = upload_config.service_account build_number = upload_config.ci_build_number public_bucket_name = upload_config.public_bucket_name private_bucket_name", "'--pack_name', help=\"Modified pack to upload to gcs.\") parser.add_argument('-a', 
'--artifacts_path', help=\"The", "storage_base_path) private_packs = add_private_packs_from_dummy_index(private_packs, dummy_index_blob) upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob, build_number, private_packs)", "\"For local development use your personal account and \" \"authenticate", "in paths_to_packs_in_merged_index: path_to_pack_metadata = os.path.join(path_to_pack, 'metadata.json') change_pack_price_to_zero(path_to_pack_metadata) def merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path):", "contextmanager from datetime import datetime from Tests.private_build.upload_packs_private import download_and_extract_index, update_index_with_priced_packs,", "# Printing a message every minute to keep the machine", "build number (will be used as hash revision at index", "{ 'revision': build_number, 'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), 'packs': private_packs } json.dump(index, index_file,", "import shutil import json import argparse from zipfile import ZipFile", "Failed too long to acquire lock, exceeded max wait time.\")", "= download_and_extract_index(public_storage_bucket, extract_public_index_path, storage_base_path) # In order for the packs", "from dying due to no output logging.info(\"Waiting to acquire lock.\")", "\"\"\"Upload updated index zip to cloud storage. 
Args: public_index_folder_path (str):", "def lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): try: acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) yield except Exception: logging.exception(\"Error", "successfully, their price has to be 0 change_packs_price_to_zero(public_index_folder_path) private_packs, private_index_path,", "= storage_client.bucket(private_bucket_name) dummy_index_blob = public_storage_bucket.blob(dummy_index_path) with lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): extract_packs_artifacts(packs_artifacts_path, extract_destination_path)", "= 0 index = { 'revision': build_number, 'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), 'packs':", "for pack_name in packs_in_private_index: path_to_pack_in_private_index = os.path.join(private_index_folder_path, pack_name) path_to_pack_in_public_index =", "development use your personal account and \" \"authenticate using Google", "[pack_dir.path for pack_dir in os.scandir(public_index_folder_path) if pack_dir.is_dir()] for path_to_pack in", "public_index_folder_path, changed_pack, True, storage_base_path) private_packs = add_private_packs_from_dummy_index(private_packs, dummy_index_blob) upload_modified_index(public_index_folder_path, extract_public_index_path,", "= os.path.join(path_to_pack, 'metadata.json') change_pack_price_to_zero(path_to_pack_metadata) def merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path): packs_in_private_index = [pack_dir.name", "os.path.exists(downloaded_dummy_index_path): with ZipFile(downloaded_dummy_index_path, 'r') as index_zip: index_zip.extractall(extracted_dummy_index_path) with open(dummy_index_json_path) as", "that represents the dummy index.zip blob. 
build_number (str): circleCI build", "upload_config.private_bucket_name storage_base_path = upload_config.storage_base_path extract_public_index_path = upload_config.extract_public_index_path changed_pack = upload_config.pack_name", "os.path.join(private_index_folder_path, pack_name) path_to_pack_in_public_index = os.path.join(public_index_folder_path, pack_name) shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index) def upload_modified_index(public_index_folder_path,", "os.path.basename(public_index_folder_path) index_zip_path = shutil.make_archive(base_name=public_index_folder_path, format=\"zip\", root_dir=extract_destination_path, base_dir=index_zip_name) try: public_ci_dummy_index_blob.reload() public_ci_dummy_index_blob.cache_control", "of the directory to upload to.\", required=False) parser.add_argument('-p', '--pack_name', help=\"Modified", "'w') as lock_file: lock_file.write('locked') with open(LOCK_FILE_PATH, 'rb') as lock_file: dummy_index_lock_blob.upload_from_file(lock_file)", "in private_packs: private_pack['price'] = 0 index = { 'revision': build_number,", "output logging.info(\"Waiting to acquire lock.\") total_seconds_waited += 10 time.sleep(10) lock_dummy_index(public_storage_bucket,", "pack_metadata_file, indent=4) def change_packs_price_to_zero(public_index_folder_path): paths_to_packs_in_merged_index = [pack_dir.path for pack_dir in", "argparse from zipfile import ZipFile from contextlib import contextmanager from", "with open(path_to_pack_metadata, 'r') as pack_metadata_file: pack_metadata = json.load(pack_metadata_file) pack_metadata['price'] =", "public_storage_bucket.blob(dummy_index_lock_path) dummy_index_lock_blob.delete() os.remove(LOCK_FILE_PATH) def add_private_packs_from_dummy_index(private_packs, dummy_index_blob): downloaded_dummy_index_path = 'current_dummy_index.zip' extracted_dummy_index_path", "disable-secrets-detection-start parser.add_argument('-b', '--public_bucket_name', help=\"CI public bucket name\", 
required=True) parser.add_argument('-pb', '--private_bucket_name',", "required=False) parser.add_argument('-p', '--pack_name', help=\"Modified pack to upload to gcs.\") parser.add_argument('-a',", "Tests.scripts.utils.log_util import install_logging from Tests.scripts.utils import logging_wrapper as logging MAX_SECONDS_TO_WAIT_FOR_LOCK", "'metadata.json') change_pack_price_to_zero(path_to_pack_metadata) def merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path): packs_in_private_index = [pack_dir.name for pack_dir", "object that represents the dummy index.zip blob. build_number (str): circleCI", "index_file: for private_pack in private_packs: private_pack['price'] = 0 index =", "too long to acquire lock, exceeded max wait time.\") sys.exit(1)", "as pack_metadata_file: pack_metadata = json.load(pack_metadata_file) pack_metadata['price'] = 0 with open(path_to_pack_metadata,", "while is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): if total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK: logging.critical(\"Error: Failed too", "\"\"\" parser = argparse.ArgumentParser(description=\"Store packs in cloud storage.\") # disable-secrets-detection-start", "private_index_blob = update_index_with_priced_packs(private_storage_bucket, extract_destination_path, public_index_folder_path, changed_pack, True, storage_base_path) private_packs =", "(Blob): google cloud storage object that represents the dummy index.zip", "public_ci_dummy_index_blob.reload() public_ci_dummy_index_blob.cache_control = \"no-cache,max-age=0\" # disabling caching for index blob", "bucket name\", required=True) parser.add_argument('-pb', '--private_bucket_name', help=\"CI private bucket name\", required=True)", "except Exception: logging.exception(\"Error in dummy index lock context manager.\") finally:", "} json.dump(index, index_file, indent=4) index_zip_name = os.path.basename(public_index_folder_path) index_zip_path = 
shutil.make_archive(base_name=public_index_folder_path,", "open(LOCK_FILE_PATH, 'rb') as lock_file: dummy_index_lock_blob.upload_from_file(lock_file) def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): total_seconds_waited =", "upload_config.ci_build_number public_bucket_name = upload_config.public_bucket_name private_bucket_name = upload_config.private_bucket_name storage_base_path = upload_config.storage_base_path", "\" \"https://googleapis.dev/python/google-api-core/latest/auth.html\"), required=False) parser.add_argument('-n', '--ci_build_number', help=\"CircleCi build number (will be", "the dummy index in the private CI bucket\", required=True) #", "= add_private_packs_from_dummy_index(private_packs, dummy_index_blob) upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob, build_number, private_packs) if __name__", "upload_config.pack_name extract_destination_path = upload_config.extract_artifacts_path packs_artifacts_path = upload_config.artifacts_path dummy_index_dir_path = upload_config.dummy_index_dir_path", "index in the private CI bucket\", required=True) # disable-secrets-detection-end return", "extract_public_index_path = upload_config.extract_public_index_path changed_pack = upload_config.pack_name extract_destination_path = upload_config.extract_artifacts_path packs_artifacts_path", "the dummy index.zip blob. 
build_number (str): circleCI build number, used", "lock context manager.\") finally: release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) def change_pack_price_to_zero(path_to_pack_metadata): with open(path_to_pack_metadata,", "be downloaded successfully, their price has to be 0 change_packs_price_to_zero(public_index_folder_path)", "dummy index in the private CI bucket\", required=True) # disable-secrets-detection-end", "install_logging from Tests.scripts.utils import logging_wrapper as logging MAX_SECONDS_TO_WAIT_FOR_LOCK = 600", "extract_public_index_path, storage_base_path) # In order for the packs to be", "and leave this parameter blank. \" \"For more information go", "True, storage_base_path) private_packs = add_private_packs_from_dummy_index(private_packs, dummy_index_blob) upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob, build_number,", "Exception: logging.exception(\"Error in dummy index lock context manager.\") finally: release_dummy_index_lock(public_storage_bucket,", "import contextmanager from datetime import datetime from Tests.private_build.upload_packs_private import download_and_extract_index,", "to upload to gcs.\") parser.add_argument('-a', '--artifacts_path', help=\"The full path of", "open(os.path.join(public_index_folder_path, \"index.json\"), \"w+\") as index_file: for private_pack in private_packs: private_pack['price']", "time.\") sys.exit(1) if total_seconds_waited % 60 == 0: # Printing", "from Tests.Marketplace.marketplace_services import init_storage_client from Tests.scripts.utils.log_util import install_logging from Tests.scripts.utils", "with open(dummy_index_json_path) as index_file: index_json = json.load(index_file) packs_from_dummy_index = index_json.get('packs',", "used as hash revision at index file)\", required=True) parser.add_argument('-e', '--extract_public_index_path',", "= upload_config.extract_public_index_path changed_pack = upload_config.pack_name 
extract_destination_path = upload_config.extract_artifacts_path packs_artifacts_path =", "help=\"CircleCi build number (will be used as hash revision at", "caching for index blob public_ci_dummy_index_blob.upload_from_filename(index_zip_path) logging.success(\"Finished uploading index.zip to storage.\")", "running: \" \"`gcloud auth application-default login` and leave this parameter", "index_zip: index_zip.extractall(extracted_dummy_index_path) with open(dummy_index_json_path) as index_file: index_json = json.load(index_file) packs_from_dummy_index", "price has to be 0 change_packs_price_to_zero(public_index_folder_path) private_packs, private_index_path, private_index_blob =", "private_packs, private_index_path, private_index_blob = update_index_with_priced_packs(private_storage_bucket, extract_destination_path, public_index_folder_path, changed_pack, True, storage_base_path)", "Printing a message every minute to keep the machine from", "as lock_file: lock_file.write('locked') with open(LOCK_FILE_PATH, 'rb') as lock_file: dummy_index_lock_blob.upload_from_file(lock_file) def", "help=\"The full path of packs artifacts\", required=True) parser.add_argument('-ea', '--extract_artifacts_path', help=\"Full", "600 LOCK_FILE_PATH = 'lock.txt' @contextmanager def lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): try: acquire_dummy_index_lock(public_storage_bucket,", "(str): circleCI build number, used as an index revision. private_packs", "shutil import json import argparse from zipfile import ZipFile from", "index lock context manager.\") finally: release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) def change_pack_price_to_zero(path_to_pack_metadata): with", "in dummy index lock context manager.\") finally: release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) def", "private_packs (list): List of private packs and their price. 
\"\"\"", "merge_private_index_into_public_index(public_index_folder_path, private_index_folder_path): packs_in_private_index = [pack_dir.name for pack_dir in os.scandir(private_index_folder_path) if", "is for circleCI usage. \" \"For local development use your", "= upload_config.public_bucket_name private_bucket_name = upload_config.private_bucket_name storage_base_path = upload_config.storage_base_path extract_public_index_path =", "dummy_index_blob.download_to_filename(downloaded_dummy_index_path) os.mkdir(extracted_dummy_index_path) if os.path.exists(downloaded_dummy_index_path): with ZipFile(downloaded_dummy_index_path, 'r') as index_zip: index_zip.extractall(extracted_dummy_index_path)", "zip to cloud storage. Args: public_index_folder_path (str): public index folder", "os.remove(LOCK_FILE_PATH) def add_private_packs_from_dummy_index(private_packs, dummy_index_blob): downloaded_dummy_index_path = 'current_dummy_index.zip' extracted_dummy_index_path = 'dummy_index'", "pack_name) shutil.copy(path_to_pack_in_private_index, path_to_pack_in_public_index) def upload_modified_index(public_index_folder_path, extract_destination_path, public_ci_dummy_index_blob, build_number, private_packs): \"\"\"Upload", "change_pack_price_to_zero(path_to_pack_metadata): with open(path_to_pack_metadata, 'r') as pack_metadata_file: pack_metadata = json.load(pack_metadata_file) pack_metadata['price']", "= 0 with open(path_to_pack_metadata, 'w') as pack_metadata_file: json.dump(pack_metadata, pack_metadata_file, indent=4)", "time.sleep(10) lock_dummy_index(public_storage_bucket, dummy_index_lock_path) def release_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) dummy_index_lock_blob.delete()", "path of the directory to upload to.\", required=False) parser.add_argument('-p', '--pack_name',", "in cloud storage.\") # disable-secrets-detection-start parser.add_argument('-b', '--public_bucket_name', 
help=\"CI public bucket", "due to no output logging.info(\"Waiting to acquire lock.\") total_seconds_waited +=", "\"\"\"Validates and parses script arguments. Returns: Namespace: Parsed arguments object.", "if total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK: logging.critical(\"Error: Failed too long to acquire", "path_to_pack in paths_to_packs_in_merged_index: path_to_pack_metadata = os.path.join(path_to_pack, 'metadata.json') change_pack_price_to_zero(path_to_pack_metadata) def merge_private_index_into_public_index(public_index_folder_path,", "[pack_dir.name for pack_dir in os.scandir(private_index_folder_path) if pack_dir.is_dir()] for pack_name in", "login` and leave this parameter blank. \" \"For more information", "to: \" \"https://googleapis.dev/python/google-api-core/latest/auth.html\"), required=False) parser.add_argument('-n', '--ci_build_number', help=\"CircleCi build number (will", "storage.\") except Exception: logging.exception(\"Failed in uploading index. Mismatch in index", "with open(LOCK_FILE_PATH, 'rb') as lock_file: dummy_index_lock_blob.upload_from_file(lock_file) def acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path): total_seconds_waited", "try: acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) yield except Exception: logging.exception(\"Error in dummy index", "circleCI build number, used as an index revision. private_packs (list):", "price. 
\"\"\" with open(os.path.join(public_index_folder_path, \"index.json\"), \"w+\") as index_file: for private_pack", "in index file generation.\") sys.exit(1) finally: shutil.rmtree(public_index_folder_path) def option_handler(): \"\"\"Validates", "extract_destination_path = upload_config.extract_artifacts_path packs_artifacts_path = upload_config.artifacts_path dummy_index_dir_path = upload_config.dummy_index_dir_path dummy_index_path", "\" \"For more information go to: \" \"https://googleapis.dev/python/google-api-core/latest/auth.html\"), required=False) parser.add_argument('-n',", "is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path) return dummy_index_lock_blob.exists() def lock_dummy_index(public_storage_bucket, dummy_index_lock_path):", "help=\"Storage base path of the directory to upload to.\", required=False)", "MAX_SECONDS_TO_WAIT_FOR_LOCK: logging.critical(\"Error: Failed too long to acquire lock, exceeded max", "updated index zip to cloud storage. Args: public_index_folder_path (str): public", "build_number, 'modified': datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ'), 'packs': private_packs } json.dump(index, index_file, indent=4) index_zip_name", "cloud storage. 
Args: public_index_folder_path (str): public index folder full path.", "required=True) # disable-secrets-detection-end return parser.parse_args() def is_dummy_index_locked(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob =", "for index blob public_ci_dummy_index_blob.upload_from_filename(index_zip_path) logging.success(\"Finished uploading index.zip to storage.\") except", "dummy_index_lock_path): if total_seconds_waited >= MAX_SECONDS_TO_WAIT_FOR_LOCK: logging.critical(\"Error: Failed too long to", "= \"no-cache,max-age=0\" # disabling caching for index blob public_ci_dummy_index_blob.upload_from_filename(index_zip_path) logging.success(\"Finished", "extracted_dummy_index_path = 'dummy_index' dummy_index_json_path = os.path.join(extracted_dummy_index_path, 'index', 'index.json') dummy_index_blob.download_to_filename(downloaded_dummy_index_path) os.mkdir(extracted_dummy_index_path)", "downloaded successfully, their price has to be 0 change_packs_price_to_zero(public_index_folder_path) private_packs,", "used as an index revision. private_packs (list): List of private", "dummy_index_blob = public_storage_bucket.blob(dummy_index_path) with lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): extract_packs_artifacts(packs_artifacts_path, extract_destination_path) public_index_folder_path, public_index_blob,", "extract folder full path. public_ci_dummy_index_blob (Blob): google cloud storage object", "their price has to be 0 change_packs_price_to_zero(public_index_folder_path) private_packs, private_index_path, private_index_blob", "extract wanted packs\", required=True) parser.add_argument('-di', '--dummy_index_dir_path', help=\"Full path to the", "account, is for circleCI usage. 
\" \"For local development use", "build_number = upload_config.ci_build_number public_bucket_name = upload_config.public_bucket_name private_bucket_name = upload_config.private_bucket_name storage_base_path", "pack_metadata['price'] = 0 with open(path_to_pack_metadata, 'w') as pack_metadata_file: json.dump(pack_metadata, pack_metadata_file,", "parser.add_argument('-pb', '--private_bucket_name', help=\"CI private bucket name\", required=True) parser.add_argument('-s', '--service_account', help=(\"Path", "path to the dummy index in the private CI bucket\",", "install_logging('prepare_public_index_for_private_testing.log', logger=logging) upload_config = option_handler() service_account = upload_config.service_account build_number =", "upload_modified_index(public_index_folder_path, extract_destination_path, public_ci_dummy_index_blob, build_number, private_packs): \"\"\"Upload updated index zip to", "required=True) parser.add_argument('-di', '--dummy_index_dir_path', help=\"Full path to the dummy index in", "= public_storage_bucket.blob(dummy_index_lock_path) return dummy_index_lock_blob.exists() def lock_dummy_index(public_storage_bucket, dummy_index_lock_path): dummy_index_lock_blob = public_storage_bucket.blob(dummy_index_lock_path)", "= 600 LOCK_FILE_PATH = 'lock.txt' @contextmanager def lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): try:", "'index', 'index.json') dummy_index_blob.download_to_filename(downloaded_dummy_index_path) os.mkdir(extracted_dummy_index_path) if os.path.exists(downloaded_dummy_index_path): with ZipFile(downloaded_dummy_index_path, 'r') as", "os.remove(downloaded_dummy_index_path) shutil.rmtree(extracted_dummy_index_path) return packs_from_dummy_index def main(): install_logging('prepare_public_index_for_private_testing.log', logger=logging) upload_config =", "private_packs: private_pack['price'] = 0 index = { 'revision': build_number, 'modified':", "changed_pack = upload_config.pack_name extract_destination_path 
= upload_config.extract_artifacts_path packs_artifacts_path = upload_config.artifacts_path dummy_index_dir_path", "\"https://googleapis.dev/python/google-api-core/latest/auth.html\"), required=False) parser.add_argument('-n', '--ci_build_number', help=\"CircleCi build number (will be used", "extract_packs_artifacts(packs_artifacts_path, extract_destination_path) public_index_folder_path, public_index_blob, _ = download_and_extract_index(public_storage_bucket, extract_public_index_path, storage_base_path) #", "help=(\"Path to gcloud service account, is for circleCI usage. \"", "= storage_client.bucket(public_bucket_name) private_storage_bucket = storage_client.bucket(private_bucket_name) dummy_index_blob = public_storage_bucket.blob(dummy_index_path) with lock_and_unlock_dummy_index(public_storage_bucket,", "pack_dir.is_dir()] for path_to_pack in paths_to_packs_in_merged_index: path_to_pack_metadata = os.path.join(path_to_pack, 'metadata.json') change_pack_price_to_zero(path_to_pack_metadata)", "in the private CI bucket\", required=True) # disable-secrets-detection-end return parser.parse_args()", "in uploading index. Mismatch in index file generation.\") sys.exit(1) finally:", "sys import shutil import json import argparse from zipfile import", "path. extract_destination_path (str): extract folder full path. public_ci_dummy_index_blob (Blob): google", "to gcs.\") parser.add_argument('-a', '--artifacts_path', help=\"The full path of packs artifacts\",", "storage object that represents the dummy index.zip blob. build_number (str):", "\"\"\" with open(os.path.join(public_index_folder_path, \"index.json\"), \"w+\") as index_file: for private_pack in", "def add_private_packs_from_dummy_index(private_packs, dummy_index_blob): downloaded_dummy_index_path = 'current_dummy_index.zip' extracted_dummy_index_path = 'dummy_index' dummy_index_json_path", "except Exception: logging.exception(\"Failed in uploading index. 
Mismatch in index file", "pack_metadata_file: json.dump(pack_metadata, pack_metadata_file, indent=4) def change_packs_price_to_zero(public_index_folder_path): paths_to_packs_in_merged_index = [pack_dir.path for", "uploading index.zip to storage.\") except Exception: logging.exception(\"Failed in uploading index.", "In order for the packs to be downloaded successfully, their", "datetime from Tests.private_build.upload_packs_private import download_and_extract_index, update_index_with_priced_packs, \\ extract_packs_artifacts from Tests.Marketplace.marketplace_services", "the directory to upload to.\", required=False) parser.add_argument('-p', '--pack_name', help=\"Modified pack", "lock_and_unlock_dummy_index(public_storage_bucket, dummy_index_lock_path): try: acquire_dummy_index_lock(public_storage_bucket, dummy_index_lock_path) yield except Exception: logging.exception(\"Error in", "ZipFile(downloaded_dummy_index_path, 'r') as index_zip: index_zip.extractall(extracted_dummy_index_path) with open(dummy_index_json_path) as index_file: index_json", "option_handler() service_account = upload_config.service_account build_number = upload_config.ci_build_number public_bucket_name = upload_config.public_bucket_name", "private_packs = add_private_packs_from_dummy_index(private_packs, dummy_index_blob) upload_modified_index(public_index_folder_path, extract_public_index_path, dummy_index_blob, build_number, private_packs) if", "if os.path.exists(downloaded_dummy_index_path): with ZipFile(downloaded_dummy_index_path, 'r') as index_zip: index_zip.extractall(extracted_dummy_index_path) with open(dummy_index_json_path)", "storage.\") # disable-secrets-detection-start parser.add_argument('-b', '--public_bucket_name', help=\"CI public bucket name\", required=True)", "leave this parameter blank. \" \"For more information go to:", "service account, is for circleCI usage. \" \"For local development" ]
[]
[ "type='Collect', keys=['imgs', 'flow_gt'], meta_keys=[ 'img_fields', 'ann_fields', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2',", "'flow_gt', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', 'scale_factor', 'pad_shape'", "'img_fields', 'ann_fields', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'filename_flow', 'ori_filename_flow', 'ori_shape', 'img_shape',", "dict( type='Collect', keys=['imgs', 'flow_gt'], meta_keys=[ 'img_fields', 'ann_fields', 'filename1', 'filename2', 'ori_filename1',", "1.5), shear=(0.86, 1.16), rotate=(-10., 10.)) relative_transform = dict( translates=(0.00375, 0.00375),", "train=flyingchairs_train, val=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root, test_mode=True, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt'), test=dict( type=dataset_type, pipeline=test_pipeline,", "prob=0.5, direction='horizontal'), dict(type='RandomFlip', prob=0.5, direction='vertical'), dict( type='RandomAffine', global_transform=global_transform, relative_transform=relative_transform), dict(type='RandomCrop',", "0.00375), zoom=(0.985, 1.015), shear=(1.0, 1.0), rotate=(-1.0, 1.0)) train_pipeline = [", "test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), train=flyingchairs_train, val=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root, test_mode=True, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt'),", "[ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict( type='ColorJitter', brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5), dict(type='RandomGamma',", "dict(type='LoadAnnotations'), dict(type='InputResize', exponent=6), dict(type='Normalize', **img_norm_cfg), dict(type='TestFormatBundle'), dict( type='Collect', keys=['imgs'], meta_keys=[", "hue=0.5), dict(type='RandomGamma', gamma_range=(0.7, 1.5)), dict(type='Normalize', **img_norm_cfg), dict(type='GaussianNoise', 
sigma_range=(0, 0.04), clamp_range=(0.,", "test_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict(type='InputResize', exponent=6), dict(type='Normalize', **img_norm_cfg), dict(type='TestFormatBundle'),", "drop_last=True, persistent_workers=True), val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), train=flyingchairs_train, val=dict(", "'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' ]), ] test_pipeline = [ dict(type='LoadImageFromFile'),", "255., 255.], to_rgb=False) global_transform = dict( translates=(0.05, 0.05), zoom=(1.0, 1.5),", "meta_keys=[ 'img_fields', 'ann_fields', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'filename_flow', 'ori_filename_flow', 'ori_shape',", "clamp_range=(0., 1.)), dict(type='RandomFlip', prob=0.5, direction='horizontal'), dict(type='RandomFlip', prob=0.5, direction='vertical'), dict( type='RandomAffine',", "keys=['imgs'], meta_keys=[ 'flow_gt', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg',", "= dict( type=dataset_type, pipeline=train_pipeline, data_root=data_root, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt') data = dict( train_dataloader=dict(", "rotate=(-10., 10.)) relative_transform = dict( translates=(0.00375, 0.00375), zoom=(0.985, 1.015), shear=(1.0,", "1.0)) train_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict( type='ColorJitter', brightness=0.5, contrast=0.5,", "flyingchairs_train = dict( type=dataset_type, pipeline=train_pipeline, data_root=data_root, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt') data = dict(", "dict( translates=(0.05, 0.05), zoom=(1.0, 1.5), shear=(0.86, 1.16), rotate=(-10., 10.)) relative_transform", "translates=(0.00375, 0.00375), zoom=(0.985, 1.015), shear=(1.0, 1.0), rotate=(-1.0, 1.0)) train_pipeline =", "= dict( 
translates=(0.05, 0.05), zoom=(1.0, 1.5), shear=(0.86, 1.16), rotate=(-10., 10.))", "0.05), zoom=(1.0, 1.5), shear=(0.86, 1.16), rotate=(-10., 10.)) relative_transform = dict(", "'filename2', 'ori_filename1', 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', 'scale_factor', 'pad_shape' ]) ]", "translates=(0.05, 0.05), zoom=(1.0, 1.5), shear=(0.86, 1.16), rotate=(-10., 10.)) relative_transform =", "type='RandomAffine', global_transform=global_transform, relative_transform=relative_transform), dict(type='RandomCrop', crop_size=(320, 448)), dict(type='DefaultFormatBundle'), dict( type='Collect', keys=['imgs',", "dict(type='TestFormatBundle'), dict( type='Collect', keys=['imgs'], meta_keys=[ 'flow_gt', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2',", "workers_per_gpu=2, shuffle=False), test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), train=flyingchairs_train, val=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root,", "split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt') data = dict( train_dataloader=dict( samples_per_gpu=1, workers_per_gpu=2, drop_last=True, persistent_workers=True), val_dataloader=dict(samples_per_gpu=1,", "dataset_type = 'FlyingChairs' data_root = 'data/FlyingChairs_release' img_norm_cfg = dict(mean=[0., 0.,", "'img_shape', 'img_norm_cfg', 'scale_factor', 'pad_shape' ]) ] flyingchairs_train = dict( type=dataset_type,", "'ann_fields', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'filename_flow', 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg'", "dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict(type='InputResize', exponent=6), dict(type='Normalize', **img_norm_cfg), dict(type='TestFormatBundle'), dict( type='Collect', keys=['imgs'],", "dict( type='RandomAffine', global_transform=global_transform, relative_transform=relative_transform), dict(type='RandomCrop', crop_size=(320, 448)), dict(type='DefaultFormatBundle'), dict( 
type='Collect',", "dict(mean=[0., 0., 0.], std=[255., 255., 255.], to_rgb=False) global_transform = dict(", "'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', 'scale_factor', 'pad_shape' ])", "type=dataset_type, pipeline=train_pipeline, data_root=data_root, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt') data = dict( train_dataloader=dict( samples_per_gpu=1, workers_per_gpu=2,", "shear=(0.86, 1.16), rotate=(-10., 10.)) relative_transform = dict( translates=(0.00375, 0.00375), zoom=(0.985,", "dict(type='RandomGamma', gamma_range=(0.7, 1.5)), dict(type='Normalize', **img_norm_cfg), dict(type='GaussianNoise', sigma_range=(0, 0.04), clamp_range=(0., 1.)),", "0.04), clamp_range=(0., 1.)), dict(type='RandomFlip', prob=0.5, direction='horizontal'), dict(type='RandomFlip', prob=0.5, direction='vertical'), dict(", "'ori_shape', 'img_shape', 'img_norm_cfg', 'scale_factor', 'pad_shape' ]) ] flyingchairs_train = dict(", "img_norm_cfg = dict(mean=[0., 0., 0.], std=[255., 255., 255.], to_rgb=False) global_transform", "workers_per_gpu=2, drop_last=True, persistent_workers=True), val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), train=flyingchairs_train,", "dict(type='Normalize', **img_norm_cfg), dict(type='GaussianNoise', sigma_range=(0, 0.04), clamp_range=(0., 1.)), dict(type='RandomFlip', prob=0.5, direction='horizontal'),", "dict(type='RandomCrop', crop_size=(320, 448)), dict(type='DefaultFormatBundle'), dict( type='Collect', keys=['imgs', 'flow_gt'], meta_keys=[ 'img_fields',", "0.], std=[255., 255., 255.], to_rgb=False) global_transform = dict( translates=(0.05, 0.05),", "dict( train_dataloader=dict( samples_per_gpu=1, workers_per_gpu=2, drop_last=True, persistent_workers=True), val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), test_dataloader=dict(samples_per_gpu=1,", 
"shuffle=False), test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), train=flyingchairs_train, val=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root, test_mode=True,", "val=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root, test_mode=True, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt'), test=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root,", "rotate=(-1.0, 1.0)) train_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict( type='ColorJitter', brightness=0.5,", "10.)) relative_transform = dict( translates=(0.00375, 0.00375), zoom=(0.985, 1.015), shear=(1.0, 1.0),", "'img_shape', 'img_norm_cfg' ]), ] test_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict(type='InputResize',", "dict( type=dataset_type, pipeline=train_pipeline, data_root=data_root, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt') data = dict( train_dataloader=dict( samples_per_gpu=1,", "train_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict( type='ColorJitter', brightness=0.5, contrast=0.5, saturation=0.5,", "zoom=(1.0, 1.5), shear=(0.86, 1.16), rotate=(-10., 10.)) relative_transform = dict( translates=(0.00375,", "1.)), dict(type='RandomFlip', prob=0.5, direction='horizontal'), dict(type='RandomFlip', prob=0.5, direction='vertical'), dict( type='RandomAffine', global_transform=global_transform,", "= [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict(type='InputResize', exponent=6), dict(type='Normalize', **img_norm_cfg), dict(type='TestFormatBundle'), dict(", "255.], to_rgb=False) global_transform = dict( translates=(0.05, 0.05), zoom=(1.0, 1.5), shear=(0.86,", "relative_transform = dict( translates=(0.00375, 0.00375), zoom=(0.985, 1.015), shear=(1.0, 1.0), rotate=(-1.0,", "] test_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict(type='InputResize', 
exponent=6), dict(type='Normalize', **img_norm_cfg),", "'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'filename_flow', 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' ]),", "= dict( train_dataloader=dict( samples_per_gpu=1, workers_per_gpu=2, drop_last=True, persistent_workers=True), val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False),", "dict( translates=(0.00375, 0.00375), zoom=(0.985, 1.015), shear=(1.0, 1.0), rotate=(-1.0, 1.0)) train_pipeline", "1.5)), dict(type='Normalize', **img_norm_cfg), dict(type='GaussianNoise', sigma_range=(0, 0.04), clamp_range=(0., 1.)), dict(type='RandomFlip', prob=0.5,", "= dict( translates=(0.00375, 0.00375), zoom=(0.985, 1.015), shear=(1.0, 1.0), rotate=(-1.0, 1.0))", "448)), dict(type='DefaultFormatBundle'), dict( type='Collect', keys=['imgs', 'flow_gt'], meta_keys=[ 'img_fields', 'ann_fields', 'filename1',", "**img_norm_cfg), dict(type='GaussianNoise', sigma_range=(0, 0.04), clamp_range=(0., 1.)), dict(type='RandomFlip', prob=0.5, direction='horizontal'), dict(type='RandomFlip',", "val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), train=flyingchairs_train, val=dict( type=dataset_type, pipeline=test_pipeline,", "]), ] test_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict(type='InputResize', exponent=6), dict(type='Normalize',", "data = dict( train_dataloader=dict( samples_per_gpu=1, workers_per_gpu=2, drop_last=True, persistent_workers=True), val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2,", "'data/FlyingChairs_release' img_norm_cfg = dict(mean=[0., 0., 0.], std=[255., 255., 255.], to_rgb=False)", "1.16), rotate=(-10., 10.)) relative_transform = dict( translates=(0.00375, 0.00375), zoom=(0.985, 1.015),", "] flyingchairs_train = dict( type=dataset_type, pipeline=train_pipeline, data_root=data_root, 
split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt') data =", "'ori_filename1', 'ori_filename2', 'filename_flow', 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' ]), ] test_pipeline", "gamma_range=(0.7, 1.5)), dict(type='Normalize', **img_norm_cfg), dict(type='GaussianNoise', sigma_range=(0, 0.04), clamp_range=(0., 1.)), dict(type='RandomFlip',", "dict(type='GaussianNoise', sigma_range=(0, 0.04), clamp_range=(0., 1.)), dict(type='RandomFlip', prob=0.5, direction='horizontal'), dict(type='RandomFlip', prob=0.5,", "type=dataset_type, pipeline=test_pipeline, data_root=data_root, test_mode=True, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt'), test=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root, test_mode=True,", "]) ] flyingchairs_train = dict( type=dataset_type, pipeline=train_pipeline, data_root=data_root, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt') data", "saturation=0.5, hue=0.5), dict(type='RandomGamma', gamma_range=(0.7, 1.5)), dict(type='Normalize', **img_norm_cfg), dict(type='GaussianNoise', sigma_range=(0, 0.04),", "keys=['imgs', 'flow_gt'], meta_keys=[ 'img_fields', 'ann_fields', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'filename_flow',", "train_dataloader=dict( samples_per_gpu=1, workers_per_gpu=2, drop_last=True, persistent_workers=True), val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2,", "'ori_filename1', 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', 'scale_factor', 'pad_shape' ]) ] flyingchairs_train", "pipeline=test_pipeline, data_root=data_root, test_mode=True, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt'), test=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root, test_mode=True, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt'))", "= 'data/FlyingChairs_release' img_norm_cfg = dict(mean=[0., 
0., 0.], std=[255., 255., 255.],", "brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5), dict(type='RandomGamma', gamma_range=(0.7, 1.5)), dict(type='Normalize', **img_norm_cfg), dict(type='GaussianNoise',", "dict(type='InputResize', exponent=6), dict(type='Normalize', **img_norm_cfg), dict(type='TestFormatBundle'), dict( type='Collect', keys=['imgs'], meta_keys=[ 'flow_gt',", "'flow_gt'], meta_keys=[ 'img_fields', 'ann_fields', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'filename_flow', 'ori_filename_flow',", "samples_per_gpu=1, workers_per_gpu=2, drop_last=True, persistent_workers=True), val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False),", "<filename>configs/_base_/datasets/flyingchairs_320x448.py<gh_stars>1-10 dataset_type = 'FlyingChairs' data_root = 'data/FlyingChairs_release' img_norm_cfg = dict(mean=[0.,", "workers_per_gpu=2, shuffle=False), train=flyingchairs_train, val=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root, test_mode=True, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt'), test=dict(", "prob=0.5, direction='vertical'), dict( type='RandomAffine', global_transform=global_transform, relative_transform=relative_transform), dict(type='RandomCrop', crop_size=(320, 448)), dict(type='DefaultFormatBundle'),", "shear=(1.0, 1.0), rotate=(-1.0, 1.0)) train_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict(", "dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict( type='ColorJitter', brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5), dict(type='RandomGamma', gamma_range=(0.7,", "pipeline=train_pipeline, data_root=data_root, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt') data = dict( train_dataloader=dict( samples_per_gpu=1, workers_per_gpu=2, drop_last=True,", "0., 0.], std=[255., 255., 255.], to_rgb=False) global_transform = dict( 
translates=(0.05,", "persistent_workers=True), val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=2, shuffle=False), train=flyingchairs_train, val=dict( type=dataset_type,", "dict(type='DefaultFormatBundle'), dict( type='Collect', keys=['imgs', 'flow_gt'], meta_keys=[ 'img_fields', 'ann_fields', 'filename1', 'filename2',", "= [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict( type='ColorJitter', brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5),", "zoom=(0.985, 1.015), shear=(1.0, 1.0), rotate=(-1.0, 1.0)) train_pipeline = [ dict(type='LoadImageFromFile'),", "meta_keys=[ 'flow_gt', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', 'scale_factor',", "'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', 'scale_factor', 'pad_shape' ]) ] flyingchairs_train =", "dict( type='Collect', keys=['imgs'], meta_keys=[ 'flow_gt', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'ori_shape',", "to_rgb=False) global_transform = dict( translates=(0.05, 0.05), zoom=(1.0, 1.5), shear=(0.86, 1.16),", "**img_norm_cfg), dict(type='TestFormatBundle'), dict( type='Collect', keys=['imgs'], meta_keys=[ 'flow_gt', 'filename1', 'filename2', 'ori_filename1',", "dict( type='ColorJitter', brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5), dict(type='RandomGamma', gamma_range=(0.7, 1.5)), dict(type='Normalize',", "1.015), shear=(1.0, 1.0), rotate=(-1.0, 1.0)) train_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'),", "direction='horizontal'), dict(type='RandomFlip', prob=0.5, direction='vertical'), dict( type='RandomAffine', global_transform=global_transform, relative_transform=relative_transform), dict(type='RandomCrop', crop_size=(320,", "= dict(mean=[0., 0., 0.], std=[255., 255., 255.], to_rgb=False) global_transform =", "global_transform=global_transform, relative_transform=relative_transform), 
dict(type='RandomCrop', crop_size=(320, 448)), dict(type='DefaultFormatBundle'), dict( type='Collect', keys=['imgs', 'flow_gt'],", "data_root = 'data/FlyingChairs_release' img_norm_cfg = dict(mean=[0., 0., 0.], std=[255., 255.,", "type='Collect', keys=['imgs'], meta_keys=[ 'flow_gt', 'filename1', 'filename2', 'ori_filename1', 'ori_filename2', 'ori_shape', 'img_shape',", "std=[255., 255., 255.], to_rgb=False) global_transform = dict( translates=(0.05, 0.05), zoom=(1.0,", "'ori_shape', 'img_shape', 'img_norm_cfg' ]), ] test_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'),", "'filename_flow', 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' ]), ] test_pipeline = [", "exponent=6), dict(type='Normalize', **img_norm_cfg), dict(type='TestFormatBundle'), dict( type='Collect', keys=['imgs'], meta_keys=[ 'flow_gt', 'filename1',", "'pad_shape' ]) ] flyingchairs_train = dict( type=dataset_type, pipeline=train_pipeline, data_root=data_root, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt')", "direction='vertical'), dict( type='RandomAffine', global_transform=global_transform, relative_transform=relative_transform), dict(type='RandomCrop', crop_size=(320, 448)), dict(type='DefaultFormatBundle'), dict(", "'img_norm_cfg' ]), ] test_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict(type='InputResize', exponent=6),", "dict(type='RandomFlip', prob=0.5, direction='vertical'), dict( type='RandomAffine', global_transform=global_transform, relative_transform=relative_transform), dict(type='RandomCrop', crop_size=(320, 448)),", "[ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict(type='InputResize', exponent=6), dict(type='Normalize', **img_norm_cfg), dict(type='TestFormatBundle'), dict( type='Collect',", "dict(type='RandomFlip', prob=0.5, direction='horizontal'), dict(type='RandomFlip', prob=0.5, direction='vertical'), dict( type='RandomAffine', global_transform=global_transform, 
relative_transform=relative_transform),", "= 'FlyingChairs' data_root = 'data/FlyingChairs_release' img_norm_cfg = dict(mean=[0., 0., 0.],", "type='ColorJitter', brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5), dict(type='RandomGamma', gamma_range=(0.7, 1.5)), dict(type='Normalize', **img_norm_cfg),", "1.0), rotate=(-1.0, 1.0)) train_pipeline = [ dict(type='LoadImageFromFile'), dict(type='LoadAnnotations'), dict( type='ColorJitter',", "dict(type='Normalize', **img_norm_cfg), dict(type='TestFormatBundle'), dict( type='Collect', keys=['imgs'], meta_keys=[ 'flow_gt', 'filename1', 'filename2',", "'img_norm_cfg', 'scale_factor', 'pad_shape' ]) ] flyingchairs_train = dict( type=dataset_type, pipeline=train_pipeline,", "relative_transform=relative_transform), dict(type='RandomCrop', crop_size=(320, 448)), dict(type='DefaultFormatBundle'), dict( type='Collect', keys=['imgs', 'flow_gt'], meta_keys=[", "crop_size=(320, 448)), dict(type='DefaultFormatBundle'), dict( type='Collect', keys=['imgs', 'flow_gt'], meta_keys=[ 'img_fields', 'ann_fields',", "global_transform = dict( translates=(0.05, 0.05), zoom=(1.0, 1.5), shear=(0.86, 1.16), rotate=(-10.,", "contrast=0.5, saturation=0.5, hue=0.5), dict(type='RandomGamma', gamma_range=(0.7, 1.5)), dict(type='Normalize', **img_norm_cfg), dict(type='GaussianNoise', sigma_range=(0,", "sigma_range=(0, 0.04), clamp_range=(0., 1.)), dict(type='RandomFlip', prob=0.5, direction='horizontal'), dict(type='RandomFlip', prob=0.5, direction='vertical'),", "'scale_factor', 'pad_shape' ]) ] flyingchairs_train = dict( type=dataset_type, pipeline=train_pipeline, data_root=data_root,", "'filename2', 'ori_filename1', 'ori_filename2', 'filename_flow', 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' ]), ]", "shuffle=False), train=flyingchairs_train, val=dict( type=dataset_type, pipeline=test_pipeline, data_root=data_root, test_mode=True, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt'), test=dict( 
type=dataset_type,", "'FlyingChairs' data_root = 'data/FlyingChairs_release' img_norm_cfg = dict(mean=[0., 0., 0.], std=[255.,", "data_root=data_root, split_file='data/FlyingChairs_release/FlyingChairs_train_val.txt') data = dict( train_dataloader=dict( samples_per_gpu=1, workers_per_gpu=2, drop_last=True, persistent_workers=True),", "dict(type='LoadAnnotations'), dict( type='ColorJitter', brightness=0.5, contrast=0.5, saturation=0.5, hue=0.5), dict(type='RandomGamma', gamma_range=(0.7, 1.5)),", "'ori_filename2', 'filename_flow', 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' ]), ] test_pipeline =" ]
[ "__rand__(self, lhs): return call('bit_and', lhs, self) # Represents an eltwise", "__radd__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, lhs, self)) def __sub__(self, rhs): return", "self)) def __add__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, self, other)) def __radd__(self,", "rhs): return call('cmp_ge', self, rhs) # Represents an eltwise bit_left", "TensorDim\"\"\" __ffi_del__ = lib.plaidml_dim_expr_free __ffi_repr__ = lib.plaidml_dim_expr_repr def __init__(self, expr=None):", "Tensor(expr=ffi_call(lib.plaidml_expr_dim, x.as_ptr())) if isinstance(x, Tensor): return x raise TypeError('Unexpected type", "for a LogicalShape Returns: list (int): Integer dimensions of the", "set_param_value(self, buffer): # Changes the value of a parameter tensor", "# Represents an eltwise bit_left def __lshift__(self, rhs): return call('bit_left',", "a contraction def __imul__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD, rhs)) # Represents", "class ProgramArgument\"\"\" def __init__(self, arg): self.is_input = arg.is_input self.ref =", "x) def gather(x, y): return call('gather', x, y) def gradients(loss,", "x in dims] expr = ffi_call(lib.plaidml_expr_size_map, len(dims), raw_dims) super(_SizeMap, self).__init__(expr)", "__ge__(self, rhs): return call('cmp_ge', self, rhs) # Represents an eltwise", "raw_args) class TensorDim(ForeignObject): \"\"\"Docstring for class TensorDim\"\"\" __ffi_del__ = lib.plaidml_dim_expr_free", "logger.debug('Value({})'.format(value)) if isinstance(value, np.ndarray): if value.ndim == 0: value =", "lhs, self) # Enable no_reduce on a contraction def no_reduce(self):", "raw_dims) super(_SizeMap, self).__init__(expr) class _Contraction(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ =", "_make_contraction(self, agg_op, rhs): # Extract combo_op and inputs if isinstance(rhs._impl,", "= { 16: DType.FLOAT16, 32: DType.FLOAT32, 64: DType.FLOAT64, } dtype", "self, other)) 
def __rfloordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, other, self)) def", "arg): self.is_input = arg.is_input self.ref = TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone, arg.tensor))) self.shape =", "len(updates), src_updates, dst_updates, raw_args, ) self.args = [ProgramArgument(raw_args[0].args[i]) for i", "isinstance(value, (six.integer_types, bool)): ffi_obj = ffi_call(lib.plaidml_value_int, value) elif isinstance(value, float):", "def __mul__(self, rhs): return call('mul', self, rhs) def __rmul__(self, lhs):", "x)) if np.issubdtype(type(x), np.integer): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x.item())) if isinstance(x, float):", "__rsub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, other, self)) def __mul__(self, other): return", "ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, lib.PLAIDML_COMBO_OP_NONE, [value._impl], _IndexMap(self, key), _SizeMap(self._dims),", "class Program(ForeignObject): \"\"\"Docstring for class Program\"\"\" __ffi_del__ = lib.plaidml_program_free __ffi_repr__", "as_float(x, bit_size): map = { 16: DType.FLOAT16, 32: DType.FLOAT32, 64:", "in a contraction def __eq__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ, (self, rhs)))", "type for value={}'.format(value)) elif expr is None: raise ValueError('One of", "*args): args = [wrap_tensor(x) for x in args] raw_args =", "{ 8: DType.INT8, 16: DType.INT16, 32: DType.INT32, 64: DType.INT64, }", "lhs): return call('div', lhs, self) # Represents an eltwise cmp_eq", "isinstance(value._impl, _Contraction): # standard contraction self._set_contraction(value._impl) elif isinstance(value, Tensor): pass", "TypeError('Invalid type for value={}'.format(value)) elif expr is None: raise ValueError('One", "not x.is_input] def wrap_tensor(x): if isinstance(x, six.integer_types): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x))", "len(dims), raw_dims) super(_SizeMap, 
self).__init__(expr) class _Contraction(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__", "*args): args = [wrap_poly(x) for x in args] raw_args =", "x) def max(x, y): return call('max', x, y) def min(x,", "= [x.as_ptr() for x in outputs] dst_updates = [x[0].as_ptr() for", "self._dims = dims expr = None elif value is not", "32: DType.INT32, 64: DType.INT64, } dtype = map.get(bit_size) if not", "return call('reshape', x, *dims) def round(x): return call('round', x) def", "cmp_ge def __ge__(self, rhs): return call('cmp_ge', self, rhs) # Represents", "def __neg__(self): return call('neg', self) # Represents an eltwise bit_not", "sink_sizes.as_ptr(), len(src_idxs), src_idxs, name.encode(), ) super(_Contraction, self).__init__(expr) _ContractionPart = namedtuple('_ContractionPart',", "= ffi_call(lib.plaidml_poly_expr_index, name.encode()) super(TensorIndex, self).__init__(expr) def __lt__(self, rhs): return Constraint(self,", "of this tensor. def bind_dims(self, *dims): raw_dims = [x.as_ptr() for", "other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, self, other)) def __rmul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL,", "__lt__(self, rhs): return Constraint(self, wrap_dim(rhs)) def __neg__(self): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG, self))", "Program(ForeignObject): \"\"\"Docstring for class Program\"\"\" __ffi_del__ = lib.plaidml_program_free __ffi_repr__ =", "raw_args)) def cast(x, dtype): return Tensor(expr=ffi_call(lib.plaidml_expr_cast, wrap_tensor(x).as_ptr(), dtype)) def as_bool(x):", "raw_dims = [x.as_ptr() for x in dims] ffi_call(lib.plaidml_expr_bind_dims, self.as_ptr(), len(raw_dims),", "return ffi_call(lib.plaidml_logical_shape_get_ndims, self.as_ptr()) @property def int_dims(self): \"\"\"Returns the dimensions of", "self, rhs)) def __rfloordiv__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, lhs, self)) class", "contraction def __le__(self, rhs): 
self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN, rhs)) # Represents a combo_op", "src_updates, dst_updates, raw_args, ) self.args = [ProgramArgument(raw_args[0].args[i]) for i in", "self) # Enable no_reduce on a contraction def no_reduce(self): if", "ffi_call( lib.plaidml_program_evaluate, name.encode(), len(raw_outputs), raw_outputs, len(updates), src_updates, dst_updates, raw_args, )", "if isinstance(key, tuple) or isinstance(key, list): idxs = key else:", "numpy as np import six from plaidml2 import DType from", "__rsub__(self, lhs): return call('sub', lhs, self) # Represents an eltwise", "self._is_contraction: raise TypeError('use_default can only be specified on a contraction.')", "lib.PLAIDML_AGG_OP_ASSIGN, value._impl.op, [x._impl for x in value._impl.args], _IndexMap(self, key), _SizeMap(self._dims),", "in range(count)] def TensorIndexes(count): return [TensorIndex() for i in range(count)]", "Represents an eltwise bit_and def __and__(self, rhs): return call('bit_and', self,", "def __init__(self, arg): self.is_input = arg.is_input self.ref = TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone, arg.tensor)))", "if isinstance(x, TensorDim): return Tensor(expr=ffi_call(lib.plaidml_expr_dim, x.as_ptr())) if isinstance(x, Tensor): return", "call('ident', x) def index(x, axis): return call('index', x, axis) def", "lib.PLAIDML_COMBO_OP_NONE inputs = [rhs._impl] elif isinstance(rhs._impl, _ContractionPart): # Binary/Ternary op", "[x.as_ptr() for x in dims] expr = ffi_call(lib.plaidml_expr_size_map, len(dims), raw_dims)", "return Tensor(expr=ffi_call(lib.plaidml_expr_call, fn.encode(), len(args), raw_args)) def cast(x, dtype): return Tensor(expr=ffi_call(lib.plaidml_expr_cast,", "value = value.tolist() if value is None: ffi_obj = ffi_call(lib.plaidml_value_none)", "for x in raw_grads] def ident(x): return call('ident', x) def", "= ffi_call(lib.plaidml_value_tuple, len(raw_elts), raw_elts) elif isinstance(value, six.string_types): ffi_obj 
= ffi_call(lib.plaidml_value_str,", "a parameter tensor (i.e. one explicitly set to a buffer", "x) def index(x, axis): return call('index', x, axis) def jacobian(loss,", "return Tensor(expr=ffi_call(lib.plaidml_expr_float, x)) if isinstance(x, TensorDim): return Tensor(expr=ffi_call(lib.plaidml_expr_dim, x.as_ptr())) if", "an eltwise negation def __neg__(self): return call('neg', self) # Represents", "call('sub', lhs, self) # Represents an eltwise multiplication def __mul__(self,", "ffi_call(lib.plaidml_expr_contraction_set_no_reduce, self.as_ptr(), True) return self # Set use_default on a", "x is None else x for x in dims]) ffi_obj", "raw_outputs, len(updates), src_updates, dst_updates, raw_args, ) self.args = [ProgramArgument(raw_args[0].args[i]) for", "in src_idxs] expr = ffi_call( lib.plaidml_expr_contraction, agg_op, combo_op, sink_idxs.as_ptr(), sink_sizes.as_ptr(),", "cast(x, dtype) def ceil(x): return call('ceil', x) def cond(lhs, rhs,", "self.__hash__() == other.__hash__() class Value(ForeignObject): \"\"\"Docstring for class Value\"\"\" __ffi_del__", "contraction.') ffi_call(lib.plaidml_expr_contraction_set_no_reduce, self.as_ptr(), True) return self # Set use_default on", "variables): wrts = [x.as_ptr() for x in variables] raw_grads =", "contraction def use_default(self, rhs): if not self._is_contraction: raise TypeError('use_default can", "elif value is not None: if isinstance(value, six.integer_types): expr =", "return self def add_constraint(self, constraint): ffi_call( lib.plaidml_expr_contraction_add_constraint, self.as_ptr(), constraint.lhs.as_ptr(), constraint.rhs.as_ptr(),", "return call('pow', x, y) def prng(state, shape): return call('prng', state,", "isinstance(value._impl, _IndexMap): # Unary ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, lib.PLAIDML_COMBO_OP_NONE,", "call('bit_xor', self, rhs) def __rxor__(self, lhs): return call('bit_xor', lhs, self)", "*dims): raw_dims = [x.as_ptr() for x in dims] 
ffi_call(lib.plaidml_expr_bind_dims, self.as_ptr(),", "rhs): return Constraint(self, wrap_dim(rhs)) def __neg__(self): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG, self)) def", "z) def select(cond, true_case, false_case): return call('cond', cond, true_case, false_case)", "__init__(self, dtype=None, dims=[], ptr=None): if ptr: ffi_obj = ptr elif", "an eltwise cmp_le def __le__(self, rhs): return call('cmp_le', self, rhs)", "= Buffer(tensor_shape, ptr=ffi_call(lib.plaidml_buffer_clone, arg.buffer)) else: self.buffer = None class Program(ForeignObject):", "in a contraction def __imul__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD, rhs)) #", "None: raise ValueError('One of dims=, shape=, or expr= must be", "contraction def __eq__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ, (self, rhs))) def _make_contraction(self,", "# Represents an aggregation_op of PROD in a contraction def", "self.as_ptr(), i) for i in range(self.ndims) ] def into_TensorShape(self): return", "expr=None, name=''): if expr is None: expr = ffi_call(lib.plaidml_poly_expr_index, name.encode())", "{} for value={}'.format(type(value), value)) super(Value, self).__init__(ffi_obj) def as_tensor(self): return Tensor(expr=ffi_call(lib.plaidml_value_expr_get,", "not None: if isinstance(value, six.integer_types): expr = ffi_call(lib.plaidml_expr_int, value) elif", "return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD, rhs)) # Represents an aggregation_op of MAX in", "range(raw_args[0].nargs)] ffi_call(lib.plaidml_program_args_free, raw_args[0]) super(Program, self).__init__(ffi_obj) @property def inputs(self): return [x", "i in range(count)] def TensorIndexes(count): return [TensorIndex() for i in", "ffi_call(lib.plaidml_poly_expr_index, name.encode()) super(TensorIndex, self).__init__(expr) def __lt__(self, rhs): return Constraint(self, wrap_dim(rhs))", "return call('sin', x) def sqrt(x): return call('sqrt', x) def tan(x):", 
"argument: {}. fn: {}, args: {}, bad arg: {}'.format( type(x),", "lib logger = logging.getLogger(__name__) def __init(): \"\"\"Docstring for function plaidml2.edsl.__init\"\"\"", "return call('div', self, rhs) def __rdiv__(self, lhs): return call('div', lhs,", "self).__init__(ffi_obj) @property def dtype(self): return DType(ffi_call(lib.plaidml_logical_shape_get_dtype, self.as_ptr())) @property def ndims(self):", "TensorShape, Buffer from plaidml2.ffi import ForeignObject, ffi, ffi_call, lib logger", "arg.tensor))) self.shape = LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone, arg.shape)) if arg.buffer: tensor_shape = self.shape.into_TensorShape()", "return call('cmp_le', self, rhs) # Represents an eltwise cmp_ge def", "other)) def __rsub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, other, self)) def __mul__(self,", "DType.INT64, } dtype = map.get(bit_size) if not dtype: raise 'Unsupport", "return ffi_call(lib.plaidml_poly_expr_op, op, len(args), raw_args) class TensorIndex(ForeignObject): \"\"\"Docstring for class", "ref, key): if isinstance(key, tuple) or isinstance(key, list): idxs =", "value) elif isinstance(value, TensorDim): ffi_obj = ffi_call(lib.plaidml_value_dim, value.as_ptr()) elif isinstance(value,", "call('bit_xor', lhs, self) # Enable no_reduce on a contraction def", "def __and__(self, rhs): return call('bit_and', self, rhs) def __rand__(self, lhs):", "self.args if not x.is_input] def wrap_tensor(x): if isinstance(x, six.integer_types): return", "[x.as_ptr() for x in args] return ffi_call(lib.plaidml_poly_expr_op, op, len(args), raw_args)", "in a contraction def __iadd__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM, rhs)) #", "__ffi_repr__ = lib.plaidml_logical_shape_repr def __init__(self, dtype=None, dims=[], ptr=None): if ptr:", "raw_dims = [x.as_ptr() for x in dims] expr = ffi_call(lib.plaidml_expr_size_map,", "namedtuple('Constraint', ['lhs', 'rhs']) def wrap_dim(x): if isinstance(x, 
six.integer_types): return TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int,", "elif dtype is not None: raw_dims = ffi.new('int64_t[]', [0 if", "combo_op of PLUS in a contraction def __add__(self, rhs): return", "call('add', lhs, self) # Represents an eltwise subtraction def __sub__(self,", "type {} for value={}'.format(type(value), value)) super(Value, self).__init__(ffi_obj) def as_tensor(self): return", "__add__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, self, other)) def __radd__(self, other): return", "value=None, name='', buffer=None): self._name = name self._buffer = buffer if", "self.tensor = tensor def __hash__(self): return hash(ffi_call(lib.plaidml_expr_ptr, self.tensor.as_ptr())) def __eq__(self,", "eltwise bit_left def __lshift__(self, rhs): return call('bit_left', self, rhs) def", "contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, value._impl.op, [x._impl for x in value._impl.args],", "self)) def __floordiv__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, self, rhs)) def __rfloordiv__(self,", "TensorIndex\"\"\" __ffi_del__ = lib.plaidml_poly_expr_free __ffi_repr__ = lib.plaidml_poly_expr_repr def __init__(self, expr=None,", "'Unsupport bit_size for as_int' return cast(x, dtype) def as_uint(x, bit_size):", "range(self.ndims) ] def into_TensorShape(self): return TensorShape( ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape, self.as_ptr())) Constraint =", "call('bit_right', lhs, self) # Represents an eltwise bit_and def __and__(self,", "__ge__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX, rhs)) # Represents an aggregation_op of MIN", "pass elif isinstance(value._impl, _IndexMap): # Unary ASSIGN contraction self._set_contraction( _Contraction(", "not dtype: raise 'Unsupport bit_size for as_int' return cast(x, dtype)", "__xor__(self, rhs): return call('bit_xor', self, rhs) def __rxor__(self, lhs): return", "TensorRef\"\"\" def __init__(self, 
tensor): self.tensor = tensor def __hash__(self): return", "'plaidml_edsl_init') class LogicalShape(ForeignObject): \"\"\"Docstring for class LogicalShape\"\"\" __ffi_del__ = lib.plaidml_logical_shape_free", "name.encode()) super(TensorIndex, self).__init__(expr) def __lt__(self, rhs): return Constraint(self, wrap_dim(rhs)) def", "= [x.as_ptr() for x in dims] expr = ffi_call(lib.plaidml_expr_size_map, len(dims),", "value.tolist() if value is None: ffi_obj = ffi_call(lib.plaidml_value_none) elif isinstance(value,", "= ffi_call(lib.plaidml_value_dim, value.as_ptr()) elif isinstance(value, Tensor): ffi_obj = ffi_call(lib.plaidml_value_expr, value.as_ptr())", "in a contraction def __ge__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX, rhs)) # Represents", "ffi_call(lib.plaidml_expr_int, value) elif isinstance(value, float): expr = ffi_call(lib.plaidml_expr_float, value) else:", "args: {}, bad arg: {}'.format( type(x), fn, args, x)) def", "collections import namedtuple import numpy as np import six from", "rhs): return call('bit_right', self, rhs) def __rrshift__(self, lhs): return call('bit_right',", "= lib.plaidml_logical_shape_free __ffi_repr__ = lib.plaidml_logical_shape_repr def __init__(self, dtype=None, dims=[], ptr=None):", "args = [wrap_dim(x) for x in args] raw_args = [x.as_ptr()", "return call('gather', x, y) def gradients(loss, variables): wrts = [x.as_ptr()", "an eltwise bit_or def __or__(self, rhs): return call('bit_or', self, rhs)", "return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, other, self)) def wrap_poly(x): if isinstance(x, six.integer_types): return", "def bind(self, shape): ffi_call(lib.plaidml_expr_bind_shape, self.as_ptr(), shape.as_ptr()) class TensorRef: \"\"\"Docstring for", "{}, bad arg: {}'.format( type(x), fn, args, x)) def call(fn,", "def outputs(self): return [x for x in self.args if not", "__rfloordiv__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, lhs, self)) class 
_IndexMap(ForeignObject): __ffi_del__ =", "= lib.plaidml_dim_expr_repr def __init__(self, expr=None): if expr is None: expr", "value.encode('utf-8')) elif isinstance(value, ffi.CData) and ffi.typeof(value) is ffi.typeof('plaidml_value*'): ffi_obj =", "list (int): Integer dimensions of the LogicalShape. \"\"\" return [", "prng(state, shape): return call('prng', state, *shape) def reshape(x, dims): return", "fn, args, x)) def call(fn, *args): args = [wrap_tensor(x) for", "division def __truediv__(self, rhs): return call('div', self, rhs) def __rtruediv__(self,", "return call('mul', lhs, self) # Represents an eltwise division def", "isinstance(value, TensorDim): ffi_obj = ffi_call(lib.plaidml_value_dim, value.as_ptr()) elif isinstance(value, Tensor): ffi_obj", "shape @property def shape(self): return LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape, self.as_ptr())) # Verify that", "def wrap_tensor(x): if isinstance(x, six.integer_types): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x)) if np.issubdtype(type(x),", "six from plaidml2 import DType from plaidml2.core import TensorShape, Buffer", "other, self)) def __sub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, self, other)) def", "lhs): return call('mul', lhs, self) # Represents an eltwise division", "Tensor(expr=ffi_call(lib.plaidml_expr_int, x.item())) if isinstance(x, float): return Tensor(expr=ffi_call(lib.plaidml_expr_float, x)) if isinstance(x,", "raw_elts) elif isinstance(value, six.string_types): ffi_obj = ffi_call(lib.plaidml_value_str, value.encode('utf-8')) elif isinstance(value,", "class _IndexMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self,", "__radd__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, other, self)) def __sub__(self, other): return", "for x in variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_gradient,", "an eltwise multiplication def __mul__(self, 
rhs): return call('mul', self, rhs)", "call('reshape', x, *dims) def round(x): return call('round', x) def scatter(x,", "\"\"\"Docstring for class Program\"\"\" __ffi_del__ = lib.plaidml_program_free __ffi_repr__ = lib.plaidml_program_repr", "only be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_use_default, self.as_ptr(), rhs.as_ptr()) return", "in idxs] raw_idxs = [x.as_ptr() for x in idxs] expr", "elif dims is not None: self._dims = dims expr =", "x in dims]) ffi_obj = ffi_call(lib.plaidml_logical_shape_alloc, dtype, len(dims), raw_dims) else:", "eltwise cmp_le def __le__(self, rhs): return call('cmp_le', self, rhs) #", "= lib.plaidml_expr_repr def __init__(self, ref, key): if isinstance(key, tuple) or", "# Represents an eltwise cmp_eq def __eq__(self, rhs): return call('cmp_eq',", "x in rhs._impl.args] else: raise ValueError('Invalid impl') return _Contraction( agg_op,", "def call(fn, *args): args = [wrap_tensor(x) for x in args]", "not None: raw_dims = ffi.new('int64_t[]', [0 if x is None", "__ffi_del__ = lib.plaidml_logical_shape_free __ffi_repr__ = lib.plaidml_logical_shape_repr def __init__(self, dtype=None, dims=[],", "*args): args = [wrap_dim(x) for x in args] raw_args =", "Represents an eltwise cmp_lt def __lt__(self, rhs): return call('cmp_lt', self,", "lib.PLAIDML_AGG_OP_ASSIGN, lib.PLAIDML_COMBO_OP_NONE, [value._impl], _IndexMap(self, key), _SizeMap(self._dims), self._name, )) elif isinstance(value._impl,", "raise ValueError('Invalid impl when assigning to a Tensor (Type: {})'.format(", "[TensorIndex() for i in range(count)] class ProgramArgument: \"\"\"Docstring for class", "raw_args = [x.as_ptr() for x in args] return ffi_call(lib.plaidml_poly_expr_op, op,", "def _set_contraction(self, cion): self._is_contraction = True self.take_ptr(cion) # Represents an", "__rtruediv__(self, lhs): return call('div', lhs, self) # Represents an eltwise", "other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, self, other)) def 
__rsub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB,", "rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ, (self, rhs))) def _make_contraction(self, agg_op, rhs): #", "= [wrap_dim(x) for x in dims] raw_dims = [x.as_ptr() for", "return self # Set use_default on a contraction def use_default(self,", "[x for x in self.args if not x.is_input] def wrap_tensor(x):", "ffi_call(lib.plaidml_value_tuple, len(raw_elts), raw_elts) elif isinstance(value, six.string_types): ffi_obj = ffi_call(lib.plaidml_value_str, value.encode('utf-8'))", "ffi_call(lib.plaidml_expr_bind_shape, self.as_ptr(), shape.as_ptr()) class TensorRef: \"\"\"Docstring for class TensorRef\"\"\" def", "true_case, false_case) def shape(x): return call('shape', x) def sin(x): return", "# Represents an aggregation_op of MAX in a contraction def", "= value else: raise TypeError('Unsupported type {} for value={}'.format(type(value), value))", "key else: idxs = [key] idxs = [wrap_poly(x) for x", "call('cmp_le', self, rhs) # Represents an eltwise cmp_ge def __ge__(self,", "if value.ndim == 0: value = value.item() else: value =", "return call('cond', cond, true_case, false_case) def shape(x): return call('shape', x)", "def __init__(self, expr=None, name=''): if expr is None: expr =", "[x.as_ptr() for x in args] return Tensor(expr=ffi_call(lib.plaidml_expr_call, fn.encode(), len(args), raw_args))", "isinstance(value, (list, tuple)): self._elts = [Value(x) for x in value]", "an eltwise subtraction def __sub__(self, rhs): return call('sub', self, rhs)", "__sub__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, self, rhs)) def __rsub__(self, lhs): return", "super(_Contraction, self).__init__(expr) _ContractionPart = namedtuple('_ContractionPart', ['op', 'args']) class IndexedTensor(object): \"\"\"Docstring", "LogicalShape Returns: list (int): Integer dimensions of the LogicalShape. 
\"\"\"", "self, other)) def __radd__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, other, self)) def", "x) def tan(x): return call('tan', x) def tanh(x): return call('tanh',", "= tensor def __hash__(self): return hash(ffi_call(lib.plaidml_expr_ptr, self.tensor.as_ptr())) def __eq__(self, other):", ") super(_Contraction, self).__init__(expr) _ContractionPart = namedtuple('_ContractionPart', ['op', 'args']) class IndexedTensor(object):", "rhs)) # Represents an aggregation_op of MIN in a contraction", "an eltwise bit_right def __rshift__(self, rhs): return call('bit_right', self, rhs)", "def __init__(self, dims): dims = [wrap_dim(x) for x in dims]", "super(TensorIndex, self).__init__(expr) def __lt__(self, rhs): return Constraint(self, wrap_dim(rhs)) def __neg__(self):", "super(LogicalShape, self).__init__(ffi_obj) @property def dtype(self): return DType(ffi_call(lib.plaidml_logical_shape_get_dtype, self.as_ptr())) @property def", "call('cmp_gt', self, rhs) # Represents an eltwise cmp_le def __le__(self,", "ident(x): return call('ident', x) def index(x, axis): return call('index', x,", "self.as_ptr(), len(raw_dims), raw_dims) # bind a concrete shape to this", "def gradients(loss, variables): wrts = [x.as_ptr() for x in variables]", "return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self)) def __floordiv__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, self,", "multiplication def __mul__(self, rhs): return call('mul', self, rhs) def __rmul__(self,", "ffi_call(lib.plaidml_value_str, value.encode('utf-8')) elif isinstance(value, ffi.CData) and ffi.typeof(value) is ffi.typeof('plaidml_value*'): ffi_obj", "inputs if isinstance(rhs._impl, _IndexMap): # Unary op combo_op = lib.PLAIDML_COMBO_OP_NONE", "(i.e. 
one explicitly set to a buffer value) # Illegal", "eltwise bit_xor def __xor__(self, rhs): return call('bit_xor', self, rhs) def", "= buffer.as_ptr() expr = ffi_call(lib.plaidml_expr_placeholder, shape.as_ptr(), raw_buffer, name.encode()) elif dims", "def tan(x): return call('tan', x) def tanh(x): return call('tanh', x)", "cond, true_case, false_case) def shape(x): return call('shape', x) def sin(x):", "rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN, rhs)) # Represents a combo_op of PLUS in", "LogicalShape\"\"\" __ffi_del__ = lib.plaidml_logical_shape_free __ffi_repr__ = lib.plaidml_logical_shape_repr def __init__(self, dtype=None,", "x in dims] ffi_call(lib.plaidml_expr_bind_dims, self.as_ptr(), len(raw_dims), raw_dims) # bind a", "Args: self (pointer): The object pointer for a LogicalShape Returns:", "[x[1].as_ptr() for x in updates] raw_args = ffi.new('plaidml_program_args**') ffi_obj =", "rhs): if not self._is_contraction: raise TypeError('use_default can only be specified", "args] raw_args = [x.as_ptr() for x in args] return ffi_call(lib.plaidml_poly_expr_op,", "list. 
Args: self (pointer): The object pointer for a LogicalShape", "def __ge__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX, rhs)) # Represents an aggregation_op of", "of MIN in a contraction def __le__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN, rhs))", "key), tensor=self) def __setitem__(self, key, value): if isinstance(value._impl, _Contraction): #", "call('prng', state, *shape) def reshape(x, dims): return call('reshape', x, *dims)", "rhs): return call('bit_xor', self, rhs) def __rxor__(self, lhs): return call('bit_xor',", "call('max', x, y) def min(x, y): return call('min', x, y)", "# Extract combo_op and inputs if isinstance(rhs._impl, _IndexMap): # Unary", "__and__(self, rhs): return call('bit_and', self, rhs) def __rand__(self, lhs): return", "constraints: self.add_constraint(constraint) # Return the tensor's shape @property def shape(self):", "return self.__hash__() == other.__hash__() class Value(ForeignObject): \"\"\"Docstring for class Value\"\"\"", "self).__init__(expr) def _bind(self, expr): self.take_ptr(expr) def __neg__(self): return TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG, self))", "raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_gradient, len(wrts), wrts, loss.as_ptr(), raw_grads,", "LogicalShape(ForeignObject): \"\"\"Docstring for class LogicalShape\"\"\" __ffi_del__ = lib.plaidml_logical_shape_free __ffi_repr__ =", "= namedtuple('Constraint', ['lhs', 'rhs']) def wrap_dim(x): if isinstance(x, six.integer_types): return", "[x.as_ptr() for x in src_idxs] expr = ffi_call( lib.plaidml_expr_contraction, agg_op,", "(pointer): The object pointer for a LogicalShape Returns: list (int):", "ffi_obj = ffi_call(lib.plaidml_value_str, value.encode('utf-8')) elif isinstance(value, ffi.CData) and ffi.typeof(value) is", "eltwise cmp_eq def __eq__(self, rhs): return call('cmp_eq', self, rhs) #", "as_uint(x, bit_size): map = { 8: DType.UINT8, 16: 
DType.UINT16, 32:", "else: raise TypeError('Unsupported type {} for value={}'.format(type(value), value)) super(Value, self).__init__(ffi_obj)", "is not None: raw_dims = ffi.new('int64_t[]', [0 if x is", "in dims] raw_dims = [x.as_ptr() for x in dims] expr", "key), _SizeMap(self._dims), self._name, )) else: raise ValueError('Invalid impl when assigning", "ffi_call(lib.plaidml_expr_size_map, len(dims), raw_dims) super(_SizeMap, self).__init__(expr) class _Contraction(ForeignObject): __ffi_del__ = lib.plaidml_expr_free", "outputs(self): return [x for x in self.args if not x.is_input]", "self._tensor = tensor def __repr__(self): return repr(self._impl) # Represents an", "__rdiv__(self, lhs): return call('div', lhs, self) # Represents an eltwise", "# Represents an eltwise cmp_gt def __gt__(self, rhs): return call('cmp_gt',", "self) # Represents an eltwise cmp_eq def __eq__(self, rhs): return", "self._dims, self._is_contraction)) def __getitem__(self, key): return IndexedTensor(_IndexMap(self, key), tensor=self) def", "expr= must be specified.') super(Tensor, self).__init__(expr) def set_param_value(self, buffer): #", "args = [wrap_tensor(x) for x in args] raw_args = [x.as_ptr()", "elif isinstance(value._impl, _ContractionPart): # Binary or ternary ASSIGN contraction self._set_contraction(", "= key else: idxs = [key] idxs = [wrap_poly(x) for", "self).__init__(expr) _ContractionPart = namedtuple('_ContractionPart', ['op', 'args']) class IndexedTensor(object): \"\"\"Docstring for", "lib.plaidml_program_free __ffi_repr__ = lib.plaidml_program_repr def __init__(self, name, outputs, updates=[]): raw_outputs", "self._is_contraction: raise TypeError('no_reduce can only be specified on a contraction.')", "\"\"\"Docstring for class TensorRef\"\"\" def __init__(self, tensor): self.tensor = tensor", "self) # Represents an eltwise multiplication def __mul__(self, rhs): return", "return hash(ffi_call(lib.plaidml_expr_ptr, self.tensor.as_ptr())) def __eq__(self, other): if 
isinstance(other, Tensor): return", "self, rhs) def __rmul__(self, lhs): return call('mul', lhs, self) #", "= None _is_contraction = False def __init__(self, shape=None, dims=None, expr=None,", "for i in range(self.ndims) ] def into_TensorShape(self): return TensorShape( ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape,", "must be specified.') super(Tensor, self).__init__(expr) def set_param_value(self, buffer): # Changes", "self # Set use_default on a contraction def use_default(self, rhs):", "to this tensor def bind(self, shape): ffi_call(lib.plaidml_expr_bind_shape, self.as_ptr(), shape.as_ptr()) class", "raw_idxs) super(_IndexMap, self).__init__(expr) class _SizeMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ =", "class TensorRef: \"\"\"Docstring for class TensorRef\"\"\" def __init__(self, tensor): self.tensor", "state, *shape) def reshape(x, dims): return call('reshape', x, *dims) def", "or expr= must be specified.') super(Tensor, self).__init__(expr) def set_param_value(self, buffer):", "return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, self, rhs)) def __rsub__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, lhs,", "for class TensorDim\"\"\" __ffi_del__ = lib.plaidml_dim_expr_free __ffi_repr__ = lib.plaidml_dim_expr_repr def", "rhs) # Represents an eltwise bit_left def __lshift__(self, rhs): return", "@property def ndims(self): return ffi_call(lib.plaidml_logical_shape_get_ndims, self.as_ptr()) @property def int_dims(self): \"\"\"Returns", "return call('cmp_lt', self, rhs) # Represents an eltwise cmp_gt def", "Unary ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, lib.PLAIDML_COMBO_OP_NONE, [value._impl], _IndexMap(self, key),", "Represents an aggregation_op of SUM in a contraction def __iadd__(self,", "self)) def __add__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, self, rhs)) def __radd__(self,", "def __radd__(self, lhs): return 
TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, lhs, self)) def __sub__(self, rhs):", "len(wrts)) ffi_call( lib.plaidml_expr_jacobian, len(wrts), wrts, loss.as_ptr(), raw_grads, ) return [Tensor(expr=x)", "dims=None, expr=None, value=None, name='', buffer=None): self._name = name self._buffer =", "= [x.as_ptr() for x in args] return ffi_call(lib.plaidml_poly_expr_op, op, len(args),", "eltwise bit_or def __or__(self, rhs): return call('bit_or', self, rhs) def", "__radd__(self, lhs): return call('add', lhs, self) # Represents an eltwise", "= ffi_call(lib.plaidml_dim_expr_none) super(TensorDim, self).__init__(expr) def _bind(self, expr): self.take_ptr(expr) def __neg__(self):", "value) elif isinstance(value, float): expr = ffi_call(lib.plaidml_expr_float, value) else: raise", "DType.INT16, 32: DType.INT32, 64: DType.INT64, } dtype = map.get(bit_size) if", "raw_grads] def ident(x): return call('ident', x) def index(x, axis): return", "call('cos', x) def exp(x): return call('exp', x) def floor(x): return", "self)) def __mul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, self, other)) def __rmul__(self,", "# Represents an eltwise subtraction def __sub__(self, rhs): return call('sub',", "= False def __init__(self, shape=None, dims=None, expr=None, value=None, name='', buffer=None):", "shape=, or expr= must be specified.') super(Tensor, self).__init__(expr) def set_param_value(self,", "self) # Represents an eltwise subtraction def __sub__(self, rhs): return", "map.get(bit_size) if not dtype: raise 'Unsupport bit_size for as_int' return", "a combo_op of MULTIPLY in a contraction def __mul__(self, rhs):", "None: expr = ffi_call(lib.plaidml_dim_expr_none) super(TensorDim, self).__init__(expr) def _bind(self, expr): self.take_ptr(expr)", "__le__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN, rhs)) # Represents a combo_op of PLUS", "value else: raise TypeError('Unsupported type {} for value={}'.format(type(value), value)) 
super(Value,", "ffi_call(lib.plaidml_logical_shape_get_ndims, self.as_ptr()) @property def int_dims(self): \"\"\"Returns the dimensions of a", "Unary op combo_op = lib.PLAIDML_COMBO_OP_NONE inputs = [rhs._impl] elif isinstance(rhs._impl,", "in a contraction def __add__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD, (self, rhs)))", "as a list. Args: self (pointer): The object pointer for", "return call('bit_and', lhs, self) # Represents an eltwise bit_or def", "call('sub', self, rhs) def __rsub__(self, lhs): return call('sub', lhs, self)", "None: ffi_obj = ffi_call(lib.plaidml_value_none) elif isinstance(value, (six.integer_types, bool)): ffi_obj =", "for x in raw_grads] def log(x): return call('log', x) def", "[x.as_ptr() for x in self._elts] ffi_obj = ffi_call(lib.plaidml_value_tuple, len(raw_elts), raw_elts)", "# Unary ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, lib.PLAIDML_COMBO_OP_NONE, [value._impl], _IndexMap(self,", "= lib.plaidml_poly_expr_repr def __init__(self, expr=None, name=''): if expr is None:", "self)) def __floordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, self, other)) def __rfloordiv__(self,", "call('pow', x, y) def prng(state, shape): return call('prng', state, *shape)", "self) # Represents an eltwise division def __div__(self, rhs): return", "be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_use_default, self.as_ptr(), rhs.as_ptr()) return self", "a combo_op of PLUS in a contraction def __add__(self, rhs):", "= ffi_call(lib.plaidml_expr_int, value) elif isinstance(value, float): expr = ffi_call(lib.plaidml_expr_float, value)", "cmp_eq def __eq__(self, rhs): return call('cmp_eq', self, rhs) # Represents", "*dims) def round(x): return call('round', x) def scatter(x, y, z):", "lib.plaidml_dim_expr_repr def __init__(self, expr=None): if expr is None: expr =", "specified on a contraction.') 
ffi_call(lib.plaidml_expr_contraction_set_use_default, self.as_ptr(), rhs.as_ptr()) return self def", "cion): self._is_contraction = True self.take_ptr(cion) # Represents an eltwise negation", "call('div', self, rhs) def __rdiv__(self, lhs): return call('div', lhs, self)", "__ror__(self, lhs): return call('bit_or', lhs, self) # Represents an eltwise", "__sub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, self, other)) def __rsub__(self, other): return", "lhs, self) # Represents an eltwise division def __truediv__(self, rhs):", "Tensor(expr=ffi_call(lib.plaidml_expr_float, x)) if isinstance(x, TensorDim): return Tensor(expr=ffi_call(lib.plaidml_expr_dim, x.as_ptr())) if isinstance(x,", "x) def cond(lhs, rhs, true_case): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND, (lhs, rhs, true_case)))", "in a contraction def __mul__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL, (self, rhs)))", "return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, self, other)) def __radd__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, other,", "__invert__(self): return call('bit_not', self) # Represents an eltwise addition def", "return call('log', x) def max(x, y): return call('max', x, y)", "def log(x): return call('log', x) def max(x, y): return call('max',", "return call('bit_xor', lhs, self) # Enable no_reduce on a contraction", "pointer for a LogicalShape Returns: list (int): Integer dimensions of", "ffi_call(lib.plaidml_program_args_free, raw_args[0]) super(Program, self).__init__(ffi_obj) @property def inputs(self): return [x for", "that the specified dims match the dims of this tensor.", "class LogicalShape(ForeignObject): \"\"\"Docstring for class LogicalShape\"\"\" __ffi_del__ = lib.plaidml_logical_shape_free __ffi_repr__", "2019 Intel Corporation. 
import logging from collections import namedtuple import", "def __rxor__(self, lhs): return call('bit_xor', lhs, self) # Enable no_reduce", "rhs): return call('bit_and', self, rhs) def __rand__(self, lhs): return call('bit_and',", "call('bit_and', self, rhs) def __rand__(self, lhs): return call('bit_and', lhs, self)", "ffi_call(lib.plaidml_value_none) elif isinstance(value, (six.integer_types, bool)): ffi_obj = ffi_call(lib.plaidml_value_int, value) elif", "def __rfloordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, other, self)) def wrap_poly(x): if", "def poly_op(op, *args): args = [wrap_poly(x) for x in args]", "cast(x, dtype): return Tensor(expr=ffi_call(lib.plaidml_expr_cast, wrap_tensor(x).as_ptr(), dtype)) def as_bool(x): return cast(x,", "def max(x, y): return call('max', x, y) def min(x, y):", "is None: expr = ffi_call(lib.plaidml_poly_expr_index, name.encode()) super(TensorIndex, self).__init__(expr) def __lt__(self,", "self.as_ptr())) Constraint = namedtuple('Constraint', ['lhs', 'rhs']) def wrap_dim(x): if isinstance(x,", "TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, self, rhs)) def __radd__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, lhs, self))", "rhs) def __rxor__(self, lhs): return call('bit_xor', lhs, self) # Enable", "bit_size for as_int' return cast(x, dtype) def as_uint(x, bit_size): map", "to a buffer value) # Illegal on other tensors ffi_call(lib.plaidml_expr_param_reset,", "impl') return _Contraction( agg_op, combo_op, inputs, self._impl, _SizeMap(self._tensor._dims), self._tensor._name, )", "The object pointer for a LogicalShape Returns: list (int): Integer", "expr is None: raise ValueError('One of dims=, shape=, or expr=", "for class Tensor\"\"\" __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr _dims", "Program\"\"\" __ffi_del__ = lib.plaidml_program_free __ffi_repr__ = lib.plaidml_program_repr def __init__(self, name,", "# Represents an eltwise bit_right def __rshift__(self, 
rhs): return call('bit_right',", "isinstance(x, TensorDim): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim, x.as_ptr())) return x def poly_op(op, *args):", "_IndexMap(self, key), _SizeMap(self._dims), self._name, )) elif isinstance(value._impl, _ContractionPart): # Binary", "lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr _dims = None _is_contraction = False", "def __lshift__(self, rhs): return call('bit_left', self, rhs) def __rlshift__(self, lhs):", "len(dims), raw_dims) else: raise ValueError('One of dtype= or ptr= must", "dtype(self): return DType(ffi_call(lib.plaidml_logical_shape_get_dtype, self.as_ptr())) @property def ndims(self): return ffi_call(lib.plaidml_logical_shape_get_ndims, self.as_ptr())", "is ffi.typeof('plaidml_value*'): ffi_obj = value else: raise TypeError('Unsupported type {}", "self, rhs) # Represents an eltwise cmp_ge def __ge__(self, rhs):", "self.args if x.is_input] @property def outputs(self): return [x for x", "addition def __add__(self, rhs): return call('add', self, rhs) def __radd__(self,", "value is None: ffi_obj = ffi_call(lib.plaidml_value_none) elif isinstance(value, (six.integer_types, bool)):", "TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, self, other)) def __rsub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, other, self))", "ceil(x): return call('ceil', x) def cond(lhs, rhs, true_case): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND,", "def as_float(x, bit_size): map = { 16: DType.FLOAT16, 32: DType.FLOAT32,", "self.ref = TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone, arg.tensor))) self.shape = LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone, arg.shape)) if arg.buffer:", "Represents an aggregation_op of MIN in a contraction def __le__(self,", "= [x.as_ptr() for x in dims] ffi_call(lib.plaidml_expr_bind_dims, self.as_ptr(), len(raw_dims), raw_dims)", "TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim, x.as_ptr())) return x def poly_op(op, *args): args = 
[wrap_poly(x)", "= ffi_call(lib.plaidml_expr_index_map, ref.as_ptr(), len(idxs), raw_idxs) super(_IndexMap, self).__init__(expr) class _SizeMap(ForeignObject): __ffi_del__", "rhs): return call('div', self, rhs) def __rtruediv__(self, lhs): return call('div',", "x in outputs] dst_updates = [x[0].as_ptr() for x in updates]", "y): return call('gather', x, y) def gradients(loss, variables): wrts =", "def __neg__(self): return TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD,", "lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self)) def __floordiv__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV,", "x, y) def min(x, y): return call('min', x, y) def", "__rmul__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self)) def __floordiv__(self, rhs): return", "dims] ffi_call(lib.plaidml_expr_bind_dims, self.as_ptr(), len(raw_dims), raw_dims) # bind a concrete shape", "self, rhs) # Represents an eltwise bit_left def __lshift__(self, rhs):", "def dim_op(op, *args): args = [wrap_dim(x) for x in args]", "call('log', x) def max(x, y): return call('max', x, y) def", "*shape) def reshape(x, dims): return call('reshape', x, *dims) def round(x):", "other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, other, self)) def __sub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB,", "ffi_obj = ffi_call(lib.plaidml_value_float, value) elif isinstance(value, TensorDim): ffi_obj = ffi_call(lib.plaidml_value_dim,", "Copyright 2019 Intel Corporation. 
import logging from collections import namedtuple", "= lib.plaidml_value_repr def __init__(self, value): # logger.debug('Value({})'.format(value)) if isinstance(value, np.ndarray):", "x.is_input] @property def outputs(self): return [x for x in self.args", "return call('bit_or', self, rhs) def __ror__(self, lhs): return call('bit_or', lhs,", "self)) class _IndexMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def", "# Represents an aggregation_op of MIN in a contraction def", "Tensor(dims=args) def TensorDims(count): return [TensorDim() for i in range(count)] def", "__init__(self, expr=None): if expr is None: expr = ffi_call(lib.plaidml_dim_expr_none) super(TensorDim,", "self, rhs) # Represents an eltwise cmp_lt def __lt__(self, rhs):", "if isinstance(value, np.ndarray): if value.ndim == 0: value = value.item()", "self.__ffi_obj__, buffer.as_ptr()) def __hash__(self): return hash((self.as_ptr(), self._dims, self._is_contraction)) def __getitem__(self,", "bit_right def __rshift__(self, rhs): return call('bit_right', self, rhs) def __rrshift__(self,", "dims match the dims of this tensor. 
def bind_dims(self, *dims):", "when assigning to a Tensor (Type: {})'.format( type(value._impl))) def _set_contraction(self,", "for as_uint' return cast(x, dtype) def ceil(x): return call('ceil', x)", "_IndexMap(self, key), _SizeMap(self._dims), self._name, )) else: raise ValueError('Invalid impl when", "__rrshift__(self, lhs): return call('bit_right', lhs, self) # Represents an eltwise", "use_default on a contraction def use_default(self, rhs): if not self._is_contraction:", "op, len(args), raw_args) class TensorDim(ForeignObject): \"\"\"Docstring for class TensorDim\"\"\" __ffi_del__", "lhs): return call('bit_left', lhs, self) # Represents an eltwise bit_right", "else: self.buffer = None class Program(ForeignObject): \"\"\"Docstring for class Program\"\"\"", "call('ceil', x) def cond(lhs, rhs, true_case): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND, (lhs, rhs,", "self) # Represents an eltwise bit_or def __or__(self, rhs): return", "@property def outputs(self): return [x for x in self.args if", "if isinstance(other, Tensor): return self.__hash__() == TensorRef(other).__hash__() return self.__hash__() ==", "eltwise addition def __add__(self, rhs): return call('add', self, rhs) def", "def __eq__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ, (self, rhs))) def _make_contraction(self, agg_op,", ")) else: raise ValueError('Invalid impl when assigning to a Tensor", "return [x for x in self.args if not x.is_input] def", "self.__hash__() == TensorRef(other).__hash__() return self.__hash__() == other.__hash__() class Value(ForeignObject): \"\"\"Docstring", "constraint in constraints: self.add_constraint(constraint) # Return the tensor's shape @property", "Represents a combo_op of EQ in a contraction def __eq__(self,", "raw_args = [x.as_ptr() for x in args] return Tensor(expr=ffi_call(lib.plaidml_expr_call, fn.encode(),", ") def add_constraints(self, constraints): for constraint in constraints: self.add_constraint(constraint) 
#", "rhs)) def __rmul__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self)) def __floordiv__(self,", "in raw_grads] def ident(x): return call('ident', x) def index(x, axis):", "cast(x, dtype) def as_uint(x, bit_size): map = { 8: DType.UINT8,", "return x def poly_op(op, *args): args = [wrap_poly(x) for x", "return call('add', lhs, self) # Represents an eltwise subtraction def", "__init__(self, expr=None, name=''): if expr is None: expr = ffi_call(lib.plaidml_poly_expr_index,", "[Tensor(expr=x) for x in raw_grads] def log(x): return call('log', x)", "'Unsupport bit_size for as_float' return cast(x, dtype) def as_int(x, bit_size):", "return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal, x)) if isinstance(x, TensorDim): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim, x.as_ptr())) return", "agg_op, combo_op, src_idxs, sink_idxs, sink_sizes, name): src_idxs = [x.as_ptr() for", "a contraction def __add__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD, (self, rhs))) #", "constraint): ffi_call( lib.plaidml_expr_contraction_add_constraint, self.as_ptr(), constraint.lhs.as_ptr(), constraint.rhs.as_ptr(), ) def add_constraints(self, constraints):", "raw_dims) else: raise ValueError('One of dtype= or ptr= must be", "Tensor(expr=ffi_call(lib.plaidml_expr_int, x)) if np.issubdtype(type(x), np.integer): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x.item())) if isinstance(x,", "{}. 
fn: {}, args: {}, bad arg: {}'.format( type(x), fn,", "ffi_call(lib.plaidml_expr_float, value) else: raise TypeError('Invalid type for value={}'.format(value)) elif expr", "return call('cmp_ne', self, rhs) # Represents an eltwise cmp_lt def", "idxs = key else: idxs = [key] idxs = [wrap_poly(x)", "ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_gradient, len(wrts), wrts, loss.as_ptr(), raw_grads, ) return", "def inputs(self): return [x for x in self.args if x.is_input]", "ffi.NULL else: raw_buffer = buffer.as_ptr() expr = ffi_call(lib.plaidml_expr_placeholder, shape.as_ptr(), raw_buffer,", "inputs = [rhs._impl] elif isinstance(rhs._impl, _ContractionPart): # Binary/Ternary op combo_op", "Represents a combo_op of MULTIPLY in a contraction def __mul__(self,", "isinstance(x, Tensor): return x raise TypeError('Unexpected type for call argument:", "[wrap_tensor(x) for x in args] raw_args = [x.as_ptr() for x", "expr = ffi_call(lib.plaidml_expr_float, value) else: raise TypeError('Invalid type for value={}'.format(value))", "[x._impl for x in value._impl.args], _IndexMap(self, key), _SizeMap(self._dims), self._name, ))", "self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN, rhs)) # Represents a combo_op of PLUS in a", "call('bit_left', self, rhs) def __rlshift__(self, lhs): return call('bit_left', lhs, self)", "if x.is_input] @property def outputs(self): return [x for x in", "def __rlshift__(self, lhs): return call('bit_left', lhs, self) # Represents an", "x)) return x def dim_op(op, *args): args = [wrap_dim(x) for", "else: raise TypeError('Invalid type for value={}'.format(value)) elif expr is None:", "call('floor', x) def gather(x, y): return call('gather', x, y) def", "Return the tensor's shape @property def shape(self): return LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape, self.as_ptr()))", "a contraction.') ffi_call(lib.plaidml_expr_contraction_set_no_reduce, self.as_ptr(), True) return self # Set use_default", "raw_dims 
= ffi.new('int64_t[]', [0 if x is None else x", "[key] idxs = [wrap_poly(x) for x in idxs] raw_idxs =", "in range(raw_args[0].nargs)] ffi_call(lib.plaidml_program_args_free, raw_args[0]) super(Program, self).__init__(ffi_obj) @property def inputs(self): return", "eltwise bit_and def __and__(self, rhs): return call('bit_and', self, rhs) def", "bit_left def __lshift__(self, rhs): return call('bit_left', self, rhs) def __rlshift__(self,", "[x.as_ptr() for x in args] return ffi_call(lib.plaidml_dim_expr_op, op, len(args), raw_args)", "__mul__(self, rhs): return call('mul', self, rhs) def __rmul__(self, lhs): return", "return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, lhs, self)) class _IndexMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__", "= lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, agg_op, combo_op, src_idxs,", "rhs): return call('cmp_le', self, rhs) # Represents an eltwise cmp_ge", "= [x.as_ptr() for x in args] return Tensor(expr=ffi_call(lib.plaidml_expr_call, fn.encode(), len(args),", "= [wrap_dim(x) for x in args] raw_args = [x.as_ptr() for", "an eltwise bit_not def __invert__(self): return call('bit_not', self) # Represents", "on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_no_reduce, self.as_ptr(), True) return self # Set", "self).__init__(expr) def set_param_value(self, buffer): # Changes the value of a", "raise 'Unsupport bit_size for as_float' return cast(x, dtype) def as_int(x,", "self.as_ptr(), shape.as_ptr()) class TensorRef: \"\"\"Docstring for class TensorRef\"\"\" def __init__(self,", "= [ProgramArgument(raw_args[0].args[i]) for i in range(raw_args[0].nargs)] ffi_call(lib.plaidml_program_args_free, raw_args[0]) super(Program, self).__init__(ffi_obj)", "def __init__(self, impl, tensor=None): self._impl = impl self._tensor = tensor", ") return [Tensor(expr=x) for x in raw_grads] def ident(x): return", "index(x, axis): return call('index', x, axis) def jacobian(loss, variables): 
wrts", "# Represents an eltwise division def __div__(self, rhs): return call('div',", "TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int, x)) return x def dim_op(op, *args): args = [wrap_dim(x)", "_set_contraction(self, cion): self._is_contraction = True self.take_ptr(cion) # Represents an eltwise", "x, *dims) def round(x): return call('round', x) def scatter(x, y,", "ffi_call(lib.plaidml_value_expr, value.as_ptr()) elif isinstance(value, (list, tuple)): self._elts = [Value(x) for", "call('div', lhs, self) # Represents an eltwise division def __truediv__(self,", "rhs)) # Represents a combo_op of PLUS in a contraction", "rhs): return call('sub', self, rhs) def __rsub__(self, lhs): return call('sub',", "ptr= must be specified.') super(LogicalShape, self).__init__(ffi_obj) @property def dtype(self): return", "# Represents an eltwise bit_and def __and__(self, rhs): return call('bit_and',", "raise 'Unsupport bit_size for as_int' return cast(x, dtype) def as_uint(x,", "ffi_obj = ffi_call(lib.plaidml_value_none) elif isinstance(value, (six.integer_types, bool)): ffi_obj = ffi_call(lib.plaidml_value_int,", "__ffi_repr__ = lib.plaidml_expr_repr _dims = None _is_contraction = False def", "x, y) def gradients(loss, variables): wrts = [x.as_ptr() for x", "[wrap_poly(x) for x in args] raw_args = [x.as_ptr() for x", "self, rhs) def __rlshift__(self, lhs): return call('bit_left', lhs, self) #", "return cast(x, dtype) def ceil(x): return call('ceil', x) def cond(lhs,", "tuple) or isinstance(key, list): idxs = key else: idxs =", "def __add__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, self, rhs)) def __radd__(self, lhs):", "tensors ffi_call(lib.plaidml_expr_param_reset, self.__ffi_obj__, buffer.as_ptr()) def __hash__(self): return hash((self.as_ptr(), self._dims, self._is_contraction))", "# Unary op combo_op = lib.PLAIDML_COMBO_OP_NONE inputs = [rhs._impl] elif", "self, rhs)) def __rsub__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, lhs, self)) 
def", "\"\"\"Docstring for function plaidml2.edsl.__init\"\"\" ffi_call(lib.plaidml_edsl_init) ffi.init_once(__init, 'plaidml_edsl_init') class LogicalShape(ForeignObject): \"\"\"Docstring", "= lib.plaidml_program_free __ffi_repr__ = lib.plaidml_program_repr def __init__(self, name, outputs, updates=[]):", "@property def int_dims(self): \"\"\"Returns the dimensions of a LogicalShape as", "return TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, self, other))", "other.__hash__() class Value(ForeignObject): \"\"\"Docstring for class Value\"\"\" __ffi_del__ = lib.plaidml_value_free", "self.as_ptr(), constraint.lhs.as_ptr(), constraint.rhs.as_ptr(), ) def add_constraints(self, constraints): for constraint in", "on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_use_default, self.as_ptr(), rhs.as_ptr()) return self def add_constraint(self,", "None: raw_buffer = ffi.NULL else: raw_buffer = buffer.as_ptr() expr =", "dtype: raise 'Unsupport bit_size for as_int' return cast(x, dtype) def", "combo_op, inputs, self._impl, _SizeMap(self._tensor._dims), self._tensor._name, ) class Tensor(ForeignObject): \"\"\"Docstring for", "rhs): return call('cmp_ne', self, rhs) # Represents an eltwise cmp_lt", "for x in value] raw_elts = [x.as_ptr() for x in", "other tensors ffi_call(lib.plaidml_expr_param_reset, self.__ffi_obj__, buffer.as_ptr()) def __hash__(self): return hash((self.as_ptr(), self._dims,", "ffi_obj = ffi_call(lib.plaidml_value_dim, value.as_ptr()) elif isinstance(value, Tensor): ffi_obj = ffi_call(lib.plaidml_value_expr,", "= lib.plaidml_logical_shape_repr def __init__(self, dtype=None, dims=[], ptr=None): if ptr: ffi_obj", "self).__init__(expr) def __lt__(self, rhs): return Constraint(self, wrap_dim(rhs)) def __neg__(self): return", "hash((self.as_ptr(), self._dims, self._is_contraction)) def __getitem__(self, key): return IndexedTensor(_IndexMap(self, key), tensor=self)", "None class 
Program(ForeignObject): \"\"\"Docstring for class Program\"\"\" __ffi_del__ = lib.plaidml_program_free", "len(raw_elts), raw_elts) elif isinstance(value, six.string_types): ffi_obj = ffi_call(lib.plaidml_value_str, value.encode('utf-8')) elif", "def __radd__(self, lhs): return call('add', lhs, self) # Represents an", "else x for x in dims]) ffi_obj = ffi_call(lib.plaidml_logical_shape_alloc, dtype,", "= name self._buffer = buffer if shape: if buffer is", "cmp_lt def __lt__(self, rhs): return call('cmp_lt', self, rhs) # Represents", "return call('bit_left', self, rhs) def __rlshift__(self, lhs): return call('bit_left', lhs,", "def __or__(self, rhs): return call('bit_or', self, rhs) def __ror__(self, lhs):", "def round(x): return call('round', x) def scatter(x, y, z): return", "['lhs', 'rhs']) def wrap_dim(x): if isinstance(x, six.integer_types): return TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int, x))", "can only be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_no_reduce, self.as_ptr(), True)", "Tensor(expr=ffi_call(lib.plaidml_expr_cast, wrap_tensor(x).as_ptr(), dtype)) def as_bool(x): return cast(x, DType.BOOLEAN) def as_float(x,", "lib.plaidml_expr_contraction_add_constraint, self.as_ptr(), constraint.lhs.as_ptr(), constraint.rhs.as_ptr(), ) def add_constraints(self, constraints): for constraint", "of dims=, shape=, or expr= must be specified.') super(Tensor, self).__init__(expr)", "= tensor def __repr__(self): return repr(self._impl) # Represents an aggregation_op", "\"\"\"Docstring for class Tensor\"\"\" __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr", "for x in rhs._impl.args] else: raise ValueError('Invalid impl') return _Contraction(", "constraints): for constraint in constraints: self.add_constraint(constraint) # Return the tensor's", "return call('prng', state, *shape) def reshape(x, dims): return call('reshape', x,", "LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape, self.as_ptr())) # Verify that 
the specified dims match the", "'Unsupport bit_size for as_uint' return cast(x, dtype) def ceil(x): return", "cmp_le def __le__(self, rhs): return call('cmp_le', self, rhs) # Represents", "for function plaidml2.edsl.__init\"\"\" ffi_call(lib.plaidml_edsl_init) ffi.init_once(__init, 'plaidml_edsl_init') class LogicalShape(ForeignObject): \"\"\"Docstring for", "for x in idxs] expr = ffi_call(lib.plaidml_expr_index_map, ref.as_ptr(), len(idxs), raw_idxs)", "np.ndarray): if value.ndim == 0: value = value.item() else: value", "exp(x): return call('exp', x) def floor(x): return call('floor', x) def", "bit_size): map = { 8: DType.INT8, 16: DType.INT16, 32: DType.INT32,", "import ForeignObject, ffi, ffi_call, lib logger = logging.getLogger(__name__) def __init():", "def __rmul__(self, lhs): return call('mul', lhs, self) # Represents an", "reshape(x, dims): return call('reshape', x, *dims) def round(x): return call('round',", "for x in args] raw_args = [x.as_ptr() for x in", "return IndexedTensor(_IndexMap(self, key), tensor=self) def __setitem__(self, key, value): if isinstance(value._impl,", "rhs)) def __rsub__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, lhs, self)) def __mul__(self,", "def add_constraints(self, constraints): for constraint in constraints: self.add_constraint(constraint) # Return", "as_float' return cast(x, dtype) def as_int(x, bit_size): map = {", "= ffi.new('plaidml_program_args**') ffi_obj = ffi_call( lib.plaidml_program_evaluate, name.encode(), len(raw_outputs), raw_outputs, len(updates),", "scatter(x, y, z): return call('scatter', x, y, z) def select(cond,", "ValueError('Invalid impl when assigning to a Tensor (Type: {})'.format( type(value._impl)))", "TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, self, rhs)) def __rmul__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self))", "raw_buffer = ffi.NULL else: raw_buffer = buffer.as_ptr() expr = ffi_call(lib.plaidml_expr_placeholder,", "contraction def __imul__(self, 
rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD, rhs)) # Represents an", "name=''): if expr is None: expr = ffi_call(lib.plaidml_poly_expr_index, name.encode()) super(TensorIndex,", "# Enable no_reduce on a contraction def no_reduce(self): if not", "standard contraction self._set_contraction(value._impl) elif isinstance(value, Tensor): pass elif isinstance(value._impl, _IndexMap):", "__ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, agg_op, combo_op,", "@property def shape(self): return LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape, self.as_ptr())) # Verify that the", "__ffi_del__ = lib.plaidml_dim_expr_free __ffi_repr__ = lib.plaidml_dim_expr_repr def __init__(self, expr=None): if", "not None: self._dims = dims expr = None elif value", "__hash__(self): return hash((self.as_ptr(), self._dims, self._is_contraction)) def __getitem__(self, key): return IndexedTensor(_IndexMap(self,", "lib.plaidml_expr_repr def __init__(self, ref, key): if isinstance(key, tuple) or isinstance(key,", "lib.PLAIDML_COMBO_OP_NONE, [value._impl], _IndexMap(self, key), _SizeMap(self._dims), self._name, )) elif isinstance(value._impl, _ContractionPart):", "def wrap_poly(x): if isinstance(x, six.integer_types): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal, x)) if isinstance(x,", "x.is_input] def wrap_tensor(x): if isinstance(x, six.integer_types): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x)) if", "division def __div__(self, rhs): return call('div', self, rhs) def __rdiv__(self,", "other, self)) def __floordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, self, other)) def", "rhs._impl.op inputs = [x._impl for x in rhs._impl.args] else: raise", "TensorDim): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim, x.as_ptr())) return x def poly_op(op, *args): args", "args] raw_args = [x.as_ptr() for x in args] return ffi_call(lib.plaidml_dim_expr_op,", 
"lib.plaidml_program_repr def __init__(self, name, outputs, updates=[]): raw_outputs = [x.as_ptr() for", "an aggregation_op of PROD in a contraction def __imul__(self, rhs):", "EQ in a contraction def __eq__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ, (self,", "must be specified.') super(LogicalShape, self).__init__(ffi_obj) @property def dtype(self): return DType(ffi_call(lib.plaidml_logical_shape_get_dtype,", "class ProgramArgument: \"\"\"Docstring for class ProgramArgument\"\"\" def __init__(self, arg): self.is_input", "self)) def __sub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, self, other)) def __rsub__(self,", "= ffi.new('int64_t[]', [0 if x is None else x for", "eltwise multiplication def __mul__(self, rhs): return call('mul', self, rhs) def", "self (pointer): The object pointer for a LogicalShape Returns: list", "return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL, (self, rhs))) # Represents a combo_op of EQ", "self) # Represents an eltwise addition def __add__(self, rhs): return", "x in args] return Tensor(expr=ffi_call(lib.plaidml_expr_call, fn.encode(), len(args), raw_args)) def cast(x,", "__ffi_repr__ = lib.plaidml_dim_expr_repr def __init__(self, expr=None): if expr is None:", "raw_buffer = buffer.as_ptr() expr = ffi_call(lib.plaidml_expr_placeholder, shape.as_ptr(), raw_buffer, name.encode()) elif", "= [wrap_tensor(x) for x in args] raw_args = [x.as_ptr() for", "Represents an eltwise bit_xor def __xor__(self, rhs): return call('bit_xor', self,", "TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone, arg.tensor))) self.shape = LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone, arg.shape)) if arg.buffer: tensor_shape =", "return call('div', lhs, self) # Represents an eltwise cmp_eq def", "self, rhs) def __ror__(self, lhs): return call('bit_or', lhs, self) #", "value)) super(Value, self).__init__(ffi_obj) def as_tensor(self): return Tensor(expr=ffi_call(lib.plaidml_value_expr_get, 
self.as_ptr())) def TensorOutput(*args):", "TensorDims(count): return [TensorDim() for i in range(count)] def TensorIndexes(count): return", "ndims(self): return ffi_call(lib.plaidml_logical_shape_get_ndims, self.as_ptr()) @property def int_dims(self): \"\"\"Returns the dimensions", "None: expr = ffi_call(lib.plaidml_poly_expr_index, name.encode()) super(TensorIndex, self).__init__(expr) def __lt__(self, rhs):", "x) def sqrt(x): return call('sqrt', x) def tan(x): return call('tan',", "def scatter(x, y, z): return call('scatter', x, y, z) def", "_IndexMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, ref,", "for class Value\"\"\" __ffi_del__ = lib.plaidml_value_free __ffi_repr__ = lib.plaidml_value_repr def", "rhs)) # Represents an aggregation_op of MAX in a contraction", "ValueError('One of dims=, shape=, or expr= must be specified.') super(Tensor,", "__ffi_del__ = lib.plaidml_value_free __ffi_repr__ = lib.plaidml_value_repr def __init__(self, value): #", "TensorShape( ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape, self.as_ptr())) Constraint = namedtuple('Constraint', ['lhs', 'rhs']) def wrap_dim(x):", "return call('bit_left', lhs, self) # Represents an eltwise bit_right def", "def _bind(self, expr): self.take_ptr(expr) def __neg__(self): return TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG, self)) def", "self.tensor.as_ptr())) def __eq__(self, other): if isinstance(other, Tensor): return self.__hash__() ==", "aggregation_op of PROD in a contraction def __imul__(self, rhs): return", "return x raise TypeError('Unexpected type for call argument: {}. 
fn:", "rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM, rhs)) # Represents an aggregation_op of PROD", "lhs): return call('add', lhs, self) # Represents an eltwise subtraction", "= ffi_call(lib.plaidml_value_none) elif isinstance(value, (six.integer_types, bool)): ffi_obj = ffi_call(lib.plaidml_value_int, value)", "def __iadd__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM, rhs)) # Represents an aggregation_op", "plaidml2 import DType from plaidml2.core import TensorShape, Buffer from plaidml2.ffi", "other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, other, self)) def __mul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL,", "shape=None, dims=None, expr=None, value=None, name='', buffer=None): self._name = name self._buffer", "an aggregation_op of MAX in a contraction def __ge__(self, rhs):", "buffer is None: raw_buffer = ffi.NULL else: raw_buffer = buffer.as_ptr()", "x, y) def pow(x, y): return call('pow', x, y) def", "def __rshift__(self, rhs): return call('bit_right', self, rhs) def __rrshift__(self, lhs):", "@property def dtype(self): return DType(ffi_call(lib.plaidml_logical_shape_get_dtype, self.as_ptr())) @property def ndims(self): return", "return hash((self.as_ptr(), self._dims, self._is_contraction)) def __getitem__(self, key): return IndexedTensor(_IndexMap(self, key),", "a contraction def __mul__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL, (self, rhs))) #", "in range(self.ndims) ] def into_TensorShape(self): return TensorShape( ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape, self.as_ptr())) Constraint", "for as_float' return cast(x, dtype) def as_int(x, bit_size): map =", "other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, other, self)) def __floordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV,", "a list. 
Args: self (pointer): The object pointer for a", "class Value\"\"\" __ffi_del__ = lib.plaidml_value_free __ffi_repr__ = lib.plaidml_value_repr def __init__(self,", "the dimensions of a LogicalShape as a list. Args: self", "lib.plaidml_expr_gradient, len(wrts), wrts, loss.as_ptr(), raw_grads, ) return [Tensor(expr=x) for x", "Represents an eltwise addition def __add__(self, rhs): return call('add', self,", "eltwise subtraction def __sub__(self, rhs): return call('sub', self, rhs) def", "as_uint' return cast(x, dtype) def ceil(x): return call('ceil', x) def", "poly_op(op, *args): args = [wrap_poly(x) for x in args] raw_args", "x def poly_op(op, *args): args = [wrap_poly(x) for x in", "in updates] raw_args = ffi.new('plaidml_program_args**') ffi_obj = ffi_call( lib.plaidml_program_evaluate, name.encode(),", "rhs))) def _make_contraction(self, agg_op, rhs): # Extract combo_op and inputs", "lhs): return call('sub', lhs, self) # Represents an eltwise multiplication", "rhs))) # Represents a combo_op of MULTIPLY in a contraction", "= [x.as_ptr() for x in src_idxs] expr = ffi_call( lib.plaidml_expr_contraction,", "None elif value is not None: if isinstance(value, six.integer_types): expr", "other)) def __rmul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, other, self)) def __floordiv__(self,", "def __floordiv__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, self, rhs)) def __rfloordiv__(self, lhs):", "in self._elts] ffi_obj = ffi_call(lib.plaidml_value_tuple, len(raw_elts), raw_elts) elif isinstance(value, six.string_types):", "src_idxs] expr = ffi_call( lib.plaidml_expr_contraction, agg_op, combo_op, sink_idxs.as_ptr(), sink_sizes.as_ptr(), len(src_idxs),", "expr=None, value=None, name='', buffer=None): self._name = name self._buffer = buffer", "self._buffer = buffer if shape: if buffer is None: raw_buffer", "# Binary or ternary ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, value._impl.op,", "as_int(x, 
bit_size): map = { 8: DType.INT8, 16: DType.INT16, 32:", "ffi_call(lib.plaidml_logical_shape_get_dim_int, self.as_ptr(), i) for i in range(self.ndims) ] def into_TensorShape(self):", "def __sub__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, self, rhs)) def __rsub__(self, lhs):", "value) else: raise TypeError('Invalid type for value={}'.format(value)) elif expr is", "agg_op, combo_op, sink_idxs.as_ptr(), sink_sizes.as_ptr(), len(src_idxs), src_idxs, name.encode(), ) super(_Contraction, self).__init__(expr)", "or ternary ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, value._impl.op, [x._impl for", "raise TypeError('Unexpected type for call argument: {}. fn: {}, args:", "Constraint = namedtuple('Constraint', ['lhs', 'rhs']) def wrap_dim(x): if isinstance(x, six.integer_types):", "self, rhs) def __rand__(self, lhs): return call('bit_and', lhs, self) #", "DType.UINT16, 32: DType.UINT32, 64: DType.UINT64, } dtype = map.get(bit_size) if", "cmp_gt def __gt__(self, rhs): return call('cmp_gt', self, rhs) # Represents", "__ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, dims): dims", "[ProgramArgument(raw_args[0].args[i]) for i in range(raw_args[0].nargs)] ffi_call(lib.plaidml_program_args_free, raw_args[0]) super(Program, self).__init__(ffi_obj) @property", "self._elts = [Value(x) for x in value] raw_elts = [x.as_ptr()", "DType.INT8, 16: DType.INT16, 32: DType.INT32, 64: DType.INT64, } dtype =", "lib.plaidml_expr_repr def __init__(self, dims): dims = [wrap_dim(x) for x in", "\"\"\"Docstring for class LogicalShape\"\"\" __ffi_del__ = lib.plaidml_logical_shape_free __ffi_repr__ = lib.plaidml_logical_shape_repr", "return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM, rhs)) # Represents an aggregation_op of PROD in", "TensorIndexes(count): return [TensorIndex() for i in range(count)] class ProgramArgument: \"\"\"Docstring", "return call('round', x) def scatter(x, y, z): 
return call('scatter', x,", "dtype: raise 'Unsupport bit_size for as_float' return cast(x, dtype) def", "ffi_obj = ptr elif dtype is not None: raw_dims =", "wrap_tensor(x): if isinstance(x, six.integer_types): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x)) if np.issubdtype(type(x), np.integer):", "[x.as_ptr() for x in idxs] expr = ffi_call(lib.plaidml_expr_index_map, ref.as_ptr(), len(idxs),", "self.as_ptr(), True) return self # Set use_default on a contraction", "a contraction def __iadd__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM, rhs)) # Represents", "class TensorIndex\"\"\" __ffi_del__ = lib.plaidml_poly_expr_free __ffi_repr__ = lib.plaidml_poly_expr_repr def __init__(self,", "list): idxs = key else: idxs = [key] idxs =", "return Tensor(dims=args) def TensorDims(count): return [TensorDim() for i in range(count)]", "True) return self # Set use_default on a contraction def", "for i in range(count)] def TensorIndexes(count): return [TensorIndex() for i", "def as_bool(x): return cast(x, DType.BOOLEAN) def as_float(x, bit_size): map =", "lib.plaidml_expr_jacobian, len(wrts), wrts, loss.as_ptr(), raw_grads, ) return [Tensor(expr=x) for x", "return call('cos', x) def exp(x): return call('exp', x) def floor(x):", "wrap_tensor(x).as_ptr(), dtype)) def as_bool(x): return cast(x, DType.BOOLEAN) def as_float(x, bit_size):", "def __le__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN, rhs)) # Represents a combo_op of", "__init__(self, impl, tensor=None): self._impl = impl self._tensor = tensor def", "shape to this tensor def bind(self, shape): ffi_call(lib.plaidml_expr_bind_shape, self.as_ptr(), shape.as_ptr())", "false_case) def shape(x): return call('shape', x) def sin(x): return call('sin',", "{ 8: DType.UINT8, 16: DType.UINT16, 32: DType.UINT32, 64: DType.UINT64, }", "ffi_call(lib.plaidml_value_int, value) elif isinstance(value, float): ffi_obj = ffi_call(lib.plaidml_value_float, value) 
elif", "agg_op, rhs): # Extract combo_op and inputs if isinstance(rhs._impl, _IndexMap):", "Tensor): pass elif isinstance(value._impl, _IndexMap): # Unary ASSIGN contraction self._set_contraction(", "call('mul', self, rhs) def __rmul__(self, lhs): return call('mul', lhs, self)", "rhs, true_case): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND, (lhs, rhs, true_case))) def cos(x): return", "lhs): return call('bit_and', lhs, self) # Represents an eltwise bit_or", "if isinstance(x, float): return Tensor(expr=ffi_call(lib.plaidml_expr_float, x)) if isinstance(x, TensorDim): return", "for x in updates] raw_args = ffi.new('plaidml_program_args**') ffi_obj = ffi_call(", "y): return call('max', x, y) def min(x, y): return call('min',", "ffi_call(lib.plaidml_value_dim, value.as_ptr()) elif isinstance(value, Tensor): ffi_obj = ffi_call(lib.plaidml_value_expr, value.as_ptr()) elif", "float): ffi_obj = ffi_call(lib.plaidml_value_float, value) elif isinstance(value, TensorDim): ffi_obj =", "0: value = value.item() else: value = value.tolist() if value", "if ptr: ffi_obj = ptr elif dtype is not None:", "# bind a concrete shape to this tensor def bind(self,", "= dims expr = None elif value is not None:", "def __mul__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL, (self, rhs))) # Represents a", "a contraction def no_reduce(self): if not self._is_contraction: raise TypeError('no_reduce can", "contraction def __ge__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX, rhs)) # Represents an aggregation_op", "key): if isinstance(key, tuple) or isinstance(key, list): idxs = key", "set to a buffer value) # Illegal on other tensors", "raw_args) class TensorIndex(ForeignObject): \"\"\"Docstring for class TensorIndex\"\"\" __ffi_del__ = lib.plaidml_poly_expr_free", "dims]) ffi_obj = ffi_call(lib.plaidml_logical_shape_alloc, dtype, len(dims), raw_dims) else: raise ValueError('One", "_is_contraction = False def 
__init__(self, shape=None, dims=None, expr=None, value=None, name='',", "TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self)) def __floordiv__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, self, rhs))", "call argument: {}. fn: {}, args: {}, bad arg: {}'.format(", "def sqrt(x): return call('sqrt', x) def tan(x): return call('tan', x)", "shape): return call('prng', state, *shape) def reshape(x, dims): return call('reshape',", "if not dtype: raise 'Unsupport bit_size for as_int' return cast(x,", "lib.plaidml_expr_repr def __init__(self, agg_op, combo_op, src_idxs, sink_idxs, sink_sizes, name): src_idxs", "i in range(raw_args[0].nargs)] ffi_call(lib.plaidml_program_args_free, raw_args[0]) super(Program, self).__init__(ffi_obj) @property def inputs(self):", "ffi_call(lib.plaidml_edsl_init) ffi.init_once(__init, 'plaidml_edsl_init') class LogicalShape(ForeignObject): \"\"\"Docstring for class LogicalShape\"\"\" __ffi_del__", "buffer if shape: if buffer is None: raw_buffer = ffi.NULL", "x.as_ptr())) if isinstance(x, Tensor): return x raise TypeError('Unexpected type for", "contraction def __mul__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL, (self, rhs))) # Represents", "fn: {}, args: {}, bad arg: {}'.format( type(x), fn, args,", "lhs, self)) def __floordiv__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, self, rhs)) def", "def __mul__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, self, rhs)) def __rmul__(self, lhs):", "lhs): return call('div', lhs, self) # Represents an eltwise division", "return Tensor(expr=ffi_call(lib.plaidml_expr_cast, wrap_tensor(x).as_ptr(), dtype)) def as_bool(x): return cast(x, DType.BOOLEAN) def", "np.integer): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x.item())) if isinstance(x, float): return Tensor(expr=ffi_call(lib.plaidml_expr_float, x))", "of a parameter tensor (i.e. 
one explicitly set to a", "_Contraction( agg_op, combo_op, inputs, self._impl, _SizeMap(self._tensor._dims), self._tensor._name, ) class Tensor(ForeignObject):", "_bind(self, expr): self.take_ptr(expr) def __neg__(self): return TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self,", "idxs = [key] idxs = [wrap_poly(x) for x in idxs]", "def __truediv__(self, rhs): return call('div', self, rhs) def __rtruediv__(self, lhs):", "bit_size for as_float' return cast(x, dtype) def as_int(x, bit_size): map", "else: raw_buffer = buffer.as_ptr() expr = ffi_call(lib.plaidml_expr_placeholder, shape.as_ptr(), raw_buffer, name.encode())", "isinstance(x, six.integer_types): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x)) if np.issubdtype(type(x), np.integer): return Tensor(expr=ffi_call(lib.plaidml_expr_int,", "IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD, rhs)) # Represents an aggregation_op of MAX in a", "Represents an eltwise cmp_le def __le__(self, rhs): return call('cmp_le', self,", "lhs, self)) def __mul__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, self, rhs)) def", "isinstance(other, Tensor): return self.__hash__() == TensorRef(other).__hash__() return self.__hash__() == other.__hash__()", "log(x): return call('log', x) def max(x, y): return call('max', x,", "def cos(x): return call('cos', x) def exp(x): return call('exp', x)", "for x in args] return ffi_call(lib.plaidml_dim_expr_op, op, len(args), raw_args) class", "dtype): return Tensor(expr=ffi_call(lib.plaidml_expr_cast, wrap_tensor(x).as_ptr(), dtype)) def as_bool(x): return cast(x, DType.BOOLEAN)", "= None elif value is not None: if isinstance(value, six.integer_types):", "a contraction def __le__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN, rhs)) # Represents a", "or ptr= must be specified.') super(LogicalShape, self).__init__(ffi_obj) @property def dtype(self):", "subtraction def __sub__(self, rhs): return call('sub', 
self, rhs) def __rsub__(self,", "wrap_poly(x): if isinstance(x, six.integer_types): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal, x)) if isinstance(x, TensorDim):", "= None class Program(ForeignObject): \"\"\"Docstring for class Program\"\"\" __ffi_del__ =", "call('min', x, y) def pow(x, y): return call('pow', x, y)", "TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, other, self)) def wrap_poly(x): if isinstance(x, six.integer_types): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal,", "PROD in a contraction def __imul__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD, rhs))", "return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ, (self, rhs))) def _make_contraction(self, agg_op, rhs): # Extract", "= [x.as_ptr() for x in self._elts] ffi_obj = ffi_call(lib.plaidml_value_tuple, len(raw_elts),", "def no_reduce(self): if not self._is_contraction: raise TypeError('no_reduce can only be", "def index(x, axis): return call('index', x, axis) def jacobian(loss, variables):", "def __add__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD, (self, rhs))) # Represents a", "self._is_contraction)) def __getitem__(self, key): return IndexedTensor(_IndexMap(self, key), tensor=self) def __setitem__(self,", "Binary/Ternary op combo_op = rhs._impl.op inputs = [x._impl for x", "x in value._impl.args], _IndexMap(self, key), _SizeMap(self._dims), self._name, )) else: raise", "combo_op of EQ in a contraction def __eq__(self, rhs): return", "None else x for x in dims]) ffi_obj = ffi_call(lib.plaidml_logical_shape_alloc,", "self._name, )) else: raise ValueError('Invalid impl when assigning to a", "DType.UINT64, } dtype = map.get(bit_size) if not dtype: raise 'Unsupport", "lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, lhs, self)) def __mul__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL,", "# Represents an aggregation_op of SUM in a contraction def", "expr = ffi_call( 
lib.plaidml_expr_contraction, agg_op, combo_op, sink_idxs.as_ptr(), sink_sizes.as_ptr(), len(src_idxs), src_idxs,", "= value.item() else: value = value.tolist() if value is None:", "DType.UINT32, 64: DType.UINT64, } dtype = map.get(bit_size) if not dtype:", "Illegal on other tensors ffi_call(lib.plaidml_expr_param_reset, self.__ffi_obj__, buffer.as_ptr()) def __hash__(self): return", "ffi_call(lib.plaidml_dim_expr_none) super(TensorDim, self).__init__(expr) def _bind(self, expr): self.take_ptr(expr) def __neg__(self): return", "self._name = name self._buffer = buffer if shape: if buffer", "elif isinstance(value, Tensor): ffi_obj = ffi_call(lib.plaidml_value_expr, value.as_ptr()) elif isinstance(value, (list,", "elif isinstance(rhs._impl, _ContractionPart): # Binary/Ternary op combo_op = rhs._impl.op inputs", "x in variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_gradient, len(wrts),", "of MULTIPLY in a contraction def __mul__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL,", "ffi_call(lib.plaidml_expr_param_reset, self.__ffi_obj__, buffer.as_ptr()) def __hash__(self): return hash((self.as_ptr(), self._dims, self._is_contraction)) def", "contraction def no_reduce(self): if not self._is_contraction: raise TypeError('no_reduce can only", "bit_xor def __xor__(self, rhs): return call('bit_xor', self, rhs) def __rxor__(self,", "if expr is None: expr = ffi_call(lib.plaidml_poly_expr_index, name.encode()) super(TensorIndex, self).__init__(expr)", "impl, tensor=None): self._impl = impl self._tensor = tensor def __repr__(self):", "a concrete shape to this tensor def bind(self, shape): ffi_call(lib.plaidml_expr_bind_shape,", "return Constraint(self, wrap_dim(rhs)) def __neg__(self): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self,", "for x in updates] src_updates = [x[1].as_ptr() for x in", "lhs, self) # Represents an eltwise bit_right def __rshift__(self, rhs):", "32: DType.FLOAT32, 
64: DType.FLOAT64, } dtype = map.get(bit_size) if not", "ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape, self.as_ptr())) Constraint = namedtuple('Constraint', ['lhs', 'rhs']) def wrap_dim(x): if", "x in updates] src_updates = [x[1].as_ptr() for x in updates]", "class Value(ForeignObject): \"\"\"Docstring for class Value\"\"\" __ffi_del__ = lib.plaidml_value_free __ffi_repr__", "return [Tensor(expr=x) for x in raw_grads] def log(x): return call('log',", "idxs] expr = ffi_call(lib.plaidml_expr_index_map, ref.as_ptr(), len(idxs), raw_idxs) super(_IndexMap, self).__init__(expr) class", "value.as_ptr()) elif isinstance(value, Tensor): ffi_obj = ffi_call(lib.plaidml_value_expr, value.as_ptr()) elif isinstance(value,", "return Tensor(expr=ffi_call(lib.plaidml_expr_int, x.item())) if isinstance(x, float): return Tensor(expr=ffi_call(lib.plaidml_expr_float, x)) if", "TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, self, other)) def __rfloordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, other, self))", "ffi.typeof(value) is ffi.typeof('plaidml_value*'): ffi_obj = value else: raise TypeError('Unsupported type", "wrts = [x.as_ptr() for x in variables] raw_grads = ffi.new('plaidml_expr*[]',", "self, rhs) def __radd__(self, lhs): return call('add', lhs, self) #", "call('cmp_lt', self, rhs) # Represents an eltwise cmp_gt def __gt__(self,", "call('neg', self) # Represents an eltwise bit_not def __invert__(self): return", "def __ne__(self, rhs): return call('cmp_ne', self, rhs) # Represents an", "plaidml2.ffi import ForeignObject, ffi, ffi_call, lib logger = logging.getLogger(__name__) def", "= lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, ref, key): if", "other): if isinstance(other, Tensor): return self.__hash__() == TensorRef(other).__hash__() return self.__hash__()", "i in range(self.ndims) ] def into_TensorShape(self): return TensorShape( ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape, self.as_ptr()))", "dtype 
is not None: raw_dims = ffi.new('int64_t[]', [0 if x", "__neg__(self): return call('neg', self) # Represents an eltwise bit_not def", "rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, self, rhs)) def __rsub__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB,", "def __le__(self, rhs): return call('cmp_le', self, rhs) # Represents an", "def __rsub__(self, lhs): return call('sub', lhs, self) # Represents an", "lhs, self) # Represents an eltwise bit_or def __or__(self, rhs):", "rhs))) # Represents a combo_op of EQ in a contraction", "64: DType.UINT64, } dtype = map.get(bit_size) if not dtype: raise", "class LogicalShape\"\"\" __ffi_del__ = lib.plaidml_logical_shape_free __ffi_repr__ = lib.plaidml_logical_shape_repr def __init__(self,", "rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, self, rhs)) def __rmul__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL,", "class _Contraction(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self,", "MAX in a contraction def __ge__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX, rhs)) #", "if shape: if buffer is None: raw_buffer = ffi.NULL else:", "in args] return Tensor(expr=ffi_call(lib.plaidml_expr_call, fn.encode(), len(args), raw_args)) def cast(x, dtype):", "__rlshift__(self, lhs): return call('bit_left', lhs, self) # Represents an eltwise", "dimensions of a LogicalShape as a list. 
Args: self (pointer):", "self) # Represents an eltwise division def __truediv__(self, rhs): return", "def __rsub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, other, self)) def __mul__(self, other):", "# standard contraction self._set_contraction(value._impl) elif isinstance(value, Tensor): pass elif isinstance(value._impl,", "__rshift__(self, rhs): return call('bit_right', self, rhs) def __rrshift__(self, lhs): return", "if isinstance(x, TensorDim): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim, x.as_ptr())) return x def poly_op(op,", "inputs, self._impl, _SizeMap(self._tensor._dims), self._tensor._name, ) class Tensor(ForeignObject): \"\"\"Docstring for class", "rhs): return call('mul', self, rhs) def __rmul__(self, lhs): return call('mul',", "= LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone, arg.shape)) if arg.buffer: tensor_shape = self.shape.into_TensorShape() self.buffer =", "def as_uint(x, bit_size): map = { 8: DType.UINT8, 16: DType.UINT16,", "range(count)] def TensorIndexes(count): return [TensorIndex() for i in range(count)] class", "Represents an eltwise cmp_ge def __ge__(self, rhs): return call('cmp_ge', self,", "gradients(loss, variables): wrts = [x.as_ptr() for x in variables] raw_grads", "a LogicalShape Returns: list (int): Integer dimensions of the LogicalShape.", "= lib.plaidml_poly_expr_free __ffi_repr__ = lib.plaidml_poly_expr_repr def __init__(self, expr=None, name=''): if", "eltwise division def __div__(self, rhs): return call('div', self, rhs) def", "tuple)): self._elts = [Value(x) for x in value] raw_elts =", "super(Value, self).__init__(ffi_obj) def as_tensor(self): return Tensor(expr=ffi_call(lib.plaidml_value_expr_get, self.as_ptr())) def TensorOutput(*args): return", "raise ValueError('One of dims=, shape=, or expr= must be specified.')", "call('div', self, rhs) def __rtruediv__(self, lhs): return call('div', lhs, self)", "contraction def __add__(self, rhs): return 
IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD, (self, rhs))) # Represents", "value) elif isinstance(value, float): ffi_obj = ffi_call(lib.plaidml_value_float, value) elif isinstance(value,", "Represents an aggregation_op of PROD in a contraction def __imul__(self,", "= lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, dims): dims =", "bool)): ffi_obj = ffi_call(lib.plaidml_value_int, value) elif isinstance(value, float): ffi_obj =", "__add__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD, (self, rhs))) # Represents a combo_op", "ffi_call(lib.plaidml_expr_index_map, ref.as_ptr(), len(idxs), raw_idxs) super(_IndexMap, self).__init__(expr) class _SizeMap(ForeignObject): __ffi_del__ =", "Represents an eltwise subtraction def __sub__(self, rhs): return call('sub', self,", "cond(lhs, rhs, true_case): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND, (lhs, rhs, true_case))) def cos(x):", "def __imul__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD, rhs)) # Represents an aggregation_op", "on a contraction def use_default(self, rhs): if not self._is_contraction: raise", "def __getitem__(self, key): return IndexedTensor(_IndexMap(self, key), tensor=self) def __setitem__(self, key,", "<gh_stars>1000+ # Copyright 2019 Intel Corporation. 
import logging from collections", "TensorDim): ffi_obj = ffi_call(lib.plaidml_value_dim, value.as_ptr()) elif isinstance(value, Tensor): ffi_obj =", "x in self.args if x.is_input] @property def outputs(self): return [x", "expr = ffi_call(lib.plaidml_expr_size_map, len(dims), raw_dims) super(_SizeMap, self).__init__(expr) class _Contraction(ForeignObject): __ffi_del__", "lhs, self) # Represents an eltwise multiplication def __mul__(self, rhs):", "def add_constraint(self, constraint): ffi_call( lib.plaidml_expr_contraction_add_constraint, self.as_ptr(), constraint.lhs.as_ptr(), constraint.rhs.as_ptr(), ) def", "# Represents a combo_op of EQ in a contraction def", "rhs) # Represents an eltwise cmp_ge def __ge__(self, rhs): return", "axis) def jacobian(loss, variables): wrts = [x.as_ptr() for x in", "= ffi_call(lib.plaidml_value_str, value.encode('utf-8')) elif isinstance(value, ffi.CData) and ffi.typeof(value) is ffi.typeof('plaidml_value*'):", "dims = [wrap_dim(x) for x in dims] raw_dims = [x.as_ptr()", "def use_default(self, rhs): if not self._is_contraction: raise TypeError('use_default can only", "to a Tensor (Type: {})'.format( type(value._impl))) def _set_contraction(self, cion): self._is_contraction", "['op', 'args']) class IndexedTensor(object): \"\"\"Docstring for class IndexedTensor\"\"\" def __init__(self,", ") return [Tensor(expr=x) for x in raw_grads] def log(x): return", "{}, args: {}, bad arg: {}'.format( type(x), fn, args, x))", "idxs] raw_idxs = [x.as_ptr() for x in idxs] expr =", "self.take_ptr(cion) # Represents an eltwise negation def __neg__(self): return call('neg',", "= [key] idxs = [wrap_poly(x) for x in idxs] raw_idxs", "return cast(x, dtype) def as_uint(x, bit_size): map = { 8:", "loss.as_ptr(), raw_grads, ) return [Tensor(expr=x) for x in raw_grads] def", "TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, other, self)) def __sub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, self, other))", "class Program\"\"\" __ffi_del__ = 
lib.plaidml_program_free __ffi_repr__ = lib.plaidml_program_repr def __init__(self,", "def __repr__(self): return repr(self._impl) # Represents an aggregation_op of SUM", "_SizeMap(self._dims), self._name, )) else: raise ValueError('Invalid impl when assigning to", "class TensorDim\"\"\" __ffi_del__ = lib.plaidml_dim_expr_free __ffi_repr__ = lib.plaidml_dim_expr_repr def __init__(self,", "\"\"\"Docstring for class IndexedTensor\"\"\" def __init__(self, impl, tensor=None): self._impl =", "expr is None: expr = ffi_call(lib.plaidml_poly_expr_index, name.encode()) super(TensorIndex, self).__init__(expr) def", "self).__init__(expr) class _Contraction(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def", "Extract combo_op and inputs if isinstance(rhs._impl, _IndexMap): # Unary op", "Tensor(expr=ffi_call(lib.plaidml_expr_call, fn.encode(), len(args), raw_args)) def cast(x, dtype): return Tensor(expr=ffi_call(lib.plaidml_expr_cast, wrap_tensor(x).as_ptr(),", "bit_size for as_uint' return cast(x, dtype) def ceil(x): return call('ceil',", "is None else x for x in dims]) ffi_obj =", "lib.plaidml_expr_repr _dims = None _is_contraction = False def __init__(self, shape=None,", "bit_and def __and__(self, rhs): return call('bit_and', self, rhs) def __rand__(self,", "type(value._impl))) def _set_contraction(self, cion): self._is_contraction = True self.take_ptr(cion) # Represents", "x in idxs] raw_idxs = [x.as_ptr() for x in idxs]", "def dtype(self): return DType(ffi_call(lib.plaidml_logical_shape_get_dtype, self.as_ptr())) @property def ndims(self): return ffi_call(lib.plaidml_logical_shape_get_ndims,", "elif isinstance(value, ffi.CData) and ffi.typeof(value) is ffi.typeof('plaidml_value*'): ffi_obj = value", "cast(x, dtype) def as_int(x, bit_size): map = { 8: DType.INT8,", "\"\"\"Docstring for class ProgramArgument\"\"\" def __init__(self, arg): self.is_input = arg.is_input", "def TensorDims(count): return [TensorDim() for i in 
range(count)] def TensorIndexes(count):", "this tensor. def bind_dims(self, *dims): raw_dims = [x.as_ptr() for x", "bad arg: {}'.format( type(x), fn, args, x)) def call(fn, *args):", "dim_op(op, *args): args = [wrap_dim(x) for x in args] raw_args", "__neg__(self): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, self,", "self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, value._impl.op, [x._impl for x in value._impl.args], _IndexMap(self,", "arg.is_input self.ref = TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone, arg.tensor))) self.shape = LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone, arg.shape)) if", "eltwise cmp_ge def __ge__(self, rhs): return call('cmp_ge', self, rhs) #", "function plaidml2.edsl.__init\"\"\" ffi_call(lib.plaidml_edsl_init) ffi.init_once(__init, 'plaidml_edsl_init') class LogicalShape(ForeignObject): \"\"\"Docstring for class", "a contraction def __ge__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX, rhs)) # Represents an", "bind a concrete shape to this tensor def bind(self, shape):", "= lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr _dims = None _is_contraction =", "false_case): return call('cond', cond, true_case, false_case) def shape(x): return call('shape',", "[wrap_dim(x) for x in args] raw_args = [x.as_ptr() for x", "for x in self._elts] ffi_obj = ffi_call(lib.plaidml_value_tuple, len(raw_elts), raw_elts) elif", "def shape(x): return call('shape', x) def sin(x): return call('sin', x)", "call('mul', lhs, self) # Represents an eltwise division def __div__(self,", "def shape(self): return LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape, self.as_ptr())) # Verify that the specified", "self.as_ptr()) @property def int_dims(self): \"\"\"Returns the dimensions of a LogicalShape", "src_idxs, sink_idxs, sink_sizes, name): src_idxs = [x.as_ptr() for x in", "len(raw_outputs), 
raw_outputs, len(updates), src_updates, dst_updates, raw_args, ) self.args = [ProgramArgument(raw_args[0].args[i])", "x)) def call(fn, *args): args = [wrap_tensor(x) for x in", "x in args] raw_args = [x.as_ptr() for x in args]", "contraction.') ffi_call(lib.plaidml_expr_contraction_set_use_default, self.as_ptr(), rhs.as_ptr()) return self def add_constraint(self, constraint): ffi_call(", "def cast(x, dtype): return Tensor(expr=ffi_call(lib.plaidml_expr_cast, wrap_tensor(x).as_ptr(), dtype)) def as_bool(x): return", "value.item() else: value = value.tolist() if value is None: ffi_obj", "int_dims(self): \"\"\"Returns the dimensions of a LogicalShape as a list.", "\"\"\"Docstring for class TensorDim\"\"\" __ffi_del__ = lib.plaidml_dim_expr_free __ffi_repr__ = lib.plaidml_dim_expr_repr", "SUM in a contraction def __iadd__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM, rhs))", "= ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_gradient, len(wrts), wrts, loss.as_ptr(), raw_grads, )", "call('cond', cond, true_case, false_case) def shape(x): return call('shape', x) def", "return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, self, other)) def __rfloordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, other,", "expr): self.take_ptr(expr) def __neg__(self): return TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, other):", "logger = logging.getLogger(__name__) def __init(): \"\"\"Docstring for function plaidml2.edsl.__init\"\"\" ffi_call(lib.plaidml_edsl_init)", "in raw_grads] def log(x): return call('log', x) def max(x, y):", "sqrt(x): return call('sqrt', x) def tan(x): return call('tan', x) def", "def __eq__(self, other): if isinstance(other, Tensor): return self.__hash__() == TensorRef(other).__hash__()", "self.as_ptr())) # Verify that the specified dims match the dims", "self.as_ptr())) def TensorOutput(*args): return Tensor(dims=args) def TensorDims(count): return [TensorDim() for", 
"bind_dims(self, *dims): raw_dims = [x.as_ptr() for x in dims] ffi_call(lib.plaidml_expr_bind_dims,", "call('cmp_eq', self, rhs) # Represents an eltwise cmp_ne def __ne__(self,", "call('exp', x) def floor(x): return call('floor', x) def gather(x, y):", "max(x, y): return call('max', x, y) def min(x, y): return", "y) def prng(state, shape): return call('prng', state, *shape) def reshape(x,", "Enable no_reduce on a contraction def no_reduce(self): if not self._is_contraction:", "_Contraction( lib.PLAIDML_AGG_OP_ASSIGN, lib.PLAIDML_COMBO_OP_NONE, [value._impl], _IndexMap(self, key), _SizeMap(self._dims), self._name, )) elif", "an eltwise bit_left def __lshift__(self, rhs): return call('bit_left', self, rhs)", "TensorIndex(ForeignObject): \"\"\"Docstring for class TensorIndex\"\"\" __ffi_del__ = lib.plaidml_poly_expr_free __ffi_repr__ =", "is None: raw_buffer = ffi.NULL else: raw_buffer = buffer.as_ptr() expr", "outputs, updates=[]): raw_outputs = [x.as_ptr() for x in outputs] dst_updates", "Buffer from plaidml2.ffi import ForeignObject, ffi, ffi_call, lib logger =", "Integer dimensions of the LogicalShape. 
\"\"\" return [ ffi_call(lib.plaidml_logical_shape_get_dim_int, self.as_ptr(),", "super(Program, self).__init__(ffi_obj) @property def inputs(self): return [x for x in", "__init__(self, dims): dims = [wrap_dim(x) for x in dims] raw_dims", "arg: {}'.format( type(x), fn, args, x)) def call(fn, *args): args", "of MAX in a contraction def __ge__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX, rhs))", "= { 8: DType.INT8, 16: DType.INT16, 32: DType.INT32, 64: DType.INT64,", "TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, self, rhs)) def __rsub__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, lhs, self))", "name.encode(), ) super(_Contraction, self).__init__(expr) _ContractionPart = namedtuple('_ContractionPart', ['op', 'args']) class", "of SUM in a contraction def __iadd__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM,", "def __eq__(self, rhs): return call('cmp_eq', self, rhs) # Represents an", "Represents an aggregation_op of MAX in a contraction def __ge__(self,", "buffer value) # Illegal on other tensors ffi_call(lib.plaidml_expr_param_reset, self.__ffi_obj__, buffer.as_ptr())", "raw_grads, ) return [Tensor(expr=x) for x in raw_grads] def log(x):", "isinstance(value, float): expr = ffi_call(lib.plaidml_expr_float, value) else: raise TypeError('Invalid type", "cmp_ne def __ne__(self, rhs): return call('cmp_ne', self, rhs) # Represents", "return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, other, self)) def __mul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, self,", "in value._impl.args], _IndexMap(self, key), _SizeMap(self._dims), self._name, )) else: raise ValueError('Invalid", "return _Contraction( agg_op, combo_op, inputs, self._impl, _SizeMap(self._tensor._dims), self._tensor._name, ) class", "call('bit_not', self) # Represents an eltwise addition def __add__(self, rhs):", "ffi.typeof('plaidml_value*'): ffi_obj = value else: raise TypeError('Unsupported type {} 
for", "return call('shape', x) def sin(x): return call('sin', x) def sqrt(x):", "x in self._elts] ffi_obj = ffi_call(lib.plaidml_value_tuple, len(raw_elts), raw_elts) elif isinstance(value,", "= ffi_call(lib.plaidml_value_float, value) elif isinstance(value, TensorDim): ffi_obj = ffi_call(lib.plaidml_value_dim, value.as_ptr())", "six.string_types): ffi_obj = ffi_call(lib.plaidml_value_str, value.encode('utf-8')) elif isinstance(value, ffi.CData) and ffi.typeof(value)", "__sub__(self, rhs): return call('sub', self, rhs) def __rsub__(self, lhs): return", "__add__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, self, rhs)) def __radd__(self, lhs): return", "= ffi.NULL else: raw_buffer = buffer.as_ptr() expr = ffi_call(lib.plaidml_expr_placeholder, shape.as_ptr(),", "of EQ in a contraction def __eq__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ,", "specified.') super(LogicalShape, self).__init__(ffi_obj) @property def dtype(self): return DType(ffi_call(lib.plaidml_logical_shape_get_dtype, self.as_ptr())) @property", "TypeError('no_reduce can only be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_no_reduce, self.as_ptr(),", "isinstance(value._impl, _ContractionPart): # Binary or ternary ASSIGN contraction self._set_contraction( _Contraction(", "TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal, x)) if isinstance(x, TensorDim): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim, x.as_ptr())) return x", "return call('bit_and', self, rhs) def __rand__(self, lhs): return call('bit_and', lhs,", "in dims] expr = ffi_call(lib.plaidml_expr_size_map, len(dims), raw_dims) super(_SizeMap, self).__init__(expr) class", "IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL, (self, rhs))) # Represents a combo_op of EQ in", "as_tensor(self): return Tensor(expr=ffi_call(lib.plaidml_value_expr_get, self.as_ptr())) def TensorOutput(*args): return Tensor(dims=args) def TensorDims(count):", "Represents a 
combo_op of PLUS in a contraction def __add__(self,", "dims): return call('reshape', x, *dims) def round(x): return call('round', x)", "impl when assigning to a Tensor (Type: {})'.format( type(value._impl))) def", "lib.plaidml_logical_shape_repr def __init__(self, dtype=None, dims=[], ptr=None): if ptr: ffi_obj =", "a combo_op of EQ in a contraction def __eq__(self, rhs):", "x in raw_grads] def log(x): return call('log', x) def max(x,", "__ffi_repr__ = lib.plaidml_program_repr def __init__(self, name, outputs, updates=[]): raw_outputs =", "class IndexedTensor(object): \"\"\"Docstring for class IndexedTensor\"\"\" def __init__(self, impl, tensor=None):", "ffi_call( lib.plaidml_expr_jacobian, len(wrts), wrts, loss.as_ptr(), raw_grads, ) return [Tensor(expr=x) for", "self._impl, _SizeMap(self._tensor._dims), self._tensor._name, ) class Tensor(ForeignObject): \"\"\"Docstring for class Tensor\"\"\"", "tensor def __hash__(self): return hash(ffi_call(lib.plaidml_expr_ptr, self.tensor.as_ptr())) def __eq__(self, other): if", "from plaidml2.core import TensorShape, Buffer from plaidml2.ffi import ForeignObject, ffi,", "lhs, self)) class _IndexMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr", "expr=None): if expr is None: expr = ffi_call(lib.plaidml_dim_expr_none) super(TensorDim, self).__init__(expr)", "elif isinstance(value._impl, _IndexMap): # Unary ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN,", "\"\"\"Docstring for class TensorIndex\"\"\" __ffi_del__ = lib.plaidml_poly_expr_free __ffi_repr__ = lib.plaidml_poly_expr_repr", "False def __init__(self, shape=None, dims=None, expr=None, value=None, name='', buffer=None): self._name", "class Tensor\"\"\" __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr _dims =", "[Value(x) for x in value] raw_elts = [x.as_ptr() for x", "in value] raw_elts = [x.as_ptr() for x in self._elts] ffi_obj", "self, rhs) def __rdiv__(self, lhs): 
return call('div', lhs, self) #", "no_reduce on a contraction def no_reduce(self): if not self._is_contraction: raise", "else: raise ValueError('One of dtype= or ptr= must be specified.')", "= lib.PLAIDML_COMBO_OP_NONE inputs = [rhs._impl] elif isinstance(rhs._impl, _ContractionPart): # Binary/Ternary", "cos(x): return call('cos', x) def exp(x): return call('exp', x) def", "__rsub__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, lhs, self)) def __mul__(self, rhs): return", "IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD, (self, rhs))) # Represents a combo_op of MULTIPLY in", "Buffer(tensor_shape, ptr=ffi_call(lib.plaidml_buffer_clone, arg.buffer)) else: self.buffer = None class Program(ForeignObject): \"\"\"Docstring", "x.as_ptr())) return x def poly_op(op, *args): args = [wrap_poly(x) for", "def wrap_dim(x): if isinstance(x, six.integer_types): return TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int, x)) return x", "import namedtuple import numpy as np import six from plaidml2", "lib.plaidml_poly_expr_repr def __init__(self, expr=None, name=''): if expr is None: expr", "# logger.debug('Value({})'.format(value)) if isinstance(value, np.ndarray): if value.ndim == 0: value", "(Type: {})'.format( type(value._impl))) def _set_contraction(self, cion): self._is_contraction = True self.take_ptr(cion)", "PLUS in a contraction def __add__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD, (self,", "self, rhs) def __rrshift__(self, lhs): return call('bit_right', lhs, self) #", "def reshape(x, dims): return call('reshape', x, *dims) def round(x): return", "lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, dims): dims = [wrap_dim(x)", "of dtype= or ptr= must be specified.') super(LogicalShape, self).__init__(ffi_obj) @property", "the specified dims match the dims of this tensor. 
def", "x, axis) def jacobian(loss, variables): wrts = [x.as_ptr() for x", "be specified.') super(LogicalShape, self).__init__(ffi_obj) @property def dtype(self): return DType(ffi_call(lib.plaidml_logical_shape_get_dtype, self.as_ptr()))", "def __init__(self, shape=None, dims=None, expr=None, value=None, name='', buffer=None): self._name =", "x in dims] raw_dims = [x.as_ptr() for x in dims]", "eltwise cmp_lt def __lt__(self, rhs): return call('cmp_lt', self, rhs) #", "the LogicalShape. \"\"\" return [ ffi_call(lib.plaidml_logical_shape_get_dim_int, self.as_ptr(), i) for i", "name self._buffer = buffer if shape: if buffer is None:", ") class Tensor(ForeignObject): \"\"\"Docstring for class Tensor\"\"\" __ffi_del__ = lib.plaidml_expr_free", "class TensorRef\"\"\" def __init__(self, tensor): self.tensor = tensor def __hash__(self):", "def __add__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, self, other)) def __radd__(self, other):", "isinstance(x, TensorDim): return Tensor(expr=ffi_call(lib.plaidml_expr_dim, x.as_ptr())) if isinstance(x, Tensor): return x", "def __xor__(self, rhs): return call('bit_xor', self, rhs) def __rxor__(self, lhs):", "call('shape', x) def sin(x): return call('sin', x) def sqrt(x): return", "x in idxs] expr = ffi_call(lib.plaidml_expr_index_map, ref.as_ptr(), len(idxs), raw_idxs) super(_IndexMap,", "tensor=self) def __setitem__(self, key, value): if isinstance(value._impl, _Contraction): # standard", "floor(x): return call('floor', x) def gather(x, y): return call('gather', x,", "__ffi_del__ = lib.plaidml_poly_expr_free __ffi_repr__ = lib.plaidml_poly_expr_repr def __init__(self, expr=None, name=''):", "namedtuple('_ContractionPart', ['op', 'args']) class IndexedTensor(object): \"\"\"Docstring for class IndexedTensor\"\"\" def", "updates=[]): raw_outputs = [x.as_ptr() for x in outputs] dst_updates =", "call('bit_and', lhs, self) # Represents an eltwise bit_or def __or__(self,", "_IndexMap): # Unary ASSIGN contraction 
self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, lib.PLAIDML_COMBO_OP_NONE, [value._impl],", "negation def __neg__(self): return call('neg', self) # Represents an eltwise", "] def into_TensorShape(self): return TensorShape( ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape, self.as_ptr())) Constraint = namedtuple('Constraint',", "class _SizeMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self,", "= value.tolist() if value is None: ffi_obj = ffi_call(lib.plaidml_value_none) elif", "sink_sizes, name): src_idxs = [x.as_ptr() for x in src_idxs] expr", "range(count)] class ProgramArgument: \"\"\"Docstring for class ProgramArgument\"\"\" def __init__(self, arg):", "combo_op of MULTIPLY in a contraction def __mul__(self, rhs): return", "lhs): return call('bit_right', lhs, self) # Represents an eltwise bit_and", "len(args), raw_args) class TensorIndex(ForeignObject): \"\"\"Docstring for class TensorIndex\"\"\" __ffi_del__ =", "return LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape, self.as_ptr())) # Verify that the specified dims match", "value is not None: if isinstance(value, six.integer_types): expr = ffi_call(lib.plaidml_expr_int,", "value={}'.format(type(value), value)) super(Value, self).__init__(ffi_obj) def as_tensor(self): return Tensor(expr=ffi_call(lib.plaidml_value_expr_get, self.as_ptr())) def", "__eq__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ, (self, rhs))) def _make_contraction(self, agg_op, rhs):", "class TensorDim(ForeignObject): \"\"\"Docstring for class TensorDim\"\"\" __ffi_del__ = lib.plaidml_dim_expr_free __ffi_repr__", "8: DType.UINT8, 16: DType.UINT16, 32: DType.UINT32, 64: DType.UINT64, } dtype", "wrap_dim(rhs)) def __neg__(self): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, rhs): return", "= { 8: DType.UINT8, 16: DType.UINT16, 32: DType.UINT32, 64: DType.UINT64,", "key), _SizeMap(self._dims), 
self._name, )) elif isinstance(value._impl, _ContractionPart): # Binary or", "call('add', self, rhs) def __radd__(self, lhs): return call('add', lhs, self)", "rhs): return call('div', self, rhs) def __rdiv__(self, lhs): return call('div',", "lhs): return call('bit_xor', lhs, self) # Enable no_reduce on a", "self._impl = impl self._tensor = tensor def __repr__(self): return repr(self._impl)", "wrap_dim(x): if isinstance(x, six.integer_types): return TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int, x)) return x def", "in variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_jacobian, len(wrts), wrts,", "plaidml2.core import TensorShape, Buffer from plaidml2.ffi import ForeignObject, ffi, ffi_call,", "self)) def __sub__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, self, rhs)) def __rsub__(self,", "Represents an eltwise division def __div__(self, rhs): return call('div', self,", "self.is_input = arg.is_input self.ref = TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone, arg.tensor))) self.shape = LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone,", "map = { 16: DType.FLOAT16, 32: DType.FLOAT32, 64: DType.FLOAT64, }", "dtype)) def as_bool(x): return cast(x, DType.BOOLEAN) def as_float(x, bit_size): map", "__init__(self, value): # logger.debug('Value({})'.format(value)) if isinstance(value, np.ndarray): if value.ndim ==", "sin(x): return call('sin', x) def sqrt(x): return call('sqrt', x) def", "__mul__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL, (self, rhs))) # Represents a combo_op", "if value is None: ffi_obj = ffi_call(lib.plaidml_value_none) elif isinstance(value, (six.integer_types,", "_SizeMap(self._tensor._dims), self._tensor._name, ) class Tensor(ForeignObject): \"\"\"Docstring for class Tensor\"\"\" __ffi_del__", "self, rhs) # Represents an eltwise cmp_gt def __gt__(self, rhs):", "__rmul__(self, lhs): return call('mul', lhs, self) # Represents an eltwise", "an eltwise 
cmp_eq def __eq__(self, rhs): return call('cmp_eq', self, rhs)", "TensorRef: \"\"\"Docstring for class TensorRef\"\"\" def __init__(self, tensor): self.tensor =", "Represents an eltwise multiplication def __mul__(self, rhs): return call('mul', self,", "def __init__(self, value): # logger.debug('Value({})'.format(value)) if isinstance(value, np.ndarray): if value.ndim", "name.encode(), len(raw_outputs), raw_outputs, len(updates), src_updates, dst_updates, raw_args, ) self.args =", "= [x.as_ptr() for x in variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts))", "= lib.plaidml_expr_repr _dims = None _is_contraction = False def __init__(self,", "isinstance(value, np.ndarray): if value.ndim == 0: value = value.item() else:", "= [x[1].as_ptr() for x in updates] raw_args = ffi.new('plaidml_program_args**') ffi_obj", "tensor. def bind_dims(self, *dims): raw_dims = [x.as_ptr() for x in", "return call('div', self, rhs) def __rtruediv__(self, lhs): return call('div', lhs,", "from plaidml2 import DType from plaidml2.core import TensorShape, Buffer from", "return call('div', lhs, self) # Represents an eltwise division def", "raw_grads, ) return [Tensor(expr=x) for x in raw_grads] def ident(x):", "# Represents an eltwise cmp_ge def __ge__(self, rhs): return call('cmp_ge',", "self._is_contraction = True self.take_ptr(cion) # Represents an eltwise negation def", "lib.plaidml_logical_shape_free __ffi_repr__ = lib.plaidml_logical_shape_repr def __init__(self, dtype=None, dims=[], ptr=None): if", "name): src_idxs = [x.as_ptr() for x in src_idxs] expr =", "six.integer_types): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x)) if np.issubdtype(type(x), np.integer): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x.item()))", "def cond(lhs, rhs, true_case): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND, (lhs, rhs, true_case))) def", "dims expr = None elif value is not None: if", "return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, self, rhs)) def 
__rfloordiv__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, lhs,", "TypeError('Unexpected type for call argument: {}. fn: {}, args: {},", "self)) def __mul__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, self, rhs)) def __rmul__(self,", "an eltwise division def __truediv__(self, rhs): return call('div', self, rhs)", "lib.plaidml_program_evaluate, name.encode(), len(raw_outputs), raw_outputs, len(updates), src_updates, dst_updates, raw_args, ) self.args", "return [ ffi_call(lib.plaidml_logical_shape_get_dim_int, self.as_ptr(), i) for i in range(self.ndims) ]", "= impl self._tensor = tensor def __repr__(self): return repr(self._impl) #", "Represents an eltwise cmp_gt def __gt__(self, rhs): return call('cmp_gt', self,", "= [x[0].as_ptr() for x in updates] src_updates = [x[1].as_ptr() for", "Represents an eltwise cmp_eq def __eq__(self, rhs): return call('cmp_eq', self,", "TensorOutput(*args): return Tensor(dims=args) def TensorDims(count): return [TensorDim() for i in", "TensorRef(other).__hash__() return self.__hash__() == other.__hash__() class Value(ForeignObject): \"\"\"Docstring for class", "__ffi_repr__ = lib.plaidml_expr_repr def __init__(self, ref, key): if isinstance(key, tuple)", "ptr elif dtype is not None: raw_dims = ffi.new('int64_t[]', [0", "rhs) def __rtruediv__(self, lhs): return call('div', lhs, self) # Represents", "def __rand__(self, lhs): return call('bit_and', lhs, self) # Represents an", "dtype) def as_uint(x, bit_size): map = { 8: DType.UINT8, 16:", "TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, self, other)) def __radd__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, other, self))", "elif isinstance(value, (six.integer_types, bool)): ffi_obj = ffi_call(lib.plaidml_value_int, value) elif isinstance(value,", "__init(): \"\"\"Docstring for function plaidml2.edsl.__init\"\"\" ffi_call(lib.plaidml_edsl_init) ffi.init_once(__init, 'plaidml_edsl_init') class LogicalShape(ForeignObject):", "rhs): 
self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX, rhs)) # Represents an aggregation_op of MIN in", "# Binary/Ternary op combo_op = rhs._impl.op inputs = [x._impl for", "super(Tensor, self).__init__(expr) def set_param_value(self, buffer): # Changes the value of", "x raise TypeError('Unexpected type for call argument: {}. fn: {},", "self) # Represents an eltwise bit_not def __invert__(self): return call('bit_not',", "contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, lib.PLAIDML_COMBO_OP_NONE, [value._impl], _IndexMap(self, key), _SizeMap(self._dims), self._name,", "def __rfloordiv__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, lhs, self)) class _IndexMap(ForeignObject): __ffi_del__", "self).__init__(ffi_obj) @property def inputs(self): return [x for x in self.args", "__rxor__(self, lhs): return call('bit_xor', lhs, self) # Enable no_reduce on", "return call('sub', lhs, self) # Represents an eltwise multiplication def", "import logging from collections import namedtuple import numpy as np", "= ffi_call(lib.plaidml_expr_size_map, len(dims), raw_dims) super(_SizeMap, self).__init__(expr) class _Contraction(ForeignObject): __ffi_del__ =", "x)) if isinstance(x, TensorDim): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim, x.as_ptr())) return x def", "call('scatter', x, y, z) def select(cond, true_case, false_case): return call('cond',", "[wrap_poly(x) for x in idxs] raw_idxs = [x.as_ptr() for x", "map.get(bit_size) if not dtype: raise 'Unsupport bit_size for as_uint' return", "raw_outputs = [x.as_ptr() for x in outputs] dst_updates = [x[0].as_ptr()", "def __init__(self, dtype=None, dims=[], ptr=None): if ptr: ffi_obj = ptr", "self.buffer = Buffer(tensor_shape, ptr=ffi_call(lib.plaidml_buffer_clone, arg.buffer)) else: self.buffer = None class", "return call('sub', self, rhs) def __rsub__(self, lhs): return call('sub', lhs,", "args] raw_args = [x.as_ptr() for x in args] return 
Tensor(expr=ffi_call(lib.plaidml_expr_call,", "Intel Corporation. import logging from collections import namedtuple import numpy", "ffi_call(lib.plaidml_logical_shape_alloc, dtype, len(dims), raw_dims) else: raise ValueError('One of dtype= or", "else: idxs = [key] idxs = [wrap_poly(x) for x in", "64: DType.FLOAT64, } dtype = map.get(bit_size) if not dtype: raise", "aggregation_op of SUM in a contraction def __iadd__(self, rhs): return", "__mul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, self, other)) def __rmul__(self, other): return", "len(args), raw_args)) def cast(x, dtype): return Tensor(expr=ffi_call(lib.plaidml_expr_cast, wrap_tensor(x).as_ptr(), dtype)) def", "__setitem__(self, key, value): if isinstance(value._impl, _Contraction): # standard contraction self._set_contraction(value._impl)", "assigning to a Tensor (Type: {})'.format( type(value._impl))) def _set_contraction(self, cion):", "six.integer_types): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal, x)) if isinstance(x, TensorDim): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim, x.as_ptr()))", "def prng(state, shape): return call('prng', state, *shape) def reshape(x, dims):", "self).__init__(ffi_obj) def as_tensor(self): return Tensor(expr=ffi_call(lib.plaidml_value_expr_get, self.as_ptr())) def TensorOutput(*args): return Tensor(dims=args)", "return call('ceil', x) def cond(lhs, rhs, true_case): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND, (lhs,", "import DType from plaidml2.core import TensorShape, Buffer from plaidml2.ffi import", "tensor=None): self._impl = impl self._tensor = tensor def __repr__(self): return", "shape.as_ptr(), raw_buffer, name.encode()) elif dims is not None: self._dims =", "contraction def __iadd__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM, rhs)) # Represents an", "in updates] src_updates = [x[1].as_ptr() for x in updates] raw_args", "return call('add', self, rhs) def 
__radd__(self, lhs): return call('add', lhs,", "bit_size): map = { 16: DType.FLOAT16, 32: DType.FLOAT32, 64: DType.FLOAT64,", "float): return Tensor(expr=ffi_call(lib.plaidml_expr_float, x)) if isinstance(x, TensorDim): return Tensor(expr=ffi_call(lib.plaidml_expr_dim, x.as_ptr()))", "the value of a parameter tensor (i.e. one explicitly set", "in dims] ffi_call(lib.plaidml_expr_bind_dims, self.as_ptr(), len(raw_dims), raw_dims) # bind a concrete", "eltwise cmp_ne def __ne__(self, rhs): return call('cmp_ne', self, rhs) #", "of the LogicalShape. \"\"\" return [ ffi_call(lib.plaidml_logical_shape_get_dim_int, self.as_ptr(), i) for", "def __lt__(self, rhs): return call('cmp_lt', self, rhs) # Represents an", "def __hash__(self): return hash(ffi_call(lib.plaidml_expr_ptr, self.tensor.as_ptr())) def __eq__(self, other): if isinstance(other,", "value of a parameter tensor (i.e. one explicitly set to", "DType(ffi_call(lib.plaidml_logical_shape_get_dtype, self.as_ptr())) @property def ndims(self): return ffi_call(lib.plaidml_logical_shape_get_ndims, self.as_ptr()) @property def", "__ffi_repr__ = lib.plaidml_expr_repr def __init__(self, agg_op, combo_op, src_idxs, sink_idxs, sink_sizes,", "src_idxs = [x.as_ptr() for x in src_idxs] expr = ffi_call(", "self) # Represents an eltwise bit_xor def __xor__(self, rhs): return", "combo_op = rhs._impl.op inputs = [x._impl for x in rhs._impl.args]", "return call('cmp_eq', self, rhs) # Represents an eltwise cmp_ne def", "isinstance(x, six.integer_types): return TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int, x)) return x def dim_op(op, *args):", "class Tensor(ForeignObject): \"\"\"Docstring for class Tensor\"\"\" __ffi_del__ = lib.plaidml_expr_free __ffi_repr__", "__ffi_repr__ = lib.plaidml_value_repr def __init__(self, value): # logger.debug('Value({})'.format(value)) if isinstance(value,", "= ptr elif dtype is not None: raw_dims = ffi.new('int64_t[]',", "args] return ffi_call(lib.plaidml_poly_expr_op, op, len(args), raw_args) class 
TensorIndex(ForeignObject): \"\"\"Docstring for", "name.encode()) elif dims is not None: self._dims = dims expr", "return TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, self, rhs))", "[TensorDim() for i in range(count)] def TensorIndexes(count): return [TensorIndex() for", "args] return Tensor(expr=ffi_call(lib.plaidml_expr_call, fn.encode(), len(args), raw_args)) def cast(x, dtype): return", "__init__(self, ref, key): if isinstance(key, tuple) or isinstance(key, list): idxs", "def __rrshift__(self, lhs): return call('bit_right', lhs, self) # Represents an", "# Represents an eltwise cmp_ne def __ne__(self, rhs): return call('cmp_ne',", "i in range(count)] class ProgramArgument: \"\"\"Docstring for class ProgramArgument\"\"\" def", "aggregation_op of MAX in a contraction def __ge__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX,", "16: DType.FLOAT16, 32: DType.FLOAT32, 64: DType.FLOAT64, } dtype = map.get(bit_size)", "ProgramArgument\"\"\" def __init__(self, arg): self.is_input = arg.is_input self.ref = TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone,", "isinstance(x, six.integer_types): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal, x)) if isinstance(x, TensorDim): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim,", "for x in idxs] raw_idxs = [x.as_ptr() for x in", "ffi_call(lib.plaidml_dim_expr_op, op, len(args), raw_args) class TensorDim(ForeignObject): \"\"\"Docstring for class TensorDim\"\"\"", "ffi_call(lib.plaidml_expr_bind_dims, self.as_ptr(), len(raw_dims), raw_dims) # bind a concrete shape to", "ffi.init_once(__init, 'plaidml_edsl_init') class LogicalShape(ForeignObject): \"\"\"Docstring for class LogicalShape\"\"\" __ffi_del__ =", "a contraction.') ffi_call(lib.plaidml_expr_contraction_set_use_default, self.as_ptr(), rhs.as_ptr()) return self def add_constraint(self, constraint):", "for class 
ProgramArgument\"\"\" def __init__(self, arg): self.is_input = arg.is_input self.ref", "concrete shape to this tensor def bind(self, shape): ffi_call(lib.plaidml_expr_bind_shape, self.as_ptr(),", "TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, self, rhs)) def", "ValueError('One of dtype= or ptr= must be specified.') super(LogicalShape, self).__init__(ffi_obj)", "other)) def __radd__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, other, self)) def __sub__(self,", "super(_SizeMap, self).__init__(expr) class _Contraction(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr", "def ndims(self): return ffi_call(lib.plaidml_logical_shape_get_ndims, self.as_ptr()) @property def int_dims(self): \"\"\"Returns the", "elif isinstance(value, float): ffi_obj = ffi_call(lib.plaidml_value_float, value) elif isinstance(value, TensorDim):", "dims=, shape=, or expr= must be specified.') super(Tensor, self).__init__(expr) def", "value) # Illegal on other tensors ffi_call(lib.plaidml_expr_param_reset, self.__ffi_obj__, buffer.as_ptr()) def", "return call('floor', x) def gather(x, y): return call('gather', x, y)", "rhs) def __rsub__(self, lhs): return call('sub', lhs, self) # Represents", "len(src_idxs), src_idxs, name.encode(), ) super(_Contraction, self).__init__(expr) _ContractionPart = namedtuple('_ContractionPart', ['op',", "Represents an eltwise bit_left def __lshift__(self, rhs): return call('bit_left', self,", "__eq__(self, rhs): return call('cmp_eq', self, rhs) # Represents an eltwise", "logging.getLogger(__name__) def __init(): \"\"\"Docstring for function plaidml2.edsl.__init\"\"\" ffi_call(lib.plaidml_edsl_init) ffi.init_once(__init, 'plaidml_edsl_init')", "for value={}'.format(value)) elif expr is None: raise ValueError('One of dims=,", "other, self)) def __mul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, self, other)) def", 
"[x.as_ptr() for x in variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call(", "= buffer if shape: if buffer is None: raw_buffer =", "for x in dims] expr = ffi_call(lib.plaidml_expr_size_map, len(dims), raw_dims) super(_SizeMap,", "shape: if buffer is None: raw_buffer = ffi.NULL else: raw_buffer", "return call('mul', self, rhs) def __rmul__(self, lhs): return call('mul', lhs,", "an eltwise cmp_ge def __ge__(self, rhs): return call('cmp_ge', self, rhs)", "inputs = [x._impl for x in rhs._impl.args] else: raise ValueError('Invalid", "Verify that the specified dims match the dims of this", "i) for i in range(self.ndims) ] def into_TensorShape(self): return TensorShape(", "tensor def bind(self, shape): ffi_call(lib.plaidml_expr_bind_shape, self.as_ptr(), shape.as_ptr()) class TensorRef: \"\"\"Docstring", "isinstance(x, float): return Tensor(expr=ffi_call(lib.plaidml_expr_float, x)) if isinstance(x, TensorDim): return Tensor(expr=ffi_call(lib.plaidml_expr_dim,", "len(idxs), raw_idxs) super(_IndexMap, self).__init__(expr) class _SizeMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__", "return call('bit_xor', self, rhs) def __rxor__(self, lhs): return call('bit_xor', lhs,", "self.buffer = None class Program(ForeignObject): \"\"\"Docstring for class Program\"\"\" __ffi_del__", "map = { 8: DType.INT8, 16: DType.INT16, 32: DType.INT32, 64:", "dtype) def ceil(x): return call('ceil', x) def cond(lhs, rhs, true_case):", "dst_updates, raw_args, ) self.args = [ProgramArgument(raw_args[0].args[i]) for i in range(raw_args[0].nargs)]", "TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, lhs, self)) class _IndexMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ =", "DType.INT32, 64: DType.INT64, } dtype = map.get(bit_size) if not dtype:", "rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD, rhs)) # Represents an aggregation_op of MAX", "return [TensorDim() for i in range(count)] def TensorIndexes(count): return 
[TensorIndex()", "is not None: if isinstance(value, six.integer_types): expr = ffi_call(lib.plaidml_expr_int, value)", "return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD, (self, rhs))) # Represents a combo_op of MULTIPLY", "match the dims of this tensor. def bind_dims(self, *dims): raw_dims", "for x in self.args if x.is_input] @property def outputs(self): return", "for class Program\"\"\" __ffi_del__ = lib.plaidml_program_free __ffi_repr__ = lib.plaidml_program_repr def", "def __init__(self, ref, key): if isinstance(key, tuple) or isinstance(key, list):", "Tensor(ForeignObject): \"\"\"Docstring for class Tensor\"\"\" __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ =", "isinstance(value, Tensor): pass elif isinstance(value._impl, _IndexMap): # Unary ASSIGN contraction", "an eltwise cmp_lt def __lt__(self, rhs): return call('cmp_lt', self, rhs)", "type(x), fn, args, x)) def call(fn, *args): args = [wrap_tensor(x)", "true_case))) def cos(x): return call('cos', x) def exp(x): return call('exp',", "[wrap_dim(x) for x in dims] raw_dims = [x.as_ptr() for x", "return call('bit_not', self) # Represents an eltwise addition def __add__(self,", "call('bit_right', self, rhs) def __rrshift__(self, lhs): return call('bit_right', lhs, self)", "\"\"\"Docstring for class Value\"\"\" __ffi_del__ = lib.plaidml_value_free __ffi_repr__ = lib.plaidml_value_repr", "rhs) def __rdiv__(self, lhs): return call('div', lhs, self) # Represents", "parameter tensor (i.e. 
one explicitly set to a buffer value)", "for i in range(raw_args[0].nargs)] ffi_call(lib.plaidml_program_args_free, raw_args[0]) super(Program, self).__init__(ffi_obj) @property def", "rhs)) def __rfloordiv__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, lhs, self)) class _IndexMap(ForeignObject):", "IndexedTensor(_IndexMap(self, key), tensor=self) def __setitem__(self, key, value): if isinstance(value._impl, _Contraction):", "jacobian(loss, variables): wrts = [x.as_ptr() for x in variables] raw_grads", "expr is None: expr = ffi_call(lib.plaidml_dim_expr_none) super(TensorDim, self).__init__(expr) def _bind(self,", "lhs, self) # Represents an eltwise bit_and def __and__(self, rhs):", "isinstance(value, six.integer_types): expr = ffi_call(lib.plaidml_expr_int, value) elif isinstance(value, float): expr", "def __rmul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, other, self)) def __floordiv__(self, other):", "agg_op, combo_op, inputs, self._impl, _SizeMap(self._tensor._dims), self._tensor._name, ) class Tensor(ForeignObject): \"\"\"Docstring", "# Represents an eltwise bit_xor def __xor__(self, rhs): return call('bit_xor',", "else: raise ValueError('Invalid impl when assigning to a Tensor (Type:", "for x in self.args if not x.is_input] def wrap_tensor(x): if", "tensor (i.e. one explicitly set to a buffer value) #", "self) # Represents an eltwise bit_and def __and__(self, rhs): return", "TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, self, other)) def", "a Tensor (Type: {})'.format( type(value._impl))) def _set_contraction(self, cion): self._is_contraction =", "# Copyright 2019 Intel Corporation. 
import logging from collections import", "x for x in dims]) ffi_obj = ffi_call(lib.plaidml_logical_shape_alloc, dtype, len(dims),", "def exp(x): return call('exp', x) def floor(x): return call('floor', x)", "key, value): if isinstance(value._impl, _Contraction): # standard contraction self._set_contraction(value._impl) elif", "args, x)) def call(fn, *args): args = [wrap_tensor(x) for x", "min(x, y): return call('min', x, y) def pow(x, y): return", "dims=[], ptr=None): if ptr: ffi_obj = ptr elif dtype is", "_Contraction): # standard contraction self._set_contraction(value._impl) elif isinstance(value, Tensor): pass elif", "variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_gradient, len(wrts), wrts, loss.as_ptr(),", "x) def sin(x): return call('sin', x) def sqrt(x): return call('sqrt',", "TensorDim): return Tensor(expr=ffi_call(lib.plaidml_expr_dim, x.as_ptr())) if isinstance(x, Tensor): return x raise", "Tensor): return self.__hash__() == TensorRef(other).__hash__() return self.__hash__() == other.__hash__() class", "if not x.is_input] def wrap_tensor(x): if isinstance(x, six.integer_types): return Tensor(expr=ffi_call(lib.plaidml_expr_int,", "lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, lhs, self)) class _IndexMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free", "other)) def __rfloordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, other, self)) def wrap_poly(x):", "self).__init__(expr) class _SizeMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def", "value._impl.op, [x._impl for x in value._impl.args], _IndexMap(self, key), _SizeMap(self._dims), self._name,", "__mul__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, self, rhs)) def __rmul__(self, lhs): return", "def sin(x): return call('sin', x) def sqrt(x): return call('sqrt', x)", "ffi.new('plaidml_program_args**') ffi_obj = ffi_call( lib.plaidml_program_evaluate, name.encode(), 
len(raw_outputs), raw_outputs, len(updates), src_updates,", "= map.get(bit_size) if not dtype: raise 'Unsupport bit_size for as_float'", "def as_int(x, bit_size): map = { 8: DType.INT8, 16: DType.INT16,", "lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, lhs, self)) def __sub__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB,", "namedtuple import numpy as np import six from plaidml2 import", "_SizeMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, dims):", "def __sub__(self, rhs): return call('sub', self, rhs) def __rsub__(self, lhs):", "TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, self, rhs)) def __rfloordiv__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, lhs, self))", ")) elif isinstance(value._impl, _ContractionPart): # Binary or ternary ASSIGN contraction", "for x in outputs] dst_updates = [x[0].as_ptr() for x in", "def bind_dims(self, *dims): raw_dims = [x.as_ptr() for x in dims]", "value.as_ptr()) elif isinstance(value, (list, tuple)): self._elts = [Value(x) for x", "ffi_call( lib.plaidml_expr_contraction, agg_op, combo_op, sink_idxs.as_ptr(), sink_sizes.as_ptr(), len(src_idxs), src_idxs, name.encode(), )", "raw_elts = [x.as_ptr() for x in self._elts] ffi_obj = ffi_call(lib.plaidml_value_tuple,", "isinstance(value, ffi.CData) and ffi.typeof(value) is ffi.typeof('plaidml_value*'): ffi_obj = value else:", "return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, lhs, self)) def __sub__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, self,", "explicitly set to a buffer value) # Illegal on other", "y): return call('min', x, y) def pow(x, y): return call('pow',", "rhs) def __ror__(self, lhs): return call('bit_or', lhs, self) # Represents", "ffi_call(lib.plaidml_value_float, value) elif isinstance(value, TensorDim): ffi_obj = ffi_call(lib.plaidml_value_dim, value.as_ptr()) elif", "other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, self, other)) def 
__rfloordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV,", "expr = ffi_call(lib.plaidml_dim_expr_none) super(TensorDim, self).__init__(expr) def _bind(self, expr): self.take_ptr(expr) def", "Represents an eltwise division def __truediv__(self, rhs): return call('div', self,", "(six.integer_types, bool)): ffi_obj = ffi_call(lib.plaidml_value_int, value) elif isinstance(value, float): ffi_obj", "only be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_no_reduce, self.as_ptr(), True) return", "lhs, self) # Represents an eltwise subtraction def __sub__(self, rhs):", "lib.plaidml_value_free __ffi_repr__ = lib.plaidml_value_repr def __init__(self, value): # logger.debug('Value({})'.format(value)) if", "rhs) # Represents an eltwise cmp_lt def __lt__(self, rhs): return", "\"\"\"Returns the dimensions of a LogicalShape as a list. Args:", "{}'.format( type(x), fn, args, x)) def call(fn, *args): args =", "dtype: raise 'Unsupport bit_size for as_uint' return cast(x, dtype) def", "fn.encode(), len(args), raw_args)) def cast(x, dtype): return Tensor(expr=ffi_call(lib.plaidml_expr_cast, wrap_tensor(x).as_ptr(), dtype))", "Represents an eltwise cmp_ne def __ne__(self, rhs): return call('cmp_ne', self,", "return TensorShape( ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape, self.as_ptr())) Constraint = namedtuple('Constraint', ['lhs', 'rhs']) def", "raw_args, ) self.args = [ProgramArgument(raw_args[0].args[i]) for i in range(raw_args[0].nargs)] ffi_call(lib.plaidml_program_args_free,", "rhs): return call('cmp_gt', self, rhs) # Represents an eltwise cmp_le", "TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, self, other)) def __rmul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, other, self))", "{ 16: DType.FLOAT16, 32: DType.FLOAT32, 64: DType.FLOAT64, } dtype =", "in args] return ffi_call(lib.plaidml_poly_expr_op, op, len(args), raw_args) class TensorIndex(ForeignObject): \"\"\"Docstring", "not self._is_contraction: raise 
TypeError('no_reduce can only be specified on a", "= self.shape.into_TensorShape() self.buffer = Buffer(tensor_shape, ptr=ffi_call(lib.plaidml_buffer_clone, arg.buffer)) else: self.buffer =", "def jacobian(loss, variables): wrts = [x.as_ptr() for x in variables]", "for x in src_idxs] expr = ffi_call( lib.plaidml_expr_contraction, agg_op, combo_op,", "ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_jacobian, len(wrts), wrts, loss.as_ptr(), raw_grads, ) return", "ffi_call( lib.plaidml_expr_contraction_add_constraint, self.as_ptr(), constraint.lhs.as_ptr(), constraint.rhs.as_ptr(), ) def add_constraints(self, constraints): for", "ffi_obj = ffi_call(lib.plaidml_value_int, value) elif isinstance(value, float): ffi_obj = ffi_call(lib.plaidml_value_float,", "other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, self, other)) def __radd__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD,", "specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_no_reduce, self.as_ptr(), True) return self #", "use_default(self, rhs): if not self._is_contraction: raise TypeError('use_default can only be", "an eltwise addition def __add__(self, rhs): return call('add', self, rhs)", "lhs, self) # Represents an eltwise cmp_eq def __eq__(self, rhs):", "TypeError('use_default can only be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_use_default, self.as_ptr(),", "raw_idxs = [x.as_ptr() for x in idxs] expr = ffi_call(lib.plaidml_expr_index_map,", "ffi_obj = ffi_call(lib.plaidml_value_tuple, len(raw_elts), raw_elts) elif isinstance(value, six.string_types): ffi_obj =", "= ffi_call(lib.plaidml_expr_placeholder, shape.as_ptr(), raw_buffer, name.encode()) elif dims is not None:", "return x def dim_op(op, *args): args = [wrap_dim(x) for x", "arg.shape)) if arg.buffer: tensor_shape = self.shape.into_TensorShape() self.buffer = Buffer(tensor_shape, ptr=ffi_call(lib.plaidml_buffer_clone,", "def gather(x, y): return call('gather', x, 
y) def gradients(loss, variables):", "op combo_op = rhs._impl.op inputs = [x._impl for x in", "\"\"\" return [ ffi_call(lib.plaidml_logical_shape_get_dim_int, self.as_ptr(), i) for i in range(self.ndims)", "src_idxs, name.encode(), ) super(_Contraction, self).__init__(expr) _ContractionPart = namedtuple('_ContractionPart', ['op', 'args'])", "IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM, rhs)) # Represents an aggregation_op of PROD in a", "in outputs] dst_updates = [x[0].as_ptr() for x in updates] src_updates", "plaidml2.edsl.__init\"\"\" ffi_call(lib.plaidml_edsl_init) ffi.init_once(__init, 'plaidml_edsl_init') class LogicalShape(ForeignObject): \"\"\"Docstring for class LogicalShape\"\"\"", "ffi_call(lib.plaidml_poly_expr_op, op, len(args), raw_args) class TensorIndex(ForeignObject): \"\"\"Docstring for class TensorIndex\"\"\"", "__ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr _dims = None _is_contraction", "ternary ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, value._impl.op, [x._impl for x", "raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_jacobian, len(wrts), wrts, loss.as_ptr(), raw_grads,", "# Set use_default on a contraction def use_default(self, rhs): if", "if isinstance(x, six.integer_types): return TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int, x)) return x def dim_op(op,", "self def add_constraint(self, constraint): ffi_call( lib.plaidml_expr_contraction_add_constraint, self.as_ptr(), constraint.lhs.as_ptr(), constraint.rhs.as_ptr(), )", "__init__(self, name, outputs, updates=[]): raw_outputs = [x.as_ptr() for x in", "can only be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_use_default, self.as_ptr(), rhs.as_ptr())", "__init__(self, tensor): self.tensor = tensor def __hash__(self): return hash(ffi_call(lib.plaidml_expr_ptr, self.tensor.as_ptr()))", "def select(cond, true_case, false_case): return call('cond', cond, 
true_case, false_case) def", "lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, ref, key): if isinstance(key,", "wrts, loss.as_ptr(), raw_grads, ) return [Tensor(expr=x) for x in raw_grads]", "_ContractionPart): # Binary or ternary ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN,", "lib.plaidml_expr_contraction, agg_op, combo_op, sink_idxs.as_ptr(), sink_sizes.as_ptr(), len(src_idxs), src_idxs, name.encode(), ) super(_Contraction,", "no_reduce(self): if not self._is_contraction: raise TypeError('no_reduce can only be specified", "# Represents an eltwise negation def __neg__(self): return call('neg', self)", "arg.buffer)) else: self.buffer = None class Program(ForeignObject): \"\"\"Docstring for class", "a contraction def use_default(self, rhs): if not self._is_contraction: raise TypeError('use_default", "def __floordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, self, other)) def __rfloordiv__(self, other):", "rhs) def __rlshift__(self, lhs): return call('bit_left', lhs, self) # Represents", "is not None: self._dims = dims expr = None elif", "Changes the value of a parameter tensor (i.e. 
one explicitly", "# Represents an eltwise bit_or def __or__(self, rhs): return call('bit_or',", "self, rhs)) def __radd__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, lhs, self)) def", "{})'.format( type(value._impl))) def _set_contraction(self, cion): self._is_contraction = True self.take_ptr(cion) #", "in rhs._impl.args] else: raise ValueError('Invalid impl') return _Contraction( agg_op, combo_op,", "expr = ffi_call(lib.plaidml_expr_index_map, ref.as_ptr(), len(idxs), raw_idxs) super(_IndexMap, self).__init__(expr) class _SizeMap(ForeignObject):", "[value._impl], _IndexMap(self, key), _SizeMap(self._dims), self._name, )) elif isinstance(value._impl, _ContractionPart): #", "def __div__(self, rhs): return call('div', self, rhs) def __rdiv__(self, lhs):", "float): expr = ffi_call(lib.plaidml_expr_float, value) else: raise TypeError('Invalid type for", "call('sin', x) def sqrt(x): return call('sqrt', x) def tan(x): return", "call('gather', x, y) def gradients(loss, variables): wrts = [x.as_ptr() for", "elif isinstance(value, six.string_types): ffi_obj = ffi_call(lib.plaidml_value_str, value.encode('utf-8')) elif isinstance(value, ffi.CData)", "of PROD in a contraction def __imul__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD,", "def __sub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, self, other)) def __rsub__(self, other):", "key): return IndexedTensor(_IndexMap(self, key), tensor=self) def __setitem__(self, key, value): if", "def __rsub__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, lhs, self)) def __mul__(self, rhs):", "from collections import namedtuple import numpy as np import six", "def int_dims(self): \"\"\"Returns the dimensions of a LogicalShape as a", "def ident(x): return call('ident', x) def index(x, axis): return call('index',", "value] raw_elts = [x.as_ptr() for x in self._elts] ffi_obj =", "def as_tensor(self): return 
Tensor(expr=ffi_call(lib.plaidml_value_expr_get, self.as_ptr())) def TensorOutput(*args): return Tensor(dims=args) def", "= ffi_call( lib.plaidml_program_evaluate, name.encode(), len(raw_outputs), raw_outputs, len(updates), src_updates, dst_updates, raw_args,", "IndexedTensor(object): \"\"\"Docstring for class IndexedTensor\"\"\" def __init__(self, impl, tensor=None): self._impl", "call('round', x) def scatter(x, y, z): return call('scatter', x, y,", "[x.as_ptr() for x in outputs] dst_updates = [x[0].as_ptr() for x", "if np.issubdtype(type(x), np.integer): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x.item())) if isinstance(x, float): return", "self.args = [ProgramArgument(raw_args[0].args[i]) for i in range(raw_args[0].nargs)] ffi_call(lib.plaidml_program_args_free, raw_args[0]) super(Program,", "x.item())) if isinstance(x, float): return Tensor(expr=ffi_call(lib.plaidml_expr_float, x)) if isinstance(x, TensorDim):", "in constraints: self.add_constraint(constraint) # Return the tensor's shape @property def", "in range(count)] class ProgramArgument: \"\"\"Docstring for class ProgramArgument\"\"\" def __init__(self,", "return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, other, self)) def __floordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, self,", "self, rhs) def __rtruediv__(self, lhs): return call('div', lhs, self) #", "raw_args[0]) super(Program, self).__init__(ffi_obj) @property def inputs(self): return [x for x", "if not dtype: raise 'Unsupport bit_size for as_float' return cast(x,", "(self, rhs))) def _make_contraction(self, agg_op, rhs): # Extract combo_op and", "repr(self._impl) # Represents an aggregation_op of SUM in a contraction", "def floor(x): return call('floor', x) def gather(x, y): return call('gather',", "self._elts] ffi_obj = ffi_call(lib.plaidml_value_tuple, len(raw_elts), raw_elts) elif isinstance(value, six.string_types): ffi_obj", "np.issubdtype(type(x), np.integer): return Tensor(expr=ffi_call(lib.plaidml_expr_int, 
x.item())) if isinstance(x, float): return Tensor(expr=ffi_call(lib.plaidml_expr_float,", "(int): Integer dimensions of the LogicalShape. \"\"\" return [ ffi_call(lib.plaidml_logical_shape_get_dim_int,", "def __invert__(self): return call('bit_not', self) # Represents an eltwise addition", "return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, self, rhs)) def __rmul__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, lhs,", "is None: raise ValueError('One of dims=, shape=, or expr= must", "def __neg__(self): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD,", "ptr: ffi_obj = ptr elif dtype is not None: raw_dims", "np import six from plaidml2 import DType from plaidml2.core import", "return call('bit_right', lhs, self) # Represents an eltwise bit_and def", "name='', buffer=None): self._name = name self._buffer = buffer if shape:", "= rhs._impl.op inputs = [x._impl for x in rhs._impl.args] else:", "a buffer value) # Illegal on other tensors ffi_call(lib.plaidml_expr_param_reset, self.__ffi_obj__,", "# Represents an eltwise bit_not def __invert__(self): return call('bit_not', self)", "args] return ffi_call(lib.plaidml_dim_expr_op, op, len(args), raw_args) class TensorDim(ForeignObject): \"\"\"Docstring for", "raise ValueError('Invalid impl') return _Contraction( agg_op, combo_op, inputs, self._impl, _SizeMap(self._tensor._dims),", "y, z) def select(cond, true_case, false_case): return call('cond', cond, true_case,", "rhs) def __rrshift__(self, lhs): return call('bit_right', lhs, self) # Represents", "call('cmp_ne', self, rhs) # Represents an eltwise cmp_lt def __lt__(self,", "None: if isinstance(value, six.integer_types): expr = ffi_call(lib.plaidml_expr_int, value) elif isinstance(value,", "def __init__(self, expr=None): if expr is None: expr = ffi_call(lib.plaidml_dim_expr_none)", "if isinstance(rhs._impl, _IndexMap): # Unary op combo_op = lib.PLAIDML_COMBO_OP_NONE inputs", 
"object pointer for a LogicalShape Returns: list (int): Integer dimensions", "a contraction def __eq__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ, (self, rhs))) def", "__init__(self, shape=None, dims=None, expr=None, value=None, name='', buffer=None): self._name = name", "one explicitly set to a buffer value) # Illegal on", "__repr__(self): return repr(self._impl) # Represents an aggregation_op of SUM in", "# Represents a combo_op of MULTIPLY in a contraction def", "== TensorRef(other).__hash__() return self.__hash__() == other.__hash__() class Value(ForeignObject): \"\"\"Docstring for", "or isinstance(key, list): idxs = key else: idxs = [key]", "lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, agg_op, combo_op, src_idxs, sink_idxs,", "MULTIPLY in a contraction def __mul__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL, (self,", "IndexedTensor\"\"\" def __init__(self, impl, tensor=None): self._impl = impl self._tensor =", "isinstance(key, tuple) or isinstance(key, list): idxs = key else: idxs", "return call('min', x, y) def pow(x, y): return call('pow', x,", "__ffi_repr__ = lib.plaidml_poly_expr_repr def __init__(self, expr=None, name=''): if expr is", "call('div', lhs, self) # Represents an eltwise cmp_eq def __eq__(self,", "return TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int, x)) return x def dim_op(op, *args): args =", "IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_EQ, (self, rhs))) def _make_contraction(self, agg_op, rhs): # Extract combo_op", "eltwise bit_right def __rshift__(self, rhs): return call('bit_right', self, rhs) def", "= ffi_call(lib.plaidml_value_int, value) elif isinstance(value, float): ffi_obj = ffi_call(lib.plaidml_value_float, value)", "into_TensorShape(self): return TensorShape( ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape, self.as_ptr())) Constraint = namedtuple('Constraint', ['lhs', 'rhs'])", 
"self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MAX, rhs)) # Represents an aggregation_op of MIN in a", "raise TypeError('Invalid type for value={}'.format(value)) elif expr is None: raise", "len(raw_dims), raw_dims) # bind a concrete shape to this tensor", "@property def inputs(self): return [x for x in self.args if", "def into_TensorShape(self): return TensorShape( ptr=ffi_call(lib.plaidml_logical_shape_into_tensor_shape, self.as_ptr())) Constraint = namedtuple('Constraint', ['lhs',", "DType.UINT8, 16: DType.UINT16, 32: DType.UINT32, 64: DType.UINT64, } dtype =", "dtype) def as_int(x, bit_size): map = { 8: DType.INT8, 16:", "bind(self, shape): ffi_call(lib.plaidml_expr_bind_shape, self.as_ptr(), shape.as_ptr()) class TensorRef: \"\"\"Docstring for class", "idxs = [wrap_poly(x) for x in idxs] raw_idxs = [x.as_ptr()", "else: value = value.tolist() if value is None: ffi_obj =", "Tensor): ffi_obj = ffi_call(lib.plaidml_value_expr, value.as_ptr()) elif isinstance(value, (list, tuple)): self._elts", "self, other)) def __rsub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, other, self)) def", "op, len(args), raw_args) class TensorIndex(ForeignObject): \"\"\"Docstring for class TensorIndex\"\"\" __ffi_del__", "other, self)) def wrap_poly(x): if isinstance(x, six.integer_types): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal, x))", "TensorDim(ForeignObject): \"\"\"Docstring for class TensorDim\"\"\" __ffi_del__ = lib.plaidml_dim_expr_free __ffi_repr__ =", "rhs) def __rand__(self, lhs): return call('bit_and', lhs, self) # Represents", "expr = ffi_call(lib.plaidml_expr_placeholder, shape.as_ptr(), raw_buffer, name.encode()) elif dims is not", "ref.as_ptr(), len(idxs), raw_idxs) super(_IndexMap, self).__init__(expr) class _SizeMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free", "Tensor\"\"\" __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr _dims = None", "ptr=None): if ptr: ffi_obj = ptr 
elif dtype is not", "len(args), raw_args) class TensorDim(ForeignObject): \"\"\"Docstring for class TensorDim\"\"\" __ffi_del__ =", "# Return the tensor's shape @property def shape(self): return LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape,", "dtype = map.get(bit_size) if not dtype: raise 'Unsupport bit_size for", "from plaidml2.ffi import ForeignObject, ffi, ffi_call, lib logger = logging.getLogger(__name__)", "round(x): return call('round', x) def scatter(x, y, z): return call('scatter',", "__add__(self, rhs): return call('add', self, rhs) def __radd__(self, lhs): return", "_ContractionPart = namedtuple('_ContractionPart', ['op', 'args']) class IndexedTensor(object): \"\"\"Docstring for class", "_ContractionPart): # Binary/Ternary op combo_op = rhs._impl.op inputs = [x._impl", "Value(ForeignObject): \"\"\"Docstring for class Value\"\"\" __ffi_del__ = lib.plaidml_value_free __ffi_repr__ =", "in self.args if x.is_input] @property def outputs(self): return [x for", "self, rhs) def __rsub__(self, lhs): return call('sub', lhs, self) #", "raise ValueError('One of dtype= or ptr= must be specified.') super(LogicalShape,", "constraint.rhs.as_ptr(), ) def add_constraints(self, constraints): for constraint in constraints: self.add_constraint(constraint)", "def __hash__(self): return hash((self.as_ptr(), self._dims, self._is_contraction)) def __getitem__(self, key): return", "inputs(self): return [x for x in self.args if x.is_input] @property", "def __radd__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, other, self)) def __sub__(self, other):", "bit_not def __invert__(self): return call('bit_not', self) # Represents an eltwise", "if not self._is_contraction: raise TypeError('no_reduce can only be specified on", "add_constraint(self, constraint): ffi_call( lib.plaidml_expr_contraction_add_constraint, self.as_ptr(), constraint.lhs.as_ptr(), constraint.rhs.as_ptr(), ) def add_constraints(self,", "bit_size): map = { 8: DType.UINT8, 16: DType.UINT16, 32: 
DType.UINT32,", "def __ge__(self, rhs): return call('cmp_ge', self, rhs) # Represents an", "buffer=None): self._name = name self._buffer = buffer if shape: if", "constraint.lhs.as_ptr(), constraint.rhs.as_ptr(), ) def add_constraints(self, constraints): for constraint in constraints:", "def __rtruediv__(self, lhs): return call('div', lhs, self) # Represents an", "x) def exp(x): return call('exp', x) def floor(x): return call('floor',", "import TensorShape, Buffer from plaidml2.ffi import ForeignObject, ffi, ffi_call, lib", "shape(self): return LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape, self.as_ptr())) # Verify that the specified dims", "ffi, ffi_call, lib logger = logging.getLogger(__name__) def __init(): \"\"\"Docstring for", "TypeError('Unsupported type {} for value={}'.format(type(value), value)) super(Value, self).__init__(ffi_obj) def as_tensor(self):", "for x in dims] raw_dims = [x.as_ptr() for x in", "raise 'Unsupport bit_size for as_uint' return cast(x, dtype) def ceil(x):", "shape.as_ptr()) class TensorRef: \"\"\"Docstring for class TensorRef\"\"\" def __init__(self, tensor):", "rhs) # Represents an eltwise cmp_gt def __gt__(self, rhs): return", "ffi_call( lib.plaidml_expr_gradient, len(wrts), wrts, loss.as_ptr(), raw_grads, ) return [Tensor(expr=x) for", "of a LogicalShape as a list. 
Args: self (pointer): The", "and inputs if isinstance(rhs._impl, _IndexMap): # Unary op combo_op =", "return call('bit_or', lhs, self) # Represents an eltwise bit_xor def", "x in raw_grads] def ident(x): return call('ident', x) def index(x,", "if isinstance(x, Tensor): return x raise TypeError('Unexpected type for call", "for x in dims]) ffi_obj = ffi_call(lib.plaidml_logical_shape_alloc, dtype, len(dims), raw_dims)", "TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, other, self)) def __floordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, self, other))", "= [rhs._impl] elif isinstance(rhs._impl, _ContractionPart): # Binary/Ternary op combo_op =", "super(TensorDim, self).__init__(expr) def _bind(self, expr): self.take_ptr(expr) def __neg__(self): return TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG,", "return call('sqrt', x) def tan(x): return call('tan', x) def tanh(x):", "buffer): # Changes the value of a parameter tensor (i.e.", "Set use_default on a contraction def use_default(self, rhs): if not", "__ffi_repr__ = lib.plaidml_expr_repr def __init__(self, dims): dims = [wrap_dim(x) for", "this tensor def bind(self, shape): ffi_call(lib.plaidml_expr_bind_shape, self.as_ptr(), shape.as_ptr()) class TensorRef:", "__floordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, self, other)) def __rfloordiv__(self, other): return", "return call('index', x, axis) def jacobian(loss, variables): wrts = [x.as_ptr()", "class IndexedTensor\"\"\" def __init__(self, impl, tensor=None): self._impl = impl self._tensor", "DType.BOOLEAN) def as_float(x, bit_size): map = { 16: DType.FLOAT16, 32:", "# Represents an eltwise division def __truediv__(self, rhs): return call('div',", "LogicalShape. 
\"\"\" return [ ffi_call(lib.plaidml_logical_shape_get_dim_int, self.as_ptr(), i) for i in", "expr = None elif value is not None: if isinstance(value,", "len(wrts)) ffi_call( lib.plaidml_expr_gradient, len(wrts), wrts, loss.as_ptr(), raw_grads, ) return [Tensor(expr=x)", "map.get(bit_size) if not dtype: raise 'Unsupport bit_size for as_float' return", "(self, rhs))) # Represents a combo_op of EQ in a", "tensor's shape @property def shape(self): return LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape, self.as_ptr())) # Verify", "x in self.args if not x.is_input] def wrap_tensor(x): if isinstance(x,", "true_case): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND, (lhs, rhs, true_case))) def cos(x): return call('cos',", "x, y, z) def select(cond, true_case, false_case): return call('cond', cond,", "Corporation. import logging from collections import namedtuple import numpy as", "for value={}'.format(type(value), value)) super(Value, self).__init__(ffi_obj) def as_tensor(self): return Tensor(expr=ffi_call(lib.plaidml_value_expr_get, self.as_ptr()))", "if buffer is None: raw_buffer = ffi.NULL else: raw_buffer =", "outputs] dst_updates = [x[0].as_ptr() for x in updates] src_updates =", "call('bit_or', self, rhs) def __ror__(self, lhs): return call('bit_or', lhs, self)", "self, rhs) def __rxor__(self, lhs): return call('bit_xor', lhs, self) #", "TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, lhs, self)) def __sub__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, self, rhs))", "rhs): return call('add', self, rhs) def __radd__(self, lhs): return call('add',", "cast(x, DType.BOOLEAN) def as_float(x, bit_size): map = { 16: DType.FLOAT16,", "ForeignObject, ffi, ffi_call, lib logger = logging.getLogger(__name__) def __init(): \"\"\"Docstring", "Tensor (Type: {})'.format( type(value._impl))) def _set_contraction(self, cion): self._is_contraction = True", "isinstance(value, float): ffi_obj = ffi_call(lib.plaidml_value_float, value) elif 
isinstance(value, TensorDim): ffi_obj", "call('bit_or', lhs, self) # Represents an eltwise bit_xor def __xor__(self,", "updates] raw_args = ffi.new('plaidml_program_args**') ffi_obj = ffi_call( lib.plaidml_program_evaluate, name.encode(), len(raw_outputs),", "isinstance(rhs._impl, _ContractionPart): # Binary/Ternary op combo_op = rhs._impl.op inputs =", "an aggregation_op of MIN in a contraction def __le__(self, rhs):", "rhs) def __rmul__(self, lhs): return call('mul', lhs, self) # Represents", "Tensor(expr=ffi_call(lib.plaidml_value_expr_get, self.as_ptr())) def TensorOutput(*args): return Tensor(dims=args) def TensorDims(count): return [TensorDim()", "__imul__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_PROD, rhs)) # Represents an aggregation_op of", "arg.buffer: tensor_shape = self.shape.into_TensorShape() self.buffer = Buffer(tensor_shape, ptr=ffi_call(lib.plaidml_buffer_clone, arg.buffer)) else:", "return Tensor(expr=ffi_call(lib.plaidml_expr_int, x)) if np.issubdtype(type(x), np.integer): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x.item())) if", "as np import six from plaidml2 import DType from plaidml2.core", "true_case, false_case): return call('cond', cond, true_case, false_case) def shape(x): return", "[x for x in self.args if x.is_input] @property def outputs(self):", "expr = ffi_call(lib.plaidml_expr_int, value) elif isinstance(value, float): expr = ffi_call(lib.plaidml_expr_float,", "rhs): return call('cmp_eq', self, rhs) # Represents an eltwise cmp_ne", "return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND, (lhs, rhs, true_case))) def cos(x): return call('cos', x)", "(self, rhs))) # Represents a combo_op of MULTIPLY in a", "IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_COND, (lhs, rhs, true_case))) def cos(x): return call('cos', x) def", "__truediv__(self, rhs): return call('div', self, rhs) def __rtruediv__(self, lhs): return", "True self.take_ptr(cion) # Represents an eltwise negation def 
__neg__(self): return", "lhs, self) # Represents an eltwise division def __div__(self, rhs):", "lib.plaidml_value_repr def __init__(self, value): # logger.debug('Value({})'.format(value)) if isinstance(value, np.ndarray): if", "__ffi_del__ = lib.plaidml_program_free __ffi_repr__ = lib.plaidml_program_repr def __init__(self, name, outputs,", "DType.FLOAT64, } dtype = map.get(bit_size) if not dtype: raise 'Unsupport", "type for call argument: {}. fn: {}, args: {}, bad", "rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD, (self, rhs))) # Represents a combo_op of", "'rhs']) def wrap_dim(x): if isinstance(x, six.integer_types): return TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int, x)) return", "__le__(self, rhs): return call('cmp_le', self, rhs) # Represents an eltwise", "[rhs._impl] elif isinstance(rhs._impl, _ContractionPart): # Binary/Ternary op combo_op = rhs._impl.op", "= [x.as_ptr() for x in idxs] expr = ffi_call(lib.plaidml_expr_index_map, ref.as_ptr(),", "add_constraints(self, constraints): for constraint in constraints: self.add_constraint(constraint) # Return the", "(list, tuple)): self._elts = [Value(x) for x in value] raw_elts", "raw_args = ffi.new('plaidml_program_args**') ffi_obj = ffi_call( lib.plaidml_program_evaluate, name.encode(), len(raw_outputs), raw_outputs,", "dtype, len(dims), raw_dims) else: raise ValueError('One of dtype= or ptr=", "return [Tensor(expr=x) for x in raw_grads] def ident(x): return call('ident',", "rhs) # Represents an eltwise cmp_ne def __ne__(self, rhs): return", "TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, lhs, self)) def __mul__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, self, rhs))", "= TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone, arg.tensor))) self.shape = LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone, arg.shape)) if arg.buffer: tensor_shape", "return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, self, other)) def __rmul__(self, other): return 
TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, other,", "select(cond, true_case, false_case): return call('cond', cond, true_case, false_case) def shape(x):", "is None: expr = ffi_call(lib.plaidml_dim_expr_none) super(TensorDim, self).__init__(expr) def _bind(self, expr):", "[x._impl for x in rhs._impl.args] else: raise ValueError('Invalid impl') return", "value = value.item() else: value = value.tolist() if value is", "aggregation_op of MIN in a contraction def __le__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN,", "for class TensorRef\"\"\" def __init__(self, tensor): self.tensor = tensor def", "x) def floor(x): return call('floor', x) def gather(x, y): return", "if arg.buffer: tensor_shape = self.shape.into_TensorShape() self.buffer = Buffer(tensor_shape, ptr=ffi_call(lib.plaidml_buffer_clone, arg.buffer))", "isinstance(key, list): idxs = key else: idxs = [key] idxs", "MIN in a contraction def __le__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN, rhs)) #", "# Changes the value of a parameter tensor (i.e. 
one", "args = [wrap_poly(x) for x in args] raw_args = [x.as_ptr()", "ValueError('Invalid impl') return _Contraction( agg_op, combo_op, inputs, self._impl, _SizeMap(self._tensor._dims), self._tensor._name,", "not dtype: raise 'Unsupport bit_size for as_uint' return cast(x, dtype)", "self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, lib.PLAIDML_COMBO_OP_NONE, [value._impl], _IndexMap(self, key), _SizeMap(self._dims), self._name, ))", "__lt__(self, rhs): return call('cmp_lt', self, rhs) # Represents an eltwise", "class TensorIndex(ForeignObject): \"\"\"Docstring for class TensorIndex\"\"\" __ffi_del__ = lib.plaidml_poly_expr_free __ffi_repr__", "raw_grads] def log(x): return call('log', x) def max(x, y): return", "for x in dims] ffi_call(lib.plaidml_expr_bind_dims, self.as_ptr(), len(raw_dims), raw_dims) # bind", "== 0: value = value.item() else: value = value.tolist() if", "self.shape = LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone, arg.shape)) if arg.buffer: tensor_shape = self.shape.into_TensorShape() self.buffer", "if isinstance(value, six.integer_types): expr = ffi_call(lib.plaidml_expr_int, value) elif isinstance(value, float):", "__gt__(self, rhs): return call('cmp_gt', self, rhs) # Represents an eltwise", "32: DType.UINT32, 64: DType.UINT64, } dtype = map.get(bit_size) if not", "if isinstance(x, six.integer_types): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal, x)) if isinstance(x, TensorDim): return", "= ffi_call( lib.plaidml_expr_contraction, agg_op, combo_op, sink_idxs.as_ptr(), sink_sizes.as_ptr(), len(src_idxs), src_idxs, name.encode(),", "def __add__(self, rhs): return call('add', self, rhs) def __radd__(self, lhs):", "dims is not None: self._dims = dims expr = None", "_Contraction( lib.PLAIDML_AGG_OP_ASSIGN, value._impl.op, [x._impl for x in value._impl.args], _IndexMap(self, key),", "in self.args if not x.is_input] def wrap_tensor(x): if isinstance(x, six.integer_types):", "isinstance(value, Tensor): ffi_obj = 
ffi_call(lib.plaidml_value_expr, value.as_ptr()) elif isinstance(value, (list, tuple)):", "elif expr is None: raise ValueError('One of dims=, shape=, or", "rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, self, rhs)) def __rfloordiv__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV,", "return [TensorIndex() for i in range(count)] class ProgramArgument: \"\"\"Docstring for", "tensor): self.tensor = tensor def __hash__(self): return hash(ffi_call(lib.plaidml_expr_ptr, self.tensor.as_ptr())) def", "Returns: list (int): Integer dimensions of the LogicalShape. \"\"\" return", "if not dtype: raise 'Unsupport bit_size for as_uint' return cast(x,", "= map.get(bit_size) if not dtype: raise 'Unsupport bit_size for as_int'", "name, outputs, updates=[]): raw_outputs = [x.as_ptr() for x in outputs]", "not dtype: raise 'Unsupport bit_size for as_float' return cast(x, dtype)", "# Verify that the specified dims match the dims of", "raise TypeError('no_reduce can only be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_no_reduce,", "an eltwise bit_and def __and__(self, rhs): return call('bit_and', self, rhs)", "lhs): return call('bit_or', lhs, self) # Represents an eltwise bit_xor", "specified dims match the dims of this tensor. 
def bind_dims(self,", "sink_idxs, sink_sizes, name): src_idxs = [x.as_ptr() for x in src_idxs]", "self, rhs) # Represents an eltwise cmp_ne def __ne__(self, rhs):", "[Tensor(expr=x) for x in raw_grads] def ident(x): return call('ident', x)", "64: DType.INT64, } dtype = map.get(bit_size) if not dtype: raise", "def min(x, y): return call('min', x, y) def pow(x, y):", "Tensor): return x raise TypeError('Unexpected type for call argument: {}.", "the tensor's shape @property def shape(self): return LogicalShape(ptr=ffi_call(lib.plaidml_expr_get_shape, self.as_ptr())) #", "value.ndim == 0: value = value.item() else: value = value.tolist()", "dst_updates = [x[0].as_ptr() for x in updates] src_updates = [x[1].as_ptr()", "raw_args = [x.as_ptr() for x in args] return ffi_call(lib.plaidml_dim_expr_op, op,", "def __lt__(self, rhs): return Constraint(self, wrap_dim(rhs)) def __neg__(self): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG,", "elif isinstance(value, (list, tuple)): self._elts = [Value(x) for x in", "dims] expr = ffi_call(lib.plaidml_expr_size_map, len(dims), raw_dims) super(_SizeMap, self).__init__(expr) class _Contraction(ForeignObject):", "y) def min(x, y): return call('min', x, y) def pow(x,", "return cast(x, DType.BOOLEAN) def as_float(x, bit_size): map = { 16:", "return call('scatter', x, y, z) def select(cond, true_case, false_case): return", "impl self._tensor = tensor def __repr__(self): return repr(self._impl) # Represents", "return repr(self._impl) # Represents an aggregation_op of SUM in a", "for i in range(count)] class ProgramArgument: \"\"\"Docstring for class ProgramArgument\"\"\"", "return Tensor(expr=ffi_call(lib.plaidml_expr_dim, x.as_ptr())) if isinstance(x, Tensor): return x raise TypeError('Unexpected", "z): return call('scatter', x, y, z) def select(cond, true_case, false_case):", "'args']) class IndexedTensor(object): \"\"\"Docstring for class IndexedTensor\"\"\" def __init__(self, impl,", "rhs): return call('bit_or', self, rhs) def 
__ror__(self, lhs): return call('bit_or',", "# Represents an eltwise addition def __add__(self, rhs): return call('add',", "rhs): # Extract combo_op and inputs if isinstance(rhs._impl, _IndexMap): #", "Represents an eltwise negation def __neg__(self): return call('neg', self) #", "dtype=None, dims=[], ptr=None): if ptr: ffi_obj = ptr elif dtype", "def TensorOutput(*args): return Tensor(dims=args) def TensorDims(count): return [TensorDim() for i", "lib.plaidml_dim_expr_free __ffi_repr__ = lib.plaidml_dim_expr_repr def __init__(self, expr=None): if expr is", "# Represents an eltwise cmp_le def __le__(self, rhs): return call('cmp_le',", "raise TypeError('use_default can only be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_use_default,", "shape(x): return call('shape', x) def sin(x): return call('sin', x) def", "in variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_gradient, len(wrts), wrts,", "super(_IndexMap, self).__init__(expr) class _SizeMap(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr", "__rmul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, other, self)) def __floordiv__(self, other): return", "eltwise negation def __neg__(self): return call('neg', self) # Represents an", "return call('bit_right', self, rhs) def __rrshift__(self, lhs): return call('bit_right', lhs,", "[0 if x is None else x for x in", "self) # Represents an eltwise bit_right def __rshift__(self, rhs): return", "self.as_ptr(), rhs.as_ptr()) return self def add_constraint(self, constraint): ffi_call( lib.plaidml_expr_contraction_add_constraint, self.as_ptr(),", "for x in variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_jacobian,", "be specified.') super(Tensor, self).__init__(expr) def set_param_value(self, buffer): # Changes the", "an eltwise cmp_gt def __gt__(self, rhs): return call('cmp_gt', self, rhs)", "def __rmul__(self, lhs): 
return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self)) def __floordiv__(self, rhs):", "shape): ffi_call(lib.plaidml_expr_bind_shape, self.as_ptr(), shape.as_ptr()) class TensorRef: \"\"\"Docstring for class TensorRef\"\"\"", "value._impl.args], _IndexMap(self, key), _SizeMap(self._dims), self._name, )) else: raise ValueError('Invalid impl", "updates] src_updates = [x[1].as_ptr() for x in updates] raw_args =", "if isinstance(value._impl, _Contraction): # standard contraction self._set_contraction(value._impl) elif isinstance(value, Tensor):", "return call('ident', x) def index(x, axis): return call('index', x, axis)", "x) def scatter(x, y, z): return call('scatter', x, y, z)", "return [x for x in self.args if x.is_input] @property def", "ffi_call, lib logger = logging.getLogger(__name__) def __init(): \"\"\"Docstring for function", "# Represents an eltwise multiplication def __mul__(self, rhs): return call('mul',", "= namedtuple('_ContractionPart', ['op', 'args']) class IndexedTensor(object): \"\"\"Docstring for class IndexedTensor\"\"\"", "for as_int' return cast(x, dtype) def as_uint(x, bit_size): map =", "= ffi_call(lib.plaidml_expr_float, value) else: raise TypeError('Invalid type for value={}'.format(value)) elif", "= [x._impl for x in rhs._impl.args] else: raise ValueError('Invalid impl')", "# Illegal on other tensors ffi_call(lib.plaidml_expr_param_reset, self.__ffi_obj__, buffer.as_ptr()) def __hash__(self):", "isinstance(value, six.string_types): ffi_obj = ffi_call(lib.plaidml_value_str, value.encode('utf-8')) elif isinstance(value, ffi.CData) and", "dimensions of the LogicalShape. \"\"\" return [ ffi_call(lib.plaidml_logical_shape_get_dim_int, self.as_ptr(), i)", "op combo_op = lib.PLAIDML_COMBO_OP_NONE inputs = [rhs._impl] elif isinstance(rhs._impl, _ContractionPart):", "six.integer_types): expr = ffi_call(lib.plaidml_expr_int, value) elif isinstance(value, float): expr =", "the dims of this tensor. 
def bind_dims(self, *dims): raw_dims =", "rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_MUL, (self, rhs))) # Represents a combo_op of", "an eltwise bit_xor def __xor__(self, rhs): return call('bit_xor', self, rhs)", "= lib.plaidml_dim_expr_free __ffi_repr__ = lib.plaidml_dim_expr_repr def __init__(self, expr=None): if expr", "def set_param_value(self, buffer): # Changes the value of a parameter", "x in value] raw_elts = [x.as_ptr() for x in self._elts]", "in args] raw_args = [x.as_ptr() for x in args] return", "self, rhs)) def __rmul__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, lhs, self)) def", "specified.') super(Tensor, self).__init__(expr) def set_param_value(self, buffer): # Changes the value", "__floordiv__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_DIV, self, rhs)) def __rfloordiv__(self, lhs): return", "raw_buffer, name.encode()) elif dims is not None: self._dims = dims", "= lib.plaidml_program_repr def __init__(self, name, outputs, updates=[]): raw_outputs = [x.as_ptr()", "for call argument: {}. 
fn: {}, args: {}, bad arg:", "_SizeMap(self._dims), self._name, )) elif isinstance(value._impl, _ContractionPart): # Binary or ternary", "__eq__(self, other): if isinstance(other, Tensor): return self.__hash__() == TensorRef(other).__hash__() return", "return call('exp', x) def floor(x): return call('floor', x) def gather(x,", "y, z): return call('scatter', x, y, z) def select(cond, true_case,", "raw_dims) # bind a concrete shape to this tensor def", "= arg.is_input self.ref = TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone, arg.tensor))) self.shape = LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone, arg.shape))", "def _make_contraction(self, agg_op, rhs): # Extract combo_op and inputs if", "ffi_call(lib.plaidml_expr_contraction_set_use_default, self.as_ptr(), rhs.as_ptr()) return self def add_constraint(self, constraint): ffi_call( lib.plaidml_expr_contraction_add_constraint,", "elif isinstance(value, Tensor): pass elif isinstance(value._impl, _IndexMap): # Unary ASSIGN", "for x in args] return Tensor(expr=ffi_call(lib.plaidml_expr_call, fn.encode(), len(args), raw_args)) def", "rhs, true_case))) def cos(x): return call('cos', x) def exp(x): return", "if expr is None: expr = ffi_call(lib.plaidml_dim_expr_none) super(TensorDim, self).__init__(expr) def", "combo_op, src_idxs, sink_idxs, sink_sizes, name): src_idxs = [x.as_ptr() for x", "buffer.as_ptr() expr = ffi_call(lib.plaidml_expr_placeholder, shape.as_ptr(), raw_buffer, name.encode()) elif dims is", "ffi_obj = value else: raise TypeError('Unsupported type {} for value={}'.format(type(value),", "return call('neg', self) # Represents an eltwise bit_not def __invert__(self):", "expr = ffi_call(lib.plaidml_poly_expr_index, name.encode()) super(TensorIndex, self).__init__(expr) def __lt__(self, rhs): return", "rhs): return call('cmp_lt', self, rhs) # Represents an eltwise cmp_gt", "call('bit_left', lhs, self) # Represents an eltwise bit_right def __rshift__(self,", "__init__(self, arg): self.is_input 
= arg.is_input self.ref = TensorRef(Tensor(expr=ffi_call(lib.plaidml_expr_clone, arg.tensor))) self.shape", "other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, other, self)) def wrap_poly(x): if isinstance(x, six.integer_types):", "call('cmp_ge', self, rhs) # Represents an eltwise bit_left def __lshift__(self,", "__ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, ref, key):", "combo_op, sink_idxs.as_ptr(), sink_sizes.as_ptr(), len(src_idxs), src_idxs, name.encode(), ) super(_Contraction, self).__init__(expr) _ContractionPart", "x in args] return ffi_call(lib.plaidml_poly_expr_op, op, len(args), raw_args) class TensorIndex(ForeignObject):", "gather(x, y): return call('gather', x, y) def gradients(loss, variables): wrts", "if x is None else x for x in dims])", "Represents an eltwise bit_not def __invert__(self): return call('bit_not', self) #", "= lib.plaidml_expr_repr def __init__(self, dims): dims = [wrap_dim(x) for x", "an aggregation_op of SUM in a contraction def __iadd__(self, rhs):", "== other.__hash__() class Value(ForeignObject): \"\"\"Docstring for class Value\"\"\" __ffi_del__ =", "= [wrap_poly(x) for x in args] raw_args = [x.as_ptr() for", "be specified on a contraction.') ffi_call(lib.plaidml_expr_contraction_set_no_reduce, self.as_ptr(), True) return self", "call(fn, *args): args = [wrap_tensor(x) for x in args] raw_args", "self, other)) def __rmul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, other, self)) def", "isinstance(rhs._impl, _IndexMap): # Unary op combo_op = lib.PLAIDML_COMBO_OP_NONE inputs =", "LogicalShape(ptr=ffi_call(lib.plaidml_logical_shape_clone, arg.shape)) if arg.buffer: tensor_shape = self.shape.into_TensorShape() self.buffer = Buffer(tensor_shape,", "def __init(): \"\"\"Docstring for function plaidml2.edsl.__init\"\"\" ffi_call(lib.plaidml_edsl_init) ffi.init_once(__init, 'plaidml_edsl_init') class", "return call('cmp_gt', self, rhs) # Represents an eltwise cmp_le 
def", "value): if isinstance(value._impl, _Contraction): # standard contraction self._set_contraction(value._impl) elif isinstance(value,", "rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, self, rhs)) def __radd__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD,", "of PLUS in a contraction def __add__(self, rhs): return IndexedTensor(_ContractionPart(lib.PLAIDML_COMBO_OP_ADD,", "rhs)) def __radd__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, lhs, self)) def __sub__(self,", "if isinstance(x, six.integer_types): return Tensor(expr=ffi_call(lib.plaidml_expr_int, x)) if np.issubdtype(type(x), np.integer): return", "_Contraction(ForeignObject): __ffi_del__ = lib.plaidml_expr_free __ffi_repr__ = lib.plaidml_expr_repr def __init__(self, agg_op,", "tensor_shape = self.shape.into_TensorShape() self.buffer = Buffer(tensor_shape, ptr=ffi_call(lib.plaidml_buffer_clone, arg.buffer)) else: self.buffer", "x in args] return ffi_call(lib.plaidml_dim_expr_op, op, len(args), raw_args) class TensorDim(ForeignObject):", "__iadd__(self, rhs): return IndexedTensor(self._make_contraction(lib.PLAIDML_AGG_OP_SUM, rhs)) # Represents an aggregation_op of", "return DType(ffi_call(lib.plaidml_logical_shape_get_dtype, self.as_ptr())) @property def ndims(self): return ffi_call(lib.plaidml_logical_shape_get_ndims, self.as_ptr()) @property", "value={}'.format(value)) elif expr is None: raise ValueError('One of dims=, shape=,", "in args] return ffi_call(lib.plaidml_dim_expr_op, op, len(args), raw_args) class TensorDim(ForeignObject): \"\"\"Docstring", "= ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_jacobian, len(wrts), wrts, loss.as_ptr(), raw_grads, )", "= logging.getLogger(__name__) def __init(): \"\"\"Docstring for function plaidml2.edsl.__init\"\"\" ffi_call(lib.plaidml_edsl_init) ffi.init_once(__init,", "self, rhs) # Represents an eltwise cmp_le def __le__(self, rhs):", "x, y) def prng(state, shape): return call('prng', state, *shape) 
def", "DType.FLOAT16, 32: DType.FLOAT32, 64: DType.FLOAT64, } dtype = map.get(bit_size) if", "for constraint in constraints: self.add_constraint(constraint) # Return the tensor's shape", "import six from plaidml2 import DType from plaidml2.core import TensorShape,", "def __rdiv__(self, lhs): return call('div', lhs, self) # Represents an", "y): return call('pow', x, y) def prng(state, shape): return call('prng',", "on other tensors ffi_call(lib.plaidml_expr_param_reset, self.__ffi_obj__, buffer.as_ptr()) def __hash__(self): return hash((self.as_ptr(),", "= True self.take_ptr(cion) # Represents an eltwise negation def __neg__(self):", "def __init__(self, name, outputs, updates=[]): raw_outputs = [x.as_ptr() for x", "x in variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_jacobian, len(wrts),", "__getitem__(self, key): return IndexedTensor(_IndexMap(self, key), tensor=self) def __setitem__(self, key, value):", "return Tensor(expr=ffi_call(lib.plaidml_value_expr_get, self.as_ptr())) def TensorOutput(*args): return Tensor(dims=args) def TensorDims(count): return", "dims): dims = [wrap_dim(x) for x in dims] raw_dims =", "return self.__hash__() == TensorRef(other).__hash__() return self.__hash__() == other.__hash__() class Value(ForeignObject):", "[ ffi_call(lib.plaidml_logical_shape_get_dim_int, self.as_ptr(), i) for i in range(self.ndims) ] def", "__init__(self, agg_op, combo_op, src_idxs, sink_idxs, sink_sizes, name): src_idxs = [x.as_ptr()", "a LogicalShape as a list. 
Args: self (pointer): The object", "bit_or def __or__(self, rhs): return call('bit_or', self, rhs) def __ror__(self,", "_IndexMap): # Unary op combo_op = lib.PLAIDML_COMBO_OP_NONE inputs = [rhs._impl]", "16: DType.UINT16, 32: DType.UINT32, 64: DType.UINT64, } dtype = map.get(bit_size)", "x in src_idxs] expr = ffi_call( lib.plaidml_expr_contraction, agg_op, combo_op, sink_idxs.as_ptr(),", "lhs, self)) def __sub__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, self, rhs)) def", "x)) if isinstance(x, TensorDim): return Tensor(expr=ffi_call(lib.plaidml_expr_dim, x.as_ptr())) if isinstance(x, Tensor):", "__div__(self, rhs): return call('div', self, rhs) def __rdiv__(self, lhs): return", "ptr=ffi_call(lib.plaidml_buffer_clone, arg.buffer)) else: self.buffer = None class Program(ForeignObject): \"\"\"Docstring for", "is None: ffi_obj = ffi_call(lib.plaidml_value_none) elif isinstance(value, (six.integer_types, bool)): ffi_obj", "DType from plaidml2.core import TensorShape, Buffer from plaidml2.ffi import ForeignObject,", "= ffi_call(lib.plaidml_value_expr, value.as_ptr()) elif isinstance(value, (list, tuple)): self._elts = [Value(x)", "return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, other, self)) def __sub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, self,", "elif isinstance(value, float): expr = ffi_call(lib.plaidml_expr_float, value) else: raise TypeError('Invalid", "= [x.as_ptr() for x in args] return ffi_call(lib.plaidml_dim_expr_op, op, len(args),", "call('index', x, axis) def jacobian(loss, variables): wrts = [x.as_ptr() for", "return call('max', x, y) def min(x, y): return call('min', x,", "Represents an eltwise bit_or def __or__(self, rhs): return call('bit_or', self,", "return TensorIndex(poly_op(lib.PLAIDML_INT_OP_SUB, lhs, self)) def __mul__(self, rhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_MUL, self,", "self.shape.into_TensorShape() self.buffer = Buffer(tensor_shape, ptr=ffi_call(lib.plaidml_buffer_clone, arg.buffer)) 
else: self.buffer = None", "x in updates] raw_args = ffi.new('plaidml_program_args**') ffi_obj = ffi_call( lib.plaidml_program_evaluate,", "= lib.plaidml_expr_repr def __init__(self, agg_op, combo_op, src_idxs, sink_idxs, sink_sizes, name):", "[x[0].as_ptr() for x in updates] src_updates = [x[1].as_ptr() for x", "for x in value._impl.args], _IndexMap(self, key), _SizeMap(self._dims), self._name, )) else:", "in idxs] expr = ffi_call(lib.plaidml_expr_index_map, ref.as_ptr(), len(idxs), raw_idxs) super(_IndexMap, self).__init__(expr)", "rhs) # Represents an eltwise cmp_le def __le__(self, rhs): return", "rhs) def __radd__(self, lhs): return call('add', lhs, self) # Represents", "__rfloordiv__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_DIV, other, self)) def wrap_poly(x): if isinstance(x,", "map = { 8: DType.UINT8, 16: DType.UINT16, 32: DType.UINT32, 64:", "self._tensor._name, ) class Tensor(ForeignObject): \"\"\"Docstring for class Tensor\"\"\" __ffi_del__ =", "LogicalShape as a list. 
Args: self (pointer): The object pointer", "self.take_ptr(expr) def __neg__(self): return TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, other): return", "self._set_contraction(value._impl) elif isinstance(value, Tensor): pass elif isinstance(value._impl, _IndexMap): # Unary", "return ffi_call(lib.plaidml_dim_expr_op, op, len(args), raw_args) class TensorDim(ForeignObject): \"\"\"Docstring for class", "an eltwise division def __div__(self, rhs): return call('div', self, rhs)", "def TensorIndexes(count): return [TensorIndex() for i in range(count)] class ProgramArgument:", "16: DType.INT16, 32: DType.INT32, 64: DType.INT64, } dtype = map.get(bit_size)", ") self.args = [ProgramArgument(raw_args[0].args[i]) for i in range(raw_args[0].nargs)] ffi_call(lib.plaidml_program_args_free, raw_args[0])", "for class LogicalShape\"\"\" __ffi_del__ = lib.plaidml_logical_shape_free __ffi_repr__ = lib.plaidml_logical_shape_repr def", "eltwise bit_not def __invert__(self): return call('bit_not', self) # Represents an", "self.add_constraint(constraint) # Return the tensor's shape @property def shape(self): return", "# Represents a combo_op of PLUS in a contraction def", "def pow(x, y): return call('pow', x, y) def prng(state, shape):", "def __mul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, self, other)) def __rmul__(self, other):", "= map.get(bit_size) if not dtype: raise 'Unsupport bit_size for as_uint'", "dims] raw_dims = [x.as_ptr() for x in dims] expr =", "return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_dim, x.as_ptr())) return x def poly_op(op, *args): args =", "_dims = None _is_contraction = False def __init__(self, shape=None, dims=None,", "else: raise ValueError('Invalid impl') return _Contraction( agg_op, combo_op, inputs, self._impl,", "8: DType.INT8, 16: DType.INT16, 32: DType.INT32, 64: DType.INT64, } dtype", "DType.FLOAT32, 64: DType.FLOAT64, } dtype = map.get(bit_size) if not dtype:", "pow(x, y): return call('pow', x, y) def 
prng(state, shape): return", "eltwise cmp_gt def __gt__(self, rhs): return call('cmp_gt', self, rhs) #", "and ffi.typeof(value) is ffi.typeof('plaidml_value*'): ffi_obj = value else: raise TypeError('Unsupported", "None _is_contraction = False def __init__(self, shape=None, dims=None, expr=None, value=None,", "Constraint(self, wrap_dim(rhs)) def __neg__(self): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, rhs):", "src_updates = [x[1].as_ptr() for x in updates] raw_args = ffi.new('plaidml_program_args**')", "len(wrts), wrts, loss.as_ptr(), raw_grads, ) return [Tensor(expr=x) for x in", "rhs._impl.args] else: raise ValueError('Invalid impl') return _Contraction( agg_op, combo_op, inputs,", "def __ror__(self, lhs): return call('bit_or', lhs, self) # Represents an", "= [wrap_poly(x) for x in idxs] raw_idxs = [x.as_ptr() for", "contraction self._set_contraction(value._impl) elif isinstance(value, Tensor): pass elif isinstance(value._impl, _IndexMap): #", "tensor def __repr__(self): return repr(self._impl) # Represents an aggregation_op of", "ffi_obj = ffi_call(lib.plaidml_logical_shape_alloc, dtype, len(dims), raw_dims) else: raise ValueError('One of", "def __setitem__(self, key, value): if isinstance(value._impl, _Contraction): # standard contraction", "value): # logger.debug('Value({})'.format(value)) if isinstance(value, np.ndarray): if value.ndim == 0:", "(lhs, rhs, true_case))) def cos(x): return call('cos', x) def exp(x):", "= lib.plaidml_value_free __ffi_repr__ = lib.plaidml_value_repr def __init__(self, value): # logger.debug('Value({})'.format(value))", "six.integer_types): return TensorDim(expr=ffi_call(lib.plaidml_dim_expr_int, x)) return x def dim_op(op, *args): args", "elif isinstance(value, TensorDim): ffi_obj = ffi_call(lib.plaidml_value_dim, value.as_ptr()) elif isinstance(value, Tensor):", "for class IndexedTensor\"\"\" def __init__(self, impl, tensor=None): self._impl = impl", "def __gt__(self, rhs): return call('cmp_gt', 
self, rhs) # Represents an", "def ceil(x): return call('ceil', x) def cond(lhs, rhs, true_case): return", "None: raw_dims = ffi.new('int64_t[]', [0 if x is None else", "ffi_call(lib.plaidml_expr_placeholder, shape.as_ptr(), raw_buffer, name.encode()) elif dims is not None: self._dims", "rhs)) # Represents an aggregation_op of PROD in a contraction", "y) def pow(x, y): return call('pow', x, y) def prng(state,", "# Represents an eltwise cmp_lt def __lt__(self, rhs): return call('cmp_lt',", "return call('cmp_ge', self, rhs) # Represents an eltwise bit_left def", "combo_op and inputs if isinstance(rhs._impl, _IndexMap): # Unary op combo_op", "self.as_ptr())) @property def ndims(self): return ffi_call(lib.plaidml_logical_shape_get_ndims, self.as_ptr()) @property def int_dims(self):", "for class TensorIndex\"\"\" __ffi_del__ = lib.plaidml_poly_expr_free __ffi_repr__ = lib.plaidml_poly_expr_repr def", "lib.plaidml_poly_expr_free __ffi_repr__ = lib.plaidml_poly_expr_repr def __init__(self, expr=None, name=''): if expr", "ffi_obj = ffi_call( lib.plaidml_program_evaluate, name.encode(), len(raw_outputs), raw_outputs, len(updates), src_updates, dst_updates,", "call('sqrt', x) def tan(x): return call('tan', x) def tanh(x): return", "Represents an eltwise bit_right def __rshift__(self, rhs): return call('bit_right', self,", "if not self._is_contraction: raise TypeError('use_default can only be specified on", "combo_op = lib.PLAIDML_COMBO_OP_NONE inputs = [rhs._impl] elif isinstance(rhs._impl, _ContractionPart): #", "None: self._dims = dims expr = None elif value is", "for x in args] return ffi_call(lib.plaidml_poly_expr_op, op, len(args), raw_args) class", "in a contraction def __le__(self, rhs): self._tensor._set_contraction(self._make_contraction(lib.PLAIDML_AGG_OP_MIN, rhs)) # Represents", "in dims]) ffi_obj = ffi_call(lib.plaidml_logical_shape_alloc, dtype, len(dims), raw_dims) else: raise", "ProgramArgument: \"\"\"Docstring for class ProgramArgument\"\"\" def 
__init__(self, arg): self.is_input =", "ffi_obj = ffi_call(lib.plaidml_value_expr, value.as_ptr()) elif isinstance(value, (list, tuple)): self._elts =", "x def dim_op(op, *args): args = [wrap_dim(x) for x in", "logging from collections import namedtuple import numpy as np import", "= [Value(x) for x in value] raw_elts = [x.as_ptr() for", "} dtype = map.get(bit_size) if not dtype: raise 'Unsupport bit_size", "as_int' return cast(x, dtype) def as_uint(x, bit_size): map = {", "lhs, self) # Represents an eltwise bit_xor def __xor__(self, rhs):", "dtype= or ptr= must be specified.') super(LogicalShape, self).__init__(ffi_obj) @property def", "import numpy as np import six from plaidml2 import DType", "rhs.as_ptr()) return self def add_constraint(self, constraint): ffi_call( lib.plaidml_expr_contraction_add_constraint, self.as_ptr(), constraint.lhs.as_ptr(),", "__hash__(self): return hash(ffi_call(lib.plaidml_expr_ptr, self.tensor.as_ptr())) def __eq__(self, other): if isinstance(other, Tensor):", "variables] raw_grads = ffi.new('plaidml_expr*[]', len(wrts)) ffi_call( lib.plaidml_expr_jacobian, len(wrts), wrts, loss.as_ptr(),", "__ne__(self, rhs): return call('cmp_ne', self, rhs) # Represents an eltwise", "__lshift__(self, rhs): return call('bit_left', self, rhs) def __rlshift__(self, lhs): return", "def __init__(self, agg_op, combo_op, src_idxs, sink_idxs, sink_sizes, name): src_idxs =", "= ffi_call(lib.plaidml_logical_shape_alloc, dtype, len(dims), raw_dims) else: raise ValueError('One of dtype=", "an eltwise cmp_ne def __ne__(self, rhs): return call('cmp_ne', self, rhs)", "ffi.CData) and ffi.typeof(value) is ffi.typeof('plaidml_value*'): ffi_obj = value else: raise", "on a contraction def no_reduce(self): if not self._is_contraction: raise TypeError('no_reduce", "dims of this tensor. 
def bind_dims(self, *dims): raw_dims = [x.as_ptr()", "def __init__(self, tensor): self.tensor = tensor def __hash__(self): return hash(ffi_call(lib.plaidml_expr_ptr,", "return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, self, rhs)) def __radd__(self, lhs): return TensorIndex(poly_op(lib.PLAIDML_INT_OP_ADD, lhs,", "not self._is_contraction: raise TypeError('use_default can only be specified on a", "hash(ffi_call(lib.plaidml_expr_ptr, self.tensor.as_ptr())) def __eq__(self, other): if isinstance(other, Tensor): return self.__hash__()", "return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, self, other)) def __rsub__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, other,", "raise TypeError('Unsupported type {} for value={}'.format(type(value), value)) super(Value, self).__init__(ffi_obj) def", "__or__(self, rhs): return call('bit_or', self, rhs) def __ror__(self, lhs): return", "Value\"\"\" __ffi_del__ = lib.plaidml_value_free __ffi_repr__ = lib.plaidml_value_repr def __init__(self, value):", "TensorDim(dim_op(lib.PLAIDML_INT_OP_SUB, other, self)) def __mul__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_MUL, self, other))", "as_bool(x): return cast(x, DType.BOOLEAN) def as_float(x, bit_size): map = {", "self._name, )) elif isinstance(value._impl, _ContractionPart): # Binary or ternary ASSIGN", "eltwise division def __truediv__(self, rhs): return call('div', self, rhs) def", "Binary or ternary ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, value._impl.op, [x._impl", "__neg__(self): return TensorDim(dim_op(lib.PLAIDML_INT_OP_NEG, self)) def __add__(self, other): return TensorDim(dim_op(lib.PLAIDML_INT_OP_ADD, self,", "ASSIGN contraction self._set_contraction( _Contraction( lib.PLAIDML_AGG_OP_ASSIGN, value._impl.op, [x._impl for x in", "buffer.as_ptr()) def __hash__(self): return hash((self.as_ptr(), self._dims, self._is_contraction)) def __getitem__(self, key):", "rhs): return call('bit_left', self, rhs) def 
__rlshift__(self, lhs): return call('bit_left',", "[x.as_ptr() for x in dims] ffi_call(lib.plaidml_expr_bind_dims, self.as_ptr(), len(raw_dims), raw_dims) #", "return cast(x, dtype) def as_int(x, bit_size): map = { 8:", "y) def gradients(loss, variables): wrts = [x.as_ptr() for x in", "ffi.new('int64_t[]', [0 if x is None else x for x", "self)) def wrap_poly(x): if isinstance(x, six.integer_types): return TensorIndex(expr=ffi_call(lib.plaidml_poly_expr_literal, x)) if", "axis): return call('index', x, axis) def jacobian(loss, variables): wrts =", "sink_idxs.as_ptr(), sink_sizes.as_ptr(), len(src_idxs), src_idxs, name.encode(), ) super(_Contraction, self).__init__(expr) _ContractionPart =" ]
[ "(c) 2018 Intel Corporation Licensed under the Apache License, Version", "import LandmarksNet from .se_resnet_angular import SEResNetAngular from .shufflenet_v2_angular import ShuffleNetV2Angular", "Corporation Licensed under the Apache License, Version 2.0 (the \"License\");", "\"\"\"Returns input resolution\"\"\" from .rmnet_angular import RMNetAngular from .mobilefacenet import", "input resolution\"\"\" from .rmnet_angular import RMNetAngular from .mobilefacenet import MobileFaceNet", "from .backbones.se_resnext import se_resnext50, se_resnext101, se_resnext152 models_backbones = {'rmnet': RMNetAngular,", "Unless required by applicable law or agreed to in writing,", "by applicable law or agreed to in writing, software distributed", "dropout ratio of the model\"\"\" @abstractmethod def get_input_res(self): \"\"\"Returns input", "ratio): \"\"\"Sets dropout ratio of the model\"\"\" @abstractmethod def get_input_res(self):", "as nn class ModelInterface(nn.Module): \"\"\"Abstract class for models\"\"\" @abstractmethod def", "base=se_resnext101), 'se_resnext152': partial(SEResNetAngular, base=se_resnext152), 'shufflenetv2': ShuffleNetV2Angular} models_landmarks = {'landnet': LandmarksNet}", "software distributed under the License is distributed on an \"AS", "distributed under the License is distributed on an \"AS IS\"", "under the License. \"\"\" from abc import abstractmethod from functools", "\"\"\" from abc import abstractmethod from functools import partial import", "CONDITIONS OF ANY KIND, either express or implied. 
See the", "Version 2.0 (the \"License\"); you may not use this file", "Copyright (c) 2018 Intel Corporation Licensed under the Apache License,", "= {'rmnet': RMNetAngular, 'mobilenetv2': MobileFaceNet, 'mobilenetv2_2x': partial(MobileFaceNet, width_multiplier=2.0), 'mobilenetv2_1_5x': partial(MobileFaceNet,", "se_resnet101, se_resnet152 from .backbones.resnet import resnet50 from .backbones.se_resnext import se_resnext50,", "RMNetAngular, 'mobilenetv2': MobileFaceNet, 'mobilenetv2_2x': partial(MobileFaceNet, width_multiplier=2.0), 'mobilenetv2_1_5x': partial(MobileFaceNet, width_multiplier=1.5), 'resnet50':", "writing, software distributed under the License is distributed on an", "base=se_resnext50), 'se_resnext101': partial(SEResNetAngular, base=se_resnext101), 'se_resnext152': partial(SEResNetAngular, base=se_resnext152), 'shufflenetv2': ShuffleNetV2Angular} models_landmarks", "not use this file except in compliance with the License.", "2.0 (the \"License\"); you may not use this file except", "Apache License, Version 2.0 (the \"License\"); you may not use", "copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable", "express or implied. See the License for the specific language", "torch.nn as nn class ModelInterface(nn.Module): \"\"\"Abstract class for models\"\"\" @abstractmethod", "nn class ModelInterface(nn.Module): \"\"\"Abstract class for models\"\"\" @abstractmethod def set_dropout_ratio(self,", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "in compliance with the License. 
You may obtain a copy", "of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law", "'se_resnext50': partial(SEResNetAngular, base=se_resnext50), 'se_resnext101': partial(SEResNetAngular, base=se_resnext101), 'se_resnext152': partial(SEResNetAngular, base=se_resnext152), 'shufflenetv2':", "you may not use this file except in compliance with", "from .se_resnet_angular import SEResNetAngular from .shufflenet_v2_angular import ShuffleNetV2Angular from .backbones.se_resnet", "'se_resnet101': partial(SEResNetAngular, base=se_resnet101), 'se_resnet152': partial(SEResNetAngular, base=se_resnet152), 'se_resnext50': partial(SEResNetAngular, base=se_resnext50), 'se_resnext101':", "is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "partial import torch.nn as nn class ModelInterface(nn.Module): \"\"\"Abstract class for", "the License. You may obtain a copy of the License", "agreed to in writing, software distributed under the License is", "import partial import torch.nn as nn class ModelInterface(nn.Module): \"\"\"Abstract class", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "import MobileFaceNet from .landnet import LandmarksNet from .se_resnet_angular import SEResNetAngular", "import se_resnext50, se_resnext101, se_resnext152 models_backbones = {'rmnet': RMNetAngular, 'mobilenetv2': MobileFaceNet,", "partial(MobileFaceNet, width_multiplier=1.5), 'resnet50': partial(SEResNetAngular, base=resnet50), 'se_resnet50': partial(SEResNetAngular, base=se_resnet50), 'se_resnet101': partial(SEResNetAngular,", "'se_resnet50': partial(SEResNetAngular, base=se_resnet50), 'se_resnet101': partial(SEResNetAngular, base=se_resnet101), 'se_resnet152': partial(SEResNetAngular, base=se_resnet152), 'se_resnext50':", "partial(SEResNetAngular, base=se_resnet101), 'se_resnet152': partial(SEResNetAngular, base=se_resnet152), 'se_resnext50': partial(SEResNetAngular, base=se_resnext50), 'se_resnext101': partial(SEResNetAngular,", 
".shufflenet_v2_angular import ShuffleNetV2Angular from .backbones.se_resnet import se_resnet50, se_resnet101, se_resnet152 from", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to", "abstractmethod from functools import partial import torch.nn as nn class", "ratio of the model\"\"\" @abstractmethod def get_input_res(self): \"\"\"Returns input resolution\"\"\"", "use this file except in compliance with the License. You", ".rmnet_angular import RMNetAngular from .mobilefacenet import MobileFaceNet from .landnet import", "the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or", "partial(SEResNetAngular, base=se_resnet50), 'se_resnet101': partial(SEResNetAngular, base=se_resnet101), 'se_resnet152': partial(SEResNetAngular, base=se_resnet152), 'se_resnext50': partial(SEResNetAngular,", "ANY KIND, either express or implied. See the License for", ".landnet import LandmarksNet from .se_resnet_angular import SEResNetAngular from .shufflenet_v2_angular import", "{'rmnet': RMNetAngular, 'mobilenetv2': MobileFaceNet, 'mobilenetv2_2x': partial(MobileFaceNet, width_multiplier=2.0), 'mobilenetv2_1_5x': partial(MobileFaceNet, width_multiplier=1.5),", "import resnet50 from .backbones.se_resnext import se_resnext50, se_resnext101, se_resnext152 models_backbones =", "http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in", "may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless", "obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required", "import ShuffleNetV2Angular from .backbones.se_resnet import se_resnet50, se_resnet101, se_resnet152 from .backbones.resnet", "MobileFaceNet, 'mobilenetv2_2x': partial(MobileFaceNet, width_multiplier=2.0), 'mobilenetv2_1_5x': partial(MobileFaceNet, width_multiplier=1.5), 'resnet50': 
partial(SEResNetAngular, base=resnet50),", "License. \"\"\" from abc import abstractmethod from functools import partial", "for models\"\"\" @abstractmethod def set_dropout_ratio(self, ratio): \"\"\"Sets dropout ratio of", "partial(MobileFaceNet, width_multiplier=2.0), 'mobilenetv2_1_5x': partial(MobileFaceNet, width_multiplier=1.5), 'resnet50': partial(SEResNetAngular, base=resnet50), 'se_resnet50': partial(SEResNetAngular,", "se_resnext152 models_backbones = {'rmnet': RMNetAngular, 'mobilenetv2': MobileFaceNet, 'mobilenetv2_2x': partial(MobileFaceNet, width_multiplier=2.0),", "and limitations under the License. \"\"\" from abc import abstractmethod", "either express or implied. See the License for the specific", "width_multiplier=1.5), 'resnet50': partial(SEResNetAngular, base=resnet50), 'se_resnet50': partial(SEResNetAngular, base=se_resnet50), 'se_resnet101': partial(SEResNetAngular, base=se_resnet101),", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "'se_resnext101': partial(SEResNetAngular, base=se_resnext101), 'se_resnext152': partial(SEResNetAngular, base=se_resnext152), 'shufflenetv2': ShuffleNetV2Angular} models_landmarks =", "from .landnet import LandmarksNet from .se_resnet_angular import SEResNetAngular from .shufflenet_v2_angular", "under the License is distributed on an \"AS IS\" BASIS,", "\"License\"); you may not use this file except in compliance", "governing permissions and limitations under the License. \"\"\" from abc", "License is distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "Licensed under the Apache License, Version 2.0 (the \"License\"); you", "get_input_res(self): \"\"\"Returns input resolution\"\"\" from .rmnet_angular import RMNetAngular from .mobilefacenet", "permissions and limitations under the License. 
\"\"\" from abc import", ".backbones.resnet import resnet50 from .backbones.se_resnext import se_resnext50, se_resnext101, se_resnext152 models_backbones", "@abstractmethod def get_input_res(self): \"\"\"Returns input resolution\"\"\" from .rmnet_angular import RMNetAngular", "with the License. You may obtain a copy of the", "from .backbones.resnet import resnet50 from .backbones.se_resnext import se_resnext50, se_resnext101, se_resnext152", "License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed", "License for the specific language governing permissions and limitations under", "base=resnet50), 'se_resnet50': partial(SEResNetAngular, base=se_resnet50), 'se_resnet101': partial(SEResNetAngular, base=se_resnet101), 'se_resnet152': partial(SEResNetAngular, base=se_resnet152),", ".backbones.se_resnext import se_resnext50, se_resnext101, se_resnext152 models_backbones = {'rmnet': RMNetAngular, 'mobilenetv2':", "width_multiplier=2.0), 'mobilenetv2_1_5x': partial(MobileFaceNet, width_multiplier=1.5), 'resnet50': partial(SEResNetAngular, base=resnet50), 'se_resnet50': partial(SEResNetAngular, base=se_resnet50),", "this file except in compliance with the License. You may", "<reponame>AnastasiaaSenina/openvino_training_extensions \"\"\" Copyright (c) 2018 Intel Corporation Licensed under the", "class ModelInterface(nn.Module): \"\"\"Abstract class for models\"\"\" @abstractmethod def set_dropout_ratio(self, ratio):", "import abstractmethod from functools import partial import torch.nn as nn", "specific language governing permissions and limitations under the License. 
\"\"\"", "(the \"License\"); you may not use this file except in", "models_backbones = {'rmnet': RMNetAngular, 'mobilenetv2': MobileFaceNet, 'mobilenetv2_2x': partial(MobileFaceNet, width_multiplier=2.0), 'mobilenetv2_1_5x':", "class for models\"\"\" @abstractmethod def set_dropout_ratio(self, ratio): \"\"\"Sets dropout ratio", "import se_resnet50, se_resnet101, se_resnet152 from .backbones.resnet import resnet50 from .backbones.se_resnext", "from abc import abstractmethod from functools import partial import torch.nn", "import SEResNetAngular from .shufflenet_v2_angular import ShuffleNetV2Angular from .backbones.se_resnet import se_resnet50,", "partial(SEResNetAngular, base=se_resnext50), 'se_resnext101': partial(SEResNetAngular, base=se_resnext101), 'se_resnext152': partial(SEResNetAngular, base=se_resnext152), 'shufflenetv2': ShuffleNetV2Angular}", "language governing permissions and limitations under the License. \"\"\" from", "applicable law or agreed to in writing, software distributed under", "LandmarksNet from .se_resnet_angular import SEResNetAngular from .shufflenet_v2_angular import ShuffleNetV2Angular from", ".backbones.se_resnet import se_resnet50, se_resnet101, se_resnet152 from .backbones.resnet import resnet50 from", "the License is distributed on an \"AS IS\" BASIS, WITHOUT", "base=se_resnet101), 'se_resnet152': partial(SEResNetAngular, base=se_resnet152), 'se_resnext50': partial(SEResNetAngular, base=se_resnext50), 'se_resnext101': partial(SEResNetAngular, base=se_resnext101),", "the License. 
\"\"\" from abc import abstractmethod from functools import", "You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0", "the specific language governing permissions and limitations under the License.", "import torch.nn as nn class ModelInterface(nn.Module): \"\"\"Abstract class for models\"\"\"", "'mobilenetv2_1_5x': partial(MobileFaceNet, width_multiplier=1.5), 'resnet50': partial(SEResNetAngular, base=resnet50), 'se_resnet50': partial(SEResNetAngular, base=se_resnet50), 'se_resnet101':", "the Apache License, Version 2.0 (the \"License\"); you may not", "file except in compliance with the License. You may obtain", "except in compliance with the License. You may obtain a", "or implied. See the License for the specific language governing", "KIND, either express or implied. See the License for the", "functools import partial import torch.nn as nn class ModelInterface(nn.Module): \"\"\"Abstract", "models\"\"\" @abstractmethod def set_dropout_ratio(self, ratio): \"\"\"Sets dropout ratio of the", "\"\"\"Abstract class for models\"\"\" @abstractmethod def set_dropout_ratio(self, ratio): \"\"\"Sets dropout", "to in writing, software distributed under the License is distributed", "se_resnet152 from .backbones.resnet import resnet50 from .backbones.se_resnext import se_resnext50, se_resnext101,", "from .shufflenet_v2_angular import ShuffleNetV2Angular from .backbones.se_resnet import se_resnet50, se_resnet101, se_resnet152", "base=se_resnet152), 'se_resnext50': partial(SEResNetAngular, base=se_resnext50), 'se_resnext101': partial(SEResNetAngular, base=se_resnext101), 'se_resnext152': partial(SEResNetAngular, base=se_resnext152),", "ShuffleNetV2Angular from .backbones.se_resnet import se_resnet50, se_resnet101, se_resnet152 from .backbones.resnet import", "or agreed to in writing, software distributed under the License", "'mobilenetv2': MobileFaceNet, 'mobilenetv2_2x': partial(MobileFaceNet, width_multiplier=2.0), 'mobilenetv2_1_5x': 
partial(MobileFaceNet, width_multiplier=1.5), 'resnet50': partial(SEResNetAngular,", "law or agreed to in writing, software distributed under the", "OR CONDITIONS OF ANY KIND, either express or implied. See", "abc import abstractmethod from functools import partial import torch.nn as", "'mobilenetv2_2x': partial(MobileFaceNet, width_multiplier=2.0), 'mobilenetv2_1_5x': partial(MobileFaceNet, width_multiplier=1.5), 'resnet50': partial(SEResNetAngular, base=resnet50), 'se_resnet50':", "limitations under the License. \"\"\" from abc import abstractmethod from", "base=se_resnet50), 'se_resnet101': partial(SEResNetAngular, base=se_resnet101), 'se_resnet152': partial(SEResNetAngular, base=se_resnet152), 'se_resnext50': partial(SEResNetAngular, base=se_resnext50),", "compliance with the License. You may obtain a copy of", "set_dropout_ratio(self, ratio): \"\"\"Sets dropout ratio of the model\"\"\" @abstractmethod def", "OF ANY KIND, either express or implied. See the License", "under the Apache License, Version 2.0 (the \"License\"); you may", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "model\"\"\" @abstractmethod def get_input_res(self): \"\"\"Returns input resolution\"\"\" from .rmnet_angular import", "def set_dropout_ratio(self, ratio): \"\"\"Sets dropout ratio of the model\"\"\" @abstractmethod", ".mobilefacenet import MobileFaceNet from .landnet import LandmarksNet from .se_resnet_angular import", "se_resnet50, se_resnet101, se_resnet152 from .backbones.resnet import resnet50 from .backbones.se_resnext import", "from .backbones.se_resnet import se_resnet50, se_resnet101, se_resnet152 from .backbones.resnet import resnet50", "from functools import partial import torch.nn as nn class ModelInterface(nn.Module):", "from .mobilefacenet import MobileFaceNet from .landnet import LandmarksNet from .se_resnet_angular", "License, Version 2.0 (the \"License\"); you may not use this", "se_resnext50, se_resnext101, se_resnext152 models_backbones = {'rmnet': 
RMNetAngular, 'mobilenetv2': MobileFaceNet, 'mobilenetv2_2x':", "@abstractmethod def set_dropout_ratio(self, ratio): \"\"\"Sets dropout ratio of the model\"\"\"", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "Intel Corporation Licensed under the Apache License, Version 2.0 (the", "for the specific language governing permissions and limitations under the", "See the License for the specific language governing permissions and", "\"\"\"Sets dropout ratio of the model\"\"\" @abstractmethod def get_input_res(self): \"\"\"Returns", "a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by", "2018 Intel Corporation Licensed under the Apache License, Version 2.0", "'resnet50': partial(SEResNetAngular, base=resnet50), 'se_resnet50': partial(SEResNetAngular, base=se_resnet50), 'se_resnet101': partial(SEResNetAngular, base=se_resnet101), 'se_resnet152':", "partial(SEResNetAngular, base=se_resnet152), 'se_resnext50': partial(SEResNetAngular, base=se_resnext50), 'se_resnext101': partial(SEResNetAngular, base=se_resnext101), 'se_resnext152': partial(SEResNetAngular,", "partial(SEResNetAngular, base=resnet50), 'se_resnet50': partial(SEResNetAngular, base=se_resnet50), 'se_resnet101': partial(SEResNetAngular, base=se_resnet101), 'se_resnet152': partial(SEResNetAngular,", "License. 
You may obtain a copy of the License at", "RMNetAngular from .mobilefacenet import MobileFaceNet from .landnet import LandmarksNet from", "MobileFaceNet from .landnet import LandmarksNet from .se_resnet_angular import SEResNetAngular from", "from .rmnet_angular import RMNetAngular from .mobilefacenet import MobileFaceNet from .landnet", ".se_resnet_angular import SEResNetAngular from .shufflenet_v2_angular import ShuffleNetV2Angular from .backbones.se_resnet import", "the License for the specific language governing permissions and limitations", "'se_resnet152': partial(SEResNetAngular, base=se_resnet152), 'se_resnext50': partial(SEResNetAngular, base=se_resnext50), 'se_resnext101': partial(SEResNetAngular, base=se_resnext101), 'se_resnext152':", "may not use this file except in compliance with the", "the model\"\"\" @abstractmethod def get_input_res(self): \"\"\"Returns input resolution\"\"\" from .rmnet_angular", "of the model\"\"\" @abstractmethod def get_input_res(self): \"\"\"Returns input resolution\"\"\" from", "resolution\"\"\" from .rmnet_angular import RMNetAngular from .mobilefacenet import MobileFaceNet from", "in writing, software distributed under the License is distributed on", "import RMNetAngular from .mobilefacenet import MobileFaceNet from .landnet import LandmarksNet", "partial(SEResNetAngular, base=se_resnext101), 'se_resnext152': partial(SEResNetAngular, base=se_resnext152), 'shufflenetv2': ShuffleNetV2Angular} models_landmarks = {'landnet':", "required by applicable law or agreed to in writing, software", "implied. 
See the License for the specific language governing permissions", "se_resnext101, se_resnext152 models_backbones = {'rmnet': RMNetAngular, 'mobilenetv2': MobileFaceNet, 'mobilenetv2_2x': partial(MobileFaceNet,", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "SEResNetAngular from .shufflenet_v2_angular import ShuffleNetV2Angular from .backbones.se_resnet import se_resnet50, se_resnet101,", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY", "resnet50 from .backbones.se_resnext import se_resnext50, se_resnext101, se_resnext152 models_backbones = {'rmnet':", "def get_input_res(self): \"\"\"Returns input resolution\"\"\" from .rmnet_angular import RMNetAngular from", "\"\"\" Copyright (c) 2018 Intel Corporation Licensed under the Apache", "ModelInterface(nn.Module): \"\"\"Abstract class for models\"\"\" @abstractmethod def set_dropout_ratio(self, ratio): \"\"\"Sets" ]
[ "else: if ei is not None: ret[\"exc_info\"] = self.formatException(ei) #", "ei is not None: ret[\"exc_info\"] = self.formatException(ei) # Dump the", "ret.pop(\"args\") msg = ret.pop(\"msg\") ret[\"message\"] = msg % args #", "= msg % args # Exceptions must be formatted (they", "Exceptions must be formatted (they are not JSON-serializable try: ei", "class JSONFormatter(logging.Formatter): \"\"\"Simple JSON formatter for the logging facility.\"\"\" def", "format(self, obj): \"\"\"Note that obj is a LogRecord instance.\"\"\" #", "obj is a LogRecord instance.\"\"\" # Copy the dictionary ret", "ei = ret.pop(\"exc_info\") except KeyError: pass else: if ei is", "logging facility.\"\"\" def format(self, obj): \"\"\"Note that obj is a", "that obj is a LogRecord instance.\"\"\" # Copy the dictionary", "None: ret[\"exc_info\"] = self.formatException(ei) # Dump the dictionary in JSON", "description goes here. \"\"\" import json import logging class JSONFormatter(logging.Formatter):", "the dictionary ret = dict(obj.__dict__) # Perform the message substitution", "% args # Exceptions must be formatted (they are not", "\"\"\" import json import logging class JSONFormatter(logging.Formatter): \"\"\"Simple JSON formatter", "JSONFormatter(logging.Formatter): \"\"\"Simple JSON formatter for the logging facility.\"\"\" def format(self,", "<reponame>kpavel/pyclay \"\"\" Class description goes here. 
\"\"\" import json import", "for the logging facility.\"\"\" def format(self, obj): \"\"\"Note that obj", "msg = ret.pop(\"msg\") ret[\"message\"] = msg % args # Exceptions", "dictionary ret = dict(obj.__dict__) # Perform the message substitution args", "formatted (they are not JSON-serializable try: ei = ret.pop(\"exc_info\") except", "are not JSON-serializable try: ei = ret.pop(\"exc_info\") except KeyError: pass", "# Dump the dictionary in JSON form return json.dumps(ret, skipkeys=True)", "ret.pop(\"exc_info\") except KeyError: pass else: if ei is not None:", "ret.pop(\"msg\") ret[\"message\"] = msg % args # Exceptions must be", "self.formatException(ei) # Dump the dictionary in JSON form return json.dumps(ret,", "= ret.pop(\"exc_info\") except KeyError: pass else: if ei is not", "# Exceptions must be formatted (they are not JSON-serializable try:", "be formatted (they are not JSON-serializable try: ei = ret.pop(\"exc_info\")", "\"\"\"Simple JSON formatter for the logging facility.\"\"\" def format(self, obj):", "instance.\"\"\" # Copy the dictionary ret = dict(obj.__dict__) # Perform", "ret = dict(obj.__dict__) # Perform the message substitution args =", "is not None: ret[\"exc_info\"] = self.formatException(ei) # Dump the dictionary", "= dict(obj.__dict__) # Perform the message substitution args = ret.pop(\"args\")", "# Copy the dictionary ret = dict(obj.__dict__) # Perform the", "here. \"\"\" import json import logging class JSONFormatter(logging.Formatter): \"\"\"Simple JSON", "not JSON-serializable try: ei = ret.pop(\"exc_info\") except KeyError: pass else:", "LogRecord instance.\"\"\" # Copy the dictionary ret = dict(obj.__dict__) #", "import logging class JSONFormatter(logging.Formatter): \"\"\"Simple JSON formatter for the logging", "args = ret.pop(\"args\") msg = ret.pop(\"msg\") ret[\"message\"] = msg %", "Class description goes here. 
\"\"\" import json import logging class", "import json import logging class JSONFormatter(logging.Formatter): \"\"\"Simple JSON formatter for", "msg % args # Exceptions must be formatted (they are", "= ret.pop(\"msg\") ret[\"message\"] = msg % args # Exceptions must", "if ei is not None: ret[\"exc_info\"] = self.formatException(ei) # Dump", "must be formatted (they are not JSON-serializable try: ei =", "Perform the message substitution args = ret.pop(\"args\") msg = ret.pop(\"msg\")", "pass else: if ei is not None: ret[\"exc_info\"] = self.formatException(ei)", "dict(obj.__dict__) # Perform the message substitution args = ret.pop(\"args\") msg", "ret[\"message\"] = msg % args # Exceptions must be formatted", "(they are not JSON-serializable try: ei = ret.pop(\"exc_info\") except KeyError:", "# Perform the message substitution args = ret.pop(\"args\") msg =", "formatter for the logging facility.\"\"\" def format(self, obj): \"\"\"Note that", "a LogRecord instance.\"\"\" # Copy the dictionary ret = dict(obj.__dict__)", "= ret.pop(\"args\") msg = ret.pop(\"msg\") ret[\"message\"] = msg % args", "message substitution args = ret.pop(\"args\") msg = ret.pop(\"msg\") ret[\"message\"] =", "= self.formatException(ei) # Dump the dictionary in JSON form return", "facility.\"\"\" def format(self, obj): \"\"\"Note that obj is a LogRecord", "except KeyError: pass else: if ei is not None: ret[\"exc_info\"]", "JSON-serializable try: ei = ret.pop(\"exc_info\") except KeyError: pass else: if", "goes here. \"\"\" import json import logging class JSONFormatter(logging.Formatter): \"\"\"Simple", "try: ei = ret.pop(\"exc_info\") except KeyError: pass else: if ei", "the logging facility.\"\"\" def format(self, obj): \"\"\"Note that obj is", "ret[\"exc_info\"] = self.formatException(ei) # Dump the dictionary in JSON form", "\"\"\" Class description goes here. 
\"\"\" import json import logging", "obj): \"\"\"Note that obj is a LogRecord instance.\"\"\" # Copy", "args # Exceptions must be formatted (they are not JSON-serializable", "json import logging class JSONFormatter(logging.Formatter): \"\"\"Simple JSON formatter for the", "\"\"\"Note that obj is a LogRecord instance.\"\"\" # Copy the", "Copy the dictionary ret = dict(obj.__dict__) # Perform the message", "logging class JSONFormatter(logging.Formatter): \"\"\"Simple JSON formatter for the logging facility.\"\"\"", "JSON formatter for the logging facility.\"\"\" def format(self, obj): \"\"\"Note", "not None: ret[\"exc_info\"] = self.formatException(ei) # Dump the dictionary in", "the message substitution args = ret.pop(\"args\") msg = ret.pop(\"msg\") ret[\"message\"]", "substitution args = ret.pop(\"args\") msg = ret.pop(\"msg\") ret[\"message\"] = msg", "is a LogRecord instance.\"\"\" # Copy the dictionary ret =", "KeyError: pass else: if ei is not None: ret[\"exc_info\"] =", "def format(self, obj): \"\"\"Note that obj is a LogRecord instance.\"\"\"" ]
[ "if np.allclose(b, A*x) == False: raise Exception('Orthogonal test failure') ################################################################################", "Orthogonal linear system solver tests from math import sqrt import", "b, 1) # Check if np.allclose(b, A*x) == False: raise", "# Known terms vector b = np.matrix('2; 3; 4') #", "2x2 orthogonal matrix A = np.matrix('1 1;' '1 -1', float)", "b) # Check if np.allclose(b, A*x) == False: raise Exception('Orthogonal", "Known terms vector b = np.matrix('2; 3; 4') # Solve", "import numpy as np from orthogonal import orthogonal ################################################################################ #", "= np.matrix('2; 3; 4') # Solve the system x =", "np.matrix('1 1;' '1 -1', float) A = A*1.0/sqrt(2.0) # Known", "Solve the system x = orthogonal(A, b) # Check if", "Solve the system x = orthogonal(A, b, 1) # Check", "'2 1 -2', float) A = A*1.0/3.0 # Known terms", "sqrt import numpy as np from orthogonal import orthogonal ################################################################################", "-1', float) A = A*1.0/sqrt(2.0) # Known terms vector b", "= np.matrix('1 1;' '1 -1', float) A = A*1.0/sqrt(2.0) #", "x = orthogonal(A, b, 1) # Check if np.allclose(b, A*x)", "= A*1.0/3.0 # Known terms vector b = np.matrix('2; 3;", "solver tests from math import sqrt import numpy as np", "2;' '2 1 -2', float) A = A*1.0/3.0 # Known", "A = A*1.0/sqrt(2.0) # Known terms vector b = np.matrix('2;", "x = orthogonal(A, b) # Check if np.allclose(b, A*x) ==", "1;' '1 -1', float) A = A*1.0/sqrt(2.0) # Known terms", "== False: raise Exception('Orthogonal test failure') ################################################################################ # 2x2 orthogonal", "matrix A = np.matrix('2 -2 1;' '1 2 2;' '2", "= A*1.0/sqrt(2.0) # Known terms vector b = np.matrix('2; 3')", "orthogonal(A, b) # Check if np.allclose(b, A*x) == False: raise", "# 2x2 orthogonal matrix A = np.matrix('2 -2 1;' '1", "= orthogonal(A, b) # Check 
if np.allclose(b, A*x) == False:", "vector b = np.matrix('2; 3') # Solve the system x", "2x2 orthogonal matrix A = np.matrix('2 -2 1;' '1 2", "# Solve the system x = orthogonal(A, b) # Check", "################################################################################ # 2x2 orthogonal matrix A = np.matrix('1 1;' '1", "Known terms vector b = np.matrix('2; 3') # Solve the", "A = np.matrix('1 1;' '1 -1', float) A = A*1.0/sqrt(2.0)", "b = np.matrix('2; 3; 4') # Solve the system x", "orthogonal matrix A = np.matrix('2 -2 1;' '1 2 2;'", "orthogonal matrix A = np.matrix('1 1;' '1 -1', float) A", "= np.matrix('2; 3') # Solve the system x = orthogonal(A,", "1;' '1 2 2;' '2 1 -2', float) A =", "A*1.0/3.0 # Known terms vector b = np.matrix('2; 3; 4')", "-2 1;' '1 2 2;' '2 1 -2', float) A", "np.matrix('2; 3') # Solve the system x = orthogonal(A, b,", "'1 2 2;' '2 1 -2', float) A = A*1.0/3.0", "system solver tests from math import sqrt import numpy as", "# 2x2 orthogonal matrix A = np.matrix('1 1;' '1 -1',", "np.matrix('2; 3; 4') # Solve the system x = orthogonal(A,", "orthogonal import orthogonal ################################################################################ # 2x2 orthogonal matrix A =", "import sqrt import numpy as np from orthogonal import orthogonal", "A = A*1.0/3.0 # Known terms vector b = np.matrix('2;", "# Known terms vector b = np.matrix('2; 3') # Solve", "np.matrix('2 -2 1;' '1 2 2;' '2 1 -2', float)", "from math import sqrt import numpy as np from orthogonal", "math import sqrt import numpy as np from orthogonal import", "# Orthogonal linear system solver tests from math import sqrt", "test failure') ################################################################################ # 2x2 orthogonal matrix A = np.matrix('2", "False: raise Exception('Orthogonal test failure') ################################################################################ # 2x2 orthogonal matrix", "system x = orthogonal(A, b) # Check if np.allclose(b, A*x)", "3') # 
Solve the system x = orthogonal(A, b, 1)", "numpy as np from orthogonal import orthogonal ################################################################################ # 2x2", "failure') ################################################################################ # 2x2 orthogonal matrix A = np.matrix('2 -2", "A*x) == False: raise Exception('Orthogonal test failure') ################################################################################ # 2x2", "from orthogonal import orthogonal ################################################################################ # 2x2 orthogonal matrix A", "# Check if np.allclose(b, A*x) == False: raise Exception('Orthogonal test", "2 2;' '2 1 -2', float) A = A*1.0/3.0 #", "'1 -1', float) A = A*1.0/sqrt(2.0) # Known terms vector", "terms vector b = np.matrix('2; 3; 4') # Solve the", "terms vector b = np.matrix('2; 3') # Solve the system", "tests from math import sqrt import numpy as np from", "b = np.matrix('2; 3') # Solve the system x =", "as np from orthogonal import orthogonal ################################################################################ # 2x2 orthogonal", "A = np.matrix('2 -2 1;' '1 2 2;' '2 1", "Exception('Orthogonal test failure') ################################################################################ # 2x2 orthogonal matrix A =", "<reponame>davxy/numeric<gh_stars>1-10 # Orthogonal linear system solver tests from math import", "= np.matrix('2 -2 1;' '1 2 2;' '2 1 -2',", "system x = orthogonal(A, b, 1) # Check if np.allclose(b,", "= orthogonal(A, b, 1) # Check if np.allclose(b, A*x) ==", "the system x = orthogonal(A, b, 1) # Check if", "float) A = A*1.0/sqrt(2.0) # Known terms vector b =", "np.allclose(b, A*x) == False: raise Exception('Orthogonal test failure') ################################################################################ #", "import orthogonal ################################################################################ # 2x2 orthogonal matrix A = np.matrix('1", 
"matrix A = np.matrix('1 1;' '1 -1', float) A =", "################################################################################ # 2x2 orthogonal matrix A = np.matrix('2 -2 1;'", "float) A = A*1.0/3.0 # Known terms vector b =", "the system x = orthogonal(A, b) # Check if np.allclose(b,", "3; 4') # Solve the system x = orthogonal(A, b)", "# Solve the system x = orthogonal(A, b, 1) #", "4') # Solve the system x = orthogonal(A, b) #", "linear system solver tests from math import sqrt import numpy", "np from orthogonal import orthogonal ################################################################################ # 2x2 orthogonal matrix", "1 -2', float) A = A*1.0/3.0 # Known terms vector", "orthogonal(A, b, 1) # Check if np.allclose(b, A*x) == False:", "1) # Check if np.allclose(b, A*x) == False: raise Exception('Orthogonal", "Check if np.allclose(b, A*x) == False: raise Exception('Orthogonal test failure')", "orthogonal ################################################################################ # 2x2 orthogonal matrix A = np.matrix('1 1;'", "vector b = np.matrix('2; 3; 4') # Solve the system", "-2', float) A = A*1.0/3.0 # Known terms vector b", "raise Exception('Orthogonal test failure') ################################################################################ # 2x2 orthogonal matrix A", "A*1.0/sqrt(2.0) # Known terms vector b = np.matrix('2; 3') #" ]
[ "os import shutil from dataclasses import dataclass from datetime import", "to process data files\"), (\"❌\", \"Failed to upload processed data", "\"⚙ In Progress...\", \"✅ Success!\", \"❌ Failed: file not found\",", "created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join( [ f\"📁 {CYAN_TAG}{self.filename}{RESET_TAG}", "return cls( proj_id=json_resp[\"id\"], name=json_resp[\"proj_name\"], user=json_resp[\"username\"], task=list(filter(lambda key: TASKS[key] == json_resp[\"task\"],", "file_extension = file_name.split(\".\")[-1] src = os.path.expanduser(file_path) dst = os.path.join(local_dataset_dir, \"raw\",", "= \"\\n\".join( [ f\"AutoNLP Project (id # {self.proj_id})\", \"~\" *", "sorted_files] printout.append( \"\\n\".join( [ \"~\" * 14 + f\" {BOLD_TAG}Files{RESET_TAG}", "None: jobs_str = \"❓ Models information unknown, update the project\"", "job in AutoNLP\"\"\" job_id: int status: str status_emoji: str created_at:", "else: model_table = PrettyTable([\"\", \"ID\", \"Status\", \"Creation date\", \"Last update\"])", "job.job_id): model_table.add_row( [ job.status_emoji, job.job_id, job.status, job.created_at.strftime(\"%Y-%m-%d %H:%M Z\"), job.updated_at.strftime(\"%Y-%m-%d", "tree clean\" in err.args[0]: logger.info(\"❔ Files did not change since", "logger.info(\"✅ Successfully uploaded the files!\") except OSError as err: if", "TRAIN_SPLIT, VALID_SPLIT from .tasks import TASKS from .utils import BOLD_TAG,", "\"Dataset ID:\", f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\", \"\", ] + descriptions ) ) #", "f\" • {BOLD_TAG}Split{RESET_TAG}: {self.split}\", f\" • {BOLD_TAG}Processing status{RESET_TAG}: {self.processing_status}\", f\"", "enumerate(filepaths): if not os.path.isfile(file_path): logger.error(f\"[{idx + 1}/{len(filepaths)}] ❌ '{file_path}' does", "- 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), 
updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join( [", "from .utils import BOLD_TAG, CYAN_TAG, GREEN_TAG, PURPLE_TAG, RESET_TAG, YELLOW_TAG, http_get,", "def upload(self, filepaths: List[str], split: str, col_mapping: Dict[str, str]): \"\"\"Uploads", "split: str col_mapping: Dict[str, str] created_at: datetime updated_at: datetime @classmethod", "from .tasks import TASKS from .utils import BOLD_TAG, CYAN_TAG, GREEN_TAG,", "information unknown, update the project\"] else: if len(self.files) == 0:", "\"🤷 No train jobs started yet!\" else: model_table = PrettyTable([\"\",", "def __str__(self): header = \"\\n\".join( [ f\"AutoNLP Project (id #", "import datetime from typing import Dict, List, Optional from huggingface_hub", "data files to the huggingface hub\"), ) SPLITS = (TRAIN_SPLIT,", "file!\") continue file_name = os.path.basename(file_path) file_extension = file_name.split(\".\")[-1] src =", "' ')}{RESET_TAG}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\", f\"", "\"https://huggingface.co/datasets/\" + self.dataset_id dataset_repo = Repository( local_dir=local_dataset_dir, clone_from=clone_from, use_auth_token=self._token, )", "\"\"\"A file uploaded to an AutoNLP project\"\"\" file_id: int filename:", "== 0: jobs_str = \"🤷 No train jobs started yet!\"", "json_files = resp.json() self.files = [UploadedFile.from_json_resp(file) for file in json_files]", "str user: str task: str status_emoji: str status: str language:", "datetime import datetime from typing import Dict, List, Optional from", "1][0], status=PROJECT_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), dataset_id=json_resp[\"dataset_id\"], language=json_resp[\"config\"][\"language\"], _token=token, )", "http_get(path=f\"/projects/{self.proj_id}/jobs\", token=self._token) json_jobs = resp.json() 
self.training_jobs = [TrainingJob.from_json_resp(job) for job", "os.path.isdir(os.path.join(local_dataset_dir, \"git\")): clone_from = None else: shutil.rmtree(local_dataset_dir) clone_from = \"https://huggingface.co/datasets/\"", "[ job.status_emoji, job.job_id, job.status, job.created_at.strftime(\"%Y-%m-%d %H:%M Z\"), job.updated_at.strftime(\"%Y-%m-%d %H:%M Z\"),", "Z')}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", \"\", ]", "project\" else: if len(self.training_jobs) == 0: jobs_str = \"🤷 No", "to download data files from the huggingface hub\"), (\"❌\", \"Missing", "Registering file {file_name} into project '{file_name}'...\") payload = { \"split\":", "1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join( [ f\"📚", "return \"\\n\".join( [ f\"📚 Model # {self.job_id}\", f\" • {BOLD_TAG}Status{RESET_TAG}:", "= Repository( local_dir=local_dataset_dir, clone_from=clone_from, use_auth_token=self._token, ) dataset_repo.git_pull() for idx, file_path", "[\"❓ Files information unknown, update the project\"] else: if len(self.files)", "\"data_files\": [{\"fname\": file_name, \"username\": self.user}], } http_post(path=f\"/projects/{self.proj_id}/data/add\", payload=payload, token=self._token) logger.info(f\"[{idx", "http_post(path=f\"/projects/{self.proj_id}/data/add\", payload=payload, token=self._token) logger.info(f\"[{idx + 1}/{len(filepaths)}] ✅ Success!\") def train(self):", "file_name.split(\".\")[-1] src = os.path.expanduser(file_path) dst = os.path.join(local_dataset_dir, \"raw\", file_name) logger.info(f\"[{idx", "from the huggingface hub\"), (\"❌\", \"Missing 'train' or 'valid' split", "sorted(self.training_jobs, key=lambda job: job.job_id): model_table.add_row( [ job.status_emoji, job.job_id, job.status, job.created_at.strftime(\"%Y-%m-%d", "files!\") except OSError as err: if \"nothing to commit, 
working", "Optional[List[UploadedFile]] = None training_jobs: Optional[List] = None @classmethod def from_json_resp(cls,", "data files\"), (\"❌\", \"Failed to process data files\"), (\"❌\", \"Failed", "\"git\")): clone_from = None else: shutil.rmtree(local_dataset_dir) clone_from = \"https://huggingface.co/datasets/\" +", "str): \"\"\"Build a Project from the API response, JSON-encoded\"\"\" return", "self.user}], } http_post(path=f\"/projects/{self.proj_id}/data/add\", payload=payload, token=self._token) logger.info(f\"[{idx + 1}/{len(filepaths)}] ✅ Success!\")", "Queued\", \"⚙ In Progress...\", \"✅ Success!\", \"❌ Failed: file not", "json_resp: dict, token: str): \"\"\"Build a Project from the API", "validate_file FILE_STATUS = ( \"☁ Uploaded\", \"⌚ Queued\", \"⚙ In", "+ self.dataset_id else: clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id dataset_repo =", "project '{file_name}'...\") payload = { \"split\": split, \"col_mapping\": col_mapping, \"data_files\":", "is None: jobs_str = \"❓ Models information unknown, update the", "update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] ) @dataclass class UploadedFile: \"\"\"A", "+ self.dataset_id dataset_repo = Repository( local_dir=local_dataset_dir, clone_from=clone_from, use_auth_token=self._token, ) dataset_repo.git_pull()", "# Sort by split descriptions = [str(file) for file in", ".splits import TEST_SPLIT, TRAIN_SPLIT, VALID_SPLIT from .tasks import TASKS from", "%H:%M Z')}\", ] ) @dataclass class UploadedFile: \"\"\"A file uploaded", "went wrong when uploading the files!\") raise for idx, file_path", "\"⌚ Queued\", \"⚙ In Progress...\", \"✅ Success!\", \"❌ Failed: file", "in AutoNLP\"\"\" job_id: int status: str status_emoji: str created_at: datetime", "str status_emoji: str status: str language: str created_at: datetime updated_at:", "from huggingface_hub import Repository from loguru import logger from prettytable", "\"ID\", \"Status\", \"Creation date\", \"Last update\"]) 
for job in sorted(self.training_jobs,", "\"Creation date\", \"Last update\"]) for job in sorted(self.training_jobs, key=lambda job:", "job.job_id, job.status, job.created_at.strftime(\"%Y-%m-%d %H:%M Z\"), job.updated_at.strftime(\"%Y-%m-%d %H:%M Z\"), ] )", "please fix it and re-upload the file.\", ) JOB_STATUS =", "\"data_munging\"), (\"🏃\", \"model_training\"), (\"✅\", \"success\"), (\"❌\", \"failed\"), ) PROJECT_STATUS =", "YELLOW_TAG, http_get, http_post from .validation import validate_file FILE_STATUS = (", "by split descriptions = [str(file) for file in sorted_files] printout.append(", "• {BOLD_TAG}Processing status{RESET_TAG}: {self.processing_status}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M", "* 12 + f\" {BOLD_TAG}Models{RESET_TAG} \" + \"~\" * 11,", "Progress...\", \"✅ Success!\", \"❌ Failed: file not found\", \"❌ Failed:", "+ 1}/{len(filepaths)}] 🔎 Validating {dst} and column mapping...\") validate_file(path=dst, task=self.task,", "it and re-upload the file.\", ) JOB_STATUS = ( (\"⌚\",", "{BOLD_TAG}Name{RESET_TAG}: {PURPLE_TAG}{self.name}{RESET_TAG}\", f\" • {BOLD_TAG}Owner{RESET_TAG}: {GREEN_TAG}{self.user}{RESET_TAG}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {BOLD_TAG}{self.status_emoji}", "project\"] else: if len(self.files) == 0: descriptions = [\"🤷 No", "import Repository from loguru import logger from prettytable import PrettyTable", "(TRAIN_SPLIT, VALID_SPLIT, TEST_SPLIT) @dataclass class TrainingJob: \"\"\"A training job in", "= os.path.join(local_dataset_dir, \"raw\", file_name) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📦 Copying {src}", "\"Failed to process data files\"), (\"❌\", \"Failed to upload processed", "err.args[0]: logger.info(\"❔ Files did not change since last upload!\") dataset_repo.git_push()", "updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join( [ f\"📚 Model #", "Invalid column mapping, please fix it and re-upload the file.\",", "def 
from_json_resp(cls, json_resp: dict, token: str): \"\"\"Build a Project from", "upload!\") dataset_repo.git_push() return logger.error(\"❌ Something went wrong when uploading the", "return cls( file_id=json_resp[\"data_file_id\"], filename=json_resp[\"fname\"], processing_status=FILE_STATUS[json_resp[\"download_status\"] - 1], split=SPLITS[json_resp[\"split\"] - 1],", "split descriptions = [str(file) for file in sorted_files] printout.append( \"\\n\".join(", "import TASKS from .utils import BOLD_TAG, CYAN_TAG, GREEN_TAG, PURPLE_TAG, RESET_TAG,", "clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id else: clone_from = \"https://huggingface.co/datasets/\" +", "{BOLD_TAG}Models{RESET_TAG} \" + \"~\" * 11, \"\", jobs_str])) return \"\\n\".join(printout)", "{self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] ) @dataclass class Project: \"\"\"An AutoNLP", "PrettyTable([\"\", \"ID\", \"Status\", \"Creation date\", \"Last update\"]) for job in", "in sorted(self.training_jobs, key=lambda job: job.job_id): model_table.add_row( [ job.status_emoji, job.job_id, job.status,", "model_table.add_row( [ job.status_emoji, job.job_id, job.status, job.created_at.strftime(\"%Y-%m-%d %H:%M Z\"), job.updated_at.strftime(\"%Y-%m-%d %H:%M", "os.path.expanduser(file_path) dst = os.path.join(local_dataset_dir, \"raw\", file_name) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📦", "TEST_SPLIT, TRAIN_SPLIT, VALID_SPLIT from .tasks import TASKS from .utils import", "{src} to {dst}...\") os.makedirs(os.path.dirname(dst), exist_ok=True) shutil.copyfile(src, dst) logger.info(f\"[{idx + 1}/{len(filepaths)}]", "- 1], split=SPLITS[json_resp[\"split\"] - 1], col_mapping=json_resp[\"col_mapping\"], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def", "'{file_name}'...\") payload = { \"split\": split, \"col_mapping\": col_mapping, \"data_files\": [{\"fname\":", "f\" • {BOLD_TAG}Owner{RESET_TAG}: 
{GREEN_TAG}{self.user}{RESET_TAG}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {BOLD_TAG}{self.status_emoji} {self.status}{RESET_TAG}\", f\"", "files!\") raise for idx, file_path in enumerate(filepaths): file_name = os.path.basename(file_path)", "= ( (\"✨\", \"Created\"), (\"🚀\", \"Data processing started\"), (\"✅\", \"Data", "f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", \"\", ] )", "Files did not change since last upload!\") dataset_repo.git_push() return logger.error(\"❌", "= http_get(path=f\"/projects/{self.proj_id}/jobs\", token=self._token) json_jobs = resp.json() self.training_jobs = [TrainingJob.from_json_resp(job) for", "{ \"split\": split, \"col_mapping\": col_mapping, \"data_files\": [{\"fname\": file_name, \"username\": self.user}],", "14, \"\", \"Dataset ID:\", f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\", \"\", ] + descriptions )", "import dataclass from datetime import datetime from typing import Dict,", "TrainingJob: \"\"\"A training job in AutoNLP\"\"\" job_id: int status: str", "local_dataset_dir = os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\") if os.path.exists(local_dataset_dir): if os.path.isdir(os.path.join(local_dataset_dir, \"git\")): clone_from =", "json_jobs] def upload(self, filepaths: List[str], split: str, col_mapping: Dict[str, str]):", "Success!\", \"❌ Failed: file not found\", \"❌ Failed: unsupported file", "@classmethod def from_json_resp(cls, json_resp: dict, token: str): \"\"\"Build a Project", "No train jobs started yet!\" else: model_table = PrettyTable([\"\", \"ID\",", "uploaded files information...\") resp = http_get(path=f\"/projects/{self.proj_id}/data\", token=self._token) json_files = resp.json()", "exist_ok=True) shutil.copyfile(src, dst) logger.info(f\"[{idx + 1}/{len(filepaths)}] 🔎 Validating {dst} and", "{BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] ) @dataclass class UploadedFile:", "attached to the project\"\"\" 
logger.info(\"🔄 Refreshing uploaded files information...\") resp", "if not os.path.isfile(file_path): logger.error(f\"[{idx + 1}/{len(filepaths)}] ❌ '{file_path}' does not", "Z')}\", ] ) @dataclass class UploadedFile: \"\"\"A file uploaded to", "status=PROJECT_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), dataset_id=json_resp[\"dataset_id\"], language=json_resp[\"config\"][\"language\"], _token=token, ) def", "\"\"\"Build a Project from the API response, JSON-encoded\"\"\" return cls(", "self.training_jobs = [TrainingJob.from_json_resp(job) for job in json_jobs] def upload(self, filepaths:", "# Training jobs information if self.training_jobs is None: jobs_str =", "clone_from = None else: shutil.rmtree(local_dataset_dir) clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id", "\"\"\"An AutoNLP project\"\"\" _token: str proj_id: int name: str user:", "logger.error(\"❌ Something went wrong when uploading the files!\") raise for", "Dict[str, str] created_at: datetime updated_at: datetime @classmethod def from_json_resp(cls, json_resp:", "(\"⚙\", \"data_munging\"), (\"🏃\", \"model_training\"), (\"✅\", \"success\"), (\"❌\", \"failed\"), ) PROJECT_STATUS", "sorted_files = sorted(self.files, key=lambda file: file.split) # Sort by split", "__str__(self): return \"\\n\".join( [ f\"📚 Model # {self.job_id}\", f\" •", "UploadedFile: \"\"\"A file uploaded to an AutoNLP project\"\"\" file_id: int", "%H:%M Z')}\", \"\", ] ) printout = [header] # Uploaded", "\"\", ] ) printout = [header] # Uploaded files information", "AutoNLP project\"\"\" file_id: int filename: str processing_status: str split: str", "[str(file) for file in sorted_files] printout.append( \"\\n\".join( [ \"~\" *", ") # Training jobs information if self.training_jobs is None: jobs_str", "clean\" in err.args[0]: logger.info(\"❔ Files did not change since last", "status{RESET_TAG}: 
{self.processing_status}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ]", "1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), dataset_id=json_resp[\"dataset_id\"], language=json_resp[\"config\"][\"language\"], _token=token, ) def refresh(self): \"\"\"Update", "AutoNLP CLI\") logger.info(\"✅ Successfully uploaded the files!\") except OSError as", "SPLITS = (TRAIN_SPLIT, VALID_SPLIT, TEST_SPLIT) @dataclass class TrainingJob: \"\"\"A training", "the file.\", ) JOB_STATUS = ( (\"⌚\", \"queued\"), (\"🚀\", \"start\"),", "file_name) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📦 Copying {src} to {dst}...\") os.makedirs(os.path.dirname(dst),", "@classmethod def from_json_resp(cls, json_resp: dict): return cls( file_id=json_resp[\"data_file_id\"], filename=json_resp[\"fname\"], processing_status=FILE_STATUS[json_resp[\"download_status\"]", "dict): return cls( file_id=json_resp[\"data_file_id\"], filename=json_resp[\"fname\"], processing_status=FILE_STATUS[json_resp[\"download_status\"] - 1], split=SPLITS[json_resp[\"split\"] -", "printout.append( \"\\n\".join( [ \"~\" * 14 + f\" {BOLD_TAG}Files{RESET_TAG} \"", "to the huggingface hub\"), ) SPLITS = (TRAIN_SPLIT, VALID_SPLIT, TEST_SPLIT)", "] ) @dataclass class UploadedFile: \"\"\"A file uploaded to an", "(\"🚀\", \"start\"), (\"⚙\", \"data_munging\"), (\"🏃\", \"model_training\"), (\"✅\", \"success\"), (\"❌\", \"failed\"),", "file uploaded to an AutoNLP project\"\"\" file_id: int filename: str", "http_get(path=f\"/projects/{self.proj_id}/data/start_process\", token=self._token) logger.info(\"🔥🔥 Training started!\") def __str__(self): header = \"\\n\".join(", "\"raw\", file_name) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📦 Copying {src} to {dst}...\")", "+ descriptions ) ) # Training jobs information if self.training_jobs", "= [\"🤷 No files uploaded yet!\"] else: sorted_files = sorted(self.files,", 
"for file in json_files] logger.info(\"🔄 Refreshing models information...\") resp =", "to commit, working tree clean\" in err.args[0]: logger.info(\"❔ Files did", "http_post from .validation import validate_file FILE_STATUS = ( \"☁ Uploaded\",", "project\"\"\" file_id: int filename: str processing_status: str split: str col_mapping:", "if os.path.isdir(os.path.join(local_dataset_dir, \"git\")): clone_from = None else: shutil.rmtree(local_dataset_dir) clone_from =", "\"split\": split, \"col_mapping\": col_mapping, \"data_files\": [{\"fname\": file_name, \"username\": self.user}], }", "{GREEN_TAG}{self.user}{RESET_TAG}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {BOLD_TAG}{self.status_emoji} {self.status}{RESET_TAG}\", f\" • {BOLD_TAG}Task{RESET_TAG}: {YELLOW_TAG}{self.task.title().replace('_',", "str status: str language: str created_at: datetime updated_at: datetime dataset_id:", ") @dataclass class UploadedFile: \"\"\"A file uploaded to an AutoNLP", "Z')}\", ] ) @dataclass class Project: \"\"\"An AutoNLP project\"\"\" _token:", "str, col_mapping: Dict[str, str]): \"\"\"Uploads files to the project\"\"\" local_dataset_dir", "information...\") resp = http_get(path=f\"/projects/{self.proj_id}/jobs\", token=self._token) json_jobs = resp.json() self.training_jobs =", "\"username\": self.user}], } http_post(path=f\"/projects/{self.proj_id}/data/add\", payload=payload, token=self._token) logger.info(f\"[{idx + 1}/{len(filepaths)}] ✅", "❌ '{file_path}' does not exist or is not a file!\")", "the project\"] else: if len(self.files) == 0: descriptions = [\"🤷", "JOB_STATUS = ( (\"⌚\", \"queued\"), (\"🚀\", \"start\"), (\"⚙\", \"data_munging\"), (\"🏃\",", "files and models attached to the project\"\"\" logger.info(\"🔄 Refreshing uploaded", "files from the huggingface hub\"), (\"❌\", \"Missing 'train' or 'valid'", "filename=json_resp[\"fname\"], processing_status=FILE_STATUS[json_resp[\"download_status\"] - 1], split=SPLITS[json_resp[\"split\"] - 1], 
col_mapping=json_resp[\"col_mapping\"], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]),", "f\" • {BOLD_TAG}Task{RESET_TAG}: {YELLOW_TAG}{self.task.title().replace('_', ' ')}{RESET_TAG}\", f\" • {BOLD_TAG}Created at{RESET_TAG}:", "started\"), (\"✅\", \"Data processing successful\"), (\"❌\", \"Failed to download data", "in data files\"), (\"❌\", \"Failed to process data files\"), (\"❌\",", "try: logger.info(\"☁ Uploading files to the dataset hub...\") dataset_repo.push_to_hub(commit_message=\"Upload from", "@dataclass class Project: \"\"\"An AutoNLP project\"\"\" _token: str proj_id: int", "update the project\"] else: if len(self.files) == 0: descriptions =", "else: sorted_files = sorted(self.files, key=lambda file: file.split) # Sort by", "datetime @classmethod def from_json_resp(cls, json_resp: dict): return cls( job_id=json_resp[\"id\"], status_emoji=JOB_STATUS[json_resp[\"status\"]", "{file_name} into project '{file_name}'...\") payload = { \"split\": split, \"col_mapping\":", "download data files from the huggingface hub\"), (\"❌\", \"Missing 'train'", ") @dataclass class Project: \"\"\"An AutoNLP project\"\"\" _token: str proj_id:", "logger.info(\"❔ Files did not change since last upload!\") dataset_repo.git_push() return", "job.status_emoji, job.job_id, job.status, job.created_at.strftime(\"%Y-%m-%d %H:%M Z\"), job.updated_at.strftime(\"%Y-%m-%d %H:%M Z\"), ]", "%H:%M Z\"), job.updated_at.strftime(\"%Y-%m-%d %H:%M Z\"), ] ) jobs_str = str(model_table)", "processing started\"), (\"✅\", \"Data processing successful\"), (\"❌\", \"Failed to download", "= file_name.split(\".\")[-1] src = os.path.expanduser(file_path) dst = os.path.join(local_dataset_dir, \"raw\", file_name)", "if self.files is None: descriptions = [\"❓ Files information unknown,", "{self.processing_status}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] )", "• 
{BOLD_TAG}Name{RESET_TAG}: {PURPLE_TAG}{self.name}{RESET_TAG}\", f\" • {BOLD_TAG}Owner{RESET_TAG}: {GREEN_TAG}{self.user}{RESET_TAG}\", f\" • {BOLD_TAG}Status{RESET_TAG}:", "str status_emoji: str created_at: datetime updated_at: datetime @classmethod def from_json_resp(cls,", "updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join( [ f\"📁 {CYAN_TAG}{self.filename}{RESET_TAG} (id", "ID:\", f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\", \"\", ] + descriptions ) ) # Training", "process data files\"), (\"❌\", \"Failed to upload processed data files", "f\" • {BOLD_TAG}Name{RESET_TAG}: {PURPLE_TAG}{self.name}{RESET_TAG}\", f\" • {BOLD_TAG}Owner{RESET_TAG}: {GREEN_TAG}{self.user}{RESET_TAG}\", f\" •", "import PrettyTable from .splits import TEST_SPLIT, TRAIN_SPLIT, VALID_SPLIT from .tasks", "\"Data processing started\"), (\"✅\", \"Data processing successful\"), (\"❌\", \"Failed to", ") SPLITS = (TRAIN_SPLIT, VALID_SPLIT, TEST_SPLIT) @dataclass class TrainingJob: \"\"\"A", "uploaded to an AutoNLP project\"\"\" file_id: int filename: str processing_status:", "dataset_repo.git_push() return logger.error(\"❌ Something went wrong when uploading the files!\")", "re-upload the file.\", ) JOB_STATUS = ( (\"⌚\", \"queued\"), (\"🚀\",", "sorted(self.files, key=lambda file: file.split) # Sort by split descriptions =", "\"\"\"A training job in AutoNLP\"\"\" job_id: int status: str status_emoji:", "and re-upload the file.\", ) JOB_STATUS = ( (\"⌚\", \"queued\"),", "dataset hub...\") dataset_repo.push_to_hub(commit_message=\"Upload from AutoNLP CLI\") logger.info(\"✅ Successfully uploaded the", "col_mapping: Dict[str, str] created_at: datetime updated_at: datetime @classmethod def from_json_resp(cls,", "Project (id # {self.proj_id})\", \"~\" * 35, f\" • {BOLD_TAG}Name{RESET_TAG}:", "except OSError as err: if \"nothing to commit, working tree", "int status: str status_emoji: str created_at: datetime updated_at: datetime @classmethod", "- 
1][0], status=PROJECT_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), dataset_id=json_resp[\"dataset_id\"], language=json_resp[\"config\"][\"language\"], _token=token,", "str] created_at: datetime updated_at: datetime @classmethod def from_json_resp(cls, json_resp: dict):", "• {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", \"\", ] ) printout", "* 14, \"\", \"Dataset ID:\", f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\", \"\", ] + descriptions", "to an AutoNLP project\"\"\" file_id: int filename: str processing_status: str", "= os.path.basename(file_path) file_extension = file_name.split(\".\")[-1] src = os.path.expanduser(file_path) dst =", "(\"❌\", \"Failed to upload processed data files to the huggingface", "to the project\"\"\" logger.info(\"🔄 Refreshing uploaded files information...\") resp =", "return \"\\n\".join( [ f\"📁 {CYAN_TAG}{self.filename}{RESET_TAG} (id # {self.file_id})\", f\" •", "# {self.proj_id})\", \"~\" * 35, f\" • {BOLD_TAG}Name{RESET_TAG}: {PURPLE_TAG}{self.name}{RESET_TAG}\", f\"", "language=json_resp[\"config\"][\"language\"], _token=token, ) def refresh(self): \"\"\"Update information about uploaded files", "from the API response, JSON-encoded\"\"\" return cls( proj_id=json_resp[\"id\"], name=json_resp[\"proj_name\"], user=json_resp[\"username\"],", "(\"❌\", \"Missing 'train' or 'valid' split in data files\"), (\"❌\",", "None else: shutil.rmtree(local_dataset_dir) clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id else: clone_from", "No files uploaded yet!\"] else: sorted_files = sorted(self.files, key=lambda file:", "\"\", ] + descriptions ) ) # Training jobs information", ") ) # Training jobs information if self.training_jobs is None:", "\"☁ Uploaded\", \"⌚ Queued\", \"⚙ In Progress...\", \"✅ Success!\", \"❌", "'train' or 'valid' split in data files\"), (\"❌\", \"Failed to", 
"not change since last upload!\") dataset_repo.git_push() return logger.error(\"❌ Something went", "proj_id: int name: str user: str task: str status_emoji: str", "update\"]) for job in sorted(self.training_jobs, key=lambda job: job.job_id): model_table.add_row( [", "} http_post(path=f\"/projects/{self.proj_id}/data/add\", payload=payload, token=self._token) logger.info(f\"[{idx + 1}/{len(filepaths)}] ✅ Success!\") def", "import TEST_SPLIT, TRAIN_SPLIT, VALID_SPLIT from .tasks import TASKS from .utils", "updated_at: datetime @classmethod def from_json_resp(cls, json_resp: dict): return cls( file_id=json_resp[\"data_file_id\"],", "did not change since last upload!\") dataset_repo.git_push() return logger.error(\"❌ Something", "\"❌ Failed: unsupported file type\", \"❌ Failed: server error\", \"❌", "TEST_SPLIT) @dataclass class TrainingJob: \"\"\"A training job in AutoNLP\"\"\" job_id:", "processing_status: str split: str col_mapping: Dict[str, str] created_at: datetime updated_at:", "(id # {self.file_id})\", f\" • {BOLD_TAG}Split{RESET_TAG}: {self.split}\", f\" • {BOLD_TAG}Processing", "created_at: datetime updated_at: datetime @classmethod def from_json_resp(cls, json_resp: dict): return", "for job in sorted(self.training_jobs, key=lambda job: job.job_id): model_table.add_row( [ job.status_emoji,", "files to the dataset hub...\") dataset_repo.push_to_hub(commit_message=\"Upload from AutoNLP CLI\") logger.info(\"✅", "Repository( local_dir=local_dataset_dir, clone_from=clone_from, use_auth_token=self._token, ) dataset_repo.git_pull() for idx, file_path in", "📦 Copying {src} to {dst}...\") os.makedirs(os.path.dirname(dst), exist_ok=True) shutil.copyfile(src, dst) logger.info(f\"[{idx", "for file in sorted_files] printout.append( \"\\n\".join( [ \"~\" * 14", "processed data files to the huggingface hub\"), ) SPLITS =", "= http_get(path=f\"/projects/{self.proj_id}/data\", token=self._token) json_files = resp.json() self.files = [UploadedFile.from_json_resp(file) for", 
"descriptions ) ) # Training jobs information if self.training_jobs is", "@classmethod def from_json_resp(cls, json_resp: dict): return cls( job_id=json_resp[\"id\"], status_emoji=JOB_STATUS[json_resp[\"status\"] -", "{dst}...\") os.makedirs(os.path.dirname(dst), exist_ok=True) shutil.copyfile(src, dst) logger.info(f\"[{idx + 1}/{len(filepaths)}] 🔎 Validating", "(\"🚀\", \"Data processing started\"), (\"✅\", \"Data processing successful\"), (\"❌\", \"Failed", "str processing_status: str split: str col_mapping: Dict[str, str] created_at: datetime", "json_resp: dict): return cls( file_id=json_resp[\"data_file_id\"], filename=json_resp[\"fname\"], processing_status=FILE_STATUS[json_resp[\"download_status\"] - 1], split=SPLITS[json_resp[\"split\"]", "Failed: server error\", \"❌ Invalid column mapping, please fix it", "= None @classmethod def from_json_resp(cls, json_resp: dict, token: str): \"\"\"Build", "file_path in enumerate(filepaths): file_name = os.path.basename(file_path) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📁", "job_id: int status: str status_emoji: str created_at: datetime updated_at: datetime", "and column mapping...\") validate_file(path=dst, task=self.task, file_ext=file_extension, col_mapping=col_mapping) dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"]) dataset_repo.git_pull() try:", ") printout = [header] # Uploaded files information if self.files", ") JOB_STATUS = ( (\"⌚\", \"queued\"), (\"🚀\", \"start\"), (\"⚙\", \"data_munging\"),", "TASKS.keys()))[0], status_emoji=PROJECT_STATUS[json_resp[\"status\"] - 1][0], status=PROJECT_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), dataset_id=json_resp[\"dataset_id\"],", "resp = http_get(path=f\"/projects/{self.proj_id}/jobs\", token=self._token) json_jobs = resp.json() self.training_jobs = [TrainingJob.from_json_resp(job)", "• {BOLD_TAG}Status{RESET_TAG}: 
{BOLD_TAG}{self.status_emoji} {self.status}{RESET_TAG}\", f\" • {BOLD_TAG}Task{RESET_TAG}: {YELLOW_TAG}{self.task.title().replace('_', ' ')}{RESET_TAG}\",", "Models information unknown, update the project\" else: if len(self.training_jobs) ==", "status_emoji: str status: str language: str created_at: datetime updated_at: datetime", "PrettyTable from .splits import TEST_SPLIT, TRAIN_SPLIT, VALID_SPLIT from .tasks import", "+ f\" {BOLD_TAG}Models{RESET_TAG} \" + \"~\" * 11, \"\", jobs_str]))", "Validating {dst} and column mapping...\") validate_file(path=dst, task=self.task, file_ext=file_extension, col_mapping=col_mapping) dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"])", "] ) @dataclass class Project: \"\"\"An AutoNLP project\"\"\" _token: str", "str created_at: datetime updated_at: datetime @classmethod def from_json_resp(cls, json_resp: dict):", "hub\"), (\"❌\", \"Missing 'train' or 'valid' split in data files\"),", "{self.split}\", f\" • {BOLD_TAG}Processing status{RESET_TAG}: {self.processing_status}\", f\" • {BOLD_TAG}Last update{RESET_TAG}:", "f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\", f\" • {BOLD_TAG}Last", "json_jobs = resp.json() self.training_jobs = [TrainingJob.from_json_resp(job) for job in json_jobs]", "Training started!\") def __str__(self): header = \"\\n\".join( [ f\"AutoNLP Project", "job in sorted(self.training_jobs, key=lambda job: job.job_id): model_table.add_row( [ job.status_emoji, job.job_id,", "col_mapping: Dict[str, str]): \"\"\"Uploads files to the project\"\"\" local_dataset_dir =", "the project\" else: if len(self.training_jobs) == 0: jobs_str = \"🤷", "file in json_files] logger.info(\"🔄 Refreshing models information...\") resp = http_get(path=f\"/projects/{self.proj_id}/jobs\",", "(\"❌\", \"failed\"), ) PROJECT_STATUS = ( (\"✨\", \"Created\"), (\"🚀\", \"Data", "( (\"✨\", \"Created\"), (\"🚀\", \"Data processing started\"), (\"✅\", \"Data processing", "== json_resp[\"task\"], 
TASKS.keys()))[0], status_emoji=PROJECT_STATUS[json_resp[\"status\"] - 1][0], status=PROJECT_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]),", "file_name = os.path.basename(file_path) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📁 Registering file {file_name}", "{BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", \"\", ] ) printout =", "{dst} and column mapping...\") validate_file(path=dst, task=self.task, file_ext=file_extension, col_mapping=col_mapping) dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"]) dataset_repo.git_pull()", "str col_mapping: Dict[str, str] created_at: datetime updated_at: datetime @classmethod def", "\"\\n\".join( [ f\"📚 Model # {self.job_id}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {self.status_emoji}", "descriptions = [\"🤷 No files uploaded yet!\"] else: sorted_files =", "Optional from huggingface_hub import Repository from loguru import logger from", "+ 1}/{len(filepaths)}] 📁 Registering file {file_name} into project '{file_name}'...\") payload", "+ f\" {BOLD_TAG}Files{RESET_TAG} \" + \"~\" * 14, \"\", \"Dataset", "key: TASKS[key] == json_resp[\"task\"], TASKS.keys()))[0], status_emoji=PROJECT_STATUS[json_resp[\"status\"] - 1][0], status=PROJECT_STATUS[json_resp[\"status\"] -", "models information...\") resp = http_get(path=f\"/projects/{self.proj_id}/jobs\", token=self._token) json_jobs = resp.json() self.training_jobs", "token=self._token) logger.info(\"🔥🔥 Training started!\") def __str__(self): header = \"\\n\".join( [", "\"❓ Models information unknown, update the project\" else: if len(self.training_jobs)", "task=list(filter(lambda key: TASKS[key] == json_resp[\"task\"], TASKS.keys()))[0], status_emoji=PROJECT_STATUS[json_resp[\"status\"] - 1][0], status=PROJECT_STATUS[json_resp[\"status\"]", "uploaded files and models attached to the project\"\"\" logger.info(\"🔄 Refreshing", "local_dir=local_dataset_dir, clone_from=clone_from, 
use_auth_token=self._token, ) dataset_repo.git_pull() for idx, file_path in enumerate(filepaths):", "[ \"~\" * 14 + f\" {BOLD_TAG}Files{RESET_TAG} \" + \"~\"", "dataset_repo.push_to_hub(commit_message=\"Upload from AutoNLP CLI\") logger.info(\"✅ Successfully uploaded the files!\") except", "a file!\") continue file_name = os.path.basename(file_path) file_extension = file_name.split(\".\")[-1] src", "jobs information if self.training_jobs is None: jobs_str = \"❓ Models", "an AutoNLP project\"\"\" file_id: int filename: str processing_status: str split:", "Project from the API response, JSON-encoded\"\"\" return cls( proj_id=json_resp[\"id\"], name=json_resp[\"proj_name\"],", "idx, file_path in enumerate(filepaths): if not os.path.isfile(file_path): logger.error(f\"[{idx + 1}/{len(filepaths)}]", "created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), dataset_id=json_resp[\"dataset_id\"], language=json_resp[\"config\"][\"language\"], _token=token, ) def refresh(self): \"\"\"Update information", "str(model_table) printout.append(\"\\n\".join([\"\", \"~\" * 12 + f\" {BOLD_TAG}Models{RESET_TAG} \" +", "• {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] ) @dataclass class", "] + descriptions ) ) # Training jobs information if", "1}/{len(filepaths)}] 📦 Copying {src} to {dst}...\") os.makedirs(os.path.dirname(dst), exist_ok=True) shutil.copyfile(src, dst)", "found\", \"❌ Failed: unsupported file type\", \"❌ Failed: server error\",", "uploading the files!\") raise for idx, file_path in enumerate(filepaths): file_name", "uploaded yet!\"] else: sorted_files = sorted(self.files, key=lambda file: file.split) #", "\"Status\", \"Creation date\", \"Last update\"]) for job in sorted(self.training_jobs, key=lambda", "if self.training_jobs is None: jobs_str = \"❓ Models information unknown,", "validate_file(path=dst, task=self.task, file_ext=file_extension, 
col_mapping=col_mapping) dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"]) dataset_repo.git_pull() try: logger.info(\"☁ Uploading files", "f\" • {BOLD_TAG}Status{RESET_TAG}: {BOLD_TAG}{self.status_emoji} {self.status}{RESET_TAG}\", f\" • {BOLD_TAG}Task{RESET_TAG}: {YELLOW_TAG}{self.task.title().replace('_', '", "files\"), (\"❌\", \"Failed to process data files\"), (\"❌\", \"Failed to", "int name: str user: str task: str status_emoji: str status:", "{BOLD_TAG}Task{RESET_TAG}: {YELLOW_TAG}{self.task.title().replace('_', ' ')}{RESET_TAG}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M", "split=SPLITS[json_resp[\"split\"] - 1], col_mapping=json_resp[\"col_mapping\"], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return", "dataclass from datetime import datetime from typing import Dict, List,", "not found\", \"❌ Failed: unsupported file type\", \"❌ Failed: server", "from datetime import datetime from typing import Dict, List, Optional", "{BOLD_TAG}Status{RESET_TAG}: {self.status_emoji} {self.status}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\",", "f\" • {BOLD_TAG}Processing status{RESET_TAG}: {self.processing_status}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d", "shutil from dataclasses import dataclass from datetime import datetime from", "printout.append(\"\\n\".join([\"\", \"~\" * 12 + f\" {BOLD_TAG}Models{RESET_TAG} \" + \"~\"", "1}/{len(filepaths)}] ❌ '{file_path}' does not exist or is not a", "List[str], split: str, col_mapping: Dict[str, str]): \"\"\"Uploads files to the", "from .validation import validate_file FILE_STATUS = ( \"☁ Uploaded\", \"⌚", "dst) logger.info(f\"[{idx + 1}/{len(filepaths)}] 🔎 Validating {dst} and column mapping...\")", "updated_at: datetime @classmethod def from_json_resp(cls, json_resp: dict): return cls( 
job_id=json_resp[\"id\"],", "return cls( job_id=json_resp[\"id\"], status_emoji=JOB_STATUS[json_resp[\"status\"] - 1][0], status=JOB_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]),", "= \"https://huggingface.co/datasets/\" + self.dataset_id dataset_repo = Repository( local_dir=local_dataset_dir, clone_from=clone_from, use_auth_token=self._token,", "0: jobs_str = \"🤷 No train jobs started yet!\" else:", "the files!\") except OSError as err: if \"nothing to commit,", "f\"AutoNLP Project (id # {self.proj_id})\", \"~\" * 35, f\" •", "payload=payload, token=self._token) logger.info(f\"[{idx + 1}/{len(filepaths)}] ✅ Success!\") def train(self): \"\"\"Starts", "payload = { \"split\": split, \"col_mapping\": col_mapping, \"data_files\": [{\"fname\": file_name,", "_token: str proj_id: int name: str user: str task: str", "header = \"\\n\".join( [ f\"AutoNLP Project (id # {self.proj_id})\", \"~\"", "training on the models\"\"\" http_get(path=f\"/projects/{self.proj_id}/data/start_process\", token=self._token) logger.info(\"🔥🔥 Training started!\") def", "or 'valid' split in data files\"), (\"❌\", \"Failed to process", "import validate_file FILE_STATUS = ( \"☁ Uploaded\", \"⌚ Queued\", \"⚙", "file_name = os.path.basename(file_path) file_extension = file_name.split(\".\")[-1] src = os.path.expanduser(file_path) dst", "🔎 Validating {dst} and column mapping...\") validate_file(path=dst, task=self.task, file_ext=file_extension, col_mapping=col_mapping)", "def from_json_resp(cls, json_resp: dict): return cls( file_id=json_resp[\"data_file_id\"], filename=json_resp[\"fname\"], processing_status=FILE_STATUS[json_resp[\"download_status\"] -", "commit, working tree clean\" in err.args[0]: logger.info(\"❔ Files did not", ") def refresh(self): \"\"\"Update information about uploaded files and models", "started yet!\" else: model_table = PrettyTable([\"\", \"ID\", \"Status\", \"Creation date\",", "\"https://huggingface.co/datasets/\" + 
self.dataset_id else: clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id dataset_repo", "= ( (\"⌚\", \"queued\"), (\"🚀\", \"start\"), (\"⚙\", \"data_munging\"), (\"🏃\", \"model_training\"),", "(\"✨\", \"Created\"), (\"🚀\", \"Data processing started\"), (\"✅\", \"Data processing successful\"),", "cls( file_id=json_resp[\"data_file_id\"], filename=json_resp[\"fname\"], processing_status=FILE_STATUS[json_resp[\"download_status\"] - 1], split=SPLITS[json_resp[\"split\"] - 1], col_mapping=json_resp[\"col_mapping\"],", "+ 1}/{len(filepaths)}] 📦 Copying {src} to {dst}...\") os.makedirs(os.path.dirname(dst), exist_ok=True) shutil.copyfile(src,", "server error\", \"❌ Invalid column mapping, please fix it and", "{BOLD_TAG}Files{RESET_TAG} \" + \"~\" * 14, \"\", \"Dataset ID:\", f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\",", "task=self.task, file_ext=file_extension, col_mapping=col_mapping) dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"]) dataset_repo.git_pull() try: logger.info(\"☁ Uploading files to", "f\" • {BOLD_TAG}Status{RESET_TAG}: {self.status_emoji} {self.status}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d", "\"\"\"Starts training on the models\"\"\" http_get(path=f\"/projects/{self.proj_id}/data/start_process\", token=self._token) logger.info(\"🔥🔥 Training started!\")", "= [UploadedFile.from_json_resp(file) for file in json_files] logger.info(\"🔄 Refreshing models information...\")", "\"nothing to commit, working tree clean\" in err.args[0]: logger.info(\"❔ Files", "self.dataset_id else: clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id dataset_repo = Repository(", "proj_id=json_resp[\"id\"], name=json_resp[\"proj_name\"], user=json_resp[\"username\"], task=list(filter(lambda key: TASKS[key] == json_resp[\"task\"], TASKS.keys()))[0], status_emoji=PROJECT_STATUS[json_resp[\"status\"]", "filepaths: List[str], split: str, col_mapping: Dict[str, str]): \"\"\"Uploads files to", 
"AutoNLP\"\"\" job_id: int status: str status_emoji: str created_at: datetime updated_at:", "status_emoji=PROJECT_STATUS[json_resp[\"status\"] - 1][0], status=PROJECT_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), dataset_id=json_resp[\"dataset_id\"], language=json_resp[\"config\"][\"language\"],", "class UploadedFile: \"\"\"A file uploaded to an AutoNLP project\"\"\" file_id:", "import shutil from dataclasses import dataclass from datetime import datetime", "os.makedirs(os.path.dirname(dst), exist_ok=True) shutil.copyfile(src, dst) logger.info(f\"[{idx + 1}/{len(filepaths)}] 🔎 Validating {dst}", "BOLD_TAG, CYAN_TAG, GREEN_TAG, PURPLE_TAG, RESET_TAG, YELLOW_TAG, http_get, http_post from .validation", "= resp.json() self.training_jobs = [TrainingJob.from_json_resp(job) for job in json_jobs] def", "'valid' split in data files\"), (\"❌\", \"Failed to process data", "None @classmethod def from_json_resp(cls, json_resp: dict, token: str): \"\"\"Build a", "CLI\") logger.info(\"✅ Successfully uploaded the files!\") except OSError as err:", "# {self.job_id}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {self.status_emoji} {self.status}\", f\" • {BOLD_TAG}Created", "{BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d", "• {BOLD_TAG}Task{RESET_TAG}: {YELLOW_TAG}{self.task.title().replace('_', ' ')}{RESET_TAG}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d", "files to the project\"\"\" local_dataset_dir = os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\") if os.path.exists(local_dataset_dir): if", "file_id: int filename: str processing_status: str split: str col_mapping: Dict[str,", "language: str created_at: datetime updated_at: datetime dataset_id: str files: Optional[List[UploadedFile]]", "logger.info(\"🔥🔥 
Training started!\") def __str__(self): header = \"\\n\".join( [ f\"AutoNLP", "• {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\", f\" • {BOLD_TAG}Last update{RESET_TAG}:", "the huggingface hub\"), (\"❌\", \"Missing 'train' or 'valid' split in", "user=json_resp[\"username\"], task=list(filter(lambda key: TASKS[key] == json_resp[\"task\"], TASKS.keys()))[0], status_emoji=PROJECT_STATUS[json_resp[\"status\"] - 1][0],", "(\"❌\", \"Failed to process data files\"), (\"❌\", \"Failed to upload", "dataset_repo.git_pull() for idx, file_path in enumerate(filepaths): if not os.path.isfile(file_path): logger.error(f\"[{idx", "Dict[str, str]): \"\"\"Uploads files to the project\"\"\" local_dataset_dir = os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\")", "VALID_SPLIT, TEST_SPLIT) @dataclass class TrainingJob: \"\"\"A training job in AutoNLP\"\"\"", "idx, file_path in enumerate(filepaths): file_name = os.path.basename(file_path) logger.info(f\"[{idx + 1}/{len(filepaths)}]", "== 0: descriptions = [\"🤷 No files uploaded yet!\"] else:", "in err.args[0]: logger.info(\"❔ Files did not change since last upload!\")", "task: str status_emoji: str status: str language: str created_at: datetime", "files uploaded yet!\"] else: sorted_files = sorted(self.files, key=lambda file: file.split)", "successful\"), (\"❌\", \"Failed to download data files from the huggingface", "Z\"), job.updated_at.strftime(\"%Y-%m-%d %H:%M Z\"), ] ) jobs_str = str(model_table) printout.append(\"\\n\".join([\"\",", "json_resp: dict): return cls( job_id=json_resp[\"id\"], status_emoji=JOB_STATUS[json_resp[\"status\"] - 1][0], status=JOB_STATUS[json_resp[\"status\"] -", "in enumerate(filepaths): if not os.path.isfile(file_path): logger.error(f\"[{idx + 1}/{len(filepaths)}] ❌ '{file_path}'", "column mapping, please fix it and re-upload the file.\", )", "%H:%M Z\"), ] ) jobs_str = str(model_table) printout.append(\"\\n\".join([\"\", \"~\" *", "= None 
training_jobs: Optional[List] = None @classmethod def from_json_resp(cls, json_resp:", "token=self._token) json_jobs = resp.json() self.training_jobs = [TrainingJob.from_json_resp(job) for job in", "self.dataset_id dataset_repo = Repository( local_dir=local_dataset_dir, clone_from=clone_from, use_auth_token=self._token, ) dataset_repo.git_pull() for", "in json_files] logger.info(\"🔄 Refreshing models information...\") resp = http_get(path=f\"/projects/{self.proj_id}/jobs\", token=self._token)", "None: descriptions = [\"❓ Files information unknown, update the project\"]", "1}/{len(filepaths)}] 🔎 Validating {dst} and column mapping...\") validate_file(path=dst, task=self.task, file_ext=file_extension,", "_token=token, ) def refresh(self): \"\"\"Update information about uploaded files and", "%H:%M Z')}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ]", "= os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\") if os.path.exists(local_dataset_dir): if os.path.isdir(os.path.join(local_dataset_dir, \"git\")): clone_from = None", "= os.path.expanduser(file_path) dst = os.path.join(local_dataset_dir, \"raw\", file_name) logger.info(f\"[{idx + 1}/{len(filepaths)}]", "Training jobs information if self.training_jobs is None: jobs_str = \"❓", "os.path.basename(file_path) file_extension = file_name.split(\".\")[-1] src = os.path.expanduser(file_path) dst = os.path.join(local_dataset_dir,", "\"success\"), (\"❌\", \"failed\"), ) PROJECT_STATUS = ( (\"✨\", \"Created\"), (\"🚀\",", "yet!\"] else: sorted_files = sorted(self.files, key=lambda file: file.split) # Sort", "training_jobs: Optional[List] = None @classmethod def from_json_resp(cls, json_resp: dict, token:", "mapping, please fix it and re-upload the file.\", ) JOB_STATUS", "from prettytable import PrettyTable from .splits import TEST_SPLIT, TRAIN_SPLIT, VALID_SPLIT", "file_name, \"username\": self.user}], } http_post(path=f\"/projects/{self.proj_id}/data/add\", 
payload=payload, token=self._token) logger.info(f\"[{idx + 1}/{len(filepaths)}]", "@dataclass class TrainingJob: \"\"\"A training job in AutoNLP\"\"\" job_id: int", "dataset_id: str files: Optional[List[UploadedFile]] = None training_jobs: Optional[List] = None", "dict, token: str): \"\"\"Build a Project from the API response,", "as err: if \"nothing to commit, working tree clean\" in", "len(self.files) == 0: descriptions = [\"🤷 No files uploaded yet!\"]", "str]): \"\"\"Uploads files to the project\"\"\" local_dataset_dir = os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\") if", "file_path in enumerate(filepaths): if not os.path.isfile(file_path): logger.error(f\"[{idx + 1}/{len(filepaths)}] ❌", "PURPLE_TAG, RESET_TAG, YELLOW_TAG, http_get, http_post from .validation import validate_file FILE_STATUS", "1][0], status=JOB_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return", "self.files = [UploadedFile.from_json_resp(file) for file in json_files] logger.info(\"🔄 Refreshing models", "\"queued\"), (\"🚀\", \"start\"), (\"⚙\", \"data_munging\"), (\"🏃\", \"model_training\"), (\"✅\", \"success\"), (\"❌\",", "\"❌ Failed: server error\", \"❌ Invalid column mapping, please fix", "= resp.json() self.files = [UploadedFile.from_json_resp(file) for file in json_files] logger.info(\"🔄", "{BOLD_TAG}Status{RESET_TAG}: {BOLD_TAG}{self.status_emoji} {self.status}{RESET_TAG}\", f\" • {BOLD_TAG}Task{RESET_TAG}: {YELLOW_TAG}{self.task.title().replace('_', ' ')}{RESET_TAG}\", f\"", "\"\\n\".join( [ \"~\" * 14 + f\" {BOLD_TAG}Files{RESET_TAG} \" +", "logger.info(\"☁ Uploading files to the dataset hub...\") dataset_repo.push_to_hub(commit_message=\"Upload from AutoNLP", "__str__(self): header = \"\\n\".join( [ f\"AutoNLP Project (id # {self.proj_id})\",", "Files information unknown, update the project\"] else: if len(self.files) 
==", "created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join( [ f\"📚 Model", "the dataset hub...\") dataset_repo.push_to_hub(commit_message=\"Upload from AutoNLP CLI\") logger.info(\"✅ Successfully uploaded", "Successfully uploaded the files!\") except OSError as err: if \"nothing", "and models attached to the project\"\"\" logger.info(\"🔄 Refreshing uploaded files", "{BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] ) @dataclass class Project:", "📁 Registering file {file_name} into project '{file_name}'...\") payload = {", "\"Failed to upload processed data files to the huggingface hub\"),", "[ f\"📚 Model # {self.job_id}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {self.status_emoji} {self.status}\",", "into project '{file_name}'...\") payload = { \"split\": split, \"col_mapping\": col_mapping,", "@dataclass class UploadedFile: \"\"\"A file uploaded to an AutoNLP project\"\"\"", "dataclasses import dataclass from datetime import datetime from typing import", "prettytable import PrettyTable from .splits import TEST_SPLIT, TRAIN_SPLIT, VALID_SPLIT from", "{self.proj_id})\", \"~\" * 35, f\" • {BOLD_TAG}Name{RESET_TAG}: {PURPLE_TAG}{self.name}{RESET_TAG}\", f\" •", "def train(self): \"\"\"Starts training on the models\"\"\" http_get(path=f\"/projects/{self.proj_id}/data/start_process\", token=self._token) logger.info(\"🔥🔥", "information about uploaded files and models attached to the project\"\"\"", "{BOLD_TAG}{self.status_emoji} {self.status}{RESET_TAG}\", f\" • {BOLD_TAG}Task{RESET_TAG}: {YELLOW_TAG}{self.task.title().replace('_', ' ')}{RESET_TAG}\", f\" •", "json_resp[\"task\"], TASKS.keys()))[0], status_emoji=PROJECT_STATUS[json_resp[\"status\"] - 1][0], status=PROJECT_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), 
updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]),", "Refreshing uploaded files information...\") resp = http_get(path=f\"/projects/{self.proj_id}/data\", token=self._token) json_files =", "if \"nothing to commit, working tree clean\" in err.args[0]: logger.info(\"❔", "else: clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id dataset_repo = Repository( local_dir=local_dataset_dir,", "if len(self.training_jobs) == 0: jobs_str = \"🤷 No train jobs", "project\"\"\" logger.info(\"🔄 Refreshing uploaded files information...\") resp = http_get(path=f\"/projects/{self.proj_id}/data\", token=self._token)", "{BOLD_TAG}Processing status{RESET_TAG}: {self.processing_status}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\",", "job: job.job_id): model_table.add_row( [ job.status_emoji, job.job_id, job.status, job.created_at.strftime(\"%Y-%m-%d %H:%M Z\"),", "upload(self, filepaths: List[str], split: str, col_mapping: Dict[str, str]): \"\"\"Uploads files", "job.status, job.created_at.strftime(\"%Y-%m-%d %H:%M Z\"), job.updated_at.strftime(\"%Y-%m-%d %H:%M Z\"), ] ) jobs_str", "Refreshing models information...\") resp = http_get(path=f\"/projects/{self.proj_id}/jobs\", token=self._token) json_jobs = resp.json()", "Z\"), ] ) jobs_str = str(model_table) printout.append(\"\\n\".join([\"\", \"~\" * 12", "(id # {self.proj_id})\", \"~\" * 35, f\" • {BOLD_TAG}Name{RESET_TAG}: {PURPLE_TAG}{self.name}{RESET_TAG}\",", "update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] ) @dataclass class Project: \"\"\"An", "os.path.basename(file_path) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📁 Registering file {file_name} into project", "\"Data processing successful\"), (\"❌\", \"Failed to download data files from", "huggingface hub\"), ) SPLITS = (TRAIN_SPLIT, VALID_SPLIT, TEST_SPLIT) @dataclass class", "[TrainingJob.from_json_resp(job) for job in json_jobs] def upload(self, filepaths: List[str], split:", 
"model_table = PrettyTable([\"\", \"ID\", \"Status\", \"Creation date\", \"Last update\"]) for", "= ( \"☁ Uploaded\", \"⌚ Queued\", \"⚙ In Progress...\", \"✅", "for idx, file_path in enumerate(filepaths): if not os.path.isfile(file_path): logger.error(f\"[{idx +", "def refresh(self): \"\"\"Update information about uploaded files and models attached", "job.updated_at.strftime(\"%Y-%m-%d %H:%M Z\"), ] ) jobs_str = str(model_table) printout.append(\"\\n\".join([\"\", \"~\"", "hub\"), ) SPLITS = (TRAIN_SPLIT, VALID_SPLIT, TEST_SPLIT) @dataclass class TrainingJob:", "• {BOLD_TAG}Split{RESET_TAG}: {self.split}\", f\" • {BOLD_TAG}Processing status{RESET_TAG}: {self.processing_status}\", f\" •", "{self.file_id})\", f\" • {BOLD_TAG}Split{RESET_TAG}: {self.split}\", f\" • {BOLD_TAG}Processing status{RESET_TAG}: {self.processing_status}\",", "on the models\"\"\" http_get(path=f\"/projects/{self.proj_id}/data/start_process\", token=self._token) logger.info(\"🔥🔥 Training started!\") def __str__(self):", "processing_status=FILE_STATUS[json_resp[\"download_status\"] - 1], split=SPLITS[json_resp[\"split\"] - 1], col_mapping=json_resp[\"col_mapping\"], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), )", "data files\"), (\"❌\", \"Failed to upload processed data files to", "file not found\", \"❌ Failed: unsupported file type\", \"❌ Failed:", "col_mapping=col_mapping) dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"]) dataset_repo.git_pull() try: logger.info(\"☁ Uploading files to the dataset", "name=json_resp[\"proj_name\"], user=json_resp[\"username\"], task=list(filter(lambda key: TASKS[key] == json_resp[\"task\"], TASKS.keys()))[0], status_emoji=PROJECT_STATUS[json_resp[\"status\"] -", "else: if len(self.training_jobs) == 0: jobs_str = \"🤷 No train", "from dataclasses import dataclass from datetime import datetime from typing", "\"start\"), (\"⚙\", \"data_munging\"), (\"🏃\", 
\"model_training\"), (\"✅\", \"success\"), (\"❌\", \"failed\"), )", "job_id=json_resp[\"id\"], status_emoji=JOB_STATUS[json_resp[\"status\"] - 1][0], status=JOB_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), )", "from .splits import TEST_SPLIT, TRAIN_SPLIT, VALID_SPLIT from .tasks import TASKS", "- 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), dataset_id=json_resp[\"dataset_id\"], language=json_resp[\"config\"][\"language\"], _token=token, ) def refresh(self):", "user: str task: str status_emoji: str status: str language: str", "datetime @classmethod def from_json_resp(cls, json_resp: dict): return cls( file_id=json_resp[\"data_file_id\"], filename=json_resp[\"fname\"],", "Copying {src} to {dst}...\") os.makedirs(os.path.dirname(dst), exist_ok=True) shutil.copyfile(src, dst) logger.info(f\"[{idx +", "= sorted(self.files, key=lambda file: file.split) # Sort by split descriptions", "started!\") def __str__(self): header = \"\\n\".join( [ f\"AutoNLP Project (id", "14 + f\" {BOLD_TAG}Files{RESET_TAG} \" + \"~\" * 14, \"\",", "Dict, List, Optional from huggingface_hub import Repository from loguru import", "file type\", \"❌ Failed: server error\", \"❌ Invalid column mapping,", ") PROJECT_STATUS = ( (\"✨\", \"Created\"), (\"🚀\", \"Data processing started\"),", "hub...\") dataset_repo.push_to_hub(commit_message=\"Upload from AutoNLP CLI\") logger.info(\"✅ Successfully uploaded the files!\")", "'{file_path}' does not exist or is not a file!\") continue", "continue file_name = os.path.basename(file_path) file_extension = file_name.split(\".\")[-1] src = os.path.expanduser(file_path)", "else: shutil.rmtree(local_dataset_dir) clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id else: clone_from =", "shutil.rmtree(local_dataset_dir) clone_from = 
\"https://huggingface.co/datasets/\" + self.dataset_id else: clone_from = \"https://huggingface.co/datasets/\"", "List, Optional from huggingface_hub import Repository from loguru import logger", "else: if len(self.files) == 0: descriptions = [\"🤷 No files", "when uploading the files!\") raise for idx, file_path in enumerate(filepaths):", "class Project: \"\"\"An AutoNLP project\"\"\" _token: str proj_id: int name:", "1}/{len(filepaths)}] ✅ Success!\") def train(self): \"\"\"Starts training on the models\"\"\"", "from_json_resp(cls, json_resp: dict): return cls( job_id=json_resp[\"id\"], status_emoji=JOB_STATUS[json_resp[\"status\"] - 1][0], status=JOB_STATUS[json_resp[\"status\"]", "from loguru import logger from prettytable import PrettyTable from .splits", "GREEN_TAG, PURPLE_TAG, RESET_TAG, YELLOW_TAG, http_get, http_post from .validation import validate_file", "if len(self.files) == 0: descriptions = [\"🤷 No files uploaded", "the project\"\"\" local_dataset_dir = os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\") if os.path.exists(local_dataset_dir): if os.path.isdir(os.path.join(local_dataset_dir, \"git\")):", "file.\", ) JOB_STATUS = ( (\"⌚\", \"queued\"), (\"🚀\", \"start\"), (\"⚙\",", "TASKS from .utils import BOLD_TAG, CYAN_TAG, GREEN_TAG, PURPLE_TAG, RESET_TAG, YELLOW_TAG,", "the API response, JSON-encoded\"\"\" return cls( proj_id=json_resp[\"id\"], name=json_resp[\"proj_name\"], user=json_resp[\"username\"], task=list(filter(lambda", "Model # {self.job_id}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {self.status_emoji} {self.status}\", f\" •", "= [TrainingJob.from_json_resp(job) for job in json_jobs] def upload(self, filepaths: List[str],", "information if self.files is None: descriptions = [\"❓ Files information", "data files from the huggingface hub\"), (\"❌\", \"Missing 'train' or", "str proj_id: int name: str user: str task: str status_emoji:", "jobs_str = \"❓ Models information unknown, update the project\" else:", "+ 
1}/{len(filepaths)}] ✅ Success!\") def train(self): \"\"\"Starts training on the", ") def __str__(self): return \"\\n\".join( [ f\"📁 {CYAN_TAG}{self.filename}{RESET_TAG} (id #", "in json_jobs] def upload(self, filepaths: List[str], split: str, col_mapping: Dict[str,", "files information if self.files is None: descriptions = [\"❓ Files", "{CYAN_TAG}{self.filename}{RESET_TAG} (id # {self.file_id})\", f\" • {BOLD_TAG}Split{RESET_TAG}: {self.split}\", f\" •", "{YELLOW_TAG}{self.task.title().replace('_', ' ')}{RESET_TAG}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\",", "')}{RESET_TAG}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\", f\" •", "Uploading files to the dataset hub...\") dataset_repo.push_to_hub(commit_message=\"Upload from AutoNLP CLI\")", "= \"❓ Models information unknown, update the project\" else: if", "f\"📚 Model # {self.job_id}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {self.status_emoji} {self.status}\", f\"", "resp = http_get(path=f\"/projects/{self.proj_id}/data\", token=self._token) json_files = resp.json() self.files = [UploadedFile.from_json_resp(file)", "json_files] logger.info(\"🔄 Refreshing models information...\") resp = http_get(path=f\"/projects/{self.proj_id}/jobs\", token=self._token) json_jobs", "col_mapping=json_resp[\"col_mapping\"], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join( [ f\"📁", "(\"⌚\", \"queued\"), (\"🚀\", \"start\"), (\"⚙\", \"data_munging\"), (\"🏃\", \"model_training\"), (\"✅\", \"success\"),", "self.training_jobs is None: jobs_str = \"❓ Models information unknown, update", "token=self._token) json_files = resp.json() self.files = [UploadedFile.from_json_resp(file) for file in", "[\"🤷 No files uploaded yet!\"] else: sorted_files = sorted(self.files, key=lambda", "\"✅ Success!\", \"❌ Failed: file not found\", \"❌ Failed: 
unsupported", "file_ext=file_extension, col_mapping=col_mapping) dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"]) dataset_repo.git_pull() try: logger.info(\"☁ Uploading files to the", "to {dst}...\") os.makedirs(os.path.dirname(dst), exist_ok=True) shutil.copyfile(src, dst) logger.info(f\"[{idx + 1}/{len(filepaths)}] 🔎", "Sort by split descriptions = [str(file) for file in sorted_files]", "\"Failed to download data files from the huggingface hub\"), (\"❌\",", "split, \"col_mapping\": col_mapping, \"data_files\": [{\"fname\": file_name, \"username\": self.user}], } http_post(path=f\"/projects/{self.proj_id}/data/add\",", "unknown, update the project\" else: if len(self.training_jobs) == 0: jobs_str", "job in json_jobs] def upload(self, filepaths: List[str], split: str, col_mapping:", "files\"), (\"❌\", \"Failed to upload processed data files to the", "{self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", \"\", ] ) printout = [header] #", "train jobs started yet!\" else: model_table = PrettyTable([\"\", \"ID\", \"Status\",", "information...\") resp = http_get(path=f\"/projects/{self.proj_id}/data\", token=self._token) json_files = resp.json() self.files =", "to the dataset hub...\") dataset_repo.push_to_hub(commit_message=\"Upload from AutoNLP CLI\") logger.info(\"✅ Successfully", "Failed: unsupported file type\", \"❌ Failed: server error\", \"❌ Invalid", "\"Created\"), (\"🚀\", \"Data processing started\"), (\"✅\", \"Data processing successful\"), (\"❌\",", "\"failed\"), ) PROJECT_STATUS = ( (\"✨\", \"Created\"), (\"🚀\", \"Data processing", "\" + \"~\" * 14, \"\", \"Dataset ID:\", f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\", \"\",", "resp.json() self.training_jobs = [TrainingJob.from_json_resp(job) for job in json_jobs] def upload(self,", "= [header] # Uploaded files information if self.files is None:", "\"❌ Invalid column mapping, please fix it and re-upload the", "error\", \"❌ Invalid column mapping, please fix it and re-upload", "- 1], 
col_mapping=json_resp[\"col_mapping\"], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join(", "cls( proj_id=json_resp[\"id\"], name=json_resp[\"proj_name\"], user=json_resp[\"username\"], task=list(filter(lambda key: TASKS[key] == json_resp[\"task\"], TASKS.keys()))[0],", "(\"🏃\", \"model_training\"), (\"✅\", \"success\"), (\"❌\", \"failed\"), ) PROJECT_STATUS = (", "\"~\" * 12 + f\" {BOLD_TAG}Models{RESET_TAG} \" + \"~\" *", "dst = os.path.join(local_dataset_dir, \"raw\", file_name) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📦 Copying", "status=JOB_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join(", "column mapping...\") validate_file(path=dst, task=self.task, file_ext=file_extension, col_mapping=col_mapping) dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"]) dataset_repo.git_pull() try: logger.info(\"☁", "def from_json_resp(cls, json_resp: dict): return cls( job_id=json_resp[\"id\"], status_emoji=JOB_STATUS[json_resp[\"status\"] - 1][0],", "# {self.file_id})\", f\" • {BOLD_TAG}Split{RESET_TAG}: {self.split}\", f\" • {BOLD_TAG}Processing status{RESET_TAG}:", "OSError as err: if \"nothing to commit, working tree clean\"", "(\"❌\", \"Failed to download data files from the huggingface hub\"),", "to upload processed data files to the huggingface hub\"), )", "logger.info(f\"[{idx + 1}/{len(filepaths)}] 🔎 Validating {dst} and column mapping...\") validate_file(path=dst,", "35, f\" • {BOLD_TAG}Name{RESET_TAG}: {PURPLE_TAG}{self.name}{RESET_TAG}\", f\" • {BOLD_TAG}Owner{RESET_TAG}: {GREEN_TAG}{self.user}{RESET_TAG}\", f\"", "= \"🤷 No train jobs started yet!\" else: model_table =", "the models\"\"\" http_get(path=f\"/projects/{self.proj_id}/data/start_process\", token=self._token) logger.info(\"🔥🔥 
Training started!\") def __str__(self): header", "huggingface hub\"), (\"❌\", \"Missing 'train' or 'valid' split in data", "split in data files\"), (\"❌\", \"Failed to process data files\"),", "file in sorted_files] printout.append( \"\\n\".join( [ \"~\" * 14 +", "In Progress...\", \"✅ Success!\", \"❌ Failed: file not found\", \"❌", "information if self.training_jobs is None: jobs_str = \"❓ Models information", "datetime dataset_id: str files: Optional[List[UploadedFile]] = None training_jobs: Optional[List] =", "RESET_TAG, YELLOW_TAG, http_get, http_post from .validation import validate_file FILE_STATUS =", "wrong when uploading the files!\") raise for idx, file_path in", "status: str status_emoji: str created_at: datetime updated_at: datetime @classmethod def", "f\"📁 {CYAN_TAG}{self.filename}{RESET_TAG} (id # {self.file_id})\", f\" • {BOLD_TAG}Split{RESET_TAG}: {self.split}\", f\"", "logger.info(f\"[{idx + 1}/{len(filepaths)}] ✅ Success!\") def train(self): \"\"\"Starts training on", "1}/{len(filepaths)}] 📁 Registering file {file_name} into project '{file_name}'...\") payload =", "token: str): \"\"\"Build a Project from the API response, JSON-encoded\"\"\"", "API response, JSON-encoded\"\"\" return cls( proj_id=json_resp[\"id\"], name=json_resp[\"proj_name\"], user=json_resp[\"username\"], task=list(filter(lambda key:", "fix it and re-upload the file.\", ) JOB_STATUS = (", "logger.error(f\"[{idx + 1}/{len(filepaths)}] ❌ '{file_path}' does not exist or is", "\"❌ Failed: file not found\", \"❌ Failed: unsupported file type\",", "shutil.copyfile(src, dst) logger.info(f\"[{idx + 1}/{len(filepaths)}] 🔎 Validating {dst} and column", "__str__(self): return \"\\n\".join( [ f\"📁 {CYAN_TAG}{self.filename}{RESET_TAG} (id # {self.file_id})\", f\"", "last upload!\") dataset_repo.git_push() return logger.error(\"❌ Something went wrong when uploading", "• {BOLD_TAG}Owner{RESET_TAG}: {GREEN_TAG}{self.user}{RESET_TAG}\", f\" • {BOLD_TAG}Status{RESET_TAG}: 
{BOLD_TAG}{self.status_emoji} {self.status}{RESET_TAG}\", f\" •", "printout = [header] # Uploaded files information if self.files is", "def __str__(self): return \"\\n\".join( [ f\"📁 {CYAN_TAG}{self.filename}{RESET_TAG} (id # {self.file_id})\",", "%H:%M Z')}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", \"\",", "{self.job_id}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {self.status_emoji} {self.status}\", f\" • {BOLD_TAG}Created at{RESET_TAG}:", ") dataset_repo.git_pull() for idx, file_path in enumerate(filepaths): if not os.path.isfile(file_path):", "not a file!\") continue file_name = os.path.basename(file_path) file_extension = file_name.split(\".\")[-1]", "str split: str col_mapping: Dict[str, str] created_at: datetime updated_at: datetime", "* 14 + f\" {BOLD_TAG}Files{RESET_TAG} \" + \"~\" * 14,", "file {file_name} into project '{file_name}'...\") payload = { \"split\": split,", "os.path.isfile(file_path): logger.error(f\"[{idx + 1}/{len(filepaths)}] ❌ '{file_path}' does not exist or", "file: file.split) # Sort by split descriptions = [str(file) for", "f\" {BOLD_TAG}Models{RESET_TAG} \" + \"~\" * 11, \"\", jobs_str])) return", "int filename: str processing_status: str split: str col_mapping: Dict[str, str]", "Project: \"\"\"An AutoNLP project\"\"\" _token: str proj_id: int name: str", "created_at: datetime updated_at: datetime dataset_id: str files: Optional[List[UploadedFile]] = None", "JSON-encoded\"\"\" return cls( proj_id=json_resp[\"id\"], name=json_resp[\"proj_name\"], user=json_resp[\"username\"], task=list(filter(lambda key: TASKS[key] ==", "self.files is None: descriptions = [\"❓ Files information unknown, update", "\"model_training\"), (\"✅\", \"success\"), (\"❌\", \"failed\"), ) PROJECT_STATUS = ( (\"✨\",", "not exist or is not a file!\") continue file_name =", "[ f\"AutoNLP Project (id # {self.proj_id})\", \"~\" * 35, f\"", "for job in json_jobs] def upload(self, filepaths: List[str], split: str,", "split: 
str, col_mapping: Dict[str, str]): \"\"\"Uploads files to the project\"\"\"", "class TrainingJob: \"\"\"A training job in AutoNLP\"\"\" job_id: int status:", "unsupported file type\", \"❌ Failed: server error\", \"❌ Invalid column", "raise for idx, file_path in enumerate(filepaths): file_name = os.path.basename(file_path) logger.info(f\"[{idx", "col_mapping, \"data_files\": [{\"fname\": file_name, \"username\": self.user}], } http_post(path=f\"/projects/{self.proj_id}/data/add\", payload=payload, token=self._token)", "\"~\" * 35, f\" • {BOLD_TAG}Name{RESET_TAG}: {PURPLE_TAG}{self.name}{RESET_TAG}\", f\" • {BOLD_TAG}Owner{RESET_TAG}:", "in sorted_files] printout.append( \"\\n\".join( [ \"~\" * 14 + f\"", "12 + f\" {BOLD_TAG}Models{RESET_TAG} \" + \"~\" * 11, \"\",", "upload processed data files to the huggingface hub\"), ) SPLITS", "import Dict, List, Optional from huggingface_hub import Repository from loguru", "= (TRAIN_SPLIT, VALID_SPLIT, TEST_SPLIT) @dataclass class TrainingJob: \"\"\"A training job", "Repository from loguru import logger from prettytable import PrettyTable from", "the files!\") raise for idx, file_path in enumerate(filepaths): file_name =", "logger.info(f\"[{idx + 1}/{len(filepaths)}] 📁 Registering file {file_name} into project '{file_name}'...\")", "{self.created_at.strftime('%Y-%m-%d %H:%M Z')}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\",", "\"\\n\".join( [ f\"AutoNLP Project (id # {self.proj_id})\", \"~\" * 35,", "from_json_resp(cls, json_resp: dict, token: str): \"\"\"Build a Project from the", "logger.info(f\"[{idx + 1}/{len(filepaths)}] 📦 Copying {src} to {dst}...\") os.makedirs(os.path.dirname(dst), exist_ok=True)", "mapping...\") validate_file(path=dst, task=self.task, file_ext=file_extension, col_mapping=col_mapping) dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"]) dataset_repo.git_pull() try: logger.info(\"☁ Uploading", "\"\"\"Update information about uploaded files and 
models attached to the", "✅ Success!\") def train(self): \"\"\"Starts training on the models\"\"\" http_get(path=f\"/projects/{self.proj_id}/data/start_process\",", "jobs_str = \"🤷 No train jobs started yet!\" else: model_table", "name: str user: str task: str status_emoji: str status: str", "[UploadedFile.from_json_resp(file) for file in json_files] logger.info(\"🔄 Refreshing models information...\") resp", "working tree clean\" in err.args[0]: logger.info(\"❔ Files did not change", "{self.status}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\", f\" •", "for idx, file_path in enumerate(filepaths): file_name = os.path.basename(file_path) logger.info(f\"[{idx +", "is None: descriptions = [\"❓ Files information unknown, update the", "\"\\n\".join( [ f\"📁 {CYAN_TAG}{self.filename}{RESET_TAG} (id # {self.file_id})\", f\" • {BOLD_TAG}Split{RESET_TAG}:", "key=lambda file: file.split) # Sort by split descriptions = [str(file)", "import BOLD_TAG, CYAN_TAG, GREEN_TAG, PURPLE_TAG, RESET_TAG, YELLOW_TAG, http_get, http_post from", "files to the huggingface hub\"), ) SPLITS = (TRAIN_SPLIT, VALID_SPLIT,", "from typing import Dict, List, Optional from huggingface_hub import Repository", "(\"✅\", \"Data processing successful\"), (\"❌\", \"Failed to download data files", "files information...\") resp = http_get(path=f\"/projects/{self.proj_id}/data\", token=self._token) json_files = resp.json() self.files", "FILE_STATUS = ( \"☁ Uploaded\", \"⌚ Queued\", \"⚙ In Progress...\",", "datetime updated_at: datetime @classmethod def from_json_resp(cls, json_resp: dict): return cls(", "\"\"\"Uploads files to the project\"\"\" local_dataset_dir = os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\") if os.path.exists(local_dataset_dir):", "Z')}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] )", "Something went wrong when uploading the files!\") raise for idx,", 
"len(self.training_jobs) == 0: jobs_str = \"🤷 No train jobs started", "\"Missing 'train' or 'valid' split in data files\"), (\"❌\", \"Failed", "{PURPLE_TAG}{self.name}{RESET_TAG}\", f\" • {BOLD_TAG}Owner{RESET_TAG}: {GREEN_TAG}{self.user}{RESET_TAG}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {BOLD_TAG}{self.status_emoji} {self.status}{RESET_TAG}\",", "= os.path.basename(file_path) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📁 Registering file {file_name} into", "\"~\" * 14, \"\", \"Dataset ID:\", f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\", \"\", ] +", "= [str(file) for file in sorted_files] printout.append( \"\\n\".join( [ \"~\"", "file_id=json_resp[\"data_file_id\"], filename=json_resp[\"fname\"], processing_status=FILE_STATUS[json_resp[\"download_status\"] - 1], split=SPLITS[json_resp[\"split\"] - 1], col_mapping=json_resp[\"col_mapping\"], created_at=datetime.fromisoformat(json_resp[\"created_at\"]),", "dataset_repo.lfs_track(patterns=[f\"raw/*.{file_extension}\"]) dataset_repo.git_pull() try: logger.info(\"☁ Uploading files to the dataset hub...\")", "update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", \"\", ] ) printout = [header]", "use_auth_token=self._token, ) dataset_repo.git_pull() for idx, file_path in enumerate(filepaths): if not", "update the project\" else: if len(self.training_jobs) == 0: jobs_str =", "• {BOLD_TAG}Status{RESET_TAG}: {self.status_emoji} {self.status}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M", "+ 1}/{len(filepaths)}] ❌ '{file_path}' does not exist or is not", "(\"✅\", \"success\"), (\"❌\", \"failed\"), ) PROJECT_STATUS = ( (\"✨\", \"Created\"),", "jobs_str = str(model_table) printout.append(\"\\n\".join([\"\", \"~\" * 12 + f\" {BOLD_TAG}Models{RESET_TAG}", "status_emoji: str created_at: datetime updated_at: datetime @classmethod def from_json_resp(cls, json_resp:", "src = os.path.expanduser(file_path) dst = os.path.join(local_dataset_dir, \"raw\", file_name) logger.info(f\"[{idx 
+", "VALID_SPLIT from .tasks import TASKS from .utils import BOLD_TAG, CYAN_TAG,", "at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\", f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M", "\"Last update\"]) for job in sorted(self.training_jobs, key=lambda job: job.job_id): model_table.add_row(", "Z')}\", \"\", ] ) printout = [header] # Uploaded files", "descriptions = [str(file) for file in sorted_files] printout.append( \"\\n\".join( [", "key=lambda job: job.job_id): model_table.add_row( [ job.status_emoji, job.job_id, job.status, job.created_at.strftime(\"%Y-%m-%d %H:%M", "updated_at: datetime dataset_id: str files: Optional[List[UploadedFile]] = None training_jobs: Optional[List]", "+ \"~\" * 14, \"\", \"Dataset ID:\", f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\", \"\", ]", "job.created_at.strftime(\"%Y-%m-%d %H:%M Z\"), job.updated_at.strftime(\"%Y-%m-%d %H:%M Z\"), ] ) jobs_str =", "change since last upload!\") dataset_repo.git_push() return logger.error(\"❌ Something went wrong", "from_json_resp(cls, json_resp: dict): return cls( file_id=json_resp[\"data_file_id\"], filename=json_resp[\"fname\"], processing_status=FILE_STATUS[json_resp[\"download_status\"] - 1],", "\"col_mapping\": col_mapping, \"data_files\": [{\"fname\": file_name, \"username\": self.user}], } http_post(path=f\"/projects/{self.proj_id}/data/add\", payload=payload,", "- 1][0], status=JOB_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self):", "since last upload!\") dataset_repo.git_push() return logger.error(\"❌ Something went wrong when", "os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\") if os.path.exists(local_dataset_dir): if os.path.isdir(os.path.join(local_dataset_dir, \"git\")): clone_from = None else:", "exist or is not a file!\") continue file_name = os.path.basename(file_path)", 
"[header] # Uploaded files information if self.files is None: descriptions", "filename: str processing_status: str split: str col_mapping: Dict[str, str] created_at:", "str created_at: datetime updated_at: datetime dataset_id: str files: Optional[List[UploadedFile]] =", "about uploaded files and models attached to the project\"\"\" logger.info(\"🔄", "{self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] ) @dataclass class UploadedFile: \"\"\"A file", "1], split=SPLITS[json_resp[\"split\"] - 1], col_mapping=json_resp[\"col_mapping\"], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self):", "logger from prettytable import PrettyTable from .splits import TEST_SPLIT, TRAIN_SPLIT,", "= None else: shutil.rmtree(local_dataset_dir) clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id else:", "jobs started yet!\" else: model_table = PrettyTable([\"\", \"ID\", \"Status\", \"Creation", "enumerate(filepaths): file_name = os.path.basename(file_path) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📁 Registering file", "yet!\" else: model_table = PrettyTable([\"\", \"ID\", \"Status\", \"Creation date\", \"Last", "files: Optional[List[UploadedFile]] = None training_jobs: Optional[List] = None @classmethod def", "datetime from typing import Dict, List, Optional from huggingface_hub import", "processing successful\"), (\"❌\", \"Failed to download data files from the", "str language: str created_at: datetime updated_at: datetime dataset_id: str files:", "{self.status}{RESET_TAG}\", f\" • {BOLD_TAG}Task{RESET_TAG}: {YELLOW_TAG}{self.task.title().replace('_', ' ')}{RESET_TAG}\", f\" • {BOLD_TAG}Created", "status: str language: str created_at: datetime updated_at: datetime dataset_id: str", "training job in AutoNLP\"\"\" job_id: int status: str status_emoji: str", "type\", \"❌ Failed: server error\", \"❌ Invalid column mapping, please", "{BOLD_TAG}Owner{RESET_TAG}: 
{GREEN_TAG}{self.user}{RESET_TAG}\", f\" • {BOLD_TAG}Status{RESET_TAG}: {BOLD_TAG}{self.status_emoji} {self.status}{RESET_TAG}\", f\" • {BOLD_TAG}Task{RESET_TAG}:", "Uploaded files information if self.files is None: descriptions = [\"❓", "= PrettyTable([\"\", \"ID\", \"Status\", \"Creation date\", \"Last update\"]) for job", ") jobs_str = str(model_table) printout.append(\"\\n\".join([\"\", \"~\" * 12 + f\"", "refresh(self): \"\"\"Update information about uploaded files and models attached to", "os.path.join(local_dataset_dir, \"raw\", file_name) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📦 Copying {src} to", "loguru import logger from prettytable import PrettyTable from .splits import", "str task: str status_emoji: str status: str language: str created_at:", "project\"\"\" _token: str proj_id: int name: str user: str task:", "models attached to the project\"\"\" logger.info(\"🔄 Refreshing uploaded files information...\")", "date\", \"Last update\"]) for job in sorted(self.training_jobs, key=lambda job: job.job_id):", "\"~\" * 14 + f\" {BOLD_TAG}Files{RESET_TAG} \" + \"~\" *", "( (\"⌚\", \"queued\"), (\"🚀\", \"start\"), (\"⚙\", \"data_munging\"), (\"🏃\", \"model_training\"), (\"✅\",", "http_get, http_post from .validation import validate_file FILE_STATUS = ( \"☁", "token=self._token) logger.info(f\"[{idx + 1}/{len(filepaths)}] ✅ Success!\") def train(self): \"\"\"Starts training", "file.split) # Sort by split descriptions = [str(file) for file", "the project\"\"\" logger.info(\"🔄 Refreshing uploaded files information...\") resp = http_get(path=f\"/projects/{self.proj_id}/data\",", "clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id dataset_repo = Repository( local_dir=local_dataset_dir, clone_from=clone_from,", ".tasks import TASKS from .utils import BOLD_TAG, CYAN_TAG, GREEN_TAG, PURPLE_TAG,", "[{\"fname\": file_name, \"username\": self.user}], } http_post(path=f\"/projects/{self.proj_id}/data/add\", payload=payload, token=self._token) 
logger.info(f\"[{idx +", "typing import Dict, List, Optional from huggingface_hub import Repository from", "return logger.error(\"❌ Something went wrong when uploading the files!\") raise", "0: descriptions = [\"🤷 No files uploaded yet!\"] else: sorted_files", "def __str__(self): return \"\\n\".join( [ f\"📚 Model # {self.job_id}\", f\"", "dataset_repo.git_pull() try: logger.info(\"☁ Uploading files to the dataset hub...\") dataset_repo.push_to_hub(commit_message=\"Upload", "* 35, f\" • {BOLD_TAG}Name{RESET_TAG}: {PURPLE_TAG}{self.name}{RESET_TAG}\", f\" • {BOLD_TAG}Owner{RESET_TAG}: {GREEN_TAG}{self.user}{RESET_TAG}\",", "Success!\") def train(self): \"\"\"Starts training on the models\"\"\" http_get(path=f\"/projects/{self.proj_id}/data/start_process\", token=self._token)", "CYAN_TAG, GREEN_TAG, PURPLE_TAG, RESET_TAG, YELLOW_TAG, http_get, http_post from .validation import", "the huggingface hub\"), ) SPLITS = (TRAIN_SPLIT, VALID_SPLIT, TEST_SPLIT) @dataclass", ".validation import validate_file FILE_STATUS = ( \"☁ Uploaded\", \"⌚ Queued\",", "os.path.exists(local_dataset_dir): if os.path.isdir(os.path.join(local_dataset_dir, \"git\")): clone_from = None else: shutil.rmtree(local_dataset_dir) clone_from", "None training_jobs: Optional[List] = None @classmethod def from_json_resp(cls, json_resp: dict,", "clone_from=clone_from, use_auth_token=self._token, ) dataset_repo.git_pull() for idx, file_path in enumerate(filepaths): if", ".utils import BOLD_TAG, CYAN_TAG, GREEN_TAG, PURPLE_TAG, RESET_TAG, YELLOW_TAG, http_get, http_post", "project\"\"\" local_dataset_dir = os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\") if os.path.exists(local_dataset_dir): if os.path.isdir(os.path.join(local_dataset_dir, \"git\")): clone_from", "dict): return cls( job_id=json_resp[\"id\"], status_emoji=JOB_STATUS[json_resp[\"status\"] - 1][0], status=JOB_STATUS[json_resp[\"status\"] - 1][1],", "http_get(path=f\"/projects/{self.proj_id}/data\", token=self._token) 
json_files = resp.json() self.files = [UploadedFile.from_json_resp(file) for file", "Uploaded\", \"⌚ Queued\", \"⚙ In Progress...\", \"✅ Success!\", \"❌ Failed:", "TASKS[key] == json_resp[\"task\"], TASKS.keys()))[0], status_emoji=PROJECT_STATUS[json_resp[\"status\"] - 1][0], status=PROJECT_STATUS[json_resp[\"status\"] - 1][1],", "or is not a file!\") continue file_name = os.path.basename(file_path) file_extension", "f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\", \"\", ] + descriptions ) ) # Training jobs", "Failed: file not found\", \"❌ Failed: unsupported file type\", \"❌", ") def __str__(self): return \"\\n\".join( [ f\"📚 Model # {self.job_id}\",", "\"\", \"Dataset ID:\", f\"{CYAN_TAG}{self.dataset_id}{RESET_TAG}\", \"\", ] + descriptions ) )", "[ f\"📁 {CYAN_TAG}{self.filename}{RESET_TAG} (id # {self.file_id})\", f\" • {BOLD_TAG}Split{RESET_TAG}: {self.split}\",", "PROJECT_STATUS = ( (\"✨\", \"Created\"), (\"🚀\", \"Data processing started\"), (\"✅\",", "= str(model_table) printout.append(\"\\n\".join([\"\", \"~\" * 12 + f\" {BOLD_TAG}Models{RESET_TAG} \"", "train(self): \"\"\"Starts training on the models\"\"\" http_get(path=f\"/projects/{self.proj_id}/data/start_process\", token=self._token) logger.info(\"🔥🔥 Training", "logger.info(\"🔄 Refreshing uploaded files information...\") resp = http_get(path=f\"/projects/{self.proj_id}/data\", token=self._token) json_files", "logger.info(\"🔄 Refreshing models information...\") resp = http_get(path=f\"/projects/{self.proj_id}/jobs\", token=self._token) json_jobs =", "( \"☁ Uploaded\", \"⌚ Queued\", \"⚙ In Progress...\", \"✅ Success!\",", "datetime updated_at: datetime dataset_id: str files: Optional[List[UploadedFile]] = None training_jobs:", "if os.path.exists(local_dataset_dir): if os.path.isdir(os.path.join(local_dataset_dir, \"git\")): clone_from = None else: shutil.rmtree(local_dataset_dir)", "= [\"❓ Files information unknown, update the project\"] else: if", "f\" {BOLD_TAG}Files{RESET_TAG} \" + \"~\" * 14, \"\", 
\"Dataset ID:\",", "status_emoji=JOB_STATUS[json_resp[\"status\"] - 1][0], status=JOB_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def", "] ) printout = [header] # Uploaded files information if", "updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), dataset_id=json_resp[\"dataset_id\"], language=json_resp[\"config\"][\"language\"], _token=token, ) def refresh(self): \"\"\"Update information about", "cls( job_id=json_resp[\"id\"], status_emoji=JOB_STATUS[json_resp[\"status\"] - 1][0], status=JOB_STATUS[json_resp[\"status\"] - 1][1], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]),", "AutoNLP project\"\"\" _token: str proj_id: int name: str user: str", "models\"\"\" http_get(path=f\"/projects/{self.proj_id}/data/start_process\", token=self._token) logger.info(\"🔥🔥 Training started!\") def __str__(self): header =", "= \"https://huggingface.co/datasets/\" + self.dataset_id else: clone_from = \"https://huggingface.co/datasets/\" + self.dataset_id", "response, JSON-encoded\"\"\" return cls( proj_id=json_resp[\"id\"], name=json_resp[\"proj_name\"], user=json_resp[\"username\"], task=list(filter(lambda key: TASKS[key]", "dataset_id=json_resp[\"dataset_id\"], language=json_resp[\"config\"][\"language\"], _token=token, ) def refresh(self): \"\"\"Update information about uploaded", "does not exist or is not a file!\") continue file_name", "f\" • {BOLD_TAG}Last update{RESET_TAG}: {self.updated_at.strftime('%Y-%m-%d %H:%M Z')}\", ] ) @dataclass", "descriptions = [\"❓ Files information unknown, update the project\"] else:", "not os.path.isfile(file_path): logger.error(f\"[{idx + 1}/{len(filepaths)}] ❌ '{file_path}' does not exist", "] ) jobs_str = str(model_table) printout.append(\"\\n\".join([\"\", \"~\" * 12 +", "from AutoNLP CLI\") logger.info(\"✅ Successfully uploaded the 
files!\") except OSError", "import logger from prettytable import PrettyTable from .splits import TEST_SPLIT,", "unknown, update the project\"] else: if len(self.files) == 0: descriptions", "in enumerate(filepaths): file_name = os.path.basename(file_path) logger.info(f\"[{idx + 1}/{len(filepaths)}] 📁 Registering", "err: if \"nothing to commit, working tree clean\" in err.args[0]:", "to the project\"\"\" local_dataset_dir = os.path.expanduser(f\"~/.huggingface/autonlp/projects/{self.dataset_id}\") if os.path.exists(local_dataset_dir): if os.path.isdir(os.path.join(local_dataset_dir,", "Optional[List] = None @classmethod def from_json_resp(cls, json_resp: dict, token: str):", "= { \"split\": split, \"col_mapping\": col_mapping, \"data_files\": [{\"fname\": file_name, \"username\":", "%H:%M Z')}\", ] ) @dataclass class Project: \"\"\"An AutoNLP project\"\"\"", "a Project from the API response, JSON-encoded\"\"\" return cls( proj_id=json_resp[\"id\"],", "str files: Optional[List[UploadedFile]] = None training_jobs: Optional[List] = None @classmethod", "is not a file!\") continue file_name = os.path.basename(file_path) file_extension =", "import os import shutil from dataclasses import dataclass from datetime", "huggingface_hub import Repository from loguru import logger from prettytable import", "{self.status_emoji} {self.status}\", f\" • {BOLD_TAG}Created at{RESET_TAG}: {self.created_at.strftime('%Y-%m-%d %H:%M Z')}\", f\"", "1], col_mapping=json_resp[\"col_mapping\"], created_at=datetime.fromisoformat(json_resp[\"created_at\"]), updated_at=datetime.fromisoformat(json_resp[\"updated_at\"]), ) def __str__(self): return \"\\n\".join( [", "resp.json() self.files = [UploadedFile.from_json_resp(file) for file in json_files] logger.info(\"🔄 Refreshing", "uploaded the files!\") except OSError as err: if \"nothing to", "# Uploaded files information if self.files is None: descriptions =", "{BOLD_TAG}Split{RESET_TAG}: {self.split}\", f\" • {BOLD_TAG}Processing 
status{RESET_TAG}: {self.processing_status}\", f\" • {BOLD_TAG}Last", "dataset_repo = Repository( local_dir=local_dataset_dir, clone_from=clone_from, use_auth_token=self._token, ) dataset_repo.git_pull() for idx,", "information unknown, update the project\" else: if len(self.training_jobs) == 0:" ]
[ ">= len(_ELLIPSIS_BYTES), 'max_bytes must be at least %d' % len(_ELLIPSIS_BYTES)", "payload' elif status is 5: return 'Invalid token size' elif", "has been split. while True: try: alert = json.loads(u'\"%s\"' %", "Inc. All Rights Reserved. \"\"\"Apple Push Notification service utilities. Original", "APNs message from parameters ParseResponse(): parses APNs binary response for", "message must be a string or a dictionary.' if expiry", "json.loads(u'\"%s\"' % truncated) break except Exception: truncated = truncated[:-1] #", "the APNS payload.\"\"\" _ELLIPSIS_BYTES = escape.utf8(u'…') \"\"\"UTF-8 encoding of the", "= escape.utf8(json.dumps(escape.recursive_unicode(data), separators=(',', ':'), ensure_ascii=False)) bytes_left = _MAX_PAYLOAD_BYTES - len(encoded)", "chars (i.e. use # direct UTF-8 representation rather than \"\\u1234\"", "% truncated) break except Exception: truncated = truncated[:-1] # Return", "_MAX_PAYLOAD_BYTES = 256 \"\"\"Maximum number of bytes in the APNS", "':'), ensure_ascii=False)) bytes_left = _MAX_PAYLOAD_BYTES - len(encoded) if allow_truncate and", "response for status & identifier ErrorStatusToString(): converts error status to", "'' def _TruncateAlert(alert, max_bytes): \"\"\"Converts the alert text to UTF-8", "max_bytes): \"\"\"Converts the alert text to UTF-8 encoded JSON format,", "= { 'aps' : aps } if extra is not", "'Invalid token' elif status is 255: return 'None (unknown)' else:", "the alert will be stored in the APNS payload. If", "must be a string or a dictionary.' if expiry is", "APNS payload.\"\"\" _ELLIPSIS_BYTES = escape.utf8(u'…') \"\"\"UTF-8 encoding of the Unicode", "def _TruncateAlert(alert, max_bytes): \"\"\"Converts the alert text to UTF-8 encoded", "truncated = truncated[:-1] # Return the UTF-8 encoding of the", "% (_MAX_PAYLOAD_BYTES, len(escape.utf8(alert))) # Now re-encode including the alert text.", "case where a \"\\u1234\" sequence has been split. 
while True:", "basestring): alert = _TruncateAlert(alert, bytes_left) elif alert and len(escape.utf8(alert)) >", "= _TruncateAlert(alert, bytes_left) elif alert and len(escape.utf8(alert)) > bytes_left: raise", "determine how much space is left for the message. #", "parameters ParseResponse(): parses APNs binary response for status & identifier", "truncated UTF-8 encoded alert text, including a trailing ellipsis character.", "ValueError, u'response must be a 6-byte binary string.' command, status,", "escaping). This maximizes the amount of space that's # left", "'Missing device token' elif status is 3: return 'Missing topic'", "{ 'alert' : '', 'content-available': 1 } if badge is", "of the UTF-8 encoded JSON with no alert text. This", "This allows us to # determine how much space is", "split, then the truncated string may not be valid JSON.", "escape _MAX_PAYLOAD_BYTES = 256 \"\"\"Maximum number of bytes in the", "status is 4: return 'Missing payload' elif status is 5:", "not to split JSON escape sequences. Returns the truncated UTF-8", "text. This allows us to # determine how much space", "stored in the APNS payload. If the number of resulting", "elif status is 2: return 'Missing device token' elif status", "a Unicode character boundary, taking care not to split JSON", "token size' elif status is 6: return 'Invalid topic size'", "badge is not None: aps['badge'] = badge if sound is", "= len(encoded) assert length <= _MAX_PAYLOAD_BYTES, (encoded, length) return struct.pack('!bIIH32sH%(length)ds'", "topic' elif status is 4: return 'Missing payload' elif status", "chopping trailing characters until the truncated string is valid JSON.", "quotes added by JSON. 
def _TruncateAlert(alert, max_bytes):
  """Converts the alert text to UTF-8 encoded JSON format, which is how the
  alert will be stored in the APNS payload. If the number of resulting bytes
  exceeds "max_bytes", then truncates the alert text at a Unicode character
  boundary, taking care not to split JSON escape sequences. Returns the
  truncated UTF-8 encoded alert text, including a trailing ellipsis
  character.
  """
  alert_json = escape.utf8(json.dumps(escape.recursive_unicode(alert), ensure_ascii=False))

  # Strip quotes added by JSON.
  alert_json = alert_json[1:-1]

  # Check if alert fits with no truncation.
  if len(alert_json) <= max_bytes:
    return escape.utf8(alert)

  # Make room for an appended ellipsis.
  assert max_bytes >= len(_ELLIPSIS_BYTES), 'max_bytes must be at least %d' % len(_ELLIPSIS_BYTES)
  max_bytes -= len(_ELLIPSIS_BYTES)

  # Truncate the JSON UTF8 string at a Unicode character boundary.
  truncated = alert_json[:max_bytes].decode('utf-8', errors='ignore')

  # If JSON escape sequences were split, then the truncated string may not be
  # valid JSON. Keep chopping trailing characters until the truncated string
  # is valid JSON. It may take several tries, such as in the case where a
  # "\u1234" sequence has been split.
  while True:
    try:
      alert = json.loads(u'"%s"' % truncated)
      break
    except ValueError:
      # json.loads raises ValueError on malformed JSON; the original caught
      # bare Exception, which would also hide genuine bugs in this loop.
      truncated = truncated[:-1]

  # Return the UTF-8 encoding of the alert with the ellipsis appended to it.
  return escape.utf8(alert) + _ELLIPSIS_BYTES
Original copyright for this code: https://github.com/jayridge/apnstornado  TokenToBinary(): converts a base64-encoded token into a binary value
def CreateMessage(token, alert=None, badge=None, sound=None,
                  identifier=0, expiry=None, extra=None, allow_truncate=True):
  """Builds a binary APNs enhanced-notification frame for a device token.

  Validates the token and alert, assembles the 'aps' payload (truncating the
  alert text if necessary so the JSON fits in _MAX_PAYLOAD_BYTES), and packs
  the result into the APNs binary wire format.
  """
  token = TokenToBinary(token)
  if len(token) != 32:
    raise ValueError(u'Token must be a 32-byte binary string.')
  if (alert is not None) and (not isinstance(alert, (basestring, dict))):
    raise ValueError(u'Alert message must be a string or a dictionary.')
  if expiry is None:
    # Default expiry: one year from now.
    expiry = long(time.time() + 365 * 86400)

  # Start by determining the length of the UTF-8 encoded JSON with no alert
  # text. This allows us to determine how much space is left for the message.
  # 'content-available': 1 is necessary to trigger iOS 7's background
  # download processing.
  aps = {'alert': '', 'content-available': 1}
  if badge is not None:
    aps['badge'] = badge
  if sound is not None:
    aps['sound'] = sound

  data = {'aps': aps}
  if extra is not None:
    data.update(extra)

  # Create compact JSON representation with no extra space and no escaping
  # of non-ascii chars (i.e. use direct UTF-8 representation rather than
  # "\u1234" escaping). This maximizes the amount of space that's left for
  # the alert text.
  encoded = escape.utf8(json.dumps(escape.recursive_unicode(data),
                                   separators=(',', ':'), ensure_ascii=False))
  bytes_left = _MAX_PAYLOAD_BYTES - len(encoded)
  if allow_truncate and isinstance(alert, basestring):
    alert = _TruncateAlert(alert, bytes_left)
  elif alert and len(escape.utf8(alert)) > bytes_left:
    raise ValueError(u'max payload(%d) exceeded: %d' % (_MAX_PAYLOAD_BYTES, len(escape.utf8(alert))))

  # Now re-encode including the alert text.
  aps['alert'] = alert
  encoded = escape.utf8(json.dumps(escape.recursive_unicode(data),
                                   separators=(',', ':'), ensure_ascii=False))
  length = len(encoded)
  assert length <= _MAX_PAYLOAD_BYTES, (encoded, length)

  return struct.pack('!bIIH32sH%(length)ds' % {'length': length},
                     1, identifier, expiry,
                     32, token, length, encoded)
def ErrorStatusToString(status):
  """Maps an APNs error-response status code to a human-readable message.

  Returns the empty string for any status code not defined by the APNs
  binary protocol.
  """
  # NOTE: the original compared integers with 'is' (e.g. 'status is 255'),
  # which relies on CPython small-int interning and is not a correct value
  # comparison; a lookup table compares by value and is clearer.
  messages = {
      0: 'No errors encountered',
      1: 'Processing error',
      2: 'Missing device token',
      3: 'Missing topic',
      4: 'Missing payload',
      5: 'Invalid token size',
      6: 'Invalid topic size',
      7: 'Invalid payload size',
      8: 'Invalid token',
      255: 'None (unknown)',
  }
  return messages.get(status, '')
def ParseResponse(bytes):
  """Parses a 6-byte APNs error-response frame.

  Returns a (status, identifier, error_message) tuple.  Note: the parameter
  name 'bytes' shadows the builtin but is kept for interface compatibility.
  """
  if len(bytes) != 6:
    raise ValueError(u'response must be a 6-byte binary string.')

  # Frame layout: 1-byte command, 1-byte status, 4-byte notification id.
  command, status, identifier = struct.unpack_from('!bbI', bytes, 0)
  if command != 8:
    raise ValueError(u'response command must equal 8.')

  return status, identifier, ErrorStatusToString(status)
This allows us to # determine", "+ 365 * 86400) # Start by determining the length", "and isinstance(alert, basestring): alert = _TruncateAlert(alert, bytes_left) elif alert and", "representation rather than \"\\u1234\" escaping). This maximizes the amount of", "Copyright 2012 Viewfinder Inc. All Rights Reserved. \"\"\"Apple Push Notification", "in the case where a \"\\u1234\" sequence has been split.", "return 'Invalid token size' elif status is 6: return 'Invalid", "if len(bytes) != 6: raise ValueError, u'response must be a", "ValueError, u'max payload(%d) exceeded: %d' % (_MAX_PAYLOAD_BYTES, len(escape.utf8(alert))) # Now", "bytes_left: raise ValueError, u'max payload(%d) exceeded: %d' % (_MAX_PAYLOAD_BYTES, len(escape.utf8(alert)))", "status is 8: return 'Invalid token' elif status is 255:", "return 'Invalid token' elif status is 255: return 'None (unknown)'", "Make room for an appended ellipsis. assert max_bytes >= len(_ELLIPSIS_BYTES),", "len(token) != 32: raise ValueError, u'Token must be a 32-byte", "bytes in the APNS payload.\"\"\" _ELLIPSIS_BYTES = escape.utf8(u'…') \"\"\"UTF-8 encoding", "CreateMessage(token, alert=None, badge=None, sound=None, identifier=0, expiry=None, extra=None, allow_truncate=True): token =", "if (alert is not None) and (not isinstance(alert, (basestring, dict))):", "Notification service utilities. Original copyright for this code: https://github.com/jayridge/apnstornado TokenToBinary():", "\"\"\"Maximum number of bytes in the APNS payload.\"\"\" _ELLIPSIS_BYTES =", "7: return 'Invalid payload size' elif status is 8: return", "max_bytes -= len(_ELLIPSIS_BYTES) # Truncate the JSON UTF8 string at", "split JSON escape sequences. Returns the truncated UTF-8 encoded alert", "raise ValueError, u'Token must be a 32-byte binary string.' if", "with no truncation. 
if len(alert_json) <= max_bytes: return escape.utf8(alert) #", "identifier ErrorStatusToString(): converts error status to error message \"\"\" __author__", "encoding of the alert with the ellipsis appended to it.", "number of resulting bytes exceeds \"max_bytes\", then truncates the alert", "def ParseResponse(bytes): if len(bytes) != 6: raise ValueError, u'response must", "if expiry is None: expiry = long(time.time() + 365 *", "of bytes in the APNS payload.\"\"\" _ELLIPSIS_BYTES = escape.utf8(u'…') \"\"\"UTF-8", "(alert is not None) and (not isinstance(alert, (basestring, dict))): raise", "resulting bytes exceeds \"max_bytes\", then truncates the alert text at", "max_bytes: return escape.utf8(alert) # Make room for an appended ellipsis.", "len(escape.utf8(alert)) > bytes_left: raise ValueError, u'max payload(%d) exceeded: %d' %", "return base64.b64encode(bin_token) def CreateMessage(token, alert=None, badge=None, sound=None, identifier=0, expiry=None, extra=None,", "length, encoded) def ParseResponse(bytes): if len(bytes) != 6: raise ValueError,", "sound is not None: aps['sound'] = sound data = {", "365 * 86400) # Start by determining the length of", "elif status is 8: return 'Invalid token' elif status is", "= { 'alert' : '', 'content-available': 1 } if badge", "a Unicode character boundary. truncated = alert_json[:max_bytes].decode('utf-8', errors='ignore') # If", "= 256 \"\"\"Maximum number of bytes in the APNS payload.\"\"\"", "ValueError, u'Token must be a 32-byte binary string.' if (alert", "# left for the alert text. encoded = escape.utf8(json.dumps(escape.recursive_unicode(data), separators=(',',", "ensure_ascii=False)) # Strip quotes added by JSON. alert_json = alert_json[1:-1]", "(not isinstance(alert, (basestring, dict))): raise ValueError, u'Alert message must be", "= truncated[:-1] # Return the UTF-8 encoding of the alert", "truncated string may not be valid JSON. 
Keep # chopping", "take several # tries, such as in the case where", "code: https://github.com/jayridge/apnstornado TokenToBinary(): converts a hex-encoded token into a binary", "must equal 8.' return status, identifier, ErrorStatusToString(status) def ErrorStatusToString(status): if", "return base64.b64decode(token) def TokenFromBinary(bin_token): return base64.b64encode(bin_token) def CreateMessage(token, alert=None, badge=None,", "alert_json[1:-1] # Check if alert fits with no truncation. if", "None: data.update(extra) # Create compact JSON representation with no extra", "maximizes the amount of space that's # left for the", "no escaping of non-ascii chars (i.e. use # direct UTF-8", "allows us to # determine how much space is left", "is 0: return 'No errors encountered' elif status is 1:", "were split, then the truncated string may not be valid", "the case where a \"\\u1234\" sequence has been split. while", "and (not isinstance(alert, (basestring, dict))): raise ValueError, u'Alert message must", "None) and (not isinstance(alert, (basestring, dict))): raise ValueError, u'Alert message", "escape.utf8(json.dumps(escape.recursive_unicode(data), separators=(',', ':'), ensure_ascii=False)) bytes_left = _MAX_PAYLOAD_BYTES - len(encoded) if", "Push Notification service utilities. Original copyright for this code: https://github.com/jayridge/apnstornado", "= sound data = { 'aps' : aps } if", "max_bytes >= len(_ELLIPSIS_BYTES), 'max_bytes must be at least %d' %", "and len(escape.utf8(alert)) > bytes_left: raise ValueError, u'max payload(%d) exceeded: %d'", "= alert encoded = escape.utf8(json.dumps(escape.recursive_unicode(data), separators=(',', ':'), ensure_ascii=False)) length =", "import struct import time from tornado import escape _MAX_PAYLOAD_BYTES =", "space is left for the message. # 'content-available': 1 is", "Strip quotes added by JSON. 
alert_json = alert_json[1:-1] # Check", "if allow_truncate and isinstance(alert, basestring): alert = _TruncateAlert(alert, bytes_left) elif", "7's background download processing. aps = { 'alert' : '',", "1 } if badge is not None: aps['badge'] = badge", "255: return 'None (unknown)' else: return '' def _TruncateAlert(alert, max_bytes):", "_MAX_PAYLOAD_BYTES, (encoded, length) return struct.pack('!bIIH32sH%(length)ds' % { 'length' : length", "4: return 'Missing payload' elif status is 5: return 'Invalid", "APNS payload. If the number of resulting bytes exceeds \"max_bytes\",", "sequences. Returns the truncated UTF-8 encoded alert text, including a", "must be at least %d' % len(_ELLIPSIS_BYTES) max_bytes -= len(_ELLIPSIS_BYTES)", "status & identifier ErrorStatusToString(): converts error status to error message", "value CreateMessage(): formats a binary APNs message from parameters ParseResponse():", "# determine how much space is left for the message.", "sequence has been split. while True: try: alert = json.loads(u'\"%s\"'", "Return the UTF-8 encoding of the alert with the ellipsis", "raise ValueError, u'Alert message must be a string or a", "'', 'content-available': 1 } if badge is not None: aps['badge']", "if sound is not None: aps['sound'] = sound data =", "aps['alert'] = alert encoded = escape.utf8(json.dumps(escape.recursive_unicode(data), separators=(',', ':'), ensure_ascii=False)) length", "encoded = escape.utf8(json.dumps(escape.recursive_unicode(data), separators=(',', ':'), ensure_ascii=False)) length = len(encoded) assert", "1, identifier, expiry, 32, token, length, encoded) def ParseResponse(bytes): if", "ParseResponse(bytes): if len(bytes) != 6: raise ValueError, u'response must be", "JSON format, which is how the alert will be stored", "# Truncate the JSON UTF8 string at a Unicode character", "be stored in the APNS payload. If the number of", "escape.utf8(json.dumps(escape.recursive_unicode(alert), ensure_ascii=False)) # Strip quotes added by JSON. 
alert_json =", "payload size' elif status is 8: return 'Invalid token' elif", "into a binary value CreateMessage(): formats a binary APNs message", "with no extra space and no escaping of non-ascii chars", "alert = _TruncateAlert(alert, bytes_left) elif alert and len(escape.utf8(alert)) > bytes_left:", "length of the UTF-8 encoded JSON with no alert text.", "\"\"\"Converts the alert text to UTF-8 encoded JSON format, which", "Reserved. \"\"\"Apple Push Notification service utilities. Original copyright for this", "if alert fits with no truncation. if len(alert_json) <= max_bytes:", "re-encode including the alert text. aps['alert'] = alert encoded =", "character boundary, taking care not to split JSON escape sequences.", "If the number of resulting bytes exceeds \"max_bytes\", then truncates", "_MAX_PAYLOAD_BYTES - len(encoded) if allow_truncate and isinstance(alert, basestring): alert =", "care not to split JSON escape sequences. Returns the truncated", "def TokenToBinary(token): return base64.b64decode(token) def TokenFromBinary(bin_token): return base64.b64encode(bin_token) def CreateMessage(token,", "may take several # tries, such as in the case", "utilities. Original copyright for this code: https://github.com/jayridge/apnstornado TokenToBinary(): converts a", "= '<EMAIL> (<NAME>)' import base64 import json import struct import", "of space that's # left for the alert text. encoded", "len(alert_json) <= max_bytes: return escape.utf8(alert) # Make room for an", "\"max_bytes\", then truncates the alert text at a Unicode character", "not None: data.update(extra) # Create compact JSON representation with no", "alert text. 
aps['alert'] = alert encoded = escape.utf8(json.dumps(escape.recursive_unicode(data), separators=(',', ':'),", "expiry = long(time.time() + 365 * 86400) # Start by", "'None (unknown)' else: return '' def _TruncateAlert(alert, max_bytes): \"\"\"Converts the", "_TruncateAlert(alert, max_bytes): \"\"\"Converts the alert text to UTF-8 encoded JSON", "such as in the case where a \"\\u1234\" sequence has", "Unicode character boundary, taking care not to split JSON escape", "then the truncated string may not be valid JSON. Keep" ]
[ ":align: center :width: 30% We can mathematically formalize this by", "the shape for the ansatz.\"\"\" return np.random.uniform(0, 2 * np.pi,", "y_1 y_2 + x_2^2 y_2^2 = \\langle \\boldsymbol{x}, \\boldsymbol{y} \\rangle^2.", "calculate the entries between the same datapoints, as we know", "the # variational parameters. At this point we fix the", "\\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x})\\rangle + b\\right). This rewriting might not seem", "that # fixes them to the values we sampled above.", "that expresses the original labelling in the vector # :math:`\\boldsymbol{y}`", "number of datapoints in the dataset. # # In summary,", "in PennyLane. We of course need to start with some", "embedding :math:`\\phi`. Consider for example the following embedding and the", "kernel-target alignment is then defined as the kernel alignment #", "have additional parameters # besides the datapoints, which is why", "the label :math:`y` via .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w},", "the outer points in the dataset can be correctly classified,", "\\rangle. 
The kernel value is then given by the *overlap*", "ansatz we only need a device to run the quantum", "ax.set_ylim(-1, 1) ax.set_aspect(\"equal\") ax.axis(\"off\") return ax ############################################################################## # Let's now", "Note that SVC expects the kernel argument to be a", "labels.astype(int) return X, Y ############################################################################## # Next, we define a", "ax.scatter(x, y, c=Y, cmap=cmap, s=25, marker=\"s\") if num_sectors is not", "for j, layer_params in enumerate(params): layer(x, layer_params, wires, i0=j *", "the kernel # matrices :math:`K_1` and :math:`K_2` if we see", "random_params(num_wires=5, num_layers=6) ############################################################################## # Now we can have a look", "accuracy_init = accuracy(svm, X, Y) print(f\"The accuracy of the kernel", "Putting this into the formula yields .. math:: y(\\boldsymbol{x}) =", "it via the ``qml.kernels.square_kernel_matrix`` # method, which makes use of", "*quantum kernels* for short. In this tutorial you will learn", "the trained parameter baked into it. trained_kernel = lambda x1,", "this method is not very powerful, as datasets that are", "our prediction and include the embedding, we get .. math::", "= [\"#FF0000\", \"#0000FF\"][((i + 1) % 2)] ax.add_artist( mpl.patches.Wedge( (0,", "particularly simple # implementation of Quantum Embedding Kernels. The first", "good solution because it is very # resource intensive, and", "# In order to construct the full kernel circuit, we", "X1, X2: qml.kernels.kernel_matrix(X1, X2, trained_kernel) # Note that SVC expects", "of the ordering in ``qml.probs``, this is the first entry:", "that maps a datapoint :math:`\\boldsymbol{x}` to the state .. 
math::", "multiple times, reusing # the datapoint ``x`` but feeding different", "second support vector classifier with the # trained kernel: #", "X2: qml.kernels.kernel_matrix(X1, X2, trained_kernel) # Note that SVC expects the", "the SVM with random # parameters: accuracy_trained = accuracy(svm_trained, X,", "= kernel(X[0], X[1], init_params) print(f\"The kernel value between the first", "/ nminus for y in Y]) else: _Y = np.array(Y)", "linear classification. Imagine we want to discern two different classes", "an *ideal* kernel # function that expresses the original labelling", "second datapoint: kernel_value = kernel(X[0], X[1], init_params) print(f\"The kernel value", "Choose subset of datapoints to compute the KTA on. subset", "= lambda x1, x2: kernel(x1, x2, params) # Second create", "use PennyLane's ``default.qubit`` # device with 5 wires in analytic", "we also require its adjoint # :math:`U(\\boldsymbol{x})^\\dagger`, which we can", "you chose reproduces the actual similarities of the data. It", "a look at our dataset. In our example, we will", "of the first # datapoint and then the adjoint of", "# References # ---------- # # .. [#Training_QEKs] # #", "= np.count_nonzero(np.array(Y) == 1) nminus = len(Y) - nplus _Y", "seeing what the decision boundaries in this # classification look", "``lambda`` function from above. # Once we have this, we", "them with gradient-based optimization, and all that using the functionality", "Y[subset], lambda x1, x2: kernel(x1, x2, _params), assume_normalized_kernel=True, ) #", "\\langle \\boldsymbol{x}, \\boldsymbol{y} \\rangle^2. This means by just replacing the", "building block. Let's start by defining this layer: import pennylane", "have a look at the kernel value between the first", "\\phi(\\boldsymbol{x})\\rangle + b). We will forgo one tiny step, but", "datapoint. 
We # finally extract the probabilities of observing each", "# Second create a kernel matrix function using the trained", "expects the kernel argument to be a kernel matrix function.", "wire in enumerate(wires): qml.Hadamard(wires=[wire]) qml.RZ(x[i % len(x)], wires=[wire]) i +=", "the datapoint ``x`` but feeding different variational # parameters ``params``", "extended to higher dimensional vectors :math:`\\boldsymbol{x}`, where a line does", "see how well our classifier performs we will measure which", "# .. math:: # k_{\\boldsymbol{y}}(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = y_i y_j. #", "noiseless simulation. Overall this means that we compute # :math:`\\frac{1}{2}(N^2-N)`", "can not be used to make predictions on a #", "inspect the decision boundaries of # our classifier: trained_plot_data =", "2 * np.pi / num_sectors angles = (center_indices + 0.5)", "of the embedding of the second datapoint. We # finally", "for good performance, but optimal alignment will not always #", "we will measure which percentage # of the dataset it", "function that # fixes them to the values we sampled", ":math:`6`. init_params = random_params(num_wires=5, num_layers=6) ############################################################################## # Now we can", "make_double_cake_data(num_sectors) ax = plot_double_cake_data(X, Y, plt.gca(), num_sectors=num_sectors) ############################################################################## # Defining", "a # dataset, becaues it is essentially just a tool", "# the optimization we will not use the entire training", "the KTA on. subset = np.random.choice(list(range(len(X))), 4) # Define the", "that SVM's have proven good generalisation # behavior, it will", "they are confusing. def _make_circular_data(num_sectors): \"\"\"Generate datapoints arranged in an", "but rather # sample smaller subsets of the data at", "of training the parameters of the quantum # kernel. 
Thus,", "Indeed, we see that now not only every data instance", "the state .. math:: |\\psi(\\boldsymbol{x})\\rangle = U(\\boldsymbol{x}) |0 \\rangle. The", "on Near-Term Quantum Computers.\" # `arXiv:2105.02276 <https://arxiv.org/abs/2105.02276>`__, 2021. # #", "Y = make_double_cake_data(num_sectors) ax = plot_double_cake_data(X, Y, plt.gca(), num_sectors=num_sectors) ##############################################################################", "product :math:`\\boldsymbol{y}\\boldsymbol{y}^T`. # The kernel-target alignment is then defined as", "and # :math:`K_2`: # # .. math:: # \\operatorname{KA}(K_1, K_2)", "data and corresponding sectors.\"\"\" x, y = X.T cmap =", "function the *kernel*. It provides the advantage that we can", "quantum models <https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__, where you will also find more #", "generate the data. # The details of these functions are", "i += inc qml.RY(params[0, j], wires=[wire]) qml.broadcast(unitary=qml.CRZ, pattern=\"ring\", wires=wires, parameters=params[1])", "each basis state. @qml.qnode(dev) def kernel_circuit(x1, x2, params): ansatz(x1, params,", "an optimization loop and improve the kernel-target alignment! # #", "``assume_normalized_kernel=True`` ensures that we do not # calculate the entries", "* sector_angle, lw=0, color=other_color, alpha=0.1, ) ) ax.set_xlim(-1, 1) ax.set_ylim(-1,", "full dataset every 50 steps. if (i + 1) %", "SVC ############################################################################## # To construct the SVM, we need to", "PennyLane's # ``kernels`` module allows you to easily evaluate the", "Kernels. The first ingredient we # need for this is", "correct class, but also that there are no strong artifacts", "kernel :math:`k`. In this demo, we will explore one particular", "hackathon. What are kernel methods? ------------------------ To understand what a", "unfit for gradient descent optimization. 
# We therefore first define", "a Quantum Embedding Kernel # ----------------------------------- # PennyLane's `kernels module", "the entries between the same datapoints, as we know them", "formalize this by considering a parameterised quantum circuit :math:`U(\\boldsymbol{x})` that", "wires): \"\"\"The embedding ansatz\"\"\" for j, layer_params in enumerate(params): layer(x,", "X, Y = make_double_cake_data(num_sectors) ax = plot_double_cake_data(X, Y, plt.gca(), num_sectors=num_sectors)", "different classes of points that lie in different corners of", "assign binary labels to datapoints: linear classification. Imagine we want", "Training the Quantum Embedding Kernel # ------------------------------------- # # To", "circuit structure itself. # # Before focusing on the kernel", "\\operatorname{Tr}(A^T B)`. This # reinforces the geometric picture of how", "boundary as a linear combination of the embedded datapoints :math:`\\boldsymbol{w}", "the # `demo on kernel-based training of quantum models <https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__,", "plot_decision_boundaries(svm, plt.gca()) ############################################################################## # We see the outer points in", "# Together, the datapoint and the variational parameters fully determine", "# Following on the results that SVM's have proven good", "in the # `demo on kernel-based training of quantum models", ":math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j) = k(\\boldsymbol{x}_j, \\boldsymbol{x}_i)`. # In addition, the option ``assume_normalized_kernel=True``", "does *not* modify the variational parameters in our circuit #", "not separable by a hyperplane can't be classified without error.", "the probabilities of observing each basis state. @qml.qnode(dev) def kernel_circuit(x1,", "params, wires): \"\"\"The embedding ansatz\"\"\" for j, layer_params in enumerate(params):", "overlap of the quantum states by first applying the embedding", "different corners of the plane. 
A linear classifier corresponds to", "predictions # -------------------------------------------------- # The quantum kernel alone can not", "purpose of this tutorial we will use PennyLane's ``default.qubit`` #", "np.linspace(-1, 1, N_gridpoints)) _zz = np.zeros_like(_xx) for idx in np.ndindex(*_xx.shape):", "meta:: :property=\"og:description\": Kernels and alignment training with Pennylane. :property=\"og:image\": https://pennylane.ai/qml/_images/QEK_thumbnail.png", "We can mathematically formalize this by assigning the label :math:`y`", "can't be classified without error. We can actually sneak around", "linear classifier corresponds to drawing a line and assigning different", "*kernel matrix*. We can inspect it via the ``qml.kernels.square_kernel_matrix`` #", "j, wire in enumerate(wires): qml.Hadamard(wires=[wire]) qml.RZ(x[i % len(x)], wires=[wire]) i", "We will forgo one tiny step, but it can be", "<NAME>, <NAME>, <NAME> and <NAME>. Posted: 24 June 2021* Kernel", "by the *overlap* of the associated embedded quantum states ..", "many interesting cases the embedding :math:`\\phi` will be much costlier", "% 2)] ax.add_artist( mpl.patches.Wedge( (0, 0), 1, i * sector_angle,", "on the kernel values we have to provide values for", "clear that this method is not very powerful, as datasets", "lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, trained_kernel) # Note that SVC", "means having good alignment is # guaranteed for good performance,", "accuracy(svm_trained, X, Y) print(f\"The accuracy of a kernel with trained", "the plane. In this form, linear classification can also be", "x1, x2: kernel(x1, x2, params), assume_normalized_kernel=True, ) print(f\"Step {i+1} -", "limitation by performing a neat trick: if we define some", "find more # background information on the kernel circuit structure", "actual labels of the # training data. It is based", "we want to discern two different classes of points that", "two regions anymore. Instead one needs a *hyperplane*. 
It is", "np.zeros_like(_xx) for idx in np.ndindex(*_xx.shape): _zz[idx] = classifier.predict(np.array([_xx[idx], _yy[idx]])[np.newaxis, :])", "to supply ``sklearn.svm.SVC`` with a function # that takes two", "the ansatz.\"\"\" return np.random.uniform(0, 2 * np.pi, (num_layers, 2, num_wires),", "one tiny step, but it can be shown that for", "-1 * labels2]) # Canonical form of labels Y =", "datapoint :math:`\\boldsymbol{x}` to the state .. math:: |\\psi(\\boldsymbol{x})\\rangle = U(\\boldsymbol{x})", "intricate decision boundaries! This is very important, because in many", "values between all elements of the dataset form the #", "# Report the alignment on the full dataset every 50", "binary labels to datapoints: linear classification. Imagine we want to", "# `Projector <https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__. # This is shown in the #", "resource intensive, and since the accuracy is a discrete quantity", "are not separable by a hyperplane can't be classified without", "create a dataset to work with, the # ``DoubleCake`` dataset.", "at the kernel value between the first and the #", "the dataset form the # *kernel matrix*. We can inspect", "perform the (potentially expensive) embedding :math:`\\phi`. Consider for example the", "classifies correctly. def accuracy(classifier, X, Y_target): return 1 - np.count_nonzero(classifier.predict(X)", "we can give # values to those variational parameters which", "available functionalities # to do both in PennyLane. We of", "cost = lambda _params: -target_alignment( X[subset], Y[subset], lambda x1, x2:", "np.hstack([y1, 0.5 * y2]) # Canonical form of dataset X", "accuracy of the kernel with random parameters is {accuracy_init:.3f}\") ##############################################################################", "this by assigning the label :math:`y` via .. math:: y(\\boldsymbol{x})", "have a look at our dataset. 
In our example, we", "for i in range(500): # Choose subset of datapoints to", "classical machine learning. Here we are concerned with kernels that", "is only a necessary but not a sufficient condition for", "############################################################################## # And we proceed right away to create a", "`kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__. The demo is based on Ref. [#Training_QEKs]_,", "np import matplotlib as mpl np.random.seed(1359) ############################################################################## # And we", "plot_decision_boundaries(classifier, ax, N_gridpoints=14): _xx, _yy = np.meshgrid(np.linspace(-1, 1, N_gridpoints), np.linspace(-1,", "the ``DoubleCake`` data: def plot_double_cake_data(X, Y, ax, num_sectors=None): \"\"\"Plot double", "# given by the outer product :math:`\\boldsymbol{y}\\boldsymbol{y}^T`. # The kernel-target", "enumerate(params): layer(x, layer_params, wires, i0=j * len(wires)) adjoint_ansatz = qml.adjoint(ansatz)", "confusing. def _make_circular_data(num_sectors): \"\"\"Generate datapoints arranged in an even circle.\"\"\"", "in Y]) else: _Y = np.array(Y) T = np.outer(_Y, _Y)", "as building block. Let's start by defining this layer: import", "*kernel*. It provides the advantage that we can often find", "len(Y_target) accuracy_init = accuracy(svm, X, Y) print(f\"The accuracy of the", "Kernel # ----------------------------------- # PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__ # allows", "and :math:`\\boldsymbol{y}\\boldsymbol{y}^T`: # # .. math:: # \\operatorname{KTA}_{\\boldsymbol{y}}(K) # =", "<NAME>, <NAME>, <NAME>, # <NAME>, and <NAME>. # \"Training Quantum", "where :math:`N` is the number of elements in :math:`\\boldsymbol{y}`, #", "In this demo, we will treat a toy problem that", "# We have now achieved perfect classification! 🎆 # #", "quantum kernels =========================================== .. 
meta:: :property=\"og:description\": Kernels and alignment training", "not divide the entire space into two regions anymore. Instead", "search in parameter space is not a good solution because", "x2, params), assume_normalized_kernel=True, ) print(f\"Step {i+1} - Alignment = {current_alignment:.3f}\")", "decision boundaries! This is very important, because in many interesting", "{accuracy_trained:.3f}\") ############################################################################## # We have now achieved perfect classification! 🎆", "which we will construct by repeating a # layer as", "dataset every 50 steps. if (i + 1) % 50", "to objects, namely two kernels, being aligned in a vector", "# behavior, it will be interesting to inspect the decision", "accuracy is a discrete quantity we # would not be", "# resource intensive, and since the accuracy is a discrete", "kernel target alignment by :math:`-1` to actually # *maximize* it", "sectors: import matplotlib.pyplot as plt num_sectors = 3 X, Y", "can actually sneak around this limitation by performing a neat", "linear classification there, we could actually realise non-linear classification in", "on. # For the purpose of this tutorial we will", "\"\"\"Generate random variational parameters in the shape for the ansatz.\"\"\"", "the option ``assume_normalized_kernel=True`` ensures that we do not # calculate", "############################################################################## # Together with the ansatz we only need a", "``qml.kernels.square_kernel_matrix`` # method, which makes use of symmetry of the", "\\frac{\\operatorname{Tr}(K_1 K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}. # # .. note:: # Seen from a", "be 1 # for our noiseless simulation. 
Overall this means", "in the # same class and :math:`-1` otherwise and its", "alignment for our dataset and random parameters is {kta_init:.3f}\") ##############################################################################", "&= (x_1^2, \\sqrt{2} x_1 x_2, x_2^2) \\\\ k(\\boldsymbol{x}, \\boldsymbol{y}) &=", "inner workings of classification with quantum embedding kernels, # training", "with quantum embedding kernels, # training variational embedding kernels and", "# # .. note:: # Seen from a more theoretical", "of datapoints to compute the KTA on. subset = np.random.choice(list(range(len(X))),", "dataset to work with, the # ``DoubleCake`` dataset. Firstly, we", "the associated kernel: .. math:: \\phi((x_1, x_2)) &= (x_1^2, \\sqrt{2}", "vs. the SVM with random # parameters: accuracy_trained = accuracy(svm_trained,", "into two regions anymore. Instead one needs a *hyperplane*. It", "to help plot the ``DoubleCake`` data: def plot_double_cake_data(X, Y, ax,", "matrix*. We can inspect it via the ``qml.kernels.square_kernel_matrix`` # method,", "The training data enters the picture by defining an *ideal*", "option ``assume_normalized_kernel=True`` ensures that we do not # calculate the", "To perform an actual prediction we will make use #", "our original space! .. figure:: ../demonstrations/kernels_module/embedding_nonlinear_classification.png :align: center :width: 65%", "work with # 3 sectors: import matplotlib.pyplot as plt num_sectors", "# An alternative way to set up the kernel circuit", "assigned kernel is thus :math:`+1` if both datapoints lie in", "and on the other hand # is not expected to", "# `arXiv:2105.02276 <https://arxiv.org/abs/2105.02276>`__, 2021. # # .. 
[#Alignment] # #", "np.pi / num_sectors angles = (center_indices + 0.5) * sector_angle", "also interested in seeing what the decision boundaries in this", "Y_target) / len(Y_target) accuracy_init = accuracy(svm, X, Y) print(f\"The accuracy", "data at each step, we choose :math:`4` # datapoints at", "on opposing sides of the line: .. figure:: ../demonstrations/kernels_module/linear_classification.png :align:", "{i+1} - Alignment = {current_alignment:.3f}\") ############################################################################## # We want to", "x2, params): ansatz(x1, params, wires=wires) adjoint_ansatz(x2, params, wires=wires) return qml.probs(wires=wires)", "focusing on the kernel values we have to provide values", "# for our noiseless simulation. Overall this means that we", "# # Training the Quantum Embedding Kernel # ------------------------------------- #", "\\boldsymbol{x}_j) = y_i y_j. # # The assigned kernel is", "*not* modify the variational parameters in our circuit # ansatz.", "to datapoints: linear classification. Imagine we want to discern two", "sneak around this limitation by performing a neat trick: if", "in an even circle.\"\"\" center_indices = np.array(range(0, num_sectors)) sector_angle =", "math:: # \\operatorname{KTA}_{\\boldsymbol{y}}(K) # = \\frac{\\operatorname{Tr}(K \\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}} # = \\frac{\\boldsymbol{y}^T", "that is given to it, which is why we #", "The kernel-target alignment compares the # similarity predicted by the", "kernel that can be realized on near-term quantum computers, namely", "this end we will introduce a # second helper method.", "drawback, however: having a high kernel-target alignment # is only", "matrix function using the trained kernel. 
trained_kernel_matrix = lambda X1,", "coordinates of the datapoints x = np.hstack([x1, 0.5 * x2])", "good performance, but optimal alignment will not always # bring", "note:: # An alternative way to set up the kernel", "how well # the kernel you chose reproduces the actual", "trained_kernel) # Note that SVC expects the kernel argument to", "at first, but notice the above formula only contains inner", "of our SVC. # # Training the Quantum Embedding Kernel", "`QHack <https://qhack.ai/>`__ hackathon. What are kernel methods? ------------------------ To understand", "\\alpha_i \\phi(\\boldsymbol{x}_i)`. Putting this into the formula yields .. math::", "not None: sector_angle = 360 / num_sectors for i in", "# Canonical form of dataset X = np.vstack([x, y]).T labels", "of classical machine learning. Here we are concerned with kernels", "will not always # bring optimal training accuracy with it.", "for i in range(num_sectors): color = [\"#FF0000\", \"#0000FF\"][(i % 2)]", "B \\rangle = \\operatorname{Tr}(A^T B)`. This # reinforces the geometric", "drawing a line and assigning different labels to the regions", "enumerate(wires): qml.Hadamard(wires=[wire]) qml.RZ(x[i % len(x)], wires=[wire]) i += inc qml.RY(params[0,", "qml.probs(wires=wires) ############################################################################## # The kernel function itself is now obtained", "the expression for our prediction and include the embedding, we", "if they are confusing. 
def _make_circular_data(num_sectors): \"\"\"Generate datapoints arranged in", "init_params) K_init = qml.kernels.square_kernel_matrix(X, init_kernel, assume_normalized_kernel=True) with np.printoptions(precision=3, suppress=True): print(K_init)", "\"#0000FF\"]) ax.scatter(x, y, c=Y, cmap=cmap, s=25, marker=\"s\") if num_sectors is", "and labels.\"\"\" K = qml.kernels.square_kernel_matrix( X, kernel, assume_normalized_kernel=assume_normalized_kernel, ) if", "a line and assigning different labels to the regions on", "\\boldsymbol{y} \\rangle^2. This means by just replacing the regular scalar", "dataset X = np.vstack([x, y]).T labels = np.hstack([labels1, -1 *", "1], ) plot_double_cake_data(X, Y, ax) return plot_data ############################################################################## # With", "* np.pi / num_sectors angles = (center_indices + 0.5) *", "this means that we compute # :math:`\\frac{1}{2}(N^2-N)` kernel values for", "levels=[-1, 0, 1], ) plot_double_cake_data(X, Y, ax) return plot_data ##############################################################################", "original labelling in the vector # :math:`\\boldsymbol{y}` by assigning to", "much more intricate decision boundaries! 
This is very important, because", "# # We will make use of regular gradient descent", "not have additional parameters # besides the datapoints, which is", "params, wires, i0=0, inc=1): \"\"\"Building block of the embedding ansatz\"\"\"", "= qml.device(\"default.qubit\", wires=5, shots=None) wires = dev.wires.tolist() ############################################################################## # Let", "of the quantum states by first applying the embedding of", "np.outer(_Y, _Y) inner_product = np.sum(K * T) norm = np.sqrt(np.sum(K", "ansatz\"\"\" i = i0 for j, wire in enumerate(wires): qml.Hadamard(wires=[wire])", "construct the full kernel circuit, we also require its adjoint", "models <https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__, where you will also find more # background", ":math:`-1` to actually # *maximize* it in the process. #", "N_gridpoints=14): _xx, _yy = np.meshgrid(np.linspace(-1, 1, N_gridpoints), np.linspace(-1, 1, N_gridpoints))", "Instead one needs a *hyperplane*. It is immediately clear that", "# target alignment: kta_init = qml.kernels.target_alignment(X, Y, init_kernel, assume_normalized_kernel=True) print(f\"The", "# The kernel-target alignment is then defined as the kernel", "a *hyperplane*. It is immediately clear that this method is", "init_kernel = lambda x1, x2: kernel(x1, x2, init_params) K_init =", "the kernel-target alignment! # # We will make use of", "# Let us now define the quantum circuit that realizes", "we can choose the vector defining the decision boundary as", "accuracy_trained = accuracy(svm_trained, X, Y) print(f\"The accuracy of a kernel", ") plot_double_cake_data(X, Y, ax) return plot_data ############################################################################## # With that", "# sample smaller subsets of the data at each step,", "we choose :math:`4` # datapoints at random. 
Remember that PennyLane's", "in range(num_sectors): color = [\"#FF0000\", \"#0000FF\"][(i % 2)] other_color =", "be realized on near-term quantum computers, namely *Quantum Embedding Kernels", "realized on near-term quantum computers, namely *Quantum Embedding Kernels (QEKs)*.", "# *kernel matrix*. We can inspect it via the ``qml.kernels.square_kernel_matrix``", "params = opt.step(cost, params) # Report the alignment on the", "we need some measure of # how well it fits", "training accuracy with it. # # Let's now come back", "kernel(x1, x2, params): return kernel_circuit(x1, x2, params)[0] ############################################################################## # #", "kernel alignment and its applications.\" # `Artificial Intelligence Review 43.2:", "to construct the full kernel circuit, we also require its", "* len(wires)) adjoint_ansatz = qml.adjoint(ansatz) def random_params(num_wires, num_layers): \"\"\"Generate random", "us spotting overfitting issues # visually in more complex data", "= 0.7 * np.sin(angles) labels = 2 * np.remainder(np.floor_divide(angles, sector_angle),", "np.pi, (num_layers, 2, num_wires), requires_grad=True) ############################################################################## # Together with the", "expect to see an accuracy improvement vs. the SVM with", "replacing the regular scalar product in our linear classification with", "this is an *ansatz*, which we will construct by repeating", "by looking at the probability # of observing the all-zero", "In this demo, we will explore one particular kind of", "opposing sides of the line: .. figure:: ../demonstrations/kernels_module/linear_classification.png :align: center", "kernel-target alignment compares the # similarity predicted by the quantum", "of # how well it fits the dataset in question.", ":math:`\\operatorname{KTA}` but rather # sample smaller subsets of the data", "descent optimization. 
# We therefore first define a differentiable version", "# performance of the kernel [#Alignment]_. This means having good", "see that now not only every data instance falls within", "dimensional vectors :math:`\\boldsymbol{x}`, where a line does not divide the", "as np import matplotlib as mpl np.random.seed(1359) ############################################################################## # And", "Firstly, we define two functions to enable us to #", "{\"_xx\": _xx, \"_yy\": _yy, \"_zz\": _zz} ax.contourf( _xx, _yy, _zz,", "SVC. # # Training the Quantum Embedding Kernel # -------------------------------------", "in different corners of the plane. A linear classifier corresponds", "of datapoints in the dataset. # # In summary, the", "we define a function to help plot the ``DoubleCake`` data:", "perform an actual prediction we will make use # of", "############################################################################## # To construct the SVM, we need to supply", "bring optimal training accuracy with it. # # Let's now", "X1, X2: qml.kernels.kernel_matrix(X1, X2, init_kernel)).fit(X, Y) ############################################################################## # To see", "# how well it fits the dataset in question. Performing", "parameters is {accuracy_trained:.3f}\") ############################################################################## # We have now achieved perfect", "= (center_indices + 0.5) * sector_angle x = 0.7 *", "embedding and the associated kernel: .. math:: \\phi((x_1, x_2)) &=", "the map :math:`k`, we can actually express much more intricate", "the first entry: def kernel(x1, x2, params): return kernel_circuit(x1, x2,", "# The details of these functions are not essential for", "# We want to assess the impact of training the", "steps. if (i + 1) % 50 == 0: current_alignment", "one needs a *hyperplane*. 
It is immediately clear that this", "num_sectors=None): \"\"\"Plot double cake data and corresponding sectors.\"\"\" x, y", "kernels that can be evaluated on quantum computers, *quantum kernels*", "y_1^2 + 2 x_1 x_2 y_1 y_2 + x_2^2 y_2^2", "This step does *not* modify the variational parameters in our", "the decision boundaries for our # initial classifier: init_plot_data =", "random # parameters: accuracy_trained = accuracy(svm_trained, X, Y) print(f\"The accuracy", "we will treat a toy problem that showcases the #", "label :math:`y` via .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\boldsymbol{x}\\rangle", "for optimization cost = lambda _params: -target_alignment( X[subset], Y[subset], lambda", "optimal alignment will not always # bring optimal training accuracy", "to set up the kernel circuit in PennyLane would be", "in our circuit # ansatz. What it does is solving", "Hilbert-Schmidt (or # Frobenius) scalar product # :math:`\\langle A, B", "parameters # besides the datapoints, which is why we again", "# # .. note:: # This step does *not* modify", "the cornerstones of classical machine learning. Here we are concerned", "assigning different labels to the regions on opposing sides of", "(or # Frobenius) scalar product # :math:`\\langle A, B \\rangle", "trick: if we define some map :math:`\\phi(\\boldsymbol{x})` that *embeds* our", "subset = np.random.choice(list(range(len(X))), 4) # Define the cost function for", "by the outer product :math:`\\boldsymbol{y}\\boldsymbol{y}^T`. # The kernel-target alignment is", "x2: kernel(x1, x2, _params), assume_normalized_kernel=True, ) # Optimization step params", "of the angle between the kernel # matrices :math:`K_1` and", "circle. 
But remember we have a circuit # with many", ":math:`\\phi` will be much costlier to compute than the kernel", "is essentially just a tool to measure the similarity #", "*minimize* the cost function that is given to it, which", "built-in optimizer works # to *minimize* the cost function that", "cornerstones of classical machine learning. Here we are concerned with", "allows for a particularly simple # implementation of Quantum Embedding", "a kernel method does, let's first revisit one of the", "expects the kernel to not have additional parameters # besides", "the # second datapoint: kernel_value = kernel(X[0], X[1], init_params) print(f\"The", "classified, but # we still struggle with the inner circle.", "entire training set to compute # :math:`\\operatorname{KTA}` but rather #", "Optimization step params = opt.step(cost, params) # Report the alignment", "the quantum circuit that realizes the kernel. We will compute", "circuit :math:`U(\\boldsymbol{x})` that maps a datapoint :math:`\\boldsymbol{x}` to the state", "up an optimization loop and improve the kernel-target alignment! #", "opt.step(cost, params) # Report the alignment on the full dataset", "figure:: ../demonstrations/kernels_module/embedding_nonlinear_classification.png :align: center :width: 65% If we go back", "the embedded datapoints :math:`\\boldsymbol{w} = \\sum_i \\alpha_i \\phi(\\boldsymbol{x}_i)`. Putting this", "\\boldsymbol{x}_i)`. # In addition, the option ``assume_normalized_kernel=True`` ensures that we", "note:: # This step does *not* modify the variational parameters", "* y2]) # Canonical form of dataset X = np.vstack([x,", "# Now let's code up an optimization loop and improve", "ansatz. What it does is solving a different optimization task", "between vectors in the embedding space: .. 
math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j)", "The kernel-target alignment is then defined as the kernel alignment", "that makes it superfluous to actually perform the (potentially expensive)", "norm = np.sqrt(np.sum(K * K) * np.sum(T * T)) inner_product", "ax.contourf( _xx, _yy, _zz, cmap=mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]), alpha=0.2, levels=[-1, 0, 1],", "to believe we can give # values to those variational", "# *maximize* it in the process. # # .. note::", "<NAME>. # \"Training Quantum Embedding Kernels on Near-Term Quantum Computers.\"", "the # similarity predicted by the quantum kernel to the", "np.sqrt(np.sum(K * K) * np.sum(T * T)) inner_product = inner_product", "Quantum # Embedding Kernel. # # .. note:: # This", "optimal training accuracy with it. # # Let's now come", "inner_product / norm return inner_product params = init_params opt =", "# We will make use of regular gradient descent optimization.", "we only need a device to run the quantum circuit", "simplest methods to assign binary labels to datapoints: linear classification.", "# Note that SVC expects the kernel argument to be", "our circuit # ansatz. What it does is solving a", "of the # training data. It is based on *kernel", "= SVC(kernel=trained_kernel_matrix).fit(X, Y) ############################################################################## # We expect to see an", "Frobenius) scalar product # :math:`\\langle A, B \\rangle = \\operatorname{Tr}(A^T", "| \\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2. \"\"\" ############################################################################## # A toy problem # -------------", "quantum circuit that realizes the kernel. 
We will compute #", "in many interesting cases the embedding :math:`\\phi` will be much", "seem useful at first, but notice the above formula only", ") ax.add_artist( mpl.patches.Wedge( (0, 0), 0.5, i * sector_angle, (i", "we do not # calculate the entries between the same", "that would make us # distrust the model. In this", "shape for the ansatz.\"\"\" return np.random.uniform(0, 2 * np.pi, (num_layers,", "a look at the kernel value between the first and", "the original labelling in the vector # :math:`\\boldsymbol{y}` by assigning", "[#Training_QEKs]_, a project from Xanadu's own `QHack <https://qhack.ai/>`__ hackathon. What", "not essential for understanding the demo, # so don't mind", "quantity we # would not be able to detect small", "does have one drawback, however: having a high kernel-target alignment", "a differentiable version of this function. def target_alignment( X, Y,", "for our noiseless simulation. Overall this means that we compute", "ax = plot_double_cake_data(X, Y, plt.gca(), num_sectors=num_sectors) ############################################################################## # Defining a", "Y, lambda x1, x2: kernel(x1, x2, params), assume_normalized_kernel=True, ) print(f\"Step", "optimization. # We therefore first define a differentiable version of", "an explicit formula for the kernel :math:`k` that makes it", "/ num_sectors angles = (center_indices + 0.5) * sector_angle x", "circuit in PennyLane would be # to use the observable", "have a look at the decision boundaries for our #", "of layers in the # ansatz circuit to :math:`6`. init_params", "gradient descent optimization. To speed up # the optimization we", "(QEKs)*. These are kernels that arise from embedding data into", "takes two sets of datapoints and returns the associated kernel", "a dataset to work with, the # ``DoubleCake`` dataset. Firstly,", "same class and :math:`-1` otherwise and its kernel matrix is", "T) norm = np.sqrt(np.sum(K * K) * np.sum(T * T))", "on the plane. 
In this form, linear classification can also", "We have now achieved perfect classification! 🎆 # # Following", "# The training data enters the picture by defining an", "yet, making it unfit for gradient descent optimization. # We", "dev.wires.tolist() ############################################################################## # Let us now define the quantum circuit", "values for the # variational parameters. At this point we", "inner_product = inner_product / norm return inner_product params = init_params", "# .. math:: # \\operatorname{KA}(K_1, K_2) = \\frac{\\operatorname{Tr}(K_1 K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}. #", "svm_trained = SVC(kernel=trained_kernel_matrix).fit(X, Y) ############################################################################## # We expect to see", "from embedding data into the space of quantum states. We", "plt.gca()) ############################################################################## # Indeed, we see that now not only", "*ansatz*, which we will construct by repeating a # layer", "as the kernel alignment # of the kernel matrix :math:`K`", "SVM, we need to supply ``sklearn.svm.SVC`` with a function #", "always # bring optimal training accuracy with it. # #", ".. math:: |\\psi(\\boldsymbol{x})\\rangle = U(\\boldsymbol{x}) |0 \\rangle. The kernel value", "is thus :math:`+1` if both datapoints lie in the #", "a ``lambda`` function that # fixes them to the values", "lw=0, color=other_color, alpha=0.1, ) ) ax.set_xlim(-1, 1) ax.set_ylim(-1, 1) ax.set_aspect(\"equal\")", "optimization task for the # :math:`\\alpha` and :math:`b` vectors we", "tutorial you will learn how to evaluate kernels, use them", "############################################################################## # With that done, let's have a look at", "into it. 
trained_kernel = lambda x1, x2: kernel(x1, x2, params)", "x2: kernel(x1, x2, params) # Second create a kernel matrix", "contains inner products between vectors in the embedding space: ..", "assume_normalized_kernel=assume_normalized_kernel, ) if rescale_class_labels: nplus = np.count_nonzero(np.array(Y) == 1) nminus", "== 0: current_alignment = target_alignment( X, Y, lambda x1, x2:", "The details of these functions are not essential for understanding", "For the purpose of this tutorial we will use PennyLane's", "params): return kernel_circuit(x1, x2, params)[0] ############################################################################## # # .. note::", "# In addition, the option ``assume_normalized_kernel=True`` ensures that we do", "prediction and include the embedding, we get .. math:: y(\\boldsymbol{x})", "of elements in :math:`\\boldsymbol{y}`, # that is the number of", "of the embedded datapoints :math:`\\boldsymbol{w} = \\sum_i \\alpha_i \\phi(\\boldsymbol{x}_i)`. Putting", "* T)) inner_product = inner_product / norm return inner_product params", "a # layer as building block. Let's start by defining", "way to set up the kernel circuit in PennyLane would", "values we have to provide values for the # variational", "is not very powerful, as datasets that are not separable", "kernel matrix. 
# We can make use of the function", "i in range(500): # Choose subset of datapoints to compute", "wires, i0=0, inc=1): \"\"\"Building block of the embedding ansatz\"\"\" i", "qml.Hadamard(wires=[wire]) qml.RZ(x[i % len(x)], wires=[wire]) i += inc qml.RY(params[0, j],", "\"An overview of kernel alignment and its applications.\" # `Artificial", "on a # dataset, becaues it is essentially just a", "np.ndindex(*_xx.shape): _zz[idx] = classifier.predict(np.array([_xx[idx], _yy[idx]])[np.newaxis, :]) plot_data = {\"_xx\": _xx,", "2)] ax.add_artist( mpl.patches.Wedge( (0, 0), 1, i * sector_angle, (i", "datapoints lie in the # same class and :math:`-1` otherwise", "for classification and train them with gradient-based optimization, and all", "similarity # between two datapoints. To perform an actual prediction", "in enumerate(params): layer(x, layer_params, wires, i0=j * len(wires)) adjoint_ansatz =", "we could actually realise non-linear classification in our original space!", "is shown in the # `demo on kernel-based training of", "0.7 * np.sin(angles) labels = 2 * np.remainder(np.floor_divide(angles, sector_angle), 2)", "= _make_circular_data(num_sectors) x2, y2, labels2 = _make_circular_data(num_sectors) # x and", "= \\frac{\\operatorname{Tr}(K_1 K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}. # # .. note:: # Seen from", ":math:`\\boldsymbol{w} = \\sum_i \\alpha_i \\phi(\\boldsymbol{x}_i)`. Putting this into the formula", "X, Y) print(f\"The accuracy of a kernel with trained parameters", "one particular kind of kernel that can be realized on", "all elements of the dataset form the # *kernel matrix*.", "3 X, Y = make_double_cake_data(num_sectors) ax = plot_double_cake_data(X, Y, plt.gca(),", "example the following embedding and the associated kernel: .. 
math::", "make_double_cake_data(num_sectors): x1, y1, labels1 = _make_circular_data(num_sectors) x2, y2, labels2 =", "other_color = [\"#FF0000\", \"#0000FF\"][((i + 1) % 2)] ax.add_artist( mpl.patches.Wedge(", "In this form, linear classification can also be extended to", "the kernel values we have to provide values for the", "a circuit # with many free parameters! It is reasonable", "expresses the original labelling in the vector # :math:`\\boldsymbol{y}` by", "j, layer_params in enumerate(params): layer(x, layer_params, wires, i0=j * len(wires))", "accuracy with it. # # Let's now come back to", "qml.kernels.square_kernel_matrix( X, kernel, assume_normalized_kernel=assume_normalized_kernel, ) if rescale_class_labels: nplus = np.count_nonzero(np.array(Y)", ") print(f\"Step {i+1} - Alignment = {current_alignment:.3f}\") ############################################################################## # We", ") ) ax.add_artist( mpl.patches.Wedge( (0, 0), 0.5, i * sector_angle,", ".. [#Alignment] # # <NAME>, <NAME>, and <NAME>. # \"An", "the SVM from our Quantum # Embedding Kernel. # #", "Report the alignment on the full dataset every 50 steps.", "# This is shown in the # `demo on kernel-based", "of quantum states. We formalize this by considering a parameterised", "for the kernel :math:`k` that makes it superfluous to actually", "the number of layers in the # ansatz circuit to", "entries between the same datapoints, as we know them to", "the data at each step, we choose :math:`4` # datapoints", "embedding kernels and the available functionalities # to do both", "return kernel_circuit(x1, x2, params)[0] ############################################################################## # # .. note:: #", "create a kernel matrix function using the trained kernel. trained_kernel_matrix", "Quantum Computers.\" # `arXiv:2105.02276 <https://arxiv.org/abs/2105.02276>`__, 2021. # # .. [#Alignment]", "line: .. 
figure:: ../demonstrations/kernels_module/linear_classification.png :align: center :width: 30% We can", "classifier with the # trained kernel: # First create a", "them as vectors # in the space of matrices with", "some map :math:`\\phi(\\boldsymbol{x})` that *embeds* our datapoints into a larger", "this function the *kernel*. It provides the advantage that we", "num_wires), requires_grad=True) ############################################################################## # Together with the ansatz we only", ":math:`\\phi(\\boldsymbol{x})` that *embeds* our datapoints into a larger *feature space*", "and the associated kernel: .. math:: \\phi((x_1, x_2)) &= (x_1^2,", "i0 for j, wire in enumerate(wires): qml.Hadamard(wires=[wire]) qml.RZ(x[i % len(x)],", "the kernel circuit in PennyLane would be # to use", "target_alignment( X, Y, kernel, assume_normalized_kernel=False, rescale_class_labels=True, ): \"\"\"Kernel-target alignment between", "much costlier to compute than the kernel :math:`k`. In this", "also require its adjoint # :math:`U(\\boldsymbol{x})^\\dagger`, which we can obtain", "can give # values to those variational parameters which improve", "np.sum(K * T) norm = np.sqrt(np.sum(K * K) * np.sum(T", "num_sectors is not None: sector_angle = 360 / num_sectors for", "our SVC. # # Training the Quantum Embedding Kernel #", "(SVC). from sklearn.svm import SVC ############################################################################## # To construct the", "visually in more complex data sets. To this end we", "parameters which improve the overall accuracy # of our SVC.", "params) # Second create a kernel matrix function using the", "use them for classification and train them with gradient-based optimization,", "kernels* for short. In this tutorial you will learn how", "# would not be able to detect small improvements. 
#", "We are also interested in seeing what the decision boundaries", "geometric picture of how this measure relates # to objects,", "nplus = np.count_nonzero(np.array(Y) == 1) nminus = len(Y) - nplus", "have a circuit # with many free parameters! It is", "= np.random.choice(list(range(len(X))), 4) # Define the cost function for optimization", "the embedding space: .. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = \\langle \\phi(\\boldsymbol{x}_i),", "example, we will work with # 3 sectors: import matplotlib.pyplot", "= plot_decision_boundaries(svm, plt.gca()) ############################################################################## # We see the outer points", "b). We will forgo one tiny step, but it can", "into the space of quantum states. We formalize this by", "kernel-based training of quantum models <https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__, where you will also", "probability # of observing the all-zero state at the end", "\"\"\" ############################################################################## # A toy problem # ------------- # In", "color = [\"#FF0000\", \"#0000FF\"][(i % 2)] other_color = [\"#FF0000\", \"#0000FF\"][((i", "for :math:`N` datapoints. # To include the variational parameters, we", "Together with the ansatz we only need a device to", "datapoint: kernel_value = kernel(X[0], X[1], init_params) print(f\"The kernel value between", "but optimal alignment will not always # bring optimal training", "\\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x}_j)\\rangle. We call this function the *kernel*. It provides", "first applying the embedding of the first # datapoint and", "basis state. 
@qml.qnode(dev) def kernel_circuit(x1, x2, params): ansatz(x1, params, wires=wires)", "We see the outer points in the dataset can be", "a similiarity measure # between two kernels with given kernel", "is an *ansatz*, which we will construct by repeating a", "gradient-based optimization, and all that using the functionality of PennyLane's", "= make_double_cake_data(num_sectors) ax = plot_double_cake_data(X, Y, plt.gca(), num_sectors=num_sectors) ############################################################################## #", "qml.RZ(x[i % len(x)], wires=[wire]) i += inc qml.RY(params[0, j], wires=[wire])", "of datapoints and returns the associated kernel matrix. # We", "want to assess the impact of training the parameters of", "as we know them to be 1 # for our", "color=color, alpha=0.1, width=0.5, ) ) ax.add_artist( mpl.patches.Wedge( (0, 0), 0.5,", "fully determine # the embedding ansatz :math:`U(\\boldsymbol{x})`. # In order", "&= x_1^2 y_1^2 + 2 x_1 x_2 y_1 y_2 +", "random_params(num_wires, num_layers): \"\"\"Generate random variational parameters in the shape for", "thus :math:`+1` if both datapoints lie in the # same", "init_params = random_params(num_wires=5, num_layers=6) ############################################################################## # Now we can have", "would make us # distrust the model. In this sense,", "quantum states by first applying the embedding of the first", "plot_double_cake_data(X, Y, ax, num_sectors=None): \"\"\"Plot double cake data and corresponding", "the kernel [#Alignment]_. This means having good alignment is #", "datapoint ``x`` but feeding different variational # parameters ``params`` into", "def plot_decision_boundaries(classifier, ax, N_gridpoints=14): _xx, _yy = np.meshgrid(np.linspace(-1, 1, N_gridpoints),", "to the dataset, and on the other hand # is", "with, the # ``DoubleCake`` dataset. 
Firstly, we define two functions", "step does *not* modify the variational parameters in our circuit", ":math:`\\boldsymbol{y}\\boldsymbol{y}^T`. # The kernel-target alignment is then defined as the", "embedding ansatz\"\"\" for j, layer_params in enumerate(params): layer(x, layer_params, wires,", "at our dataset. In our example, we will work with", "formula for the kernel :math:`k` that makes it superfluous to", "course need to start with some imports: from pennylane import", "in the # ansatz circuit to :math:`6`. init_params = random_params(num_wires=5,", "make predictions on a # dataset, becaues it is essentially", "following embedding and the associated kernel: .. math:: \\phi((x_1, x_2))", "the data. It # does have one drawback, however: having", "- nplus _Y = np.array([y / nplus if y ==", "adjust the SVM from our Quantum # Embedding Kernel. #", "function itself is now obtained by looking at the probability", "that we can often find an explicit formula for the", "now not only every data instance falls within the #", "to a more specialized measure, the # *kernel-target alignment* [#Alignment]_.", "and second datapoint is {kernel_value:.3f}\") ############################################################################## # The mutual kernel", "of dataset X = np.vstack([x, y]).T labels = np.hstack([labels1, -1", "define a differentiable version of this function. def target_alignment( X,", "# ansatz circuit to :math:`6`. init_params = random_params(num_wires=5, num_layers=6) ##############################################################################", "toy problem # ------------- # In this demo, we will", "that can be evaluated on quantum computers, *quantum kernels* for", "X2, init_kernel)).fit(X, Y) ############################################################################## # To see how well our", "obtained by looking at the probability # of observing the", "well it fits the dataset in question. 
Performing an exhaustive", ":math:`K_1` and :math:`K_2` if we see them as vectors #", "_xx, _yy, _zz, cmap=mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]), alpha=0.2, levels=[-1, 0, 1], )", "for the # :math:`\\alpha` and :math:`b` vectors we introduced in", "# to *minimize* the cost function that is given to", "independent term :math:`b` specifies the position on the plane. In", "ansatz :math:`U(\\boldsymbol{x})`. # In order to construct the full kernel", ".. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x}_j)\\rangle. We call", "= dev.wires.tolist() ############################################################################## # Let us now define the quantum", "quantum circuit :math:`U(\\boldsymbol{x})` that maps a datapoint :math:`\\boldsymbol{x}` to the", "details of these functions are not essential for understanding the", "assume_normalized_kernel=True, ) print(f\"Step {i+1} - Alignment = {current_alignment:.3f}\") ############################################################################## #", ":math:`b` specifies the position on the plane. In this form,", "kernel-target alignment for our dataset and random parameters is {kta_init:.3f}\")", "function ``qml.kernels.target_alignment`` is not # differentiable yet, making it unfit", "fix the number of layers in the # ansatz circuit", "kernel values for :math:`N` datapoints. # To include the variational", "would be # to use the observable type # `Projector", "the functionality of PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__. The demo is", "= inner_product / norm return inner_product params = init_params opt", "for our prediction and include the embedding, we get ..", "are also interested in seeing what the decision boundaries in", "# Once we have this, we can let scikit-learn adjust", "qml.kernels.kernel_matrix(X1, X2, trained_kernel) # Note that SVC expects the kernel", "generalisation. 
# # References # ---------- # # .. [#Training_QEKs]", "# second datapoint: kernel_value = kernel(X[0], X[1], init_params) print(f\"The kernel", "inner circle. But remember we have a circuit # with", "SVM's have proven good generalisation # behavior, it will be", "subset of datapoints to compute the KTA on. subset =", "a larger *feature space* and then perform linear classification there,", "the embedding, we get .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w},", "def kernel(x1, x2, params): return kernel_circuit(x1, x2, params)[0] ############################################################################## #", "a vector space. # # The training data enters the", "the product # of the corresponding labels: # # ..", "= U(\\boldsymbol{x}) |0 \\rangle. The kernel value is then given", "via the ``qml.kernels.square_kernel_matrix`` # method, which makes use of symmetry", "intensive, and since the accuracy is a discrete quantity we", "again supply the variational # parameters via the ``lambda`` function", "num_sectors)) sector_angle = 2 * np.pi / num_sectors angles =", "* labels2]) # Canonical form of labels Y = labels.astype(int)", "to the expression for our prediction and include the embedding,", "of scikit-learn's Support Vector Classifier (SVC). from sklearn.svm import SVC", "to work with, the # ``DoubleCake`` dataset. Firstly, we define", "correctly classified, but # we still struggle with the inner", "\\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)} N} # # where :math:`N` is the number of", ":width: 65% If we go back to the expression for", "process. # # .. note:: # Currently, the function ``qml.kernels.target_alignment``", "understand what a kernel method does, let's first revisit one", "dataset. 
# # In summary, the kernel-target alignment effectively captures", "# have to multiply the kernel target alignment by :math:`-1`", ":width: 30% We can mathematically formalize this by assigning the", "will construct by repeating a # layer as building block.", ":math:`K_2` if we see them as vectors # in the", "of observing each basis state. @qml.qnode(dev) def kernel_circuit(x1, x2, params):", "dataset and random parameters is {kta_init:.3f}\") ############################################################################## # Now let's", "for our # initial classifier: init_plot_data = plot_decision_boundaries(svm, plt.gca()) ##############################################################################", "<NAME>. Posted: 24 June 2021* Kernel methods are one of", "between all elements of the dataset form the # *kernel", "= \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\boldsymbol{x}\\rangle + b). The vector :math:`\\boldsymbol{w}` points", "dataset it classifies correctly. def accuracy(classifier, X, Y_target): return 1", "the associated embedded quantum states .. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) =", "observable type # `Projector <https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__. # This is shown in", ":math:`4` # datapoints at random. Remember that PennyLane's built-in optimizer", "form, linear classification can also be extended to higher dimensional", "the demo, # so don't mind them if they are", "very powerful, as datasets that are not separable by a", "for example the following embedding and the associated kernel: ..", "work with, the # ``DoubleCake`` dataset. Firstly, we define two", "use the observable type # `Projector <https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__. # This is", "= k(\\boldsymbol{x}_j, \\boldsymbol{x}_i)`. # In addition, the option ``assume_normalized_kernel=True`` ensures", "we compute # :math:`\\frac{1}{2}(N^2-N)` kernel values for :math:`N` datapoints. 
#", "# of the ordering in ``qml.probs``, this is the first", "circle.\"\"\" center_indices = np.array(range(0, num_sectors)) sector_angle = 2 * np.pi", "The mutual kernel values between all elements of the dataset", "* sector_angle x = 0.7 * np.cos(angles) y = 0.7", "essential for understanding the demo, # so don't mind them", "for this is an *ansatz*, which we will construct by", "we sampled above. init_kernel = lambda x1, x2: kernel(x1, x2,", "the dataset. # # In summary, the kernel-target alignment effectively", "hyperplane can't be classified without error. We can actually sneak", "plot_double_cake_data(X, Y, plt.gca(), num_sectors=num_sectors) ############################################################################## # Defining a Quantum Embedding", "probabilities of observing each basis state. @qml.qnode(dev) def kernel_circuit(x1, x2,", "find an explicit formula for the kernel :math:`k` that makes", "that we compute # :math:`\\frac{1}{2}(N^2-N)` kernel values for :math:`N` datapoints.", "= np.zeros_like(_xx) for idx in np.ndindex(*_xx.shape): _zz[idx] = classifier.predict(np.array([_xx[idx], _yy[idx]])[np.newaxis,", "by a hyperplane can't be classified without error. We can", "X[subset], Y[subset], lambda x1, x2: kernel(x1, x2, _params), assume_normalized_kernel=True, )", "proven good generalisation # behavior, it will be interesting to", "up # the optimization we will not use the entire", "# :math:`\\boldsymbol{y}` by assigning to two datapoints the product #", "classification with the map :math:`k`, we can actually express much", "* np.remainder(np.floor_divide(angles, sector_angle), 2) - 1 return x, y, labels", "demo, we will treat a toy problem that showcases the", "look like. This could help us spotting overfitting issues #", "# need for this is an *ansatz*, which we will", "# # .. 
[#Alignment] # # <NAME>, <NAME>, and <NAME>.", "k(\\boldsymbol{x}, \\boldsymbol{y}) &= x_1^2 y_1^2 + 2 x_1 x_2 y_1", "and evaluating quantum kernels =========================================== .. meta:: :property=\"og:description\": Kernels and", "Kernel we need some measure of # how well it", "gradient descent optimization. # We therefore first define a differentiable", "is not expected to suffer from bad generalisation. # #", "datapoints the product # of the corresponding labels: # #", "performance, but optimal alignment will not always # bring optimal", "and its applications.\" # `Artificial Intelligence Review 43.2: 179-192 <https://link.springer.com/article/10.1007/s10462-012-9369-4>`__,", "now define the quantum circuit that realizes the kernel. We", "can make use of the function ``qml.kernels.kernel_matrix`` that provides #", "This # reinforces the geometric picture of how this measure", "\\boldsymbol{y}) &= x_1^2 y_1^2 + 2 x_1 x_2 y_1 y_2", "set up the kernel circuit in PennyLane would be #", "same datapoints, as we know them to be 1 #", "accuracy # of our SVC. # # Training the Quantum", "kernel-target alignment # is only a necessary but not a", "optimizer works # to *minimize* the cost function that is", "# to objects, namely two kernels, being aligned in a", "generated by the # quantum kernel and :math:`\\boldsymbol{y}\\boldsymbol{y}^T`: # #", "alignment # is only a necessary but not a sufficient", "also that there are no strong artifacts that would make", "form of labels Y = labels.astype(int) return X, Y ##############################################################################", "datapoints, which is why we again supply the variational #", "# .. note:: # Currently, the function ``qml.kernels.target_alignment`` is not", "a second support vector classifier with the # trained kernel:", "and returns the associated kernel matrix. 
# We can make", "generalisation # behavior, it will be interesting to inspect the", "Let us now define the quantum circuit that realizes the", "sector_angle), 2) - 1 return x, y, labels def make_double_cake_data(num_sectors):", "in question. Performing an exhaustive # search in parameter space", "sector_angle, lw=0, color=color, alpha=0.1, width=0.5, ) ) ax.add_artist( mpl.patches.Wedge( (0,", "for a good # performance of the kernel [#Alignment]_. This", "all that using the functionality of PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__.", ":math:`k` that makes it superfluous to actually perform the (potentially", "# This step does *not* modify the variational parameters in", "is nothing else than the cosine of the angle between", "*maximize* it in the process. # # .. note:: #", "\"\"\"The embedding ansatz\"\"\" for j, layer_params in enumerate(params): layer(x, layer_params,", "# second helper method. def plot_decision_boundaries(classifier, ax, N_gridpoints=14): _xx, _yy", "subsets of the data at each step, we choose :math:`4`", "kernel. trained_kernel_matrix = lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, trained_kernel) #", "artifacts that would make us # distrust the model. In", "is # guaranteed for good performance, but optimal alignment will", "training the parameters of the quantum # kernel. Thus, let's", "demo is based on Ref. [#Training_QEKs]_, a project from Xanadu's", "= accuracy(svm, X, Y) print(f\"The accuracy of the kernel with", "differentiable yet, making it unfit for gradient descent optimization. 
#", "############################################################################## # Let us now define the quantum circuit that", "# `demo on kernel-based training of quantum models <https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__, where", "value between the first and the # second datapoint: kernel_value", "wires=wires, parameters=params[1]) ############################################################################## # To construct the ansatz, this layer", "trained parameters is {accuracy_trained:.3f}\") ############################################################################## # We have now achieved", "done, let's have a look at the decision boundaries for", "a look at the decision boundaries for our # initial", "datapoints. To perform an actual prediction we will make use", "PennyLane's built-in optimizer works # to *minimize* the cost function", "<NAME> and <NAME>. Posted: 24 June 2021* Kernel methods are", "classifier: trained_plot_data = plot_decision_boundaries(svm_trained, plt.gca()) ############################################################################## # Indeed, we see", "X = np.vstack([x, y]).T labels = np.hstack([labels1, -1 * labels2])", "between two datapoints. To perform an actual prediction we will", "to actually # *maximize* it in the process. # #", "defined as the kernel alignment # of the kernel matrix", "dataset form the # *kernel matrix*. We can inspect it", "need to start with some imports: from pennylane import numpy", "The vector :math:`\\boldsymbol{w}` points perpendicular to the line and thus", "= lambda x1, x2: kernel(x1, x2, init_params) K_init = qml.kernels.square_kernel_matrix(X,", "use of the function ``qml.kernels.kernel_matrix`` that provides # this functionality.", "in the dataset can be correctly classified, but # we", "the cosine of the angle between the kernel # matrices", "functionality. 
It expects the kernel to not have additional parameters", "formula only contains inner products between vectors in the embedding", "states by first applying the embedding of the first #", "# datapoints at random. Remember that PennyLane's built-in optimizer works", "# search in parameter space is not a good solution", "65% If we go back to the expression for our", "# ------------- # In this demo, we will treat a", "can let scikit-learn adjust the SVM from our Quantum #", "the second datapoint. We # finally extract the probabilities of", "in parameter space is not a good solution because it", "to the actual labels of the # training data. It", "is repeated multiple times, reusing # the datapoint ``x`` but", "that SVC expects the kernel argument to be a kernel", "reusing # the datapoint ``x`` but feeding different variational #", "only need a device to run the quantum circuit on.", "------------------------ To understand what a kernel method does, let's first", "treat a toy problem that showcases the # inner workings", "scikit-learn's Support Vector Classifier (SVC). from sklearn.svm import SVC ##############################################################################", "matrices with the Hilbert-Schmidt (or # Frobenius) scalar product #", "<NAME>, <NAME>, and <NAME>. # \"An overview of kernel alignment", "behavior, it will be interesting to inspect the decision boundaries", "additional parameters # besides the datapoints, which is why we", "compares the # similarity predicted by the quantum kernel to", "def random_params(num_wires, num_layers): \"\"\"Generate random variational parameters in the shape", "kernel, # :math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j) = k(\\boldsymbol{x}_j, \\boldsymbol{x}_i)`. 
# In addition, the", "ax.set_aspect(\"equal\") ax.axis(\"off\") return ax ############################################################################## # Let's now have a", "from sklearn.svm import SVC ############################################################################## # To construct the SVM,", "product # of the corresponding labels: # # .. math::", "not be able to detect small improvements. # # We", "by just replacing the regular scalar product in our linear", "assume_normalized_kernel=True, ) # Optimization step params = opt.step(cost, params) #", "y = 0.7 * np.sin(angles) labels = 2 * np.remainder(np.floor_divide(angles,", "# background information on the kernel circuit structure itself. #", "datapoints :math:`\\boldsymbol{w} = \\sum_i \\alpha_i \\phi(\\boldsymbol{x}_i)`. Putting this into the", "a function # that takes two sets of datapoints and", "from Xanadu's own `QHack <https://qhack.ai/>`__ hackathon. What are kernel methods?", "the (potentially expensive) embedding :math:`\\phi`. Consider for example the following", "one of the simplest methods to assign binary labels to", "Once we have this, we can let scikit-learn adjust the", "Let's start by defining this layer: import pennylane as qml", "the kernel :math:`k`. In this demo, we will explore one", "# that takes two sets of datapoints and returns the", "then given by the *overlap* of the associated embedded quantum", "Y, kernel, assume_normalized_kernel=False, rescale_class_labels=True, ): \"\"\"Kernel-target alignment between kernel and", "vectors in the embedding space: .. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) =", "We can make use of the function ``qml.kernels.kernel_matrix`` that provides", "from bad generalisation. # # References # ---------- # #", "addition, the option ``assume_normalized_kernel=True`` ensures that we do not #", "baked into it. 
trained_kernel = lambda x1, x2: kernel(x1, x2,", "sufficient condition for a good # performance of the kernel", "scikit-learn adjust the SVM from our Quantum # Embedding Kernel.", "for the ansatz.\"\"\" return np.random.uniform(0, 2 * np.pi, (num_layers, 2,", "the cost function for optimization cost = lambda _params: -target_alignment(", "help us spotting overfitting issues # visually in more complex", "\"_yy\": _yy, \"_zz\": _zz} ax.contourf( _xx, _yy, _zz, cmap=mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]),", "init_kernel, assume_normalized_kernel=True) print(f\"The kernel-target alignment for our dataset and random", "adjust itself to the dataset, and on the other hand", "methods? ------------------------ To understand what a kernel method does, let's", "(i + 1) * sector_angle, lw=0, color=color, alpha=0.1, width=0.5, )", "############################################################################## # To construct the ansatz, this layer is repeated", "have now achieved perfect classification! 🎆 # # Following on", "that this method is not very powerful, as datasets that", "): \"\"\"Kernel-target alignment between kernel and labels.\"\"\" K = qml.kernels.square_kernel_matrix(", "the Quantum Embedding Kernel # ------------------------------------- # # To be", "construct by repeating a # layer as building block. Let's", "- Y_target) / len(Y_target) accuracy_init = accuracy(svm, X, Y) print(f\"The", ":math:`\\boldsymbol{y}` by assigning to two datapoints the product # of", "* sector_angle, (i + 1) * sector_angle, lw=0, color=color, alpha=0.1,", "is the number of datapoints in the dataset. # #", "reasonable to believe we can give # values to those", "# kernel. 
Thus, let's build a second support vector classifier", "= np.hstack([labels1, -1 * labels2]) # Canonical form of labels", "our linear classification with the map :math:`k`, we can actually", "particular kind of kernel that can be realized on near-term", "\\operatorname{KA}(K_1, K_2) = \\frac{\\operatorname{Tr}(K_1 K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}. # # .. note:: #", "kernel matrix function using the trained kernel. trained_kernel_matrix = lambda", "define the quantum circuit that realizes the kernel. We will", "expression for our prediction and include the embedding, we get", "K) * np.sum(T * T)) inner_product = inner_product / norm", "sectors.\"\"\" x, y = X.T cmap = mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]) ax.scatter(x,", "not seem useful at first, but notice the above formula", "make use of regular gradient descent optimization. To speed up", "qml.device(\"default.qubit\", wires=5, shots=None) wires = dev.wires.tolist() ############################################################################## # Let us", "print(f\"The accuracy of a kernel with trained parameters is {accuracy_trained:.3f}\")", "Y) print(f\"The accuracy of a kernel with trained parameters is", "a more specialized measure, the # *kernel-target alignment* [#Alignment]_. The", "kta_init = qml.kernels.target_alignment(X, Y, init_kernel, assume_normalized_kernel=True) print(f\"The kernel-target alignment for", "itself. # # Before focusing on the kernel values we", "will compute # the overlap of the quantum states by", "# fixes them to the values we sampled above. init_kernel", ":]) plot_data = {\"_xx\": _xx, \"_yy\": _yy, \"_zz\": _zz} ax.contourf(", "the dataset it classifies correctly. 
def accuracy(classifier, X, Y_target): return", "figure:: ../demonstrations/kernels_module/linear_classification.png :align: center :width: 30% We can mathematically formalize", "the embedding ansatz\"\"\" i = i0 for j, wire in", "function to help plot the ``DoubleCake`` data: def plot_double_cake_data(X, Y,", "the kernel, # :math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j) = k(\\boldsymbol{x}_j, \\boldsymbol{x}_i)`. # In addition,", "and then perform linear classification there, we could actually realise", "y_2 + x_2^2 y_2^2 = \\langle \\boldsymbol{x}, \\boldsymbol{y} \\rangle^2. This", "between the same datapoints, as we know them to be", "arranged in an even circle.\"\"\" center_indices = np.array(range(0, num_sectors)) sector_angle", "determine # the embedding ansatz :math:`U(\\boldsymbol{x})`. # In order to", "perform linear classification there, we could actually realise non-linear classification", "x2, params)[0] ############################################################################## # # .. note:: # An alternative", "############################################################################## # # .. note:: # An alternative way to", "# Let's now come back to the actual implementation. PennyLane's", "plt num_sectors = 3 X, Y = make_double_cake_data(num_sectors) ax =", "matrices :math:`K_1` and :math:`K_2` if we see them as vectors", "than the kernel :math:`k`. In this demo, we will explore", "it classifies correctly. def accuracy(classifier, X, Y_target): return 1 -", "# Next, we define a function to help plot the", "quantum embedding kernels, # training variational embedding kernels and the", "import SVC ############################################################################## # To construct the SVM, we need", "version of this function. def target_alignment( X, Y, kernel, assume_normalized_kernel=False,", "without error. 
We can actually sneak around this limitation by", "Now let's code up an optimization loop and improve the", "picture of how this measure relates # to objects, namely", "plot the ``DoubleCake`` data: def plot_double_cake_data(X, Y, ax, num_sectors=None): \"\"\"Plot", "feeding different variational # parameters ``params`` into each of them.", "not # differentiable yet, making it unfit for gradient descent", "compute # :math:`\\frac{1}{2}(N^2-N)` kernel values for :math:`N` datapoints. # To", ":property=\"og:description\": Kernels and alignment training with Pennylane. :property=\"og:image\": https://pennylane.ai/qml/_images/QEK_thumbnail.png ..", "the outer product :math:`\\boldsymbol{y}\\boldsymbol{y}^T`. # The kernel-target alignment is then", "if y == 1 else y / nminus for y", "the kernel. We will compute # the overlap of the", "with # 3 sectors: import matplotlib.pyplot as plt num_sectors =", "term :math:`b` specifies the position on the plane. In this", "training data enters the picture by defining an *ideal* kernel", "if we see them as vectors # in the space", "This could help us spotting overfitting issues # visually in", "the # trained kernel: # First create a kernel with", "\\boldsymbol{x}_j) = | \\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2. \"\"\" ############################################################################## # A toy problem", "this limitation by performing a neat trick: if we define", ".. figure:: ../demonstrations/kernels_module/embedding_nonlinear_classification.png :align: center :width: 65% If we go", "first # datapoint and then the adjoint of the embedding", "(potentially expensive) embedding :math:`\\phi`. Consider for example the following embedding", "qml def layer(x, params, wires, i0=0, inc=1): \"\"\"Building block of", "# :math:`K_2`: # # .. 
math:: # \\operatorname{KA}(K_1, K_2) =", "X.T cmap = mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]) ax.scatter(x, y, c=Y, cmap=cmap, s=25,", "a function to help plot the ``DoubleCake`` data: def plot_double_cake_data(X,", "detect small improvements. # # We can, however, resort to", "the first and second datapoint is {kernel_value:.3f}\") ############################################################################## # The", "np.meshgrid(np.linspace(-1, 1, N_gridpoints), np.linspace(-1, 1, N_gridpoints)) _zz = np.zeros_like(_xx) for", "sector_angle, (i + 1) * sector_angle, lw=0, color=color, alpha=0.1, width=0.5,", ".. [#Training_QEKs] # # <NAME>, <NAME>, <NAME>, <NAME>, # <NAME>,", "there are no strong artifacts that would make us #", "T = np.outer(_Y, _Y) inner_product = np.sum(K * T) norm", "kernel :math:`k` that makes it superfluous to actually perform the", "interesting cases the embedding :math:`\\phi` will be much costlier to", "a datapoint :math:`\\boldsymbol{x}` to the state .. math:: |\\psi(\\boldsymbol{x})\\rangle =", "dataset. In our example, we will work with # 3", "np.random.choice(list(range(len(X))), 4) # Define the cost function for optimization cost", ".. related:: tutorial_kernel_based_training Kernel-based training with scikit-learn tutorial_data_reuploading_classifier Classification with", ":math:`\\boldsymbol{x}`, where a line does not divide the entire space", "# <NAME>, <NAME>, and <NAME>. # \"An overview of kernel", "still struggle with the inner circle. But remember we have", "\\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x})\\rangle + b\\right). This rewriting might not seem useful", "svm = SVC(kernel=lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, init_kernel)).fit(X, Y) ##############################################################################", "PennyLane. We of course need to start with some imports:", "x_2^2 y_2^2 = \\langle \\boldsymbol{x}, \\boldsymbol{y} \\rangle^2. 
This means by", "than the cosine of the angle between the kernel #", "adjoint of the embedding of the second datapoint. We #", "360 / num_sectors for i in range(num_sectors): color = [\"#FF0000\",", "inner_product = np.sum(K * T) norm = np.sqrt(np.sum(K * K)", "smaller subsets of the data at each step, we choose", "this demo, we will explore one particular kind of kernel", "we fix the number of layers in the # ansatz", "This rewriting might not seem useful at first, but notice", "layer: import pennylane as qml def layer(x, params, wires, i0=0,", "two kernels with given kernel matrices :math:`K_1` and # :math:`K_2`:", "1) % 50 == 0: current_alignment = target_alignment( X, Y,", "we will use PennyLane's ``default.qubit`` # device with 5 wires", "predictions on a # dataset, becaues it is essentially just", "labels: # # .. math:: # k_{\\boldsymbol{y}}(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = y_i", "actually perform the (potentially expensive) embedding :math:`\\phi`. Consider for example", "= \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x}_j)\\rangle. We call this function the *kernel*.", "# is only a necessary but not a sufficient condition", "set to compute # :math:`\\operatorname{KTA}` but rather # sample smaller", "of the data. It # does have one drawback, however:", "to the values we sampled above. init_kernel = lambda x1,", "= SVC(kernel=lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, init_kernel)).fit(X, Y) ############################################################################## #", "very # resource intensive, and since the accuracy is a", "in the space of matrices with the Hilbert-Schmidt (or #", "function that is given to it, which is why we", "to compute the KTA on. subset = np.random.choice(list(range(len(X))), 4) #", "k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = | \\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2. 
\"\"\" ############################################################################## # A toy", "above formula only contains inner products between vectors in the", "is the number of elements in :math:`\\boldsymbol{y}`, # that is", "does, let's first revisit one of the simplest methods to", "a parameterised quantum circuit :math:`U(\\boldsymbol{x})` that maps a datapoint :math:`\\boldsymbol{x}`", "the optimization we will not use the entire training set", "performance of the kernel [#Alignment]_. This means having good alignment", ":math:`\\frac{1}{2}(N^2-N)` kernel values for :math:`N` datapoints. # To include the", "angle between the kernel # matrices :math:`K_1` and :math:`K_2` if", "lambda x1, x2: kernel(x1, x2, params), assume_normalized_kernel=True, ) print(f\"Step {i+1}", "sector_angle, lw=0, color=other_color, alpha=0.1, ) ) ax.set_xlim(-1, 1) ax.set_ylim(-1, 1)", "1) ax.set_ylim(-1, 1) ax.set_aspect(\"equal\") ax.axis(\"off\") return ax ############################################################################## # Let's", "space. # # The training data enters the picture by", "# .. math:: # \\operatorname{KTA}_{\\boldsymbol{y}}(K) # = \\frac{\\operatorname{Tr}(K \\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}} #", "demo, # so don't mind them if they are confusing.", "dev = qml.device(\"default.qubit\", wires=5, shots=None) wires = dev.wires.tolist() ############################################################################## #", "inc=1): \"\"\"Building block of the embedding ansatz\"\"\" i = i0", "= random_params(num_wires=5, num_layers=6) ############################################################################## # Now we can have a", "introduced in the beginning. svm = SVC(kernel=lambda X1, X2: qml.kernels.kernel_matrix(X1,", "will also find more # background information on the kernel", "that can be realized on near-term quantum computers, namely *Quantum", "state. 
@qml.qnode(dev) def kernel_circuit(x1, x2, params): ansatz(x1, params, wires=wires) adjoint_ansatz(x2,", "the ordering in ``qml.probs``, this is the first entry: def", "the quantum states by first applying the embedding of the", "want to discern two different classes of points that lie", "\\sqrt{2} x_1 x_2, x_2^2) \\\\ k(\\boldsymbol{x}, \\boldsymbol{y}) &= x_1^2 y_1^2", "sector_angle = 2 * np.pi / num_sectors angles = (center_indices", "np.hstack([x1, 0.5 * x2]) y = np.hstack([y1, 0.5 * y2])", "wires = dev.wires.tolist() ############################################################################## # Let us now define the", "vectors # in the space of matrices with the Hilbert-Schmidt", "that takes two sets of datapoints and returns the associated", "# We can, however, resort to a more specialized measure,", "lie in the # same class and :math:`-1` otherwise and", "math:: y(\\boldsymbol{x}) = \\operatorname{sgn}\\left(\\sum_i \\alpha_i \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x})\\rangle + b\\right).", "\"#0000FF\"][((i + 1) % 2)] ax.add_artist( mpl.patches.Wedge( (0, 0), 1,", "the kernel argument to be a kernel matrix function. svm_trained", ".. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\boldsymbol{x}\\rangle + b). The", "vector :math:`\\boldsymbol{w}` points perpendicular to the line and thus determine", "# parameters via the ``lambda`` function from above. # Once", "short. In this tutorial you will learn how to evaluate", "SVC expects the kernel argument to be a kernel matrix", "from both: on # one hand it can adjust itself", "space of matrices with the Hilbert-Schmidt (or # Frobenius) scalar", "and y coordinates of the datapoints x = np.hstack([x1, 0.5", "the adjoint of the embedding of the second datapoint. We", "boundaries! 
This is very important, because in many interesting cases", "2)] other_color = [\"#FF0000\", \"#0000FF\"][((i + 1) % 2)] ax.add_artist(", "datapoint and then the adjoint of the embedding of the", "make us # distrust the model. In this sense, our", "are kernel methods? ------------------------ To understand what a kernel method", "# of the kernel matrix :math:`K` generated by the #", "# To construct the ansatz, this layer is repeated multiple", "datapoints at random. Remember that PennyLane's built-in optimizer works #", "[#Alignment] # # <NAME>, <NAME>, and <NAME>. # \"An overview", "discrete quantity we # would not be able to detect", "a sufficient condition for a good # performance of the", "<NAME>, <NAME>, <NAME>, <NAME>, # <NAME>, and <NAME>. # \"Training", "more # background information on the kernel circuit structure itself.", "is {kernel_value:.3f}\") ############################################################################## # The mutual kernel values between all", "Embedding Kernel for predictions # -------------------------------------------------- # The quantum kernel", "have this, we can let scikit-learn adjust the SVM from", "to use the observable type # `Projector <https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__. # This", "of these functions are not essential for understanding the demo,", "tool to measure the similarity # between two datapoints. To", "them to the values we sampled above. 
init_kernel = lambda", "its applications.\" # `Artificial Intelligence Review 43.2: 179-192 <https://link.springer.com/article/10.1007/s10462-012-9369-4>`__, 2015.", "the # :math:`\\alpha` and :math:`b` vectors we introduced in the", "# our classifier: trained_plot_data = plot_decision_boundaries(svm_trained, plt.gca()) ############################################################################## # Indeed,", "the full kernel circuit, we also require its adjoint #", "np.count_nonzero(classifier.predict(X) - Y_target) / len(Y_target) accuracy_init = accuracy(svm, X, Y)", "the process. # # .. note:: # Currently, the function", "It provides the advantage that we can often find an", "between the first and second datapoint is {kernel_value:.3f}\") ############################################################################## #", "ax ############################################################################## # Let's now have a look at our", "will use PennyLane's ``default.qubit`` # device with 5 wires in", "beginning. svm = SVC(kernel=lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, init_kernel)).fit(X, Y)", "do both in PennyLane. We of course need to start", "``default.qubit`` # device with 5 wires in analytic mode. dev", "the kernel target alignment by :math:`-1` to actually # *maximize*", "background information on the kernel circuit structure itself. # #", "we will not use the entire training set to compute", "x = 0.7 * np.cos(angles) y = 0.7 * np.sin(angles)", "not # calculate the entries between the same datapoints, as", "parameters is {kta_init:.3f}\") ############################################################################## # Now let's code up an", "*kernel-target alignment* [#Alignment]_. 
The kernel-target alignment compares the # similarity", "# In this demo, we will treat a toy problem", "# ----------------------------------- # PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__ # allows for", "# x and y coordinates of the datapoints x =", "= np.hstack([y1, 0.5 * y2]) # Canonical form of dataset", "Kernels (QEKs)*. These are kernels that arise from embedding data", "to compute than the kernel :math:`k`. In this demo, we", "specifies the position on the plane. In this form, linear", "kernel with trained parameters is {accuracy_trained:.3f}\") ############################################################################## # We have", "(center_indices + 0.5) * sector_angle x = 0.7 * np.cos(angles)", "related:: tutorial_kernel_based_training Kernel-based training with scikit-learn tutorial_data_reuploading_classifier Classification with data", "analytic mode. dev = qml.device(\"default.qubit\", wires=5, shots=None) wires = dev.wires.tolist()", "product in our linear classification with the map :math:`k`, we", "############################################################################## # A toy problem # ------------- # In this", "makes it superfluous to actually perform the (potentially expensive) embedding", "have one drawback, however: having a high kernel-target alignment #", "y == 1 else y / nminus for y in", "and :math:`K_2` if we see them as vectors # in", "June 2021* Kernel methods are one of the cornerstones of", "outer points in the dataset can be correctly classified, but", "we can actually express much more intricate decision boundaries! This", "actually sneak around this limitation by performing a neat trick:", "kernel, assume_normalized_kernel=False, rescale_class_labels=True, ): \"\"\"Kernel-target alignment between kernel and labels.\"\"\"", "a project from Xanadu's own `QHack <https://qhack.ai/>`__ hackathon. 
What are", "two datapoints the product # of the corresponding labels: #", "np.array([y / nplus if y == 1 else y /", "the decision boundaries of # our classifier: trained_plot_data = plot_decision_boundaries(svm_trained,", "(i + 1) % 50 == 0: current_alignment = target_alignment(", "# :math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j) = k(\\boldsymbol{x}_j, \\boldsymbol{x}_i)`. # In addition, the option", "_make_circular_data(num_sectors) # x and y coordinates of the datapoints x", "we have this, we can let scikit-learn adjust the SVM", "Posted: 24 June 2021* Kernel methods are one of the", "two kernels, being aligned in a vector space. # #", "<NAME>, # <NAME>, and <NAME>. # \"Training Quantum Embedding Kernels", "dataset in question. Performing an exhaustive # search in parameter", "run the quantum circuit on. # For the purpose of", "50 steps. if (i + 1) % 50 == 0:", "-- because # of the ordering in ``qml.probs``, this is", "ingredient we # need for this is an *ansatz*, which", "by the # quantum kernel and :math:`\\boldsymbol{y}\\boldsymbol{y}^T`: # # ..", "training variational embedding kernels and the available functionalities # to", "instance falls within the # correct class, but also that", "summary, the kernel-target alignment effectively captures how well # the", "mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]) ax.scatter(x, y, c=Y, cmap=cmap, s=25, marker=\"s\") if num_sectors", "revisit one of the simplest methods to assign binary labels", "free parameters! It is reasonable to believe we can give", "then perform linear classification there, we could actually realise non-linear", "more theoretical side, :math:`\\operatorname{KA}` # is nothing else than the", "an accuracy improvement vs. 
the SVM with random # parameters:", "kernel with random parameters is {accuracy_init:.3f}\") ############################################################################## # We are", "inc qml.RY(params[0, j], wires=[wire]) qml.broadcast(unitary=qml.CRZ, pattern=\"ring\", wires=wires, parameters=params[1]) ############################################################################## #", "them. # Together, the datapoint and the variational parameters fully", "define a function to help plot the ``DoubleCake`` data: def", ") ) ax.set_xlim(-1, 1) ax.set_ylim(-1, 1) ax.set_aspect(\"equal\") ax.axis(\"off\") return ax", "at the decision boundaries for our # initial classifier: init_plot_data", "[#Alignment]_. The kernel-target alignment compares the # similarity predicted by", "kernel # matrices :math:`K_1` and :math:`K_2` if we see them", "every data instance falls within the # correct class, but", "device to run the quantum circuit on. # For the", "the full dataset every 50 steps. if (i + 1)", "np.array(range(0, num_sectors)) sector_angle = 2 * np.pi / num_sectors angles", "of the dataset it classifies correctly. def accuracy(classifier, X, Y_target):", "can inspect it via the ``qml.kernels.square_kernel_matrix`` # method, which makes", "could actually realise non-linear classification in our original space! ..", "issues # visually in more complex data sets. To this", "Y_target): return 1 - np.count_nonzero(classifier.predict(X) - Y_target) / len(Y_target) accuracy_init", "for gradient descent optimization. # We therefore first define a", "What it does is solving a different optimization task for", "# method, which makes use of symmetry of the kernel,", "nminus for y in Y]) else: _Y = np.array(Y) T", "scalar product in our linear classification with the map :math:`k`,", "step, we choose :math:`4` # datapoints at random. 
Remember that", "the # same class and :math:`-1` otherwise and its kernel", "# trained kernel: # First create a kernel with the", "returns the associated kernel matrix. # We can make use", "ansatz.\"\"\" return np.random.uniform(0, 2 * np.pi, (num_layers, 2, num_wires), requires_grad=True)", "y coordinates of the datapoints x = np.hstack([x1, 0.5 *", "If we go back to the expression for our prediction", "kernel methods? ------------------------ To understand what a kernel method does,", "A linear classifier corresponds to drawing a line and assigning", "methods to assign binary labels to datapoints: linear classification. Imagine", "we have to provide values for the # variational parameters.", "alignment! # # We will make use of regular gradient", "accuracy of a kernel with trained parameters is {accuracy_trained:.3f}\") ##############################################################################", "# of the dataset it classifies correctly. def accuracy(classifier, X,", "alpha=0.2, levels=[-1, 0, 1], ) plot_double_cake_data(X, Y, ax) return plot_data", "- np.count_nonzero(classifier.predict(X) - Y_target) / len(Y_target) accuracy_init = accuracy(svm, X,", "in the process. # # .. note:: # Currently, the", ":math:`-1` otherwise and its kernel matrix is simply # given", "to measure the similarity # between two datapoints. To perform", "well our classifier performs we will measure which percentage #", "PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__. 
The demo is based on Ref.", "is solving a different optimization task for the # :math:`\\alpha`", "[\"#FF0000\", \"#0000FF\"][((i + 1) % 2)] ax.add_artist( mpl.patches.Wedge( (0, 0),", "high kernel-target alignment # is only a necessary but not", "y, labels def make_double_cake_data(num_sectors): x1, y1, labels1 = _make_circular_data(num_sectors) x2,", "We can, however, resort to a more specialized measure, the", "having a high kernel-target alignment # is only a necessary", "# The quantum kernel alone can not be used to", "achieved perfect classification! 🎆 # # Following on the results", "this demo, we will treat a toy problem that showcases", "repeated multiple times, reusing # the datapoint ``x`` but feeding", "value between the first and second datapoint is {kernel_value:.3f}\") ##############################################################################", "Classifier (SVC). from sklearn.svm import SVC ############################################################################## # To construct", "def make_double_cake_data(num_sectors): x1, y1, labels1 = _make_circular_data(num_sectors) x2, y2, labels2", "compute # :math:`\\operatorname{KTA}` but rather # sample smaller subsets of", "/ len(Y_target) accuracy_init = accuracy(svm, X, Y) print(f\"The accuracy of", "# Let's now have a look at our dataset. In", "to # generate the data. # The details of these", "matrices :math:`K_1` and # :math:`K_2`: # # .. 
math:: #", "the variational parameters, we construct a ``lambda`` function that #", "print(K_init) ############################################################################## # Using the Quantum Embedding Kernel for predictions", "+ 1) % 2)] ax.add_artist( mpl.patches.Wedge( (0, 0), 1, i", "kernel circuit, we also require its adjoint # :math:`U(\\boldsymbol{x})^\\dagger`, which", "that *embeds* our datapoints into a larger *feature space* and", "# To see how well our classifier performs we will", "# For the purpose of this tutorial we will use", "layer(x, params, wires, i0=0, inc=1): \"\"\"Building block of the embedding", "The quantum kernel alone can not be used to make", "between two kernels with given kernel matrices :math:`K_1` and #", "of symmetry of the kernel, # :math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j) = k(\\boldsymbol{x}_j, \\boldsymbol{x}_i)`.", "will be much costlier to compute than the kernel :math:`k`.", "can be evaluated on quantum computers, *quantum kernels* for short.", "the purpose of this tutorial we will use PennyLane's ``default.qubit``", "num_sectors=num_sectors) ############################################################################## # Defining a Quantum Embedding Kernel # -----------------------------------", "the advantage that we can often find an explicit formula", "be able to train the Quantum Embedding Kernel we need", "inner products between vectors in the embedding space: .. math::", "and corresponding sectors.\"\"\" x, y = X.T cmap = mpl.colors.ListedColormap([\"#FF0000\",", "the values we sampled above. 
init_kernel = lambda x1, x2:", "optimization we will not use the entire training set to", "explicit formula for the kernel :math:`k` that makes it superfluous", "@qml.qnode(dev) def kernel_circuit(x1, x2, params): ansatz(x1, params, wires=wires) adjoint_ansatz(x2, params,", "parameters=params[1]) ############################################################################## # To construct the ansatz, this layer is", "_zz, cmap=mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]), alpha=0.2, levels=[-1, 0, 1], ) plot_double_cake_data(X, Y,", "kernel matrix :math:`K` generated by the # quantum kernel and", "often find an explicit formula for the kernel :math:`k` that", "means by just replacing the regular scalar product in our", "go back to the expression for our prediction and include", "overall accuracy # of our SVC. # # Training the", "None: sector_angle = 360 / num_sectors for i in range(num_sectors):", "init_params opt = qml.GradientDescentOptimizer(0.2) for i in range(500): # Choose", ":math:`\\boldsymbol{y}`, # that is the number of datapoints in the", "evaluate the kernel # target alignment: kta_init = qml.kernels.target_alignment(X, Y,", "boundaries of # our classifier: trained_plot_data = plot_decision_boundaries(svm_trained, plt.gca()) ##############################################################################", "need for this is an *ansatz*, which we will construct", "References # ---------- # # .. [#Training_QEKs] # # <NAME>,", "range(num_sectors): color = [\"#FF0000\", \"#0000FF\"][(i % 2)] other_color = [\"#FF0000\",", "symmetry of the kernel, # :math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j) = k(\\boldsymbol{x}_j, \\boldsymbol{x}_i)`. #", "of the function ``qml.kernels.kernel_matrix`` that provides # this functionality. 
It", "first and the # second datapoint: kernel_value = kernel(X[0], X[1],", "some imports: from pennylane import numpy as np import matplotlib", "it will be interesting to inspect the decision boundaries of", "% 50 == 0: current_alignment = target_alignment( X, Y, lambda", "back to the expression for our prediction and include the", "loop and improve the kernel-target alignment! # # We will", "map :math:`\\phi(\\boldsymbol{x})` that *embeds* our datapoints into a larger *feature", "(i + 1) * sector_angle, lw=0, color=other_color, alpha=0.1, ) )", "on the kernel circuit structure itself. # # Before focusing", "ax.axis(\"off\") return ax ############################################################################## # Let's now have a look", "trained_kernel_matrix = lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, trained_kernel) # Note", "quantum computers, *quantum kernels* for short. In this tutorial you", "# *kernel-target alignment* [#Alignment]_. The kernel-target alignment compares the #", "``DoubleCake`` dataset. Firstly, we define two functions to enable us", "define some map :math:`\\phi(\\boldsymbol{x})` that *embeds* our datapoints into a", "we can have a look at the kernel value between", "which is why we again supply the variational # parameters", "range(500): # Choose subset of datapoints to compute the KTA", "Currently, the function ``qml.kernels.target_alignment`` is not # differentiable yet, making", "does not divide the entire space into two regions anymore.", "1) % 2)] ax.add_artist( mpl.patches.Wedge( (0, 0), 1, i *", ".. 
math:: # \\operatorname{KTA}_{\\boldsymbol{y}}(K) # = \\frac{\\operatorname{Tr}(K \\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}} # =", "return inner_product params = init_params opt = qml.GradientDescentOptimizer(0.2) for i", "alignment between kernel and labels.\"\"\" K = qml.kernels.square_kernel_matrix( X, kernel,", "= \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\phi(\\boldsymbol{x})\\rangle + b). We will forgo one", "realizes the kernel. We will compute # the overlap of", "kernel alignment # of the kernel matrix :math:`K` generated by", "<NAME>, and <NAME>. # \"Training Quantum Embedding Kernels on Near-Term", "picture by defining an *ideal* kernel # function that expresses", "else than the cosine of the angle between the kernel", "datapoints: linear classification. Imagine we want to discern two different", "0), 0.5, i * sector_angle, (i + 1) * sector_angle,", "how this measure relates # to objects, namely two kernels,", "because it is very # resource intensive, and since the", "kernel value is then given by the *overlap* of the", "layer_params in enumerate(params): layer(x, layer_params, wires, i0=j * len(wires)) adjoint_ansatz", "the actual labels of the # training data. It is", "each of them. # Together, the datapoint and the variational", "params)[0] ############################################################################## # # .. note:: # An alternative way", "\"_zz\": _zz} ax.contourf( _xx, _yy, _zz, cmap=mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]), alpha=0.2, levels=[-1,", "via .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\boldsymbol{x}\\rangle + b).", "circuit # with many free parameters! 
It is reasonable to", "well # the kernel you chose reproduces the actual similarities", "assume_normalized_kernel=False, rescale_class_labels=True, ): \"\"\"Kernel-target alignment between kernel and labels.\"\"\" K", "# dataset, becaues it is essentially just a tool to", "the # *kernel-target alignment* [#Alignment]_. The kernel-target alignment compares the", "# .. note:: # An alternative way to set up", "will not use the entire training set to compute #", "compute # the overlap of the quantum states by first", "# of observing the all-zero state at the end of", "above. init_kernel = lambda x1, x2: kernel(x1, x2, init_params) K_init", "from a more theoretical side, :math:`\\operatorname{KA}` # is nothing else", "sides of the line: .. figure:: ../demonstrations/kernels_module/linear_classification.png :align: center :width:", "we get .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\phi(\\boldsymbol{x})\\rangle +", "def ansatz(x, params, wires): \"\"\"The embedding ansatz\"\"\" for j, layer_params", "wires=[wire]) qml.broadcast(unitary=qml.CRZ, pattern=\"ring\", wires=wires, parameters=params[1]) ############################################################################## # To construct the", "of the kernel [#Alignment]_. This means having good alignment is", "with the # trained kernel: # First create a kernel", "the kernel value between the first and the # second", "and assigning different labels to the regions on opposing sides", "x_1 x_2, x_2^2) \\\\ k(\\boldsymbol{x}, \\boldsymbol{y}) &= x_1^2 y_1^2 +", "each step, we choose :math:`4` # datapoints at random. Remember", "solution because it is very # resource intensive, and since", "X, Y) print(f\"The accuracy of the kernel with random parameters", "<https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__ # allows for a particularly simple # implementation of", "at random. 
Remember that PennyLane's built-in optimizer works # to", "predicted by the quantum kernel to the actual labels of", "trained_plot_data = plot_decision_boundaries(svm_trained, plt.gca()) ############################################################################## # Indeed, we see that", "# Defining a Quantum Embedding Kernel # ----------------------------------- # PennyLane's", "np.remainder(np.floor_divide(angles, sector_angle), 2) - 1 return x, y, labels def", "K \\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)} N} # # where :math:`N` is the number", "does is solving a different optimization task for the #", "the parameters of the quantum # kernel. Thus, let's build", ".. figure:: ../demonstrations/kernels_module/linear_classification.png :align: center :width: 30% We can mathematically", "<https://qhack.ai/>`__ hackathon. What are kernel methods? ------------------------ To understand what", "x2]) y = np.hstack([y1, 0.5 * y2]) # Canonical form", "this is the first entry: def kernel(x1, x2, params): return", "``params`` into each of them. # Together, the datapoint and", "the trained kernel. trained_kernel_matrix = lambda X1, X2: qml.kernels.kernel_matrix(X1, X2,", "# # where :math:`N` is the number of elements in", "labels2]) # Canonical form of labels Y = labels.astype(int) return", "we can let scikit-learn adjust the SVM from our Quantum", "# the embedding ansatz :math:`U(\\boldsymbol{x})`. # In order to construct", "just replacing the regular scalar product in our linear classification", "1, N_gridpoints), np.linspace(-1, 1, N_gridpoints)) _zz = np.zeros_like(_xx) for idx", "be interesting to inspect the decision boundaries of # our", "two sets of datapoints and returns the associated kernel matrix.", ":math:`b` vectors we introduced in the beginning. 
svm = SVC(kernel=lambda", "x2: kernel(x1, x2, init_params) K_init = qml.kernels.square_kernel_matrix(X, init_kernel, assume_normalized_kernel=True) with", "\"\"\"Kernel-target alignment between kernel and labels.\"\"\" K = qml.kernels.square_kernel_matrix( X,", "can often find an explicit formula for the kernel :math:`k`", "math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x}_j)\\rangle. We call this", "with random parameters is {accuracy_init:.3f}\") ############################################################################## # We are also", "opt = qml.GradientDescentOptimizer(0.2) for i in range(500): # Choose subset", "############################################################################## # We see the outer points in the dataset", "To speed up # the optimization we will not use", "those variational parameters which improve the overall accuracy # of", "# We can make use of the function ``qml.kernels.kernel_matrix`` that", "dataset. Firstly, we define two functions to enable us to", "not a good solution because it is very # resource", "this layer: import pennylane as qml def layer(x, params, wires,", "assume_normalized_kernel=True) with np.printoptions(precision=3, suppress=True): print(K_init) ############################################################################## # Using the Quantum", "it can adjust itself to the dataset, and on the", "* T) norm = np.sqrt(np.sum(K * K) * np.sum(T *", "# ------------------------------------- # # To be able to train the", "# the overlap of the quantum states by first applying", "is {accuracy_trained:.3f}\") ############################################################################## # We have now achieved perfect classification!", "embedding data into the space of quantum states. 
We formalize", "sklearn.svm import SVC ############################################################################## # To construct the SVM, we", "math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\phi(\\boldsymbol{x})\\rangle + b). We will", "``kernels`` module allows you to easily evaluate the kernel #", "ax.add_artist( mpl.patches.Wedge( (0, 0), 1, i * sector_angle, (i +", "similarity predicted by the quantum kernel to the actual labels", "datasets that are not separable by a hyperplane can't be", "# reinforces the geometric picture of how this measure relates", "line and assigning different labels to the regions on opposing", "could help us spotting overfitting issues # visually in more", "X2: qml.kernels.kernel_matrix(X1, X2, init_kernel)).fit(X, Y) ############################################################################## # To see how", "a high kernel-target alignment # is only a necessary but", "An alternative way to set up the kernel circuit in", "SVM with random # parameters: accuracy_trained = accuracy(svm_trained, X, Y)", "will introduce a # second helper method. def plot_decision_boundaries(classifier, ax,", "with data reuploading *Authors: <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and", "away to create a dataset to work with, the #", "y / nminus for y in Y]) else: _Y =", "# does have one drawback, however: having a high kernel-target", "the regular scalar product in our linear classification with the", "# First create a kernel with the trained parameter baked", "We formalize this by considering a parameterised quantum circuit :math:`U(\\boldsymbol{x})`", "regular scalar product in our linear classification with the map", "type # `Projector <https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__. # This is shown in the", "quantum kernel to the actual labels of the # training", "via the ``lambda`` function from above. 
# Once we have", "the embedding of the first # datapoint and then the", "the embedding of the second datapoint. We # finally extract", "num_sectors angles = (center_indices + 0.5) * sector_angle x =", "to enable us to # generate the data. # The", "can choose the vector defining the decision boundary as a", "similiarity measure # between two kernels with given kernel matrices", "# # <NAME>, <NAME>, and <NAME>. # \"An overview of", "expected to suffer from bad generalisation. # # References #", "embedding kernels, # training variational embedding kernels and the available", "the picture by defining an *ideal* kernel # function that", "quantum # kernel. Thus, let's build a second support vector", "Support Vector Classifier (SVC). from sklearn.svm import SVC ############################################################################## #", "num_sectors for i in range(num_sectors): color = [\"#FF0000\", \"#0000FF\"][(i %", "# # The assigned kernel is thus :math:`+1` if both", "are concerned with kernels that can be evaluated on quantum", "the formula yields .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}\\left(\\sum_i \\alpha_i \\langle", "express much more intricate decision boundaries! This is very important,", "construct the SVM, we need to supply ``sklearn.svm.SVC`` with a", "different optimization task for the # :math:`\\alpha` and :math:`b` vectors", "is not None: sector_angle = 360 / num_sectors for i", "non-linear classification in our original space! .. figure:: ../demonstrations/kernels_module/embedding_nonlinear_classification.png :align:", "kind of kernel that can be realized on near-term quantum", "can obtain via ``qml.adjoint``. def ansatz(x, params, wires): \"\"\"The embedding", ".. note:: # Currently, the function ``qml.kernels.target_alignment`` is not #", "the datapoints, which is why we again supply the variational", ".. 
note:: # Seen from a more theoretical side, :math:`\\operatorname{KA}`", "powerful, as datasets that are not separable by a hyperplane", "# \"An overview of kernel alignment and its applications.\" #", "mpl.patches.Wedge( (0, 0), 0.5, i * sector_angle, (i + 1)", "to assess the impact of training the parameters of the", "that realizes the kernel. We will compute # the overlap", "# To construct the SVM, we need to supply ``sklearn.svm.SVC``", "* sector_angle, (i + 1) * sector_angle, lw=0, color=other_color, alpha=0.1,", "is {accuracy_init:.3f}\") ############################################################################## # We are also interested in seeing", "*Authors: <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and <NAME>. Posted: 24", "# to use the observable type # `Projector <https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__. #", "many free parameters! It is reasonable to believe we can", "and then the adjoint of the embedding of the second", ".. meta:: :property=\"og:description\": Kernels and alignment training with Pennylane. :property=\"og:image\":", "\\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x}_j)\\rangle. We call this function the *kernel*. It", "of course need to start with some imports: from pennylane", "# Seen from a more theoretical side, :math:`\\operatorname{KA}` # is", "<NAME>, <NAME>, <NAME>, <NAME>, <NAME> and <NAME>. Posted: 24 June", "\"\"\"Plot double cake data and corresponding sectors.\"\"\" x, y =", "* sector_angle, lw=0, color=color, alpha=0.1, width=0.5, ) ) ax.add_artist( mpl.patches.Wedge(", "We # finally extract the probabilities of observing each basis", "from above. 
# Once we have this, we can let", "we see them as vectors # in the space of", "is why we again supply the variational # parameters via", "These are kernels that arise from embedding data into the", "classification with quantum embedding kernels, # training variational embedding kernels", "inner_product params = init_params opt = qml.GradientDescentOptimizer(0.2) for i in", "PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__ # allows for a particularly simple", "# Now we can have a look at the kernel", "we can often find an explicit formula for the kernel", "data. It # does have one drawback, however: having a", "the space of matrices with the Hilbert-Schmidt (or # Frobenius)", "of # our classifier: trained_plot_data = plot_decision_boundaries(svm_trained, plt.gca()) ############################################################################## #", "2 x_1 x_2 y_1 y_2 + x_2^2 y_2^2 = \\langle", "``x`` but feeding different variational # parameters ``params`` into each", "measure the similarity # between two datapoints. To perform an", "It is based on *kernel alignment*, a similiarity measure #", "y1, labels1 = _make_circular_data(num_sectors) x2, y2, labels2 = _make_circular_data(num_sectors) #", "X, Y_target): return 1 - np.count_nonzero(classifier.predict(X) - Y_target) / len(Y_target)", "+ 1) % 50 == 0: current_alignment = target_alignment( X,", "different labels to the regions on opposing sides of the", "# # Before focusing on the kernel values we have", "datapoints, as we know them to be 1 # for", "this functionality. It expects the kernel to not have additional", "question. Performing an exhaustive # search in parameter space is", "the vector defining the decision boundary as a linear combination", "# initial classifier: init_plot_data = plot_decision_boundaries(svm, plt.gca()) ############################################################################## # We", "actually # *maximize* it in the process. 
# # ..", "is then defined as the kernel alignment # of the", "Quantum Embedding Kernel # ------------------------------------- # # To be able", "datapoints into a larger *feature space* and then perform linear", "works # to *minimize* the cost function that is given", "import numpy as np import matplotlib as mpl np.random.seed(1359) ##############################################################################", "math:: # k_{\\boldsymbol{y}}(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = y_i y_j. # # The", "kernel(X[0], X[1], init_params) print(f\"The kernel value between the first and", "datapoints arranged in an even circle.\"\"\" center_indices = np.array(range(0, num_sectors))", "we will work with # 3 sectors: import matplotlib.pyplot as", "by assigning the label :math:`y` via .. math:: y(\\boldsymbol{x}) =", "variational parameters fully determine # the embedding ansatz :math:`U(\\boldsymbol{x})`. #", "an *ansatz*, which we will construct by repeating a #", "define two functions to enable us to # generate the", "= [\"#FF0000\", \"#0000FF\"][(i % 2)] other_color = [\"#FF0000\", \"#0000FF\"][((i +", "measure of # how well it fits the dataset in", "to multiply the kernel target alignment by :math:`-1` to actually", "the ``lambda`` function from above. # Once we have this,", "alignment will not always # bring optimal training accuracy with", "classification can also be extended to higher dimensional vectors :math:`\\boldsymbol{x}`,", "i * sector_angle, (i + 1) * sector_angle, lw=0, color=color,", "X, kernel, assume_normalized_kernel=assume_normalized_kernel, ) if rescale_class_labels: nplus = np.count_nonzero(np.array(Y) ==", "# similarity predicted by the quantum kernel to the actual", "to drawing a line and assigning different labels to the", ".. math:: \\phi((x_1, x_2)) &= (x_1^2, \\sqrt{2} x_1 x_2, x_2^2)", "It # does have one drawback, however: having a high", "states .. 
math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = | \\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2. \"\"\" ##############################################################################", "training of quantum models <https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__, where you will also find", "data. It is based on *kernel alignment*, a similiarity measure", "https://pennylane.ai/qml/_images/QEK_thumbnail.png .. related:: tutorial_kernel_based_training Kernel-based training with scikit-learn tutorial_data_reuploading_classifier Classification", "can actually express much more intricate decision boundaries! This is", "the embedding :math:`\\phi` will be much costlier to compute than", "Computers.\" # `arXiv:2105.02276 <https://arxiv.org/abs/2105.02276>`__, 2021. # # .. [#Alignment] #", "/ num_sectors for i in range(num_sectors): color = [\"#FF0000\", \"#0000FF\"][(i", "kernel method does, let's first revisit one of the simplest", "and thus determine its slope. The independent term :math:`b` specifies", "+ 0.5) * sector_angle x = 0.7 * np.cos(angles) y", "implementation. PennyLane's # ``kernels`` module allows you to easily evaluate", "1) ax.set_aspect(\"equal\") ax.axis(\"off\") return ax ############################################################################## # Let's now have", "however, resort to a more specialized measure, the # *kernel-target", "of PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__. The demo is based on", "Vector Classifier (SVC). from sklearn.svm import SVC ############################################################################## # To", "we # have to multiply the kernel target alignment by", "PennyLane's ``default.qubit`` # device with 5 wires in analytic mode.", "good alignment is # guaranteed for good performance, but optimal", "we can obtain via ``qml.adjoint``. 
def ansatz(x, params, wires): \"\"\"The", "start by defining this layer: import pennylane as qml def", "as datasets that are not separable by a hyperplane can't", "\\boldsymbol{x}\\rangle + b). The vector :math:`\\boldsymbol{w}` points perpendicular to the", "marker=\"s\") if num_sectors is not None: sector_angle = 360 /", "function that expresses the original labelling in the vector #", "one drawback, however: having a high kernel-target alignment # is", "plane. In this form, linear classification can also be extended", "y2]) # Canonical form of dataset X = np.vstack([x, y]).T", "cake data and corresponding sectors.\"\"\" x, y = X.T cmap", "know them to be 1 # for our noiseless simulation.", "# matrices :math:`K_1` and :math:`K_2` if we see them as", "Together, the datapoint and the variational parameters fully determine #", "N_gridpoints)) _zz = np.zeros_like(_xx) for idx in np.ndindex(*_xx.shape): _zz[idx] =", "50 == 0: current_alignment = target_alignment( X, Y, lambda x1,", "via ``qml.adjoint``. def ansatz(x, params, wires): \"\"\"The embedding ansatz\"\"\" for", "approach benefits from both: on # one hand it can", "lambda x1, x2: kernel(x1, x2, init_params) K_init = qml.kernels.square_kernel_matrix(X, init_kernel,", "np.sin(angles) labels = 2 * np.remainder(np.floor_divide(angles, sector_angle), 2) - 1", "wires, i0=j * len(wires)) adjoint_ansatz = qml.adjoint(ansatz) def random_params(num_wires, num_layers):", "\\boldsymbol{w}, \\phi(\\boldsymbol{x})\\rangle + b). We will forgo one tiny step,", "\"Training Quantum Embedding Kernels on Near-Term Quantum Computers.\" # `arXiv:2105.02276", "can be correctly classified, but # we still struggle with", ".. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = | \\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2. 
\"\"\" ############################################################################## #", "in the shape for the ansatz.\"\"\" return np.random.uniform(0, 2 *", "demo, we will explore one particular kind of kernel that", "\\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2. \"\"\" ############################################################################## # A toy problem # ------------- #", "value is then given by the *overlap* of the associated", "Canonical form of dataset X = np.vstack([x, y]).T labels =", "in the embedding space: .. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = \\langle", "*kernel alignment*, a similiarity measure # between two kernels with", "# = \\frac{\\operatorname{Tr}(K \\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}} # = \\frac{\\boldsymbol{y}^T K \\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)} N}", "with the trained parameter baked into it. trained_kernel = lambda", "boundaries for our # initial classifier: init_plot_data = plot_decision_boundaries(svm, plt.gca())", "spotting overfitting issues # visually in more complex data sets.", "24 June 2021* Kernel methods are one of the cornerstones", "the same datapoints, as we know them to be 1", "It expects the kernel to not have additional parameters #", "functionality of PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__. 
The demo is based", "\\operatorname{KTA}_{\\boldsymbol{y}}(K) # = \\frac{\\operatorname{Tr}(K \\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}} # = \\frac{\\boldsymbol{y}^T K \\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)}", "between kernel and labels.\"\"\" K = qml.kernels.square_kernel_matrix( X, kernel, assume_normalized_kernel=assume_normalized_kernel,", "is {kta_init:.3f}\") ############################################################################## # Now let's code up an optimization", "the alignment on the full dataset every 50 steps. if", "\"\"\"Building block of the embedding ansatz\"\"\" i = i0 for", "end we will introduce a # second helper method. def", "measure, the # *kernel-target alignment* [#Alignment]_. The kernel-target alignment compares", "0.5 * y2]) # Canonical form of dataset X =", "= y_i y_j. # # The assigned kernel is thus", "besides the datapoints, which is why we again supply the", "# # .. math:: # \\operatorname{KA}(K_1, K_2) = \\frac{\\operatorname{Tr}(K_1 K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}.", "if we define some map :math:`\\phi(\\boldsymbol{x})` that *embeds* our datapoints", "ax, num_sectors=None): \"\"\"Plot double cake data and corresponding sectors.\"\"\" x,", "*ideal* kernel # function that expresses the original labelling in", "for understanding the demo, # so don't mind them if", "qml.broadcast(unitary=qml.CRZ, pattern=\"ring\", wires=wires, parameters=params[1]) ############################################################################## # To construct the ansatz,", "elements in :math:`\\boldsymbol{y}`, # that is the number of datapoints", "../demonstrations/kernels_module/linear_classification.png :align: center :width: 30% We can mathematically formalize this", "to the state .. 
math:: |\\psi(\\boldsymbol{x})\\rangle = U(\\boldsymbol{x}) |0 \\rangle.", "# In summary, the kernel-target alignment effectively captures how well", "# :math:`U(\\boldsymbol{x})^\\dagger`, which we can obtain via ``qml.adjoint``. def ansatz(x,", "# # Let's now come back to the actual implementation.", "regular gradient descent optimization. To speed up # the optimization", "def accuracy(classifier, X, Y_target): return 1 - np.count_nonzero(classifier.predict(X) - Y_target)", "effectively captures how well # the kernel you chose reproduces", "Embedding Kernels (QEKs)*. These are kernels that arise from embedding", "linear classification can also be extended to higher dimensional vectors", "let scikit-learn adjust the SVM from our Quantum # Embedding", "y2, labels2 = _make_circular_data(num_sectors) # x and y coordinates of", "classified without error. We can actually sneak around this limitation", "need to supply ``sklearn.svm.SVC`` with a function # that takes", "a kernel matrix function. svm_trained = SVC(kernel=trained_kernel_matrix).fit(X, Y) ############################################################################## #", "benefits from both: on # one hand it can adjust", "classification, we can choose the vector defining the decision boundary", "start with some imports: from pennylane import numpy as np", "to two datapoints the product # of the corresponding labels:", "what the decision boundaries in this # classification look like.", "formula yields .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}\\left(\\sum_i \\alpha_i \\langle \\phi(\\boldsymbol{x}_i),", "need a device to run the quantum circuit on. #", "argument to be a kernel matrix function. 
svm_trained = SVC(kernel=trained_kernel_matrix).fit(X,", "all-zero state at the end of the kernel circuit --", "to train the Quantum Embedding Kernel we need some measure", "function for optimization cost = lambda _params: -target_alignment( X[subset], Y[subset],", "* np.sin(angles) labels = 2 * np.remainder(np.floor_divide(angles, sector_angle), 2) -", "and :math:`b` vectors we introduced in the beginning. svm =", "but notice the above formula only contains inner products between", "np.hstack([labels1, -1 * labels2]) # Canonical form of labels Y", "color=other_color, alpha=0.1, ) ) ax.set_xlim(-1, 1) ax.set_ylim(-1, 1) ax.set_aspect(\"equal\") ax.axis(\"off\")", "overview of kernel alignment and its applications.\" # `Artificial Intelligence", "accuracy improvement vs. the SVM with random # parameters: accuracy_trained", "labels Y = labels.astype(int) return X, Y ############################################################################## # Next,", "of quantum models <https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__, where you will also find more", "have to provide values for the # variational parameters. At", "to detect small improvements. # # We can, however, resort", "entire space into two regions anymore. Instead one needs a", "cost function for optimization cost = lambda _params: -target_alignment( X[subset],", "X, Y, lambda x1, x2: kernel(x1, x2, params), assume_normalized_kernel=True, )", "module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__ # allows for a particularly simple # implementation", "anymore. Instead one needs a *hyperplane*. It is immediately clear", "*embeds* our datapoints into a larger *feature space* and then", "variational parameters, we construct a ``lambda`` function that # fixes", "like. This could help us spotting overfitting issues # visually", "Kernel # ------------------------------------- # # To be able to train", "function. 
def target_alignment( X, Y, kernel, assume_normalized_kernel=False, rescale_class_labels=True, ): \"\"\"Kernel-target", "And we proceed right away to create a dataset to", "params, wires=wires) adjoint_ansatz(x2, params, wires=wires) return qml.probs(wires=wires) ############################################################################## # The", "second helper method. def plot_decision_boundaries(classifier, ax, N_gridpoints=14): _xx, _yy =", "Second create a kernel matrix function using the trained kernel.", "what a kernel method does, let's first revisit one of", "\\sum_i \\alpha_i \\phi(\\boldsymbol{x}_i)`. Putting this into the formula yields ..", "vector defining the decision boundary as a linear combination of", "0.5 * x2]) y = np.hstack([y1, 0.5 * y2]) #", "to suffer from bad generalisation. # # References # ----------", "`arXiv:2105.02276 <https://arxiv.org/abs/2105.02276>`__, 2021. # # .. [#Alignment] # # <NAME>,", "order to construct the full kernel circuit, we also require", "# # .. note:: # An alternative way to set", "the kernel you chose reproduces the actual similarities of the", "divide the entire space into two regions anymore. Instead one", "adjoint_ansatz = qml.adjoint(ansatz) def random_params(num_wires, num_layers): \"\"\"Generate random variational parameters", "improvements. # # We can, however, resort to a more", "error. We can actually sneak around this limitation by performing", "however: having a high kernel-target alignment # is only a", "you will learn how to evaluate kernels, use them for", "The independent term :math:`b` specifies the position on the plane.", "form the # *kernel matrix*. We can inspect it via", "# # We can, however, resort to a more specialized", "it can be shown that for the purpose of optimal", "{kernel_value:.3f}\") ############################################################################## # The mutual kernel values between all elements", "quantum states .. 
math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = | \\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2. \"\"\"", "kernel and :math:`\\boldsymbol{y}\\boldsymbol{y}^T`: # # .. math:: # \\operatorname{KTA}_{\\boldsymbol{y}}(K) #", "position on the plane. In this form, linear classification can", "with trained parameters is {accuracy_trained:.3f}\") ############################################################################## # We have now", "measure which percentage # of the dataset it classifies correctly.", "is a discrete quantity we # would not be able", "(x_1^2, \\sqrt{2} x_1 x_2, x_2^2) \\\\ k(\\boldsymbol{x}, \\boldsymbol{y}) &= x_1^2", "making it unfit for gradient descent optimization. # We therefore", "the *overlap* of the associated embedded quantum states .. math::", "Defining a Quantum Embedding Kernel # ----------------------------------- # PennyLane's `kernels", "_params), assume_normalized_kernel=True, ) # Optimization step params = opt.step(cost, params)", "method, which makes use of symmetry of the kernel, #", "random. Remember that PennyLane's built-in optimizer works # to *minimize*", "vectors we introduced in the beginning. svm = SVC(kernel=lambda X1,", "that arise from embedding data into the space of quantum", "# one hand it can adjust itself to the dataset,", "quantum states. We formalize this by considering a parameterised quantum", "A toy problem # ------------- # In this demo, we", "the ``qml.kernels.square_kernel_matrix`` # method, which makes use of symmetry of", "wires=[wire]) i += inc qml.RY(params[0, j], wires=[wire]) qml.broadcast(unitary=qml.CRZ, pattern=\"ring\", wires=wires,", "easily evaluate the kernel # target alignment: kta_init = qml.kernels.target_alignment(X,", "cmap = mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]) ax.scatter(x, y, c=Y, cmap=cmap, s=25, marker=\"s\")", "[#Alignment]_. 
This means having good alignment is # guaranteed for", "on the other hand # is not expected to suffer", "Quantum Embedding Kernels on Near-Term Quantum Computers.\" # `arXiv:2105.02276 <https://arxiv.org/abs/2105.02276>`__,", "# Indeed, we see that now not only every data", "\\rangle^2. This means by just replacing the regular scalar product", "# Frobenius) scalar product # :math:`\\langle A, B \\rangle =", "of points that lie in different corners of the plane.", "i * sector_angle, (i + 1) * sector_angle, lw=0, color=other_color,", "# Optimization step params = opt.step(cost, params) # Report the", "require its adjoint # :math:`U(\\boldsymbol{x})^\\dagger`, which we can obtain via", "them to be 1 # for our noiseless simulation. Overall", "that done, let's have a look at the decision boundaries", "<https://arxiv.org/abs/2105.02276>`__, 2021. # # .. [#Alignment] # # <NAME>, <NAME>,", "space! .. figure:: ../demonstrations/kernels_module/embedding_nonlinear_classification.png :align: center :width: 65% If we", "the regions on opposing sides of the line: .. figure::", "= qml.adjoint(ansatz) def random_params(num_wires, num_layers): \"\"\"Generate random variational parameters in", "variational parameters in the shape for the ansatz.\"\"\" return np.random.uniform(0,", "embedding space: .. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x}_j)\\rangle.", "shown that for the purpose of optimal classification, we can", "############################################################################## # The kernel function itself is now obtained by", "at the end of the kernel circuit -- because #", "of Quantum Embedding Kernels. The first ingredient we # need", "be classified without error. We can actually sneak around this", "kernel matrices :math:`K_1` and # :math:`K_2`: # # .. math::", "product # :math:`\\langle A, B \\rangle = \\operatorname{Tr}(A^T B)`. 
This", "* x2]) y = np.hstack([y1, 0.5 * y2]) # Canonical", "We can actually sneak around this limitation by performing a", "our datapoints into a larger *feature space* and then perform", "are kernels that arise from embedding data into the space", "return 1 - np.count_nonzero(classifier.predict(X) - Y_target) / len(Y_target) accuracy_init =", "actually realise non-linear classification in our original space! .. figure::", "kernel, assume_normalized_kernel=assume_normalized_kernel, ) if rescale_class_labels: nplus = np.count_nonzero(np.array(Y) == 1)", "let's build a second support vector classifier with the #", "improvement vs. the SVM with random # parameters: accuracy_trained =", "the embedding ansatz :math:`U(\\boldsymbol{x})`. # In order to construct the", "and <NAME>. Posted: 24 June 2021* Kernel methods are one", "enable us to # generate the data. # The details", "norm return inner_product params = init_params opt = qml.GradientDescentOptimizer(0.2) for", "In this tutorial you will learn how to evaluate kernels,", "In our example, we will work with # 3 sectors:", "# -------------------------------------------------- # The quantum kernel alone can not be", "X[1], init_params) print(f\"The kernel value between the first and second", "assume_normalized_kernel=True) print(f\"The kernel-target alignment for our dataset and random parameters", "our approach benefits from both: on # one hand it", "modify the variational parameters in our circuit # ansatz. What", ":math:`y` via .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\boldsymbol{x}\\rangle +", "center :width: 65% If we go back to the expression", "a good # performance of the kernel [#Alignment]_. 
This means", "we construct a ``lambda`` function that # fixes them to", "angles = (center_indices + 0.5) * sector_angle x = 0.7", "kernels and the available functionalities # to do both in", "y = np.hstack([y1, 0.5 * y2]) # Canonical form of", "why we again supply the variational # parameters via the", "performing a neat trick: if we define some map :math:`\\phi(\\boldsymbol{x})`", "= \\sum_i \\alpha_i \\phi(\\boldsymbol{x}_i)`. Putting this into the formula yields", "given by the outer product :math:`\\boldsymbol{y}\\boldsymbol{y}^T`. # The kernel-target alignment", "training set to compute # :math:`\\operatorname{KTA}` but rather # sample", "linear classification with the map :math:`k`, we can actually express", "# # Following on the results that SVM's have proven", "# \\operatorname{KTA}_{\\boldsymbol{y}}(K) # = \\frac{\\operatorname{Tr}(K \\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}} # = \\frac{\\boldsymbol{y}^T K", "the angle between the kernel # matrices :math:`K_1` and :math:`K_2`", "use # of scikit-learn's Support Vector Classifier (SVC). from sklearn.svm", "lie in different corners of the plane. A linear classifier", "for y in Y]) else: _Y = np.array(Y) T =", "i0=j * len(wires)) adjoint_ansatz = qml.adjoint(ansatz) def random_params(num_wires, num_layers): \"\"\"Generate", "classifier performs we will measure which percentage # of the", "obtain via ``qml.adjoint``. def ansatz(x, params, wires): \"\"\"The embedding ansatz\"\"\"", "are not essential for understanding the demo, # so don't", ":align: center :width: 65% If we go back to the", ":math:`k`, we can actually express much more intricate decision boundaries!", "see an accuracy improvement vs. 
the SVM with random #", "we will explore one particular kind of kernel that can", "= len(Y) - nplus _Y = np.array([y / nplus if", "+= inc qml.RY(params[0, j], wires=[wire]) qml.broadcast(unitary=qml.CRZ, pattern=\"ring\", wires=wires, parameters=params[1]) ##############################################################################", "to see an accuracy improvement vs. the SVM with random", "the kernel with random parameters is {accuracy_init:.3f}\") ############################################################################## # We", "is very # resource intensive, and since the accuracy is", "second datapoint is {kernel_value:.3f}\") ############################################################################## # The mutual kernel values", "* np.sum(T * T)) inner_product = inner_product / norm return", "neat trick: if we define some map :math:`\\phi(\\boldsymbol{x})` that *embeds*", "kernels =========================================== .. meta:: :property=\"og:description\": Kernels and alignment training with", "############################################################################## # We want to assess the impact of training", "function # that takes two sets of datapoints and returns", "return X, Y ############################################################################## # Next, we define a function", "# so don't mind them if they are confusing. def", "construct a ``lambda`` function that # fixes them to the", "between the first and the # second datapoint: kernel_value =", "# PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__ # allows for a particularly", "kernel # target alignment: kta_init = qml.kernels.target_alignment(X, Y, init_kernel, assume_normalized_kernel=True)", ":math:`K_1` and # :math:`K_2`: # # .. math:: # \\operatorname{KA}(K_1,", "back to the actual implementation. 
PennyLane's # ``kernels`` module allows", "number of layers in the # ansatz circuit to :math:`6`.", "Xanadu's own `QHack <https://qhack.ai/>`__ hackathon. What are kernel methods? ------------------------", "plt.gca(), num_sectors=num_sectors) ############################################################################## # Defining a Quantum Embedding Kernel #", "data enters the picture by defining an *ideal* kernel #", "kernel circuit in PennyLane would be # to use the", "we will construct by repeating a # layer as building", "above. # Once we have this, we can let scikit-learn", "# device with 5 wires in analytic mode. dev =", "parameters: accuracy_trained = accuracy(svm_trained, X, Y) print(f\"The accuracy of a", "in our original space! .. figure:: ../demonstrations/kernels_module/embedding_nonlinear_classification.png :align: center :width:", "remember we have a circuit # with many free parameters!", "We of course need to start with some imports: from", "fits the dataset in question. Performing an exhaustive # search", "Seen from a more theoretical side, :math:`\\operatorname{KA}` # is nothing", "namely two kernels, being aligned in a vector space. #", "# quantum kernel and :math:`\\boldsymbol{y}\\boldsymbol{y}^T`: # # .. math:: #", "strong artifacts that would make us # distrust the model.", "decision boundaries in this # classification look like. This could", "rescale_class_labels=True, ): \"\"\"Kernel-target alignment between kernel and labels.\"\"\" K =", "Thus, let's build a second support vector classifier with the", "method does, let's first revisit one of the simplest methods", "as qml def layer(x, params, wires, i0=0, inc=1): \"\"\"Building block", "Kernels and alignment training with Pennylane. :property=\"og:image\": https://pennylane.ai/qml/_images/QEK_thumbnail.png .. 
related::", "Quantum Embedding Kernel # ----------------------------------- # PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__", "kernel value between the first and the # second datapoint:", "scikit-learn tutorial_data_reuploading_classifier Classification with data reuploading *Authors: <NAME>, <NAME>, <NAME>,", "x1, x2: kernel(x1, x2, params) # Second create a kernel", "quantum computers, namely *Quantum Embedding Kernels (QEKs)*. These are kernels", "############################################################################## # We are also interested in seeing what the", "_yy = np.meshgrid(np.linspace(-1, 1, N_gridpoints), np.linspace(-1, 1, N_gridpoints)) _zz =", "# parameters ``params`` into each of them. # Together, the", "variational parameters which improve the overall accuracy # of our", "the datapoint and the variational parameters fully determine # the", "guaranteed for good performance, but optimal alignment will not always", ":property=\"og:image\": https://pennylane.ai/qml/_images/QEK_thumbnail.png .. related:: tutorial_kernel_based_training Kernel-based training with scikit-learn tutorial_data_reuploading_classifier", "qml.kernels.kernel_matrix(X1, X2, init_kernel)).fit(X, Y) ############################################################################## # To see how well", "toy problem that showcases the # inner workings of classification", "that for the purpose of optimal classification, we can choose", "current_alignment = target_alignment( X, Y, lambda x1, x2: kernel(x1, x2,", "# ---------- # # .. [#Training_QEKs] # # <NAME>, <NAME>,", "1) nminus = len(Y) - nplus _Y = np.array([y /", "N_gridpoints), np.linspace(-1, 1, N_gridpoints)) _zz = np.zeros_like(_xx) for idx in", "the first # datapoint and then the adjoint of the", "# k_{\\boldsymbol{y}}(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = y_i y_j. 
# # The assigned", ") # Optimization step params = opt.step(cost, params) # Report", "these functions are not essential for understanding the demo, #", "our classifier performs we will measure which percentage # of", "choose the vector defining the decision boundary as a linear", "maps a datapoint :math:`\\boldsymbol{x}` to the state .. math:: |\\psi(\\boldsymbol{x})\\rangle", "the ansatz, this layer is repeated multiple times, reusing #", "the accuracy is a discrete quantity we # would not", "The first ingredient we # need for this is an", "def target_alignment( X, Y, kernel, assume_normalized_kernel=False, rescale_class_labels=True, ): \"\"\"Kernel-target alignment", "the all-zero state at the end of the kernel circuit", "x1, x2: kernel(x1, x2, _params), assume_normalized_kernel=True, ) # Optimization step", "in the vector # :math:`\\boldsymbol{y}` by assigning to two datapoints", "double cake data and corresponding sectors.\"\"\" x, y = X.T", "to not have additional parameters # besides the datapoints, which", "the vector # :math:`\\boldsymbol{y}` by assigning to two datapoints the", "datapoints x = np.hstack([x1, 0.5 * x2]) y = np.hstack([y1,", "This means having good alignment is # guaranteed for good", "as mpl np.random.seed(1359) ############################################################################## # And we proceed right away", "len(wires)) adjoint_ansatz = qml.adjoint(ansatz) def random_params(num_wires, num_layers): \"\"\"Generate random variational", "= 0.7 * np.cos(angles) y = 0.7 * np.sin(angles) labels", "the space of quantum states. We formalize this by considering", "by defining an *ideal* kernel # function that expresses the", "= plot_decision_boundaries(svm_trained, plt.gca()) ############################################################################## # Indeed, we see that now", "include the embedding, we get .. 
math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle", "will treat a toy problem that showcases the # inner", "of this tutorial we will use PennyLane's ``default.qubit`` # device", "to the line and thus determine its slope. The independent", "kernel_circuit(x1, x2, params)[0] ############################################################################## # # .. note:: # An", "but it can be shown that for the purpose of", "in analytic mode. dev = qml.device(\"default.qubit\", wires=5, shots=None) wires =", "= mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]) ax.scatter(x, y, c=Y, cmap=cmap, s=25, marker=\"s\") if", "with the map :math:`k`, we can actually express much more", "code up an optimization loop and improve the kernel-target alignment!", "width=0.5, ) ) ax.add_artist( mpl.patches.Wedge( (0, 0), 0.5, i *", "(0, 0), 1, i * sector_angle, (i + 1) *", "data. # The details of these functions are not essential", "first ingredient we # need for this is an *ansatz*,", "because # of the ordering in ``qml.probs``, this is the", "a # second helper method. def plot_decision_boundaries(classifier, ax, N_gridpoints=14): _xx,", "math:: \\phi((x_1, x_2)) &= (x_1^2, \\sqrt{2} x_1 x_2, x_2^2) \\\\", "first, but notice the above formula only contains inner products", "kernel is thus :math:`+1` if both datapoints lie in the", "Y]) else: _Y = np.array(Y) T = np.outer(_Y, _Y) inner_product", "kernels that arise from embedding data into the space of", "idx in np.ndindex(*_xx.shape): _zz[idx] = classifier.predict(np.array([_xx[idx], _yy[idx]])[np.newaxis, :]) plot_data =", "<https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__. # This is shown in the # `demo on", "the quantum kernel to the actual labels of the #", "kernel [#Alignment]_. This means having good alignment is # guaranteed", "now obtained by looking at the probability # of observing", "# implementation of Quantum Embedding Kernels. 
The first ingredient we", "= lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, trained_kernel) # Note that", "train them with gradient-based optimization, and all that using the", "_Y = np.array([y / nplus if y == 1 else", "= target_alignment( X, Y, lambda x1, x2: kernel(x1, x2, params),", "B)`. This # reinforces the geometric picture of how this", "To include the variational parameters, we construct a ``lambda`` function", "To see how well our classifier performs we will measure", "this into the formula yields .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}\\left(\\sum_i", "and the # second datapoint: kernel_value = kernel(X[0], X[1], init_params)", "we are concerned with kernels that can be evaluated on", "every 50 steps. if (i + 1) % 50 ==", "look at the decision boundaries for our # initial classifier:", "our Quantum # Embedding Kernel. # # .. note:: #", "mind them if they are confusing. def _make_circular_data(num_sectors): \"\"\"Generate datapoints", "only a necessary but not a sufficient condition for a", "# Embedding Kernel. # # .. note:: # This step", "2) - 1 return x, y, labels def make_double_cake_data(num_sectors): x1,", "objects, namely two kernels, being aligned in a vector space.", "import matplotlib.pyplot as plt num_sectors = 3 X, Y =", "we # would not be able to detect small improvements.", "both datapoints lie in the # same class and :math:`-1`", "impact of training the parameters of the quantum # kernel.", "with many free parameters! It is reasonable to believe we", "of the cornerstones of classical machine learning. Here we are", "if num_sectors is not None: sector_angle = 360 / num_sectors", "s=25, marker=\"s\") if num_sectors is not None: sector_angle = 360", "ansatz\"\"\" for j, layer_params in enumerate(params): layer(x, layer_params, wires, i0=j", "x2, _params), assume_normalized_kernel=True, ) # Optimization step params = opt.step(cost,", "<https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__. The demo is based on Ref. 
[#Training_QEKs]_, a project", "\\boldsymbol{w}, \\boldsymbol{x}\\rangle + b). The vector :math:`\\boldsymbol{w}` points perpendicular to", "# calculate the entries between the same datapoints, as we", "print(f\"The kernel value between the first and second datapoint is", "kernel alone can not be used to make predictions on", "to higher dimensional vectors :math:`\\boldsymbol{x}`, where a line does not", "+ 2 x_1 x_2 y_1 y_2 + x_2^2 y_2^2 =", "which improve the overall accuracy # of our SVC. #", "---------- # # .. [#Training_QEKs] # # <NAME>, <NAME>, <NAME>,", "x, y, labels def make_double_cake_data(num_sectors): x1, y1, labels1 = _make_circular_data(num_sectors)", "problem # ------------- # In this demo, we will treat", "\\frac{\\boldsymbol{y}^T K \\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)} N} # # where :math:`N` is the", "def kernel_circuit(x1, x2, params): ansatz(x1, params, wires=wires) adjoint_ansatz(x2, params, wires=wires)", "labels1 = _make_circular_data(num_sectors) x2, y2, labels2 = _make_circular_data(num_sectors) # x", "N} # # where :math:`N` is the number of elements", "we # need for this is an *ansatz*, which we", "to :math:`6`. init_params = random_params(num_wires=5, num_layers=6) ############################################################################## # Now we", "classifier.predict(np.array([_xx[idx], _yy[idx]])[np.newaxis, :]) plot_data = {\"_xx\": _xx, \"_yy\": _yy, \"_zz\":", "concerned with kernels that can be evaluated on quantum computers,", "\\operatorname{sgn}(\\langle \\boldsymbol{w}, \\boldsymbol{x}\\rangle + b). The vector :math:`\\boldsymbol{w}` points perpendicular", "but not a sufficient condition for a good # performance", "be shown that for the purpose of optimal classification, we", "# to do both in PennyLane. We of course need", "Before focusing on the kernel values we have to provide", "reuploading *Authors: <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and <NAME>. 
Posted:", "y, c=Y, cmap=cmap, s=25, marker=\"s\") if num_sectors is not None:", "with the ansatz we only need a device to run", "original space! .. figure:: ../demonstrations/kernels_module/embedding_nonlinear_classification.png :align: center :width: 65% If", "= 2 * np.remainder(np.floor_divide(angles, sector_angle), 2) - 1 return x,", "|0 \\rangle. The kernel value is then given by the", "But remember we have a circuit # with many free", "of the plane. A linear classifier corresponds to drawing a", "to provide values for the # variational parameters. At this", "how to evaluate kernels, use them for classification and train", "# 3 sectors: import matplotlib.pyplot as plt num_sectors = 3", "solving a different optimization task for the # :math:`\\alpha` and", "the # quantum kernel and :math:`\\boldsymbol{y}\\boldsymbol{y}^T`: # # .. math::", "shown in the # `demo on kernel-based training of quantum", "init_plot_data = plot_decision_boundaries(svm, plt.gca()) ############################################################################## # We see the outer", "costlier to compute than the kernel :math:`k`. In this demo,", "values we sampled above. init_kernel = lambda x1, x2: kernel(x1,", "alignment effectively captures how well # the kernel you chose", "embedding of the second datapoint. We # finally extract the", "_params: -target_alignment( X[subset], Y[subset], lambda x1, x2: kernel(x1, x2, _params),", "which we can obtain via ``qml.adjoint``. def ansatz(x, params, wires):", "complex data sets. To this end we will introduce a", "full kernel circuit, we also require its adjoint # :math:`U(\\boldsymbol{x})^\\dagger`,", "= np.outer(_Y, _Y) inner_product = np.sum(K * T) norm =", "of classification with quantum embedding kernels, # training variational embedding", "kernel with the trained parameter baked into it. trained_kernel =", "b). The vector :math:`\\boldsymbol{w}` points perpendicular to the line and", "would not be able to detect small improvements. 
# #", "= plot_double_cake_data(X, Y, plt.gca(), num_sectors=num_sectors) ############################################################################## # Defining a Quantum", "be extended to higher dimensional vectors :math:`\\boldsymbol{x}`, where a line", "differentiable version of this function. def target_alignment( X, Y, kernel,", "parameters in the shape for the ansatz.\"\"\" return np.random.uniform(0, 2", "len(Y) - nplus _Y = np.array([y / nplus if y", "us now define the quantum circuit that realizes the kernel.", "function ``qml.kernels.kernel_matrix`` that provides # this functionality. It expects the", "Embedding Kernel # ------------------------------------- # # To be able to", "associated kernel matrix. # We can make use of the", "== 1) nminus = len(Y) - nplus _Y = np.array([y", "# A toy problem # ------------- # In this demo,", "we proceed right away to create a dataset to work", "# in the space of matrices with the Hilbert-Schmidt (or", "this form, linear classification can also be extended to higher", "parameter baked into it. trained_kernel = lambda x1, x2: kernel(x1,", "<NAME>. # \"An overview of kernel alignment and its applications.\"", "right away to create a dataset to work with, the", "matrix function. svm_trained = SVC(kernel=trained_kernel_matrix).fit(X, Y) ############################################################################## # We expect", "alignment and its applications.\" # `Artificial Intelligence Review 43.2: 179-192", "elements of the dataset form the # *kernel matrix*. We", "which is why we # have to multiply the kernel", "associated kernel: .. math:: \\phi((x_1, x_2)) &= (x_1^2, \\sqrt{2} x_1", "that showcases the # inner workings of classification with quantum", "the ansatz we only need a device to run the", "rather # sample smaller subsets of the data at each", "by performing a neat trick: if we define some map", "and <NAME>. # \"An overview of kernel alignment and its", "the entire space into two regions anymore. 
Instead one needs", "which makes use of symmetry of the kernel, # :math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j)", "x2, params) # Second create a kernel matrix function using", "Here we are concerned with kernels that can be evaluated", "# of the corresponding labels: # # .. math:: #", "speed up # the optimization we will not use the", "this point we fix the number of layers in the", "introduce a # second helper method. def plot_decision_boundaries(classifier, ax, N_gridpoints=14):", "This is very important, because in many interesting cases the", "K_init = qml.kernels.square_kernel_matrix(X, init_kernel, assume_normalized_kernel=True) with np.printoptions(precision=3, suppress=True): print(K_init) ##############################################################################", "kernel_value = kernel(X[0], X[1], init_params) print(f\"The kernel value between the", "1 - np.count_nonzero(classifier.predict(X) - Y_target) / len(Y_target) accuracy_init = accuracy(svm,", "given to it, which is why we # have to", "# between two datapoints. To perform an actual prediction we", "2 * np.remainder(np.floor_divide(angles, sector_angle), 2) - 1 return x, y,", "kernel: .. math:: \\phi((x_1, x_2)) &= (x_1^2, \\sqrt{2} x_1 x_2,", "Y, init_kernel, assume_normalized_kernel=True) print(f\"The kernel-target alignment for our dataset and", "two functions to enable us to # generate the data.", "of the line: .. figure:: ../demonstrations/kernels_module/linear_classification.png :align: center :width: 30%", "is reasonable to believe we can give # values to", "the results that SVM's have proven good generalisation # behavior,", "now have a look at our dataset. In our example,", "def layer(x, params, wires, i0=0, inc=1): \"\"\"Building block of the", "trained kernel: # First create a kernel with the trained", "by the quantum kernel to the actual labels of the", "# .. 
note:: # Seen from a more theoretical side,", "that using the functionality of PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__. The", "x1, y1, labels1 = _make_circular_data(num_sectors) x2, y2, labels2 = _make_circular_data(num_sectors)", "Kernel. # # .. note:: # This step does *not*", "= np.sum(K * T) norm = np.sqrt(np.sum(K * K) *", "other hand # is not expected to suffer from bad", "not always # bring optimal training accuracy with it. #", "is not # differentiable yet, making it unfit for gradient", "# Canonical form of labels Y = labels.astype(int) return X,", "of the associated embedded quantum states .. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j)", "a line does not divide the entire space into two", "\\phi((x_1, x_2)) &= (x_1^2, \\sqrt{2} x_1 x_2, x_2^2) \\\\ k(\\boldsymbol{x},", "the purpose of optimal classification, we can choose the vector", "at the probability # of observing the all-zero state at", "kernel circuit -- because # of the ordering in ``qml.probs``,", "values for :math:`N` datapoints. # To include the variational parameters,", "To construct the ansatz, this layer is repeated multiple times,", "With that done, let's have a look at the decision", "into a larger *feature space* and then perform linear classification", "reinforces the geometric picture of how this measure relates #", "can, however, resort to a more specialized measure, the #", "classification. Imagine we want to discern two different classes of", "_make_circular_data(num_sectors) x2, y2, labels2 = _make_circular_data(num_sectors) # x and y", "nothing else than the cosine of the angle between the", "the geometric picture of how this measure relates # to", "with Pennylane. :property=\"og:image\": https://pennylane.ai/qml/_images/QEK_thumbnail.png .. 
related:: tutorial_kernel_based_training Kernel-based training with", "by :math:`-1` to actually # *maximize* it in the process.", "In this sense, our approach benefits from both: on #", "but also that there are no strong artifacts that would", "train the Quantum Embedding Kernel we need some measure of", "into the formula yields .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}\\left(\\sum_i \\alpha_i", "fixes them to the values we sampled above. init_kernel =", "############################################################################## # Now we can have a look at the", "that provides # this functionality. It expects the kernel to", "_Y = np.array(Y) T = np.outer(_Y, _Y) inner_product = np.sum(K", "classification! 🎆 # # Following on the results that SVM's", "Embedding Kernel we need some measure of # how well", "which percentage # of the dataset it classifies correctly. def", "Near-Term Quantum Computers.\" # `arXiv:2105.02276 <https://arxiv.org/abs/2105.02276>`__, 2021. # # ..", "times, reusing # the datapoint ``x`` but feeding different variational", "is why we # have to multiply the kernel target", "quantum kernel and :math:`\\boldsymbol{y}\\boldsymbol{y}^T`: # # .. math:: # \\operatorname{KTA}_{\\boldsymbol{y}}(K)", "../demonstrations/kernels_module/embedding_nonlinear_classification.png :align: center :width: 65% If we go back to", "ensures that we do not # calculate the entries between", "data reuploading *Authors: <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and <NAME>.", "parameters in our circuit # ansatz. What it does is", "of the simplest methods to assign binary labels to datapoints:", "and its kernel matrix is simply # given by the", "qml.RY(params[0, j], wires=[wire]) qml.broadcast(unitary=qml.CRZ, pattern=\"ring\", wires=wires, parameters=params[1]) ############################################################################## # To", "two datapoints. 
To perform an actual prediction we will make", "Y ############################################################################## # Next, we define a function to help", "# generate the data. # The details of these functions", "this layer is repeated multiple times, reusing # the datapoint", "x_1 x_2 y_1 y_2 + x_2^2 y_2^2 = \\langle \\boldsymbol{x},", "sample smaller subsets of the data at each step, we", "We call this function the *kernel*. It provides the advantage", "the actual implementation. PennyLane's # ``kernels`` module allows you to", "# .. [#Training_QEKs] # # <NAME>, <NAME>, <NAME>, <NAME>, #", "mode. dev = qml.device(\"default.qubit\", wires=5, shots=None) wires = dev.wires.tolist() ##############################################################################", "y in Y]) else: _Y = np.array(Y) T = np.outer(_Y,", "optimization, and all that using the functionality of PennyLane's `kernels", "realise non-linear classification in our original space! .. figure:: ../demonstrations/kernels_module/embedding_nonlinear_classification.png", "= \\frac{\\boldsymbol{y}^T K \\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)} N} # # where :math:`N` is", "params) # Report the alignment on the full dataset every", "i0=0, inc=1): \"\"\"Building block of the embedding ansatz\"\"\" i =", "sector_angle = 360 / num_sectors for i in range(num_sectors): color", "= \\operatorname{sgn}\\left(\\sum_i \\alpha_i \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x})\\rangle + b\\right). This rewriting", "Let's now have a look at our dataset. 
In our", "qml.kernels.square_kernel_matrix(X, init_kernel, assume_normalized_kernel=True) with np.printoptions(precision=3, suppress=True): print(K_init) ############################################################################## # Using", "a necessary but not a sufficient condition for a good", "In summary, the kernel-target alignment effectively captures how well #", "SVC(kernel=lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, init_kernel)).fit(X, Y) ############################################################################## # To", "tutorial_data_reuploading_classifier Classification with data reuploading *Authors: <NAME>, <NAME>, <NAME>, <NAME>,", "random parameters is {accuracy_init:.3f}\") ############################################################################## # We are also interested", "############################################################################## # Now let's code up an optimization loop and", "# # The training data enters the picture by defining", "observing each basis state. @qml.qnode(dev) def kernel_circuit(x1, x2, params): ansatz(x1,", "Embedding Kernels. The first ingredient we # need for this", "We will make use of regular gradient descent optimization. To", "_Y) inner_product = np.sum(K * T) norm = np.sqrt(np.sum(K *", "the datapoints x = np.hstack([x1, 0.5 * x2]) y =", "that there are no strong artifacts that would make us", "lambda x1, x2: kernel(x1, x2, params) # Second create a", "sets of datapoints and returns the associated kernel matrix. #", "_yy, _zz, cmap=mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]), alpha=0.2, levels=[-1, 0, 1], ) plot_double_cake_data(X,", "in np.ndindex(*_xx.shape): _zz[idx] = classifier.predict(np.array([_xx[idx], _yy[idx]])[np.newaxis, :]) plot_data = {\"_xx\":", "and include the embedding, we get .. math:: y(\\boldsymbol{x}) =", "why we # have to multiply the kernel target alignment", "+ b\\right). 
This rewriting might not seem useful at first,", "is based on *kernel alignment*, a similiarity measure # between", "nplus _Y = np.array([y / nplus if y == 1", "# ``DoubleCake`` dataset. Firstly, we define two functions to enable", "we see that now not only every data instance falls", "the # inner workings of classification with quantum embedding kernels,", "the actual similarities of the data. It # does have", "this function. def target_alignment( X, Y, kernel, assume_normalized_kernel=False, rescale_class_labels=True, ):", "points in the dataset can be correctly classified, but #", "the dataset can be correctly classified, but # we still", "# function that expresses the original labelling in the vector", "a tool to measure the similarity # between two datapoints.", "corresponding labels: # # .. math:: # k_{\\boldsymbol{y}}(\\boldsymbol{x}_i, \\boldsymbol{x}_j) =", "# allows for a particularly simple # implementation of Quantum", "kernel and labels.\"\"\" K = qml.kernels.square_kernel_matrix( X, kernel, assume_normalized_kernel=assume_normalized_kernel, )", "them for classification and train them with gradient-based optimization, and", "# differentiable yet, making it unfit for gradient descent optimization.", "labels.\"\"\" K = qml.kernels.square_kernel_matrix( X, kernel, assume_normalized_kernel=assume_normalized_kernel, ) if rescale_class_labels:", "if (i + 1) % 50 == 0: current_alignment =", "= np.vstack([x, y]).T labels = np.hstack([labels1, -1 * labels2]) #", "b\\right). This rewriting might not seem useful at first, but", "``qml.adjoint``. def ansatz(x, params, wires): \"\"\"The embedding ansatz\"\"\" for j,", "if rescale_class_labels: nplus = np.count_nonzero(np.array(Y) == 1) nminus = len(Y)", "layer as building block. Let's start by defining this layer:", "% len(x)], wires=[wire]) i += inc qml.RY(params[0, j], wires=[wire]) qml.broadcast(unitary=qml.CRZ,", "the plane. 
A linear classifier corresponds to drawing a line", "# inner workings of classification with quantum embedding kernels, #", "datapoints. # To include the variational parameters, we construct a", "our classifier: trained_plot_data = plot_decision_boundaries(svm_trained, plt.gca()) ############################################################################## # Indeed, we", "with a function # that takes two sets of datapoints", "num_layers=6) ############################################################################## # Now we can have a look at", "sense, our approach benefits from both: on # one hand", "can also be extended to higher dimensional vectors :math:`\\boldsymbol{x}`, where", "namely *Quantum Embedding Kernels (QEKs)*. These are kernels that arise", "important, because in many interesting cases the embedding :math:`\\phi` will", "and train them with gradient-based optimization, and all that using", "= 2 * np.pi / num_sectors angles = (center_indices +", "{kta_init:.3f}\") ############################################################################## # Now let's code up an optimization loop", "second datapoint. We # finally extract the probabilities of observing", "# With that done, let's have a look at the", "more specialized measure, the # *kernel-target alignment* [#Alignment]_. The kernel-target", "Classification with data reuploading *Authors: <NAME>, <NAME>, <NAME>, <NAME>, <NAME>", "classification and train them with gradient-based optimization, and all that", "our example, we will work with # 3 sectors: import", "make use # of scikit-learn's Support Vector Classifier (SVC). from", "space: .. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x}_j)\\rangle. We", "the beginning. 
svm = SVC(kernel=lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, init_kernel)).fit(X,", "plt.gca()) ############################################################################## # We see the outer points in the", "corresponding sectors.\"\"\" x, y = X.T cmap = mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"])", "else: _Y = np.array(Y) T = np.outer(_Y, _Y) inner_product =", "labels to datapoints: linear classification. Imagine we want to discern", "will learn how to evaluate kernels, use them for classification", "extract the probabilities of observing each basis state. @qml.qnode(dev) def", "qml.adjoint(ansatz) def random_params(num_wires, num_layers): \"\"\"Generate random variational parameters in the", "give # values to those variational parameters which improve the", "kernel_circuit(x1, x2, params): ansatz(x1, params, wires=wires) adjoint_ansatz(x2, params, wires=wires) return", "classifier corresponds to drawing a line and assigning different labels", "let's code up an optimization loop and improve the kernel-target", "to be 1 # for our noiseless simulation. Overall this", "you to easily evaluate the kernel # target alignment: kta_init", "mpl.patches.Wedge( (0, 0), 1, i * sector_angle, (i + 1)", "device with 5 wires in analytic mode. dev = qml.device(\"default.qubit\",", "np.count_nonzero(np.array(Y) == 1) nminus = len(Y) - nplus _Y =", "cmap=mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]), alpha=0.2, levels=[-1, 0, 1], ) plot_double_cake_data(X, Y, ax)", "Define the cost function for optimization cost = lambda _params:", "the position on the plane. 
In this form, linear classification", "ax.set_xlim(-1, 1) ax.set_ylim(-1, 1) ax.set_aspect(\"equal\") ax.axis(\"off\") return ax ############################################################################## #", "kernel(x1, x2, params) # Second create a kernel matrix function", "numpy as np import matplotlib as mpl np.random.seed(1359) ############################################################################## #", "let's have a look at the decision boundaries for our", "y]).T labels = np.hstack([labels1, -1 * labels2]) # Canonical form", ":math:`\\operatorname{KA}` # is nothing else than the cosine of the", "module allows you to easily evaluate the kernel # target", "larger *feature space* and then perform linear classification there, we", "the available functionalities # to do both in PennyLane. We", "from our Quantum # Embedding Kernel. # # .. note::", "using the functionality of PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__. The demo", ":math:`N` is the number of elements in :math:`\\boldsymbol{y}`, # that", "= 360 / num_sectors for i in range(num_sectors): color =", "end of the kernel circuit -- because # of the", "now achieved perfect classification! 🎆 # # Following on the", "a good solution because it is very # resource intensive,", "The demo is based on Ref. [#Training_QEKs]_, a project from", "struggle with the inner circle. But remember we have a", "will forgo one tiny step, but it can be shown", "wires=wires) adjoint_ansatz(x2, params, wires=wires) return qml.probs(wires=wires) ############################################################################## # The kernel", "associated embedded quantum states .. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = |", "from pennylane import numpy as np import matplotlib as mpl", "K_2) = \\frac{\\operatorname{Tr}(K_1 K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}. # # .. 
note:: # Seen", "of the embedding ansatz\"\"\" i = i0 for j, wire", "use of regular gradient descent optimization. To speed up #", "# # .. math:: # k_{\\boldsymbol{y}}(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = y_i y_j.", "labels = 2 * np.remainder(np.floor_divide(angles, sector_angle), 2) - 1 return", "percentage # of the dataset it classifies correctly. def accuracy(classifier,", "datapoint is {kernel_value:.3f}\") ############################################################################## # The mutual kernel values between", "the kernel :math:`k` that makes it superfluous to actually perform", "is not a good solution because it is very #", "defining the decision boundary as a linear combination of the", "us to # generate the data. # The details of", "- 1 return x, y, labels def make_double_cake_data(num_sectors): x1, y1,", "with 5 wires in analytic mode. dev = qml.device(\"default.qubit\", wires=5,", "as vectors # in the space of matrices with the", "wires=5, shots=None) wires = dev.wires.tolist() ############################################################################## # Let us now", "yields .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}\\left(\\sum_i \\alpha_i \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x})\\rangle", "dataset can be correctly classified, but # we still struggle", "makes use of symmetry of the kernel, # :math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j) =", "and <NAME>. # \"Training Quantum Embedding Kernels on Near-Term Quantum", "itself is now obtained by looking at the probability #", "compute than the kernel :math:`k`. In this demo, we will", "the corresponding labels: # # .. math:: # k_{\\boldsymbol{y}}(\\boldsymbol{x}_i, \\boldsymbol{x}_j)", "dataset, becaues it is essentially just a tool to measure", "----------------------------------- # PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__ # allows for a", "space of quantum states. 
We formalize this by considering a", "compute the KTA on. subset = np.random.choice(list(range(len(X))), 4) # Define", "The kernel function itself is now obtained by looking at", "params), assume_normalized_kernel=True, ) print(f\"Step {i+1} - Alignment = {current_alignment:.3f}\") ##############################################################################", "\\\\ k(\\boldsymbol{x}, \\boldsymbol{y}) &= x_1^2 y_1^2 + 2 x_1 x_2", "and the available functionalities # to do both in PennyLane.", "alignment training with Pennylane. :property=\"og:image\": https://pennylane.ai/qml/_images/QEK_thumbnail.png .. related:: tutorial_kernel_based_training Kernel-based", "= _make_circular_data(num_sectors) # x and y coordinates of the datapoints", "to the actual implementation. PennyLane's # ``kernels`` module allows you", "note:: # Currently, the function ``qml.kernels.target_alignment`` is not # differentiable", "the entire training set to compute # :math:`\\operatorname{KTA}` but rather", "k(\\boldsymbol{x}_j, \\boldsymbol{x}_i)`. # In addition, the option ``assume_normalized_kernel=True`` ensures that", "and since the accuracy is a discrete quantity we #", "ordering in ``qml.probs``, this is the first entry: def kernel(x1,", "kernels, use them for classification and train them with gradient-based", "classification in our original space! .. figure:: ../demonstrations/kernels_module/embedding_nonlinear_classification.png :align: center", "wires in analytic mode. dev = qml.device(\"default.qubit\", wires=5, shots=None) wires", "near-term quantum computers, namely *Quantum Embedding Kernels (QEKs)*. These are", "1 # for our noiseless simulation. 
Overall this means that", "K = qml.kernels.square_kernel_matrix( X, kernel, assume_normalized_kernel=assume_normalized_kernel, ) if rescale_class_labels: nplus", "return plot_data ############################################################################## # With that done, let's have a", "= i0 for j, wire in enumerate(wires): qml.Hadamard(wires=[wire]) qml.RZ(x[i %", "# We see the outer points in the dataset can", "initial classifier: init_plot_data = plot_decision_boundaries(svm, plt.gca()) ############################################################################## # We see", "our dataset and random parameters is {kta_init:.3f}\") ############################################################################## # Now", "Alignment = {current_alignment:.3f}\") ############################################################################## # We want to assess the", "# parameters: accuracy_trained = accuracy(svm_trained, X, Y) print(f\"The accuracy of", "parameters ``params`` into each of them. # Together, the datapoint", "To this end we will introduce a # second helper", "x_2^2) \\\\ k(\\boldsymbol{x}, \\boldsymbol{y}) &= x_1^2 y_1^2 + 2 x_1", "advantage that we can often find an explicit formula for", "to do both in PennyLane. We of course need to", "able to detect small improvements. # # We can, however,", "<NAME>, <NAME>, <NAME>, <NAME> and <NAME>. Posted: 24 June 2021*", "# Before focusing on the kernel values we have to", "alignment compares the # similarity predicted by the quantum kernel", "a discrete quantity we # would not be able to", "its slope. The independent term :math:`b` specifies the position on", "of kernel that can be realized on near-term quantum computers,", "embedding of the first # datapoint and then the adjoint", "we know them to be 1 # for our noiseless", "actual similarities of the data. It # does have one", ":math:`N` datapoints. 
# To include the variational parameters, we construct", "We will compute # the overlap of the quantum states", "being aligned in a vector space. # # The training", "= \\frac{\\operatorname{Tr}(K \\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}} # = \\frac{\\boldsymbol{y}^T K \\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)} N} #", "help plot the ``DoubleCake`` data: def plot_double_cake_data(X, Y, ax, num_sectors=None):", "embedding, we get .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\phi(\\boldsymbol{x})\\rangle", "target alignment: kta_init = qml.kernels.target_alignment(X, Y, init_kernel, assume_normalized_kernel=True) print(f\"The kernel-target", "of the datapoints x = np.hstack([x1, 0.5 * x2]) y", "exhaustive # search in parameter space is not a good", "a toy problem that showcases the # inner workings of", "a particularly simple # implementation of Quantum Embedding Kernels. The", "adjoint # :math:`U(\\boldsymbol{x})^\\dagger`, which we can obtain via ``qml.adjoint``. def", "the probability # of observing the all-zero state at the", "############################################################################## # Defining a Quantum Embedding Kernel # ----------------------------------- #", "assigning to two datapoints the product # of the corresponding", "this measure relates # to objects, namely two kernels, being", "perfect classification! 🎆 # # Following on the results that", "corresponds to drawing a line and assigning different labels to", "# ansatz. What it does is solving a different optimization", "provides the advantage that we can often find an explicit", "At this point we fix the number of layers in", "# values to those variational parameters which improve the overall", "\\phi(\\boldsymbol{x}_i)`. Putting this into the formula yields .. math:: y(\\boldsymbol{x})", "5 wires in analytic mode. 
dev = qml.device(\"default.qubit\", wires=5, shots=None)", "r\"\"\"Training and evaluating quantum kernels =========================================== .. meta:: :property=\"og:description\": Kernels", "<NAME>, <NAME> and <NAME>. Posted: 24 June 2021* Kernel methods", "* np.cos(angles) y = 0.7 * np.sin(angles) labels = 2", "and :math:`-1` otherwise and its kernel matrix is simply #", "itself to the dataset, and on the other hand #", "matrix. # We can make use of the function ``qml.kernels.kernel_matrix``", "classification there, we could actually realise non-linear classification in our", "kernels with given kernel matrices :math:`K_1` and # :math:`K_2`: #", "pennylane import numpy as np import matplotlib as mpl np.random.seed(1359)", "it. # # Let's now come back to the actual", "actual implementation. PennyLane's # ``kernels`` module allows you to easily", "2021* Kernel methods are one of the cornerstones of classical", "for the purpose of optimal classification, we can choose the", "falls within the # correct class, but also that there", "kernel argument to be a kernel matrix function. svm_trained =", "labels of the # training data. It is based on", "then defined as the kernel alignment # of the kernel", "wires=wires) return qml.probs(wires=wires) ############################################################################## # The kernel function itself is", "to be a kernel matrix function. svm_trained = SVC(kernel=trained_kernel_matrix).fit(X, Y)", "= np.meshgrid(np.linspace(-1, 1, N_gridpoints), np.linspace(-1, 1, N_gridpoints)) _zz = np.zeros_like(_xx)", "x_1^2 y_1^2 + 2 x_1 x_2 y_1 y_2 + x_2^2", "Consider for example the following embedding and the associated kernel:", ") ax.set_xlim(-1, 1) ax.set_ylim(-1, 1) ax.set_aspect(\"equal\") ax.axis(\"off\") return ax ##############################################################################", "layer_params, wires, i0=j * len(wires)) adjoint_ansatz = qml.adjoint(ansatz) def random_params(num_wires,", "Pennylane. 
:property=\"og:image\": https://pennylane.ai/qml/_images/QEK_thumbnail.png .. related:: tutorial_kernel_based_training Kernel-based training with scikit-learn", "Embedding Kernel. # # .. note:: # This step does", "kernel values we have to provide values for the #", "embedding :math:`\\phi` will be much costlier to compute than the", "trained kernel. trained_kernel_matrix = lambda X1, X2: qml.kernels.kernel_matrix(X1, X2, trained_kernel)", "np.random.seed(1359) ############################################################################## # And we proceed right away to create", "alternative way to set up the kernel circuit in PennyLane", "x_2)) &= (x_1^2, \\sqrt{2} x_1 x_2, x_2^2) \\\\ k(\\boldsymbol{x}, \\boldsymbol{y})", "project from Xanadu's own `QHack <https://qhack.ai/>`__ hackathon. What are kernel", "this # classification look like. This could help us spotting", "include the variational parameters, we construct a ``lambda`` function that", "determine its slope. The independent term :math:`b` specifies the position", "at each step, we choose :math:`4` # datapoints at random.", "the model. In this sense, our approach benefits from both:", "############################################################################## # To see how well our classifier performs we", "alone can not be used to make predictions on a", "alignment* [#Alignment]_. The kernel-target alignment compares the # similarity predicted", "of the kernel matrix :math:`K` generated by the # quantum", "of this function. def target_alignment( X, Y, kernel, assume_normalized_kernel=False, rescale_class_labels=True,", "+ x_2^2 y_2^2 = \\langle \\boldsymbol{x}, \\boldsymbol{y} \\rangle^2. This means", "by repeating a # layer as building block. Let's start", "we introduced in the beginning. svm = SVC(kernel=lambda X1, X2:", "plane. A linear classifier corresponds to drawing a line and", "us # distrust the model. 
In this sense, our approach", "(num_layers, 2, num_wires), requires_grad=True) ############################################################################## # Together with the ansatz", "_zz} ax.contourf( _xx, _yy, _zz, cmap=mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]), alpha=0.2, levels=[-1, 0,", "# bring optimal training accuracy with it. # # Let's", "``DoubleCake`` data: def plot_double_cake_data(X, Y, ax, num_sectors=None): \"\"\"Plot double cake", "a neat trick: if we define some map :math:`\\phi(\\boldsymbol{x})` that", "to inspect the decision boundaries of # our classifier: trained_plot_data", "able to train the Quantum Embedding Kernel we need some", "datapoints to compute the KTA on. subset = np.random.choice(list(range(len(X))), 4)", "tiny step, but it can be shown that for the", "center_indices = np.array(range(0, num_sectors)) sector_angle = 2 * np.pi /", "a different optimization task for the # :math:`\\alpha` and :math:`b`", "by assigning to two datapoints the product # of the", "for short. In this tutorial you will learn how to", "# training data. It is based on *kernel alignment*, a", "use the entire training set to compute # :math:`\\operatorname{KTA}` but", "# of our SVC. # # Training the Quantum Embedding", "look at our dataset. In our example, we will work", "create a kernel with the trained parameter baked into it.", "# classification look like. This could help us spotting overfitting", "np.cos(angles) y = 0.7 * np.sin(angles) labels = 2 *", "be able to detect small improvements. # # We can,", "not very powerful, as datasets that are not separable by", "\\operatorname{sgn}\\left(\\sum_i \\alpha_i \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x})\\rangle + b\\right). This rewriting might", "of the quantum # kernel. Thus, let's build a second", "x and y coordinates of the datapoints x = np.hstack([x1,", "import pennylane as qml def layer(x, params, wires, i0=0, inc=1):", "<NAME>, <NAME>, # <NAME>, and <NAME>. 
# \"Training Quantum Embedding", "around this limitation by performing a neat trick: if we", "X, Y, kernel, assume_normalized_kernel=False, rescale_class_labels=True, ): \"\"\"Kernel-target alignment between kernel", "# We therefore first define a differentiable version of this", "The assigned kernel is thus :math:`+1` if both datapoints lie", "having good alignment is # guaranteed for good performance, but", "Y) ############################################################################## # We expect to see an accuracy improvement", "parameters! It is reasonable to believe we can give #", "Ref. [#Training_QEKs]_, a project from Xanadu's own `QHack <https://qhack.ai/>`__ hackathon.", "its adjoint # :math:`U(\\boldsymbol{x})^\\dagger`, which we can obtain via ``qml.adjoint``.", "ansatz circuit to :math:`6`. init_params = random_params(num_wires=5, num_layers=6) ############################################################################## #", ".. math:: # \\operatorname{KA}(K_1, K_2) = \\frac{\\operatorname{Tr}(K_1 K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}. # #", "sector_angle x = 0.7 * np.cos(angles) y = 0.7 *", "based on *kernel alignment*, a similiarity measure # between two", "space* and then perform linear classification there, we could actually", "alignment # of the kernel matrix :math:`K` generated by the", "datapoints in the dataset. # # In summary, the kernel-target", "on the results that SVM's have proven good generalisation #", "labels def make_double_cake_data(num_sectors): x1, y1, labels1 = _make_circular_data(num_sectors) x2, y2,", "`Projector <https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__. # This is shown in the # `demo", "provides # this functionality. It expects the kernel to not", "not a sufficient condition for a good # performance of", "# :math:`\\frac{1}{2}(N^2-N)` kernel values for :math:`N` datapoints. 
# To include", "within the # correct class, but also that there are", "data into the space of quantum states. We formalize this", "training with scikit-learn tutorial_data_reuploading_classifier Classification with data reuploading *Authors: <NAME>,", "proceed right away to create a dataset to work with,", "resort to a more specialized measure, the # *kernel-target alignment*", "that lie in different corners of the plane. A linear", "more intricate decision boundaries! This is very important, because in", "== 1 else y / nminus for y in Y])", "chose reproduces the actual similarities of the data. It #", "bad generalisation. # # References # ---------- # # ..", "of the data at each step, we choose :math:`4` #", "of labels Y = labels.astype(int) return X, Y ############################################################################## #", "points that lie in different corners of the plane. A", "a hyperplane can't be classified without error. We can actually", "# <NAME>, and <NAME>. # \"Training Quantum Embedding Kernels on", "forgo one tiny step, but it can be shown that", "supply ``sklearn.svm.SVC`` with a function # that takes two sets", "# ``kernels`` module allows you to easily evaluate the kernel", "is now obtained by looking at the probability # of", "products between vectors in the embedding space: .. math:: k(\\boldsymbol{x}_i,", "# :math:`\\langle A, B \\rangle = \\operatorname{Tr}(A^T B)`. This #", "with scikit-learn tutorial_data_reuploading_classifier Classification with data reuploading *Authors: <NAME>, <NAME>,", "supply the variational # parameters via the ``lambda`` function from", "mathematically formalize this by assigning the label :math:`y` via ..", "becaues it is essentially just a tool to measure the", "math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = | \\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2. 
\"\"\" ############################################################################## # A", "layers in the # ansatz circuit to :math:`6`. init_params =", "# between two kernels with given kernel matrices :math:`K_1` and", "theoretical side, :math:`\\operatorname{KA}` # is nothing else than the cosine", "kernel. Thus, let's build a second support vector classifier with", "reproduces the actual similarities of the data. It # does", "A, B \\rangle = \\operatorname{Tr}(A^T B)`. This # reinforces the", "circuit to :math:`6`. init_params = random_params(num_wires=5, num_layers=6) ############################################################################## # Now", "can be realized on near-term quantum computers, namely *Quantum Embedding", "kernel value between the first and second datapoint is {kernel_value:.3f}\")", "We want to assess the impact of training the parameters", "# Currently, the function ``qml.kernels.target_alignment`` is not # differentiable yet,", "y(\\boldsymbol{x}) = \\operatorname{sgn}\\left(\\sum_i \\alpha_i \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x})\\rangle + b\\right). This", "the Hilbert-Schmidt (or # Frobenius) scalar product # :math:`\\langle A,", "# same class and :math:`-1` otherwise and its kernel matrix", "to actually perform the (potentially expensive) embedding :math:`\\phi`. Consider for", "state .. math:: |\\psi(\\boldsymbol{x})\\rangle = U(\\boldsymbol{x}) |0 \\rangle. The kernel", "we will make use # of scikit-learn's Support Vector Classifier", "# where :math:`N` is the number of elements in :math:`\\boldsymbol{y}`,", "come back to the actual implementation. PennyLane's # ``kernels`` module", "that now not only every data instance falls within the", "layer is repeated multiple times, reusing # the datapoint ``x``", "the dataset, and on the other hand # is not", "num_sectors = 3 X, Y = make_double_cake_data(num_sectors) ax = plot_double_cake_data(X,", "don't mind them if they are confusing. 
def _make_circular_data(num_sectors): \"\"\"Generate", "# .. note:: # This step does *not* modify the", "not expected to suffer from bad generalisation. # # References", "looking at the probability # of observing the all-zero state", "will make use # of scikit-learn's Support Vector Classifier (SVC).", "a device to run the quantum circuit on. # For", "is the first entry: def kernel(x1, x2, params): return kernel_circuit(x1,", "classifier: init_plot_data = plot_decision_boundaries(svm, plt.gca()) ############################################################################## # We see the", "We expect to see an accuracy improvement vs. the SVM", "<https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__, where you will also find more # background information", "learn how to evaluate kernels, use them for classification and", "considering a parameterised quantum circuit :math:`U(\\boldsymbol{x})` that maps a datapoint", "on *kernel alignment*, a similiarity measure # between two kernels", "center :width: 30% We can mathematically formalize this by assigning", "be evaluated on quantum computers, *quantum kernels* for short. In", "+ b). We will forgo one tiny step, but it", "= \\operatorname{Tr}(A^T B)`. This # reinforces the geometric picture of", "be used to make predictions on a # dataset, becaues", "into each of them. # Together, the datapoint and the", "*hyperplane*. It is immediately clear that this method is not", "will work with # 3 sectors: import matplotlib.pyplot as plt", "information on the kernel circuit structure itself. # # Before", "with the inner circle. But remember we have a circuit", "nplus if y == 1 else y / nminus for", "line and thus determine its slope. The independent term :math:`b`", "Quantum Embedding Kernel for predictions # -------------------------------------------------- # The quantum", "it unfit for gradient descent optimization. 
# We therefore first", "# besides the datapoints, which is why we again supply", "to assign binary labels to datapoints: linear classification. Imagine we", "kernel. We will compute # the overlap of the quantum", "scalar product # :math:`\\langle A, B \\rangle = \\operatorname{Tr}(A^T B)`.", "\\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}} # = \\frac{\\boldsymbol{y}^T K \\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)} N} # # where", "labelling in the vector # :math:`\\boldsymbol{y}` by assigning to two", "Performing an exhaustive # search in parameter space is not", ".. note:: # This step does *not* modify the variational", "slope. The independent term :math:`b` specifies the position on the", "then the adjoint of the embedding of the second datapoint.", "them if they are confusing. def _make_circular_data(num_sectors): \"\"\"Generate datapoints arranged", "a kernel with trained parameters is {accuracy_trained:.3f}\") ############################################################################## # We", "essentially just a tool to measure the similarity # between", "we have a circuit # with many free parameters! It", "# # <NAME>, <NAME>, <NAME>, <NAME>, # <NAME>, and <NAME>.", "embedded quantum states .. math:: k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = | \\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2.", "with some imports: from pennylane import numpy as np import", "kernel(x1, x2, _params), assume_normalized_kernel=True, ) # Optimization step params =", "showcases the # inner workings of classification with quantum embedding", "class and :math:`-1` otherwise and its kernel matrix is simply", "one of the cornerstones of classical machine learning. Here we", "in :math:`\\boldsymbol{y}`, # that is the number of datapoints in", "both: on # one hand it can adjust itself to", "# of scikit-learn's Support Vector Classifier (SVC). 
from sklearn.svm import", "############################################################################## # Indeed, we see that now not only every", "the variational parameters fully determine # the embedding ansatz :math:`U(\\boldsymbol{x})`.", "of regular gradient descent optimization. To speed up # the", "= accuracy(svm_trained, X, Y) print(f\"The accuracy of a kernel with", "up the kernel circuit in PennyLane would be # to", "3 sectors: import matplotlib.pyplot as plt num_sectors = 3 X,", "cost function that is given to it, which is why", "import matplotlib as mpl np.random.seed(1359) ############################################################################## # And we proceed", "In order to construct the full kernel circuit, we also", "regions on opposing sides of the line: .. figure:: ../demonstrations/kernels_module/linear_classification.png", "variational # parameters via the ``lambda`` function from above. #", ":math:`U(\\boldsymbol{x})` that maps a datapoint :math:`\\boldsymbol{x}` to the state ..", "only contains inner products between vectors in the embedding space:", "need some measure of # how well it fits the", "for a particularly simple # implementation of Quantum Embedding Kernels.", "formalize this by assigning the label :math:`y` via .. math::", ":math:`\\boldsymbol{y}\\boldsymbol{y}^T`: # # .. math:: # \\operatorname{KTA}_{\\boldsymbol{y}}(K) # = \\frac{\\operatorname{Tr}(K", "# The kernel function itself is now obtained by looking", "machine learning. Here we are concerned with kernels that can", "the function ``qml.kernels.kernel_matrix`` that provides # this functionality. It expects", ":math:`\\boldsymbol{x}` to the state .. math:: |\\psi(\\boldsymbol{x})\\rangle = U(\\boldsymbol{x}) |0", "but feeding different variational # parameters ``params`` into each of", "repeating a # layer as building block. 
Let's start by", "1 return x, y, labels def make_double_cake_data(num_sectors): x1, y1, labels1", "our # initial classifier: init_plot_data = plot_decision_boundaries(svm, plt.gca()) ############################################################################## #", "1, N_gridpoints)) _zz = np.zeros_like(_xx) for idx in np.ndindex(*_xx.shape): _zz[idx]", "condition for a good # performance of the kernel [#Alignment]_.", "# distrust the model. In this sense, our approach benefits", "linear combination of the embedded datapoints :math:`\\boldsymbol{w} = \\sum_i \\alpha_i", "i = i0 for j, wire in enumerate(wires): qml.Hadamard(wires=[wire]) qml.RZ(x[i", "be correctly classified, but # we still struggle with the", "optimization cost = lambda _params: -target_alignment( X[subset], Y[subset], lambda x1,", "init_params) print(f\"The kernel value between the first and second datapoint", "finally extract the probabilities of observing each basis state. @qml.qnode(dev)", "assigning the label :math:`y` via .. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle", "0.7 * np.cos(angles) y = 0.7 * np.sin(angles) labels =", "method is not very powerful, as datasets that are not", "support vector classifier with the # trained kernel: # First", "values to those variational parameters which improve the overall accuracy", "the other hand # is not expected to suffer from", "To understand what a kernel method does, let's first revisit", "np.vstack([x, y]).T labels = np.hstack([labels1, -1 * labels2]) # Canonical", "and alignment training with Pennylane. :property=\"og:image\": https://pennylane.ai/qml/_images/QEK_thumbnail.png .. related:: tutorial_kernel_based_training", "provide values for the # variational parameters. At this point", "= lambda _params: -target_alignment( X[subset], Y[subset], lambda x1, x2: kernel(x1,", "of the dataset form the # *kernel matrix*. We can", "separable by a hyperplane can't be classified without error. We", "how well it fits the dataset in question. 
Performing an", "higher dimensional vectors :math:`\\boldsymbol{x}`, where a line does not divide", "i in range(num_sectors): color = [\"#FF0000\", \"#0000FF\"][(i % 2)] other_color", "kernels, being aligned in a vector space. # # The", "requires_grad=True) ############################################################################## # Together with the ansatz we only need", "in this # classification look like. This could help us", "np.random.uniform(0, 2 * np.pi, (num_layers, 2, num_wires), requires_grad=True) ############################################################################## #", "for idx in np.ndindex(*_xx.shape): _zz[idx] = classifier.predict(np.array([_xx[idx], _yy[idx]])[np.newaxis, :]) plot_data", "parameters. At this point we fix the number of layers", ":math:`\\langle A, B \\rangle = \\operatorname{Tr}(A^T B)`. This # reinforces", "1) * sector_angle, lw=0, color=other_color, alpha=0.1, ) ) ax.set_xlim(-1, 1)", "the kernel matrix :math:`K` generated by the # quantum kernel", "First create a kernel with the trained parameter baked into", "to evaluate kernels, use them for classification and train them", "the # ``DoubleCake`` dataset. Firstly, we define two functions to", "to easily evaluate the kernel # target alignment: kta_init =", "space is not a good solution because it is very", "simulation. Overall this means that we compute # :math:`\\frac{1}{2}(N^2-N)` kernel", "return x, y, labels def make_double_cake_data(num_sectors): x1, y1, labels1 =", "the # *kernel matrix*. We can inspect it via the", "classes of points that lie in different corners of the", "# this functionality. It expects the kernel to not have", "\\phi(\\boldsymbol{x})\\rangle + b\\right). This rewriting might not seem useful at", "overfitting issues # visually in more complex data sets. 
To", "target_alignment( X, Y, lambda x1, x2: kernel(x1, x2, params), assume_normalized_kernel=True,", "in our linear classification with the map :math:`k`, we can", "measure relates # to objects, namely two kernels, being aligned", "as a linear combination of the embedded datapoints :math:`\\boldsymbol{w} =", "# To include the variational parameters, we construct a ``lambda``", "num_layers): \"\"\"Generate random variational parameters in the shape for the", "in a vector space. # # The training data enters", "_xx, \"_yy\": _yy, \"_zz\": _zz} ax.contourf( _xx, _yy, _zz, cmap=mpl.colors.ListedColormap([\"#FF0000\",", "kernel matrix function. svm_trained = SVC(kernel=trained_kernel_matrix).fit(X, Y) ############################################################################## # We", "``qml.kernels.kernel_matrix`` that provides # this functionality. It expects the kernel", "############################################################################## # We expect to see an accuracy improvement vs.", "understanding the demo, # so don't mind them if they", "with given kernel matrices :math:`K_1` and # :math:`K_2`: # #", "not use the entire training set to compute # :math:`\\operatorname{KTA}`", "# :math:`\\operatorname{KTA}` but rather # sample smaller subsets of the", "Using the Quantum Embedding Kernel for predictions # -------------------------------------------------- #", "0: current_alignment = target_alignment( X, Y, lambda x1, x2: kernel(x1,", "with gradient-based optimization, and all that using the functionality of", "+ b). 
The vector :math:`\\boldsymbol{w}` points perpendicular to the line", "tutorial we will use PennyLane's ``default.qubit`` # device with 5", "# finally extract the probabilities of observing each basis state.", "# \"Training Quantum Embedding Kernels on Near-Term Quantum Computers.\" #", "the variational # parameters via the ``lambda`` function from above.", "parameterised quantum circuit :math:`U(\\boldsymbol{x})` that maps a datapoint :math:`\\boldsymbol{x}` to", "y_j. # # The assigned kernel is thus :math:`+1` if", "matrix :math:`K` generated by the # quantum kernel and :math:`\\boldsymbol{y}\\boldsymbol{y}^T`:", "measure # between two kernels with given kernel matrices :math:`K_1`", "is immediately clear that this method is not very powerful,", ":math:`\\alpha` and :math:`b` vectors we introduced in the beginning. svm", "alignment is # guaranteed for good performance, but optimal alignment", "will measure which percentage # of the dataset it classifies", "interesting to inspect the decision boundaries of # our classifier:", "suffer from bad generalisation. # # References # ---------- #", "0), 1, i * sector_angle, (i + 1) * sector_angle,", "_xx, _yy = np.meshgrid(np.linspace(-1, 1, N_gridpoints), np.linspace(-1, 1, N_gridpoints)) _zz", "# we still struggle with the inner circle. But remember", "needs a *hyperplane*. It is immediately clear that this method", "random parameters is {kta_init:.3f}\") ############################################################################## # Now let's code up", "vector space. # # The training data enters the picture", "np.sum(T * T)) inner_product = inner_product / norm return inner_product", "= np.hstack([x1, 0.5 * x2]) y = np.hstack([y1, 0.5 *", "data sets. To this end we will introduce a #", "kernels, # training variational embedding kernels and the available functionalities", "helper method. 
def plot_decision_boundaries(classifier, ax, N_gridpoints=14): _xx, _yy = np.meshgrid(np.linspace(-1,", "the first and the # second datapoint: kernel_value = kernel(X[0],", "in ``qml.probs``, this is the first entry: def kernel(x1, x2,", "the quantum # kernel. Thus, let's build a second support", "The kernel value is then given by the *overlap* of", "x2: kernel(x1, x2, params), assume_normalized_kernel=True, ) print(f\"Step {i+1} - Alignment", "datapoints and returns the associated kernel matrix. # We can", "trained parameter baked into it. trained_kernel = lambda x1, x2:", "we define two functions to enable us to # generate", "the kernel alignment # of the kernel matrix :math:`K` generated", "notice the above formula only contains inner products between vectors", "outer product :math:`\\boldsymbol{y}\\boldsymbol{y}^T`. # The kernel-target alignment is then defined", "# Together with the ansatz we only need a device", "we again supply the variational # parameters via the ``lambda``", "= np.array(range(0, num_sectors)) sector_angle = 2 * np.pi / num_sectors", "\\rangle = \\operatorname{Tr}(A^T B)`. This # reinforces the geometric picture", "first entry: def kernel(x1, x2, params): return kernel_circuit(x1, x2, params)[0]", "_zz = np.zeros_like(_xx) for idx in np.ndindex(*_xx.shape): _zz[idx] = classifier.predict(np.array([_xx[idx],", "parameters via the ``lambda`` function from above. # Once we", "alignment on the full dataset every 50 steps. 
if (i", "pattern=\"ring\", wires=wires, parameters=params[1]) ############################################################################## # To construct the ansatz, this", "to compute # :math:`\\operatorname{KTA}` but rather # sample smaller subsets", "cmap=cmap, s=25, marker=\"s\") if num_sectors is not None: sector_angle =", "target alignment by :math:`-1` to actually # *maximize* it in", "necessary but not a sufficient condition for a good #", "parameters is {accuracy_init:.3f}\") ############################################################################## # We are also interested in", "of observing the all-zero state at the end of the", "entry: def kernel(x1, x2, params): return kernel_circuit(x1, x2, params)[0] ##############################################################################", "workings of classification with quantum embedding kernels, # training variational", "function using the trained kernel. trained_kernel_matrix = lambda X1, X2:", "\\alpha_i \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x})\\rangle + b\\right). This rewriting might not", "\\boldsymbol{x}, \\boldsymbol{y} \\rangle^2. 
This means by just replacing the regular", "means that we compute # :math:`\\frac{1}{2}(N^2-N)` kernel values for :math:`N`", "are no strong artifacts that would make us # distrust", "plot_data = {\"_xx\": _xx, \"_yy\": _yy, \"_zz\": _zz} ax.contourf( _xx,", "also be extended to higher dimensional vectors :math:`\\boldsymbol{x}`, where a", "------------------------------------- # # To be able to train the Quantum", "\\frac{\\operatorname{Tr}(K \\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}} # = \\frac{\\boldsymbol{y}^T K \\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)} N} # #", "decision boundaries of # our classifier: trained_plot_data = plot_decision_boundaries(svm_trained, plt.gca())", "lw=0, color=color, alpha=0.1, width=0.5, ) ) ax.add_artist( mpl.patches.Wedge( (0, 0),", "can adjust itself to the dataset, and on the other", "boundaries in this # classification look like. This could help", "the cost function that is given to it, which is", "0, 1], ) plot_double_cake_data(X, Y, ax) return plot_data ############################################################################## #", "vector classifier with the # trained kernel: # First create", "discern two different classes of points that lie in different", "first and second datapoint is {kernel_value:.3f}\") ############################################################################## # The mutual", "\\operatorname{sgn}(\\langle \\boldsymbol{w}, \\phi(\\boldsymbol{x})\\rangle + b). We will forgo one tiny", "in range(500): # Choose subset of datapoints to compute the", "our noiseless simulation. 
Overall this means that we compute #", "1 else y / nminus for y in Y]) else:", "see the outer points in the dataset can be correctly", "= qml.kernels.square_kernel_matrix(X, init_kernel, assume_normalized_kernel=True) with np.printoptions(precision=3, suppress=True): print(K_init) ############################################################################## #", "the variational parameters in our circuit # ansatz. What it", "PennyLane would be # to use the observable type #", "qml.GradientDescentOptimizer(0.2) for i in range(500): # Choose subset of datapoints", "that we do not # calculate the entries between the", "# Choose subset of datapoints to compute the KTA on.", "can be shown that for the purpose of optimal classification,", "############################################################################## # The mutual kernel values between all elements of", "labels to the regions on opposing sides of the line:", "the impact of training the parameters of the quantum #", "Y, ax) return plot_data ############################################################################## # With that done, let's", "by defining this layer: import pennylane as qml def layer(x,", "random variational parameters in the shape for the ansatz.\"\"\" return", "the *kernel*. It provides the advantage that we can often", "the # training data. It is based on *kernel alignment*,", "the number of datapoints in the dataset. # # In", "🎆 # # Following on the results that SVM's have", "else y / nminus for y in Y]) else: _Y", "arise from embedding data into the space of quantum states.", "2 * np.pi, (num_layers, 2, num_wires), requires_grad=True) ############################################################################## # Together", "to create a dataset to work with, the # ``DoubleCake``", "to discern two different classes of points that lie in", "variational # parameters ``params`` into each of them. 
# Together,", ") if rescale_class_labels: nplus = np.count_nonzero(np.array(Y) == 1) nminus =", "vector # :math:`\\boldsymbol{y}` by assigning to two datapoints the product", "KTA on. subset = np.random.choice(list(range(len(X))), 4) # Define the cost", "pennylane as qml def layer(x, params, wires, i0=0, inc=1): \"\"\"Building", "ax) return plot_data ############################################################################## # With that done, let's have", "= 3 X, Y = make_double_cake_data(num_sectors) ax = plot_double_cake_data(X, Y,", "y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\boldsymbol{x}\\rangle + b). The vector :math:`\\boldsymbol{w}`", "training data. It is based on *kernel alignment*, a similiarity", "used to make predictions on a # dataset, becaues it", "methods are one of the cornerstones of classical machine learning.", "of the corresponding labels: # # .. math:: # k_{\\boldsymbol{y}}(\\boldsymbol{x}_i,", "a more theoretical side, :math:`\\operatorname{KA}` # is nothing else than", "believe we can give # values to those variational parameters", "init_kernel)).fit(X, Y) ############################################################################## # To see how well our classifier", "on Ref. [#Training_QEKs]_, a project from Xanadu's own `QHack <https://qhack.ai/>`__", "performs we will measure which percentage # of the dataset", "2, num_wires), requires_grad=True) ############################################################################## # Together with the ansatz we", "np.printoptions(precision=3, suppress=True): print(K_init) ############################################################################## # Using the Quantum Embedding Kernel", "classification look like. This could help us spotting overfitting issues", "the kernel circuit structure itself. # # Before focusing on", "ansatz(x, params, wires): \"\"\"The embedding ansatz\"\"\" for j, layer_params in", "SVM from our Quantum # Embedding Kernel. 
# # ..", ":math:`+1` if both datapoints lie in the # same class", "aligned in a vector space. # # The training data", "# = \\frac{\\boldsymbol{y}^T K \\boldsymbol{y}}{\\sqrt{\\operatorname{Tr}(K^2)} N} # # where :math:`N`", "[#Training_QEKs] # # <NAME>, <NAME>, <NAME>, <NAME>, # <NAME>, and", "the function ``qml.kernels.target_alignment`` is not # differentiable yet, making it", "`kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__ # allows for a particularly simple #", "math:: # \\operatorname{KA}(K_1, K_2) = \\frac{\\operatorname{Tr}(K_1 K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}. # # ..", "improve the kernel-target alignment! # # We will make use", "circuit that realizes the kernel. We will compute # the", "= init_params opt = qml.GradientDescentOptimizer(0.2) for i in range(500): #", "variational parameters in our circuit # ansatz. What it does", "kernel function itself is now obtained by looking at the", "the # correct class, but also that there are no", "# # .. [#Training_QEKs] # # <NAME>, <NAME>, <NAME>, <NAME>,", "number of elements in :math:`\\boldsymbol{y}`, # that is the number", "# Using the Quantum Embedding Kernel for predictions # --------------------------------------------------", "def _make_circular_data(num_sectors): \"\"\"Generate datapoints arranged in an even circle.\"\"\" center_indices", "*feature space* and then perform linear classification there, we could", "matplotlib as mpl np.random.seed(1359) ############################################################################## # And we proceed right", "and the variational parameters fully determine # the embedding ansatz", "Quantum Embedding Kernel we need some measure of # how", "actual prediction we will make use # of scikit-learn's Support", "training with Pennylane. :property=\"og:image\": https://pennylane.ai/qml/_images/QEK_thumbnail.png .. related:: tutorial_kernel_based_training Kernel-based training", "What are kernel methods? 
------------------------ To understand what a kernel", "descent optimization. To speed up # the optimization we will", "where you will also find more # background information on", "be a kernel matrix function. svm_trained = SVC(kernel=trained_kernel_matrix).fit(X, Y) ##############################################################################", "c=Y, cmap=cmap, s=25, marker=\"s\") if num_sectors is not None: sector_angle", "block of the embedding ansatz\"\"\" i = i0 for j,", "_yy[idx]])[np.newaxis, :]) plot_data = {\"_xx\": _xx, \"_yy\": _yy, \"_zz\": _zz}", "plot_double_cake_data(X, Y, ax) return plot_data ############################################################################## # With that done,", "rewriting might not seem useful at first, but notice the", "# datapoint and then the adjoint of the embedding of", "params): ansatz(x1, params, wires=wires) adjoint_ansatz(x2, params, wires=wires) return qml.probs(wires=wires) ##############################################################################", "thus determine its slope. The independent term :math:`b` specifies the", "# The assigned kernel is thus :math:`+1` if both datapoints", "# with many free parameters! It is reasonable to believe", "on kernel-based training of quantum models <https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__, where you will", "print(f\"The accuracy of the kernel with random parameters is {accuracy_init:.3f}\")", "captures how well # the kernel you chose reproduces the", "with kernels that can be evaluated on quantum computers, *quantum", "a kernel with the trained parameter baked into it. trained_kernel", "computers, namely *Quantum Embedding Kernels (QEKs)*. These are kernels that", "the kernel circuit -- because # of the ordering in", "+ 1) * sector_angle, lw=0, color=other_color, alpha=0.1, ) ) ax.set_xlim(-1,", "|\\psi(\\boldsymbol{x})\\rangle = U(\\boldsymbol{x}) |0 \\rangle. The kernel value is then", "quantum circuit on. 
# For the purpose of this tutorial", "return ax ############################################################################## # Let's now have a look at", "print(f\"Step {i+1} - Alignment = {current_alignment:.3f}\") ############################################################################## # We want", "good # performance of the kernel [#Alignment]_. This means having", "specialized measure, the # *kernel-target alignment* [#Alignment]_. The kernel-target alignment", "-target_alignment( X[subset], Y[subset], lambda x1, x2: kernel(x1, x2, _params), assume_normalized_kernel=True,", "ansatz, this layer is repeated multiple times, reusing # the", "Y = labels.astype(int) return X, Y ############################################################################## # Next, we", "will explore one particular kind of kernel that can be", "# Training the Quantum Embedding Kernel # ------------------------------------- # #", "parameters, we construct a ``lambda`` function that # fixes them", "alignment by :math:`-1` to actually # *maximize* it in the", "the Quantum Embedding Kernel for predictions # -------------------------------------------------- # The", "for predictions # -------------------------------------------------- # The quantum kernel alone can", "Embedding Kernels on Near-Term Quantum Computers.\" # `arXiv:2105.02276 <https://arxiv.org/abs/2105.02276>`__, 2021.", "# And we proceed right away to create a dataset", ".. 
note:: # An alternative way to set up the", "y = X.T cmap = mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]) ax.scatter(x, y, c=Y,", "since the accuracy is a discrete quantity we # would", "plot_decision_boundaries(svm_trained, plt.gca()) ############################################################################## # Indeed, we see that now not", "To be able to train the Quantum Embedding Kernel we", "have proven good generalisation # behavior, it will be interesting", "x = np.hstack([x1, 0.5 * x2]) y = np.hstack([y1, 0.5", "\\boldsymbol{x}_j) = \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x}_j)\\rangle. We call this function the", "# guaranteed for good performance, but optimal alignment will not", "to those variational parameters which improve the overall accuracy #", "k(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x}_j)\\rangle. We call this function", "``sklearn.svm.SVC`` with a function # that takes two sets of", "on quantum computers, *quantum kernels* for short. In this tutorial", "evaluate kernels, use them for classification and train them with", "alpha=0.1, ) ) ax.set_xlim(-1, 1) ax.set_ylim(-1, 1) ax.set_aspect(\"equal\") ax.axis(\"off\") return", "otherwise and its kernel matrix is simply # given by", "# # .. note:: # Currently, the function ``qml.kernels.target_alignment`` is", "because in many interesting cases the embedding :math:`\\phi` will be", ":math:`\\boldsymbol{w}` points perpendicular to the line and thus determine its", "# The mutual kernel values between all elements of the", "lambda x1, x2: kernel(x1, x2, _params), assume_normalized_kernel=True, ) # Optimization", "# To be able to train the Quantum Embedding Kernel", ":math:`U(\\boldsymbol{x})^\\dagger`, which we can obtain via ``qml.adjoint``. def ansatz(x, params,", "make use of the function ``qml.kernels.kernel_matrix`` that provides # this", "problem that showcases the # inner workings of classification with", "get .. 
math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\phi(\\boldsymbol{x})\\rangle + b).", "of optimal classification, we can choose the vector defining the", "= {\"_xx\": _xx, \"_yy\": _yy, \"_zz\": _zz} ax.contourf( _xx, _yy,", "state at the end of the kernel circuit -- because", "it fits the dataset in question. Performing an exhaustive #", "= \\langle \\boldsymbol{x}, \\boldsymbol{y} \\rangle^2. This means by just replacing", "\"\"\"Generate datapoints arranged in an even circle.\"\"\" center_indices = np.array(range(0,", "the observable type # `Projector <https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__. # This is shown", "k_{\\boldsymbol{y}}(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = y_i y_j. # # The assigned kernel", "class, but also that there are no strong artifacts that", "mpl np.random.seed(1359) ############################################################################## # And we proceed right away to", "alignment: kta_init = qml.kernels.target_alignment(X, Y, init_kernel, assume_normalized_kernel=True) print(f\"The kernel-target alignment", "``qml.probs``, this is the first entry: def kernel(x1, x2, params):", "30% We can mathematically formalize this by assigning the label", "Quantum Embedding Kernels. The first ingredient we # need for", "good generalisation # behavior, it will be interesting to inspect", "x_2 y_1 y_2 + x_2^2 y_2^2 = \\langle \\boldsymbol{x}, \\boldsymbol{y}", "data instance falls within the # correct class, but also", "=========================================== .. meta:: :property=\"og:description\": Kernels and alignment training with Pennylane.", "x2, y2, labels2 = _make_circular_data(num_sectors) # x and y coordinates", "that are not separable by a hyperplane can't be classified", "optimization loop and improve the kernel-target alignment! # # We", "= classifier.predict(np.array([_xx[idx], _yy[idx]])[np.newaxis, :]) plot_data = {\"_xx\": _xx, \"_yy\": _yy,", "it. 
trained_kernel = lambda x1, x2: kernel(x1, x2, params) #", "in the dataset. # # In summary, the kernel-target alignment", "the following embedding and the associated kernel: .. math:: \\phi((x_1,", "on # one hand it can adjust itself to the", "# # .. math:: # \\operatorname{KTA}_{\\boldsymbol{y}}(K) # = \\frac{\\operatorname{Tr}(K \\boldsymbol{y}\\boldsymbol{y}^T)}{\\sqrt{\\operatorname{Tr}(K^2)\\operatorname{Tr}((\\boldsymbol{y}\\boldsymbol{y}^T)^2)}}", "allows you to easily evaluate the kernel # target alignment:", "evaluated on quantum computers, *quantum kernels* for short. In this", "matplotlib.pyplot as plt num_sectors = 3 X, Y = make_double_cake_data(num_sectors)", "just a tool to measure the similarity # between two", "states. We formalize this by considering a parameterised quantum circuit", ".. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}\\left(\\sum_i \\alpha_i \\langle \\phi(\\boldsymbol{x}_i), \\phi(\\boldsymbol{x})\\rangle +", "a linear combination of the embedded datapoints :math:`\\boldsymbol{w} = \\sum_i", "tutorial_kernel_based_training Kernel-based training with scikit-learn tutorial_data_reuploading_classifier Classification with data reuploading", "ax.add_artist( mpl.patches.Wedge( (0, 0), 0.5, i * sector_angle, (i +", "only every data instance falls within the # correct class,", "it does is solving a different optimization task for the", "it, which is why we # have to multiply the", "2021. # # .. [#Alignment] # # <NAME>, <NAME>, and", "to run the quantum circuit on. # For the purpose", "/ nplus if y == 1 else y / nminus", "using the trained kernel. trained_kernel_matrix = lambda X1, X2: qml.kernels.kernel_matrix(X1,", "by first applying the embedding of the first # datapoint", "that is the number of datapoints in the dataset. #", "(0, 0), 0.5, i * sector_angle, (i + 1) *", "-------------------------------------------------- # The quantum kernel alone can not be used", "with it. 
# # Let's now come back to the", "so don't mind them if they are confusing. def _make_circular_data(num_sectors):", "step params = opt.step(cost, params) # Report the alignment on", "data: def plot_double_cake_data(X, Y, ax, num_sectors=None): \"\"\"Plot double cake data", "simply # given by the outer product :math:`\\boldsymbol{y}\\boldsymbol{y}^T`. # The", "correctly. def accuracy(classifier, X, Y_target): return 1 - np.count_nonzero(classifier.predict(X) -", "{current_alignment:.3f}\") ############################################################################## # We want to assess the impact of", "X2, trained_kernel) # Note that SVC expects the kernel argument", "% 2)] other_color = [\"#FF0000\", \"#0000FF\"][((i + 1) % 2)]", "embedded datapoints :math:`\\boldsymbol{w} = \\sum_i \\alpha_i \\phi(\\boldsymbol{x}_i)`. Putting this into", "`demo on kernel-based training of quantum models <https://pennylane.ai/qml/demos/tutorial_kernel_based_training.html>`__, where you", "be much costlier to compute than the kernel :math:`k`. In", "it superfluous to actually perform the (potentially expensive) embedding :math:`\\phi`.", "side, :math:`\\operatorname{KA}` # is nothing else than the cosine of", "Embedding Kernel # ----------------------------------- # PennyLane's `kernels module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__ #", "use of symmetry of the kernel, # :math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j) = k(\\boldsymbol{x}_j,", "be # to use the observable type # `Projector <https://pennylane.readthedocs.io/en/latest/code/api/pennylane.Projector.html>`__.", "= np.array(Y) T = np.outer(_Y, _Y) inner_product = np.sum(K *", "of the second datapoint. We # finally extract the probabilities", "the associated kernel matrix. # We can make use of", "of matrices with the Hilbert-Schmidt (or # Frobenius) scalar product", "both in PennyLane. 
We of course need to start with", "Now we can have a look at the kernel value", "an actual prediction we will make use # of scikit-learn's", "the line and thus determine its slope. The independent term", "1) * sector_angle, lw=0, color=color, alpha=0.1, width=0.5, ) ) ax.add_artist(", "x2, params): return kernel_circuit(x1, x2, params)[0] ############################################################################## # # ..", "actually express much more intricate decision boundaries! This is very", "construct the ansatz, this layer is repeated multiple times, reusing", "one hand it can adjust itself to the dataset, and", "print(f\"The kernel-target alignment for our dataset and random parameters is", "decision boundaries for our # initial classifier: init_plot_data = plot_decision_boundaries(svm,", "Imagine we want to discern two different classes of points", "points perpendicular to the line and thus determine its slope.", "# # References # ---------- # # .. [#Training_QEKs] #", "nminus = len(Y) - nplus _Y = np.array([y / nplus", "* K) * np.sum(T * T)) inner_product = inner_product /", "and all that using the functionality of PennyLane's `kernels module", "# variational parameters. At this point we fix the number", "= qml.kernels.square_kernel_matrix( X, kernel, assume_normalized_kernel=assume_normalized_kernel, ) if rescale_class_labels: nplus =", "look at the kernel value between the first and the", "kernel you chose reproduces the actual similarities of the data.", "parameters of the quantum # kernel. Thus, let's build a", "that PennyLane's built-in optimizer works # to *minimize* the cost", "# that is the number of datapoints in the dataset.", "quantum kernel alone can not be used to make predictions", "this, we can let scikit-learn adjust the SVM from our", "circuit, we also require its adjoint # :math:`U(\\boldsymbol{x})^\\dagger`, which we", "therefore first define a differentiable version of this function. def", "in more complex data sets. 
To this end we will", "the kernel-target alignment effectively captures how well # the kernel", "We can inspect it via the ``qml.kernels.square_kernel_matrix`` # method, which", "we still struggle with the inner circle. But remember we", "kernel: # First create a kernel with the trained parameter", "the data. # The details of these functions are not", "space into two regions anymore. Instead one needs a *hyperplane*.", "but # we still struggle with the inner circle. But", "simple # implementation of Quantum Embedding Kernels. The first ingredient", "suppress=True): print(K_init) ############################################################################## # Using the Quantum Embedding Kernel for", "# :math:`\\alpha` and :math:`b` vectors we introduced in the beginning.", "# \\operatorname{KA}(K_1, K_2) = \\frac{\\operatorname{Tr}(K_1 K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}. # # .. note::", "x2, init_params) K_init = qml.kernels.square_kernel_matrix(X, init_kernel, assume_normalized_kernel=True) with np.printoptions(precision=3, suppress=True):", ":math:`U(\\boldsymbol{x})`. # In order to construct the full kernel circuit,", ":math:`\\phi`. Consider for example the following embedding and the associated", "T)) inner_product = inner_product / norm return inner_product params =", "combination of the embedded datapoints :math:`\\boldsymbol{w} = \\sum_i \\alpha_i \\phi(\\boldsymbol{x}_i)`.", "this by considering a parameterised quantum circuit :math:`U(\\boldsymbol{x})` that maps", "implementation of Quantum Embedding Kernels. The first ingredient we #", "``lambda`` function that # fixes them to the values we", "function. 
svm_trained = SVC(kernel=trained_kernel_matrix).fit(X, Y) ############################################################################## # We expect to", "def plot_double_cake_data(X, Y, ax, num_sectors=None): \"\"\"Plot double cake data and", "how well our classifier performs we will measure which percentage", "= {current_alignment:.3f}\") ############################################################################## # We want to assess the impact", "an even circle.\"\"\" center_indices = np.array(range(0, num_sectors)) sector_angle = 2", "this sense, our approach benefits from both: on # one", "method. def plot_decision_boundaries(classifier, ax, N_gridpoints=14): _xx, _yy = np.meshgrid(np.linspace(-1, 1,", "regions anymore. Instead one needs a *hyperplane*. It is immediately", "first revisit one of the simplest methods to assign binary", "first define a differentiable version of this function. def target_alignment(", "layer(x, layer_params, wires, i0=j * len(wires)) adjoint_ansatz = qml.adjoint(ansatz) def", "= X.T cmap = mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]) ax.scatter(x, y, c=Y, cmap=cmap,", "In addition, the option ``assume_normalized_kernel=True`` ensures that we do not", "the kernel to not have additional parameters # besides the", "an exhaustive # search in parameter space is not a", "choose :math:`4` # datapoints at random. Remember that PennyLane's built-in", ":math:`k`. In this demo, we will explore one particular kind", "{accuracy_init:.3f}\") ############################################################################## # We are also interested in seeing what", "two different classes of points that lie in different corners", "you will also find more # background information on the", "functions to enable us to # generate the data. #", "matrix is simply # given by the outer product :math:`\\boldsymbol{y}\\boldsymbol{y}^T`.", "ax, N_gridpoints=14): _xx, _yy = np.meshgrid(np.linspace(-1, 1, N_gridpoints), np.linspace(-1, 1,", "the line: .. 
figure:: ../demonstrations/kernels_module/linear_classification.png :align: center :width: 30% We", "and random parameters is {kta_init:.3f}\") ############################################################################## # Now let's code", "circuit -- because # of the ordering in ``qml.probs``, this", "it is essentially just a tool to measure the similarity", "is simply # given by the outer product :math:`\\boldsymbol{y}\\boldsymbol{y}^T`. #", "# layer as building block. Let's start by defining this", "the Quantum Embedding Kernel we need some measure of #", "we will introduce a # second helper method. def plot_decision_boundaries(classifier,", "X, Y ############################################################################## # Next, we define a function to", "math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\boldsymbol{x}\\rangle + b). The vector", "# We expect to see an accuracy improvement vs. the", "is based on Ref. [#Training_QEKs]_, a project from Xanadu's own", "task for the # :math:`\\alpha` and :math:`b` vectors we introduced", "[\"#FF0000\", \"#0000FF\"][(i % 2)] other_color = [\"#FF0000\", \"#0000FF\"][((i + 1)", "alpha=0.1, width=0.5, ) ) ax.add_artist( mpl.patches.Wedge( (0, 0), 0.5, i", "the inner circle. But remember we have a circuit #", "hand # is not expected to suffer from bad generalisation.", "this tutorial you will learn how to evaluate kernels, use", "can mathematically formalize this by assigning the label :math:`y` via", "learning. Here we are concerned with kernels that can be", "decision boundary as a linear combination of the embedded datapoints", "_make_circular_data(num_sectors): \"\"\"Generate datapoints arranged in an even circle.\"\"\" center_indices =", "*Quantum Embedding Kernels (QEKs)*. 
These are kernels that arise from", "relates # to objects, namely two kernels, being aligned in", "Kernel-based training with scikit-learn tutorial_data_reuploading_classifier Classification with data reuploading *Authors:", "= opt.step(cost, params) # Report the alignment on the full", "interested in seeing what the decision boundaries in this #", "We therefore first define a differentiable version of this function.", "############################################################################## # Using the Quantum Embedding Kernel for predictions #", "circuit on. # For the purpose of this tutorial we", "results that SVM's have proven good generalisation # behavior, it", "kernel-target alignment! # # We will make use of regular", "we need to supply ``sklearn.svm.SVC`` with a function # that", "call this function the *kernel*. It provides the advantage that", "accuracy(svm, X, Y) print(f\"The accuracy of the kernel with random", "lambda _params: -target_alignment( X[subset], Y[subset], lambda x1, x2: kernel(x1, x2,", "# .. [#Alignment] # # <NAME>, <NAME>, and <NAME>. #", "functions are not essential for understanding the demo, # so", "block. Let's start by defining this layer: import pennylane as", "= np.sqrt(np.sum(K * K) * np.sum(T * T)) inner_product =", "sampled above. 
init_kernel = lambda x1, x2: kernel(x1, x2, init_params)", "rescale_class_labels: nplus = np.count_nonzero(np.array(Y) == 1) nminus = len(Y) -", "step, but it can be shown that for the purpose", ":math:`K` generated by the # quantum kernel and :math:`\\boldsymbol{y}\\boldsymbol{y}^T`: #", "/ norm return inner_product params = init_params opt = qml.GradientDescentOptimizer(0.2)", "vectors :math:`\\boldsymbol{x}`, where a line does not divide the entire", "kernel(x1, x2, init_params) K_init = qml.kernels.square_kernel_matrix(X, init_kernel, assume_normalized_kernel=True) with np.printoptions(precision=3,", "between the kernel # matrices :math:`K_1` and :math:`K_2` if we", "It is immediately clear that this method is not very", "the similarity # between two datapoints. To perform an actual", "K_2)}{\\sqrt{\\operatorname{Tr}(K_1^2)\\operatorname{Tr}(K_2^2)}}. # # .. note:: # Seen from a more", ".. math:: # k_{\\boldsymbol{y}}(\\boldsymbol{x}_i, \\boldsymbol{x}_j) = y_i y_j. # #", "qml.kernels.target_alignment(X, Y, init_kernel, assume_normalized_kernel=True) print(f\"The kernel-target alignment for our dataset", "for j, wire in enumerate(wires): qml.Hadamard(wires=[wire]) qml.RZ(x[i % len(x)], wires=[wire])", "############################################################################## # Next, we define a function to help plot", "datapoint and the variational parameters fully determine # the embedding", "alignment is then defined as the kernel alignment # of", "our dataset. In our example, we will work with #", "1, i * sector_angle, (i + 1) * sector_angle, lw=0,", "0.5, i * sector_angle, (i + 1) * sector_angle, lw=0,", "kernel circuit structure itself. # # Before focusing on the", "similarities of the data. 
It # does have one drawback,", "to it, which is why we # have to multiply", "Overall this means that we compute # :math:`\\frac{1}{2}(N^2-N)` kernel values", "prediction we will make use # of scikit-learn's Support Vector", "perpendicular to the line and thus determine its slope. The", "functionalities # to do both in PennyLane. We of course", "of the kernel with random parameters is {accuracy_init:.3f}\") ############################################################################## #", "to make predictions on a # dataset, becaues it is", "can have a look at the kernel value between the", "expensive) embedding :math:`\\phi`. Consider for example the following embedding and", "= qml.kernels.target_alignment(X, Y, init_kernel, assume_normalized_kernel=True) print(f\"The kernel-target alignment for our", "different variational # parameters ``params`` into each of them. #", "sets. To this end we will introduce a # second", "kernel matrix is simply # given by the outer product", "to the regions on opposing sides of the line: ..", "the overall accuracy # of our SVC. # # Training", "in PennyLane would be # to use the observable type", "x, y = X.T cmap = mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]) ax.scatter(x, y,", "structure itself. # # Before focusing on the kernel values", "SVC(kernel=trained_kernel_matrix).fit(X, Y) ############################################################################## # We expect to see an accuracy", ":math:`K_2`: # # .. 
math:: # \\operatorname{KA}(K_1, K_2) = \\frac{\\operatorname{Tr}(K_1", "params = init_params opt = qml.GradientDescentOptimizer(0.2) for i in range(500):", "of how this measure relates # to objects, namely two", "in enumerate(wires): qml.Hadamard(wires=[wire]) qml.RZ(x[i % len(x)], wires=[wire]) i += inc", "to start with some imports: from pennylane import numpy as", "\"#0000FF\"][(i % 2)] other_color = [\"#FF0000\", \"#0000FF\"][((i + 1) %", "some measure of # how well it fits the dataset", "hand it can adjust itself to the dataset, and on", "mutual kernel values between all elements of the dataset form", "applying the embedding of the first # datapoint and then", "multiply the kernel target alignment by :math:`-1` to actually #", "let's first revisit one of the simplest methods to assign", "= qml.GradientDescentOptimizer(0.2) for i in range(500): # Choose subset of", "Let's now come back to the actual implementation. PennyLane's #", "0.5) * sector_angle x = 0.7 * np.cos(angles) y =", "accuracy(classifier, X, Y_target): return 1 - np.count_nonzero(classifier.predict(X) - Y_target) /", "kernel # function that expresses the original labelling in the", "alignment*, a similiarity measure # between two kernels with given", "Kernel for predictions # -------------------------------------------------- # The quantum kernel alone", "the decision boundaries in this # classification look like. This", "the number of elements in :math:`\\boldsymbol{y}`, # that is the", "own `QHack <https://qhack.ai/>`__ hackathon. What are kernel methods? 
------------------------ To", "given kernel matrices :math:`K_1` and # :math:`K_2`: # # ..", "Y) ############################################################################## # To see how well our classifier performs", "also find more # background information on the kernel circuit", "Y, plt.gca(), num_sectors=num_sectors) ############################################################################## # Defining a Quantum Embedding Kernel", "is very important, because in many interesting cases the embedding", "have to multiply the kernel target alignment by :math:`-1` to", "return np.random.uniform(0, 2 * np.pi, (num_layers, 2, num_wires), requires_grad=True) ##############################################################################", "params, wires=wires) return qml.probs(wires=wires) ############################################################################## # The kernel function itself", "plot_data ############################################################################## # With that done, let's have a look", "to *minimize* the cost function that is given to it,", "This means by just replacing the regular scalar product in", "# # In summary, the kernel-target alignment effectively captures how", "the dataset in question. Performing an exhaustive # search in", "of them. # Together, the datapoint and the variational parameters", "where a line does not divide the entire space into", "there, we could actually realise non-linear classification in our original", "build a second support vector classifier with the # trained", "superfluous to actually perform the (potentially expensive) embedding :math:`\\phi`. Consider", "\"#0000FF\"]), alpha=0.2, levels=[-1, 0, 1], ) plot_double_cake_data(X, Y, ax) return", "on. 
subset = np.random.choice(list(range(len(X))), 4) # Define the cost function", "explore one particular kind of kernel that can be realized", "# # To be able to train the Quantum Embedding", "# the datapoint ``x`` but feeding different variational # parameters", "of the kernel circuit -- because # of the ordering", "corners of the plane. A linear classifier corresponds to drawing", "<NAME>, and <NAME>. # \"An overview of kernel alignment and", "the SVM, we need to supply ``sklearn.svm.SVC`` with a function", "map :math:`k`, we can actually express much more intricate decision", "Kernels on Near-Term Quantum Computers.\" # `arXiv:2105.02276 <https://arxiv.org/abs/2105.02276>`__, 2021. #", "shots=None) wires = dev.wires.tolist() ############################################################################## # Let us now define", "trained_kernel = lambda x1, x2: kernel(x1, x2, params) # Second", "the decision boundary as a linear combination of the embedded", "np.array(Y) T = np.outer(_Y, _Y) inner_product = np.sum(K * T)", "a kernel matrix function using the trained kernel. trained_kernel_matrix =", "embedding ansatz\"\"\" i = i0 for j, wire in enumerate(wires):", "# <NAME>, <NAME>, <NAME>, <NAME>, # <NAME>, and <NAME>. #", "function from above. # Once we have this, we can", "based on Ref. [#Training_QEKs]_, a project from Xanadu's own `QHack", "it in the process. # # .. note:: # Currently,", "immediately clear that this method is not very powerful, as", "# Define the cost function for optimization cost = lambda", "on near-term quantum computers, namely *Quantum Embedding Kernels (QEKs)*. These", "y_2^2 = \\langle \\boldsymbol{x}, \\boldsymbol{y} \\rangle^2. This means by just", "############################################################################## # We have now achieved perfect classification! 
🎆 #", "very important, because in many interesting cases the embedding :math:`\\phi`", "is then given by the *overlap* of the associated embedded", "parameter space is not a good solution because it is", ".. math:: y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\phi(\\boldsymbol{x})\\rangle + b). We", "= labels.astype(int) return X, Y ############################################################################## # Next, we define", "no strong artifacts that would make us # distrust the", "do not # calculate the entries between the same datapoints,", "return qml.probs(wires=wires) ############################################################################## # The kernel function itself is now", "optimal classification, we can choose the vector defining the decision", "the quantum circuit on. # For the purpose of this", "as plt num_sectors = 3 X, Y = make_double_cake_data(num_sectors) ax", "``qml.kernels.target_alignment`` is not # differentiable yet, making it unfit for", "adjoint_ansatz(x2, params, wires=wires) return qml.probs(wires=wires) ############################################################################## # The kernel function", "even circle.\"\"\" center_indices = np.array(range(0, num_sectors)) sector_angle = 2 *", "now come back to the actual implementation. PennyLane's # ``kernels``", "= np.array([y / nplus if y == 1 else y", "not only every data instance falls within the # correct", "form of dataset X = np.vstack([x, y]).T labels = np.hstack([labels1,", "its kernel matrix is simply # given by the outer", "Kernel methods are one of the cornerstones of classical machine", "Next, we define a function to help plot the ``DoubleCake``", "embedding ansatz :math:`U(\\boldsymbol{x})`. # In order to construct the full", "are confusing. def _make_circular_data(num_sectors): \"\"\"Generate datapoints arranged in an even", "evaluating quantum kernels =========================================== .. 
meta:: :property=\"og:description\": Kernels and alignment", "see them as vectors # in the space of matrices", "the end of the kernel circuit -- because # of", "and improve the kernel-target alignment! # # We will make", "if both datapoints lie in the # same class and", "sector_angle, (i + 1) * sector_angle, lw=0, color=other_color, alpha=0.1, )", "_yy, \"_zz\": _zz} ax.contourf( _xx, _yy, _zz, cmap=mpl.colors.ListedColormap([\"#FF0000\", \"#0000FF\"]), alpha=0.2,", "math:: |\\psi(\\boldsymbol{x})\\rangle = U(\\boldsymbol{x}) |0 \\rangle. The kernel value is", "model. In this sense, our approach benefits from both: on", "# training variational embedding kernels and the available functionalities #", "we go back to the expression for our prediction and", "defining this layer: import pennylane as qml def layer(x, params,", "of kernel alignment and its applications.\" # `Artificial Intelligence Review", "this tutorial we will use PennyLane's ``default.qubit`` # device with", "*overlap* of the associated embedded quantum states .. math:: k(\\boldsymbol{x}_i,", "will be interesting to inspect the decision boundaries of #", "enters the picture by defining an *ideal* kernel # function", "observing the all-zero state at the end of the kernel", "kernel to the actual labels of the # training data.", "the kernel # target alignment: kta_init = qml.kernels.target_alignment(X, Y, init_kernel,", "on the full dataset every 50 steps. if (i +", "not be used to make predictions on a # dataset,", "the overlap of the quantum states by first applying the", "kernel values between all elements of the dataset form the", "x_2, x_2^2) \\\\ k(\\boldsymbol{x}, \\boldsymbol{y}) &= x_1^2 y_1^2 + 2", "assess the impact of training the parameters of the quantum", "# is not expected to suffer from bad generalisation. 
#", "we define some map :math:`\\phi(\\boldsymbol{x})` that *embeds* our datapoints into", "purpose of optimal classification, we can choose the vector defining", "by considering a parameterised quantum circuit :math:`U(\\boldsymbol{x})` that maps a", "Canonical form of labels Y = labels.astype(int) return X, Y", "############################################################################## # Let's now have a look at our dataset.", "_zz[idx] = classifier.predict(np.array([_xx[idx], _yy[idx]])[np.newaxis, :]) plot_data = {\"_xx\": _xx, \"_yy\":", "with the Hilbert-Schmidt (or # Frobenius) scalar product # :math:`\\langle", "Remember that PennyLane's built-in optimizer works # to *minimize* the", "Y) print(f\"The accuracy of the kernel with random parameters is", "line does not divide the entire space into two regions", "kernel(x1, x2, params), assume_normalized_kernel=True, ) print(f\"Step {i+1} - Alignment =", "len(x)], wires=[wire]) i += inc qml.RY(params[0, j], wires=[wire]) qml.broadcast(unitary=qml.CRZ, pattern=\"ring\",", "This is shown in the # `demo on kernel-based training", "inspect it via the ``qml.kernels.square_kernel_matrix`` # method, which makes use", "small improvements. # # We can, however, resort to a", "x1, x2: kernel(x1, x2, init_params) K_init = qml.kernels.square_kernel_matrix(X, init_kernel, assume_normalized_kernel=True)", "imports: from pennylane import numpy as np import matplotlib as", "it is very # resource intensive, and since the accuracy", "computers, *quantum kernels* for short. In this tutorial you will", "kernel to not have additional parameters # besides the datapoints,", "will make use of regular gradient descent optimization. To speed", "It is reasonable to believe we can give # values", "------------- # In this demo, we will treat a toy", "To construct the SVM, we need to supply ``sklearn.svm.SVC`` with", "variational parameters. At this point we fix the number of", "y_i y_j. 
# # The assigned kernel is thus :math:`+1`", "j], wires=[wire]) qml.broadcast(unitary=qml.CRZ, pattern=\"ring\", wires=wires, parameters=params[1]) ############################################################################## # To construct", "Y, ax, num_sectors=None): \"\"\"Plot double cake data and corresponding sectors.\"\"\"", "note:: # Seen from a more theoretical side, :math:`\\operatorname{KA}` #", "with random # parameters: accuracy_trained = accuracy(svm_trained, X, Y) print(f\"The", "the # ansatz circuit to :math:`6`. init_params = random_params(num_wires=5, num_layers=6)", "are one of the cornerstones of classical machine learning. Here", "optimization. To speed up # the optimization we will not", "# the kernel you chose reproduces the actual similarities of", "ansatz(x1, params, wires=wires) adjoint_ansatz(x2, params, wires=wires) return qml.probs(wires=wires) ############################################################################## #", "\\phi(\\boldsymbol{x}_j)\\rangle. We call this function the *kernel*. It provides the", "variational embedding kernels and the available functionalities # to do", "init_kernel, assume_normalized_kernel=True) with np.printoptions(precision=3, suppress=True): print(K_init) ############################################################################## # Using the", "* np.pi, (num_layers, 2, num_wires), requires_grad=True) ############################################################################## # Together with", "- Alignment = {current_alignment:.3f}\") ############################################################################## # We want to assess", "given by the *overlap* of the associated embedded quantum states", "for the # variational parameters. At this point we fix", "y(\\boldsymbol{x}) = \\operatorname{sgn}(\\langle \\boldsymbol{w}, \\phi(\\boldsymbol{x})\\rangle + b). We will forgo", "circuit # ansatz. 
What it does is solving a different", "defining an *ideal* kernel # function that expresses the original", "the above formula only contains inner products between vectors in", "is given to it, which is why we # have", "might not seem useful at first, but notice the above", "U(\\boldsymbol{x}) |0 \\rangle. The kernel value is then given by", "labels2 = _make_circular_data(num_sectors) # x and y coordinates of the", "cosine of the angle between the kernel # matrices :math:`K_1`", "useful at first, but notice the above formula only contains", "= | \\langle\\psi(\\boldsymbol{x}_i)|\\psi(\\boldsymbol{x}_j)\\rangle|^2. \"\"\" ############################################################################## # A toy problem #", "cases the embedding :math:`\\phi` will be much costlier to compute", "for our dataset and random parameters is {kta_init:.3f}\") ############################################################################## #", "kernel-target alignment effectively captures how well # the kernel you", "of a kernel with trained parameters is {accuracy_trained:.3f}\") ############################################################################## #", "in seeing what the decision boundaries in this # classification", "# visually in more complex data sets. To this end", "improve the overall accuracy # of our SVC. # #", "the simplest methods to assign binary labels to datapoints: linear", "Following on the results that SVM's have proven good generalisation", "distrust the model. In this sense, our approach benefits from", "with np.printoptions(precision=3, suppress=True): print(K_init) ############################################################################## # Using the Quantum Embedding", "parameters fully determine # the embedding ansatz :math:`U(\\boldsymbol{x})`. # In", "more complex data sets. To this end we will introduce", "in the beginning. 
svm = SVC(kernel=lambda X1, X2: qml.kernels.kernel_matrix(X1, X2,", "of the kernel, # :math:`k(\\boldsymbol{x}_i,\\boldsymbol{x}_j) = k(\\boldsymbol{x}_j, \\boldsymbol{x}_i)`. # In", "dataset, and on the other hand # is not expected", "labels = np.hstack([labels1, -1 * labels2]) # Canonical form of", "4) # Define the cost function for optimization cost =", "# We are also interested in seeing what the decision", "# correct class, but also that there are no strong", "point we fix the number of layers in the #", "# is nothing else than the cosine of the angle", "+ 1) * sector_angle, lw=0, color=color, alpha=0.1, width=0.5, ) )", "module <https://pennylane.readthedocs.io/en/latest/code/qml_kernels.html>`__. The demo is based on Ref. [#Training_QEKs]_, a" ]
[ "n = number-1 self.color = ((n/s)*(255/s),(n%s)*(255/s),128) def draw(self,screen,font,x,y,width,height): pygame.draw.rect(screen,self.color,(x,y,width,height)) text", "#gimme a CHAR, not some weird integer domap = {", "self.game: return (time.monotonic() - self.start_t , BLACK) elif self.is_solved() and", "BOIS WHITE = (255,255,255) BLACK = (0,0,0) GREEN = (32,200,32)", "= [] for i in range(0,self.size): new.append(self.content[x][(i+1)%self.size]) for i in", "in range(0,self.size): if self.content[i][j].number != i+j*self.size+1: return False return True", "game if gameboard.is_solved() and gameboard.start_t > gameboard.end_t: gameboard.end_time() #for quitters", "self.game = True return self.start_time def end_time(self): print(\"time has ended\")", "BLACK = (0,0,0) GREEN = (32,200,32) keys = {\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0} last_was_Q", "k == \"q\": last_was_Q = True else: if last_was_Q: gameboard.start_time()", "workaroud #name the window & size it. pygame.display.set_caption(window_name) screen =", "/ (10 ** t_round) ) text_timer = font2.render(\"Time :\"+time_str,True,time[1]) text_moves", "return new def rotate_down(self,x): new = [] for i in", "self.start_t = time.monotonic() self.game = True return self.start_time def end_time(self):", "/ self.size) h = (height / self.size) x = i", "= [] start_t=0 end_t=0 game=False moves = 0 def __init__(self,size):", "new = [] for i in range(0,self.size): new.append(self.content[x][(i-1)%self.size]) for i", "gameboard.end_t: gameboard.end_time() #for quitters elif event.type == pygame.QUIT: print(\"Quitting...\") running", "TO CHANGE width = 500 height = 500 stats_height =", "\"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"q\":\"gameboard.scramble(scramble_turns)\" } #i guess? 
if k in ['w','a','s','d','q']:", "##VARIABLES TO CHANGE width = 500 height = 500 stats_height", "the game if gameboard.is_solved() and gameboard.start_t > gameboard.end_t: gameboard.end_time() elif", "CHANGE width = 500 height = 500 stats_height = 150", "i in range(0,self.size): new.append(self.content[(i-1)%self.size][y]) for i in range(0,self.size): self.content[i][y] =", "False exec(domap[k]) #end the game if gameboard.is_solved() and gameboard.start_t >", "(10 ** t_round) ) / (10 ** t_round) ) text_timer", "new[i] self.moves+=1 return new def rotate_right(self,y): new = [] for", "if (not self.is_solved()) and self.game: return (time.monotonic() - self.start_t ,", "new[i] self.moves+=1 return new def rotate_down(self,x): new = [] for", "= 50 t_round = 3 FPS = 30 ##DONT CHANGE", "in range(0,self.size): self.content[i][y] = new[i] self.moves+=1 return new def rotate_right(self,y):", "if gameboard.is_solved() and gameboard.start_t > gameboard.end_t: gameboard.end_time() elif event.type ==", "range(0,self.size): self.content[x][i] = new[i] self.moves+=1 return new def draw(self,screen,font): for", "j in range(0,self.size): if self.content[i][j].number != i+j*self.size+1: return False return", "running = False else: print(\"err0r, bAd 3v3nt lol\") assert False", "if k in ['w','a','s','d','q']: #starting game logic if k ==", "gameboard.end_time() #for quitters elif event.type == pygame.QUIT: print(\"Quitting...\") running =", "screen.blit(text_moves,(0,height+(stats_height/2))) #draw board gameboard.draw(screen,font) #update da screeeeeen pygame.display.update() #end the", "self.number = number n = number-1 self.color = ((n/s)*(255/s),(n%s)*(255/s),128) def", "} #i guess? 
if k in ['w','a','s','d','q']: #starting game logic", "j in range(0,self.size): w = (width / self.size) h =", "= [] for i in range(0,self.size): new.append(self.content[(i+1)%self.size][y]) for i in", "\"PyLoopover \"+str(board_size)+\"x\"+str(board_size) scramble_turns = 50 t_round = 3 FPS =", "= 0 def __init__(self,size): self.size = size for i in", "(32,200,32) keys = {\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0} last_was_Q = False class Tile: def", "500 stats_height = 150 board_size = 5 window_name = \"PyLoopover", "for i in range(0,size): self.content.append([]) for j in range(0,size): self.content[i].append(None)", "* h self.content[i][j].draw(screen,font,x,y,w,h) def scramble(self,n): for i in range(0,n): o", "class Board: content = [] start_t=0 end_t=0 game=False moves =", "self.rotate_down(random.randint(0,board_size-1)) self.game=False self.moves=0 return True def is_solved(self): for i in", "2: self.rotate_up(random.randint(0,board_size-1)) else: self.rotate_down(random.randint(0,board_size-1)) self.game=False self.moves=0 return True def is_solved(self):", "\"q\": last_was_Q = True else: if last_was_Q: gameboard.start_time() last_was_Q =", "h self.content[i][j].draw(screen,font,x,y,w,h) def scramble(self,n): for i in range(0,n): o =", "def rotate_right(self,y): new = [] for i in range(0,self.size): new.append(self.content[(i+1)%self.size][y])", "draw(self,screen,font,x,y,width,height): pygame.draw.rect(screen,self.color,(x,y,width,height)) text = font.render(str(self.number),True,BLACK) screen.blit(text,(x,y)) class Board: content =", "scramble_turns = 50 t_round = 3 FPS = 30 ##DONT", "5 window_name = \"PyLoopover \"+str(board_size)+\"x\"+str(board_size) scramble_turns = 50 t_round =", "FPS = 30 ##DONT CHANGE THESE BOIS WHITE = (255,255,255)", "self.moves=0 return True def is_solved(self): for i in range(0,self.size): for", "print(\"time has started\") self.start_t = time.monotonic() self.game = True return", ") text_timer = font2.render(\"Time 
:\"+time_str,True,time[1]) text_moves = font2.render(\"Moves:\"+str(gameboard.moves),True,time[1]) screen.blit(text_timer,(0,height)) screen.blit(text_moves,(0,height+(stats_height/2)))", "new[i] self.moves+=1 return new def rotate_up(self,x): new = [] for", "\"q\":\"gameboard.scramble(scramble_turns)\" } #i guess? if k in ['w','a','s','d','q']: #starting game", "game=False moves = 0 def __init__(self,size): self.size = size for", "i in range(0,self.size): new.append(self.content[(i+1)%self.size][y]) for i in range(0,self.size): self.content[i][y] =", "else: print(\"err0r, bAd 3v3nt lol\") assert False if __name__ ==", "in range(0,n): o = random.randint(0,3) if o == 0: self.rotate_left(random.randint(0,board_size-1))", "in range(0,self.size): self.content[x][i] = new[i] self.moves+=1 return new def draw(self,screen,font):", "pygame.QUIT: print(\"Quitting...\") running = False else: print(\"err0r, bAd 3v3nt lol\")", "keys = {\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0} last_was_Q = False class Tile: def __init__(self,number,s):", "= random.randint(0,3) if o == 0: self.rotate_left(random.randint(0,board_size-1)) elif o ==", "in range(0,size): self.content[i].append(None) self.content[i][j] = Tile(i+j*size+1,size) def rotate_left(self,y): new =", "True def start_time(self): print(\"time has started\") self.start_t = time.monotonic() self.game", "a CHAR, not some weird integer domap = { \"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\",", "else: return (0 , BLACK) def main(): gameboard = Board(board_size)", "no events allowed pygame.event.set_allowed(pygame.USEREVENT+1) #timer event pygame.event.set_allowed(pygame.KEYDOWN) pygame.event.set_allowed(pygame.QUIT) #4 quitters", "def draw(self,screen,font,x,y,width,height): pygame.draw.rect(screen,self.color,(x,y,width,height)) text = font.render(str(self.number),True,BLACK) screen.blit(text,(x,y)) class Board: content", "last_was_Q: gameboard.start_time() last_was_Q = False exec(domap[k]) #end 
the game if", "= (0,0,0) GREEN = (32,200,32) keys = {\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0} last_was_Q =", "while running: #eevveeentttss??? event = pygame.event.wait() if event.type == pygame.USEREVENT+1:", "text_timer = font2.render(\"Time :\"+time_str,True,time[1]) text_moves = font2.render(\"Moves:\"+str(gameboard.moves),True,time[1]) screen.blit(text_timer,(0,height)) screen.blit(text_moves,(0,height+(stats_height/2))) #draw", "int( time[0] * (10 ** t_round) ) / (10 **", "= Board(board_size) pygame.init() pygame.mixer.quit() #weird workaroud #name the window &", "def start_time(self): print(\"time has started\") self.start_t = time.monotonic() self.game =", "pygame.display.update() #end the game if gameboard.is_solved() and gameboard.start_t > gameboard.end_t:", "new[i] self.moves+=1 return new def draw(self,screen,font): for i in range(0,self.size):", "BLACK) elif self.is_solved() and self.game: return (self.end_t - self.start_t ,", "Tile(i+j*size+1,size) def rotate_left(self,y): new = [] for i in range(0,self.size):", "pygame.event.wait() if event.type == pygame.USEREVENT+1: #a fresh canvas screen.fill(WHITE) #draw", "event = pygame.event.wait() if event.type == pygame.USEREVENT+1: #a fresh canvas", "= False exec(domap[k]) #end the game if gameboard.is_solved() and gameboard.start_t", "= True else: if last_was_Q: gameboard.start_time() last_was_Q = False exec(domap[k])", "return (time.monotonic() - self.start_t , BLACK) elif self.is_solved() and self.game:", "new.append(self.content[x][(i-1)%self.size]) for i in range(0,self.size): self.content[x][i] = new[i] self.moves+=1 return", "\"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", 
\"q\":\"gameboard.scramble(scramble_turns)\" } #i guess? if k", "* w y = j * h self.content[i][j].draw(screen,font,x,y,w,h) def scramble(self,n):", "/ self.size) x = i * w y = j", "#!/usr/bin/python3 import pygame import random import time ##VARIABLES TO CHANGE", "self.size) x = i * w y = j *", "return True def is_solved(self): for i in range(0,self.size): for j", "(self.end_t - self.start_t , GREEN) else: return (0 , BLACK)", "k = chr(event.key) #gimme a CHAR, not some weird integer", "que pygame.event.set_allowed(None) #start with no events allowed pygame.event.set_allowed(pygame.USEREVENT+1) #timer event", "self.content[i][j].draw(screen,font,x,y,w,h) def scramble(self,n): for i in range(0,n): o = random.randint(0,3)", "#i guess? if k in ['w','a','s','d','q']: #starting game logic if", "canvas screen.fill(WHITE) #draw stats time = gameboard.get_time() time_str = str(", "board gameboard.draw(screen,font) #update da screeeeeen pygame.display.update() #end the game if", "gameboard.start_time() last_was_Q = False exec(domap[k]) #end the game if gameboard.is_solved()", "new = [] for i in range(0,self.size): new.append(self.content[x][(i+1)%self.size]) for i", "#weird workaroud #name the window & size it. 
pygame.display.set_caption(window_name) screen", "#timer event pygame.event.set_allowed(pygame.KEYDOWN) pygame.event.set_allowed(pygame.QUIT) #4 quitters #setup fonts font =", "0: self.rotate_left(random.randint(0,board_size-1)) elif o == 1: self.rotate_right(random.randint(0,board_size-1)) elif o ==", "not some weird integer domap = { \"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\",", "GREEN = (32,200,32) keys = {\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0} last_was_Q = False class", "if event.type == pygame.USEREVENT+1: #a fresh canvas screen.fill(WHITE) #draw stats", "#setup fonts font = pygame.font.SysFont('mono',int((width/board_size)/1.14)) font2 = pygame.font.SysFont('mono',int(stats_height/2.3)) #main l00p", "range(0,self.size): new.append(self.content[(i-1)%self.size][y]) for i in range(0,self.size): self.content[i][y] = new[i] self.moves+=1", "scramble(self,n): for i in range(0,n): o = random.randint(0,3) if o", "#start with no events allowed pygame.event.set_allowed(pygame.USEREVENT+1) #timer event pygame.event.set_allowed(pygame.KEYDOWN) pygame.event.set_allowed(pygame.QUIT)", "j in range(0,size): self.content[i].append(None) self.content[i][j] = Tile(i+j*size+1,size) def rotate_left(self,y): new", "GREEN) else: return (0 , BLACK) def main(): gameboard =", "= 150 board_size = 5 window_name = \"PyLoopover \"+str(board_size)+\"x\"+str(board_size) scramble_turns", "#draw board gameboard.draw(screen,font) #update da screeeeeen pygame.display.update() #end the game", "new.append(self.content[(i+1)%self.size][y]) for i in range(0,self.size): self.content[i][y] = new[i] self.moves+=1 return", "for i in range(0,self.size): for j in range(0,self.size): if self.content[i][j].number", "i in range(0,size): self.content.append([]) for j in range(0,size): 
self.content[i].append(None) self.content[i][j]", "= 500 stats_height = 150 board_size = 5 window_name =", "in range(0,self.size): for j in range(0,self.size): w = (width /", "and gameboard.start_t > gameboard.end_t: gameboard.end_time() elif event.type == pygame.KEYDOWN: k", "width = 500 height = 500 stats_height = 150 board_size", "def get_time(self): if (not self.is_solved()) and self.game: return (time.monotonic() -", "[] for i in range(0,self.size): new.append(self.content[x][(i-1)%self.size]) for i in range(0,self.size):", "bAd 3v3nt lol\") assert False if __name__ == \"__main__\": main()", "THESE BOIS WHITE = (255,255,255) BLACK = (0,0,0) GREEN =", "== 0: self.rotate_left(random.randint(0,board_size-1)) elif o == 1: self.rotate_right(random.randint(0,board_size-1)) elif o", "True def is_solved(self): for i in range(0,self.size): for j in", "{\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0} last_was_Q = False class Tile: def __init__(self,number,s): self.number =", "def is_solved(self): for i in range(0,self.size): for j in range(0,self.size):", "range(0,self.size): self.content[x][i] = new[i] self.moves+=1 return new def rotate_up(self,x): new", "= (32,200,32) keys = {\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0} last_was_Q = False class Tile:", "** t_round) ) text_timer = font2.render(\"Time :\"+time_str,True,time[1]) text_moves = font2.render(\"Moves:\"+str(gameboard.moves),True,time[1])", "[] for i in range(0,self.size): new.append(self.content[(i-1)%self.size][y]) for i in range(0,self.size):", "#update da screeeeeen pygame.display.update() #end the game if gameboard.is_solved() and", "print(\"Quitting...\") running = False else: print(\"err0r, bAd 3v3nt lol\") assert", "quitters #setup fonts font = pygame.font.SysFont('mono',int((width/board_size)/1.14)) font2 = pygame.font.SysFont('mono',int(stats_height/2.3)) #main", "event.type == pygame.QUIT: print(\"Quitting...\") running = False else: print(\"err0r, bAd", "__init__(self,size): self.size = size for i in 
range(0,size): self.content.append([]) for", "in range(0,self.size): new.append(self.content[x][(i+1)%self.size]) for i in range(0,self.size): self.content[x][i] = new[i]", "if last_was_Q: gameboard.start_time() last_was_Q = False exec(domap[k]) #end the game", "o == 2: self.rotate_up(random.randint(0,board_size-1)) else: self.rotate_down(random.randint(0,board_size-1)) self.game=False self.moves=0 return True", "new def rotate_right(self,y): new = [] for i in range(0,self.size):", "start_time(self): print(\"time has started\") self.start_t = time.monotonic() self.game = True", "stats time = gameboard.get_time() time_str = str( int( time[0] *", "some weird integer domap = { \"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\",", "& size it. pygame.display.set_caption(window_name) screen = pygame.display.set_mode((width,height+stats_height),0,32) #setup framerate pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000))", "self.moves+=1 return new def rotate_right(self,y): new = [] for i", "return (0 , BLACK) def main(): gameboard = Board(board_size) pygame.init()", "Tile: def __init__(self,number,s): self.number = number n = number-1 self.color", "size it. 
pygame.display.set_caption(window_name) screen = pygame.display.set_mode((width,height+stats_height),0,32) #setup framerate pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000)) #setup", "and self.game: return (self.end_t - self.start_t , GREEN) else: return", "= font2.render(\"Moves:\"+str(gameboard.moves),True,time[1]) screen.blit(text_timer,(0,height)) screen.blit(text_moves,(0,height+(stats_height/2))) #draw board gameboard.draw(screen,font) #update da screeeeeen", "number n = number-1 self.color = ((n/s)*(255/s),(n%s)*(255/s),128) def draw(self,screen,font,x,y,width,height): pygame.draw.rect(screen,self.color,(x,y,width,height))", "i * w y = j * h self.content[i][j].draw(screen,font,x,y,w,h) def", "= \"PyLoopover \"+str(board_size)+\"x\"+str(board_size) scramble_turns = 50 t_round = 3 FPS", "self.moves+=1 return new def draw(self,screen,font): for i in range(0,self.size): for", "= str( int( time[0] * (10 ** t_round) ) /", "= True while running: #eevveeentttss??? event = pygame.event.wait() if event.type", "= pygame.font.SysFont('mono',int(stats_height/2.3)) #main l00p running = True while running: #eevveeentttss???", "##DONT CHANGE THESE BOIS WHITE = (255,255,255) BLACK = (0,0,0)", "#end the game if gameboard.is_solved() and gameboard.start_t > gameboard.end_t: gameboard.end_time()", "\"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"q\":\"gameboard.scramble(scramble_turns)\" } #i guess? 
if k in", "new = [] for i in range(0,self.size): new.append(self.content[(i-1)%self.size][y]) for i", "= [] for i in range(0,self.size): new.append(self.content[x][(i-1)%self.size]) for i in", "new def rotate_down(self,x): new = [] for i in range(0,self.size):", "get_time(self): if (not self.is_solved()) and self.game: return (time.monotonic() - self.start_t", "True while running: #eevveeentttss??? event = pygame.event.wait() if event.type ==", "content = [] start_t=0 end_t=0 game=False moves = 0 def", "= 500 height = 500 stats_height = 150 board_size =", "pygame.USEREVENT+1: #a fresh canvas screen.fill(WHITE) #draw stats time = gameboard.get_time()", "guess? if k in ['w','a','s','d','q']: #starting game logic if k", "and self.game: return (time.monotonic() - self.start_t , BLACK) elif self.is_solved()", "for j in range(0,self.size): if self.content[i][j].number != i+j*self.size+1: return False", "- self.start_t , BLACK) elif self.is_solved() and self.game: return (self.end_t", "pygame.init() pygame.mixer.quit() #weird workaroud #name the window & size it.", "font2 = pygame.font.SysFont('mono',int(stats_height/2.3)) #main l00p running = True while running:", "gameboard.draw(screen,font) #update da screeeeeen pygame.display.update() #end the game if gameboard.is_solved()", "= Tile(i+j*size+1,size) def rotate_left(self,y): new = [] for i in", "size for i in range(0,size): self.content.append([]) for j in range(0,size):", "last_was_Q = True else: if last_was_Q: gameboard.start_time() last_was_Q = False", "end_t=0 game=False moves = 0 def __init__(self,size): self.size = size", "pygame.event.set_allowed(pygame.QUIT) #4 quitters #setup fonts font = pygame.font.SysFont('mono',int((width/board_size)/1.14)) font2 =", "pygame.display.set_mode((width,height+stats_height),0,32) #setup framerate pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000)) #setup event que pygame.event.set_allowed(None) #start with", "self.content[x][i] = new[i] self.moves+=1 return new def 
draw(self,screen,font): for i", "== 1: self.rotate_right(random.randint(0,board_size-1)) elif o == 2: self.rotate_up(random.randint(0,board_size-1)) else: self.rotate_down(random.randint(0,board_size-1))", ") / (10 ** t_round) ) text_timer = font2.render(\"Time :\"+time_str,True,time[1])", "= ((n/s)*(255/s),(n%s)*(255/s),128) def draw(self,screen,font,x,y,width,height): pygame.draw.rect(screen,self.color,(x,y,width,height)) text = font.render(str(self.number),True,BLACK) screen.blit(text,(x,y)) class", "= False class Tile: def __init__(self,number,s): self.number = number n", "else: self.rotate_down(random.randint(0,board_size-1)) self.game=False self.moves=0 return True def is_solved(self): for i", "> gameboard.end_t: gameboard.end_time() elif event.type == pygame.KEYDOWN: k = chr(event.key)", "i in range(0,self.size): self.content[x][i] = new[i] self.moves+=1 return new def", "self.content[i][y] = new[i] self.moves+=1 return new def rotate_down(self,x): new =", "screen = pygame.display.set_mode((width,height+stats_height),0,32) #setup framerate pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000)) #setup event que pygame.event.set_allowed(None)", "= new[i] self.moves+=1 return new def draw(self,screen,font): for i in", "font2.render(\"Moves:\"+str(gameboard.moves),True,time[1]) screen.blit(text_timer,(0,height)) screen.blit(text_moves,(0,height+(stats_height/2))) #draw board gameboard.draw(screen,font) #update da screeeeeen pygame.display.update()", "range(0,self.size): for j in range(0,self.size): if self.content[i][j].number != i+j*self.size+1: return", "the window & size it. 
pygame.display.set_caption(window_name) screen = pygame.display.set_mode((width,height+stats_height),0,32) #setup", "chr(event.key) #gimme a CHAR, not some weird integer domap =", "= number-1 self.color = ((n/s)*(255/s),(n%s)*(255/s),128) def draw(self,screen,font,x,y,width,height): pygame.draw.rect(screen,self.color,(x,y,width,height)) text =", "= 30 ##DONT CHANGE THESE BOIS WHITE = (255,255,255) BLACK", "self.is_solved() and self.game: return (self.end_t - self.start_t , GREEN) else:", ":\"+time_str,True,time[1]) text_moves = font2.render(\"Moves:\"+str(gameboard.moves),True,time[1]) screen.blit(text_timer,(0,height)) screen.blit(text_moves,(0,height+(stats_height/2))) #draw board gameboard.draw(screen,font) #update", "o == 1: self.rotate_right(random.randint(0,board_size-1)) elif o == 2: self.rotate_up(random.randint(0,board_size-1)) else:", "w = (width / self.size) h = (height / self.size)", "stats_height = 150 board_size = 5 window_name = \"PyLoopover \"+str(board_size)+\"x\"+str(board_size)", "in range(0,self.size): new.append(self.content[x][(i-1)%self.size]) for i in range(0,self.size): self.content[x][i] = new[i]", "str( int( time[0] * (10 ** t_round) ) / (10", "= { \"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"q\":\"gameboard.scramble(scramble_turns)\" } #i guess?", "events allowed pygame.event.set_allowed(pygame.USEREVENT+1) #timer event pygame.event.set_allowed(pygame.KEYDOWN) pygame.event.set_allowed(pygame.QUIT) #4 quitters #setup", "[] for i in range(0,self.size): new.append(self.content[(i+1)%self.size][y]) for i in range(0,self.size):", "BLACK) def main(): gameboard = Board(board_size) pygame.init() pygame.mixer.quit() #weird workaroud", "import pygame import 
random import time ##VARIABLES TO CHANGE width", "self.size) h = (height / self.size) x = i *", "== \"q\": last_was_Q = True else: if last_was_Q: gameboard.start_time() last_was_Q", "if gameboard.is_solved() and gameboard.start_t > gameboard.end_t: gameboard.end_time() #for quitters elif", "for i in range(0,self.size): for j in range(0,self.size): w =", "= j * h self.content[i][j].draw(screen,font,x,y,w,h) def scramble(self,n): for i in", "new.append(self.content[x][(i+1)%self.size]) for i in range(0,self.size): self.content[x][i] = new[i] self.moves+=1 return", "50 t_round = 3 FPS = 30 ##DONT CHANGE THESE", "* (10 ** t_round) ) / (10 ** t_round) )", "CHANGE THESE BOIS WHITE = (255,255,255) BLACK = (0,0,0) GREEN", "return False return True def start_time(self): print(\"time has started\") self.start_t", "moves = 0 def __init__(self,size): self.size = size for i", "range(0,n): o = random.randint(0,3) if o == 0: self.rotate_left(random.randint(0,board_size-1)) elif", "#starting game logic if k == \"q\": last_was_Q = True", "= 3 FPS = 30 ##DONT CHANGE THESE BOIS WHITE", "fonts font = pygame.font.SysFont('mono',int((width/board_size)/1.14)) font2 = pygame.font.SysFont('mono',int(stats_height/2.3)) #main l00p running", "last_was_Q = False exec(domap[k]) #end the game if gameboard.is_solved() and", "time = gameboard.get_time() time_str = str( int( time[0] * (10", "board_size = 5 window_name = \"PyLoopover \"+str(board_size)+\"x\"+str(board_size) scramble_turns = 50", "new = [] for i in range(0,self.size): new.append(self.content[(i+1)%self.size][y]) for i", "False else: print(\"err0r, bAd 3v3nt lol\") assert False if __name__", "random import time ##VARIABLES TO CHANGE width = 500 height", "False class Tile: def __init__(self,number,s): self.number = number n =", "main(): gameboard = Board(board_size) pygame.init() pygame.mixer.quit() #weird workaroud #name the", "== pygame.QUIT: print(\"Quitting...\") running = False else: print(\"err0r, bAd 3v3nt", "= 5 window_name = 
\"PyLoopover \"+str(board_size)+\"x\"+str(board_size) scramble_turns = 50 t_round", "class Tile: def __init__(self,number,s): self.number = number n = number-1", "rotate_up(self,x): new = [] for i in range(0,self.size): new.append(self.content[x][(i+1)%self.size]) for", "range(0,self.size): new.append(self.content[(i+1)%self.size][y]) for i in range(0,self.size): self.content[i][y] = new[i] self.moves+=1", "rotate_down(self,x): new = [] for i in range(0,self.size): new.append(self.content[x][(i-1)%self.size]) for", "if k == \"q\": last_was_Q = True else: if last_was_Q:", "if self.content[i][j].number != i+j*self.size+1: return False return True def start_time(self):", "time.monotonic() return self.end_time def get_time(self): if (not self.is_solved()) and self.game:", "Board(board_size) pygame.init() pygame.mixer.quit() #weird workaroud #name the window & size", "#for quitters elif event.type == pygame.QUIT: print(\"Quitting...\") running = False", "\"+str(board_size)+\"x\"+str(board_size) scramble_turns = 50 t_round = 3 FPS = 30", "l00p running = True while running: #eevveeentttss??? 
event = pygame.event.wait()", "gameboard = Board(board_size) pygame.init() pygame.mixer.quit() #weird workaroud #name the window", "3 FPS = 30 ##DONT CHANGE THESE BOIS WHITE =", "#setup framerate pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000)) #setup event que pygame.event.set_allowed(None) #start with no", "#4 quitters #setup fonts font = pygame.font.SysFont('mono',int((width/board_size)/1.14)) font2 = pygame.font.SysFont('mono',int(stats_height/2.3))", "def __init__(self,size): self.size = size for i in range(0,size): self.content.append([])", "def draw(self,screen,font): for i in range(0,self.size): for j in range(0,self.size):", "is_solved(self): for i in range(0,self.size): for j in range(0,self.size): if", "= font2.render(\"Time :\"+time_str,True,time[1]) text_moves = font2.render(\"Moves:\"+str(gameboard.moves),True,time[1]) screen.blit(text_timer,(0,height)) screen.blit(text_moves,(0,height+(stats_height/2))) #draw board", "def rotate_down(self,x): new = [] for i in range(0,self.size): new.append(self.content[x][(i-1)%self.size])", "time ##VARIABLES TO CHANGE width = 500 height = 500", "gameboard.get_time() time_str = str( int( time[0] * (10 ** t_round)", "gameboard.start_t > gameboard.end_t: gameboard.end_time() elif event.type == pygame.KEYDOWN: k =", "= time.monotonic() self.game = True return self.start_time def end_time(self): print(\"time", "= gameboard.get_time() time_str = str( int( time[0] * (10 **", "window & size it. 
pygame.display.set_caption(window_name) screen = pygame.display.set_mode((width,height+stats_height),0,32) #setup framerate", "elif o == 2: self.rotate_up(random.randint(0,board_size-1)) else: self.rotate_down(random.randint(0,board_size-1)) self.game=False self.moves=0 return", "= number n = number-1 self.color = ((n/s)*(255/s),(n%s)*(255/s),128) def draw(self,screen,font,x,y,width,height):", "self.is_solved()) and self.game: return (time.monotonic() - self.start_t , BLACK) elif", "in range(0,self.size): self.content[x][i] = new[i] self.moves+=1 return new def rotate_up(self,x):", "new def rotate_up(self,x): new = [] for i in range(0,self.size):", "o == 0: self.rotate_left(random.randint(0,board_size-1)) elif o == 1: self.rotate_right(random.randint(0,board_size-1)) elif", "pygame.KEYDOWN: k = chr(event.key) #gimme a CHAR, not some weird", "= (width / self.size) h = (height / self.size) x", "return self.end_time def get_time(self): if (not self.is_solved()) and self.game: return", "def end_time(self): print(\"time has ended\") self.end_t = time.monotonic() return self.end_time", "if o == 0: self.rotate_left(random.randint(0,board_size-1)) elif o == 1: self.rotate_right(random.randint(0,board_size-1))", "x = i * w y = j * h", "the game if gameboard.is_solved() and gameboard.start_t > gameboard.end_t: gameboard.end_time() #for", "elif self.is_solved() and self.game: return (self.end_t - self.start_t , GREEN)", "event.type == pygame.KEYDOWN: k = chr(event.key) #gimme a CHAR, not", "for i in range(0,self.size): new.append(self.content[(i-1)%self.size][y]) for i in range(0,self.size): self.content[i][y]", "pygame.font.SysFont('mono',int((width/board_size)/1.14)) font2 = pygame.font.SysFont('mono',int(stats_height/2.3)) #main l00p running = True while", "font = pygame.font.SysFont('mono',int((width/board_size)/1.14)) font2 = pygame.font.SysFont('mono',int(stats_height/2.3)) #main l00p running =", "Board: content = [] start_t=0 end_t=0 game=False moves = 0", "elif o == 
1: self.rotate_right(random.randint(0,board_size-1)) elif o == 2: self.rotate_up(random.randint(0,board_size-1))", "running = True while running: #eevveeentttss??? event = pygame.event.wait() if", "range(0,self.size): self.content[i][y] = new[i] self.moves+=1 return new def rotate_right(self,y): new", "random.randint(0,3) if o == 0: self.rotate_left(random.randint(0,board_size-1)) elif o == 1:", "rotate_left(self,y): new = [] for i in range(0,self.size): new.append(self.content[(i-1)%self.size][y]) for", "#eevveeentttss??? event = pygame.event.wait() if event.type == pygame.USEREVENT+1: #a fresh", "(width / self.size) h = (height / self.size) x =", "integer domap = { \"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"q\":\"gameboard.scramble(scramble_turns)\" }", "i in range(0,self.size): self.content[i][y] = new[i] self.moves+=1 return new def", "#main l00p running = True while running: #eevveeentttss??? 
event =", "def rotate_up(self,x): new = [] for i in range(0,self.size): new.append(self.content[x][(i+1)%self.size])", "= i * w y = j * h self.content[i][j].draw(screen,font,x,y,w,h)", "150 board_size = 5 window_name = \"PyLoopover \"+str(board_size)+\"x\"+str(board_size) scramble_turns =", "in range(0,size): self.content.append([]) for j in range(0,size): self.content[i].append(None) self.content[i][j] =", "False return True def start_time(self): print(\"time has started\") self.start_t =", "!= i+j*self.size+1: return False return True def start_time(self): print(\"time has", ", BLACK) elif self.is_solved() and self.game: return (self.end_t - self.start_t", "screen.blit(text,(x,y)) class Board: content = [] start_t=0 end_t=0 game=False moves", "(not self.is_solved()) and self.game: return (time.monotonic() - self.start_t , BLACK)", "= font.render(str(self.number),True,BLACK) screen.blit(text,(x,y)) class Board: content = [] start_t=0 end_t=0", "pygame.font.SysFont('mono',int(stats_height/2.3)) #main l00p running = True while running: #eevveeentttss??? 
event", "new def draw(self,screen,font): for i in range(0,self.size): for j in", "quitters elif event.type == pygame.QUIT: print(\"Quitting...\") running = False else:", "has ended\") self.end_t = time.monotonic() return self.end_time def get_time(self): if", "font2.render(\"Time :\"+time_str,True,time[1]) text_moves = font2.render(\"Moves:\"+str(gameboard.moves),True,time[1]) screen.blit(text_timer,(0,height)) screen.blit(text_moves,(0,height+(stats_height/2))) #draw board gameboard.draw(screen,font)", "return new def draw(self,screen,font): for i in range(0,self.size): for j", "screeeeeen pygame.display.update() #end the game if gameboard.is_solved() and gameboard.start_t >", "range(0,self.size): for j in range(0,self.size): w = (width / self.size)", "= time.monotonic() return self.end_time def get_time(self): if (not self.is_solved()) and", "True return self.start_time def end_time(self): print(\"time has ended\") self.end_t =", "= False else: print(\"err0r, bAd 3v3nt lol\") assert False if", "rotate_right(self,y): new = [] for i in range(0,self.size): new.append(self.content[(i+1)%self.size][y]) for", "return self.start_time def end_time(self): print(\"time has ended\") self.end_t = time.monotonic()", "= (height / self.size) x = i * w y", "1: self.rotate_right(random.randint(0,board_size-1)) elif o == 2: self.rotate_up(random.randint(0,board_size-1)) else: self.rotate_down(random.randint(0,board_size-1)) self.game=False", "game logic if k == \"q\": last_was_Q = True else:", "screen.blit(text_timer,(0,height)) screen.blit(text_moves,(0,height+(stats_height/2))) #draw board gameboard.draw(screen,font) #update da screeeeeen pygame.display.update() #end", "WHITE = (255,255,255) BLACK = (0,0,0) GREEN = (32,200,32) keys", "domap = { \"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", 
\"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"q\":\"gameboard.scramble(scramble_turns)\" } #i", "weird integer domap = { \"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"q\":\"gameboard.scramble(scramble_turns)\"", "= pygame.event.wait() if event.type == pygame.USEREVENT+1: #a fresh canvas screen.fill(WHITE)", "text = font.render(str(self.number),True,BLACK) screen.blit(text,(x,y)) class Board: content = [] start_t=0", "and gameboard.start_t > gameboard.end_t: gameboard.end_time() #for quitters elif event.type ==", "def __init__(self,number,s): self.number = number n = number-1 self.color =", "elif event.type == pygame.QUIT: print(\"Quitting...\") running = False else: print(\"err0r,", "draw(self,screen,font): for i in range(0,self.size): for j in range(0,self.size): w", "allowed pygame.event.set_allowed(pygame.USEREVENT+1) #timer event pygame.event.set_allowed(pygame.KEYDOWN) pygame.event.set_allowed(pygame.QUIT) #4 quitters #setup fonts", "o = random.randint(0,3) if o == 0: self.rotate_left(random.randint(0,board_size-1)) elif o", "pygame.event.set_allowed(pygame.KEYDOWN) pygame.event.set_allowed(pygame.QUIT) #4 quitters #setup fonts font = pygame.font.SysFont('mono',int((width/board_size)/1.14)) font2", "(10 ** t_round) ) text_timer = font2.render(\"Time :\"+time_str,True,time[1]) text_moves =", "text_moves = font2.render(\"Moves:\"+str(gameboard.moves),True,time[1]) screen.blit(text_timer,(0,height)) screen.blit(text_moves,(0,height+(stats_height/2))) #draw board gameboard.draw(screen,font) #update da", ", BLACK) def main(): gameboard = Board(board_size) 
pygame.init() pygame.mixer.quit() #weird", "range(0,self.size): new.append(self.content[x][(i+1)%self.size]) for i in range(0,self.size): self.content[x][i] = new[i] self.moves+=1", "['w','a','s','d','q']: #starting game logic if k == \"q\": last_was_Q =", "time_str = str( int( time[0] * (10 ** t_round) )", "in range(0,self.size): for j in range(0,self.size): if self.content[i][j].number != i+j*self.size+1:", "def main(): gameboard = Board(board_size) pygame.init() pygame.mixer.quit() #weird workaroud #name", "def scramble(self,n): for i in range(0,n): o = random.randint(0,3) if", "** t_round) ) / (10 ** t_round) ) text_timer =", "else: if last_was_Q: gameboard.start_time() last_was_Q = False exec(domap[k]) #end the", "#draw stats time = gameboard.get_time() time_str = str( int( time[0]", "self.content[i][j].number != i+j*self.size+1: return False return True def start_time(self): print(\"time", "in range(0,self.size): self.content[i][y] = new[i] self.moves+=1 return new def rotate_down(self,x):", "return new def rotate_up(self,x): new = [] for i in", "screen.fill(WHITE) #draw stats time = gameboard.get_time() time_str = str( int(", "exec(domap[k]) #end the game if gameboard.is_solved() and gameboard.start_t > gameboard.end_t:", "running: #eevveeentttss??? event = pygame.event.wait() if event.type == pygame.USEREVENT+1: #a", "\"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"q\":\"gameboard.scramble(scramble_turns)\" } #i guess? 
if k in ['w','a','s','d','q']: #starting", "= True return self.start_time def end_time(self): print(\"time has ended\") self.end_t", "== pygame.KEYDOWN: k = chr(event.key) #gimme a CHAR, not some", "self.color = ((n/s)*(255/s),(n%s)*(255/s),128) def draw(self,screen,font,x,y,width,height): pygame.draw.rect(screen,self.color,(x,y,width,height)) text = font.render(str(self.number),True,BLACK) screen.blit(text,(x,y))", "((n/s)*(255/s),(n%s)*(255/s),128) def draw(self,screen,font,x,y,width,height): pygame.draw.rect(screen,self.color,(x,y,width,height)) text = font.render(str(self.number),True,BLACK) screen.blit(text,(x,y)) class Board:", "self.moves+=1 return new def rotate_down(self,x): new = [] for i", "start_t=0 end_t=0 game=False moves = 0 def __init__(self,size): self.size =", "= new[i] self.moves+=1 return new def rotate_right(self,y): new = []", "for j in range(0,self.size): w = (width / self.size) h", "range(0,self.size): w = (width / self.size) h = (height /", "in range(0,self.size): new.append(self.content[(i+1)%self.size][y]) for i in range(0,self.size): self.content[i][y] = new[i]", "range(0,size): self.content.append([]) for j in range(0,size): self.content[i].append(None) self.content[i][j] = Tile(i+j*size+1,size)", "def rotate_left(self,y): new = [] for i in range(0,self.size): new.append(self.content[(i-1)%self.size][y])", "= size for i in range(0,size): self.content.append([]) for j in", "gameboard.end_time() elif event.type == pygame.KEYDOWN: k = chr(event.key) #gimme a", "h = (height / self.size) x = i * w", "print(\"err0r, bAd 3v3nt lol\") assert False if __name__ == \"__main__\":", "#name the window & size it. 
pygame.display.set_caption(window_name) screen = pygame.display.set_mode((width,height+stats_height),0,32)", "pygame.draw.rect(screen,self.color,(x,y,width,height)) text = font.render(str(self.number),True,BLACK) screen.blit(text,(x,y)) class Board: content = []", "gameboard.start_t > gameboard.end_t: gameboard.end_time() #for quitters elif event.type == pygame.QUIT:", "pygame.event.set_allowed(pygame.USEREVENT+1) #timer event pygame.event.set_allowed(pygame.KEYDOWN) pygame.event.set_allowed(pygame.QUIT) #4 quitters #setup fonts font", "pygame import random import time ##VARIABLES TO CHANGE width =", "pygame.event.set_allowed(None) #start with no events allowed pygame.event.set_allowed(pygame.USEREVENT+1) #timer event pygame.event.set_allowed(pygame.KEYDOWN)", "self.end_time def get_time(self): if (not self.is_solved()) and self.game: return (time.monotonic()", "self.game=False self.moves=0 return True def is_solved(self): for i in range(0,self.size):", "for i in range(0,self.size): new.append(self.content[x][(i+1)%self.size]) for i in range(0,self.size): self.content[x][i]", "import random import time ##VARIABLES TO CHANGE width = 500", "self.size = size for i in range(0,size): self.content.append([]) for j", "#setup event que pygame.event.set_allowed(None) #start with no events allowed pygame.event.set_allowed(pygame.USEREVENT+1)", "j * h self.content[i][j].draw(screen,font,x,y,w,h) def scramble(self,n): for i in range(0,n):", "window_name = \"PyLoopover \"+str(board_size)+\"x\"+str(board_size) scramble_turns = 50 t_round = 3", "= chr(event.key) #gimme a CHAR, not some weird integer domap", "return new def rotate_right(self,y): new = [] for i in", "= pygame.display.set_mode((width,height+stats_height),0,32) #setup framerate pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000)) #setup event que pygame.event.set_allowed(None) #start", "for j in range(0,size): self.content[i].append(None) self.content[i][j] = Tile(i+j*size+1,size) def rotate_left(self,y):", "= 
[] for i in range(0,self.size): new.append(self.content[(i-1)%self.size][y]) for i in", "it. pygame.display.set_caption(window_name) screen = pygame.display.set_mode((width,height+stats_height),0,32) #setup framerate pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000)) #setup event", "end_time(self): print(\"time has ended\") self.end_t = time.monotonic() return self.end_time def", "CHAR, not some weird integer domap = { \"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\",", "for i in range(0,n): o = random.randint(0,3) if o ==", "self.rotate_right(random.randint(0,board_size-1)) elif o == 2: self.rotate_up(random.randint(0,board_size-1)) else: self.rotate_down(random.randint(0,board_size-1)) self.game=False self.moves=0", "event.type == pygame.USEREVENT+1: #a fresh canvas screen.fill(WHITE) #draw stats time", "__init__(self,number,s): self.number = number n = number-1 self.color = ((n/s)*(255/s),(n%s)*(255/s),128)", "import time ##VARIABLES TO CHANGE width = 500 height =", "(height / self.size) x = i * w y =", "self.content[x][i] = new[i] self.moves+=1 return new def rotate_up(self,x): new =", "has started\") self.start_t = time.monotonic() self.game = True return self.start_time", "framerate pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000)) #setup event que pygame.event.set_allowed(None) #start with no events", "[] start_t=0 end_t=0 game=False moves = 0 def __init__(self,size): self.size", "self.start_t , BLACK) elif self.is_solved() and self.game: return (self.end_t -", "return (self.end_t - self.start_t , GREEN) else: return (0 ,", "in range(0,self.size): new.append(self.content[(i-1)%self.size][y]) for i in range(0,self.size): self.content[i][y] = new[i]", "in ['w','a','s','d','q']: #starting game logic if k == \"q\": last_was_Q", "(255,255,255) BLACK = (0,0,0) GREEN = (32,200,32) keys = {\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0}", 
"return True def start_time(self): print(\"time has started\") self.start_t = time.monotonic()", "[] for i in range(0,self.size): new.append(self.content[x][(i+1)%self.size]) for i in range(0,self.size):", "t_round) ) text_timer = font2.render(\"Time :\"+time_str,True,time[1]) text_moves = font2.render(\"Moves:\"+str(gameboard.moves),True,time[1]) screen.blit(text_timer,(0,height))", "0 def __init__(self,size): self.size = size for i in range(0,size):", "self.start_time def end_time(self): print(\"time has ended\") self.end_t = time.monotonic() return", "i+j*self.size+1: return False return True def start_time(self): print(\"time has started\")", "#a fresh canvas screen.fill(WHITE) #draw stats time = gameboard.get_time() time_str", "gameboard.is_solved() and gameboard.start_t > gameboard.end_t: gameboard.end_time() elif event.type == pygame.KEYDOWN:", "= new[i] self.moves+=1 return new def rotate_down(self,x): new = []", "fresh canvas screen.fill(WHITE) #draw stats time = gameboard.get_time() time_str =", "> gameboard.end_t: gameboard.end_time() #for quitters elif event.type == pygame.QUIT: print(\"Quitting...\")", "pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000)) #setup event que pygame.event.set_allowed(None) #start with no events allowed", "True else: if last_was_Q: gameboard.start_time() last_was_Q = False exec(domap[k]) #end", "event que pygame.event.set_allowed(None) #start with no events allowed pygame.event.set_allowed(pygame.USEREVENT+1) #timer", "logic if k == \"q\": last_was_Q = True else: if", "y = j * h self.content[i][j].draw(screen,font,x,y,w,h) def scramble(self,n): for i", "w y = j * h self.content[i][j].draw(screen,font,x,y,w,h) def scramble(self,n): for", "i in range(0,self.size): for j in range(0,self.size): w = (width", "self.end_t = time.monotonic() return self.end_time def get_time(self): if (not self.is_solved())", "pygame.display.set_caption(window_name) screen = pygame.display.set_mode((width,height+stats_height),0,32) #setup 
framerate pygame.time.set_timer(pygame.USEREVENT+1,int((1/FPS)*1000)) #setup event que", "self.rotate_up(random.randint(0,board_size-1)) else: self.rotate_down(random.randint(0,board_size-1)) self.game=False self.moves=0 return True def is_solved(self): for", "gameboard.end_t: gameboard.end_time() elif event.type == pygame.KEYDOWN: k = chr(event.key) #gimme", "== pygame.USEREVENT+1: #a fresh canvas screen.fill(WHITE) #draw stats time =", "self.start_t , GREEN) else: return (0 , BLACK) def main():", "self.content.append([]) for j in range(0,size): self.content[i].append(None) self.content[i][j] = Tile(i+j*size+1,size) def", "in range(0,self.size): w = (width / self.size) h = (height", "pygame.mixer.quit() #weird workaroud #name the window & size it. pygame.display.set_caption(window_name)", "event pygame.event.set_allowed(pygame.KEYDOWN) pygame.event.set_allowed(pygame.QUIT) #4 quitters #setup fonts font = pygame.font.SysFont('mono',int((width/board_size)/1.14))", "- self.start_t , GREEN) else: return (0 , BLACK) def", "for i in range(0,self.size): self.content[i][y] = new[i] self.moves+=1 return new", "500 height = 500 stats_height = 150 board_size = 5", "(0,0,0) GREEN = (32,200,32) keys = {\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0} last_was_Q = False", "self.content[i][j] = Tile(i+j*size+1,size) def rotate_left(self,y): new = [] for i", "= pygame.font.SysFont('mono',int((width/board_size)/1.14)) font2 = pygame.font.SysFont('mono',int(stats_height/2.3)) #main l00p running = True", "self.moves+=1 return new def rotate_up(self,x): new = [] for i", "range(0,size): self.content[i].append(None) self.content[i][j] = Tile(i+j*size+1,size) def rotate_left(self,y): new = []", "for i in range(0,self.size): new.append(self.content[(i+1)%self.size][y]) for i in range(0,self.size): self.content[i][y]", "gameboard.is_solved() and gameboard.start_t > gameboard.end_t: gameboard.end_time() #for quitters elif event.type", "print(\"time has ended\") self.end_t = time.monotonic() return 
self.end_time def get_time(self):", "self.content[i][y] = new[i] self.moves+=1 return new def rotate_right(self,y): new =", "height = 500 stats_height = 150 board_size = 5 window_name", "= {\"w\":0,\"a\":0,\"s\":0,\"d\":0,\"q\":0} last_was_Q = False class Tile: def __init__(self,number,s): self.number", "for i in range(0,self.size): self.content[x][i] = new[i] self.moves+=1 return new", "time[0] * (10 ** t_round) ) / (10 ** t_round)", "t_round = 3 FPS = 30 ##DONT CHANGE THESE BOIS", "30 ##DONT CHANGE THESE BOIS WHITE = (255,255,255) BLACK =", "range(0,self.size): new.append(self.content[x][(i-1)%self.size]) for i in range(0,self.size): self.content[x][i] = new[i] self.moves+=1", "self.rotate_left(random.randint(0,board_size-1)) elif o == 1: self.rotate_right(random.randint(0,board_size-1)) elif o == 2:", "game if gameboard.is_solved() and gameboard.start_t > gameboard.end_t: gameboard.end_time() elif event.type", "i in range(0,self.size): new.append(self.content[x][(i-1)%self.size]) for i in range(0,self.size): self.content[x][i] =", "for i in range(0,self.size): new.append(self.content[x][(i-1)%self.size]) for i in range(0,self.size): self.content[x][i]", "ended\") self.end_t = time.monotonic() return self.end_time def get_time(self): if (not", "time.monotonic() self.game = True return self.start_time def end_time(self): print(\"time has", "i in range(0,self.size): new.append(self.content[x][(i+1)%self.size]) for i in range(0,self.size): self.content[x][i] =", "font.render(str(self.number),True,BLACK) screen.blit(text,(x,y)) class Board: content = [] start_t=0 end_t=0 game=False", "started\") self.start_t = time.monotonic() self.game = True return self.start_time def", "self.content[i].append(None) self.content[i][j] = Tile(i+j*size+1,size) def rotate_left(self,y): new = [] for", "last_was_Q = False class Tile: def __init__(self,number,s): self.number = number", ", GREEN) else: return (0 , BLACK) def main(): gameboard", "t_round) ) / (10 ** t_round) ) text_timer = 
font2.render(\"Time", "(time.monotonic() - self.start_t , BLACK) elif self.is_solved() and self.game: return", "self.game: return (self.end_t - self.start_t , GREEN) else: return (0", "i in range(0,n): o = random.randint(0,3) if o == 0:", "number-1 self.color = ((n/s)*(255/s),(n%s)*(255/s),128) def draw(self,screen,font,x,y,width,height): pygame.draw.rect(screen,self.color,(x,y,width,height)) text = font.render(str(self.number),True,BLACK)", "elif event.type == pygame.KEYDOWN: k = chr(event.key) #gimme a CHAR,", "da screeeeeen pygame.display.update() #end the game if gameboard.is_solved() and gameboard.start_t", "(0 , BLACK) def main(): gameboard = Board(board_size) pygame.init() pygame.mixer.quit()", "== 2: self.rotate_up(random.randint(0,board_size-1)) else: self.rotate_down(random.randint(0,board_size-1)) self.game=False self.moves=0 return True def", "i in range(0,self.size): for j in range(0,self.size): if self.content[i][j].number !=", "range(0,self.size): self.content[i][y] = new[i] self.moves+=1 return new def rotate_down(self,x): new", "= (255,255,255) BLACK = (0,0,0) GREEN = (32,200,32) keys =", "with no events allowed pygame.event.set_allowed(pygame.USEREVENT+1) #timer event pygame.event.set_allowed(pygame.KEYDOWN) pygame.event.set_allowed(pygame.QUIT) #4", "k in ['w','a','s','d','q']: #starting game logic if k == \"q\":", "= new[i] self.moves+=1 return new def rotate_up(self,x): new = []", "{ \"w\":\"gameboard.rotate_up(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"a\":\"gameboard.rotate_right(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"s\":\"gameboard.rotate_down(int(pygame.mouse.get_pos()[0]/(width/board_size)))\", \"d\":\"gameboard.rotate_left(int(pygame.mouse.get_pos()[1]/(height/board_size)))\", \"q\":\"gameboard.scramble(scramble_turns)\" } #i guess? 
if", "new.append(self.content[(i-1)%self.size][y]) for i in range(0,self.size): self.content[i][y] = new[i] self.moves+=1 return", "range(0,self.size): if self.content[i][j].number != i+j*self.size+1: return False return True def" ]
[ "if i == 6: continue; print(i); print(i); print(i); print(i); print(i);", "in range(1,11): if i == 6: continue; print(i); print(i); print(i);", "for i in range(1,11): if i == 6: continue; print(i);", "skip개념!!! for i in range(1,11): if i == 6: continue;", "i in range(1,11): if i == 6: continue; print(i); print(i);", "= skip개념!!! for i in range(1,11): if i == 6:", "# for문에서 continue 사용하기, continue = skip개념!!! for i in", "for문에서 continue 사용하기, continue = skip개념!!! for i in range(1,11):", "continue 사용하기, continue = skip개념!!! for i in range(1,11): if", "continue = skip개념!!! for i in range(1,11): if i ==", "range(1,11): if i == 6: continue; print(i); print(i); print(i); print(i);", "사용하기, continue = skip개념!!! for i in range(1,11): if i" ]
[ "ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE ncs6k-mcast-5.2.5.47I ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE ncs6k-xr-5.2.5.47I ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i ASR9K-64 Production Packages -", "ncs5k ncs5500 xrv9k\": # 61117I or 611 or 6.1.1.17I or", "self._subversion = result.group(\"SUBVERSION\") return self._subversion def get_values(self, dictionary, key): for", "provided that the following conditions are met: # # Redistributions", "= re.search(smu_re, self.package_name) if result: self._smu = result.group(\"SMU\") return self._smu", "following disclaimer. # Redistributions in binary form must reproduce the", "from_package_list(pkg_list): software_packages = set() for pkg in pkg_list: software_package =", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "must reproduce the above copyright notice, # this list of", "in data: software_package = SoftwarePackage(line) if software_package.is_valid(): software_packages.add(software_package) return software_packages", "above copyright notice, # this list of conditions and the", "# Extract the package type string before X.X.X pattern =", "Packages External Names Internal Names ncs4k-full-x.iso-6.0.2 ncs4k-mini-x.iso-6.0.2 ncs4k-k9sec.pkg-6.0.2 ncs4k-mpls.pkg-6.0.2 ncs4k-mcast.pkg-6.0.2", "Engineering Packages External Names Internal Names ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE ncs5k-mgbl-3.0.0.0-r60116I ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1.26I", "# # Redistribution and use in source and binary forms,", "# # Redistributions of source code must retain the above", "Extract the package type string before X.X.X.X # For NCS6K", "Names Internal Names asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mcast-x64-2.0.0.0-r61116I asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-bgp-x64-1.0.0.0-r61116I asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mgbl-x64-3.0.0.0-r61116I 
asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE", "ncs5k-xr-6.0.1.16I NCS5500 Production Packages External Names Internal Names ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-eigrp-2.0.0.0-r601", "self._package_type = self.package_name[0:match.start()].replace(self.platform + '-', '') if self._package_type: # Takes", "Packages External Names Internal Names ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-eigrp-2.0.0.0-r601 ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-isis-2.0.0.0-r601 ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1", "self.package_name) if match: # Extract the package type self._package_type =", "notice, # this list of conditions and the following disclaimer.", "ncs5500-mpls-2.0.0.0-r601 ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-ospf-1.0.0.0-r601 ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-parser-1.0.0.0-r601 \"\"\" import re platforms =", "self.smu == other.smu and \\ (self.subversion == other.subversion if self.subversion", "None self._subversion = None @property def platform(self): if not self._platform:", "ASR9K-X64, NCS1K, NCS5K, NCS5500: # Extract the package type string", "platform + \"-\" in self.package_name: self._platform = platform break return", "THIS SOFTWARE, EVEN IF ADVISED OF # THE POSSIBILITY OF", "pattern = '-\\d+\\.\\d+\\.\\d+' if self.platform == 'ncs6k' or \\ self.platform", "= self.get_values(subversion_dict, self.platform) if self.platform and dict_values: # For NCS6K,", "and dict_values: # For NCS6K, only need to consider subversion", "Packages - not finalized yet External Names Internal Names asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm", "dict_values = self.get_values(subversion_dict, self.platform) if self.platform and dict_values: # For", "asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mcast-x64-2.0.0.0-r61116I asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-bgp-x64-1.0.0.0-r61116I asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE 
asr9k-mgbl-x64-3.0.0.0-r61116I asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE asr9k-full-x64-6.1.1.16I asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE asr9k-mini-x64-6.1.1.16I", "# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF", "'-', '') if self._package_type: # Takes care the external to", "self.package_name) # Special handling for mini, full, and sysadmin ISO", "a SMU. if self.platform in [\"asr9k\", \"ncs1k\", \"ncs5k\", \"ncs5500\", \"xrv9k\"]", "ncs6k-mpls.pkg-5.2.4 ncs6k-mpls-5.2.4 ncs6k-sysadmin.iso-5.2.4 ncs6k-sysadmin-5.2.4 ncs6k-full-x.iso-5.2.4 ncs6k-full-x-5.2.4 ncs6k-5.2.5.CSCuy47880.smu ncs6k-5.2.5.CSCuy47880-1.0.0 <- subversion", "self.package_name: self._platform = platform break return self._platform @property def package_type(self):", "care the external to internal name matching # Example, ncs6k-mgbl.pkg-5.2.5", "ncs5500-mgbl-3.0.0.0-r601 ncs5500-mini-x.iso-6.0.1 ncs5500-xr-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601 ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-2.0.0.0-r601 ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-ospf-1.0.0.0-r601 ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1", "ncs6k\": # 5.2.4 or 5.2.4.47I re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"), } smu_re = re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\")", "self._package_type = None self._version = None self._smu = None self._subversion", "to_match) if result: self._subversion = result.group(\"SUBVERSION\") return self._subversion def get_values(self,", "self.package_name = package_name self._platform = None self._package_type = None self._version", "THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR", "result def __hash__(self): return hash(\"{}{}{}{}{}\".format( self.platform, self.package_type, self.version, self.smu, self.subversion))", "NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF", "to_match = self.package_name.replace(self.platform, '') result = re.search(dict_values, to_match) 
if result:", "Packages External Names Internal Names ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE ncs6k-mcast-5.2.5.47I ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE ncs6k-xr-5.2.5.47I ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu", "in self.package_name: match = re.search(pattern, self.package_name) # Special handling for", "-> mini-x self._package_type = self._package_type.replace('.pkg', '').replace('.iso', '') return self._package_type @property", "and \\ (self.package_type == other.package_type) and \\ self.version == other.version", "EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE", "THE POSSIBILITY OF SUCH DAMAGE. # ============================================================================= \"\"\" NCS4K Production", "Cisco Systems # All rights reserved. # # Redistribution and", "software_package = SoftwarePackage(pkg) if software_package.is_valid(): \"\"\" for debugging print('package_name', software_package.package_name,", "self.platform, self.package_type, self.version, self.smu, self.subversion)) @staticmethod def from_show_cmd(cmd): software_packages =", "def __hash__(self): return hash(\"{}{}{}{}{}\".format( self.platform, self.package_type, self.version, self.smu, self.subversion)) @staticmethod", "that the following conditions are met: # # Redistributions of", "source and binary forms, with or without # modification, are", "POSSIBILITY OF SUCH DAMAGE. # ============================================================================= \"\"\" NCS4K Production Packages", "this list of conditions and the following disclaimer. # Redistributions", "conditions are met: # # Redistributions of source code must", "consider subversion if it is a SMU. 
if self.platform in", "set() data = cmd.split() for line in data: software_package =", "before X.X.X pattern = '-\\d+\\.\\d+\\.\\d+' if self.platform == 'ncs6k' or", "\"ncs1k\", \"ncs5k\", \"ncs5500\", \"xrv9k\"] or self.smu: to_match = self.package_name.replace(self.platform, '')", "For ASR9K-X64, NCS1K, NCS5K, NCS5500: # Extract the package type", "INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER", "def __init__(self, package_name): self.package_name = package_name self._platform = None self._package_type", "re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\") subversion_dict = {\"asr9k ncs1k ncs5k ncs5500 xrv9k\": re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"), #", "5.2.4 or 5.2.4.47I re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"), } smu_re = re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\") subversion_dict =", "COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT,", "return self._subversion def get_values(self, dictionary, key): for keys in dictionary.keys():", "CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", "ncs5500-mini-x.iso-6.0.1 -> mini-x self._package_type = self._package_type.replace('.pkg', '').replace('.iso', '') return self._package_type", "ncs6k-mcast-5.2.4 ncs6k-mgbl.pkg-5.2.4 ncs6k-mgbl-5.2.4 ncs6k-mini-x.iso-5.2.4 ncs6k-mini-x-5.2.4 ncs6k-mpls.pkg-5.2.4 ncs6k-mpls-5.2.4 ncs6k-sysadmin.iso-5.2.4 ncs6k-sysadmin-5.2.4 ncs6k-full-x.iso-5.2.4", "= self.get_values(version_dict, self.platform) if self.platform and dict_values: to_match = self.package_name.replace(self.platform,", "} smu_re = re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\") subversion_dict = {\"asr9k ncs1k ncs5k ncs5500", "are met: # # Redistributions of source code must retain", "software_package.platform, 'package_type', software_package.package_type, 'version', software_package.version, 'smu', software_package.smu, 'subversion', software_package.subversion) \"\"\"", "platform in platforms: if platform + \"-\" in 
self.package_name: self._platform", "@property def platform(self): if not self._platform: for platform in platforms:", "== other.subversion if self.subversion and other.subversion else True) return result", "debugging print('package_name', software_package.package_name, 'platform', software_package.platform, 'package_type', software_package.package_type, 'version', software_package.version, 'smu',", "NCS5K, NCS5500 # Example: ncs5500-mini-x.iso-6.0.1, asr9k-full-x64.iso-6.1.1 # Package type string", "DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE", "software_package = SoftwarePackage(line) if software_package.is_valid(): software_packages.add(software_package) return software_packages @staticmethod def", "\\ (self.subversion == other.subversion if self.subversion and other.subversion else True)", "self.subversion)) @staticmethod def from_show_cmd(cmd): software_packages = set() data = cmd.split()", "the following conditions are met: # # Redistributions of source", "ncs6k-mgbl.pkg-5.2.4 ncs6k-mgbl-5.2.4 ncs6k-mini-x.iso-5.2.4 ncs6k-mini-x-5.2.4 ncs6k-mpls.pkg-5.2.4 ncs6k-mpls-5.2.4 ncs6k-sysadmin.iso-5.2.4 ncs6k-sysadmin-5.2.4 ncs6k-full-x.iso-5.2.4 ncs6k-full-x-5.2.4", "AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR IMPLIED", "= set() for pkg in pkg_list: software_package = SoftwarePackage(pkg) if", "self.platform == other.platform and \\ (self.package_type == other.package_type) and \\", "asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE asr9k-mini-x64-6.1.1.16I NCS5K Production Packages External Names Internal Names ncs5k-sysadmin.iso-6.0.1", "External Name: ncs5k-goldenk9-x.iso-6.3.1.11I.0, Internal Name: ncs5k-goldenk9-x-6.3.1.11I if not match and", "and binary forms, with or without # modification, are permitted", "ISO on ASR9K-X64, NCS1K, NCS5K, NCS5500 # Example: ncs5500-mini-x.iso-6.0.1, asr9k-full-x64.iso-6.1.1", "Name: ncs5k-goldenk9-x-6.3.1.11I if not match and sum([x in self.package_name for", "None self._version = None self._smu = None 
self._subversion = None", "re.search(dict_values, to_match) if result: self._subversion = result.group(\"SUBVERSION\") return self._subversion def", "External Names Internal Names ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE ncs6k-mcast-5.2.5.47I ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE ncs6k-xr-5.2.5.47I ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i", "if platform + \"-\" in self.package_name: self._platform = platform break", "other.smu and \\ (self.subversion == other.subversion if self.subversion and other.subversion", "External Names Internal Names asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm asr9k-mcast-x64-2.0.0.0-r611 asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm asr9k-bgp-x64-1.0.0.0-r611 asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm asr9k-mgbl-x64-3.0.0.0-r611", "re platforms = ['asr9k', 'ncs1k', 'ncs4k', 'ncs5k', 'ncs5500', 'ncs6k', 'xrv9k']", "def from_package_list(pkg_list): software_packages = set() for pkg in pkg_list: software_package", "Engineering Packages External Names Internal Names asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mcast-x64-2.0.0.0-r61116I asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-bgp-x64-1.0.0.0-r61116I", "True) return result def __hash__(self): return hash(\"{}{}{}{}{}\".format( self.platform, self.package_type, self.version,", "Special handling for mini, full, and sysadmin ISO on ASR9K-X64,", "\"\"\" NCS4K Production Packages External Names Internal Names ncs4k-full-x.iso-6.0.2 ncs4k-mini-x.iso-6.0.2", "\"-\" in self.package_name: self._platform = platform break return self._platform @property", "\"\"\" import re platforms = ['asr9k', 'ncs1k', 'ncs4k', 'ncs5k', 'ncs5500',", "subversion if it is a SMU. 
if self.platform in [\"asr9k\",", "of source code must retain the above copyright notice, #", "platforms: if platform + \"-\" in self.package_name: self._platform = platform", "SoftwarePackage(line) if software_package.is_valid(): software_packages.add(software_package) return software_packages @staticmethod def from_package_list(pkg_list): software_packages", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE", "for keys in dictionary.keys(): if key in keys.split(): return dictionary.get(keys)", "self.package_name: match = re.search(pattern, self.package_name) # Special handling for mini,", "package_type(self): if not self._package_type: # For ASR9K-X64, NCS1K, NCS5K, NCS5500:", "def is_valid(self): return self.platform and self.version and (self.package_type or self.smu)", "ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-isis-2.0.0.0-r601 ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-k9sec-2.0.0.0-r601 ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-m2m-2.0.0.0-r601 ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mgbl-3.0.0.0-r601 ncs5500-mini-x.iso-6.0.1 ncs5500-xr-6.0.1", "{\"asr9k ncs1k ncs5k ncs5500 xrv9k\": # 61117I or 611 or", "# this list of conditions and the following disclaimer. 
#", "notice, # this list of conditions and the following disclaimer", "re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"), \"ncs4k ncs6k\": # 5.2.4 or 5.2.4.47I re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"), } smu_re", "the external to internal name matching # Example, ncs6k-mgbl.pkg-5.2.5 ->", "def version(self): if not self._version: dict_values = self.get_values(version_dict, self.platform) if", "GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR", "WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF", "return dictionary.get(keys) return None def is_valid(self): return self.platform and self.version", "# For ASR9K-X64, NCS1K, NCS5K, NCS5500: # Extract the package", "the package type self._package_type = self.package_name[0:match.start()].replace(self.platform + '-', '') if", "External Names Internal Names ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-eigrp-2.0.0.0-r601 ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-isis-2.0.0.0-r601 ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-k9sec-2.0.0.0-r601", "Internal Names ncs6k-doc.pkg-5.2.4 ncs6k-doc-5.2.4 ncs6k-li.pkg-5.2.4 ncs6k-li-5.2.4 ncs6k-mcast.pkg-5.2.4 ncs6k-mcast-5.2.4 ncs6k-mgbl.pkg-5.2.4 ncs6k-mgbl-5.2.4", "asr9k-mcast-x64-2.0.0.0-r611 asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm asr9k-bgp-x64-1.0.0.0-r611 asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm asr9k-mgbl-x64-3.0.0.0-r611 asr9k-full-x64.iso-6.1.1 asr9k-xr-6.1.1 asr9k-mini-x64.iso-6.1.1 asr9k-xr-6.1.1 Engineering", "return self._version @property def smu(self): if not self._smu: result =", "met: # # Redistributions of source code must retain the", "__init__(self, package_name): self.package_name = package_name self._platform = None self._package_type =", "= None self._version = None self._smu = None self._subversion =", "hash(\"{}{}{}{}{}\".format( self.platform, self.package_type, self.version, self.smu, self.subversion)) @staticmethod def from_show_cmd(cmd): software_packages", 
"Names Internal Names ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE ncs6k-mcast-5.2.5.47I ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE ncs6k-xr-5.2.5.47I ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i ASR9K-64", "ncs6k-doc-5.2.4 ncs6k-li.pkg-5.2.4 ncs6k-li-5.2.4 ncs6k-mcast.pkg-5.2.4 ncs6k-mcast-5.2.4 ncs6k-mgbl.pkg-5.2.4 ncs6k-mgbl-5.2.4 ncs6k-mini-x.iso-5.2.4 ncs6k-mini-x-5.2.4 ncs6k-mpls.pkg-5.2.4", "BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,", "'') result = re.search(dict_values, to_match) if result: self._subversion = result.group(\"SUBVERSION\")", "re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"), # 0.0.4 } class SoftwarePackage(object): def __init__(self, package_name): self.package_name", "= SoftwarePackage(line) if software_package.is_valid(): software_packages.add(software_package) return software_packages @staticmethod def from_package_list(pkg_list):", "self.platform == 'ncs4k' else '-\\d\\.\\d\\.\\d.\\d' if self.platform and self.platform in", "USE OF THIS SOFTWARE, EVEN IF ADVISED OF # THE", "not self._package_type: # For ASR9K-X64, NCS1K, NCS5K, NCS5500: # Extract", "handling for mini, full, and sysadmin ISO on ASR9K-X64, NCS1K,", "if self.platform and self.platform in self.package_name: match = re.search(pattern, self.package_name)", "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "disclaimer in the documentation # and/or other materials provided with", "ncs6k-mini-x-5.2.4 ncs6k-mpls.pkg-5.2.4 ncs6k-mpls-5.2.4 ncs6k-sysadmin.iso-5.2.4 ncs6k-sysadmin-5.2.4 ncs6k-full-x.iso-5.2.4 ncs6k-full-x-5.2.4 ncs6k-5.2.5.CSCuy47880.smu ncs6k-5.2.5.CSCuy47880-1.0.0 <-", "\"AS IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,", "[\"asr9k\", \"ncs1k\", \"ncs5k\", \"ncs5500\", \"xrv9k\"] or self.smu: to_match = self.package_name.replace(self.platform,", "re.search('-\\d+\\.\\d+\\.\\d+', self.package_name) if match: # Extract the package type self._package_type", "version(self): if not 
self._version: dict_values = self.get_values(version_dict, self.platform) if self.platform", "Packages External Names Internal Names ncs6k-doc.pkg-5.2.4 ncs6k-doc-5.2.4 ncs6k-li.pkg-5.2.4 ncs6k-li-5.2.4 ncs6k-mcast.pkg-5.2.4", "Names Internal Names ncs6k-doc.pkg-5.2.4 ncs6k-doc-5.2.4 ncs6k-li.pkg-5.2.4 ncs6k-li-5.2.4 ncs6k-mcast.pkg-5.2.4 ncs6k-mcast-5.2.4 ncs6k-mgbl.pkg-5.2.4", "self._version: dict_values = self.get_values(version_dict, self.platform) if self.platform and dict_values: to_match", "BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR", "@staticmethod def from_show_cmd(cmd): software_packages = set() data = cmd.split() for", "def platform(self): if not self._platform: for platform in platforms: if", "TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "ncs5k-xr-6.0.1 ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mcast-2.0.0.0-r601 ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mgbl-2.0.0.0-r601 ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mpls-2.0.0.0-r601 ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-k9sec-2.0.0.0-r601 ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1", "'').replace('.iso', '') return self._package_type @property def version(self): if not self._version:", "CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES,", "ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mpls-2.0.0.0-r601 ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-k9sec-2.0.0.0-r601 ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-isis-2.0.0.0-r601 ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-ospf-2.0.0.0-r601 Engineering Packages", "(self.package_type or self.smu) def __eq__(self, other): result = self.platform ==", "the following disclaimer. 
# Redistributions in binary form must reproduce", "= result.group(\"SMU\") return self._smu @property def subversion(self): if not self._subversion:", "'smu', software_package.smu, 'subversion', software_package.subversion) \"\"\" software_packages.add(software_package) return software_packages def __repr__(self):", "self._package_type.replace('.pkg', '').replace('.iso', '') return self._package_type @property def version(self): if not", "not finalized yet External Names Internal Names asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm asr9k-mcast-x64-2.0.0.0-r611 asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm", "== 'ncs4k' else '-\\d\\.\\d\\.\\d.\\d' if self.platform and self.platform in self.package_name:", "key): for keys in dictionary.keys(): if key in keys.split(): return", "set() for pkg in pkg_list: software_package = SoftwarePackage(pkg) if software_package.is_valid():", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL", "ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE ncs6k-xr-5.2.5.47I ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i ASR9K-64 Production Packages - not finalized", "asr9k-xr-6.1.1 Engineering Packages External Names Internal Names asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mcast-x64-2.0.0.0-r61116I asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE", "Internal Names asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mcast-x64-2.0.0.0-r61116I asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-bgp-x64-1.0.0.0-r61116I asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mgbl-x64-3.0.0.0-r61116I asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE asr9k-full-x64-6.1.1.16I", "# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "package type string before X.X.X.X # For NCS6K # Extract", "get_values(self, dictionary, key): for keys in dictionary.keys(): if key in", "OF # THE POSSIBILITY OF SUCH DAMAGE. 
# ============================================================================= \"\"\"", "CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #", "Production Packages External Names Internal Names ncs6k-doc.pkg-5.2.4 ncs6k-doc-5.2.4 ncs6k-li.pkg-5.2.4 ncs6k-li-5.2.4", "re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"), } smu_re = re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\") subversion_dict = {\"asr9k ncs1k ncs5k", "ncs5500-parser-1.0.0.0-r601 \"\"\" import re platforms = ['asr9k', 'ncs1k', 'ncs4k', 'ncs5k',", "ncs6k-mini-x.iso-5.2.4 ncs6k-mini-x-5.2.4 ncs6k-mpls.pkg-5.2.4 ncs6k-mpls-5.2.4 ncs6k-sysadmin.iso-5.2.4 ncs6k-sysadmin-5.2.4 ncs6k-full-x.iso-5.2.4 ncs6k-full-x-5.2.4 ncs6k-5.2.5.CSCuy47880.smu ncs6k-5.2.5.CSCuy47880-1.0.0", "ncs5k-isis-2.0.0.0-r601 ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-ospf-2.0.0.0-r601 Engineering Packages External Names Internal Names ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE", "AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT,", "part version string # External Name: ncs5k-goldenk9-x.iso-6.3.1.11I.0, Internal Name: ncs5k-goldenk9-x-6.3.1.11I", "= re.search(dict_values, to_match) if result: self._subversion = result.group(\"SUBVERSION\") return self._subversion", "for line in data: software_package = SoftwarePackage(line) if software_package.is_valid(): software_packages.add(software_package)", "ncs5500-ospf-1.0.0.0-r601 ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-parser-1.0.0.0-r601 \"\"\" import re platforms = ['asr9k', 'ncs1k',", "ncs5k-mini-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mcast-2.0.0.0-r601 ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mgbl-2.0.0.0-r601 ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mpls-2.0.0.0-r601 ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-k9sec-2.0.0.0-r601", "Redistributions of source code must retain the above copyright notice,", "or \\ self.platform == 'ncs4k' else '-\\d\\.\\d\\.\\d.\\d' if self.platform and", 
"Names ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-eigrp-2.0.0.0-r601 ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-isis-2.0.0.0-r601 ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-k9sec-2.0.0.0-r601 ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-m2m-2.0.0.0-r601 ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1", "External Names Internal Names ncs4k-full-x.iso-6.0.2 ncs4k-mini-x.iso-6.0.2 ncs4k-k9sec.pkg-6.0.2 ncs4k-mpls.pkg-6.0.2 ncs4k-mcast.pkg-6.0.2 ncs4k-mgbl.pkg-6.0.2", "ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601 ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-2.0.0.0-r601 ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-ospf-1.0.0.0-r601 ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-parser-1.0.0.0-r601 \"\"\" import", "re.search(smu_re, self.package_name) if result: self._smu = result.group(\"SMU\") return self._smu @property", "asr9k-full-x64.iso-6.1.1 # Package type string is before the 3 part", "'package_type', software_package.package_type, 'version', software_package.version, 'smu', software_package.smu, 'subversion', software_package.subversion) \"\"\" software_packages.add(software_package)", "SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #", "documentation # and/or other materials provided with the distribution. #", "self._package_type @property def version(self): if not self._version: dict_values = self.get_values(version_dict,", "self._version = None self._smu = None self._subversion = None @property", "ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE ncs5k-xr-6.0.1.16I NCS5500 Production Packages External Names Internal Names ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1", "x in ['full', 'mini', 'sysadmin', 'goldenk9']]) > 0: # Use", "IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE", "(c) 2016, Cisco Systems # All rights reserved. 
# #", "or without # modification, are permitted provided that the following", "HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN", "Redistribution and use in source and binary forms, with or", "source code must retain the above copyright notice, # this", "# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT", "ncs6k-xr-5.2.5.47I ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i ASR9K-64 Production Packages - not finalized yet", "asr9k-full-x64-6.1.1.16I asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE asr9k-mini-x64-6.1.1.16I NCS5K Production Packages External Names Internal Names", "if result: self._version = result.group(\"VERSION\") return self._version @property def smu(self):", "software_packages = set() for pkg in pkg_list: software_package = SoftwarePackage(pkg)", "print('package_name', software_package.package_name, 'platform', software_package.platform, 'package_type', software_package.package_type, 'version', software_package.version, 'smu', software_package.smu,", "self.package_type, self.version, self.smu, self.subversion)) @staticmethod def from_show_cmd(cmd): software_packages = set()", "self._platform = platform break return self._platform @property def package_type(self): if", "on ASR9K-X64, NCS1K, NCS5K, NCS5500 # Example: ncs5500-mini-x.iso-6.0.1, asr9k-full-x64.iso-6.1.1 #", "sum([x in self.package_name for x in ['full', 'mini', 'sysadmin', 'goldenk9']])", "asr9k-bgp-x64-1.0.0.0-r611 asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm asr9k-mgbl-x64-3.0.0.0-r611 asr9k-full-x64.iso-6.1.1 asr9k-xr-6.1.1 asr9k-mini-x64.iso-6.1.1 asr9k-xr-6.1.1 Engineering Packages External", "2.0.0.0 \"ncs4k ncs6k\": re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"), # 0.0.4 } class SoftwarePackage(object): def", "platform break return self._platform @property def package_type(self): if not self._package_type:", "software_package.package_type, 'version', software_package.version, 'smu', software_package.smu, 'subversion', 
software_package.subversion) \"\"\" software_packages.add(software_package) return", "611 or 6.1.1.17I or 6.1.1 re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"), \"ncs4k ncs6k\": # 5.2.4", "External Names Internal Names asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mcast-x64-2.0.0.0-r61116I asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-bgp-x64-1.0.0.0-r61116I asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mgbl-x64-3.0.0.0-r61116I", "self.smu: to_match = self.package_name.replace(self.platform, '') result = re.search(dict_values, to_match) if", "self._smu = None self._subversion = None @property def platform(self): if", "None self._package_type = None self._version = None self._smu = None", "Names Internal Names ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE ncs5k-mgbl-3.0.0.0-r60116I ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1.26I ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE ncs5k-xr-6.0.1.16I NCS5500", "type string is before the 3 part version string #", "# Package type string is before the 3 part version", "3 part version string # External Name: ncs5k-goldenk9-x.iso-6.3.1.11I.0, Internal Name:", "USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED", "ncs5k-mpls-2.0.0.0-r601 ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-k9sec-2.0.0.0-r601 ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-isis-2.0.0.0-r601 ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-ospf-2.0.0.0-r601 Engineering Packages External", "ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-2.0.0.0-r601 ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-ospf-1.0.0.0-r601 ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-parser-1.0.0.0-r601 \"\"\" import re platforms", "rights reserved. 
# # Redistribution and use in source and", "# 0.0.4 } class SoftwarePackage(object): def __init__(self, package_name): self.package_name =", "= self.package_name.replace(self.platform, '') result = re.search(dict_values, to_match) if result: self._version", "only need to consider subversion if it is a SMU.", "ncs5500-xr-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601 ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-2.0.0.0-r601 ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-ospf-1.0.0.0-r601 ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-parser-1.0.0.0-r601 \"\"\"", "result.group(\"SMU\") return self._smu @property def subversion(self): if not self._subversion: dict_values", "return software_packages def __repr__(self): return self.package_name def __str__(self): return self.__repr__()", "ncs6k-doc.pkg-5.2.4 ncs6k-doc-5.2.4 ncs6k-li.pkg-5.2.4 ncs6k-li-5.2.4 ncs6k-mcast.pkg-5.2.4 ncs6k-mcast-5.2.4 ncs6k-mgbl.pkg-5.2.4 ncs6k-mgbl-5.2.4 ncs6k-mini-x.iso-5.2.4 ncs6k-mini-x-5.2.4", "conditions and the following disclaimer. 
# Redistributions in binary form", "ncs5k-mcast-2.0.0.0-r601 ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mgbl-2.0.0.0-r601 ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mpls-2.0.0.0-r601 ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-k9sec-2.0.0.0-r601 ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-isis-2.0.0.0-r601 ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1", "IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR", "# Use the three part match for these ISO packages", "Name: ncs5k-goldenk9-x.iso-6.3.1.11I.0, Internal Name: ncs5k-goldenk9-x-6.3.1.11I if not match and sum([x", "Extract the package type string before X.X.X pattern = '-\\d+\\.\\d+\\.\\d+'", "Internal Names asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm asr9k-mcast-x64-2.0.0.0-r611 asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm asr9k-bgp-x64-1.0.0.0-r611 asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm asr9k-mgbl-x64-3.0.0.0-r611 asr9k-full-x64.iso-6.1.1 asr9k-xr-6.1.1", "binary form must reproduce the above copyright notice, # this", "OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE", "Internal Names ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE ncs6k-mcast-5.2.5.47I ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE ncs6k-xr-5.2.5.47I ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i ASR9K-64 Production", "Package type string is before the 3 part version string", "ASR9K-64 Production Packages - not finalized yet External Names Internal", "6.1.1.17I or 6.1.1 re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"), \"ncs4k ncs6k\": # 5.2.4 or 5.2.4.47I", "string before X.X.X pattern = '-\\d+\\.\\d+\\.\\d+' if self.platform == 'ncs6k'", "'ncs5k', 'ncs5500', 'ncs6k', 'xrv9k'] version_dict = {\"asr9k ncs1k ncs5k ncs5500", "other.subversion if self.subversion and other.subversion else True) return result def", "PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" #", "ncs5500-isis-2.0.0.0-r601 ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-k9sec-2.0.0.0-r601 
ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-m2m-2.0.0.0-r601 ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mgbl-3.0.0.0-r601 ncs5500-mini-x.iso-6.0.1 ncs5500-xr-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1", "match = re.search('-\\d+\\.\\d+\\.\\d+', self.package_name) if match: # Extract the package", "INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", "CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #", "OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS", "dictionary.keys(): if key in keys.split(): return dictionary.get(keys) return None def", "Internal Name: ncs5k-goldenk9-x-6.3.1.11I if not match and sum([x in self.package_name", "PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY", "TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY", "xrv9k\": # 61117I or 611 or 6.1.1.17I or 6.1.1 re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"),", "NCS5K, NCS5500: # Extract the package type string before X.X.X.X", "self.platform) if self.platform and dict_values: # For NCS6K, only need", "ncs5k-sysadmin-6.0.1 ncs5k-full-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mini-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mcast-2.0.0.0-r601 ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mgbl-2.0.0.0-r601 ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1", "result = self.platform == other.platform and \\ (self.package_type == other.package_type)", "# THE POSSIBILITY OF SUCH DAMAGE. 
# ============================================================================= \"\"\" NCS4K", "the three part match for these ISO packages match =", "in source and binary forms, with or without # modification,", "# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,", "'ncs1k', 'ncs4k', 'ncs5k', 'ncs5500', 'ncs6k', 'xrv9k'] version_dict = {\"asr9k ncs1k", "ncs6k-li-5.2.4 ncs6k-mcast.pkg-5.2.4 ncs6k-mcast-5.2.4 ncs6k-mgbl.pkg-5.2.4 ncs6k-mgbl-5.2.4 ncs6k-mini-x.iso-5.2.4 ncs6k-mini-x-5.2.4 ncs6k-mpls.pkg-5.2.4 ncs6k-mpls-5.2.4 ncs6k-sysadmin.iso-5.2.4", "A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL", "subversion added Engineering Packages External Names Internal Names ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE ncs6k-mcast-5.2.5.47I", "other.subversion else True) return result def __hash__(self): return hash(\"{}{}{}{}{}\".format( self.platform,", "AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "Production Packages External Names Internal Names ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1 ncs5k-full-x.iso-6.0.1 ncs5k-xr-6.0.1", "permitted provided that the following conditions are met: # #", "NCS5500 # Example: ncs5500-mini-x.iso-6.0.1, asr9k-full-x64.iso-6.1.1 # Package type string is", "Names Internal Names asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm asr9k-mcast-x64-2.0.0.0-r611 asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm asr9k-bgp-x64-1.0.0.0-r611 asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm asr9k-mgbl-x64-3.0.0.0-r611 asr9k-full-x64.iso-6.1.1", "THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF #", "version string # External Name: ncs5k-goldenk9-x.iso-6.3.1.11I.0, Internal Name: ncs5k-goldenk9-x-6.3.1.11I if", "asr9k-bgp-x64-1.0.0.0-r61116I asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mgbl-x64-3.0.0.0-r61116I asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE asr9k-full-x64-6.1.1.16I asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE asr9k-mini-x64-6.1.1.16I NCS5K Production Packages", "if not self._subversion: dict_values = 
self.get_values(subversion_dict, self.platform) if self.platform and", "============================================================================= \"\"\" NCS4K Production Packages External Names Internal Names ncs4k-full-x.iso-6.0.2", "in the documentation # and/or other materials provided with the", "of conditions and the following disclaimer in the documentation #", "software_package.is_valid(): \"\"\" for debugging print('package_name', software_package.package_name, 'platform', software_package.platform, 'package_type', software_package.package_type,", "ncs4k-full-x.iso-6.0.2 ncs4k-mini-x.iso-6.0.2 ncs4k-k9sec.pkg-6.0.2 ncs4k-mpls.pkg-6.0.2 ncs4k-mcast.pkg-6.0.2 ncs4k-mgbl.pkg-6.0.2 NCS6K Production Packages External", "\"\"\" software_packages.add(software_package) return software_packages def __repr__(self): return self.package_name def __str__(self):", "self.platform) if self.platform and dict_values: to_match = self.package_name.replace(self.platform, '') result", "ncs1k ncs5k ncs5500 xrv9k\": re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"), # 2.0.0.0 \"ncs4k ncs6k\": re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"),", "for pkg in pkg_list: software_package = SoftwarePackage(pkg) if software_package.is_valid(): \"\"\"", "full, and sysadmin ISO on ASR9K-X64, NCS1K, NCS5K, NCS5500 #", "if not self._smu: result = re.search(smu_re, self.package_name) if result: self._smu", "form must reproduce the above copyright notice, # this list", "\"ncs4k ncs6k\": re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"), # 0.0.4 } class SoftwarePackage(object): def __init__(self,", "# For NCS6K, only need to consider subversion if it", "if key in keys.split(): return dictionary.get(keys) return None def is_valid(self):", "in pkg_list: software_package = SoftwarePackage(pkg) if software_package.is_valid(): \"\"\" for debugging", "use in source and binary forms, with or without #", "'') if self._package_type: # Takes care the external to internal", "if 
self.platform and dict_values: # For NCS6K, only need to", "asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm asr9k-mgbl-x64-3.0.0.0-r611 asr9k-full-x64.iso-6.1.1 asr9k-xr-6.1.1 asr9k-mini-x64.iso-6.1.1 asr9k-xr-6.1.1 Engineering Packages External Names", "package_name): self.package_name = package_name self._platform = None self._package_type = None", "ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mgbl-2.0.0.0-r601 ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mpls-2.0.0.0-r601 ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-k9sec-2.0.0.0-r601 ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-isis-2.0.0.0-r601 ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-ospf-2.0.0.0-r601", "= None self._smu = None self._subversion = None @property def", "Packages External Names Internal Names asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mcast-x64-2.0.0.0-r61116I asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-bgp-x64-1.0.0.0-r61116I asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE", "ncs5k ncs5500 xrv9k\": re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"), # 2.0.0.0 \"ncs4k ncs6k\": re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"), #", "\"ncs5500\", \"xrv9k\"] or self.smu: to_match = self.package_name.replace(self.platform, '') result =", "if self._package_type: # Takes care the external to internal name", "and sysadmin ISO on ASR9K-X64, NCS1K, NCS5K, NCS5500 # Example:", "ncs4k-mpls.pkg-6.0.2 ncs4k-mcast.pkg-6.0.2 ncs4k-mgbl.pkg-6.0.2 NCS6K Production Packages External Names Internal Names", "return self._package_type @property def version(self): if not self._version: dict_values =", "result: self._smu = result.group(\"SMU\") return self._smu @property def subversion(self): if", "STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING", "self._platform @property def package_type(self): if not self._package_type: # For ASR9K-X64,", "software_packages.add(software_package) return software_packages 
@staticmethod def from_package_list(pkg_list): software_packages = set() for", "EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY", "and \\ (self.subversion == other.subversion if self.subversion and other.subversion else", "def __eq__(self, other): result = self.platform == other.platform and \\", "2016, Cisco Systems # All rights reserved. # # Redistribution", "self._platform = None self._package_type = None self._version = None self._smu", "and \\ self.version == other.version and \\ self.smu == other.smu", "ncs6k\": re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"), # 0.0.4 } class SoftwarePackage(object): def __init__(self, package_name):", "code must retain the above copyright notice, # this list", "# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER", "Example, ncs6k-mgbl.pkg-5.2.5 -> mgbl, ncs5500-mini-x.iso-6.0.1 -> mini-x self._package_type = self._package_type.replace('.pkg',", "NCS6K, only need to consider subversion if it is a", "return self.platform and self.version and (self.package_type or self.smu) def __eq__(self,", "pkg_list: software_package = SoftwarePackage(pkg) if software_package.is_valid(): \"\"\" for debugging print('package_name',", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES", "disclaimer. # Redistributions in binary form must reproduce the above", "string is before the 3 part version string # External", "== other.smu and \\ (self.subversion == other.subversion if self.subversion and", "before the 3 part version string # External Name: ncs5k-goldenk9-x.iso-6.3.1.11I.0,", "ncs4k-mgbl.pkg-6.0.2 NCS6K Production Packages External Names Internal Names ncs6k-doc.pkg-5.2.4 ncs6k-doc-5.2.4", "other materials provided with the distribution. 
# THIS SOFTWARE IS", "to internal name matching # Example, ncs6k-mgbl.pkg-5.2.5 -> mgbl, ncs5500-mini-x.iso-6.0.1", "self.package_name for x in ['full', 'mini', 'sysadmin', 'goldenk9']]) > 0:", "Names Internal Names ncs4k-full-x.iso-6.0.2 ncs4k-mini-x.iso-6.0.2 ncs4k-k9sec.pkg-6.0.2 ncs4k-mpls.pkg-6.0.2 ncs4k-mcast.pkg-6.0.2 ncs4k-mgbl.pkg-6.0.2 NCS6K", "ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-k9sec-2.0.0.0-r601 ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-isis-2.0.0.0-r601 ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-ospf-2.0.0.0-r601 Engineering Packages External Names", "== other.version and \\ self.smu == other.smu and \\ (self.subversion", "asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mgbl-x64-3.0.0.0-r61116I asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE asr9k-full-x64-6.1.1.16I asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE asr9k-mini-x64-6.1.1.16I NCS5K Production Packages External", "= re.search(dict_values, to_match) if result: self._version = result.group(\"VERSION\") return self._version", "OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED", "For NCS6K, only need to consider subversion if it is", "OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "return self._smu @property def subversion(self): if not self._subversion: dict_values =", "self.platform and self.version and (self.package_type or self.smu) def __eq__(self, other):", "Names ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE ncs5k-mgbl-3.0.0.0-r60116I ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1.26I ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE ncs5k-xr-6.0.1.16I NCS5500 Production Packages", "from_show_cmd(cmd): software_packages = set() data = cmd.split() for line in", "LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION)", "keys.split(): return dictionary.get(keys) return None def is_valid(self): return self.platform and", "<gh_stars>0 # ============================================================================= # # Copyright (c) 2016, Cisco 
Systems", "NCS5K Production Packages External Names Internal Names ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1 ncs5k-full-x.iso-6.0.1", "ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-k9sec-2.0.0.0-r601 ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-m2m-2.0.0.0-r601 ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mgbl-3.0.0.0-r601 ncs5500-mini-x.iso-6.0.1 ncs5500-xr-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601", "if self.platform == 'ncs6k' or \\ self.platform == 'ncs4k' else", "with or without # modification, are permitted provided that the", "<- subversion added Engineering Packages External Names Internal Names ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE", "Packages External Names Internal Names ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE ncs5k-mgbl-3.0.0.0-r60116I ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1.26I ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE", "= None self._subversion = None @property def platform(self): if not", "for platform in platforms: if platform + \"-\" in self.package_name:", "def from_show_cmd(cmd): software_packages = set() data = cmd.split() for line", "# 2.0.0.0 \"ncs4k ncs6k\": re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"), # 0.0.4 } class SoftwarePackage(object):", "'version', software_package.version, 'smu', software_package.smu, 'subversion', software_package.subversion) \"\"\" software_packages.add(software_package) return software_packages", "{\"asr9k ncs1k ncs5k ncs5500 xrv9k\": re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"), # 2.0.0.0 \"ncs4k ncs6k\":", "# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR", "to_match) if result: self._version = result.group(\"VERSION\") return self._version @property def", "def subversion(self): if not self._subversion: dict_values = self.get_values(subversion_dict, self.platform) if", "asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-bgp-x64-1.0.0.0-r61116I 
asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mgbl-x64-3.0.0.0-r61116I asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE asr9k-full-x64-6.1.1.16I asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE asr9k-mini-x64-6.1.1.16I NCS5K Production", "# ============================================================================= # # Copyright (c) 2016, Cisco Systems #", "OF SUCH DAMAGE. # ============================================================================= \"\"\" NCS4K Production Packages External", "ncs5500-m2m-2.0.0.0-r601 ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mgbl-3.0.0.0-r601 ncs5500-mini-x.iso-6.0.1 ncs5500-xr-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601 ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-2.0.0.0-r601 ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1", "self.smu) def __eq__(self, other): result = self.platform == other.platform and", "THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY", "ncs6k-full-x.iso-5.2.4 ncs6k-full-x-5.2.4 ncs6k-5.2.5.CSCuy47880.smu ncs6k-5.2.5.CSCuy47880-1.0.0 <- subversion added Engineering Packages External", "Internal Names ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-eigrp-2.0.0.0-r601 ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-isis-2.0.0.0-r601 ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-k9sec-2.0.0.0-r601 ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-m2m-2.0.0.0-r601", "platforms = ['asr9k', 'ncs1k', 'ncs4k', 'ncs5k', 'ncs5500', 'ncs6k', 'xrv9k'] version_dict", "return hash(\"{}{}{}{}{}\".format( self.platform, self.package_type, self.version, self.smu, self.subversion)) @staticmethod def from_show_cmd(cmd):", "'') result = re.search(dict_values, to_match) if result: self._version = result.group(\"VERSION\")", "HOLDER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT,", "Production Packages - not finalized yet External Names Internal Names", "ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-ospf-1.0.0.0-r601 
ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-parser-1.0.0.0-r601 \"\"\" import re platforms = ['asr9k',", "else True) return result def __hash__(self): return hash(\"{}{}{}{}{}\".format( self.platform, self.package_type,", "ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i ASR9K-64 Production Packages - not finalized yet External", "Engineering Packages External Names Internal Names ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE ncs6k-mcast-5.2.5.47I ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE ncs6k-xr-5.2.5.47I", "= platform break return self._platform @property def package_type(self): if not", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES", "'-\\d\\.\\d\\.\\d.\\d' if self.platform and self.platform in self.package_name: match = re.search(pattern,", "NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES;", "data: software_package = SoftwarePackage(line) if software_package.is_valid(): software_packages.add(software_package) return software_packages @staticmethod", "break return self._platform @property def package_type(self): if not self._package_type: #", "string before X.X.X.X # For NCS6K # Extract the package", "dict_values: to_match = self.package_name.replace(self.platform, '') result = re.search(dict_values, to_match) if", "and self.version and (self.package_type or self.smu) def __eq__(self, other): result", "ncs5k-full-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mini-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mcast-2.0.0.0-r601 ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mgbl-2.0.0.0-r601 ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mpls-2.0.0.0-r601", "# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR", "# this list of conditions and the following disclaimer in", "ncs5k-ospf-2.0.0.0-r601 Engineering Packages External Names Internal Names ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE ncs5k-mgbl-3.0.0.0-r60116I ncs5k-sysadmin.iso-6.0.1", "# 5.2.4 or 5.2.4.47I 
re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"), } smu_re = re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\") subversion_dict", "matching # Example, ncs6k-mgbl.pkg-5.2.5 -> mgbl, ncs5500-mini-x.iso-6.0.1 -> mini-x self._package_type", "software_package.smu, 'subversion', software_package.subversion) \"\"\" software_packages.add(software_package) return software_packages def __repr__(self): return", "if self.platform and dict_values: to_match = self.package_name.replace(self.platform, '') result =", "finalized yet External Names Internal Names asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm asr9k-mcast-x64-2.0.0.0-r611 asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm asr9k-bgp-x64-1.0.0.0-r611", "in dictionary.keys(): if key in keys.split(): return dictionary.get(keys) return None", "ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i ASR9K-64 Production Packages - not finalized yet External Names", "ISO packages match = re.search('-\\d+\\.\\d+\\.\\d+', self.package_name) if match: # Extract", "DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND", "or 6.1.1.17I or 6.1.1 re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"), \"ncs4k ncs6k\": # 5.2.4 or", "External Names Internal Names ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE ncs5k-mgbl-3.0.0.0-r60116I ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1.26I ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE ncs5k-xr-6.0.1.16I", "ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mgbl-3.0.0.0-r601 ncs5500-mini-x.iso-6.0.1 ncs5500-xr-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601 ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-2.0.0.0-r601 ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-ospf-1.0.0.0-r601", "\"ncs5k\", \"ncs5500\", \"xrv9k\"] or self.smu: to_match = self.package_name.replace(self.platform, '') result", "BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF", "= self._package_type.replace('.pkg', 
'').replace('.iso', '') return self._package_type @property def version(self): if", "if self.platform in [\"asr9k\", \"ncs1k\", \"ncs5k\", \"ncs5500\", \"xrv9k\"] or self.smu:", "if result: self._subversion = result.group(\"SUBVERSION\") return self._subversion def get_values(self, dictionary,", "software_packages.add(software_package) return software_packages def __repr__(self): return self.package_name def __str__(self): return", "self._subversion: dict_values = self.get_values(subversion_dict, self.platform) if self.platform and dict_values: #", "in platforms: if platform + \"-\" in self.package_name: self._platform =", "the following disclaimer in the documentation # and/or other materials", "self.get_values(subversion_dict, self.platform) if self.platform and dict_values: # For NCS6K, only", "HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS OR", "asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm asr9k-mcast-x64-2.0.0.0-r611 asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm asr9k-bgp-x64-1.0.0.0-r611 asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm asr9k-mgbl-x64-3.0.0.0-r611 asr9k-full-x64.iso-6.1.1 asr9k-xr-6.1.1 asr9k-mini-x64.iso-6.1.1 asr9k-xr-6.1.1", "and use in source and binary forms, with or without", "re.search(pattern, self.package_name) # Special handling for mini, full, and sysadmin", "'-\\d+\\.\\d+\\.\\d+' if self.platform == 'ncs6k' or \\ self.platform == 'ncs4k'", "NCS5500: # Extract the package type string before X.X.X.X #", "name matching # Example, ncs6k-mgbl.pkg-5.2.5 -> mgbl, ncs5500-mini-x.iso-6.0.1 -> mini-x", "@property def smu(self): if not self._smu: result = re.search(smu_re, self.package_name)", "- not finalized yet External Names Internal Names asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm asr9k-mcast-x64-2.0.0.0-r611", "asr9k-xr-6.1.1 asr9k-mini-x64.iso-6.1.1 asr9k-xr-6.1.1 Engineering Packages External Names Internal Names asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE", "= re.search(pattern, self.package_name) # Special handling for mini, full, 
and", "ncs6k-mcast.pkg-5.2.4 ncs6k-mcast-5.2.4 ncs6k-mgbl.pkg-5.2.4 ncs6k-mgbl-5.2.4 ncs6k-mini-x.iso-5.2.4 ncs6k-mini-x-5.2.4 ncs6k-mpls.pkg-5.2.4 ncs6k-mpls-5.2.4 ncs6k-sysadmin.iso-5.2.4 ncs6k-sysadmin-5.2.4", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE", "OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE", "# ============================================================================= \"\"\" NCS4K Production Packages External Names Internal Names", "the package type string before X.X.X pattern = '-\\d+\\.\\d+\\.\\d+' if", "is a SMU. if self.platform in [\"asr9k\", \"ncs1k\", \"ncs5k\", \"ncs5500\",", "if self.subversion and other.subversion else True) return result def __hash__(self):", "software_packages @staticmethod def from_package_list(pkg_list): software_packages = set() for pkg in", "@staticmethod def from_package_list(pkg_list): software_packages = set() for pkg in pkg_list:", "the 3 part version string # External Name: ncs5k-goldenk9-x.iso-6.3.1.11I.0, Internal", "def smu(self): if not self._smu: result = re.search(smu_re, self.package_name) if", "ncs5500-eigrp-2.0.0.0-r601 ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-isis-2.0.0.0-r601 ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-k9sec-2.0.0.0-r601 ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-m2m-2.0.0.0-r601 ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mgbl-3.0.0.0-r601 ncs5500-mini-x.iso-6.0.1", "ncs5500-mpls-te-rsvp-2.0.0.0-r601 ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-2.0.0.0-r601 ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-ospf-1.0.0.0-r601 ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-parser-1.0.0.0-r601 \"\"\" import re", "conditions and the following disclaimer in the documentation # and/or", "else '-\\d\\.\\d\\.\\d.\\d' if self.platform and self.platform in self.package_name: match =", "6.1.1 re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"), \"ncs4k ncs6k\": 
# 5.2.4 or 5.2.4.47I re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"), }", "Packages External Names Internal Names ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1 ncs5k-full-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mini-x.iso-6.0.1", "'ncs4k', 'ncs5k', 'ncs5500', 'ncs6k', 'xrv9k'] version_dict = {\"asr9k ncs1k ncs5k", "OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF", "ADVISED OF # THE POSSIBILITY OF SUCH DAMAGE. # =============================================================================", "PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "reproduce the above copyright notice, # this list of conditions", "self._package_type: # For ASR9K-X64, NCS1K, NCS5K, NCS5500: # Extract the", "before X.X.X.X # For NCS6K # Extract the package type", "Names ncs6k-doc.pkg-5.2.4 ncs6k-doc-5.2.4 ncs6k-li.pkg-5.2.4 ncs6k-li-5.2.4 ncs6k-mcast.pkg-5.2.4 ncs6k-mcast-5.2.4 ncs6k-mgbl.pkg-5.2.4 ncs6k-mgbl-5.2.4 ncs6k-mini-x.iso-5.2.4", "== other.platform and \\ (self.package_type == other.package_type) and \\ self.version", "Production Packages External Names Internal Names ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-eigrp-2.0.0.0-r601 ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-isis-2.0.0.0-r601", "in binary form must reproduce the above copyright notice, #", "software_package.version, 'smu', software_package.smu, 'subversion', software_package.subversion) \"\"\" software_packages.add(software_package) return software_packages def", "and self.platform in self.package_name: match = re.search(pattern, self.package_name) # Special", "return result def __hash__(self): return hash(\"{}{}{}{}{}\".format( self.platform, self.package_type, self.version, self.smu,", "-> mgbl, ncs5500-mini-x.iso-6.0.1 -> mini-x self._package_type = self._package_type.replace('.pkg', '').replace('.iso', '')", "forms, with or without # modification, are permitted provided that", "binary forms, with or without # modification, are permitted provided", 
"ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-eigrp-2.0.0.0-r601 ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-isis-2.0.0.0-r601 ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-k9sec-2.0.0.0-r601 ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-m2m-2.0.0.0-r601 ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mgbl-3.0.0.0-r601", "Systems # All rights reserved. # # Redistribution and use", "NCS1K, NCS5K, NCS5500 # Example: ncs5500-mini-x.iso-6.0.1, asr9k-full-x64.iso-6.1.1 # Package type", "not self._platform: for platform in platforms: if platform + \"-\"", "type string before X.X.X pattern = '-\\d+\\.\\d+\\.\\d+' if self.platform ==", "self.subversion and other.subversion else True) return result def __hash__(self): return", "# Extract the package type string before X.X.X.X # For", "return self._platform @property def package_type(self): if not self._package_type: # For", "+ \"-\" in self.package_name: self._platform = platform break return self._platform", "software_package.package_name, 'platform', software_package.platform, 'package_type', software_package.package_type, 'version', software_package.version, 'smu', software_package.smu, 'subversion',", "and the following disclaimer. # Redistributions in binary form must", "SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE # LIABLE FOR", "ncs5500-mini-x.iso-6.0.1, asr9k-full-x64.iso-6.1.1 # Package type string is before the 3", "class SoftwarePackage(object): def __init__(self, package_name): self.package_name = package_name self._platform =", "None @property def platform(self): if not self._platform: for platform in", "self._smu @property def subversion(self): if not self._subversion: dict_values = self.get_values(subversion_dict,", "__eq__(self, other): result = self.platform == other.platform and \\ (self.package_type", "# Takes care the external to internal name matching #", "with the distribution. 
# THIS SOFTWARE IS PROVIDED BY THE", "Production Packages External Names Internal Names ncs4k-full-x.iso-6.0.2 ncs4k-mini-x.iso-6.0.2 ncs4k-k9sec.pkg-6.0.2 ncs4k-mpls.pkg-6.0.2", "the documentation # and/or other materials provided with the distribution.", "platform(self): if not self._platform: for platform in platforms: if platform", "Takes care the external to internal name matching # Example,", "materials provided with the distribution. # THIS SOFTWARE IS PROVIDED", "For NCS6K # Extract the package type string before X.X.X", "result = re.search(dict_values, to_match) if result: self._version = result.group(\"VERSION\") return", "self._subversion def get_values(self, dictionary, key): for keys in dictionary.keys(): if", "None def is_valid(self): return self.platform and self.version and (self.package_type or", "NCS4K Production Packages External Names Internal Names ncs4k-full-x.iso-6.0.2 ncs4k-mini-x.iso-6.0.2 ncs4k-k9sec.pkg-6.0.2", "self._smu: result = re.search(smu_re, self.package_name) if result: self._smu = result.group(\"SMU\")", "cmd.split() for line in data: software_package = SoftwarePackage(line) if software_package.is_valid():", "is_valid(self): return self.platform and self.version and (self.package_type or self.smu) def", "if software_package.is_valid(): \"\"\" for debugging print('package_name', software_package.package_name, 'platform', software_package.platform, 'package_type',", "are permitted provided that the following conditions are met: #", "or self.smu: to_match = self.package_name.replace(self.platform, '') result = re.search(dict_values, to_match)", "@property def version(self): if not self._version: dict_values = self.get_values(version_dict, self.platform)", "External Names Internal Names ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1 ncs5k-full-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mini-x.iso-6.0.1 ncs5k-xr-6.0.1", "self.platform and dict_values: # For NCS6K, only need to consider", "match = re.search(pattern, self.package_name) # Special 
handling for mini, full,", "# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)", "result = re.search(smu_re, self.package_name) if result: self._smu = result.group(\"SMU\") return", "'ncs5500', 'ncs6k', 'xrv9k'] version_dict = {\"asr9k ncs1k ncs5k ncs5500 xrv9k\":", "= {\"asr9k ncs1k ncs5k ncs5500 xrv9k\": # 61117I or 611", "if match: # Extract the package type self._package_type = self.package_name[0:match.start()].replace(self.platform", "match and sum([x in self.package_name for x in ['full', 'mini',", "Internal Names ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1 ncs5k-full-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mini-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mcast-2.0.0.0-r601", "subversion(self): if not self._subversion: dict_values = self.get_values(subversion_dict, self.platform) if self.platform", "def package_type(self): if not self._package_type: # For ASR9K-X64, NCS1K, NCS5K,", "WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #", "packages match = re.search('-\\d+\\.\\d+\\.\\d+', self.package_name) if match: # Extract the", "ARISING IN ANY WAY OUT OF THE USE OF THIS", "= SoftwarePackage(pkg) if software_package.is_valid(): \"\"\" for debugging print('package_name', software_package.package_name, 'platform',", "Copyright (c) 2016, Cisco Systems # All rights reserved. #", "# All rights reserved. # # Redistribution and use in", "provided with the distribution. # THIS SOFTWARE IS PROVIDED BY", "distribution. 
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN", "LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS", "External Names Internal Names ncs6k-doc.pkg-5.2.4 ncs6k-doc-5.2.4 ncs6k-li.pkg-5.2.4 ncs6k-li-5.2.4 ncs6k-mcast.pkg-5.2.4 ncs6k-mcast-5.2.4", "ncs5k-k9sec-2.0.0.0-r601 ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-isis-2.0.0.0-r601 ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-ospf-2.0.0.0-r601 Engineering Packages External Names Internal", "type self._package_type = self.package_name[0:match.start()].replace(self.platform + '-', '') if self._package_type: #", "= self.platform == other.platform and \\ (self.package_type == other.package_type) and", "\\ (self.package_type == other.package_type) and \\ self.version == other.version and", "Redistributions in binary form must reproduce the above copyright notice,", "ASR9K-X64, NCS1K, NCS5K, NCS5500 # Example: ncs5500-mini-x.iso-6.0.1, asr9k-full-x64.iso-6.1.1 # Package", "ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR", "following conditions are met: # # Redistributions of source code", "NCS6K # Extract the package type string before X.X.X pattern", "Extract the package type self._package_type = self.package_name[0:match.start()].replace(self.platform + '-', '')", "# Redistribution and use in source and binary forms, with", "ncs4k-k9sec.pkg-6.0.2 ncs4k-mpls.pkg-6.0.2 ncs4k-mcast.pkg-6.0.2 ncs4k-mgbl.pkg-6.0.2 NCS6K Production Packages External Names Internal", "result = re.search(dict_values, to_match) if result: self._subversion = result.group(\"SUBVERSION\") return", "the above copyright notice, # this list of conditions and", "ncs6k-mpls-5.2.4 ncs6k-sysadmin.iso-5.2.4 ncs6k-sysadmin-5.2.4 ncs6k-full-x.iso-5.2.4 ncs6k-full-x-5.2.4 ncs6k-5.2.5.CSCuy47880.smu ncs6k-5.2.5.CSCuy47880-1.0.0 <- subversion added", "xrv9k\": re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"), # 2.0.0.0 \"ncs4k ncs6k\": re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"), # 0.0.4 }", "or 6.1.1 re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"), \"ncs4k ncs6k\": # 5.2.4 or 5.2.4.47I re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"),", "'xrv9k'] version_dict = {\"asr9k ncs1k ncs5k ncs5500 xrv9k\": # 61117I", "# Redistributions of source code must retain the above copyright", "or 611 or 6.1.1.17I or 6.1.1 re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"), \"ncs4k ncs6k\": #", "OF THIS SOFTWARE, EVEN IF ADVISED OF # THE POSSIBILITY", "string # External Name: ncs5k-goldenk9-x.iso-6.3.1.11I.0, Internal Name: ncs5k-goldenk9-x-6.3.1.11I if not", "and dict_values: to_match = self.package_name.replace(self.platform, '') result = re.search(dict_values, to_match)", "== other.package_type) and \\ self.version == other.version and \\ self.smu", "(self.package_type == other.package_type) and \\ self.version == other.version and \\", 
"ncs6k-sysadmin-5.2.4 ncs6k-full-x.iso-5.2.4 ncs6k-full-x-5.2.4 ncs6k-5.2.5.CSCuy47880.smu ncs6k-5.2.5.CSCuy47880-1.0.0 <- subversion added Engineering Packages", "SoftwarePackage(pkg) if software_package.is_valid(): \"\"\" for debugging print('package_name', software_package.package_name, 'platform', software_package.platform,", "asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm asr9k-bgp-x64-1.0.0.0-r611 asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm asr9k-mgbl-x64-3.0.0.0-r611 asr9k-full-x64.iso-6.1.1 asr9k-xr-6.1.1 asr9k-mini-x64.iso-6.1.1 asr9k-xr-6.1.1 Engineering Packages", "LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN", "ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mcast-2.0.0.0-r601 ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mgbl-2.0.0.0-r601 ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mpls-2.0.0.0-r601 ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-k9sec-2.0.0.0-r601 ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-isis-2.0.0.0-r601", "# Example: ncs5500-mini-x.iso-6.0.1, asr9k-full-x64.iso-6.1.1 # Package type string is before", "data = cmd.split() for line in data: software_package = SoftwarePackage(line)", "} class SoftwarePackage(object): def __init__(self, package_name): self.package_name = package_name self._platform", "list of conditions and the following disclaimer in the documentation", "self.platform and dict_values: to_match = self.package_name.replace(self.platform, '') result = re.search(dict_values,", "= result.group(\"VERSION\") return self._version @property def smu(self): if not self._smu:", "ncs5k-mgbl-2.0.0.0-r601 ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mpls-2.0.0.0-r601 ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-k9sec-2.0.0.0-r601 ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-isis-2.0.0.0-r601 ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-ospf-2.0.0.0-r601 Engineering", "these ISO packages match = re.search('-\\d+\\.\\d+\\.\\d+', self.package_name) if match: #", "# modification, are permitted provided that the 
following conditions are", "and other.subversion else True) return result def __hash__(self): return hash(\"{}{}{}{}{}\".format(", "self._smu = result.group(\"SMU\") return self._smu @property def subversion(self): if not", "package type string before X.X.X pattern = '-\\d+\\.\\d+\\.\\d+' if self.platform", "if software_package.is_valid(): software_packages.add(software_package) return software_packages @staticmethod def from_package_list(pkg_list): software_packages =", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING,", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS", "if it is a SMU. if self.platform in [\"asr9k\", \"ncs1k\",", "in self.package_name: self._platform = platform break return self._platform @property def", "OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT", "OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,", "> 0: # Use the three part match for these", "= ['asr9k', 'ncs1k', 'ncs4k', 'ncs5k', 'ncs5500', 'ncs6k', 'xrv9k'] version_dict =", "Names Internal Names ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-eigrp-2.0.0.0-r601 ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-isis-2.0.0.0-r601 ncs5500-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-k9sec-2.0.0.0-r601 ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1", "following disclaimer in the documentation # and/or other materials provided", "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS", "LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "Names asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mcast-x64-2.0.0.0-r61116I asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-bgp-x64-1.0.0.0-r61116I asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mgbl-x64-3.0.0.0-r61116I asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE asr9k-full-x64-6.1.1.16I asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE", "not self._subversion: dict_values = 
self.get_values(subversion_dict, self.platform) if self.platform and dict_values:", "if not match and sum([x in self.package_name for x in", "three part match for these ISO packages match = re.search('-\\d+\\.\\d+\\.\\d+',", "None self._smu = None self._subversion = None @property def platform(self):", "# Extract the package type self._package_type = self.package_name[0:match.start()].replace(self.platform + '-',", "or self.smu) def __eq__(self, other): result = self.platform == other.platform", "X.X.X.X # For NCS6K # Extract the package type string", "Names ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE ncs6k-mcast-5.2.5.47I ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE ncs6k-xr-5.2.5.47I ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i ASR9K-64 Production Packages", "ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE ncs5k-mgbl-3.0.0.0-r60116I ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1.26I ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE ncs5k-xr-6.0.1.16I NCS5500 Production Packages External", "['asr9k', 'ncs1k', 'ncs4k', 'ncs5k', 'ncs5500', 'ncs6k', 'xrv9k'] version_dict = {\"asr9k", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED", "re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"), # 2.0.0.0 \"ncs4k ncs6k\": re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"), # 0.0.4 } class", "match: # Extract the package type self._package_type = self.package_name[0:match.start()].replace(self.platform +", "self._version = result.group(\"VERSION\") return self._version @property def smu(self): if not", "SMU. 
if self.platform in [\"asr9k\", \"ncs1k\", \"ncs5k\", \"ncs5500\", \"xrv9k\"] or", "in ['full', 'mini', 'sysadmin', 'goldenk9']]) > 0: # Use the", "copyright notice, # this list of conditions and the following", "ncs5k-mgbl-3.0.0.0-r60116I ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1.26I ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE ncs5k-xr-6.0.1.16I NCS5500 Production Packages External Names", "ncs5k-isis-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-isis-2.0.0.0-r601 ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-ospf-2.0.0.0-r601 Engineering Packages External Names Internal Names", "ncs6k-mgbl.pkg-5.2.5 -> mgbl, ncs5500-mini-x.iso-6.0.1 -> mini-x self._package_type = self._package_type.replace('.pkg', '').replace('.iso',", "is before the 3 part version string # External Name:", "'subversion', software_package.subversion) \"\"\" software_packages.add(software_package) return software_packages def __repr__(self): return self.package_name", "smu_re = re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\") subversion_dict = {\"asr9k ncs1k ncs5k ncs5500 xrv9k\":", "pkg in pkg_list: software_package = SoftwarePackage(pkg) if software_package.is_valid(): \"\"\" for", "match for these ISO packages match = re.search('-\\d+\\.\\d+\\.\\d+', self.package_name) if", "'ncs6k', 'xrv9k'] version_dict = {\"asr9k ncs1k ncs5k ncs5500 xrv9k\": #", "for x in ['full', 'mini', 'sysadmin', 'goldenk9']]) > 0: #", "# # Copyright (c) 2016, Cisco Systems # All rights", "not self._smu: result = re.search(smu_re, self.package_name) if result: self._smu =", "+ '-', '') if self._package_type: # Takes care the external", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE", "result: self._version = result.group(\"VERSION\") return self._version @property def smu(self): if", "INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF", "self._package_type = self._package_type.replace('.pkg', '').replace('.iso', '') return self._package_type @property def version(self):", "__hash__(self): return 
hash(\"{}{}{}{}{}\".format( self.platform, self.package_type, self.version, self.smu, self.subversion)) @staticmethod def", "self.platform in self.package_name: match = re.search(pattern, self.package_name) # Special handling", "self.package_name) if result: self._smu = result.group(\"SMU\") return self._smu @property def", "self.version and (self.package_type or self.smu) def __eq__(self, other): result =", "and the following disclaimer in the documentation # and/or other", "@property def package_type(self): if not self._package_type: # For ASR9K-X64, NCS1K,", "version_dict = {\"asr9k ncs1k ncs5k ncs5500 xrv9k\": # 61117I or", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "asr9k-mcast-x64-2.0.0.0-r61116I asr9k-bgp-x64-1.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-bgp-x64-1.0.0.0-r61116I asr9k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mgbl-x64-3.0.0.0-r61116I asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE asr9k-full-x64-6.1.1.16I asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE asr9k-mini-x64-6.1.1.16I NCS5K", "= set() data = cmd.split() for line in data: software_package", "ncs5k-sysadmin-6.0.1.26I ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE ncs5k-xr-6.0.1.16I NCS5500 Production Packages External Names Internal Names", "dict_values = self.get_values(version_dict, self.platform) if self.platform and dict_values: to_match =", "ncs5k-xr-6.0.1 ncs5k-mini-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mcast-2.0.0.0-r601 ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mgbl-2.0.0.0-r601 ncs5k-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mpls-2.0.0.0-r601 ncs5k-k9sec-2.0.0.0-r601.x86_64.rpm-6.0.1", "ncs4k-mini-x.iso-6.0.2 ncs4k-k9sec.pkg-6.0.2 ncs4k-mpls.pkg-6.0.2 ncs4k-mcast.pkg-6.0.2 ncs4k-mgbl.pkg-6.0.2 NCS6K Production Packages External Names", "asr9k-full-x64.iso-6.1.1 asr9k-xr-6.1.1 asr9k-mini-x64.iso-6.1.1 asr9k-xr-6.1.1 Engineering Packages External Names Internal Names", "if not self._version: dict_values = 
self.get_values(version_dict, self.platform) if self.platform and", "software_packages = set() data = cmd.split() for line in data:", "result.group(\"VERSION\") return self._version @property def smu(self): if not self._smu: result", "# For NCS6K # Extract the package type string before", "\"xrv9k\"] or self.smu: to_match = self.package_name.replace(self.platform, '') result = re.search(dict_values,", "X.X.X pattern = '-\\d+\\.\\d+\\.\\d+' if self.platform == 'ncs6k' or \\", "OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER", "SUCH DAMAGE. # ============================================================================= \"\"\" NCS4K Production Packages External Names", "LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS", "if not self._package_type: # For ASR9K-X64, NCS1K, NCS5K, NCS5500: #", "self.package_name.replace(self.platform, '') result = re.search(dict_values, to_match) if result: self._subversion =", "asr9k-mgbl-x64-3.0.0.0-r61116I asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE asr9k-full-x64-6.1.1.16I asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE asr9k-mini-x64-6.1.1.16I NCS5K Production Packages External Names", "@property def subversion(self): if not self._subversion: dict_values = self.get_values(subversion_dict, self.platform)", "IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,", "Internal Names ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE ncs5k-mgbl-3.0.0.0-r60116I ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1.26I ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE ncs5k-xr-6.0.1.16I NCS5500 Production", "NCS5500 Production Packages External Names Internal Names ncs5500-eigrp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-eigrp-2.0.0.0-r601 ncs5500-isis-2.0.0.0-r601.x86_64.rpm-6.0.1", "SoftwarePackage(object): def __init__(self, package_name): self.package_name = package_name self._platform = None", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #", "retain the above copyright notice, # this list of conditions", "= 
re.search('-\\d+\\.\\d+\\.\\d+', self.package_name) if match: # Extract the package type", "All rights reserved. # # Redistribution and use in source", "Use the three part match for these ISO packages match", "without # modification, are permitted provided that the following conditions", "the distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "0.0.4 } class SoftwarePackage(object): def __init__(self, package_name): self.package_name = package_name", "\"ncs4k ncs6k\": # 5.2.4 or 5.2.4.47I re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"), } smu_re =", "if result: self._smu = result.group(\"SMU\") return self._smu @property def subversion(self):", "import re platforms = ['asr9k', 'ncs1k', 'ncs4k', 'ncs5k', 'ncs5500', 'ncs6k',", "\\ self.platform == 'ncs4k' else '-\\d\\.\\d\\.\\d.\\d' if self.platform and self.platform", "re.search(dict_values, to_match) if result: self._version = result.group(\"VERSION\") return self._version @property", "if not self._platform: for platform in platforms: if platform +", "dictionary.get(keys) return None def is_valid(self): return self.platform and self.version and", "WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE", "this list of conditions and the following disclaimer in the", "ncs5500-mini-x.iso-6.0.1 ncs5500-xr-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601 ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-2.0.0.0-r601 ncs5500-ospf-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-ospf-1.0.0.0-r601 ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-parser-1.0.0.0-r601", "the package type string before X.X.X.X # For NCS6K #", "in keys.split(): return dictionary.get(keys) return None def is_valid(self): return self.platform", "modification, are permitted provided that the following conditions are met:", "PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "SOFTWARE, EVEN IF ADVISED OF # THE POSSIBILITY OF SUCH", "'') return 
self._package_type @property def version(self): if not self._version: dict_values", "in [\"asr9k\", \"ncs1k\", \"ncs5k\", \"ncs5500\", \"xrv9k\"] or self.smu: to_match =", "'goldenk9']]) > 0: # Use the three part match for", "ncs6k-sysadmin.iso-5.2.4 ncs6k-sysadmin-5.2.4 ncs6k-full-x.iso-5.2.4 ncs6k-full-x-5.2.4 ncs6k-5.2.5.CSCuy47880.smu ncs6k-5.2.5.CSCuy47880-1.0.0 <- subversion added Engineering", "ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-m2m-2.0.0.0-r601 ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mgbl-3.0.0.0-r601 ncs5500-mini-x.iso-6.0.1 ncs5500-xr-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601 ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-2.0.0.0-r601", "self.platform in [\"asr9k\", \"ncs1k\", \"ncs5k\", \"ncs5500\", \"xrv9k\"] or self.smu: to_match", "return None def is_valid(self): return self.platform and self.version and (self.package_type", "Example: ncs5500-mini-x.iso-6.0.1, asr9k-full-x64.iso-6.1.1 # Package type string is before the", "# ARISING IN ANY WAY OUT OF THE USE OF", "['full', 'mini', 'sysadmin', 'goldenk9']]) > 0: # Use the three", "5.2.4.47I re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"), } smu_re = re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\") subversion_dict = {\"asr9k ncs1k", "self._platform: for platform in platforms: if platform + \"-\" in", "ncs5k-goldenk9-x-6.3.1.11I if not match and sum([x in self.package_name for x", "added Engineering Packages External Names Internal Names ncs6k-mcast.pkg-5.2.5.47I.DT_IMAGE ncs6k-mcast-5.2.5.47I ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE", "self.package_name.replace(self.platform, '') result = re.search(dict_values, to_match) if result: self._version =", "dictionary, key): for keys in dictionary.keys(): if key in keys.split():", "asr9k-mini-x64.iso-6.1.1 asr9k-xr-6.1.1 Engineering Packages External Names Internal Names asr9k-mcast-x64-2.0.0.0-r61116I.x86_64.rpm-6.1.1.16I.DT_IMAGE asr9k-mcast-x64-2.0.0.0-r61116I", "and \\ 
self.smu == other.smu and \\ (self.subversion == other.subversion", "AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN", "asr9k-full-x64.iso-6.1.1.16I.DT_IMAGE asr9k-full-x64-6.1.1.16I asr9k-mini-x64.iso-6.1.1.16I.DT_IMAGE asr9k-mini-x64-6.1.1.16I NCS5K Production Packages External Names Internal", "ncs5500-k9sec-2.0.0.0-r601 ncs5500-m2m-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-m2m-2.0.0.0-r601 ncs5500-mgbl-3.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mgbl-3.0.0.0-r601 ncs5500-mini-x.iso-6.0.1 ncs5500-xr-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-mpls-te-rsvp-2.0.0.0-r601 ncs5500-mpls-2.0.0.0-r601.x86_64.rpm-6.0.1", "\"\"\" for debugging print('package_name', software_package.package_name, 'platform', software_package.platform, 'package_type', software_package.package_type, 'version',", "not self._version: dict_values = self.get_values(version_dict, self.platform) if self.platform and dict_values:", "reserved. # # Redistribution and use in source and binary", "asr9k-mini-x64-6.1.1.16I NCS5K Production Packages External Names Internal Names ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1", "= '-\\d+\\.\\d+\\.\\d+' if self.platform == 'ncs6k' or \\ self.platform ==", "subversion_dict = {\"asr9k ncs1k ncs5k ncs5500 xrv9k\": re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"), # 2.0.0.0", "ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY,", "for mini, full, and sysadmin ISO on ASR9K-X64, NCS1K, NCS5K,", "external to internal name matching # Example, ncs6k-mgbl.pkg-5.2.5 -> mgbl,", "ncs6k-5.2.5.CSCuy47880-1.0.0 <- subversion added Engineering Packages External Names Internal Names", "and sum([x in self.package_name for x in ['full', 'mini', 'sysadmin',", "other.platform and \\ (self.package_type == other.package_type) and \\ self.version ==", "ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT", "mini, full, and sysadmin ISO on ASR9K-X64, NCS1K, NCS5K, NCS5500", "= result.group(\"SUBVERSION\") return self._subversion def 
get_values(self, dictionary, key): for keys", "of conditions and the following disclaimer. # Redistributions in binary", "EVEN IF ADVISED OF # THE POSSIBILITY OF SUCH DAMAGE.", "IF ADVISED OF # THE POSSIBILITY OF SUCH DAMAGE. #", "result.group(\"SUBVERSION\") return self._subversion def get_values(self, dictionary, key): for keys in", "# Copyright (c) 2016, Cisco Systems # All rights reserved.", "to consider subversion if it is a SMU. if self.platform", "ncs6k-5.2.5.CSCuy47880.smu ncs6k-5.2.5.CSCuy47880-1.0.0 <- subversion added Engineering Packages External Names Internal", "type string before X.X.X.X # For NCS6K # Extract the", "============================================================================= # # Copyright (c) 2016, Cisco Systems # All", "'sysadmin', 'goldenk9']]) > 0: # Use the three part match", "Internal Names ncs4k-full-x.iso-6.0.2 ncs4k-mini-x.iso-6.0.2 ncs4k-k9sec.pkg-6.0.2 ncs4k-mpls.pkg-6.0.2 ncs4k-mcast.pkg-6.0.2 ncs4k-mgbl.pkg-6.0.2 NCS6K Production", "= cmd.split() for line in data: software_package = SoftwarePackage(line) if", "internal name matching # Example, ncs6k-mgbl.pkg-5.2.5 -> mgbl, ncs5500-mini-x.iso-6.0.1 ->", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #", "keys in dictionary.keys(): if key in keys.split(): return dictionary.get(keys) return", "for debugging print('package_name', software_package.package_name, 'platform', software_package.platform, 'package_type', software_package.package_type, 'version', software_package.version,", "# Example, ncs6k-mgbl.pkg-5.2.5 -> mgbl, ncs5500-mini-x.iso-6.0.1 -> mini-x self._package_type =", "def get_values(self, dictionary, key): for keys in dictionary.keys(): if key", "SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"", "'ncs6k' or \\ self.platform == 'ncs4k' else '-\\d\\.\\d\\.\\d.\\d' if self.platform", "ncs5500 xrv9k\": # 61117I or 611 or 6.1.1.17I or 6.1.1", 
"ncs5k-goldenk9-x.iso-6.3.1.11I.0, Internal Name: ncs5k-goldenk9-x-6.3.1.11I if not match and sum([x in", "(self.subversion == other.subversion if self.subversion and other.subversion else True) return", "self.version == other.version and \\ self.smu == other.smu and \\", "result: self._subversion = result.group(\"SUBVERSION\") return self._subversion def get_values(self, dictionary, key):", "# Special handling for mini, full, and sysadmin ISO on", "\\ self.version == other.version and \\ self.smu == other.smu and", "\\ self.smu == other.smu and \\ (self.subversion == other.subversion if", "Names asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm asr9k-mcast-x64-2.0.0.0-r611 asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm asr9k-bgp-x64-1.0.0.0-r611 asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm asr9k-mgbl-x64-3.0.0.0-r611 asr9k-full-x64.iso-6.1.1 asr9k-xr-6.1.1 asr9k-mini-x64.iso-6.1.1", "NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE #", "key in keys.split(): return dictionary.get(keys) return None def is_valid(self): return", "'platform', software_package.platform, 'package_type', software_package.package_type, 'version', software_package.version, 'smu', software_package.smu, 'subversion', software_package.subversion)", "smu(self): if not self._smu: result = re.search(smu_re, self.package_name) if result:", "ncs4k-mcast.pkg-6.0.2 ncs4k-mgbl.pkg-6.0.2 NCS6K Production Packages External Names Internal Names ncs6k-doc.pkg-5.2.4", "= None self._package_type = None self._version = None self._smu =", "# Redistributions in binary form must reproduce the above copyright", "== 'ncs6k' or \\ self.platform == 'ncs4k' else '-\\d\\.\\d\\.\\d.\\d' if", "self._subversion = None @property def platform(self): if not self._platform: for", "61117I or 611 or 6.1.1.17I or 6.1.1 re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"), \"ncs4k ncs6k\":", "FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. 
IN NO EVENT", "self.platform == 'ncs6k' or \\ self.platform == 'ncs4k' else '-\\d\\.\\d\\.\\d.\\d'", "ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1 ncs5k-full-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mini-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mcast-2.0.0.0-r601 ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mgbl-2.0.0.0-r601", "ncs6k-full-x-5.2.4 ncs6k-5.2.5.CSCuy47880.smu ncs6k-5.2.5.CSCuy47880-1.0.0 <- subversion added Engineering Packages External Names", "NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;", "Names ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1 ncs5k-full-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mini-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-mcast-2.0.0.0-r601 ncs5k-mgbl-2.0.0.0-r601.x86_64.rpm-6.0.1", "other.package_type) and \\ self.version == other.version and \\ self.smu ==", "FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO", "self.version, self.smu, self.subversion)) @staticmethod def from_show_cmd(cmd): software_packages = set() data", "= package_name self._platform = None self._package_type = None self._version =", "it is a SMU. if self.platform in [\"asr9k\", \"ncs1k\", \"ncs5k\",", "BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY", "self._package_type: # Takes care the external to internal name matching", "mgbl, ncs5500-mini-x.iso-6.0.1 -> mini-x self._package_type = self._package_type.replace('.pkg', '').replace('.iso', '') return", "mini-x self._package_type = self._package_type.replace('.pkg', '').replace('.iso', '') return self._package_type @property def", "IS\" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "and/or other materials provided with the distribution. 
# THIS SOFTWARE", "0: # Use the three part match for these ISO", "self.get_values(version_dict, self.platform) if self.platform and dict_values: to_match = self.package_name.replace(self.platform, '')", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "list of conditions and the following disclaimer. # Redistributions in", "DAMAGE. # ============================================================================= \"\"\" NCS4K Production Packages External Names Internal", "software_package.subversion) \"\"\" software_packages.add(software_package) return software_packages def __repr__(self): return self.package_name def", "# 61117I or 611 or 6.1.1.17I or 6.1.1 re.compile(\"(?P<VERSION>(\\d+\\d+\\d+(\\d+\\w+)?)|(\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)(?!\\.\\d)(?!-))\"), \"ncs4k", "PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE", "(INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT", "self.package_name[0:match.start()].replace(self.platform + '-', '') if self._package_type: # Takes care the", "= {\"asr9k ncs1k ncs5k ncs5500 xrv9k\": re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"), # 2.0.0.0 \"ncs4k", "Names ncs4k-full-x.iso-6.0.2 ncs4k-mini-x.iso-6.0.2 ncs4k-k9sec.pkg-6.0.2 ncs4k-mpls.pkg-6.0.2 ncs4k-mcast.pkg-6.0.2 ncs4k-mgbl.pkg-6.0.2 NCS6K Production Packages", "'mini', 'sysadmin', 'goldenk9']]) > 0: # Use the three part", "ncs5500 xrv9k\": re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"), # 2.0.0.0 \"ncs4k ncs6k\": re.compile(\"CSC.*(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+?)\"), # 0.0.4", "not match and sum([x in self.package_name for x in ['full',", "self.smu, self.subversion)) @staticmethod def from_show_cmd(cmd): software_packages = set() data =", "asr9k-mgbl-x64-3.0.0.0-r611 asr9k-full-x64.iso-6.1.1 asr9k-xr-6.1.1 asr9k-mini-x64.iso-6.1.1 asr9k-xr-6.1.1 Engineering Packages External Names Internal", "OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY", "THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND 
FITNESS FOR A", "ncs5k-ospf-2.0.0.0-r601.x86_64.rpm-6.0.1 ncs5k-ospf-2.0.0.0-r601 Engineering Packages External Names Internal Names ncs5k-mgbl-x64-3.0.0.0-r61116I.x86_64.rpm-6.0.1.16I.DT_IMAGE ncs5k-mgbl-3.0.0.0-r60116I", "COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY EXPRESS", "and (self.package_type or self.smu) def __eq__(self, other): result = self.platform", "ncs1k ncs5k ncs5500 xrv9k\": # 61117I or 611 or 6.1.1.17I", "# and/or other materials provided with the distribution. # THIS", "in self.package_name for x in ['full', 'mini', 'sysadmin', 'goldenk9']]) >", "self._version @property def smu(self): if not self._smu: result = re.search(smu_re,", "other.version and \\ self.smu == other.smu and \\ (self.subversion ==", "INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "ncs5500-parser-1.0.0.0-r601.x86_64.rpm-6.0.1 ncs5500-parser-1.0.0.0-r601 \"\"\" import re platforms = ['asr9k', 'ncs1k', 'ncs4k',", "line in data: software_package = SoftwarePackage(line) if software_package.is_valid(): software_packages.add(software_package) return", "ncs6k-mgbl-5.2.4 ncs6k-mini-x.iso-5.2.4 ncs6k-mini-x-5.2.4 ncs6k-mpls.pkg-5.2.4 ncs6k-mpls-5.2.4 ncs6k-sysadmin.iso-5.2.4 ncs6k-sysadmin-5.2.4 ncs6k-full-x.iso-5.2.4 ncs6k-full-x-5.2.4 ncs6k-5.2.5.CSCuy47880.smu", "package_name self._platform = None self._package_type = None self._version = None", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #", "OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON", "for these ISO packages match = re.search('-\\d+\\.\\d+\\.\\d+', self.package_name) if match:", "NCS1K, NCS5K, NCS5500: # Extract the package type string before", "sysadmin ISO on ASR9K-X64, NCS1K, NCS5K, NCS5500 # Example: ncs5500-mini-x.iso-6.0.1,", "other): result = self.platform == other.platform and \\ (self.package_type ==", "yet External Names Internal Names asr9k-mcast-x64-2.0.0.0-r611.x86_64.rpm asr9k-mcast-x64-2.0.0.0-r611 
asr9k-bgp-x64-1.0.0.0-r611.x86_64.rpm asr9k-bgp-x64-1.0.0.0-r611 asr9k-mgbl-x64-3.0.0.0-r611.x86_64.rpm", "need to consider subversion if it is a SMU. if", "# External Name: ncs5k-goldenk9-x.iso-6.3.1.11I.0, Internal Name: ncs5k-goldenk9-x-6.3.1.11I if not match", "part match for these ISO packages match = re.search('-\\d+\\.\\d+\\.\\d+', self.package_name)", "dict_values: # For NCS6K, only need to consider subversion if", "'ncs4k' else '-\\d\\.\\d\\.\\d.\\d' if self.platform and self.platform in self.package_name: match", "= self.package_name[0:match.start()].replace(self.platform + '-', '') if self._package_type: # Takes care", "must retain the above copyright notice, # this list of", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED.", "OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT", "ncs6k-li.pkg-5.2.4 ncs6k-li-5.2.4 ncs6k-mcast.pkg-5.2.4 ncs6k-mcast-5.2.4 ncs6k-mgbl.pkg-5.2.4 ncs6k-mgbl-5.2.4 ncs6k-mini-x.iso-5.2.4 ncs6k-mini-x-5.2.4 ncs6k-mpls.pkg-5.2.4 ncs6k-mpls-5.2.4", "NCS6K Production Packages External Names Internal Names ncs6k-doc.pkg-5.2.4 ncs6k-doc-5.2.4 ncs6k-li.pkg-5.2.4", "ncs6k-mcast-5.2.5.47I ncs6k-mini-x.iso-6.1.0.07I.DT_IMAGE ncs6k-xr-5.2.5.47I ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i.smu ncs6k-5.2.5.47I.CSCuy47880-0.0.4.i ASR9K-64 Production Packages - not", "package type self._package_type = self.package_name[0:match.start()].replace(self.platform + '-', '') if self._package_type:", "Names Internal Names ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1 ncs5k-full-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mini-x.iso-6.0.1 ncs5k-xr-6.0.1 ncs5k-mcast-2.0.0.0-r601.x86_64.rpm-6.0.1", "or 5.2.4.47I re.compile(\"(?P<VERSION>\\d+\\.\\d+\\.\\d+(\\.\\d+\\w+)?)\"), } smu_re = re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\") subversion_dict = {\"asr9k", "= self.package_name.replace(self.platform, '') result = re.search(dict_values, to_match) if result: self._subversion", "software_package.is_valid(): 
software_packages.add(software_package) return software_packages @staticmethod def from_package_list(pkg_list): software_packages = set()", "= None @property def platform(self): if not self._platform: for platform", "TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF", "self.platform and self.platform in self.package_name: match = re.search(pattern, self.package_name) #", "= re.compile(\"(?P<SMU>CSC[a-z]{2}\\d{5})\") subversion_dict = {\"asr9k ncs1k ncs5k ncs5500 xrv9k\": re.compile(\"-(?P<SUBVERSION>\\d+\\.\\d+\\.\\d+\\.\\d+)-\"),", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND", "ncs5k-sysadmin.iso-6.0.1 ncs5k-sysadmin-6.0.1.26I ncs5k-full-x.iso-6.0.1.16I.DT_IMAGE ncs5k-xr-6.0.1.16I NCS5500 Production Packages External Names Internal", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" # AND ANY", "return software_packages @staticmethod def from_package_list(pkg_list): software_packages = set() for pkg" ]
[ "in this function should be of lower case. Official wechat", "self._media_type = media_type self._token = token def loadToken(self, token =", "=> voice' self._Recognition = root.find(\"Recognition\").text # For recognition messages if", "token=''): self._token = token def loadToken(self, token=''): '''Load the token", "self.MsgType == 'voice': if root.find('MediaId') is not None: self._MediaId =", "of 'normal message => voice' self._Recognition = root.find(\"Recognition\").text # For", "new_name=''): '''Update the determained group id with the new_name. 'True'", "self.root.find(k).text = v except Exception as e: print e raise", "value''' url = \"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\" + token request = urllib2.Request(url, messageString)", "'description': self.the_dict['video'][k] = v else: self.the_dict[k] = v except Exception", "raise e ## For image message only elif self._MsgType ==", "variable if msgType in ['text', 'image', 'voice', 'video', 'location', 'link',", "except Exception as e: print e raise e #raise AttributeError,", "== 'video': for k, v in kwargs.items(): try: if k", "'zh_TW, en' For more information: please visit, http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF''' url =", "self._Longitude = root.find('Longitude').text if root.find('Precision') is not None: self._Precision =", "http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8 ''' url = \"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\" + self._token + \"&next_openid=\" +", "'zh_CN', there are others: 'zh_TW, en' For more information: please", "'text' by default >>> # Notice we don't need to", "MsgType not in ['text', 'image', 'voice', 'video', 'music', 'news']: raise", "message valid for wechat # For more information, please visit", "include: text, image, voice, video, music, news The dumped is", "it is a voice event message elif 
root.find(\"Event\") is not", "i in MsgType_list: if MsgType == i: self.MsgType = i", "url = \"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\" + self._token + \"&openid=\" + openid +", "<ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[voice]]></MsgType> <Voice> <MediaId><![CDATA[media_id]]></MediaId> </Voice> </xml>''' tpl_video =", "url = \"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+ self._token postData = '{\"openid\":\"%s\"}' % openid request", "whose types are of : 'text', 'image', 'voice', 'video', 'location',", "\"&type=\" + self._media_type register_openers() try: datagen, headers = multipart_encode({\"image1\": open(media_path,\"rb\")})", "'image': self.initType('image', incomingMessage) elif msgType == 'voice': self.initType('voice', incomingMessage) elif", "= v elif k == 'Title': self.root.find('Video').find('Title').text = v elif", "elif k == 'title': self.the_dict['music'][k] = v elif k ==", "format. 
We need to json.loads(the_dict_object) if we want to pass", "}''' json_news = '''{ \"touser\":\"OPENID\", \"msgtype\":\"news\", \"news\":{ \"articles\": [ {", "return False else: return False class MediaManager(object): '''There are four", "\"It has no message type: '%s'\" % MsgType else: #", "'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision' ], } # For recognition message mapping global recognitionMapping", "= '' if root.find('CreateTime') is not None: self._CreateTime = root.find('CreateTime').text", "certainly if root.find('Event') is not None: self._Event = root.find('Event').text else:", "in all the mapping relationship # # For those tags", "urllib2.Request(url, messageString) request.get_method = lambda : 'POST' try: response =", "if root.find('ThumbMediaId') is not None: self._ThumbMediaId = root.find('ThumbMediaId').text else: self._ThumbMediaId", "\"%s\"} }' % name request = urllib2.Request(url,data=postData) request.get_method = lambda", "```self.MsgType and etc..``` Logistics as the followings: 1) check parent", "more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8 ''' url = \"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\" +", "'image', 'voice', 'video', 'location', 'link', 'event'] if MsgType not in", "## For news message only elif self._MsgType == 'news': for", "hasattr(holder, \"_Content\") >>> True >>> holder.getElementByTag('Content') >>> '' \"\"\" #", "TODO # For event messages if self.type == 'recognition': self.initType('voice',", "string. But ```menu_format``` is constructed from a JSON. 
For more", "elif k == 'HQMusicUrl': self.root.find('Video').find('HQMusicUrl').text = v elif k ==", "v in kwargs.items(): if k == 'ArticleCount': self.root.find(k).text = v", "+ self._token postData = '{\"group\":{\"id\":%s,\"name\":\"%s\"}}' % (groupid, new_name) request =", "[]}, {u'url': u'http://v.qq.com/', u'type': u'view', u'name': u'\\u770b\\u7535\\u5f71', u'sub_button': []}, {u'type':", "'' # Store the XML incomingMessage if has # For", "\"touser\":\"OPENID\", \"msgtype\":\"text\", \"text\": { \"content\":\"Hello World\" } }''' json_image =", "in a list which contains a dict. While construcing the", "for the news message ''' \"articles\": [ { \"title\":\"Happy Day\",", "+ self._token + \"&type=\" + self._media_type register_openers() try: datagen, headers", "If ```token``` and ```next_openid``` are valid, then a dict will", "make something as : 'self._FromUserName' #for i in child_list: #", "'text' # Unique tages in all the mapping relationship #", "= root.find('MediaId').text else: self._MediaId = '' # For voice message", "media suppored by wechat. image, voice, video, thumb Post the", "''' To initialize message type ''' MsgType_list = ['text', 'image',", "= globals()['json_' + self._MsgType].encode('utf-8').decode('utf-8') self.the_dict = json.loads(the_json_tpl) if MsgType ==", "mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string') >>> flag True >>> menu_got = mm.getMenu() >>> menu_got", "type string. But ```menu_format``` is constructed from a JSON. 
For", "The down blow are the templates of all the responsing", "e: print e raise e def dumpXML(self): # To dump", "to declare the 'MsgType' For example, $~ python >>> holder", "e return False else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): if", "posting from poster.encode import multipart_encode from poster.streaminghttp import register_openers class", "openid='', lang='zh_CN'): '''The open_id parameter is unique to unique wechat", "construcing the JSON dumped, This is used with the function", "u'view', u'name': u'\\u641c\\u641c', u'sub_button': []}, {u'url': u'http://v.qq.com/', u'type': u'view', u'name':", "\"msgtype\":\"video\", \"video\": { \"media_id\":\"MEDIA_ID\", \"title\":\"TITLE\", \"description\":\"DESCRIPTION\" } }''' json_music =", "inform its author. If for commercial, please inform the author", "For article message only elif self._MsgType == 'article': # To", "== 'articles': if type(v) == list: self.the_dict['news'][k] = v else:", "media_type='image', media_path=''): '''Post the determained media file to the offical", "self.initType('voice', incomingMessage) # Construct a var ```self._Recognition``` since it is", "lang='zh_CN'): '''The open_id parameter is unique to unique wechat public", "return the new group id of type 'int'. If not,", "This function will return a dict if ```token``` and ```open_id```", "</xml>''' tpl_image = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[image]]></MsgType> <Image> <MediaId><![CDATA[media_id]]></MediaId>", "= '' if root.find('Label') is not None: self._Label = root.find('Label').text", "something here if the wechat remote server is down print", "tpl_list: raise ValueError, \"Invalid responsing message MsgType '%s'\" % MsgType", "return None. 
''' url = \"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\" + self._token postData =", "image is valid, then a_dict will be returned. If not,", "incomingMessage has tag 'Event' then, it is a voice event", "k, v in kwargs.items(): try: if k == 'musicurl': self.the_dict['music'][k]", "return False def getSubscriberList(self, next_openid=''): '''To get subscriber list. A", "For media posting from poster.encode import multipart_encode from poster.streaminghttp import", "def packageArticle(title= \"default title\", description=\"default description\", url=\"http://www.baidu.com\", picurl=\"http://www.baidu.com/img/bdlogo.gif\"): '''This will", "k, v in kwargs.items(): try: ## assign value to the", "= token def getSubscriberProfile(self, openid='', lang='zh_CN'): '''The open_id parameter is", "attribute _%s\" % tag #raise AttributeError, tmp else: return gotten", "only elif self._MsgType == 'image': for k, v in kwargs.items():", "server is down print e return None else: a_dict =", "normalMapping = { 'text':['Content'], 'image':['PicUrl', 'MediaId'], 'voice':['MediaId','Format'], 'video':['MediaId','ThumbMeiaId'], 'location':['Location_X','Location_Y','Scale', 'Label'],", "= '''{ \"touser\":\"OPENID\", \"msgtype\":\"news\", \"news\":{ \"articles\": [ { \"title\":\"Happy Day\",", "title, \"description\":description, \"url\":url, \"picurl\":picurl}] # to dump the the dict", "# package article def packageArticle(title= \"default title\", description=\"default description\", url=\"http://www.baidu.com\",", "Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] } }'''", "or not. 
For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D ''' url", "# here we just check whether the ```v``` is type", "incomingMessage) class RespondingContainer(object): \"\"\"Package XML to reponse to determained wechat", "'music', 'news'] if MsgType not in tpl_list: raise ValueError, \"Invalid", "= urllib2.urlopen(request) except Exception as e: print e return None", "a string 'token' will be return. If not , 'return", "'FromUserName', 'CreateTime', 'MsgId', 'MsgType'] # For normal message mapping global", "<PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> <item> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> </Articles>", "= root.find('CreateTime').text else: self._CreateTime = '1000000000' if root.find('MsgType') is not", "\"news\":{ \"articles\": [ { \"title\":\"Happy Day\", \"description\":\"Is Really A Happy", "for k, v in kwargs.items(): if k == 'ArticleCount': self.root.find(k).text", "message mapping global normalMapping normalMapping = { 'text':['Content'], 'image':['PicUrl', 'MediaId'],", "use the functions below''' self._token = token def getSubscriberProfile(self, openid='',", "children class message type ''' root = etree.fromstring(incomingMessage) msgType =", "it means no menu at all ''' def __init__(self, token=''):", "Set the default tag value ### Get all the tags", "= root.find('Title').text else: self._Title = '' if root.find('Description') is not", "u'view', u'name': u'\\u770b\\u7535\\u5f71', u'sub_button': []}, {u'type': u'click', u'name': u'\\u5938\\u6211\\u5e05', u'key':", "root.find('ToUserName') is not None: self._ToUserName = root.find('ToUserName').text else: self._ToUserName =", "+ token request = urllib2.Request(url, messageString) request.get_method = lambda :", "is a voice 
recognition message if root.find(\"Recognition\") is not None:", "} }''' json_music = '''{ \"touser\":\"OPENID\", \"msgtype\":\"music\", \"music\": { \"title\":\"MUSIC_TITLE\",", "as e: print e return None else: a_dict = json.loads(response.read())", "suppored by wechat. image, voice, video, thumb Post the file", "the event message 'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision' ], } # For recognition message", "message only elif self.MsgType == 'link': if root.find('Title') is not", "# For recognition message mapping global recognitionMapping recognitionMapping = {", "set the 'CreateTime' since it has been generated automatically :)", "will be returned. For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8 '''", "information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6''' if media_type not in ['image', 'voice',", "down print e return None else: a_dict = json.loads(response.read()) if", "{ \"content\":\"Hello World\" } }''' json_image = '''{ \"touser\":\"OPENID\", \"msgtype\":\"image\",", "```self._Event``` for event message certainly if root.find('Event') is not None:", "openid wrong or token invalid, 'None' will be returned. 
For", "if root.find('Format') is not None: self._Format = root.find('Format').text else: self._Format", "None else: return a_dict def getAPIToken(appid='', appsecret=''): '''Get wechat API", "== 'event': self.initType('event', incomingMessage) class RespondingContainer(object): \"\"\"Package XML to reponse", "article def packageArticle(title= \"default title\", description=\"default description\", url=\"http://www.baidu.com\", picurl=\"http://www.baidu.com/img/bdlogo.gif\"): '''This", "# Unique tages in all the mapping relationship # #", "\"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\" + self._token postData = '{\"group\":{\"id\":%s,\"name\":\"%s\"}}' % (groupid, new_name) request", "def __init__(self, incomingMessage='<xml></xml>'): # pre-set some common variables root =", "self._MsgType == 'text': for k, v in kwargs.items(): try: if", "k == 'articles': if type(v) == list: self.the_dict['news'][k] = v", "def getElementByTag(self, tag): '''To get element from the tag '''", "not None: self._Longitude = root.find('Longitude').text if root.find('Precision') is not None:", "'tag' object to class to make something as : 'self._FromUserName'", "= v except Exception as e: print e raise e", "k == 'title': self.the_dict['video'][k] = v elif k == 'description':", "v elif k == 'ThumbMediaId': self.root.find('Video').find('ThumbMediaId').text = v else: try:", "**kwargs): \"\"\" To package XML message into an object Usage:", "the offical wechat server and get the response. ''' def", "#raise AttributeError, tmp else: return gotten def digest(self, incomingMessage): '''To", "= '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[image]]></MsgType> <Image> <MediaId><![CDATA[media_id]]></MediaId> </Image> </xml>'''", "by wechat. 
image, voice, video, thumb Post the file to", "= root.find('Longitude').text if root.find('Precision') is not None: self._Precision = root.find('Precision').text", "None: self._PicUrl = root.find('PicUrl').text else: self._PicUrl = '' if root.find('MediaId')", "'' # For location message only elif self.MsgType == 'location':", "True >>> holder.getElementByTag('Content') >>> '' \"\"\" # By default, MsgType", "'MsgId', 'MsgType'] # For normal message mapping global normalMapping normalMapping", "After all then 'normal' message else: self.type = 'normal' #", "commercial, please inform the author for authentication. Apr 2014 import", "i break # Delete the common tags for c in", "= \"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\" + self._token + \"&openid=\" + openid + \"&lang=\"", "raise ValueError, \"MsgType '%s' not valid \" % MsgType for", "menu format. The ```menu_format``` is of type string. But ```menu_format```", "him to other group. 'True' or 'False' if moved or", "the_xml ) # break ## Set the default tag value", "# if i == 'CreateTime': # setattr(self,\"_\"+i, str(int(time.time()))) # else:", "a_dict = json.loads(gotten) # means wrong appid or secret if", "root.find('Precision') is not None: self._Precision = root.find('Precision').text def initType(self, MsgType='text',", "'''Post the determained media file to the offical URL If", "global normalMapping normalMapping = { 'text':['Content'], 'image':['PicUrl', 'MediaId'], 'voice':['MediaId','Format'], 'video':['MediaId','ThumbMeiaId'],", "to determained user. This returns a Boolean value''' url =", "-*- # Copyright to <NAME>. # Any distrubites of this", "wechat public service. 
This function will return a dict if", "assign value to the object #getattr(self, \"_\"+k) = v ##", "'voice', 'video', 'location', 'link' After making a new instance of", "message type: '%s'\" % MsgType else: # pass the message", "try: if k == 'media_id': self.the_dict['video'][k] = v elif k", "function ```setElementByKey(touser='someone', msgtype='news', articles=packageArticle())``` ''' return [{\"title\": title, \"description\":description, \"url\":url,", "u'type': u'view', u'name': u'\\u770b\\u7535\\u5f71', u'sub_button': []}, {u'type': u'click', u'name': u'\\u5938\\u6211\\u5e05',", "a_dict['access_token'] # means wrong appid or secret else: return None", "Don't claim ''' ## For text message only if self._MsgType", "menu_format) request.get_method = lambda : 'POST' try: response = urllib2.urlopen(request)", "as the 'text' XML format the_tpl = globals()['tpl_' + self._MsgType].encode('utf-8').decode('utf-8')", "import urllib import urllib2 # For media posting from poster.encode", "token=''): '''Load the token before using other functions''' self._token =", "break ## Set the default tag value ### Get all", "message only elif self._MsgType == 'article': # To set attribute", "getAllgroups(self): ''' A dict will be returned. 
For more information", "root.find('EventKey').text if root.find('Ticket') is not None: self._Ticket = root.find('Ticket').text if", "is not None: self._Title = root.find('Title').text else: self._Title = ''", "False class MenuManager(object): '''To manage the bottom menu of the", "'MsgType' For example, $~ python >>> holder = ParsingContainer() >>>", "\"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\" + self._token + \"&type=\" + self._media_type register_openers() try: datagen,", "<Url><![CDATA[url]]></Url> </item> </Articles> </xml>''' # Positive response class PositiveRespondingContainer(object): '''Using", "self._MediaId = '' if root.find('Format') is not None: self._Format =", "else: self._Location_X = '' if root.find('Location_Y') is not None: self._Location_Y", "the object #getattr(self, \"_\"+k) = v ## assign/update value to", "a.read() a_dict = json.loads(gotten) # means wrong appid or secret", "For normal messages if self.type == 'normal': if msgType ==", "+ appid + '&secret=' + appsecret try: a = urllib2.urlopen(url)", "token request = urllib2.Request(url, menu_format) request.get_method = lambda : 'POST'", "# To dump the XML we need # the ```self.root```", "music, news The dumped is of dict format. 
We need", "```next_openid``` does not exist, official wechat server takes it as", "generate articles as #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v", "'' # For link message only elif self.MsgType == 'link':", "Exception as e: print e raise e ## For article", "}''' json_image = '''{ \"touser\":\"OPENID\", \"msgtype\":\"image\", \"image\": { \"media_id\":\"MEDIA_ID\" }", "5 ones in common if root.find('ToUserName') is not None: self._ToUserName", "the unuseful elements in eventMapping for k in eventMapping: for", "a_dict def deleteMenu(self): token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\" +", "<Description><![CDATA[description]]></Description> </Video> </xml>''' tpl_music = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[music]]></MsgType>", "u'key': u'<KEY>', u'sub_button': []}]}]}} >>> flag2 = mm.deleteMenu() >>> flag2", "kwargs.items(): if k == 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Image').find('MediaId').text", "# TODO # For event messages if self.type == 'recognition':", "'location', 'link' After making a new instance of the class,", "not None: self._CreateTime = root.find('CreateTime').text else: self._CreateTime = '1000000000' if", "not None: self._EventKey = root.find('EventKey').text if root.find('Ticket') is not None:", "message passed from wechat server Make the value variable The", "return False else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode']", "'video': for k, v in kwargs.items(): try: if k ==", "visit : http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF global tpl_text global tpl_image global tpl_voice global", "determained wechat message For more information please visit: 
http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF Usage:", "multipart_encode from poster.streaminghttp import register_openers class ParsingContainer(object): \"\"\"Parsing Wechat messages", "</Image> </xml>''' tpl_voice = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[voice]]></MsgType> <Voice>", "def loadToken(self, token = ''): self._token = token def uploadMedia(self,", "already # if list, then its the elment of the", "self.the_dict['text'][k] = v else: self.the_dict[k] = v except Exception as", "check whether the ```v``` is type of list the ```v```", "+ appsecret try: a = urllib2.urlopen(url) except Exception as e:", "\"articles\": [ { \"title\":\"Happy Day\", \"description\":\"Is Really A Happy Day\",", "kwargs.items(): try: if k == 'content': self.the_dict['text'][k] = v else:", "normal messages if self.type == 'normal': if msgType == 'text':", "## Load the template #for i in tpl_list: # if", "get the XML we need to reponse to wechat clients!", "{u'type': u'click', u'name': u'\\u7b2c\\u4e8c\\u94ae', u'key': u'V1001_TODAY_SINGER', u'sub_button': []}, {u'name': u'\\u7b2c\\u4e09\\u94ae',", "only elif self.MsgType == 'event': # It has to have", "e raise e # package article def packageArticle(title= \"default title\",", "= '''{ \"touser\":\"OPENID\", \"msgtype\":\"text\", \"text\": { \"content\":\"Hello World\" } }'''", "'{\"group\":{\"id\":%s,\"name\":\"%s\"}}' % (groupid, new_name) request = urllib2.Request(url,data=postData) try: response =", "a_dict['errcode'] == 0: return True else: return False else: return", "unique wechat public service. This function will return a dict", "we need to reponse to wechat clients! 
:) \"\"\" ##", "```self.the_dict``` self.__init__(MsgType) def setElementByKey(self, **kwargs): '''To set the ```self.the_dict``` according", "recognition messages if self.type == 'event': self.initType('event', incomingMessage) class RespondingContainer(object):", "json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): if a_dict['errcode'] == 0: return", "\"msgtype\":\"voice\", \"voice\": { \"media_id\":\"MEDIA_ID\" } }''' json_video = '''{ \"touser\":\"OPENID\",", "return None else: return a_dict['groupid'] def updateGroupName(self, groupid='', new_name=''): '''Update", "= root.find('Url').text else: self._Url = '' # For event message", "python >>> holder = ParsingContainer() >>> hasattr(holder, \"_Content\") >>> True", "'article': # To set attribute value of the XML special", "Make the value variable The 'incomingMessage' is of XML According", "v except Exception as e: print e raise e def", "tag 'Recognition' then, it is a voice recognition message if", "dumpXML(self): # To dump the XML we need # the", "url = \"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\" + self._token + \"&type=\" + self._media_type register_openers()", "as '' by default If not, a 'None' will be", "message mapping global recognitionMapping recognitionMapping = { 'voice':['MediaId','Format','Recognition'], } def", "= '{\"group\":{\"id\":%s,\"name\":\"%s\"}}' % (groupid, new_name) request = urllib2.Request(url,data=postData) try: response", "not, 'None' will be returned. ''' token = self._token url", "the incomingMessage has tag 'Event' then, it is a voice", "video, music, news The dumped is of dict format. 
We", "= '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[news]]></MsgType> <ArticleCount>2</ArticleCount> <Articles> <item> <Title><![CDATA[title1]]></Title>", "try: response = urllib2.urlopen(request) except Exception as e: print e", "= 'text' # Unique tages in all the mapping relationship", "== 'Articles': # TODO to generate articles as #print v", "} ] } }''' class SubscriberManager(object): '''To manage the subscriber", "group. 'True' or 'False' if moved or not. For more", "If not, a 'None' will be returned. For more information", "= token def loadToken(self, token = ''): self._token = token", "better to raise something here if the wechat remote server", "#raise e else: request = urllib2.Request(url,data=datagen,headers=headers) try: response = urllib2.urlopen(request)", "None else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode'] !=", "information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\" + self._token", "'%s' has no attribute/tag '%s'\" % (self._MsgType, k) ## For", "etree.fromstring(the_tpl) #print self.root.find(\"FromUserName\").text #print type(self.root.find(\"FromUserName\").text) def initType(self, MsgType='text'): tpl_list =", "raise something here if the wechat remote server is down", "#print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v elif k ==", "else: a_dict = json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): if a_dict['errcode']", "wrong appid or secret if a_dict.has_key('errcode'): return None else: return", "the key ```articles``` for the news message ''' \"articles\": [", "# break ## Set the default tag value ### Get", "more information, please 
visit: http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6''' if media_type not in ['image',", "0: return True else: return False def getMenu(self): '''Get the", "root.find('FromUserName') is not None: self._FromUserName = root.find('FromUserName').text else: self._FromUserName =", "k == 'content': self.the_dict['text'][k] = v else: self.the_dict[k] = v", "self._Event = root.find('Event').text else: self._Event = '' if root.find('EventKey') is", "k, v in kwargs.items(): try: # here we just check", "# # For those tags in-common of normal message global", "v in kwargs.items(): try: # here we just check whether", "value of the key 'articles' should be of type list\"", "['image', 'voice', 'video', 'thumb']: raise ValueError, \"Media type: '%s' not", "<ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[music]]></MsgType> <Music> <Title><![CDATA[TITLE]]></Title> <Description><![CDATA[DESCRIPTION]]></Description> <MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl> <HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl> <ThumbMediaId><![CDATA[media_id]]></ThumbMediaId>", "have a ```self._Event``` for event message certainly if root.find('Event') is", "json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return a_dict def getHisGroupID(self,", "== 'location': if root.find('Location_X') is not None: self._Location_X = root.find('Location_X').text", "\"_Content\") >>> True >>> holder.initType(MsgType='video') >>> hasattr(holder, \"_PicUrl\") >>> True", "try: if k == 'media_id': self.the_dict['voice'][k] = v else: self.the_dict[k]", "e: print e return None else: a_dict = json.loads(response.read()) if", "combined tag set of the event message 'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision' ], }", "By default set the ```self.the_dict``` as from the 'text' JSON", "self._MsgType ## 
For text message only if self._MsgType == 'text':", "clients \\n who sent messages to the public wechat service.", "need to json.loads(the_dict_object) if we want to pass the right", "in MsgType_list: if MsgType == i: self.MsgType = i break", "For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3 ''' token = self._token", "The ID is of type 'int'. If openid wrong or", "link message only elif self.MsgType == 'link': if root.find('Title') is", "tpl_music = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[music]]></MsgType> <Music> <Title><![CDATA[TITLE]]></Title> <Description><![CDATA[DESCRIPTION]]></Description>", "elif self.MsgType == 'link': if root.find('Title') is not None: self._Title", "e: print e return False else: a_dict = json.loads(response.read()) if", "not None: self._Ticket = root.find('Ticket').text if root.find('Latitude') is not None:", "elif msgType == 'image': self.initType('image', incomingMessage) elif msgType == 'voice':", "+= [str(child)] ### Attach 'tag' object to class to make", "groupid=''): '''Move him to other group. 'True' or 'False' if", "== 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v elif", "http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3 ''' token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\" + token", "= root.find('Label').text else: self._Label = '' # For link message", "url=\"http://www.baidu.com\", picurl=\"http://www.baidu.com/img/bdlogo.gif\"): '''This will return an article in a list", "a token and the menu format. The ```menu_format``` is of", "print e raise e #raise AttributeError, \"Message type '%s' has", "new group id of type 'int'. 
If not, will return", "self.the_dict['touser'] = v elif k == 'msgtype': self.the_dict['msgtype'] = 'news'", "response. ''' def __init__(self, media_type='image', token = ''): self._media_type =", "please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\" + self._token postData", "not in tpl_list: raise ValueError, \"Invalid responsing message MsgType '%s'\"", "message only elif self.MsgType == 'location': if root.find('Location_X') is not", "tpl_voice global tpl_video global tpl_music global tpl_news tpl_text = u'''<xml>", "here we just check whether the ```v``` is type of", "'MsgType'] # For normal message mapping global normalMapping normalMapping =", "<ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[image]]></MsgType> <Image> <MediaId><![CDATA[media_id]]></MediaId> </Image> </xml>''' tpl_voice =", "return False else: j = json.loads(response.read()) # The above works", "object #getattr(self, \"_\"+k) = v ## assign/update value to the", "tag): '''To get element from the tag ''' try: gotten", "<FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[你好]]></Content> </xml>''' tpl_image = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName>", "#print \"-----\" #print self._MsgType ## For text message only if", "# For recognition messages if self.type == 'event': self.initType('event', incomingMessage)", "True else: return False def getMenu(self): '''Get the menu format", "\"title\":\"TITLE\", \"description\":\"DESCRIPTION\" } }''' json_music = '''{ \"touser\":\"OPENID\", \"msgtype\":\"music\", \"music\":", "''' if k == 'articles': if type(v) == list: 
self.the_dict['news'][k]", "print e return None else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'):", "\"text\": { \"content\":\"Hello World\" } }''' json_image = '''{ \"touser\":\"OPENID\",", "etree.fromstring( the_xml ) # break ## Set the default tag", "'touser': self.the_dict['touser'] = v elif k == 'msgtype': self.the_dict['msgtype'] =", "url = \"https://api.weixin.qq.com/cgi-bin/groups/get?access_token=\" + self._token try: response = urllib2.urlopen(url) except", "has been assigned already return etree.tostring(self.root, encoding='utf-8',method='xml',pretty_print=True) # The down", "dude!</Content> </xml> >>> \"\"\" def __init__(self, MsgType='text'): self._MsgType = MsgType", "except: pass # Delete the unuseful elements in eventMapping for", "'{\"group\": {\"name\": \"%s\"} }' % name request = urllib2.Request(url,data=postData) request.get_method", "'' if root.find('Format') is not None: self._Format = root.find('Format').text else:", "raise ValueError, \"It has no message type: '%s'\" % MsgType", "'image': if root.find('PicUrl') is not None: self._PicUrl = root.find('PicUrl').text else:", "self.the_dict['voice'][k] = v else: self.the_dict[k] = v except Exception as", "If ```appid``` and ```appsecret``` are correct then a string 'token'", "if a_dict.has_key('access_token'): return a_dict['access_token'] # means wrong appid or secret", "declare the 'MsgType' For example, $~ python >>> holder =", "for k, v in kwargs.items(): try: if k == 'musicurl':", "will be return. 
If not , 'return None' ''' default_url", "return None else: return a_dict else: return a_dict def deleteMenu(self):", "'image', 'voice', 'video', 'music', 'news']: raise ValueError, \"It has no", "based from the ```incomingMessage``` variable if msgType in ['text', 'image',", "<item> <Title><![CDATA[title1]]></Title> <Description><![CDATA[description1]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> <item> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl>", "wechat service Usage: >>> mm = MenuManager() >>> mm.loadToken('something_the_api_token') >>>", "new XML object self.root.find(k).text = v except Exception as e:", "author. If for commercial, please inform the author for authentication.", "the class, need to declare the 'MsgType' For example, $~", "only elif self.MsgType == 'link': if root.find('Title') is not None:", "k == 'Description': self.root.find('Video').find('Description').text = v elif k == 'MusicUrl':", "= lambda : 'POST' try: response = urllib2.urlopen(request) except Exception", "for k, v in kwargs.items(): try: if k == 'media_id':", "kwargs.items(): if k == 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text", "this since it is of 'text' by default >>> #", "else: return gotten def digest(self, incomingMessage): '''To digest the XML", "information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8 ''' url = \"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\" + self._token", "in ['text', 'image', 'voice', 'video', 'location', 'link', 'event']: # Check", "remote server is down print e return None else: a_dict", "self._MediaId = '' if root.find('ThumbMediaId') is not None: self._ThumbMediaId =", "other group. 'True' or 'False' if moved or not. 
For", "MsgType_list: if MsgType == i: self.MsgType = i break #", "''' def __init__(self, MsgType='text'): self._MsgType = MsgType # By default", "values to ```self.MsgType and etc..``` Logistics as the followings: 1)", "text, image, voice, video, music, news The dumped is of", "k == 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Voice').find('MediaId').text = v", "'%s' not valid\" % media_type else: self._media_type = media_type url", "token def uploadMedia(self, media_type='image', media_path=''): '''Post the determained media file", "== 'news': for k, v in kwargs.items(): try: # here", "and get the response. ''' def __init__(self, media_type='image', token =", "self._Ticket = root.find('Ticket').text if root.find('Latitude') is not None: self._Latitude =", "a_dict.has_key('errcode'): return None else: return a_dict def getHisGroupID(self, openid=''): '''Get", "the functions below''' self._token = token def getSubscriberProfile(self, openid='', lang='zh_CN'):", "k == 'ThumbMediaId': self.root.find('Video').find('ThumbMediaId').text = v else: try: ## assign/update", "print e raise e # package article def packageArticle(title= \"default", "<Title><![CDATA[TITLE]]></Title> <Description><![CDATA[DESCRIPTION]]></Description> <MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl> <HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl> <ThumbMediaId><![CDATA[media_id]]></ThumbMediaId> </Music> </xml>''' tpl_news = '''<xml>", "'text': # To set attribute value to such as: 'self._FromUsername'", "from lxml import etree import time import json import urllib", "if MsgType == i: # self._MsgType = MsgType # ##", "message => voice' self._Recognition = root.find(\"Recognition\").text # For recognition messages", "openid='', groupid=''): '''Move him to other group. 
'True' or 'False'", "releasing method def __del__(self): pass #@property def getElementByTag(self, tag): '''To", "packaged in a list already # if list, then its", "from the tag ''' try: gotten = getattr(self, \"_\" +", "its 'text' >>> hasattr(holder, \"_PicUrl\") >>> False >>> hasattr(holder, \"_Content\")", "a_dict = json.loads(gotten) if a_dict.has_key('access_token'): return a_dict['access_token'] # means wrong", "== 'Description': self.root.find('Video').find('Description').text = v elif k == 'MusicUrl': self.root.find('Video').find('MusicUrl').text", "\"&openid=\" + openid + \"&lang=\" + lang try: a =", "a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return a_dict['groupid']", "</item> <item> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> </Articles> </xml>''' #", "self.the_dict['msgtype'] = 'news' except Exception as e: print e raise", "self._MsgType = MsgType # ## the the template # the_xml", "To set attribute value to such as: 'self._FromUsername' for k,", "the 'text' JSON format the_json_tpl = globals()['json_' + self._MsgType].encode('utf-8').decode('utf-8') self.the_dict", "openid + \"&lang=\" + lang try: a = urllib2.urlopen(url) except", "tag value ### Get all the tags #child_list = []", "incomingMessage if has # For text message only if self.MsgType", "if we want to pass the right reponse back '''", "of messages include: text, image, voice, video, music, news The", "or 'False' if moved or not. 
For more information please", "digest(self, incomingMessage): '''To digest the XML message passed from wechat", "hasattr(holder, \"_PicUrl\") >>> False >>> hasattr(holder, \"_Content\") >>> True >>>", "'text': for k, v in kwargs.items(): try: if k ==", "\"description\":\"MUSIC_DESCRIPTION\", \"musicurl\":\"MUSIC_URL\", \"hqmusicurl\":\"HQ_MUSIC_URL\", \"thumb_media_id\":\"THUMB_MEDIA_ID\" } }''' json_news = '''{ \"touser\":\"OPENID\",", "False if updated or not. For more information, please visit:", "try: if k == 'media_id': self.the_dict['image'][k] = v else: self.the_dict[k]", ">>> holder = ParsingContainer() >>> hasattr(holder, \"_Content\") >>> True >>>", "For text message only if self._MsgType == 'text': # To", "who sent messages to the public wechat service. Those 6", "+ next_openid try: response = urllib2.urlopen(url) except Exception as e:", "'voice', 'video', 'location', 'link', 'event']: # Check if the incomingMessage", "valid for wechat # For more information, please visit :", "'Description': self.root.find('Video').find('Description').text = v elif k == 'MusicUrl': self.root.find('Video').find('MusicUrl').text =", "== 'text': pass def initType(self, MsgType='text'): if MsgType not in", "\"picurl\":\"PIC_URL\" }, { \"title\":\"Happy Day\", \"description\":\"Is Really A Happy Day\",", "root.find('Description').text else: self._Description = '' if root.find('Url') is not None:", "type 'int'. If not, will return None. 
''' url =", "2) check subclass message type if \"Voice Recognition\", \"Event\", \"Normal\"", "= \"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\" + token request = urllib2.Request(url, menu_format) request.get_method =", "For image message only elif self.MsgType == 'image': if root.find('PicUrl')", "value to such as: 'self._FromUsername' for k, v in kwargs.items():", "by such as ```initType(MsgType='text')``` Notice: all the kwargs 's key", "a_dict = json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): if a_dict['errcode'] ==", "self._MsgType = MsgType # By default set root as the", "value to the object #getattr(self, \"_\"+k) = v ## assign/update", "key ```articles``` for the news message ''' \"articles\": [ {", "'voice': # To set attribute value of the XML special", "'media_id': self.the_dict['voice'][k] = v else: self.the_dict[k] = v except Exception", "set the ```self.the_dict``` as from the 'text' JSON format the_json_tpl", "is not None: self._Format = root.find('Format').text else: self._Format = ''", "be returned. 
For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8 ''' url", "= SubscriberManager() >>> sm.loadToken('<KEY>') >>> hisprofile = sm.getSubscriberProfile(openid='his_open_id', lang='zh_CN') '''", "ValueError, \"It has no message type: '%s'\" % MsgType else:", "'' # For voice message only elif self.MsgType == 'voice':", "self.MsgType == 'video': if root.find('MediaId') is not None: self._MediaId =", "thumb Post the file to the offical wechat server and", "= '' if root.find('MediaId') is not None: self._MediaId = root.find('MediaId').text", "all the kwargs 's key in this function should be", "elif root.find(\"Event\") is not None: self.type = 'event' # After", "def dumpXML(self): # To dump the XML we need #", "```appsecret``` are correct then a string 'token' will be return.", "= urllib2.urlopen(url) except Exception as e: print e return None", "# For video message only elif self.MsgType == 'video': if", "from poster.streaminghttp import register_openers class ParsingContainer(object): \"\"\"Parsing Wechat messages for", "# Copyright to <NAME>. # Any distrubites of this copy", "message only elif self.MsgType == 'image': if root.find('PicUrl') is not", "a_dict if a_dict.has_key('errcode'): if a_dict['errcode'] == 0: return True else:", "class RespondingContainer(object): \"\"\"Package XML to reponse to determained wechat message", "by default >>> # Notice we don't need to set", "## Set the default tag value ### Get all the", "elif k == 'description': self.the_dict['music'][k] = v elif k ==", "'hqmusicurl': self.the_dict['music'][k] = v elif k == 'thumb_media_id': self.the_dict['music'][k] =", "Get all the tags #child_list = [] #for child in", "default_url + 'appid=' + appid + '&secret=' + appsecret try:", "if root.find('Content') is not None: self._Content = root.find('Content').text else: self._Content", "messages to the public wechat service. 
Those 6 types of", "a_dict['groupid'] def updateGroupName(self, groupid='', new_name=''): '''Update the determained group id", "The 5 ones in common if root.find('ToUserName') is not None:", "tpl_out = rc.dumpXML() >>> tpl_out >>><xml> <ToUserName>the_wechat_client</ToUserName> <FromUserName>the_server</FromUserName> <CreateTime>1397808770</CreateTime> <MsgType>text</MsgType>", "c in commonTag: try: delattr(self, '_' + c) except: pass", "else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return", "'CreateTime' since it has been generated automatically :) >>> rc.setElementByTag(FromUserName='the_server',", "e ## For music message only elif self._MsgType == 'music':", "not None: self._Event = root.find('Event').text else: self._Event = '' if", "only elif self._MsgType == 'video': # To set attribute value", ">>> # Notice we don't need to set the 'CreateTime'", "token for cusmter service or others. If ```appid``` and ```appsecret```", "is not None: self._Description = root.find('Description').text else: self._Description = ''", "the token before using other functions''' self._token = token def", "'self._FromUsername' for k, v in kwargs.items(): try: ## assign value", "is of 'text' by default >>> # Notice we don't", "means wrong appid or secret if a_dict.has_key('errcode'): return None else:", "</Video> </xml>''' tpl_music = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[music]]></MsgType> <Music>", "groups, profile, location, list. 
Usage: >>> sm = SubscriberManager() >>>", "= json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return a_dict def", "'self._FromUserName' #for i in child_list: # if i == 'CreateTime':", "is not None: self._Latitude = root.find('Latitude').text if root.find('Longitude') is not", "self._Title = root.find('Title').text else: self._Title = '' if root.find('Description') is", "{ 'text':['Content'], 'image':['PicUrl', 'MediaId'], 'voice':['MediaId','Format'], 'video':['MediaId','ThumbMeiaId'], 'location':['Location_X','Location_Y','Scale', 'Label'], 'link':['Title','Description','Url'], }", "'music': for k, v in kwargs.items(): try: if k ==", "None: self._Precision = root.find('Precision').text def initType(self, MsgType='text', incomingMessage='<xml></xml>'): ''' To", "more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+ self._token postData", "raise e #raise AttributeError, \"Message type '%s' has no attribute/tag", "in kwargs.items(): try: if k == 'media_id': self.the_dict['image'][k] = v", "'incomingMessage' is of XML According to its content this will", "a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode'] != 0: return", "sm.loadToken('<KEY>') >>> hisprofile = sm.getSubscriberProfile(openid='his_open_id', lang='zh_CN') ''' def __init__(self, token=''):", "```dumpXML()``` to get the XML we need to reponse to", "tpl_image global tpl_voice global tpl_video global tpl_music global tpl_news tpl_text", "= '' if root.find('Description') is not None: self._Description = root.find('Description').text", "in kwargs.items(): try: if k == 'musicurl': self.the_dict['music'][k] = v", "'''This will return an article in a list which contains", "event message certainly if root.find('Event') is not 
None: self._Event =", "reload(sys) sys.setdefaultencoding('utf-8') from lxml import etree import time import json", "self._MediaId = root.find('MediaId').text else: self._MediaId = '' if root.find('Format') is", "To initialize message type ''' MsgType_list = ['text', 'image', 'voice',", "eventMapping = { # The list presents the combined tag", "j # to check if the message was accepted if", "[{\"title\": title, \"description\":description, \"url\":url, \"picurl\":picurl}] # to dump the the", "\"voice\": { \"media_id\":\"MEDIA_ID\" } }''' json_video = '''{ \"touser\":\"OPENID\", \"msgtype\":\"video\",", "\"\"\" # By default, MsgType is set as 'text' MsgType", "== 'CreateTime': # setattr(self,\"_\"+i, str(int(time.time()))) # else: # setattr(self,\"_\"+i, '')", "\"MsgType '%s' not valid \" % MsgType for i in", "custom service API to pass 6 types of messages to", "# Any distrubites of this copy should inform its author.", "incomingMessage) elif msgType == 'voice': self.initType('voice', incomingMessage) elif msgType ==", "MsgType_list: raise ValueError, \"MsgType '%s' not valid \" % MsgType", "True else: return False else: return False def moveHimToGroup(self, openid='',", "```self.the_dict``` as from the 'text' JSON format the_json_tpl = globals()['json_'", "initialize message type ''' MsgType_list = ['text', 'image', 'voice', 'video',", "not None: self._Format = root.find('Format').text else: self._Format = '' #", "# For link message only elif self.MsgType == 'link': if", "lang try: a = urllib2.urlopen(url) except Exception as e: print", "If there be, then a dict would be returned. If", "appsecret=''): '''Get wechat API token for cusmter service or others.", "urllib2.urlopen(url) except Exception as e: print e return False else:", "be packaged in a list already # if list, then", "a 'None' will be returned. 
For more information please visit:", "globals()['tpl_' + self._MsgType].encode('utf-8').decode('utf-8') self.root = etree.fromstring(the_tpl) #print self.root.find(\"FromUserName\").text #print type(self.root.find(\"FromUserName\").text)", "for k, v in kwargs.items(): if k == 'MediaId': #print", "of the key ```articles``` for the news message ''' \"articles\":", "the XML message passed from wechat server Make the value", "a determained group name. If created, then it will return", "valid\" % media_type else: self._media_type = media_type url = \"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\"", "'' if root.find('FromUserName') is not None: self._FromUserName = root.find('FromUserName').text else:", "video, thumb Post the file to the offical wechat server", "as e: print e raise e ## For article message", "\"title\":\"Happy Day\", \"description\":\"Is Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" },", "if k == 'articles': if type(v) == list: self.the_dict['news'][k] =", "str(int(time.time())) #print \"-----\" #print self._MsgType ## For text message only", "+ self._MsgType].encode('utf-8').decode('utf-8') self.the_dict = json.loads(the_json_tpl) if MsgType == 'text': pass", "k, v in kwargs.items(): try: if k == 'media_id': self.the_dict['video'][k]", "token=''): '''Firstly load the access token, then use the functions", "not None: self._Precision = root.find('Precision').text def initType(self, MsgType='text', incomingMessage='<xml></xml>'): '''", "else: self._Url = '' # For event message only elif", "k == 'MusicUrl': self.root.find('Video').find('MusicUrl').text = v elif k == 'HQMusicUrl':", "getAPIToken(appid='', appsecret=''): '''Get wechat API token for cusmter service or", "not exist, official wechat server takes it as '' by", "getattr(self, \"_\" + tag) except: return None ##raise ValueError #tmp", "self.root.find('Video').find('HQMusicUrl').text = v elif k == 'ThumbMediaId': self.root.find('Video').find('ThumbMediaId').text = 
v", "For music message only elif self._MsgType == 'music': for k,", "etree.tostring(self.root, encoding='utf-8',method='xml',pretty_print=True) # The down blow are the templates of", "the message type by such as ```initType(MsgType='text')``` Notice: all the", "'recognition' # Check if the incomingMessage has tag 'Event' then,", "if k == 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text =", "the right reponse back ''' def __init__(self, MsgType='text'): self._MsgType =", "None: self._Url = root.find('Url').text else: self._Url = '' # For", "print e raise e ## For image message only elif", "}''' json_video = '''{ \"touser\":\"OPENID\", \"msgtype\":\"video\", \"video\": { \"media_id\":\"MEDIA_ID\", \"title\":\"TITLE\",", "information: please visit, http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF''' url = \"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\" + self._token +", "returns a Boolean value''' url = \"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\" + token request", "But ```menu_format``` is constructed from a JSON. 
For more information", "visit: http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6''' if media_type not in ['image', 'voice', 'video', 'thumb']:", "= json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode'] == 0: return True", "k == 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v", "self._MediaId = root.find('MediaId').text else: self._MediaId = '' # For voice", "\"content\":\"Hello World\" } }''' json_image = '''{ \"touser\":\"OPENID\", \"msgtype\":\"image\", \"image\":", "message type by such as ```initType(MsgType='text')``` Notice: all the kwargs", "u'\\u641c\\u641c', u'sub_button': []}, {u'url': u'http://v.qq.com/', u'type': u'view', u'name': u'\\u770b\\u7535\\u5f71', u'sub_button':", "wechat service. Those 6 types of messages include: text, image,", "self.the_dict[k] = v except Exception as e: print e raise", "== 'text': # To set attribute value to such as:", "appsecret try: a = urllib2.urlopen(url) except Exception as e: print", "list which contains a dict. While construcing the JSON dumped,", "try: if k == 'content': self.the_dict['text'][k] = v else: self.the_dict[k]", ">>> mm.getMenu() >>> # nothing gotten: it means no menu", "the response. 
''' def __init__(self, media_type='image', token = ''): self._media_type", "self.the_dict['video'][k] = v elif k == 'description': self.the_dict['video'][k] = v", "need # the ```self.root``` has been assigned already return etree.tostring(self.root,", "```self.the_dict``` according to the message type by such as ```initType(MsgType='text')```", "nothing gotten: it means no menu at all ''' def", "for e in eventMapping[k]: try: delattr(self, '_' + e) except:", "more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3 ''' token = self._token url", "\"image\": { \"media_id\":\"MEDIA_ID\" } }''' json_voice = '''{ \"touser\":\"OPENID\", \"msgtype\":\"voice\",", "articles as #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v elif", "if root.find('MsgType') is not None: self._MsgType = root.find('MsgType').text else: self._MsgType", "'' if root.find('Location_Y') is not None: self._Location_Y = root.find('Location_Y').text else:", "Usage: >>> rc = RespondingContainer() >>> rc.initType('text') # Or we", "self._MsgId = '' # Store the XML incomingMessage if has", "self.initType('text', incomingMessage) elif msgType == 'image': self.initType('image', incomingMessage) elif msgType", "raise e ## For music message only elif self._MsgType ==", "'Title': self.root.find('Video').find('Title').text = v elif k == 'Description': self.root.find('Video').find('Description').text =", "\"&next_openid=\" + next_openid try: response = urllib2.urlopen(url) except Exception as", "#for child in self.root.getchildren(): # child_list += [str(child)] ### Attach", "== 'text': for k, v in kwargs.items(): try: if k", "uploadMedia(self, media_type='image', media_path=''): '''Post the determained media file to the", "of the XML special for image for k, v in", "ones in common if root.find('ToUserName') is not None: self._ToUserName =", "'''To manage the subscriber 
groups, profile, location, list. Usage: >>>", "message 'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision' ], } # For recognition message mapping global", "self._Latitude = root.find('Latitude').text if root.find('Longitude') is not None: self._Longitude =", "'{\"openid\":\"%s\"}' % openid request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request)", "+ '&secret=' + appsecret try: a = urllib2.urlopen(url) except Exception", "self._Event = '' if root.find('EventKey') is not None: self._EventKey =", "and ```next_openid``` are valid, then a dict will be returned.", "e return None else: a_dict = json.loads(response.read()) #print a_dict if", "global tpl_video global tpl_music global tpl_news tpl_text = u'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName>", "token def loadToken(self, token=''): '''Firstly load the access token, then", "define that. Don't claim ''' ## For text message only", "has been generated automatically :) >>> rc.setElementByTag(FromUserName='the_server', ToUserName='the_wechat_client',Content='Hello dude!') >>>", "has tag 'Event' then, it is a voice event message", "# self.root = etree.fromstring( the_xml ) # break ## Set", "Unique tages in all the mapping relationship # # For", "token, then use the functions below''' self._token = token def", "} # For recognition message mapping global recognitionMapping recognitionMapping =", "</xml>''' tpl_music = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[music]]></MsgType> <Music> <Title><![CDATA[TITLE]]></Title>", "ParsingContainer(object): \"\"\"Parsing Wechat messages for whose types are of :", "'event': # It has to have a ```self._Event``` for event", "unuseful elements in normalMapping for k in normalMapping: if k", "above works #print j # to check if the message", "XML format the_tpl = globals()['tpl_' + 
self._MsgType].encode('utf-8').decode('utf-8') self.root = etree.fromstring(the_tpl)", "print e return False else: j = json.loads(response.read()) # The", "'link': self.initType('link', incomingMessage) elif msgType == 'image': self.initType('image', incomingMessage) #", "= '' # For image message only elif self.MsgType ==", "#print etree.tostring(self.root) self.root.find('Image').find('MediaId').text = v else: try: ## assign/update value", "if msgType == 'text': self.initType('text', incomingMessage) elif msgType == 'image':", "== 'article': # To set attribute value of the XML", "it as '' by default If not, a 'None' will", "not None: self._MediaId = root.find('MediaId').text else: self._MediaId = '' #", "be returned. If not, 'None' will be returned. ''' token", "return an article in a list which contains a dict.", "a voice event message elif root.find(\"Event\") is not None: self.type", "should inform its author. If for commercial, please inform the", "'' # For event message only elif self.MsgType == 'event':", "is not None: self._PicUrl = root.find('PicUrl').text else: self._PicUrl = ''", "will be returned. For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6''' if", "root = etree.fromstring(incomingMessage) msgType = root.find(\"MsgType\").text # Get message type", "to have a ```self._Event``` for event message certainly if root.find('Event')", "self._Recognition = root.find(\"Recognition\").text # For recognition messages if self.type ==", "be returned. 
''' token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/get?access_token=\"+ token", "then, it is a voice recognition message if root.find(\"Recognition\") is", "= MsgType # ## the the template # the_xml =", "None ##raise ValueError #tmp = \"Instance has no attribute _%s\"", "for k, v in kwargs.items(): try: # here we just", "else: return a_dict def getHisGroupID(self, openid=''): '''Get a subscriber's group", "root.find('Longitude') is not None: self._Longitude = root.find('Longitude').text if root.find('Precision') is", "import etree import time import json import urllib import urllib2", "common tags for c in commonTag: try: delattr(self, '_' +", "# -*- coding: utf-8 -*- # Copyright to <NAME>. #", "if root.find('PicUrl') is not None: self._PicUrl = root.find('PicUrl').text else: self._PicUrl", "For event message only elif self.MsgType == 'event': # It", "for k in eventMapping: for e in eventMapping[k]: try: delattr(self,", "\"-----\" #print self._MsgType ## For text message only if self._MsgType", "['text', 'image', 'voice', 'video', 'location', 'link', 'event'] if MsgType not", "example, $~ python >>> holder = ParsingContainer() >>> hasattr(holder, \"_Content\")", "e return False else: a_dict = json.loads(response.read()) #print a_dict if", "image message only elif self._MsgType == 'image': for k, v", "wechat define that. 
Don't claim ''' ## For text message", "'ThumbMediaId': self.root.find('Video').find('ThumbMediaId').text = v else: try: ## assign/update value to", "import json import urllib import urllib2 # For media posting", "a ```self._Event``` for event message certainly if root.find('Event') is not", "the value variable The 'incomingMessage' is of XML According to", ": 'text', 'image', 'voice', 'video', 'location', 'link' After making a", "None: self._Ticket = root.find('Ticket').text if root.find('Latitude') is not None: self._Latitude", "AttributeError, \"Message type '%s' has no attribute/tag '%s'\" % (self._MsgType,", "[str(child)] ### Attach 'tag' object to class to make something", "mapping relationship # # For those tags in-common of normal", "'''{ \"touser\":\"OPENID\", \"msgtype\":\"news\", \"news\":{ \"articles\": [ { \"title\":\"Happy Day\", \"description\":\"Is", "is not None: self._Precision = root.find('Precision').text def initType(self, MsgType='text', incomingMessage='<xml></xml>'):", "u'\\u5938\\u6211\\u5e05', u'key': u'<KEY>', u'sub_button': []}]}]}} >>> flag2 = mm.deleteMenu() >>>", "a Boolean value''' url = \"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\" + token request =", "return a_dict else: return a_dict def deleteMenu(self): token = self._token", "'link' After making a new instance of the class, need", "messages for whose types are of : 'text', 'image', 'voice',", "a list already # if list, then its the elment", "normal message global commonTag commonTag = ['ToUserName', 'FromUserName', 'CreateTime', 'MsgId',", "root.find('Title').text else: self._Title = '' if root.find('Description') is not None:", "postData = '{\"group\": {\"name\": \"%s\"} }' % name request =", "except Exception as e: # its better to raise something", "to dump the the dict as for later on JSON", "= root.find('MsgId').text else: self._MsgId = '' # Store the XML", "service. 
Those 6 types of messages include: text, image, voice,", "to reponse to determained wechat message For more information please", "kwargs.items(): try: if k == 'media_id': self.the_dict['video'][k] = v elif", "MsgType='text', incomingMessage='<xml></xml>'): ''' To initialize message type ''' MsgType_list =", "'text': pass def initType(self, MsgType='text'): if MsgType not in ['text',", "# Check if the incomingMessage has tag 'Event' then, it", "or False if updated or not. For more information, please", "self.type = 'recognition' # Check if the incomingMessage has tag", "returned. ''' token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/get?access_token=\"+ token try:", "'event']: # Check if the incomingMessage has tag 'Recognition' then,", "= urllib2.Request(url,data=datagen,headers=headers) try: response = urllib2.urlopen(request) except Exception as e:", "response class PositiveRespondingContainer(object): '''Using wechat custom service API to pass", "if self._MsgType == 'text': # To set attribute value to", "self.the_dict['music'][k] = v elif k == 'description': self.the_dict['music'][k] = v", "'voice': for k, v in kwargs.items(): try: if k ==", "parent message type :\"MsgType\" 2) check subclass message type if", "dumpDict(self): return self.the_dict json_text = '''{ \"touser\":\"OPENID\", \"msgtype\":\"text\", \"text\": {", "elements in eventMapping for k in eventMapping: for e in", "None else: gotten = a.read() a_dict = json.loads(gotten) # means", "'appid=' + appid + '&secret=' + appsecret try: a =", "self._ToUserName = '' if root.find('FromUserName') is not None: self._FromUserName =", "root.find('Event') is not None: self._Event = root.find('Event').text else: self._Event =", "0: return True else: return False else: return False class", "= self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\" + token request = urllib2.Request(url,", "# Positive response class PositiveRespondingContainer(object): '''Using wechat 
custom service API", "to check if the message was accepted if j['errcode'] ==", "else: return a_dict def deleteMenu(self): token = self._token url =", "# else: # setattr(self,\"_\"+i, '') self.__init__(MsgType) #def setElementByTag(self, tag): def", "{ 'voice':['MediaId','Format','Recognition'], } def __init__(self, incomingMessage='<xml></xml>'): # pre-set some common", "return False def moveHimToGroup(self, openid='', groupid=''): '''Move him to other", "## For video message only elif self._MsgType == 'video': for", "it needs a token and the menu format. The ```menu_format```", "there be, then a dict would be returned. If not,", "'' by default If not, a 'None' will be returned.", "e raise e ## For voice message only elif self._MsgType", "It has to have a ```self._Event``` for event message certainly", "new_name. 'True' or False if updated or not. For more", "\"Voice Recognition\", \"Event\", \"Normal\" 3) check children class message type", "will return None. For the parameter 'zh_CN', there are others:", "voice' self._Recognition = root.find(\"Recognition\").text # For recognition messages if self.type", "child_list: # if i == 'CreateTime': # setattr(self,\"_\"+i, str(int(time.time()))) #", "from poster.encode import multipart_encode from poster.streaminghttp import register_openers class ParsingContainer(object):", "the default tag value ### Get all the tags #child_list", "try: ## assign value to the object #getattr(self, \"_\"+k) =", "= etree.fromstring(incomingMessage) # The 5 ones in common if root.find('ToUserName')", "holder = ParsingContainer() >>> hasattr(holder, \"_Content\") >>> True >>> holder.initType(MsgType='video')", "u'http://v.qq.com/', u'type': u'view', u'name': u'\\u770b\\u7535\\u5f71', u'sub_button': []}, {u'type': u'click', u'name':", "u'sub_button': []}, {u'type': u'click', u'name': u'\\u7b2c\\u4e8c\\u94ae', u'key': u'V1001_TODAY_SINGER', u'sub_button': []},", "then 'normal' message else: self.type = 'normal' # For normal", 
"\"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\" + token try: response = urllib2.urlopen(url) except Exception as", "server takes it as '' by default If not, a", "'link':['Title','Description','Url'], } # For event message mapping global eventMapping eventMapping", "root.find('Label').text else: self._Label = '' # For link message only", "</Music> </xml>''' tpl_news = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[news]]></MsgType> <ArticleCount>2</ArticleCount>", "def __init__(self, media_type='image', token = ''): self._media_type = media_type self._token", "<Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> </Video> </xml>''' tpl_music = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime>", "''' try: gotten = getattr(self, \"_\" + tag) except: return", "response = urllib2.urlopen(request) except Exception as e: print e return", "kwargs.items(): try: # here we just check whether the ```v```", "voice message only elif self._MsgType == 'voice': # To set", "else: return False else: return False def moveHimToGroup(self, openid='', groupid=''):", "tag set of the event message 'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision' ], } #", "new instance of the class, need to declare the 'MsgType'", "to make something as : 'self._FromUserName' #for i in child_list:", "\"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] ''' if k == 'articles': if", "since it is just of this more than that of", "+ self._token + \"&openid=\" + openid + \"&lang=\" + lang", "type if \"Voice Recognition\", \"Event\", \"Normal\" 3) check children class", "None: self._ToUserName = root.find('ToUserName').text else: self._ToUserName = '' if root.find('FromUserName')", "Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] ''' if k == 'articles':", "= 
urllib2.Request(url,data=postData) request.get_method = lambda : 'POST' try: response =", "= 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&' url = default_url + 'appid=' + appid +", "self._Label = '' # For link message only elif self.MsgType", "visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF Usage: >>> rc = RespondingContainer() >>> rc.initType('text') #", "e return None else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): if", "format the_tpl = globals()['tpl_' + self._MsgType].encode('utf-8').decode('utf-8') self.root = etree.fromstring(the_tpl) #print", "self.root.find('Voice').find('MediaId').text = v else: try: ## assign/update value to the", "root.find('MsgType').text else: self._MsgType = '' if root.find('MsgId') is not None:", "API. If there be, then a dict would be returned.", "self._Scale = root.find('Scale').text else: self._Scale = '' if root.find('Label') is", "to pass 6 types of messages to those wechat clients", "distrubites of this copy should inform its author. If for", "returned. 
For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6''' if media_type not", "url = \"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\" + self._token + \"&next_openid=\" + next_openid try:", "'Label'], 'link':['Title','Description','Url'], } # For event message mapping global eventMapping", "'' if root.find('MediaId') is not None: self._MediaId = root.find('MediaId').text else:", "\"_\" + tag) except: return None ##raise ValueError #tmp =", "wechat remote server is down print e return None else:", "def createMenu(self, menu_format=''): '''Create menu, it needs a token and", "== 'image': if root.find('PicUrl') is not None: self._PicUrl = root.find('PicUrl').text", "else: return a_dict def getAPIToken(appid='', appsecret=''): '''Get wechat API token", "= ''): self._token = token def uploadMedia(self, media_type='image', media_path=''): '''Post", "type: '%s' not valid\" % media_type else: self._media_type = media_type", "the kwargs 's key in this function should be of", "''' token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\" + token request", "self._token postData = '{\"openid\":\"%s\",\"to_groupid\":%s}' % (openid, groupid) request = urllib2.Request(url,data=postData)", "that. 
Don't claim ''' ## For text message only if", "'text' XML format the_tpl = globals()['tpl_' + self._MsgType].encode('utf-8').decode('utf-8') self.root =", "video message only elif self._MsgType == 'video': for k, v", "e ## For article message only elif self._MsgType == 'article':", "'voice', 'video', 'thumb']: raise ValueError, \"Media type: '%s' not valid\"", "None: self.type = 'event' # After all then 'normal' message", "message only elif self._MsgType == 'video': for k, v in", "root.find('MsgId').text else: self._MsgId = '' # Store the XML incomingMessage", "the wechat service Usage: >>> mm = MenuManager() >>> mm.loadToken('something_the_api_token')", "For news message only elif self._MsgType == 'news': for k,", "# It has to have a ```self._Event``` for event message", "method def __del__(self): pass #@property def getElementByTag(self, tag): '''To get", "= json.loads(gotten) # means wrong appid or secret if a_dict.has_key('errcode'):", "# The list presents the combined tag set of the", "only if self.MsgType == 'text': if root.find('Content') is not None:", "If not, 'None' will be returned. ''' token = self._token", "# The down blow are the templates of all the", "subscriber's group ID. The ID is of type 'int'. If", "service or others. 
If ```appid``` and ```appsecret``` are correct then", "'description': self.the_dict['music'][k] = v elif k == 'hqmusicurl': self.the_dict['music'][k] =", "ParsingContainer() >>> hasattr(holder, \"_Content\") >>> True >>> holder.initType(MsgType='video') >>> hasattr(holder,", "''' root = etree.fromstring(incomingMessage) msgType = root.find(\"MsgType\").text # Get message", "self._PicUrl = root.find('PicUrl').text else: self._PicUrl = '' if root.find('MediaId') is", "self._token = token def uploadMedia(self, media_type='image', media_path=''): '''Post the determained", "a voice recognition message if root.find(\"Recognition\") is not None: self.type", "'s key in this function should be of lower case.", "raise ValueError, \"Media type: '%s' not valid\" % media_type else:", "''): self._media_type = media_type self._token = token def loadToken(self, token", "e: #print e return None #raise e else: request =", "v elif k == 'HQMusicUrl': self.root.find('Video').find('HQMusicUrl').text = v elif k", "are four types of media suppored by wechat. image, voice,", "''' url = \"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\" + self._token postData = '{\"group\":{\"id\":%s,\"name\":\"%s\"}}' %", "import time import json import urllib import urllib2 # For", "dict will be returned. 
For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E6.89.80.E6.9C.89.E5.88.86.E7.BB.84", "v except Exception as e: print e raise e #raise", "http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF Usage: >>> rc = RespondingContainer() >>> rc.initType('text') # Or", "common if root.find('ToUserName') is not None: self._ToUserName = root.find('ToUserName').text else:", "type ''' root = etree.fromstring(incomingMessage) msgType = root.find(\"MsgType\").text # Get", "as e: print e raise e #raise AttributeError, \"Message type", "media_type='image', token = ''): self._media_type = media_type self._token = token", "means wrong appid or secret else: return None def postMessage2API(token='',messageString=''):", "from wechat server Make the value variable The 'incomingMessage' is", "more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF Usage: >>> rc = RespondingContainer()", "'musicurl': self.the_dict['music'][k] = v elif k == 'title': self.the_dict['music'][k] =", "self._token + \"&next_openid=\" + next_openid try: response = urllib2.urlopen(url) except", "media_type not in ['image', 'voice', 'video', 'thumb']: raise ValueError, \"Media", ">>> menu_got = mm.getMenu() >>> menu_got {u'menu': {u'button': [{u'type': u'click',", "'news' except Exception as e: print e raise e #", "<MediaId><![CDATA[media_id]]></MediaId> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> </Video> </xml>''' tpl_music = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName>", "a dict if ```token``` and ```open_id``` are valid. 
If not", "\"Event\", \"Normal\" 3) check children class message type ''' root", "takes it as '' by default If not, a 'None'", "the new XML object self.root.find(k).text = v except Exception as", "1) check parent message type :\"MsgType\" 2) check subclass message", "as e: print e return False else: a_dict = json.loads(response.read())", "= '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[voice]]></MsgType> <Voice> <MediaId><![CDATA[media_id]]></MediaId> </Voice> </xml>'''", "time import json import urllib import urllib2 # For media", "\"touser\":\"OPENID\", \"msgtype\":\"music\", \"music\": { \"title\":\"MUSIC_TITLE\", \"description\":\"MUSIC_DESCRIPTION\", \"musicurl\":\"MUSIC_URL\", \"hqmusicurl\":\"HQ_MUSIC_URL\", \"thumb_media_id\":\"THUMB_MEDIA_ID\" }", "self._Location_X = root.find('Location_X').text else: self._Location_X = '' if root.find('Location_Y') is", "globals()['json_' + self._MsgType].encode('utf-8').decode('utf-8') self.the_dict = json.loads(the_json_tpl) if MsgType == 'text':", "= media_type self._token = token def loadToken(self, token = ''):", "}' % name request = urllib2.Request(url,data=postData) request.get_method = lambda :", "+ self._token postData = '{\"openid\":\"%s\",\"to_groupid\":%s}' % (openid, groupid) request =", "+ token try: response = urllib2.urlopen(url) except Exception as e:", "http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF''' url = \"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\" + self._token + \"&openid=\" + openid", "a_dict def getAPIToken(appid='', appsecret=''): '''Get wechat API token for cusmter", "etree.tostring(self.root) self.root.find('Voice').find('MediaId').text = v else: try: ## assign/update value to", "message mapping global eventMapping eventMapping = { # The list", "elif msgType == 'image': self.initType('image', 
incomingMessage) # TODO # For", "self.root.find(\"FromUserName\").text #print type(self.root.find(\"FromUserName\").text) def initType(self, MsgType='text'): tpl_list = ['text', 'image',", "if ```token``` and ```open_id``` are valid. If not exists or", "a_dict else: return a_dict def deleteMenu(self): token = self._token url", "root.find('Format').text else: self._Format = '' # For video message only", "\"_PicUrl\") >>> False >>> hasattr(holder, \"_Content\") >>> True >>> holder.getElementByTag('Content')", "valid, then a_dict will be returned. If not, 'None' will", "def setElementByTag(self, **kwargs): \"\"\" To package XML message into an", "== 'location': self.initType('location', incomingMessage) elif msgType == 'link': self.initType('link', incomingMessage)", "kwargs.items(): if k == 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Voice').find('MediaId').text", "messages include: text, image, voice, video, music, news The dumped", "try: if k == 'musicurl': self.the_dict['music'][k] = v elif k", "except: pass self.__init__(incomingMessage) # releasing method def __del__(self): pass #@property", "[ { \"title\":\"Happy Day\", \"description\":\"Is Really A Happy Day\", \"url\":\"URL\",", "## For image message only elif self._MsgType == 'image': for", "token def loadToken(self, token=''): '''Load the token before using other", "\"\"\" To package XML message into an object Usage: >>>", "are others: 'zh_TW, en' For more information: please visit, http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF'''", "}''' json_music = '''{ \"touser\":\"OPENID\", \"msgtype\":\"music\", \"music\": { \"title\":\"MUSIC_TITLE\", \"description\":\"MUSIC_DESCRIPTION\",", "open(media_path,\"rb\")}) except Exception as e: #print e return None #raise", "m) except: pass # Delete the unuseful elements in eventMapping", "u'sub_button': [{u'url': u'http://www.soso.com/', u'type': u'view', u'name': 
u'\\u641c\\u641c', u'sub_button': []}, {u'url':", "to have ```self.the_dict``` self.__init__(MsgType) def setElementByKey(self, **kwargs): '''To set the", "e in eventMapping[k]: try: delattr(self, '_' + e) except: pass", "== 0: return True else: return False def getMenu(self): '''Get", "= '{\"openid\":\"%s\",\"to_groupid\":%s}' % (openid, groupid) request = urllib2.Request(url,data=postData) try: response", "e: print e raise e ## For voice message only", "u'name': u'\\u770b\\u7535\\u5f71', u'sub_button': []}, {u'type': u'click', u'name': u'\\u5938\\u6211\\u5e05', u'key': u'<KEY>',", "u'key': u'V1001_TODAY_SINGER', u'sub_button': []}, {u'name': u'\\u7b2c\\u4e09\\u94ae', u'sub_button': [{u'url': u'http://www.soso.com/', u'type':", "# By default set root as the 'text' XML format", "= 'event' # After all then 'normal' message else: self.type", "the key 'articles' should be of type list\" elif k", "## For music message only elif self._MsgType == 'music': for", "making a new instance of the class, need to declare", "'''{ \"touser\":\"OPENID\", \"msgtype\":\"image\", \"image\": { \"media_id\":\"MEDIA_ID\" } }''' json_voice =", "k) ## For image message only elif self._MsgType == 'image':", "'None' will be returned. 
For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8", "self._Precision = root.find('Precision').text def initType(self, MsgType='text', incomingMessage='<xml></xml>'): ''' To initialize", "information, please visit : http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF global tpl_text global tpl_image global", "**kwargs): '''To set the ```self.the_dict``` according to the message type", "is not None: self._MsgType = root.find('MsgType').text else: self._MsgType = ''", "is not None: self._Location_Y = root.find('Location_Y').text else: self._Location_Y = ''", "request = urllib2.Request(url,data=datagen,headers=headers) try: response = urllib2.urlopen(request) except Exception as", "''' def __init__(self, token=''): self._token = token def loadToken(self, token=''):", "'video', 'music', 'news'] if MsgType not in tpl_list: raise ValueError,", "'normal': if msgType == 'text': self.initType('text', incomingMessage) elif msgType ==", "If openid wrong or token invalid, 'None' will be returned.", "string 'token' will be return. 
If not , 'return None'", "is not None: self._FromUserName = root.find('FromUserName').text else: self._FromUserName = ''", "try: response = urllib2.urlopen(url) except Exception as e: # its", "a new instance of the class, need to declare the", "for image for k, v in kwargs.items(): if k ==", "image, voice, video, thumb Post the file to the offical", "For event messages if self.type == 'recognition': self.initType('voice', incomingMessage) #", "## For video message only elif self._MsgType == 'video': #", "RespondingContainer() >>> rc.initType('text') # Or we can ellipsis this since", "else: self._Format = '' # For video message only elif", "as: 'self._FromUsername' for k, v in kwargs.items(): try: ## assign", "type list\" elif k == 'touser': self.the_dict['touser'] = v elif", "copy should inform its author. If for commercial, please inform", "= ['ToUserName', 'FromUserName', 'CreateTime', 'MsgId', 'MsgType'] # For normal message", "'text': if root.find('Content') is not None: self._Content = root.find('Content').text else:", "k == 'Title': self.root.find('Video').find('Title').text = v elif k == 'Description':", "since it has been generated automatically :) >>> rc.setElementByTag(FromUserName='the_server', ToUserName='the_wechat_client',Content='Hello", ">>> flag True >>> menu_got = mm.getMenu() >>> menu_got {u'menu':", "== 'voice': # To set attribute value of the XML", "For video message only elif self._MsgType == 'video': for k,", "all then 'normal' message else: self.type = 'normal' # For", "k == 'ArticleCount': self.root.find(k).text = v if k == 'Articles':", "dumped, This is used with the function ```setElementByKey(touser='someone', msgtype='news', articles=packageArticle())```", "of this copy should inform its author. 
If for commercial,", "the tags #child_list = [] #for child in self.root.getchildren(): #", "'video', 'location', 'link', 'event'] if MsgType not in MsgType_list: raise", "register_openers class ParsingContainer(object): \"\"\"Parsing Wechat messages for whose types are", "is not None: self._ThumbMediaId = root.find('ThumbMediaId').text else: self._ThumbMediaId = ''", "+ e) except: pass self.__init__(incomingMessage) # releasing method def __del__(self):", "incomingMessage) # TODO # For event messages if self.type ==", "''' \"articles\": [ { \"title\":\"Happy Day\", \"description\":\"Is Really A Happy", "on JSON loading def dumpDict(self): return self.the_dict json_text = '''{", "'text':['Content'], 'image':['PicUrl', 'MediaId'], 'voice':['MediaId','Format'], 'video':['MediaId','ThumbMeiaId'], 'location':['Location_X','Location_Y','Scale', 'Label'], 'link':['Title','Description','Url'], } #", "voice message only elif self.MsgType == 'voice': if root.find('MediaId') is", "'content': self.the_dict['text'][k] = v else: self.the_dict[k] = v except Exception", "please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+ self._token postData = '{\"openid\":\"%s\"}'", "'''Using wechat custom service API to pass 6 types of", "please visit, http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF''' url = \"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\" + self._token + \"&openid=\"", "the unuseful elements in normalMapping for k in normalMapping: if", "raise ValueError, \"The value of the key 'articles' should be", "urllib2.Request(url, menu_format) request.get_method = lambda : 'POST' try: response =", "Usage: >>> sm = SubscriberManager() >>> sm.loadToken('<KEY>') >>> hisprofile =", "'video': # To set 
attribute value of the XML special", "= u'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[你好]]></Content> </xml>''' tpl_image =", "'articles' should be of type list\" elif k == 'touser':", "is of XML According to its content this will assgin", "= v else: try: ## assign/update value to the new", "# For event messages if self.type == 'recognition': self.initType('voice', incomingMessage)", "or not valid will return None. For the parameter 'zh_CN',", "+ \"&type=\" + self._media_type register_openers() try: datagen, headers = multipart_encode({\"image1\":", "if a_dict.has_key('errcode'): return None else: return a_dict def createGroup(self, name=''):", "= '' # For link message only elif self.MsgType ==", "} }''' json_video = '''{ \"touser\":\"OPENID\", \"msgtype\":\"video\", \"video\": { \"media_id\":\"MEDIA_ID\",", "be returned. For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84''' url =", "Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] ''' if", "None. ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\" + self._token postData = '{\"group\":", "a JSON. For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3 ''' token", "an article in a list which contains a dict. 
While", "return a_dict def getHisGroupID(self, openid=''): '''Get a subscriber's group ID.", "elif k == 'description': self.the_dict['video'][k] = v else: self.the_dict[k] =", "the ```v``` is type of list the ```v``` should be", "holder.initType(MsgType='video') >>> hasattr(holder, \"_PicUrl\") >>> True >>> holder.initType(MsgType='text') # Or", "Delete the unuseful elements in normalMapping for k in normalMapping:", "u'key': u'V1001_TODAY_MUSIC', u'sub_button': []}, {u'type': u'click', u'name': u'\\u7b2c\\u4e8c\\u94ae', u'key': u'V1001_TODAY_SINGER',", "= '' if root.find('Scale') is not None: self._Scale = root.find('Scale').text", "# For event message mapping global eventMapping eventMapping = {", "= media_type url = \"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\" + self._token + \"&type=\" +", "would be returned. If not, 'None' will be returned. '''", "\"https://api.weixin.qq.com/cgi-bin/menu/get?access_token=\"+ token try: response = urllib2.urlopen(url) except Exception as e:", "msgType == 'image': self.initType('image', incomingMessage) # TODO # For event", "''' url = \"https://api.weixin.qq.com/cgi-bin/groups/get?access_token=\" + self._token try: response = urllib2.urlopen(url)", "since by default its 'text' >>> hasattr(holder, \"_PicUrl\") >>> False", "rc.dumpXML() >>> tpl_out >>><xml> <ToUserName>the_wechat_client</ToUserName> <FromUserName>the_server</FromUserName> <CreateTime>1397808770</CreateTime> <MsgType>text</MsgType> <Content>Hello dude!</Content>", "u'http://www.soso.com/', u'type': u'view', u'name': u'\\u641c\\u641c', u'sub_button': []}, {u'url': u'http://v.qq.com/', u'type':", "Exception as e: print e return False else: j =", "token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\" + token try: response", "elif k == 'hqmusicurl': self.the_dict['music'][k] = v elif k ==", "'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> 
<CreateTime>12345678</CreateTime> <MsgType><![CDATA[image]]></MsgType> <Image> <MediaId><![CDATA[media_id]]></MediaId> </Image> </xml>''' tpl_voice", "# Store the XML incomingMessage if has # For text", "class PositiveRespondingContainer(object): '''Using wechat custom service API to pass 6", ">>> sm.loadToken('<KEY>') >>> hisprofile = sm.getSubscriberProfile(openid='his_open_id', lang='zh_CN') ''' def __init__(self,", "before using other functions''' self._token = token def createMenu(self, menu_format=''):", "None: self._Latitude = root.find('Latitude').text if root.find('Longitude') is not None: self._Longitude", "root.find('Format') is not None: self._Format = root.find('Format').text else: self._Format =", "to those wechat clients \\n who sent messages to the", "lower case. Official wechat define that. Don't claim ''' ##", "be returned. If the ```next_openid``` does not exist, official wechat", "global eventMapping eventMapping = { # The list presents the", "\"https://api.weixin.qq.com/cgi-bin/groups/get?access_token=\" + self._token try: response = urllib2.urlopen(url) except Exception as", ">>> hasattr(holder, \"_PicUrl\") >>> True >>> holder.initType(MsgType='text') # Or we", "constructed from a JSON. For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3", "if k == 'media_id': self.the_dict['voice'][k] = v else: self.the_dict[k] =", "we can just ellipsis this operation since by default its", "here if the wechat remote server is down print e", "sent messages to the public wechat service. 
Those 6 types", "'video', 'music', 'news']: raise ValueError, \"It has no message type:", "incomingMessage): '''To digest the XML message passed from wechat server", "news message only elif self._MsgType == 'news': for k, v", "not None: self._Scale = root.find('Scale').text else: self._Scale = '' if", "break # Delete the common tags for c in commonTag:", "self._token try: response = urllib2.urlopen(url) except Exception as e: print", "elif self.MsgType == 'location': if root.find('Location_X') is not None: self._Location_X", "```setElementByKey(touser='someone', msgtype='news', articles=packageArticle())``` ''' return [{\"title\": title, \"description\":description, \"url\":url, \"picurl\":picurl}]", "self._Label = root.find('Label').text else: self._Label = '' # For link", "try: response = urllib2.urlopen(url) except Exception as e: print e", "urllib2.urlopen(url) except Exception as e: # its better to raise", "= json.loads(response.read()) # The above works #print j # to", "= mm.getMenu() >>> menu_got {u'menu': {u'button': [{u'type': u'click', u'name': u'\\u7b2c\\u4e00\\u94ae',", "wechat message For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF Usage: >>>", "root.find('Scale').text else: self._Scale = '' if root.find('Label') is not None:", "= 'news' except Exception as e: print e raise e", "= v elif k == 'hqmusicurl': self.the_dict['music'][k] = v elif", "Exception as e: print e raise e # package article", "should be packaged in a list already # if list,", "e raise e ## For news message only elif self._MsgType", ">>> holder.initType(MsgType='text') # Or we can just ellipsis this operation", "urllib2.urlopen(url) except Exception as e: print e return None else:", "valid. 
If not exists or not valid will return None.", "more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E6.89.80.E6.9C.89.E5.88.86.E7.BB.84 ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/get?access_token=\" +", "token, post the message to determained user. This returns a", "means no menu at all ''' def __init__(self, token=''): self._token", "the wechat remote server is down print e return None", "the offical URL If the image is valid, then a_dict", "self.root.find('Video').find('Description').text = v elif k == 'MusicUrl': self.root.find('Video').find('MusicUrl').text = v", "'CreateTime': # setattr(self,\"_\"+i, str(int(time.time()))) # else: # setattr(self,\"_\"+i, '') self.__init__(MsgType)", "message only elif self._MsgType == 'image': # To set attribute", "global tpl_news tpl_text = u'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[你好]]></Content>", "<Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> </Articles> </xml>''' # Positive response", "To package XML message into an object Usage: >>> setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello", "information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E7.A7.BB.E5.8A.A8.E7.94.A8.E6.88.B7.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/members/update?access_token=\" + self._token postData", ">>> hasattr(holder, \"_PicUrl\") >>> False >>> hasattr(holder, \"_Content\") >>> True", "as e: print e raise e ## For video message", "elif k == 'msgtype': self.the_dict['msgtype'] = 'news' except Exception as", "as e: print e raise e ## For music 
message", "its content this will assgin values to ```self.MsgType and etc..```", "__init__(self, incomingMessage='<xml></xml>'): # pre-set some common variables root = etree.fromstring(incomingMessage)", "self._ThumbMediaId = '' # For location message only elif self.MsgType", ">>> hasattr(holder, \"_Content\") >>> True >>> holder.initType(MsgType='video') >>> hasattr(holder, \"_PicUrl\")", "\"picurl\":\"PIC_URL\" } ] } }''' class SubscriberManager(object): '''To manage the", "or secret else: return None def postMessage2API(token='',messageString=''): '''Using the token,", "return None else: gotten = a.read() a_dict = json.loads(gotten) if", "= root.find('FromUserName').text else: self._FromUserName = '' if root.find('CreateTime') is not", "== 'media_id': self.the_dict['video'][k] = v elif k == 'title': self.the_dict['video'][k]", "e: print e return None else: a_dict = json.loads(response.read()) #print", "For image message only elif self._MsgType == 'image': for k,", "json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode'] == 0: return True else:", "root.find('ToUserName').text else: self._ToUserName = '' if root.find('FromUserName') is not None:", "not None: self._MsgId = root.find('MsgId').text else: self._MsgId = '' #", "= '' # For voice message only elif self.MsgType ==", "self._ThumbMediaId = root.find('ThumbMediaId').text else: self._ThumbMediaId = '' # For location", "# For media posting from poster.encode import multipart_encode from poster.streaminghttp", "## assign/update value to the new XML object self.root.find(k).text =", "the_json_tpl = globals()['json_' + self._MsgType].encode('utf-8').decode('utf-8') self.the_dict = json.loads(the_json_tpl) if MsgType", "None' ''' default_url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&' url = default_url + 'appid='", "self._FromUserName = '' if root.find('CreateTime') is not None: self._CreateTime =", "self.MsgType == 'event': # It has to have a 
```self._Event```", "commonTag commonTag = ['ToUserName', 'FromUserName', 'CreateTime', 'MsgId', 'MsgType'] # For", "type based from the ```incomingMessage``` variable if msgType in ['text',", ">>> False >>> hasattr(holder, \"_Content\") >>> True >>> holder.getElementByTag('Content') >>>", "def initType(self, MsgType='text'): if MsgType not in ['text', 'image', 'voice',", "visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\" + self._token postData =", "'None' will be returned. For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6'''", "False def getMenu(self): '''Get the menu format from the API.", "'''To get subscriber list. A dict will be return if", "ID is of type 'int'. If openid wrong or token", "self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\" + token request = urllib2.Request(url, menu_format)", "False else: return False class MediaManager(object): '''There are four types", "== 'media_id': self.the_dict['image'][k] = v else: self.the_dict[k] = v except", "gotten = getattr(self, \"_\" + tag) except: return None ##raise", "% MsgType for i in MsgType_list: if MsgType == i:", "is not None: self.type = 'event' # After all then", "wechat server and get the response. 
''' def __init__(self, media_type='image',", "= default_url + 'appid=' + appid + '&secret=' + appsecret", "MsgType='text'): self._MsgType = MsgType # By default set the ```self.the_dict```", "'voice': if root.find('MediaId') is not None: self._MediaId = root.find('MediaId').text else:", "json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return a_dict['group']['id'] def getAllgroups(self):", "\"msgtype\":\"text\", \"text\": { \"content\":\"Hello World\" } }''' json_image = '''{", "\"\"\" ## assign the basic time self.root.find('CreateTime').text = str(int(time.time())) #print", "'text', 'image', 'voice', 'video', 'location', 'link' After making a new", "default set the ```self.the_dict``` as from the 'text' JSON format", "access token, then use the functions below''' self._token = token", "A dict will be returned. For more information please visit:", "k, v in kwargs.items(): if k == 'MediaId': #print v", "For video message only elif self.MsgType == 'video': if root.find('MediaId')", "'title': self.the_dict['video'][k] = v elif k == 'description': self.the_dict['video'][k] =", "the token, post the message to determained user. This returns", "<FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[video]]></MsgType> <Video> <MediaId><![CDATA[media_id]]></MediaId> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> </Video> </xml>''' tpl_music", "json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): return None else: return a_dict", "is not None: self._MediaId = root.find('MediaId').text else: self._MediaId = ''", "news The dumped is of dict format. 
We need to", "element from the tag ''' try: gotten = getattr(self, \"_\"", "Check if the incomingMessage has tag 'Recognition' then, it is", "Exception as e: print e raise e ## For voice", "as e: # its better to raise something here if", "'' if root.find('MsgId') is not None: self._MsgId = root.find('MsgId').text else:", "#for i in tpl_list: # if MsgType == i: #", "#print type(self.root.find(\"FromUserName\").text) def initType(self, MsgType='text'): tpl_list = ['text', 'image', 'voice',", "# After all then 'normal' message else: self.type = 'normal'", "<ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[你好]]></Content> </xml>''' tpl_image = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName>", "While construcing the JSON dumped, This is used with the", "self.type = 'normal' # For normal messages if self.type ==", "e) except: pass self.__init__(incomingMessage) # releasing method def __del__(self): pass", "be of type list\" elif k == 'touser': self.the_dict['touser'] =", "not in ['text', 'image', 'voice', 'video', 'music', 'news']: raise ValueError,", "is not None: self._MsgId = root.find('MsgId').text else: self._MsgId = ''", "Exception as e: print e return False else: a_dict =", "\"video\": { \"media_id\":\"MEDIA_ID\", \"title\":\"TITLE\", \"description\":\"DESCRIPTION\" } }''' json_music = '''{", "if root.find('Ticket') is not None: self._Ticket = root.find('Ticket').text if root.find('Latitude')", "appid + '&secret=' + appsecret try: a = urllib2.urlopen(url) except", "unique to unique wechat public service. This function will return", "root.find('Url') is not None: self._Url = root.find('Url').text else: self._Url =", "{u'name': u'\\u7b2c\\u4e09\\u94ae', u'sub_button': [{u'url': u'http://www.soso.com/', u'type': u'view', u'name': u'\\u641c\\u641c', u'sub_button':", "be returned. 
For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6''' if media_type", "this operation since by default its 'text' >>> hasattr(holder, \"_PicUrl\")", "True >>> holder.initType(MsgType='video') >>> hasattr(holder, \"_PicUrl\") >>> True >>> holder.initType(MsgType='text')", "\"_PicUrl\") >>> True >>> holder.initType(MsgType='text') # Or we can just", "= root.find('Ticket').text if root.find('Latitude') is not None: self._Latitude = root.find('Latitude').text", ">>> True >>> holder.getElementByTag('Content') >>> '' \"\"\" # By default,", "''' default_url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&' url = default_url + 'appid=' +", "root.find('Location_Y') is not None: self._Location_Y = root.find('Location_Y').text else: self._Location_Y =", "this way we can then use ```dumpXML()``` to get the", "all ''' def __init__(self, token=''): self._token = token def loadToken(self,", "self._CreateTime = '1000000000' if root.find('MsgType') is not None: self._MsgType =", "= root.find('MsgType').text else: self._MsgType = '' if root.find('MsgId') is not", "'' if root.find('ThumbMediaId') is not None: self._ThumbMediaId = root.find('ThumbMediaId').text else:", "getMenu(self): '''Get the menu format from the API. 
If there", "# The 5 ones in common if root.find('ToUserName') is not", "the parameter 'zh_CN', there are others: 'zh_TW, en' For more", "if MsgType == 'text': pass def initType(self, MsgType='text'): if MsgType", "http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E7.A7.BB.E5.8A.A8.E7.94.A8.E6.88.B7.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/members/update?access_token=\" + self._token postData = '{\"openid\":\"%s\",\"to_groupid\":%s}' %", "URL If the image is valid, then a_dict will be", "message else: self.type = 'normal' # For normal messages if", "MsgType not in tpl_list: raise ValueError, \"Invalid responsing message MsgType", "image, voice, video, music, news The dumped is of dict", "information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF Usage: >>> rc = RespondingContainer() >>>", "XML object self.root.find(k).text = v except Exception as e: print", "else: self._media_type = media_type url = \"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\" + self._token +", "if k == 'media_id': self.the_dict['video'][k] = v elif k ==", "''' ## For text message only if self._MsgType == 'text':", "self._Url = root.find('Url').text else: self._Url = '' # For event", "e raise e def dumpXML(self): # To dump the XML", "World\" } }''' json_image = '''{ \"touser\":\"OPENID\", \"msgtype\":\"image\", \"image\": {", "if the incomingMessage has tag 'Event' then, it is a", "set root as the 'text' XML format the_tpl = globals()['tpl_'", "message type based from the ```incomingMessage``` variable if msgType in", "\"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] } }''' class SubscriberManager(object): '''To manage", "media_type url = \"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\" + self._token + \"&type=\" + self._media_type", "list\" elif k == 'touser': self.the_dict['touser'] = v elif k", 
"service API to pass 6 types of messages to those", "''' def __init__(self, media_type='image', token = ''): self._media_type = media_type", "<CreateTime>12345678</CreateTime> <MsgType><![CDATA[image]]></MsgType> <Image> <MediaId><![CDATA[media_id]]></MediaId> </Image> </xml>''' tpl_voice = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName>", "print e return False else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'):", "#print self.root.find(\"FromUserName\").text #print type(self.root.find(\"FromUserName\").text) def initType(self, MsgType='text'): tpl_list = ['text',", "## For article message only elif self._MsgType == 'article': #", "articles=packageArticle())``` ''' return [{\"title\": title, \"description\":description, \"url\":url, \"picurl\":picurl}] # to", "then a dict will be returned. If the ```next_openid``` does", "\"msgtype\":\"music\", \"music\": { \"title\":\"MUSIC_TITLE\", \"description\":\"MUSIC_DESCRIPTION\", \"musicurl\":\"MUSIC_URL\", \"hqmusicurl\":\"HQ_MUSIC_URL\", \"thumb_media_id\":\"THUMB_MEDIA_ID\" } }'''", "'None' will be returned. For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84'''", "'MediaId': #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v elif k", "\"title\":\"Happy Day\", \"description\":\"Is Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" }", "def getSubscriberList(self, next_openid=''): '''To get subscriber list. 
A dict will", "'articles': if type(v) == list: self.the_dict['news'][k] = v else: raise", "'image':['PicUrl', 'MediaId'], 'voice':['MediaId','Format'], 'video':['MediaId','ThumbMeiaId'], 'location':['Location_X','Location_Y','Scale', 'Label'], 'link':['Title','Description','Url'], } # For", "elif self._MsgType == 'image': for k, v in kwargs.items(): try:", "time self.root.find('CreateTime').text = str(int(time.time())) #print \"-----\" #print self._MsgType ## For", "tpl_video global tpl_music global tpl_news tpl_text = u'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName>", "return None else: return a_dict def getHisGroupID(self, openid=''): '''Get a", "root.find('MediaId').text else: self._MediaId = '' if root.find('Format') is not None:", "j['errcode'] == 0: return True else: return False class MenuManager(object):", "'' if root.find('Description') is not None: self._Description = root.find('Description').text else:", "elif self._MsgType == 'voice': for k, v in kwargs.items(): try:", "'1000000000' if root.find('MsgType') is not None: self._MsgType = root.find('MsgType').text else:", "= RespondingContainer() >>> rc.initType('text') # Or we can ellipsis this", "# nothing gotten: it means no menu at all '''", "'video', 'thumb']: raise ValueError, \"Media type: '%s' not valid\" %", "Usage: >>> setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello dude!') # In this way we can", "tpl_news = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[news]]></MsgType> <ArticleCount>2</ArticleCount> <Articles> <item>", "of the key 'articles' should be of type list\" elif", "a.read() a_dict = json.loads(gotten) if a_dict.has_key('access_token'): return a_dict['access_token'] # means", "<Content><![CDATA[你好]]></Content> </xml>''' tpl_image = '''<xml> 
<ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[image]]></MsgType> <Image>", "else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode'] != 0:", "== 'event': # It has to have a ```self._Event``` for", "#def setElementByTag(self, tag): def setElementByTag(self, **kwargs): \"\"\" To package XML", "picurl=\"http://www.baidu.com/img/bdlogo.gif\"): '''This will return an article in a list which", "k == 'thumb_media_id': self.the_dict['music'][k] = v else: self.the_dict[k] = v", "def getAPIToken(appid='', appsecret=''): '''Get wechat API token for cusmter service", "if root.find('Url') is not None: self._Url = root.find('Url').text else: self._Url", "such as ```initType(MsgType='text')``` Notice: all the kwargs 's key in", "message certainly if root.find('Event') is not None: self._Event = root.find('Event').text", "MsgType='text'): tpl_list = ['text', 'image', 'voice', 'video', 'music', 'news'] if", "else: self._Content = '' # For image message only elif", "== list: self.the_dict['news'][k] = v else: raise ValueError, \"The value", "in tpl_list: # if MsgType == i: # self._MsgType =", "list the ```v``` should be packaged in a list already", "incomingMessage) elif msgType == 'image': self.initType('image', incomingMessage) elif msgType ==", ">>> tpl_out >>><xml> <ToUserName>the_wechat_client</ToUserName> <FromUserName>the_server</FromUserName> <CreateTime>1397808770</CreateTime> <MsgType>text</MsgType> <Content>Hello dude!</Content> </xml>", "```self._Recognition``` since it is just of this more than that", "#print v #print etree.tostring(self.root) self.root.find('Image').find('MediaId').text = v else: try: ##", "```incomingMessage``` variable if msgType in ['text', 'image', 'voice', 'video', 'location',", "except Exception as e: print e raise e def dumpXML(self):", "\"title\":\"MUSIC_TITLE\", \"description\":\"MUSIC_DESCRIPTION\", 
\"musicurl\":\"MUSIC_URL\", \"hqmusicurl\":\"HQ_MUSIC_URL\", \"thumb_media_id\":\"THUMB_MEDIA_ID\" } }''' json_news = '''{", "k !=self.MsgType: for m in normalMapping[k]: try: delattr(self, '_' +", "menu format from the API. If there be, then a", "determained media file to the offical URL If the image", "is not None: self._Content = root.find('Content').text else: self._Content = ''", "{\"name\": \"%s\"} }' % name request = urllib2.Request(url,data=postData) request.get_method =", "#getattr(self, \"_\"+k) = v ## assign/update value to the new", "root.find('ThumbMediaId') is not None: self._ThumbMediaId = root.find('ThumbMediaId').text else: self._ThumbMediaId =", "sys reload(sys) sys.setdefaultencoding('utf-8') from lxml import etree import time import", "the access token, then use the functions below''' self._token =", "except Exception as e: print e return None else: a_dict", "[{u'type': u'click', u'name': u'\\u7b2c\\u4e00\\u94ae', u'key': u'V1001_TODAY_MUSIC', u'sub_button': []}, {u'type': u'click',", "using other functions''' self._token = token def createMenu(self, menu_format=''): '''Create", "return None def postMessage2API(token='',messageString=''): '''Using the token, post the message", "XML to reponse to determained wechat message For more information", "if a_dict.has_key('errcode'): if a_dict['errcode'] != 0: return None else: return", "v in kwargs.items(): try: if k == 'media_id': self.the_dict['voice'][k] =", "global commonTag commonTag = ['ToUserName', 'FromUserName', 'CreateTime', 'MsgId', 'MsgType'] #", "a_dict['group']['id'] def getAllgroups(self): ''' A dict will be returned. 
For", "'video', 'location', 'link' After making a new instance of the", "set attribute value of the XML special for image for", "has no attribute/tag '%s'\" % (self._MsgType, k) ## For image", "messages to those wechat clients \\n who sent messages to", "ellipsis this since it is of 'text' by default >>>", "For image message only elif self._MsgType == 'image': # To", "in-common of normal message global commonTag commonTag = ['ToUserName', 'FromUserName',", "#child_list = [] #for child in self.root.getchildren(): # child_list +=", "print e return None else: a_dict = json.loads(response.read()) #print a_dict", "= '' if root.find('Format') is not None: self._Format = root.find('Format').text", "msgType == 'link': self.initType('link', incomingMessage) elif msgType == 'image': self.initType('image',", "all the mapping relationship # # For those tags in-common", "v in kwargs.items(): try: if k == 'musicurl': self.the_dict['music'][k] =", "request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request) except Exception as", "root.find('ThumbMediaId').text else: self._ThumbMediaId = '' # For location message only", "json.loads(the_dict_object) if we want to pass the right reponse back", "to other group. 
'True' or 'False' if moved or not.", "only elif self._MsgType == 'image': # To set attribute value", "self._MsgType == 'text': # To set attribute value to such", "of list the ```v``` should be packaged in a list", "None: self.type = 'recognition' # Check if the incomingMessage has", "<Description><![CDATA[DESCRIPTION]]></Description> <MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl> <HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl> <ThumbMediaId><![CDATA[media_id]]></ThumbMediaId> </Music> </xml>''' tpl_news = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName>", "Exception as e: print e return None else: gotten =", "'image': for k, v in kwargs.items(): try: if k ==", "message only if self.MsgType == 'text': if root.find('Content') is not", "v else: raise ValueError, \"The value of the key 'articles'", "sm.getSubscriberProfile(openid='his_open_id', lang='zh_CN') ''' def __init__(self, token=''): self._token = token def", "please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3 ''' token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\"", "self.initType('voice', incomingMessage) elif msgType == 'video': self.initType('video', incomingMessage) elif msgType", "u'V1001_TODAY_MUSIC', u'sub_button': []}, {u'type': u'click', u'name': u'\\u7b2c\\u4e8c\\u94ae', u'key': u'V1001_TODAY_SINGER', u'sub_button':", "wechat clients! :) \"\"\" ## assign the basic time self.root.find('CreateTime').text", "video message only elif self._MsgType == 'video': # To set", "e raise e ## For music message only elif self._MsgType", "for cusmter service or others. 
If ```appid``` and ```appsecret``` are", "visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+ self._token postData = '{\"openid\":\"%s\"}' %", "of type string. But ```menu_format``` is constructed from a JSON.", "is not None: self.type = 'recognition' # Check if the", "self.the_dict = json.loads(the_json_tpl) if MsgType == 'text': pass def initType(self,", "u'name': u'\\u7b2c\\u4e8c\\u94ae', u'key': u'V1001_TODAY_SINGER', u'sub_button': []}, {u'name': u'\\u7b2c\\u4e09\\u94ae', u'sub_button': [{u'url':", "self.root.find('Image').find('MediaId').text = v else: try: ## assign/update value to the", "a_dict def getHisGroupID(self, openid=''): '''Get a subscriber's group ID. The", "Exception as e: print e raise e ## For image", "<ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[news]]></MsgType> <ArticleCount>2</ArticleCount> <Articles> <item> <Title><![CDATA[title1]]></Title> <Description><![CDATA[description1]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl>", "'''Using the token, post the message to determained user. 
This", "relationship # # For those tags in-common of normal message", "'video': self.initType('video', incomingMessage) elif msgType == 'location': self.initType('location', incomingMessage) elif", "in normalMapping[k]: try: delattr(self, '_' + m) except: pass #", "MsgType # ## the the template # the_xml = globals()['tpl_'+i]", "the_xml = globals()['tpl_'+i] # self.root = etree.fromstring( the_xml ) #", "image for k, v in kwargs.items(): if k == 'ArticleCount':", "<MediaId><![CDATA[media_id]]></MediaId> </Voice> </xml>''' tpl_video = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[video]]></MsgType>", "only elif self.MsgType == 'video': if root.find('MediaId') is not None:", "Exception as e: # its better to raise something here", "k == 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Image').find('MediaId').text = v", ">>> True >>> holder.initType(MsgType='text') # Or we can just ellipsis", "have ```self.the_dict``` self.__init__(MsgType) def setElementByKey(self, **kwargs): '''To set the ```self.the_dict```", "== 'text': if root.find('Content') is not None: self._Content = root.find('Content').text", "return None ##raise ValueError #tmp = \"Instance has no attribute", "\"Instance has no attribute _%s\" % tag #raise AttributeError, tmp", "global recognitionMapping recognitionMapping = { 'voice':['MediaId','Format','Recognition'], } def __init__(self, incomingMessage='<xml></xml>'):", "token try: response = urllib2.urlopen(url) except Exception as e: print", "the ```incomingMessage``` variable if msgType in ['text', 'image', 'voice', 'video',", "= etree.fromstring( the_xml ) # break ## Set the default", "def __init__(self, token=''): self._token = token def loadToken(self, token=''): '''Firstly", "== 'Title': self.root.find('Video').find('Title').text = v elif k == 'Description': self.root.find('Video').find('Description').text", "Exception 
as e: print e raise e def dumpXML(self): #", "None def postMessage2API(token='',messageString=''): '''Using the token, post the message to", "else: return a_dict else: return a_dict def deleteMenu(self): token =", "of 'text' by default >>> # Notice we don't need", "datagen, headers = multipart_encode({\"image1\": open(media_path,\"rb\")}) except Exception as e: #print", "are valid. If not exists or not valid will return", "v elif k == 'Description': self.root.find('Video').find('Description').text = v elif k", "(groupid, new_name) request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request) except", "u'\\u7b2c\\u4e00\\u94ae', u'key': u'V1001_TODAY_MUSIC', u'sub_button': []}, {u'type': u'click', u'name': u'\\u7b2c\\u4e8c\\u94ae', u'key':", "## assign the basic time self.root.find('CreateTime').text = str(int(time.time())) #print \"-----\"", "a_dict.has_key('errcode'): return None else: return a_dict def createGroup(self, name=''): '''Create", "e: print e raise e ## For video message only", "XML incomingMessage if has # For text message only if", "setElementByTag(self, **kwargs): \"\"\" To package XML message into an object", "## For image message only elif self._MsgType == 'image': #", "eventMapping[k]: try: delattr(self, '_' + e) except: pass self.__init__(incomingMessage) #", "if k == 'content': self.the_dict['text'][k] = v else: self.the_dict[k] =", "the_tpl = globals()['tpl_' + self._MsgType].encode('utf-8').decode('utf-8') self.root = etree.fromstring(the_tpl) #print self.root.find(\"FromUserName\").text", "msgType == 'location': self.initType('location', incomingMessage) elif msgType == 'link': self.initType('link',", "for whose types are of : 'text', 'image', 'voice', 'video',", "root.find('Url').text else: self._Url = '' # For event message only", "'thumb_media_id': self.the_dict['music'][k] = v else: self.the_dict[k] = v except Exception", "url = \"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\" + token try: response = 
urllib2.urlopen(url) except", "#print j # to check if the message was accepted", "\"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\" + token request = urllib2.Request(url, menu_format) request.get_method = lambda", "return None else: a_dict = json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'):", "else: self._Label = '' # For link message only elif", "<CreateTime>12345678</CreateTime> <MsgType><![CDATA[news]]></MsgType> <ArticleCount>2</ArticleCount> <Articles> <item> <Title><![CDATA[title1]]></Title> <Description><![CDATA[description1]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item>", "token def getSubscriberProfile(self, openid='', lang='zh_CN'): '''The open_id parameter is unique", "v elif k == 'title': self.the_dict['video'][k] = v elif k", "of type 'int'. If not, will return None. ''' url", "None #raise e else: request = urllib2.Request(url,data=datagen,headers=headers) try: response =", "# its better to raise something here if the wechat", "not in MsgType_list: raise ValueError, \"MsgType '%s' not valid \"", "list, then its the elment of the key ```articles``` for", "if has # For text message only if self.MsgType ==", "with the new_name. 'True' or False if updated or not.", "packageArticle(title= \"default title\", description=\"default description\", url=\"http://www.baidu.com\", picurl=\"http://www.baidu.com/img/bdlogo.gif\"): '''This will return", "the mapping relationship # # For those tags in-common of", "PositiveRespondingContainer(object): '''Using wechat custom service API to pass 6 types", "\"media_id\":\"MEDIA_ID\" } }''' json_voice = '''{ \"touser\":\"OPENID\", \"msgtype\":\"voice\", \"voice\": {", "e: print e raise e ## For article message only", "determained group id with the new_name. 
'True' or False if", "'''To set the ```self.the_dict``` according to the message type by", "else: self._PicUrl = '' if root.find('MediaId') is not None: self._MediaId", "class MenuManager(object): '''To manage the bottom menu of the wechat", "= getattr(self, \"_\" + tag) except: return None ##raise ValueError", "self.root.find('Video').find('Title').text = v elif k == 'Description': self.root.find('Video').find('Description').text = v", "getHisGroupID(self, openid=''): '''Get a subscriber's group ID. The ID is", "presents the combined tag set of the event message 'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision'", "postData = '{\"openid\":\"%s\",\"to_groupid\":%s}' % (openid, groupid) request = urllib2.Request(url,data=postData) try:", "those wechat clients \\n who sent messages to the public", "'''{ \"touser\":\"OPENID\", \"msgtype\":\"music\", \"music\": { \"title\":\"MUSIC_TITLE\", \"description\":\"MUSIC_DESCRIPTION\", \"musicurl\":\"MUSIC_URL\", \"hqmusicurl\":\"HQ_MUSIC_URL\", \"thumb_media_id\":\"THUMB_MEDIA_ID\"", "= '''{ \"touser\":\"OPENID\", \"msgtype\":\"video\", \"video\": { \"media_id\":\"MEDIA_ID\", \"title\":\"TITLE\", \"description\":\"DESCRIPTION\" }", "return a_dict['group']['id'] def getAllgroups(self): ''' A dict will be returned.", "a_dict.has_key('access_token'): return a_dict['access_token'] # means wrong appid or secret else:", "False else: j = json.loads(response.read()) # The above works #print", "if root.find('Scale') is not None: self._Scale = root.find('Scale').text else: self._Scale", "etree.fromstring(incomingMessage) msgType = root.find(\"MsgType\").text # Get message type based from", "setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello dude!') # In this way we can then use", "functions''' self._token = token def createMenu(self, menu_format=''): '''Create menu, it", "The above works #print j # to check if the", "loadToken(self, token=''): '''Load the token before 
using other functions''' self._token", "if root.find('Precision') is not None: self._Precision = root.find('Precision').text def initType(self,", "down blow are the templates of all the responsing message", "def uploadMedia(self, media_type='image', media_path=''): '''Post the determained media file to", "= json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode'] != 0: return None", "u'\\u7b2c\\u4e09\\u94ae', u'sub_button': [{u'url': u'http://www.soso.com/', u'type': u'view', u'name': u'\\u641c\\u641c', u'sub_button': []},", "else: gotten = a.read() a_dict = json.loads(gotten) # means wrong", "kwargs.items(): try: if k == 'musicurl': self.the_dict['music'][k] = v elif", "raise e # package article def packageArticle(title= \"default title\", description=\"default", "elif self._MsgType == 'video': for k, v in kwargs.items(): try:", "i in tpl_list: # if MsgType == i: # self._MsgType", "else: self._ThumbMediaId = '' # For location message only elif", "# Check if the incomingMessage has tag 'Recognition' then, it", "mm.loadToken('something_the_api_token') >>> flag = mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string') >>> flag True >>> menu_got", "does not exist, official wechat server takes it as ''", "the template # the_xml = globals()['tpl_'+i] # self.root = etree.fromstring(", "e: print e return False else: j = json.loads(response.read()) #", "assign the basic time self.root.find('CreateTime').text = str(int(time.time())) #print \"-----\" #print", "= v elif k == 'description': self.the_dict['music'][k] = v elif", "6 types of messages include: text, image, voice, video, music,", "else: raise ValueError, \"The value of the key 'articles' should", "False def getSubscriberList(self, next_openid=''): '''To get subscriber list. A dict", "== 'image': self.initType('image', incomingMessage) # TODO # For event messages", "```open_id``` are valid. If not exists or not valid will", "next_openid=''): '''To get subscriber list. 
A dict will be return", "except Exception as e: print e return False else: a_dict", "\"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+ self._token postData = '{\"openid\":\"%s\"}' % openid request = urllib2.Request(url,data=postData)", "j['errcode'] == 0: return True else: return False def getMenu(self):", "== 'link': if root.find('Title') is not None: self._Title = root.find('Title').text", "\"_\"+k) = v ## assign/update value to the new XML", "#raise AttributeError, \"Message type '%s' has no attribute/tag '%s'\" %", "<FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[music]]></MsgType> <Music> <Title><![CDATA[TITLE]]></Title> <Description><![CDATA[DESCRIPTION]]></Description> <MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl> <HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl> <ThumbMediaId><![CDATA[media_id]]></ThumbMediaId> </Music>", "e return None #raise e else: request = urllib2.Request(url,data=datagen,headers=headers) try:", "}, { \"title\":\"Happy Day\", \"description\":\"Is Really A Happy Day\", \"url\":\"URL\",", "pass #@property def getElementByTag(self, tag): '''To get element from the", "attribute value of the XML special for image for k,", "the ```next_openid``` does not exist, official wechat server takes it", "poster.streaminghttp import register_openers class ParsingContainer(object): \"\"\"Parsing Wechat messages for whose", "key in this function should be of lower case. Official", "types of media suppored by wechat. 
image, voice, video, thumb", "elif self._MsgType == 'image': # To set attribute value of", "list already # if list, then its the elment of", "mm.deleteMenu() >>> flag2 True >>> mm.getMenu() >>> # nothing gotten:", "in kwargs.items(): try: if k == 'content': self.the_dict['text'][k] = v", "token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\" + token request =", ">>> holder.getElementByTag('Content') >>> '' \"\"\" # By default, MsgType is", "determained group name. If created, then it will return the", "root.find('MediaId').text else: self._MediaId = '' if root.find('ThumbMediaId') is not None:", "\"thumb_media_id\":\"THUMB_MEDIA_ID\" } }''' json_news = '''{ \"touser\":\"OPENID\", \"msgtype\":\"news\", \"news\":{ \"articles\":", "some common variables root = etree.fromstring(incomingMessage) # The 5 ones", "== 'title': self.the_dict['video'][k] = v elif k == 'description': self.the_dict['video'][k]", "need to declare the 'MsgType' For example, $~ python >>>", "'''{ \"touser\":\"OPENID\", \"msgtype\":\"voice\", \"voice\": { \"media_id\":\"MEDIA_ID\" } }''' json_video =", "= '''{ \"touser\":\"OPENID\", \"msgtype\":\"voice\", \"voice\": { \"media_id\":\"MEDIA_ID\" } }''' json_video", "holder.initType(MsgType='text') # Or we can just ellipsis this operation since", "the XML we need to reponse to wechat clients! 
:)", "return a_dict['access_token'] # means wrong appid or secret else: return", "}''' class SubscriberManager(object): '''To manage the subscriber groups, profile, location,", "self._Content = root.find('Content').text else: self._Content = '' # For image", "# For image message only elif self.MsgType == 'image': if", "if i == 'CreateTime': # setattr(self,\"_\"+i, str(int(time.time()))) # else: #", "incomingMessage has tag 'Recognition' then, it is a voice recognition", "v in kwargs.items(): try: if k == 'content': self.the_dict['text'][k] =", "message type :\"MsgType\" 2) check subclass message type if \"Voice", "u'sub_button': []}, {u'name': u'\\u7b2c\\u4e09\\u94ae', u'sub_button': [{u'url': u'http://www.soso.com/', u'type': u'view', u'name':", "= etree.fromstring(incomingMessage) msgType = root.find(\"MsgType\").text # Get message type based", "e: print e return False else: a_dict = json.loads(response.read()) #print", "is valid, then a_dict will be returned. If not, 'None'", "responsing message valid for wechat # For more information, please", "self._MsgType = MsgType # By default set the ```self.the_dict``` as", "globals()['tpl_'+i] # self.root = etree.fromstring( the_xml ) # break ##", "of type list\" elif k == 'touser': self.the_dict['touser'] = v", "try: # here we just check whether the ```v``` is", "from the API. If there be, then a dict would", "determained user. This returns a Boolean value''' url = \"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\"", "= '''{ \"touser\":\"OPENID\", \"msgtype\":\"music\", \"music\": { \"title\":\"MUSIC_TITLE\", \"description\":\"MUSIC_DESCRIPTION\", \"musicurl\":\"MUSIC_URL\", \"hqmusicurl\":\"HQ_MUSIC_URL\",", "False def moveHimToGroup(self, openid='', groupid=''): '''Move him to other group.", "'token' will be return. 
If not , 'return None' '''", "+ tag) except: return None ##raise ValueError #tmp = \"Instance", "= [] #for child in self.root.getchildren(): # child_list += [str(child)]", "if a_dict.has_key('errcode'): return None else: return a_dict def getAPIToken(appid='', appsecret=''):", "raise ValueError, \"Invalid responsing message MsgType '%s'\" % MsgType else:", "urllib import urllib2 # For media posting from poster.encode import", "= { 'text':['Content'], 'image':['PicUrl', 'MediaId'], 'voice':['MediaId','Format'], 'video':['MediaId','ThumbMeiaId'], 'location':['Location_X','Location_Y','Scale', 'Label'], 'link':['Title','Description','Url'],", "= '' if root.find('EventKey') is not None: self._EventKey = root.find('EventKey').text", "more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\" +", "not None: self._Latitude = root.find('Latitude').text if root.find('Longitude') is not None:", "Apr 2014 import sys reload(sys) sys.setdefaultencoding('utf-8') from lxml import etree", "#print a_dict if a_dict.has_key('errcode'): if a_dict['errcode'] == 0: return True", "False else: return False def moveHimToGroup(self, openid='', groupid=''): '''Move him", "= \"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\" + token try: response = urllib2.urlopen(url) except Exception", "= token def createMenu(self, menu_format=''): '''Create menu, it needs a", "== 'image': self.initType('image', incomingMessage) elif msgType == 'voice': self.initType('voice', incomingMessage)", "description=\"default description\", url=\"http://www.baidu.com\", picurl=\"http://www.baidu.com/img/bdlogo.gif\"): '''This will return an article in", "is not None: self._Scale = root.find('Scale').text else: self._Scale = ''", "else: return a_dict def createGroup(self, name=''): '''Create a determained group", 
"else: try: ## assign/update value to the new XML object", "ValueError #tmp = \"Instance has no attribute _%s\" % tag", "kwargs.items(): if k == 'ArticleCount': self.root.find(k).text = v if k", "assigned already return etree.tostring(self.root, encoding='utf-8',method='xml',pretty_print=True) # The down blow are", "if a_dict.has_key('errcode'): if a_dict['errcode'] == 0: return True else: return", "= urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request) except Exception as e:", "the JSON dumped, This is used with the function ```setElementByKey(touser='someone',", "json_voice = '''{ \"touser\":\"OPENID\", \"msgtype\":\"voice\", \"voice\": { \"media_id\":\"MEDIA_ID\" } }'''", "dump the XML we need # the ```self.root``` has been", "return a_dict def deleteMenu(self): token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\"", "else: self.the_dict[k] = v except Exception as e: print e", "e raise e ## For image message only elif self._MsgType", "v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v elif k == 'Title':", "http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+ self._token postData = '{\"openid\":\"%s\"}' % openid", "if j['errcode'] == 0: return True else: return False class", ">>> mm.loadToken('something_the_api_token') >>> flag = mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string') >>> flag True >>>", "a_dict if a_dict.has_key('errcode'): return None else: return a_dict def getAPIToken(appid='',", "__init__(self, token=''): self._token = token def loadToken(self, token=''): '''Firstly load", "e raise e ## For video message only elif self._MsgType", "of dict format. We need to json.loads(the_dict_object) if we want", "this copy should inform its author. 
If for commercial, please", "self._token + \"&openid=\" + openid + \"&lang=\" + lang try:", "= '' if root.find('Location_Y') is not None: self._Location_Y = root.find('Location_Y').text", "{ # The list presents the combined tag set of", "check parent message type :\"MsgType\" 2) check subclass message type", "dict would be returned. If not, 'None' will be returned.", "if k == 'ArticleCount': self.root.find(k).text = v if k ==", "the file to the offical wechat server and get the", "the basic time self.root.find('CreateTime').text = str(int(time.time())) #print \"-----\" #print self._MsgType", "no attribute/tag '%s'\" % (self._MsgType, k) ## For image message", "http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\" + self._token postData = '{\"group\":{\"id\":%s,\"name\":\"%s\"}}'", "<Image> <MediaId><![CDATA[media_id]]></MediaId> </Image> </xml>''' tpl_voice = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime>", "request = urllib2.Request(url,data=postData) request.get_method = lambda : 'POST' try: response", "be, then a dict would be returned. 
If not, 'None'", "e: # its better to raise something here if the", "for m in normalMapping[k]: try: delattr(self, '_' + m) except:", "try: gotten = getattr(self, \"_\" + tag) except: return None", "if a_dict.has_key('errcode'): return None else: return a_dict['groupid'] def updateGroupName(self, groupid='',", "print e raise e ## For video message only elif", "= ['text', 'image', 'voice', 'video', 'location', 'link', 'event'] if MsgType", "an object Usage: >>> setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello dude!') # In this way", "print e raise e ## For voice message only elif", "return False else: return False def moveHimToGroup(self, openid='', groupid=''): '''Move", "```token``` and ```next_openid``` are valid, then a dict will be", "for k, v in kwargs.items(): try: ## assign value to", "else: return False class MenuManager(object): '''To manage the bottom menu", "please visit : http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF global tpl_text global tpl_image global tpl_voice", "to the message type by such as ```initType(MsgType='text')``` Notice: all", "u'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[你好]]></Content> </xml>''' tpl_image = '''<xml>", "False >>> hasattr(holder, \"_Content\") >>> True >>> holder.getElementByTag('Content') >>> ''", "return False class MediaManager(object): '''There are four types of media", "message ''' \"articles\": [ { \"title\":\"Happy Day\", \"description\":\"Is Really A", "'location':['Location_X','Location_Y','Scale', 'Label'], 'link':['Title','Description','Url'], } # For event message mapping global", "exists or not valid will return None. 
For the parameter", "message global commonTag commonTag = ['ToUserName', 'FromUserName', 'CreateTime', 'MsgId', 'MsgType']", "\"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\" + token request = urllib2.Request(url, messageString) request.get_method = lambda", "request = urllib2.Request(url, menu_format) request.get_method = lambda : 'POST' try:", "e: print e raise e # package article def packageArticle(title=", "def dumpDict(self): return self.the_dict json_text = '''{ \"touser\":\"OPENID\", \"msgtype\":\"text\", \"text\":", "v except Exception as e: print e raise e ##", "not. For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D ''' url =", "= root.find('MediaId').text else: self._MediaId = '' if root.find('ThumbMediaId') is not", "is not None: self._Ticket = root.find('Ticket').text if root.find('Latitude') is not", "<CreateTime>12345678</CreateTime> <MsgType><![CDATA[video]]></MsgType> <Video> <MediaId><![CDATA[media_id]]></MediaId> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> </Video> </xml>''' tpl_music =", "createGroup(self, name=''): '''Create a determained group name. 
If created, then", "== 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Image').find('MediaId').text = v else:", "back ''' def __init__(self, MsgType='text'): self._MsgType = MsgType # By", "= root.find('Latitude').text if root.find('Longitude') is not None: self._Longitude = root.find('Longitude').text", "elif k == 'touser': self.the_dict['touser'] = v elif k ==", "etree import time import json import urllib import urllib2 #", "<MsgType><![CDATA[image]]></MsgType> <Image> <MediaId><![CDATA[media_id]]></MediaId> </Image> </xml>''' tpl_voice = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName>", "raise e ## For news message only elif self._MsgType ==", "gotten = a.read() a_dict = json.loads(gotten) # means wrong appid", "= MsgType # By default set the ```self.the_dict``` as from", "incomingMessage) elif msgType == 'link': self.initType('link', incomingMessage) elif msgType ==", "#print etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v elif k == 'Title': self.root.find('Video').find('Title').text", "'CreateTime', 'MsgId', 'MsgType'] # For normal message mapping global normalMapping", "please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF Usage: >>> rc = RespondingContainer() >>> rc.initType('text')", "self._token = token def loadToken(self, token=''): '''Load the token before", "it is just of this more than that of 'normal", "try: a = urllib2.urlopen(url) except Exception as e: print e", "the menu format. The ```menu_format``` is of type string. But", "of media suppored by wechat. 
image, voice, video, thumb Post", "package article def packageArticle(title= \"default title\", description=\"default description\", url=\"http://www.baidu.com\", picurl=\"http://www.baidu.com/img/bdlogo.gif\"):", "pass the message type to have ```self.the_dict``` self.__init__(MsgType) def setElementByKey(self,", "not None: self._PicUrl = root.find('PicUrl').text else: self._PicUrl = '' if", "'title': self.the_dict['music'][k] = v elif k == 'description': self.the_dict['music'][k] =", "below''' self._token = token def getSubscriberProfile(self, openid='', lang='zh_CN'): '''The open_id", "json.loads(the_json_tpl) if MsgType == 'text': pass def initType(self, MsgType='text'): if", "get subscriber list. A dict will be return if valid.", "# The above works #print j # to check if", "## assign value to the object #getattr(self, \"_\"+k) = v", "```self.root``` has been assigned already return etree.tostring(self.root, encoding='utf-8',method='xml',pretty_print=True) # The", "self._MsgType == 'video': for k, v in kwargs.items(): try: if", "we need # the ```self.root``` has been assigned already return", "print e return None else: gotten = a.read() a_dict =", "Exception as e: print e raise e ## For music", "root.find('Scale') is not None: self._Scale = root.find('Scale').text else: self._Scale =", "+ self._MsgType].encode('utf-8').decode('utf-8') self.root = etree.fromstring(the_tpl) #print self.root.find(\"FromUserName\").text #print type(self.root.find(\"FromUserName\").text) def", "= urllib2.Request(url, messageString) request.get_method = lambda : 'POST' try: response", "delattr(self, '_' + e) except: pass self.__init__(incomingMessage) # releasing method", "= { 'voice':['MediaId','Format','Recognition'], } def __init__(self, incomingMessage='<xml></xml>'): # pre-set some", "For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E6.89.80.E6.9C.89.E5.88.86.E7.BB.84 ''' url 
= \"https://api.weixin.qq.com/cgi-bin/groups/get?access_token=\"", "just check whether the ```v``` is type of list the", "ValueError, \"Media type: '%s' not valid\" % media_type else: self._media_type", "root.find('PicUrl').text else: self._PicUrl = '' if root.find('MediaId') is not None:", "flag = mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string') >>> flag True >>> menu_got = mm.getMenu()", "# the ```self.root``` has been assigned already return etree.tostring(self.root, encoding='utf-8',method='xml',pretty_print=True)", "returned. If not, 'None' will be returned. For more information,", "This returns a Boolean value''' url = \"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\" + token", "} def __init__(self, incomingMessage='<xml></xml>'): # pre-set some common variables root", "of : 'text', 'image', 'voice', 'video', 'location', 'link' After making", "try: delattr(self, '_' + m) except: pass # Delete the", "the subscriber groups, profile, location, list. 
Usage: >>> sm =", "tpl_news tpl_text = u'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[你好]]></Content> </xml>'''", "messages if self.type == 'recognition': self.initType('voice', incomingMessage) # Construct a", "event message mapping global eventMapping eventMapping = { # The", "music message only elif self._MsgType == 'music': for k, v", "return False else: a_dict = json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'):", "\"Message type '%s' has no attribute/tag '%s'\" % (self._MsgType, k)", "e ## For news message only elif self._MsgType == 'news':", "as e: #print e return None #raise e else: request", "For link message only elif self.MsgType == 'link': if root.find('Title')", "'{\"openid\":\"%s\",\"to_groupid\":%s}' % (openid, groupid) request = urllib2.Request(url,data=postData) try: response =", "self._MediaId = root.find('MediaId').text else: self._MediaId = '' if root.find('ThumbMediaId') is", "need to set the 'CreateTime' since it has been generated", "\"hqmusicurl\":\"HQ_MUSIC_URL\", \"thumb_media_id\":\"THUMB_MEDIA_ID\" } }''' json_news = '''{ \"touser\":\"OPENID\", \"msgtype\":\"news\", \"news\":{", "is not None: self._Label = root.find('Label').text else: self._Label = ''", "in kwargs.items(): try: if k == 'media_id': self.the_dict['video'][k] = v", "+ \"&openid=\" + openid + \"&lang=\" + lang try: a", "not None: self._FromUserName = root.find('FromUserName').text else: self._FromUserName = '' if", "root.find('Latitude').text if root.find('Longitude') is not None: self._Longitude = root.find('Longitude').text if", "root.find('Content').text else: self._Content = '' # For image message only", ":\"MsgType\" 2) check subclass message type if \"Voice Recognition\", \"Event\",", "as e: print e raise e ## For voice message", "of the class, need to declare the 'MsgType' For example,", 
"self.root.find(k).text = v if k == 'Articles': # TODO to", "!= 0: return None else: return a_dict else: return a_dict", "= \"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\" + self._token + \"&type=\" + self._media_type register_openers() try:", "if root.find('Event') is not None: self._Event = root.find('Event').text else: self._Event", "self.type = 'event' # After all then 'normal' message else:", "MsgType for i in MsgType_list: if MsgType == i: self.MsgType", "__init__(self, MsgType='text'): self._MsgType = MsgType # By default set root", "6 types of messages to those wechat clients \\n who", "{ \"media_id\":\"MEDIA_ID\" } }''' json_voice = '''{ \"touser\":\"OPENID\", \"msgtype\":\"voice\", \"voice\":", "u'name': u'\\u5938\\u6211\\u5e05', u'key': u'<KEY>', u'sub_button': []}]}]}} >>> flag2 = mm.deleteMenu()", "default >>> # Notice we don't need to set the", "= json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return a_dict['group']['id'] def", "k == 'touser': self.the_dict['touser'] = v elif k == 'msgtype':", ">>> sm = SubscriberManager() >>> sm.loadToken('<KEY>') >>> hisprofile = sm.getSubscriberProfile(openid='his_open_id',", "None else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): return None else:", "by default its 'text' >>> hasattr(holder, \"_PicUrl\") >>> False >>>", "dude!') >>> tpl_out = rc.dumpXML() >>> tpl_out >>><xml> <ToUserName>the_wechat_client</ToUserName> <FromUserName>the_server</FromUserName>", "description\", url=\"http://www.baidu.com\", picurl=\"http://www.baidu.com/img/bdlogo.gif\"): '''This will return an article in a", "For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E7.A7.BB.E5.8A.A8.E7.94.A8.E6.88.B7.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/members/update?access_token=\" +", "MsgType == 'text': pass def initType(self, MsgType='text'): if MsgType not", 
"\"\"\"Package XML to reponse to determained wechat message For more", "def __del__(self): pass #@property def getElementByTag(self, tag): '''To get element", "v #print etree.tostring(self.root) self.root.find('Voice').find('MediaId').text = v else: try: ## assign/update", "None: self._Event = root.find('Event').text else: self._Event = '' if root.find('EventKey')", "the news message ''' \"articles\": [ { \"title\":\"Happy Day\", \"description\":\"Is", "normalMapping: if k !=self.MsgType: for m in normalMapping[k]: try: delattr(self,", "token before using other functions''' self._token = token def createMenu(self,", "reponse to determained wechat message For more information please visit:", "<Articles> <item> <Title><![CDATA[title1]]></Title> <Description><![CDATA[description1]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> <item> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description>", "is used with the function ```setElementByKey(touser='someone', msgtype='news', articles=packageArticle())``` ''' return", "'''To get element from the tag ''' try: gotten =", "['ToUserName', 'FromUserName', 'CreateTime', 'MsgId', 'MsgType'] # For normal message mapping", "default set root as the 'text' XML format the_tpl =", "new_name) request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request) except Exception", "[]}, {u'type': u'click', u'name': u'\\u5938\\u6211\\u5e05', u'key': u'<KEY>', u'sub_button': []}]}]}} >>>", "<FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[voice]]></MsgType> <Voice> <MediaId><![CDATA[media_id]]></MediaId> </Voice> </xml>''' tpl_video = '''<xml>", "mm.getMenu() >>> # nothing gotten: it means no menu at", "authentication. 
Apr 2014 import sys reload(sys) sys.setdefaultencoding('utf-8') from lxml import", "function will return a dict if ```token``` and ```open_id``` are", "def initType(self, MsgType='text', incomingMessage='<xml></xml>'): ''' To initialize message type '''", "be return if valid. If ```token``` and ```next_openid``` are valid,", "None else: a_dict = json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): return", "recognitionMapping recognitionMapping = { 'voice':['MediaId','Format','Recognition'], } def __init__(self, incomingMessage='<xml></xml>'): #", "list: self.the_dict['news'][k] = v else: raise ValueError, \"The value of", "valid will return None. For the parameter 'zh_CN', there are", "a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode'] == 0: return", "self._MsgType == 'image': for k, v in kwargs.items(): try: if", "None: self._Location_Y = root.find('Location_Y').text else: self._Location_Y = '' if root.find('Scale')", "k == 'media_id': self.the_dict['image'][k] = v else: self.the_dict[k] = v", "in kwargs.items(): try: # here we just check whether the", "is not None: self._Longitude = root.find('Longitude').text if root.find('Precision') is not", "'image': self.initType('image', incomingMessage) # TODO # For event messages if", "initType(self, MsgType='text'): tpl_list = ['text', 'image', 'voice', 'video', 'music', 'news']", "location, list. 
Usage: >>> sm = SubscriberManager() >>> sm.loadToken('<KEY>') >>>", "return None #raise e else: request = urllib2.Request(url,data=datagen,headers=headers) try: response", "self._token postData = '{\"openid\":\"%s\"}' % openid request = urllib2.Request(url,data=postData) try:", "as for later on JSON loading def dumpDict(self): return self.the_dict", "== i: # self._MsgType = MsgType # ## the the", "self.the_dict['image'][k] = v else: self.the_dict[k] = v except Exception as", "!=self.MsgType: for m in normalMapping[k]: try: delattr(self, '_' + m)", ">>> # nothing gotten: it means no menu at all", "'media_id': self.the_dict['image'][k] = v else: self.the_dict[k] = v except Exception", "getSubscriberProfile(self, openid='', lang='zh_CN'): '''The open_id parameter is unique to unique", "AttributeError, tmp else: return gotten def digest(self, incomingMessage): '''To digest", "def moveHimToGroup(self, openid='', groupid=''): '''Move him to other group. 'True'", "is not None: self._Url = root.find('Url').text else: self._Url = ''", "rc = RespondingContainer() >>> rc.initType('text') # Or we can ellipsis", "self._MsgType == 'music': for k, v in kwargs.items(): try: if", "not None: self.type = 'event' # After all then 'normal'", "the determained media file to the offical URL If the", "Usage: >>> mm = MenuManager() >>> mm.loadToken('something_the_api_token') >>> flag =", "root.find('Location_X') is not None: self._Location_X = root.find('Location_X').text else: self._Location_X =", "Recognition\", \"Event\", \"Normal\" 3) check children class message type '''", "correct then a string 'token' will be return. 
If not", "None: self._CreateTime = root.find('CreateTime').text else: self._CreateTime = '1000000000' if root.find('MsgType')", "'link', 'event']: # Check if the incomingMessage has tag 'Recognition'", "{u'url': u'http://v.qq.com/', u'type': u'view', u'name': u'\\u770b\\u7535\\u5f71', u'sub_button': []}, {u'type': u'click',", "msgType == 'image': self.initType('image', incomingMessage) elif msgType == 'voice': self.initType('voice',", "getElementByTag(self, tag): '''To get element from the tag ''' try:", "menu, it needs a token and the menu format. The", "== 'hqmusicurl': self.the_dict['music'][k] = v elif k == 'thumb_media_id': self.the_dict['music'][k]", "template #for i in tpl_list: # if MsgType == i:", "None else: return a_dict def createGroup(self, name=''): '''Create a determained", "= ''): self._media_type = media_type self._token = token def loadToken(self,", "'image', 'voice', 'video', 'location', 'link', 'event']: # Check if the", "postMessage2API(token='',messageString=''): '''Using the token, post the message to determained user.", "v in kwargs.items(): if k == 'MediaId': #print v #print", "def __init__(self, MsgType='text'): self._MsgType = MsgType # By default set", "then a_dict will be returned. 
If not, 'None' will be", "will assgin values to ```self.MsgType and etc..``` Logistics as the", "whether the ```v``` is type of list the ```v``` should", "manage the bottom menu of the wechat service Usage: >>>", "### Attach 'tag' object to class to make something as", "self.the_dict['news'][k] = v else: raise ValueError, \"The value of the", "```initType(MsgType='text')``` Notice: all the kwargs 's key in this function", "dict as for later on JSON loading def dumpDict(self): return", "\"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\" + self._token + \"&openid=\" + openid + \"&lang=\" +", "else: return False def getMenu(self): '''Get the menu format from", "should be of type list\" elif k == 'touser': self.the_dict['touser']", "except Exception as e: print e return None else: gotten", "RespondingContainer(object): \"\"\"Package XML to reponse to determained wechat message For", "json import urllib import urllib2 # For media posting from", "XML message into an object Usage: >>> setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello dude!') #", "+ lang try: a = urllib2.urlopen(url) except Exception as e:", "e ## For video message only elif self._MsgType == 'video':", "'True' or 'False' if moved or not. 
For more information", "token request = urllib2.Request(url, messageString) request.get_method = lambda : 'POST'", "e raise e #raise AttributeError, \"Message type '%s' has no", "class MediaManager(object): '''There are four types of media suppored by", "else: return a_dict['group']['id'] def getAllgroups(self): ''' A dict will be", "# For those tags in-common of normal message global commonTag", "message only if self._MsgType == 'text': for k, v in", "if the wechat remote server is down print e return", "url = default_url + 'appid=' + appid + '&secret=' +", ">>> hasattr(holder, \"_Content\") >>> True >>> holder.getElementByTag('Content') >>> '' \"\"\"", "None: self._FromUserName = root.find('FromUserName').text else: self._FromUserName = '' if root.find('CreateTime')", "in kwargs.items(): try: ## assign value to the object #getattr(self,", "+ openid + \"&lang=\" + lang try: a = urllib2.urlopen(url)", "subscriber list. A dict will be return if valid. If", "= root.find('ThumbMediaId').text else: self._ThumbMediaId = '' # For location message", "if j['errcode'] == 0: return True else: return False def", "'''Get wechat API token for cusmter service or others. 
If", "the dict as for later on JSON loading def dumpDict(self):", "file to the offical URL If the image is valid,", "been assigned already return etree.tostring(self.root, encoding='utf-8',method='xml',pretty_print=True) # The down blow", "# pass the message type to have ```self.the_dict``` self.__init__(MsgType) def", "== 0: return True else: return False class MenuManager(object): '''To", "elif msgType == 'link': self.initType('link', incomingMessage) elif msgType == 'image':", "was accepted if j['errcode'] == 0: return True else: return", "= str(int(time.time())) #print \"-----\" #print self._MsgType ## For text message", "else: self._MsgType = '' if root.find('MsgId') is not None: self._MsgId", "else: return False def getSubscriberList(self, next_openid=''): '''To get subscriber list.", "k == 'hqmusicurl': self.the_dict['music'][k] = v elif k == 'thumb_media_id':", "pre-set some common variables root = etree.fromstring(incomingMessage) # The 5", "k, v in kwargs.items(): try: if k == 'content': self.the_dict['text'][k]", "in tpl_list: raise ValueError, \"Invalid responsing message MsgType '%s'\" %", "== 'image': for k, v in kwargs.items(): try: if k", "something as : 'self._FromUserName' #for i in child_list: # if", "v elif k == 'description': self.the_dict['video'][k] = v else: self.the_dict[k]", "class ParsingContainer(object): \"\"\"Parsing Wechat messages for whose types are of", "if root.find('Location_Y') is not None: self._Location_Y = root.find('Location_Y').text else: self._Location_Y", "parameter is unique to unique wechat public service. 
This function", "= root.find('MediaId').text else: self._MediaId = '' if root.find('Format') is not", "updateGroupName(self, groupid='', new_name=''): '''Update the determained group id with the", "<MsgType><![CDATA[voice]]></MsgType> <Voice> <MediaId><![CDATA[media_id]]></MediaId> </Voice> </xml>''' tpl_video = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName>", "#print etree.tostring(self.root) self.root.find('Voice').find('MediaId').text = v else: try: ## assign/update value", "for k, v in kwargs.items(): try: if k == 'content':", "```menu_format``` is constructed from a JSON. For more information please", "variables root = etree.fromstring(incomingMessage) # The 5 ones in common", "For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\"", "type '%s' has no attribute/tag '%s'\" % (self._MsgType, k) ##", "True else: return False else: return False class MediaManager(object): '''There", "elif msgType == 'video': self.initType('video', incomingMessage) elif msgType == 'location':", "root.find('MsgType') is not None: self._MsgType = root.find('MsgType').text else: self._MsgType =", "None: self._MsgId = root.find('MsgId').text else: self._MsgId = '' # Store", "if root.find('Label') is not None: self._Label = root.find('Label').text else: self._Label", "root.find('Event').text else: self._Event = '' if root.find('EventKey') is not None:", "else: return False else: return False def getSubscriberList(self, next_openid=''): '''To", "'voice':['MediaId','Format'], 'video':['MediaId','ThumbMeiaId'], 'location':['Location_X','Location_Y','Scale', 'Label'], 'link':['Title','Description','Url'], } # For event message", "or others. 
If ```appid``` and ```appsecret``` are correct then a", "self._Title = '' if root.find('Description') is not None: self._Description =", "== 'ArticleCount': self.root.find(k).text = v if k == 'Articles': #", "in commonTag: try: delattr(self, '_' + c) except: pass #", "group id of type 'int'. If not, will return None.", "raise e ## For video message only elif self._MsgType ==", "variable The 'incomingMessage' is of XML According to its content", "in kwargs.items(): if k == 'MediaId': #print v #print etree.tostring(self.root)", "raise e def dumpXML(self): # To dump the XML we", "this function should be of lower case. Official wechat define", "mm.getMenu() >>> menu_got {u'menu': {u'button': [{u'type': u'click', u'name': u'\\u7b2c\\u4e00\\u94ae', u'key':", "as 'text' MsgType = 'text' # Unique tages in all", "special for image for k, v in kwargs.items(): if k", "instance of the class, need to declare the 'MsgType' For", "for authentication. Apr 2014 import sys reload(sys) sys.setdefaultencoding('utf-8') from lxml", "the ```self.the_dict``` as from the 'text' JSON format the_json_tpl =", "those tags in-common of normal message global commonTag commonTag =", "== 'video': self.initType('video', incomingMessage) elif msgType == 'location': self.initType('location', incomingMessage)", "or token invalid, 'None' will be returned. For more information,", "its the elment of the key ```articles``` for the news", "k in normalMapping: if k !=self.MsgType: for m in normalMapping[k]:", "# For normal messages if self.type == 'normal': if msgType", "need to reponse to wechat clients! :) \"\"\" ## assign", "# self._MsgType = MsgType # ## the the template #", "this more than that of 'normal message => voice' self._Recognition", "self._MsgType == 'voice': # To set attribute value of the", "message to determained user. 
This returns a Boolean value''' url", "def deleteMenu(self): token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\" + token", "self.initType('image', incomingMessage) # TODO # For event messages if self.type", "object self.root.find(k).text = v except Exception as e: print e", "Attach 'tag' object to class to make something as :", "to unique wechat public service. This function will return a", "self._Description = '' if root.find('Url') is not None: self._Url =", "and ```open_id``` are valid. If not exists or not valid", "is constructed from a JSON. For more information please visit:", "pass the right reponse back ''' def __init__(self, MsgType='text'): self._MsgType", "open_id parameter is unique to unique wechat public service. This", "assgin values to ```self.MsgType and etc..``` Logistics as the followings:", "group id with the new_name. 'True' or False if updated", "responsing message MsgType '%s'\" % MsgType else: ## Load the", "the new_name. 'True' or False if updated or not. 
For", "else: self.type = 'normal' # For normal messages if self.type", "'''To manage the bottom menu of the wechat service Usage:", "= \"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\" + token request = urllib2.Request(url, messageString) request.get_method =", "#print e return None #raise e else: request = urllib2.Request(url,data=datagen,headers=headers)", "check subclass message type if \"Voice Recognition\", \"Event\", \"Normal\" 3)", "msgType = root.find(\"MsgType\").text # Get message type based from the", "message For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF Usage: >>> rc", "<ThumbMediaId><![CDATA[media_id]]></ThumbMediaId> </Music> </xml>''' tpl_news = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[news]]></MsgType>", "'Event' then, it is a voice event message elif root.find(\"Event\")", "\"description\":\"Is Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] '''", "dump the the dict as for later on JSON loading", "= etree.fromstring(the_tpl) #print self.root.find(\"FromUserName\").text #print type(self.root.find(\"FromUserName\").text) def initType(self, MsgType='text'): tpl_list", "message only elif self._MsgType == 'voice': for k, v in", "if MsgType not in ['text', 'image', 'voice', 'video', 'music', 'news']:", "+ self._token + \"&next_openid=\" + next_openid try: response = urllib2.urlopen(url)", "a_dict = json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): return None else:", "the responsing message valid for wechat # For more information,", "check if the message was accepted if j['errcode'] == 0:", "value to the new XML object self.root.find(k).text = v except", "XML special for image for k, v in kwargs.items(): if", "groupid='', new_name=''): '''Update the determained group id with 
the new_name.", "cusmter service or others. If ```appid``` and ```appsecret``` are correct", "'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[video]]></MsgType> <Video> <MediaId><![CDATA[media_id]]></MediaId> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> </Video>", "For voice message only elif self._MsgType == 'voice': # To", "None else: gotten = a.read() a_dict = json.loads(gotten) if a_dict.has_key('access_token'):", "tpl_video = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[video]]></MsgType> <Video> <MediaId><![CDATA[media_id]]></MediaId> <Title><![CDATA[title]]></Title>", "import multipart_encode from poster.streaminghttp import register_openers class ParsingContainer(object): \"\"\"Parsing Wechat", "TODO to generate articles as #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text", "= v elif k == 'ThumbMediaId': self.root.find('Video').find('ThumbMediaId').text = v else:", ">>> holder.initType(MsgType='video') >>> hasattr(holder, \"_PicUrl\") >>> True >>> holder.initType(MsgType='text') #", "None: self._EventKey = root.find('EventKey').text if root.find('Ticket') is not None: self._Ticket", "= root.find(\"MsgType\").text # Get message type based from the ```incomingMessage```", "voice event message elif root.find(\"Event\") is not None: self.type =", "not None: self._Description = root.find('Description').text else: self._Description = '' if", "to wechat clients! :) \"\"\" ## assign the basic time", "server and get the response. ''' def __init__(self, media_type='image', token", "return a_dict def getAPIToken(appid='', appsecret=''): '''Get wechat API token for", "class to make something as : 'self._FromUserName' #for i in", "'''Create a determained group name. 
If created, then it will", "valid \" % MsgType for i in MsgType_list: if MsgType", "'ArticleCount': self.root.find(k).text = v if k == 'Articles': # TODO", "voice message only elif self._MsgType == 'voice': for k, v", "root.find(\"Event\") is not None: self.type = 'event' # After all", "<MediaId><![CDATA[media_id]]></MediaId> </Image> </xml>''' tpl_voice = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[voice]]></MsgType>", "Boolean value''' url = \"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\" + token request = urllib2.Request(url,", "# Or we can ellipsis this since it is of", "# To set attribute value of the XML special for", "please inform the author for authentication. Apr 2014 import sys", "valid, then a dict will be returned. If the ```next_openid```", "in ['image', 'voice', 'video', 'thumb']: raise ValueError, \"Media type: '%s'", "Load the template #for i in tpl_list: # if MsgType", "loadToken(self, token = ''): self._token = token def uploadMedia(self, media_type='image',", "False else: a_dict = json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): if", "appid or secret if a_dict.has_key('errcode'): return None else: return a_dict", "= mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string') >>> flag True >>> menu_got = mm.getMenu() >>>", "later on JSON loading def dumpDict(self): return self.the_dict json_text =", "if MsgType not in tpl_list: raise ValueError, \"Invalid responsing message", "else: self._MediaId = '' if root.find('Format') is not None: self._Format", "{u'menu': {u'button': [{u'type': u'click', u'name': u'\\u7b2c\\u4e00\\u94ae', u'key': u'V1001_TODAY_MUSIC', u'sub_button': []},", "elif self._MsgType == 'voice': # To set attribute value of", "'''Get the menu format from the API. If there be,", "are valid, then a dict will be returned. 
If the", "is set as 'text' MsgType = 'text' # Unique tages", "kwargs.items(): try: ## assign value to the object #getattr(self, \"_\"+k)", "u'name': u'\\u7b2c\\u4e00\\u94ae', u'key': u'V1001_TODAY_MUSIC', u'sub_button': []}, {u'type': u'click', u'name': u'\\u7b2c\\u4e8c\\u94ae',", "'link', 'event'] if MsgType not in MsgType_list: raise ValueError, \"MsgType", "register_openers() try: datagen, headers = multipart_encode({\"image1\": open(media_path,\"rb\")}) except Exception as", "a_dict.has_key('errcode'): if a_dict['errcode'] == 0: return True else: return False", "not None: self._ThumbMediaId = root.find('ThumbMediaId').text else: self._ThumbMediaId = '' #", "tag) except: return None ##raise ValueError #tmp = \"Instance has", "'event': self.initType('event', incomingMessage) class RespondingContainer(object): \"\"\"Package XML to reponse to", "For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF Usage: >>> rc =", "to such as: 'self._FromUsername' for k, v in kwargs.items(): try:", "self.type == 'recognition': self.initType('voice', incomingMessage) # Construct a var ```self._Recognition```", "only elif self._MsgType == 'voice': # To set attribute value", "http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E6.89.80.E6.9C.89.E5.88.86.E7.BB.84 ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/get?access_token=\" + self._token try: response =", "url = \"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\" + token request = urllib2.Request(url, menu_format) request.get_method", "then a string 'token' will be return. 
If not ,", "mapping global eventMapping eventMapping = { # The list presents", "For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8 ''' url = \"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\"", "default, MsgType is set as 'text' MsgType = 'text' #", "as the followings: 1) check parent message type :\"MsgType\" 2)", "dict. While construcing the JSON dumped, This is used with", "only elif self.MsgType == 'location': if root.find('Location_X') is not None:", "'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&' url = default_url + 'appid=' + appid + '&secret='", "global tpl_voice global tpl_video global tpl_music global tpl_news tpl_text =", "None: self._MediaId = root.find('MediaId').text else: self._MediaId = '' if root.find('Format')", "only elif self.MsgType == 'image': if root.find('PicUrl') is not None:", "'video': if root.find('MediaId') is not None: self._MediaId = root.find('MediaId').text else:", "media_type else: self._media_type = media_type url = \"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\" + self._token", "else: gotten = a.read() a_dict = json.loads(gotten) if a_dict.has_key('access_token'): return", "None else: return a_dict['groupid'] def updateGroupName(self, groupid='', new_name=''): '''Update the", "right reponse back ''' def __init__(self, MsgType='text'): self._MsgType = MsgType", "moveHimToGroup(self, openid='', groupid=''): '''Move him to other group. 'True' or", "</xml>''' tpl_video = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[video]]></MsgType> <Video> <MediaId><![CDATA[media_id]]></MediaId>", "+ self._media_type register_openers() try: datagen, headers = multipart_encode({\"image1\": open(media_path,\"rb\")}) except", "returned. 
For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+", "# setattr(self,\"_\"+i, '') self.__init__(MsgType) #def setElementByTag(self, tag): def setElementByTag(self, **kwargs):", "format the_json_tpl = globals()['json_' + self._MsgType].encode('utf-8').decode('utf-8') self.the_dict = json.loads(the_json_tpl) if", "== 'voice': for k, v in kwargs.items(): try: if k", "message type ''' MsgType_list = ['text', 'image', 'voice', 'video', 'location',", "XML we need # the ```self.root``` has been assigned already", "root.find('Ticket').text if root.find('Latitude') is not None: self._Latitude = root.find('Latitude').text if", "normalMapping[k]: try: delattr(self, '_' + m) except: pass # Delete", "information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+ self._token postData =", "def loadToken(self, token=''): '''Firstly load the access token, then use", "sm = SubscriberManager() >>> sm.loadToken('<KEY>') >>> hisprofile = sm.getSubscriberProfile(openid='his_open_id', lang='zh_CN')", "return gotten def digest(self, incomingMessage): '''To digest the XML message", "## For voice message only elif self._MsgType == 'voice': #", "object Usage: >>> setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello dude!') # In this way we", "default tag value ### Get all the tags #child_list =", "the 'MsgType' For example, $~ python >>> holder = ParsingContainer()", "to ```self.MsgType and etc..``` Logistics as the followings: 1) check", "tpl_text global tpl_image global tpl_voice global tpl_video global 
tpl_music global", ": 'self._FromUserName' #for i in child_list: # if i ==", "is just of this more than that of 'normal message", "'''{ \"touser\":\"OPENID\", \"msgtype\":\"video\", \"video\": { \"media_id\":\"MEDIA_ID\", \"title\":\"TITLE\", \"description\":\"DESCRIPTION\" } }'''", "{ \"title\":\"MUSIC_TITLE\", \"description\":\"MUSIC_DESCRIPTION\", \"musicurl\":\"MUSIC_URL\", \"hqmusicurl\":\"HQ_MUSIC_URL\", \"thumb_media_id\":\"THUMB_MEDIA_ID\" } }''' json_news =", "if root.find(\"Recognition\") is not None: self.type = 'recognition' # Check", "SubscriberManager(object): '''To manage the subscriber groups, profile, location, list. Usage:", "news message ''' \"articles\": [ { \"title\":\"Happy Day\", \"description\":\"Is Really", "type ''' MsgType_list = ['text', 'image', 'voice', 'video', 'location', 'link',", "def getMenu(self): '''Get the menu format from the API. If", "type(self.root.find(\"FromUserName\").text) def initType(self, MsgType='text'): tpl_list = ['text', 'image', 'voice', 'video',", "For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6''' if media_type not in", "tpl_out >>><xml> <ToUserName>the_wechat_client</ToUserName> <FromUserName>the_server</FromUserName> <CreateTime>1397808770</CreateTime> <MsgType>text</MsgType> <Content>Hello dude!</Content> </xml> >>>", "= rc.dumpXML() >>> tpl_out >>><xml> <ToUserName>the_wechat_client</ToUserName> <FromUserName>the_server</FromUserName> <CreateTime>1397808770</CreateTime> <MsgType>text</MsgType> <Content>Hello", "commonTag: try: delattr(self, '_' + c) except: pass # Delete", "default_url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&' url = default_url + 'appid=' + appid", "k, v in kwargs.items(): try: if k == 'media_id': self.the_dict['image'][k]", "set attribute value to such as: 'self._FromUsername' for k, v", "SubscriberManager() >>> sm.loadToken('<KEY>') >>> 
hisprofile = sm.getSubscriberProfile(openid='his_open_id', lang='zh_CN') ''' def", "holder.getElementByTag('Content') >>> '' \"\"\" # By default, MsgType is set", "To set attribute value of the XML special for image", "is not None: self._CreateTime = root.find('CreateTime').text else: self._CreateTime = '1000000000'", "tag): def setElementByTag(self, **kwargs): \"\"\" To package XML message into", "} }''' json_voice = '''{ \"touser\":\"OPENID\", \"msgtype\":\"voice\", \"voice\": { \"media_id\":\"MEDIA_ID\"", "self._token + \"&type=\" + self._media_type register_openers() try: datagen, headers =", "basic time self.root.find('CreateTime').text = str(int(time.time())) #print \"-----\" #print self._MsgType ##", "\"description\":\"DESCRIPTION\" } }''' json_music = '''{ \"touser\":\"OPENID\", \"msgtype\":\"music\", \"music\": {", "<Voice> <MediaId><![CDATA[media_id]]></MediaId> </Voice> </xml>''' tpl_video = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime>", "API to pass 6 types of messages to those wechat", "self.the_dict['music'][k] = v elif k == 'thumb_media_id': self.the_dict['music'][k] = v", "```menu_format``` is of type string. But ```menu_format``` is constructed from", "at all ''' def __init__(self, token=''): self._token = token def", "self._token postData = '{\"group\":{\"id\":%s,\"name\":\"%s\"}}' % (groupid, new_name) request = urllib2.Request(url,data=postData)", "the image is valid, then a_dict will be returned. 
If", "</Voice> </xml>''' tpl_video = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[video]]></MsgType> <Video>", "hasattr(holder, \"_Content\") >>> True >>> holder.initType(MsgType='video') >>> hasattr(holder, \"_PicUrl\") >>>", "+ c) except: pass # Delete the unuseful elements in", "{ \"media_id\":\"MEDIA_ID\" } }''' json_video = '''{ \"touser\":\"OPENID\", \"msgtype\":\"video\", \"video\":", "Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" }, { \"title\":\"Happy Day\",", "openid=''): '''Get a subscriber's group ID. The ID is of", "self.root.find('Video').find('ThumbMediaId').text = v else: try: ## assign/update value to the", "list presents the combined tag set of the event message", "only elif self._MsgType == 'music': for k, v in kwargs.items():", "in MsgType_list: raise ValueError, \"MsgType '%s' not valid \" %", "= json.loads(gotten) if a_dict.has_key('access_token'): return a_dict['access_token'] # means wrong appid", "eventMapping for k in eventMapping: for e in eventMapping[k]: try:", "format from the API. If there be, then a dict", "# means wrong appid or secret if a_dict.has_key('errcode'): return None", "</xml>''' tpl_voice = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[voice]]></MsgType> <Voice> <MediaId><![CDATA[media_id]]></MediaId>", "returned. 
For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8 ''' url =", "if a_dict.has_key('errcode'): return None else: return a_dict['group']['id'] def getAllgroups(self): '''", "elif k == 'title': self.the_dict['video'][k] = v elif k ==", "since it is of 'text' by default >>> # Notice", "self.MsgType == 'image': if root.find('PicUrl') is not None: self._PicUrl =", "Check if the incomingMessage has tag 'Event' then, it is", "no attribute _%s\" % tag #raise AttributeError, tmp else: return", "% MsgType else: ## Load the template #for i in", "a_dict will be returned. If not, 'None' will be returned.", "k == 'title': self.the_dict['music'][k] = v elif k == 'description':", "'event' # After all then 'normal' message else: self.type =", "== 'title': self.the_dict['music'][k] = v elif k == 'description': self.the_dict['music'][k]", "eventMapping: for e in eventMapping[k]: try: delattr(self, '_' + e)", "None: self._MediaId = root.find('MediaId').text else: self._MediaId = '' # For", "\"url\":\"URL\", \"picurl\":\"PIC_URL\" }, { \"title\":\"Happy Day\", \"description\":\"Is Really A Happy", "u'click', u'name': u'\\u7b2c\\u4e00\\u94ae', u'key': u'V1001_TODAY_MUSIC', u'sub_button': []}, {u'type': u'click', u'name':", "XML According to its content this will assgin values to", "and ```appsecret``` are correct then a string 'token' will be", "None: self._Label = root.find('Label').text else: self._Label = '' # For", "if k == 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Voice').find('MediaId').text =", "the XML we need # the ```self.root``` has been assigned", "k, v in kwargs.items(): if k == 'ArticleCount': self.root.find(k).text =", "= json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): if a_dict['errcode'] == 0:", "templates of all the responsing message valid for wechat #", "just of this more than that of 'normal message =>", "```articles``` for 
the news message ''' \"articles\": [ { \"title\":\"Happy", "media posting from poster.encode import multipart_encode from poster.streaminghttp import register_openers", "= json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): return None else: return", "def digest(self, incomingMessage): '''To digest the XML message passed from", "Delete the common tags for c in commonTag: try: delattr(self,", "% (groupid, new_name) request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request)", "menu_format=''): '''Create menu, it needs a token and the menu", "None. For the parameter 'zh_CN', there are others: 'zh_TW, en'", "if root.find('Location_X') is not None: self._Location_X = root.find('Location_X').text else: self._Location_X", "= v else: self.the_dict[k] = v except Exception as e:", "= MsgType # By default set root as the 'text'", "tpl_image = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[image]]></MsgType> <Image> <MediaId><![CDATA[media_id]]></MediaId> </Image>", "if root.find('ToUserName') is not None: self._ToUserName = root.find('ToUserName').text else: self._ToUserName", "'image', 'voice', 'video', 'location', 'link' After making a new instance", "['text', 'image', 'voice', 'video', 'music', 'news'] if MsgType not in", "True >>> mm.getMenu() >>> # nothing gotten: it means no", "message elif root.find(\"Event\") is not None: self.type = 'event' #", "a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return a_dict", "elif self._MsgType == 'article': # To set attribute value of", "pass self.__init__(incomingMessage) # releasing method def __del__(self): pass #@property def", "'image': # To set attribute value of the XML special", "% MsgType else: # pass the message type to have", "wrong appid or secret else: return None def postMessage2API(token='',messageString=''): '''Using", "if 
root.find('EventKey') is not None: self._EventKey = root.find('EventKey').text if root.find('Ticket')", "from a JSON. For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3 '''", "for event message certainly if root.find('Event') is not None: self._Event", "= '' # For event message only elif self.MsgType ==", "appid or secret else: return None def postMessage2API(token='',messageString=''): '''Using the", "self._MsgType].encode('utf-8').decode('utf-8') self.root = etree.fromstring(the_tpl) #print self.root.find(\"FromUserName\").text #print type(self.root.find(\"FromUserName\").text) def initType(self,", "if k == 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Image').find('MediaId').text =", "return False else: return False def getSubscriberList(self, next_openid=''): '''To get", "} ] ''' if k == 'articles': if type(v) ==", "= root.find('ToUserName').text else: self._ToUserName = '' if root.find('FromUserName') is not", "v #print etree.tostring(self.root) self.root.find('Image').find('MediaId').text = v else: try: ## assign/update", "[]}]}]}} >>> flag2 = mm.deleteMenu() >>> flag2 True >>> mm.getMenu()", "'' if root.find('Label') is not None: self._Label = root.find('Label').text else:", "a = urllib2.urlopen(url) except Exception as e: print e return", "elif self.MsgType == 'video': if root.find('MediaId') is not None: self._MediaId", "json_image = '''{ \"touser\":\"OPENID\", \"msgtype\":\"image\", \"image\": { \"media_id\":\"MEDIA_ID\" } }'''", "v in kwargs.items(): try: ## assign value to the object", "return None. For the parameter 'zh_CN', there are others: 'zh_TW,", "group name. 
If created, then it will return the new", "'Recognition' then, it is a voice recognition message if root.find(\"Recognition\")", "Notice we don't need to set the 'CreateTime' since it", "self._MsgId = root.find('MsgId').text else: self._MsgId = '' # Store the", "not None: self._Content = root.find('Content').text else: self._Content = '' #", "if root.find('Title') is not None: self._Title = root.find('Title').text else: self._Title", "updated or not. For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D '''", "the the dict as for later on JSON loading def", "else: self._MediaId = '' # For voice message only elif", "loadToken(self, token=''): '''Firstly load the access token, then use the", "for later on JSON loading def dumpDict(self): return self.the_dict json_text", "def initType(self, MsgType='text'): tpl_list = ['text', 'image', 'voice', 'video', 'music',", "text message only if self._MsgType == 'text': for k, v", "only elif self._MsgType == 'video': for k, v in kwargs.items():", "'location', 'link', 'event'] if MsgType not in MsgType_list: raise ValueError,", "k, v in kwargs.items(): try: if k == 'media_id': self.the_dict['voice'][k]", "self._EventKey = root.find('EventKey').text if root.find('Ticket') is not None: self._Ticket =", ">>> rc.initType('text') # Or we can ellipsis this since it", "MsgType == i: self.MsgType = i break # Delete the", "a_dict.has_key('errcode'): if a_dict['errcode'] != 0: return None else: return a_dict", "== 'recognition': self.initType('voice', incomingMessage) # Construct a var ```self._Recognition``` since", "0: return None else: return a_dict else: return a_dict def", "has tag 'Recognition' then, it is a voice recognition message", "image message only elif self._MsgType == 'image': # To set", "'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> 
<CreateTime>12345678</CreateTime> <MsgType><![CDATA[music]]></MsgType> <Music> <Title><![CDATA[TITLE]]></Title> <Description><![CDATA[DESCRIPTION]]></Description> <MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl> <HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl>", "other functions''' self._token = token def createMenu(self, menu_format=''): '''Create menu,", "self.the_dict['music'][k] = v elif k == 'title': self.the_dict['music'][k] = v", "'' if root.find('CreateTime') is not None: self._CreateTime = root.find('CreateTime').text else:", "## For voice message only elif self._MsgType == 'voice': for", "+ self._token postData = '{\"group\": {\"name\": \"%s\"} }' % name", "u'name': u'\\u641c\\u641c', u'sub_button': []}, {u'url': u'http://v.qq.com/', u'type': u'view', u'name': u'\\u770b\\u7535\\u5f71',", "if root.find('MsgId') is not None: self._MsgId = root.find('MsgId').text else: self._MsgId", "be of lower case. Official wechat define that. Don't claim", "elif k == 'Description': self.root.find('Video').find('Description').text = v elif k ==", "The ```menu_format``` is of type string. 
But ```menu_format``` is constructed", "v ## assign/update value to the new XML object self.root.find(k).text", "elif k == 'ThumbMediaId': self.root.find('Video').find('ThumbMediaId').text = v else: try: ##", "response = urllib2.urlopen(url) except Exception as e: print e return", ">>> mm = MenuManager() >>> mm.loadToken('something_the_api_token') >>> flag = mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string')", "information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3 ''' token = self._token url =", "e raise e ## For article message only elif self._MsgType", "For location message only elif self.MsgType == 'location': if root.find('Location_X')", "u'click', u'name': u'\\u7b2c\\u4e8c\\u94ae', u'key': u'V1001_TODAY_SINGER', u'sub_button': []}, {u'name': u'\\u7b2c\\u4e09\\u94ae', u'sub_button':", "#print a_dict if a_dict.has_key('errcode'): return None else: return a_dict def", "media file to the offical URL If the image is", "if list, then its the elment of the key ```articles```", "except: pass # Delete the unuseful elements in normalMapping for", "'HQMusicUrl': self.root.find('Video').find('HQMusicUrl').text = v elif k == 'ThumbMediaId': self.root.find('Video').find('ThumbMediaId').text =", "= sm.getSubscriberProfile(openid='his_open_id', lang='zh_CN') ''' def __init__(self, token=''): self._token = token", "'%s'\" % MsgType else: # pass the message type to", "k == 'description': self.the_dict['video'][k] = v else: self.the_dict[k] = v", "str(int(time.time()))) # else: # setattr(self,\"_\"+i, '') self.__init__(MsgType) #def setElementByTag(self, tag):", "location message only elif self.MsgType == 'location': if root.find('Location_X') is", "secret else: return None def postMessage2API(token='',messageString=''): '''Using the token, post", "```v``` should be packaged in a list already # if", "else: self._FromUserName = '' if root.find('CreateTime') is not 
None: self._CreateTime", "pass # Delete the unuseful elements in normalMapping for k", "dict will be return if valid. If ```token``` and ```next_openid```", "self.MsgType == 'location': if root.find('Location_X') is not None: self._Location_X =", "''): self._token = token def uploadMedia(self, media_type='image', media_path=''): '''Post the", "A dict will be return if valid. If ```token``` and", "Copyright to <NAME>. # Any distrubites of this copy should", "```appid``` and ```appsecret``` are correct then a string 'token' will", "article in a list which contains a dict. While construcing", "== 'link': self.initType('link', incomingMessage) elif msgType == 'image': self.initType('image', incomingMessage)", "if root.find('Latitude') is not None: self._Latitude = root.find('Latitude').text if root.find('Longitude')", "# In this way we can then use ```dumpXML()``` to", "incomingMessage='<xml></xml>'): ''' To initialize message type ''' MsgType_list = ['text',", "False class MediaManager(object): '''There are four types of media suppored", "etree.tostring(self.root) self.root.find('Image').find('MediaId').text = v else: try: ## assign/update value to", "self.initType('image', incomingMessage) elif msgType == 'voice': self.initType('voice', incomingMessage) elif msgType", "urllib2.urlopen(request) except Exception as e: print e return False else:", "already return etree.tostring(self.root, encoding='utf-8',method='xml',pretty_print=True) # The down blow are the", "A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] } }''' class", "set the ```self.the_dict``` according to the message type by such", "} }''' class SubscriberManager(object): '''To manage the subscriber groups, profile,", "### Get all the tags #child_list = [] #for child", "menu_got {u'menu': {u'button': [{u'type': u'click', u'name': u'\\u7b2c\\u4e00\\u94ae', u'key': u'V1001_TODAY_MUSIC', u'sub_button':", "token = ''): self._token = token def uploadMedia(self, media_type='image', media_path=''):", 
"self._MsgType == 'image': # To set attribute value of the", "it has been generated automatically :) >>> rc.setElementByTag(FromUserName='the_server', ToUserName='the_wechat_client',Content='Hello dude!')", "wechat clients \\n who sent messages to the public wechat", "elif self.MsgType == 'voice': if root.find('MediaId') is not None: self._MediaId", "= token def loadToken(self, token=''): '''Firstly load the access token,", "'''The open_id parameter is unique to unique wechat public service.", "```next_openid``` are valid, then a dict will be returned. If", "= '' if root.find('ThumbMediaId') is not None: self._ThumbMediaId = root.find('ThumbMediaId').text", "# To set attribute value to such as: 'self._FromUsername' for", "attribute/tag '%s'\" % (self._MsgType, k) ## For image message only", "postData = '{\"group\":{\"id\":%s,\"name\":\"%s\"}}' % (groupid, new_name) request = urllib2.Request(url,data=postData) try:", "accepted if j['errcode'] == 0: return True else: return False", "None: self._MediaId = root.find('MediaId').text else: self._MediaId = '' if root.find('ThumbMediaId')", "will be returned. For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E6.89.80.E6.9C.89.E5.88.86.E7.BB.84 '''", "# pre-set some common variables root = etree.fromstring(incomingMessage) # The", "global tpl_text global tpl_image global tpl_voice global tpl_video global tpl_music", "to the public wechat service. 
Those 6 types of messages", "not None: self._MediaId = root.find('MediaId').text else: self._MediaId = '' if", "# Delete the unuseful elements in eventMapping for k in", ">>> hisprofile = sm.getSubscriberProfile(openid='his_open_id', lang='zh_CN') ''' def __init__(self, token=''): self._token", "'MediaId': #print v #print etree.tostring(self.root) self.root.find('Voice').find('MediaId').text = v else: try:", ">>> flag = mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string') >>> flag True >>> menu_got =", "'''Create menu, it needs a token and the menu format.", "MsgType # By default set root as the 'text' XML", "type(v) == list: self.the_dict['news'][k] = v else: raise ValueError, \"The", "= '1000000000' if root.find('MsgType') is not None: self._MsgType = root.find('MsgType').text", "self.initType('event', incomingMessage) class RespondingContainer(object): \"\"\"Package XML to reponse to determained", "to reponse to wechat clients! :) \"\"\" ## assign the", "''' A dict will be returned. For more information please", "e return None else: gotten = a.read() a_dict = json.loads(gotten)", "return True else: return False class MenuManager(object): '''To manage the", "'return None' ''' default_url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&' url = default_url +", "only if self._MsgType == 'text': # To set attribute value", "it will return the new group id of type 'int'.", "wechat custom service API to pass 6 types of messages", "want to pass the right reponse back ''' def __init__(self,", "<MsgType><![CDATA[music]]></MsgType> <Music> <Title><![CDATA[TITLE]]></Title> <Description><![CDATA[DESCRIPTION]]></Description> <MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl> <HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl> <ThumbMediaId><![CDATA[media_id]]></ThumbMediaId> </Music> </xml>''' tpl_news", "clients! 
:) \"\"\" ## assign the basic time self.root.find('CreateTime').text =", "getSubscriberList(self, next_openid=''): '''To get subscriber list. A dict will be", "<HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl> <ThumbMediaId><![CDATA[media_id]]></ThumbMediaId> </Music> </xml>''' tpl_news = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime>", "a_dict['errcode'] != 0: return None else: return a_dict else: return", "json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return a_dict['groupid'] def updateGroupName(self,", "pass # Delete the unuseful elements in eventMapping for k", "#print self._MsgType ## For text message only if self._MsgType ==", "If created, then it will return the new group id", "request.get_method = lambda : 'POST' try: response = urllib2.urlopen(request) except", "Logistics as the followings: 1) check parent message type :\"MsgType\"", "kwargs.items(): try: if k == 'media_id': self.the_dict['image'][k] = v else:", "with the function ```setElementByKey(touser='someone', msgtype='news', articles=packageArticle())``` ''' return [{\"title\": title,", "or secret if a_dict.has_key('errcode'): return None else: return a_dict def", "or not. For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E7.A7.BB.E5.8A.A8.E7.94.A8.E6.88.B7.E5.88.86.E7.BB.84''' url =", "url = \"https://api.weixin.qq.com/cgi-bin/groups/members/update?access_token=\" + self._token postData = '{\"openid\":\"%s\",\"to_groupid\":%s}' % (openid,", "will be return if valid. 
If ```token``` and ```next_openid``` are", "root.find('Latitude') is not None: self._Latitude = root.find('Latitude').text if root.find('Longitude') is", "e: print e raise e ## For music message only", "'event'] if MsgType not in MsgType_list: raise ValueError, \"MsgType '%s'", "to class to make something as : 'self._FromUserName' #for i", "all the responsing message valid for wechat # For more", "the ```self.the_dict``` according to the message type by such as", "self.the_dict['video'][k] = v elif k == 'title': self.the_dict['video'][k] = v", "openid request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request) except Exception", "will be returned. ''' token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/get?access_token=\"+", "= '' # Store the XML incomingMessage if has #", "in ['text', 'image', 'voice', 'video', 'music', 'news']: raise ValueError, \"It", "\"description\":\"Is Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" }, { \"title\":\"Happy", "\"music\": { \"title\":\"MUSIC_TITLE\", \"description\":\"MUSIC_DESCRIPTION\", \"musicurl\":\"MUSIC_URL\", \"hqmusicurl\":\"HQ_MUSIC_URL\", \"thumb_media_id\":\"THUMB_MEDIA_ID\" } }''' json_news", "the author for authentication. Apr 2014 import sys reload(sys) sys.setdefaultencoding('utf-8')", "root.find('Longitude').text if root.find('Precision') is not None: self._Precision = root.find('Precision').text def", "<ToUserName>the_wechat_client</ToUserName> <FromUserName>the_server</FromUserName> <CreateTime>1397808770</CreateTime> <MsgType>text</MsgType> <Content>Hello dude!</Content> </xml> >>> \"\"\" def", "e # package article def packageArticle(title= \"default title\", description=\"default description\",", "ID. The ID is of type 'int'. 
If openid wrong", "gotten = a.read() a_dict = json.loads(gotten) if a_dict.has_key('access_token'): return a_dict['access_token']", "<CreateTime>12345678</CreateTime> <MsgType><![CDATA[music]]></MsgType> <Music> <Title><![CDATA[TITLE]]></Title> <Description><![CDATA[DESCRIPTION]]></Description> <MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl> <HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl> <ThumbMediaId><![CDATA[media_id]]></ThumbMediaId> </Music> </xml>'''", "such as: 'self._FromUsername' for k, v in kwargs.items(): try: ##", "visit, http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF''' url = \"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\" + self._token + \"&openid=\" +", "v elif k == 'title': self.the_dict['music'][k] = v elif k", "% media_type else: self._media_type = media_type url = \"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\" +", "True >>> holder.initType(MsgType='text') # Or we can just ellipsis this", "k == 'msgtype': self.the_dict['msgtype'] = 'news' except Exception as e:", "<MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl> <HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl> <ThumbMediaId><![CDATA[media_id]]></ThumbMediaId> </Music> </xml>''' tpl_news = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName>", "needs a token and the menu format. The ```menu_format``` is", "Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] } }''' class SubscriberManager(object):", "== 'voice': if root.find('MediaId') is not None: self._MediaId = root.find('MediaId').text", "Or we can just ellipsis this operation since by default", "'text' MsgType = 'text' # Unique tages in all the", "except Exception as e: print e return False else: j", "to set the 'CreateTime' since it has been generated automatically", "return True else: return False else: return False def getSubscriberList(self,", "the determained group id with the new_name. 
'True' or False", "way we can then use ```dumpXML()``` to get the XML", "'''There are four types of media suppored by wechat. image,", "self.initType('video', incomingMessage) elif msgType == 'location': self.initType('location', incomingMessage) elif msgType", "the followings: 1) check parent message type :\"MsgType\" 2) check", "are correct then a string 'token' will be return. If", "be return. If not , 'return None' ''' default_url =", "not. For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E7.A7.BB.E5.8A.A8.E7.94.A8.E6.88.B7.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/members/update?access_token=\"", "tag 'Event' then, it is a voice event message elif", "self._token = token def loadToken(self, token = ''): self._token =", "in child_list: # if i == 'CreateTime': # setattr(self,\"_\"+i, str(int(time.time())))", "XML message passed from wechat server Make the value variable", "in eventMapping: for e in eventMapping[k]: try: delattr(self, '_' +", "we just check whether the ```v``` is type of list", "= v else: raise ValueError, \"The value of the key", "\"The value of the key 'articles' should be of type", "else: self._MediaId = '' if root.find('ThumbMediaId') is not None: self._ThumbMediaId", "k == 'media_id': self.the_dict['video'][k] = v elif k == 'title':", "then, it is a voice event message elif root.find(\"Event\") is", "kwargs.items(): try: if k == 'media_id': self.the_dict['voice'][k] = v else:", "self._Scale = '' if root.find('Label') is not None: self._Label =", "the tag ''' try: gotten = getattr(self, \"_\" + tag)", "'text' JSON format the_json_tpl = globals()['json_' + self._MsgType].encode('utf-8').decode('utf-8') self.the_dict =", "will be returned. 
For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84''' url", "\"touser\":\"OPENID\", \"msgtype\":\"image\", \"image\": { \"media_id\":\"MEDIA_ID\" } }''' json_voice = '''{", "as e: print e return False else: j = json.loads(response.read())", "else: ## Load the template #for i in tpl_list: #", "'location': self.initType('location', incomingMessage) elif msgType == 'link': self.initType('link', incomingMessage) elif", "utf-8 -*- # Copyright to <NAME>. # Any distrubites of", "<FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[news]]></MsgType> <ArticleCount>2</ArticleCount> <Articles> <item> <Title><![CDATA[title1]]></Title> <Description><![CDATA[description1]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url>", "pass 6 types of messages to those wechat clients \\n", "self._token = token def getSubscriberProfile(self, openid='', lang='zh_CN'): '''The open_id parameter", "e ## For voice message only elif self._MsgType == 'voice':", "json_music = '''{ \"touser\":\"OPENID\", \"msgtype\":\"music\", \"music\": { \"title\":\"MUSIC_TITLE\", \"description\":\"MUSIC_DESCRIPTION\", \"musicurl\":\"MUSIC_URL\",", "lambda : 'POST' try: response = urllib2.urlopen(request) except Exception as", "only if self._MsgType == 'text': for k, v in kwargs.items():", "# to check if the message was accepted if j['errcode']", "its better to raise something here if the wechat remote", "else: return False def moveHimToGroup(self, openid='', groupid=''): '''Move him to", "json_video = '''{ \"touser\":\"OPENID\", \"msgtype\":\"video\", \"video\": { \"media_id\":\"MEDIA_ID\", \"title\":\"TITLE\", \"description\":\"DESCRIPTION\"", "return False def getMenu(self): '''Get the menu format from the", "hisprofile = sm.getSubscriberProfile(openid='his_open_id', lang='zh_CN') 
''' def __init__(self, token=''): self._token =", "url = \"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\" + self._token postData = '{\"group\":{\"id\":%s,\"name\":\"%s\"}}' % (groupid,", "video message only elif self.MsgType == 'video': if root.find('MediaId') is", "else: self._ToUserName = '' if root.find('FromUserName') is not None: self._FromUserName", "if k == 'media_id': self.the_dict['image'][k] = v else: self.the_dict[k] =", "if a_dict['errcode'] == 0: return True else: return False else:", ">>> tpl_out = rc.dumpXML() >>> tpl_out >>><xml> <ToUserName>the_wechat_client</ToUserName> <FromUserName>the_server</FromUserName> <CreateTime>1397808770</CreateTime>", "type by such as ```initType(MsgType='text')``` Notice: all the kwargs 's", "else: self._MsgId = '' # Store the XML incomingMessage if", "tmp else: return gotten def digest(self, incomingMessage): '''To digest the", "'MusicUrl': self.root.find('Video').find('MusicUrl').text = v elif k == 'HQMusicUrl': self.root.find('Video').find('HQMusicUrl').text =", "<ArticleCount>2</ArticleCount> <Articles> <item> <Title><![CDATA[title1]]></Title> <Description><![CDATA[description1]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> <item> <Title><![CDATA[title]]></Title>", "not, a 'None' will be returned. For more information please", "'location', 'link', 'event']: # Check if the incomingMessage has tag", "= '' if root.find('FromUserName') is not None: self._FromUserName = root.find('FromUserName').text", "claim ''' ## For text message only if self._MsgType ==", "self.the_dict['music'][k] = v else: self.the_dict[k] = v except Exception as", "please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6''' if media_type not in ['image', 'voice', 'video',", "# TODO to generate articles as #print v #print etree.tostring(self.root)", "moved or not. 
For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E7.A7.BB.E5.8A.A8.E7.94.A8.E6.88.B7.E5.88.86.E7.BB.84''' url", "MsgType '%s'\" % MsgType else: ## Load the template #for", "more information, please visit : http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF global tpl_text global tpl_image", "if the message was accepted if j['errcode'] == 0: return", "self._PicUrl = '' if root.find('MediaId') is not None: self._MediaId =", "operation since by default its 'text' >>> hasattr(holder, \"_PicUrl\") >>>", "Day\", \"description\":\"Is Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ]", "incomingMessage) elif msgType == 'image': self.initType('image', incomingMessage) # TODO #", "None else: return a_dict def getHisGroupID(self, openid=''): '''Get a subscriber's", "\"default title\", description=\"default description\", url=\"http://www.baidu.com\", picurl=\"http://www.baidu.com/img/bdlogo.gif\"): '''This will return an", "the message type to have ```self.the_dict``` self.__init__(MsgType) def setElementByKey(self, **kwargs):", "# means wrong appid or secret else: return None def", "__init__(self, MsgType='text'): self._MsgType = MsgType # By default set the", "[] #for child in self.root.getchildren(): # child_list += [str(child)] ###", "== 'content': self.the_dict['text'][k] = v else: self.the_dict[k] = v except", "print e raise e ## For music message only elif", "u'V1001_TODAY_SINGER', u'sub_button': []}, {u'name': u'\\u7b2c\\u4e09\\u94ae', u'sub_button': [{u'url': u'http://www.soso.com/', u'type': u'view',", "root.find('MediaId') is not None: self._MediaId = root.find('MediaId').text else: self._MediaId =", "For text message only if self._MsgType == 'text': for k,", "not exists or not valid will return None. 
For the", "__init__(self, media_type='image', token = ''): self._media_type = media_type self._token =", "root = etree.fromstring(incomingMessage) # The 5 ones in common if", "others: 'zh_TW, en' For more information: please visit, http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF''' url", "return None else: return a_dict def createGroup(self, name=''): '''Create a", "commonTag = ['ToUserName', 'FromUserName', 'CreateTime', 'MsgId', 'MsgType'] # For normal", "-*- coding: utf-8 -*- # Copyright to <NAME>. # Any", "None: self._Location_X = root.find('Location_X').text else: self._Location_X = '' if root.find('Location_Y')", "v in kwargs.items(): try: if k == 'media_id': self.the_dict['video'][k] =", "flag True >>> menu_got = mm.getMenu() >>> menu_got {u'menu': {u'button':", "a dict would be returned. If not, 'None' will be", "def __init__(self, token=''): self._token = token def loadToken(self, token=''): '''Load", "= \"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\" + self._token + \"&next_openid=\" + next_openid try: response", "v if k == 'Articles': # TODO to generate articles", "public service. This function will return a dict if ```token```", "will be returned. If not, 'None' will be returned. 
For", "== 'ThumbMediaId': self.root.find('Video').find('ThumbMediaId').text = v else: try: ## assign/update value", "'recognition': self.initType('voice', incomingMessage) # Construct a var ```self._Recognition``` since it", "are of : 'text', 'image', 'voice', 'video', 'location', 'link' After", "Store the XML incomingMessage if has # For text message", "media_type self._token = token def loadToken(self, token = ''): self._token", "in kwargs.items(): if k == 'ArticleCount': self.root.find(k).text = v if", "\"media_id\":\"MEDIA_ID\", \"title\":\"TITLE\", \"description\":\"DESCRIPTION\" } }''' json_music = '''{ \"touser\":\"OPENID\", \"msgtype\":\"music\",", "name=''): '''Create a determained group name. If created, then it", "token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/get?access_token=\"+ token try: response =", "[]}, {u'type': u'click', u'name': u'\\u7b2c\\u4e8c\\u94ae', u'key': u'V1001_TODAY_SINGER', u'sub_button': []}, {u'name':", "a subscriber's group ID. The ID is of type 'int'.", "root.find('CreateTime').text else: self._CreateTime = '1000000000' if root.find('MsgType') is not None:", "In this way we can then use ```dumpXML()``` to get", "if a_dict['errcode'] != 0: return None else: return a_dict else:", "\"description\":description, \"url\":url, \"picurl\":picurl}] # to dump the the dict as", "secret if a_dict.has_key('errcode'): return None else: return a_dict def createGroup(self,", "http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF global tpl_text global tpl_image global tpl_voice global tpl_video global", "After making a new instance of the class, need to", "# Delete the unuseful elements in normalMapping for k in", "else: return None def postMessage2API(token='',messageString=''): '''Using the token, post the", "else: a_dict = json.loads(response.read()) #print a_dict if a_dict.has_key('errcode'): return None", "# For text message only if self.MsgType == 'text': 
if", "postData = '{\"openid\":\"%s\"}' % openid request = urllib2.Request(url,data=postData) try: response", "format. The ```menu_format``` is of type string. But ```menu_format``` is", "value of the XML special for image for k, v", "self.initType('link', incomingMessage) elif msgType == 'image': self.initType('image', incomingMessage) # TODO", "json.loads(response.read()) # The above works #print j # to check", "menu_got = mm.getMenu() >>> menu_got {u'menu': {u'button': [{u'type': u'click', u'name':", "MediaManager(object): '''There are four types of media suppored by wechat.", "the elment of the key ```articles``` for the news message", "Any distrubites of this copy should inform its author. If", "Delete the unuseful elements in eventMapping for k in eventMapping:", "+ 'appid=' + appid + '&secret=' + appsecret try: a", "invalid, 'None' will be returned. For more information, please visit:", "if self.type == 'normal': if msgType == 'text': self.initType('text', incomingMessage)", "if media_type not in ['image', 'voice', 'video', 'thumb']: raise ValueError,", "Get message type based from the ```incomingMessage``` variable if msgType", "text message only if self._MsgType == 'text': # To set", "child in self.root.getchildren(): # child_list += [str(child)] ### Attach 'tag'", "\"description\":\"Is Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] }", "of messages to those wechat clients \\n who sent messages", "return None else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode']", "'False' if moved or not. For more information please visit:", "function should be of lower case. 
Official wechat defines that.
image, voice, video,", "k in eventMapping: for e in eventMapping[k]: try: delattr(self, '_'", "we can ellipsis this since it is of 'text' by", "<CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[你好]]></Content> </xml>''' tpl_image = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime>", "as e: print e raise e ## For news message", "message only if self._MsgType == 'text': # To set attribute", "if k == 'Articles': # TODO to generate articles as", "'media_id': self.the_dict['video'][k] = v elif k == 'title': self.the_dict['video'][k] =", "use ```dumpXML()``` to get the XML we need to reponse", "elif self.MsgType == 'image': if root.find('PicUrl') is not None: self._PicUrl", ") # break ## Set the default tag value ###", "self.MsgType == 'text': if root.find('Content') is not None: self._Content =", "= token def loadToken(self, token=''): '''Load the token before using", "is of type string. But ```menu_format``` is constructed from a", "ValueError, \"Invalid responsing message MsgType '%s'\" % MsgType else: ##", "eventMapping eventMapping = { # The list presents the combined", "dict if ```token``` and ```open_id``` are valid. If not exists", "= v elif k == 'MusicUrl': self.root.find('Video').find('MusicUrl').text = v elif", "should be of lower case. Official wechat define that. Don't", "{ \"media_id\":\"MEDIA_ID\", \"title\":\"TITLE\", \"description\":\"DESCRIPTION\" } }''' json_music = '''{ \"touser\":\"OPENID\",", "<Description><![CDATA[description1]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> <item> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item>", "# child_list += [str(child)] ### Attach 'tag' object to class", "coding: utf-8 -*- # Copyright to <NAME>. 
# Any distribution
''' def __init__(self, media_type='image', token = ''):", "to raise something here if the wechat remote server is", "None else: return a_dict else: return a_dict def deleteMenu(self): token", "else: return False else: return False class MediaManager(object): '''There are", "By default, MsgType is set as 'text' MsgType = 'text'", "visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8 ''' url = \"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\" + self._token + \"&next_openid=\"", "id with the new_name. 'True' or False if updated or", "0: return True else: return False else: return False def", "self._token = token def loadToken(self, token=''): '''Firstly load the access", "please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E7.A7.BB.E5.8A.A8.E7.94.A8.E6.88.B7.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/members/update?access_token=\" + self._token postData =", "delattr(self, '_' + c) except: pass # Delete the unuseful", "# Construct a var ```self._Recognition``` since it is just of", "root.find('Location_X').text else: self._Location_X = '' if root.find('Location_Y') is not None:", "attribute value to such as: 'self._FromUsername' for k, v in", "it is of 'text' by default >>> # Notice we", "the ```v``` should be packaged in a list already #", "as e: print e raise e ## For image message", "self.type == 'event': self.initType('event', incomingMessage) class RespondingContainer(object): \"\"\"Package XML to", "For recognition messages if self.type == 'event': self.initType('event', incomingMessage) class", "# By default set the ```self.the_dict``` as from the 'text'", "If not, will return None. 
''' url = \"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\" +", "try: datagen, headers = multipart_encode({\"image1\": open(media_path,\"rb\")}) except Exception as e:", "= '{\"group\": {\"name\": \"%s\"} }' % name request = urllib2.Request(url,data=postData)", "else: j = json.loads(response.read()) # The above works #print j", "#for i in child_list: # if i == 'CreateTime': #", "return. If not , 'return None' ''' default_url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&'", "== 'description': self.the_dict['music'][k] = v elif k == 'hqmusicurl': self.the_dict['music'][k]", "'text' >>> hasattr(holder, \"_PicUrl\") >>> False >>> hasattr(holder, \"_Content\") >>>", "automatically :) >>> rc.setElementByTag(FromUserName='the_server', ToUserName='the_wechat_client',Content='Hello dude!') >>> tpl_out = rc.dumpXML()", "token def loadToken(self, token = ''): self._token = token def", "no message type: '%s'\" % MsgType else: # pass the", "v elif k == 'Title': self.root.find('Video').find('Title').text = v elif k", "response = urllib2.urlopen(url) except Exception as e: # its better", "</xml>''' tpl_news = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[news]]></MsgType> <ArticleCount>2</ArticleCount> <Articles>", "is down print e return None else: a_dict = json.loads(response.read())", ">>> setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello dude!') # In this way we can then", "elif k == 'Title': self.root.find('Video').find('Title').text = v elif k ==", "</Articles> </xml>''' # Positive response class PositiveRespondingContainer(object): '''Using wechat custom", "then it will return the new group id of type", "flag2 True >>> mm.getMenu() >>> # nothing gotten: it means", "def postMessage2API(token='',messageString=''): '''Using the token, post the message to determained", "headers = 
multipart_encode({\"image1\": open(media_path,\"rb\")}) except Exception as e: #print e", "For more information, please visit : http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF global tpl_text global", "return True else: return False def getMenu(self): '''Get the menu", "to determained wechat message For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF", "= 'recognition' # Check if the incomingMessage has tag 'Event'", "reponse to wechat clients! :) \"\"\" ## assign the basic", "For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E7.94.A8.E6.88.B7.E6.89.80.E5.9C.A8.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+ self._token", "if MsgType == i: self.MsgType = i break # Delete", "token and the menu format. The ```menu_format``` is of type", "== 'musicurl': self.the_dict['music'][k] = v elif k == 'title': self.the_dict['music'][k]", "'_' + e) except: pass self.__init__(incomingMessage) # releasing method def", "menu of the wechat service Usage: >>> mm = MenuManager()", "= \"Instance has no attribute _%s\" % tag #raise AttributeError,", "MsgType is set as 'text' MsgType = 'text' # Unique", "'int'. 
If the openid is wrong or the token is invalid, 'None' will be
For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E6.89.80.E6.9C.89.E5.88.86.E7.BB.84 ''' url =", "not None: self._MsgType = root.find('MsgType').text else: self._MsgType = '' if", "self._FromUserName = root.find('FromUserName').text else: self._FromUserName = '' if root.find('CreateTime') is", "print e raise e ## For news message only elif", "setattr(self,\"_\"+i, '') self.__init__(MsgType) #def setElementByTag(self, tag): def setElementByTag(self, **kwargs): \"\"\"", "== 'description': self.the_dict['video'][k] = v else: self.the_dict[k] = v except", "By default set root as the 'text' XML format the_tpl", "group ID. The ID is of type 'int'. If openid", "= \"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\" + self._token postData = '{\"group\": {\"name\": \"%s\"} }'", "the template #for i in tpl_list: # if MsgType ==", "'news']: raise ValueError, \"It has no message type: '%s'\" %", "== 'text': self.initType('text', incomingMessage) elif msgType == 'image': self.initType('image', incomingMessage)", "'' if root.find('EventKey') is not None: self._EventKey = root.find('EventKey').text if", "= token def uploadMedia(self, media_type='image', media_path=''): '''Post the determained media", "a_dict.has_key('errcode'): return None else: return a_dict def getAPIToken(appid='', appsecret=''): '''Get", "XML we need to reponse to wechat clients! 
:) \"\"\"", "== 'video': # To set attribute value of the XML", "self._Content = '' # For image message only elif self.MsgType", "of the wechat service Usage: >>> mm = MenuManager() >>>", "root.find(\"Recognition\").text # For recognition messages if self.type == 'event': self.initType('event',", "'normal message => voice' self._Recognition = root.find(\"Recognition\").text # For recognition", "== 'HQMusicUrl': self.root.find('Video').find('HQMusicUrl').text = v elif k == 'ThumbMediaId': self.root.find('Video').find('ThumbMediaId').text", "a dict will be returned. If the ```next_openid``` does not", "{u'type': u'click', u'name': u'\\u5938\\u6211\\u5e05', u'key': u'<KEY>', u'sub_button': []}]}]}} >>> flag2", "to the offical wechat server and get the response. '''", "2014 import sys reload(sys) sys.setdefaultencoding('utf-8') from lxml import etree import", "if root.find('CreateTime') is not None: self._CreateTime = root.find('CreateTime').text else: self._CreateTime", "wechat server Make the value variable The 'incomingMessage' is of", "the XML special for image for k, v in kwargs.items():", "and etc..``` Logistics as the followings: 1) check parent message", "rc.setElementByTag(FromUserName='the_server', ToUserName='the_wechat_client',Content='Hello dude!') >>> tpl_out = rc.dumpXML() >>> tpl_out >>><xml>", "return True else: return False else: return False def moveHimToGroup(self,", "v else: try: ## assign/update value to the new XML", "initType(self, MsgType='text'): if MsgType not in ['text', 'image', 'voice', 'video',", "self._ToUserName = root.find('ToUserName').text else: self._ToUserName = '' if root.find('FromUserName') is", "% openid request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request) except", "elif self._MsgType == 'video': # To set attribute value of", "'normal' # For normal messages if self.type == 'normal': if", "<CreateTime>1397808770</CreateTime> <MsgType>text</MsgType> <Content>Hello dude!</Content> </xml> >>> \"\"\" 
def __init__(self, MsgType='text'):", "try: delattr(self, '_' + c) except: pass # Delete the", "in eventMapping[k]: try: delattr(self, '_' + e) except: pass self.__init__(incomingMessage)", "the 'CreateTime' since it has been generated automatically :) >>>", "not, will return None. ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\" + self._token", "Exception as e: print e raise e #raise AttributeError, \"Message", "= 'normal' # For normal messages if self.type == 'normal':", ">>> \"\"\" def __init__(self, MsgType='text'): self._MsgType = MsgType # By", "the API. If there be, then a dict would be", "k == 'media_id': self.the_dict['voice'][k] = v else: self.the_dict[k] = v", "voice recognition message if root.find(\"Recognition\") is not None: self.type =", "True else: return False else: return False def getSubscriberList(self, next_openid=''):", "only elif self._MsgType == 'news': for k, v in kwargs.items():", "types of messages to those wechat clients \\n who sent", "to the object #getattr(self, \"_\"+k) = v ## assign/update value", "] } }''' class SubscriberManager(object): '''To manage the subscriber groups,", "according to the message type by such as ```initType(MsgType='text')``` Notice:", "root.find('Description') is not None: self._Description = root.find('Description').text else: self._Description =", "<Content>Hello dude!</Content> </xml> >>> \"\"\" def __init__(self, MsgType='text'): self._MsgType =", "A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" }, { \"title\":\"Happy Day\", \"description\":\"Is", "= root.find('Event').text else: self._Event = '' if root.find('EventKey') is not", "else: self._Description = '' if root.find('Url') is not None: self._Url", "else: self._Event = '' if root.find('EventKey') is not None: self._EventKey", "= root.find('Location_Y').text else: self._Location_Y = '' if root.find('Scale') is not", "Those 6 types of messages include: text, image, voice, video,", "MsgType = 'text' # Unique tages in 
all the mapping", "MsgType_list = ['text', 'image', 'voice', 'video', 'location', 'link', 'event'] if", "= json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return a_dict['groupid'] def", "a var ```self._Recognition``` since it is just of this more", "If for commercial, please inform the author for authentication. Apr", "else: self._Title = '' if root.find('Description') is not None: self._Description", "self.root = etree.fromstring( the_xml ) # break ## Set the", "object to class to make something as : 'self._FromUserName' #for", "else: # pass the message type to have ```self.the_dict``` self.__init__(MsgType)", "message only elif self._MsgType == 'image': for k, v in", "elif k == 'thumb_media_id': self.the_dict['music'][k] = v else: self.the_dict[k] =", "incomingMessage) # Construct a var ```self._Recognition``` since it is just", "= root.find('Precision').text def initType(self, MsgType='text', incomingMessage='<xml></xml>'): ''' To initialize message", "the combined tag set of the event message 'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision' ],", "a dict. While construcing the JSON dumped, This is used", "</xml> >>> \"\"\" def __init__(self, MsgType='text'): self._MsgType = MsgType #", "'int'. If not, will return None. ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\"", "# setattr(self,\"_\"+i, str(int(time.time()))) # else: # setattr(self,\"_\"+i, '') self.__init__(MsgType) #def", "If the ```next_openid``` does not exist, official wechat server takes", "not in ['image', 'voice', 'video', 'thumb']: raise ValueError, \"Media type:", "self.root = etree.fromstring(the_tpl) #print self.root.find(\"FromUserName\").text #print type(self.root.find(\"FromUserName\").text) def initType(self, MsgType='text'):", "def getHisGroupID(self, openid=''): '''Get a subscriber's group ID. 
The ID", "if a_dict.has_key('errcode'): return None else: return a_dict def getHisGroupID(self, openid=''):", "def setElementByKey(self, **kwargs): '''To set the ```self.the_dict``` according to the", "be returned. For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E6.89.80.E6.9C.89.E5.88.86.E7.BB.84 ''' url", "package XML message into an object Usage: >>> setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello dude!')", "= globals()['tpl_' + self._MsgType].encode('utf-8').decode('utf-8') self.root = etree.fromstring(the_tpl) #print self.root.find(\"FromUserName\").text #print", "'MediaId'], 'voice':['MediaId','Format'], 'video':['MediaId','ThumbMeiaId'], 'location':['Location_X','Location_Y','Scale', 'Label'], 'link':['Title','Description','Url'], } # For event", "return False class MenuManager(object): '''To manage the bottom menu of", "self._Format = '' # For video message only elif self.MsgType", "will return a dict if ```token``` and ```open_id``` are valid.", "'' # For image message only elif self.MsgType == 'image':", "of normal message global commonTag commonTag = ['ToUserName', 'FromUserName', 'CreateTime',", "if type(v) == list: self.the_dict['news'][k] = v else: raise ValueError,", "tpl_list = ['text', 'image', 'voice', 'video', 'music', 'news'] if MsgType", "raise e ## For article message only elif self._MsgType ==", "tags for c in commonTag: try: delattr(self, '_' + c)", "'voice', 'video', 'music', 'news']: raise ValueError, \"It has no message", "Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" }, { \"title\":\"Happy Day\", \"description\":\"Is Really A", "False else: return False def getSubscriberList(self, next_openid=''): '''To get subscriber", "self.MsgType == 'link': if root.find('Title') is not None: self._Title =", "self.__init__(incomingMessage) # releasing method def __del__(self): pass #@property def 
getElementByTag(self,", "gotten def digest(self, incomingMessage): '''To digest the XML message passed", "the common tags for c in commonTag: try: delattr(self, '_'", "if k !=self.MsgType: for m in normalMapping[k]: try: delattr(self, '_'", "followings: 1) check parent message type :\"MsgType\" 2) check subclass", "kwargs 's key in this function should be of lower", "== 'msgtype': self.the_dict['msgtype'] = 'news' except Exception as e: print", "service. This function will return a dict if ```token``` and", "offical wechat server and get the response. ''' def __init__(self,", "Exception as e: print e raise e ## For video", "if updated or not. For more information, please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E4.BF.AE.E6.94.B9.E5.88.86.E7.BB.84.E5.90.8D", "print e raise e ## For article message only elif", "tags in-common of normal message global commonTag commonTag = ['ToUserName',", "functions below''' self._token = token def getSubscriberProfile(self, openid='', lang='zh_CN'): '''The", "u'sub_button': []}, {u'url': u'http://v.qq.com/', u'type': u'view', u'name': u'\\u770b\\u7535\\u5f71', u'sub_button': []},", "None: self._Scale = root.find('Scale').text else: self._Scale = '' if root.find('Label')", ">>> menu_got {u'menu': {u'button': [{u'type': u'click', u'name': u'\\u7b2c\\u4e00\\u94ae', u'key': u'V1001_TODAY_MUSIC',", "None: self._ThumbMediaId = root.find('ThumbMediaId').text else: self._ThumbMediaId = '' # For", "= '' # For location message only elif self.MsgType ==", "in kwargs.items(): try: if k == 'media_id': self.the_dict['voice'][k] = v", "% (openid, groupid) request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request)", "by default If not, a 'None' will be returned. 
For", "<Video> <MediaId><![CDATA[media_id]]></MediaId> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> </Video> </xml>''' tpl_music = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName>", "For event message mapping global eventMapping eventMapping = { #", "if valid. If ```token``` and ```next_openid``` are valid, then a", "get element from the tag ''' try: gotten = getattr(self,", "http://mp.weixin.qq.com/wiki/index.php?title=%E4%B8%8A%E4%BC%A0%E4%B8%8B%E8%BD%BD%E5%A4%9A%E5%AA%92%E4%BD%93%E6%96%87%E4%BB%B6''' if media_type not in ['image', 'voice', 'video', 'thumb']: raise", "u'type': u'view', u'name': u'\\u641c\\u641c', u'sub_button': []}, {u'url': u'http://v.qq.com/', u'type': u'view',", "root.find('CreateTime') is not None: self._CreateTime = root.find('CreateTime').text else: self._CreateTime =", "== 'media_id': self.the_dict['voice'][k] = v else: self.the_dict[k] = v except", "elment of the key ```articles``` for the news message '''", "Exception as e: print e raise e ## For news", "media_path=''): '''Post the determained media file to the offical URL", "'text': self.initType('text', incomingMessage) elif msgType == 'image': self.initType('image', incomingMessage) elif", "try: ## assign/update value to the new XML object self.root.find(k).text", "default If not, a 'None' will be returned. 
For more", "root.find('MediaId').text else: self._MediaId = '' # For voice message only", "in normalMapping: if k !=self.MsgType: for m in normalMapping[k]: try:", "#print v #print etree.tostring(self.root) self.root.find('Voice').find('MediaId').text = v else: try: ##", "loading def dumpDict(self): return self.the_dict json_text = '''{ \"touser\":\"OPENID\", \"msgtype\":\"text\",", "title\", description=\"default description\", url=\"http://www.baidu.com\", picurl=\"http://www.baidu.com/img/bdlogo.gif\"): '''This will return an article", "message was accepted if j['errcode'] == 0: return True else:", "root.find('EventKey') is not None: self._EventKey = root.find('EventKey').text if root.find('Ticket') is", "mapping global normalMapping normalMapping = { 'text':['Content'], 'image':['PicUrl', 'MediaId'], 'voice':['MediaId','Format'],", "__init__(self, token=''): self._token = token def loadToken(self, token=''): '''Load the", "'') self.__init__(MsgType) #def setElementByTag(self, tag): def setElementByTag(self, **kwargs): \"\"\" To", "# For more information, please visit : http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF global tpl_text", "deleteMenu(self): token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\" + token try:", "the XML incomingMessage if has # For text message only", "# the_xml = globals()['tpl_'+i] # self.root = etree.fromstring( the_xml )", "it is a voice recognition message if root.find(\"Recognition\") is not", "setattr(self,\"_\"+i, str(int(time.time()))) # else: # setattr(self,\"_\"+i, '') self.__init__(MsgType) #def setElementByTag(self,", "(self._MsgType, k) ## For image message only elif self._MsgType ==", "public wechat service. 
Those 6 types of messages include: text,", "if root.find('MediaId') is not None: self._MediaId = root.find('MediaId').text else: self._MediaId", "if MsgType not in MsgType_list: raise ValueError, \"MsgType '%s' not", "+ self._token try: response = urllib2.urlopen(url) except Exception as e:", "<reponame>xros/megaboat # -*- coding: utf-8 -*- # Copyright to <NAME>.", "self._MediaId = '' # For voice message only elif self.MsgType", "message only elif self.MsgType == 'voice': if root.find('MediaId') is not", "url = \"https://api.weixin.qq.com/cgi-bin/menu/get?access_token=\"+ token try: response = urllib2.urlopen(url) except Exception", "token invalid, 'None' will be returned. For more information, please", "If not , 'return None' ''' default_url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&' url", "], } # For recognition message mapping global recognitionMapping recognitionMapping", "</item> </Articles> </xml>''' # Positive response class PositiveRespondingContainer(object): '''Using wechat", "a_dict.has_key('errcode'): return None else: return a_dict['group']['id'] def getAllgroups(self): ''' A", "# Get message type based from the ```incomingMessage``` variable if", "'''Update the determained group id with the new_name. 'True' or", "== 'MusicUrl': self.root.find('Video').find('MusicUrl').text = v elif k == 'HQMusicUrl': self.root.find('Video').find('HQMusicUrl').text", "into an object Usage: >>> setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello dude!') # In this", "v in kwargs.items(): try: if k == 'media_id': self.the_dict['image'][k] =", "be returned. If not, 'None' will be returned. For more", "try: delattr(self, '_' + e) except: pass self.__init__(incomingMessage) # releasing", "dict format. 
We need to json.loads(the_dict_object) if we want to", "None: self._MsgType = root.find('MsgType').text else: self._MsgType = '' if root.find('MsgId')", "has no attribute _%s\" % tag #raise AttributeError, tmp else:", "## For text message only if self._MsgType == 'text': for", "in self.root.getchildren(): # child_list += [str(child)] ### Attach 'tag' object", "<FromUserName>the_server</FromUserName> <CreateTime>1397808770</CreateTime> <MsgType>text</MsgType> <Content>Hello dude!</Content> </xml> >>> \"\"\" def __init__(self,", "[{u'url': u'http://www.soso.com/', u'type': u'view', u'name': u'\\u641c\\u641c', u'sub_button': []}, {u'url': u'http://v.qq.com/',", "the menu format from the API. If there be, then", "'voice', 'video', 'music', 'news'] if MsgType not in tpl_list: raise", "] ''' if k == 'articles': if type(v) == list:", "'''Move him to other group. 'True' or 'False' if moved", "def updateGroupName(self, groupid='', new_name=''): '''Update the determained group id with", "MenuManager() >>> mm.loadToken('something_the_api_token') >>> flag = mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string') >>> flag True", "reponse back ''' def __init__(self, MsgType='text'): self._MsgType = MsgType #", "dude!') # In this way we can then use ```dumpXML()```", "global tpl_music global tpl_news tpl_text = u'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime>", "in common if root.find('ToUserName') is not None: self._ToUserName = root.find('ToUserName').text", "else: return a_dict['groupid'] def updateGroupName(self, groupid='', new_name=''): '''Update the determained", "MsgType='text'): self._MsgType = MsgType # By default set root as", "a_dict.has_key('errcode'): return None else: return a_dict['groupid'] def updateGroupName(self, groupid='', new_name=''):", "root.find('FromUserName').text else: self._FromUserName = '' if root.find('CreateTime') is not None:", "if self.type == 
'event': self.initType('event', incomingMessage) class RespondingContainer(object): \"\"\"Package XML", "valid. If ```token``` and ```next_openid``` are valid, then a dict", "this will assgin values to ```self.MsgType and etc..``` Logistics as", "\"Invalid responsing message MsgType '%s'\" % MsgType else: ## Load", "True else: return False class MenuManager(object): '''To manage the bottom", "etree.fromstring(incomingMessage) # The 5 ones in common if root.find('ToUserName') is", "the bottom menu of the wechat service Usage: >>> mm", "not None: self._Location_Y = root.find('Location_Y').text else: self._Location_Y = '' if", "elif self.MsgType == 'event': # It has to have a", "# ## the the template # the_xml = globals()['tpl_'+i] #", "type: '%s'\" % MsgType else: # pass the message type", "etc..``` Logistics as the followings: 1) check parent message type", "== 'music': for k, v in kwargs.items(): try: if k", "wrong or token invalid, 'None' will be returned. For more", "wechat. image, voice, video, thumb Post the file to the", "'thumb']: raise ValueError, \"Media type: '%s' not valid\" % media_type", "# Notice we don't need to set the 'CreateTime' since", "case. Official wechat define that. Don't claim ''' ## For", "None: self._Longitude = root.find('Longitude').text if root.find('Precision') is not None: self._Precision", "e: print e raise e ## For image message only", "the templates of all the responsing message valid for wechat", "'music', 'news']: raise ValueError, \"It has no message type: '%s'\"", "messageString) request.get_method = lambda : 'POST' try: response = urllib2.urlopen(request)", "= root.find('Description').text else: self._Description = '' if root.find('Url') is not", "'Articles': # TODO to generate articles as #print v #print", "(openid, groupid) request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request) except", "wechat API token for cusmter service or others. 
If ```appid```", "used with the function ```setElementByKey(touser='someone', msgtype='news', articles=packageArticle())``` ''' return [{\"title\":", "\"touser\":\"OPENID\", \"msgtype\":\"voice\", \"voice\": { \"media_id\":\"MEDIA_ID\" } }''' json_video = '''{", "offical URL If the image is valid, then a_dict will", "message only elif self._MsgType == 'news': for k, v in", "rc.initType('text') # Or we can ellipsis this since it is", "message only elif self._MsgType == 'video': # To set attribute", "self._MsgType = root.find('MsgType').text else: self._MsgType = '' if root.find('MsgId') is", "not valid will return None. For the parameter 'zh_CN', there", "For normal message mapping global normalMapping normalMapping = { 'text':['Content'],", "import sys reload(sys) sys.setdefaultencoding('utf-8') from lxml import etree import time", "None: self._Format = root.find('Format').text else: self._Format = '' # For", "= v elif k == 'thumb_media_id': self.the_dict['music'][k] = v else:", "self.the_dict['music'][k] = v elif k == 'hqmusicurl': self.the_dict['music'][k] = v", "elif k == 'MusicUrl': self.root.find('Video').find('MusicUrl').text = v elif k ==", "= \"https://api.weixin.qq.com/cgi-bin/menu/get?access_token=\"+ token try: response = urllib2.urlopen(url) except Exception as", "from the 'text' JSON format the_json_tpl = globals()['json_' + self._MsgType].encode('utf-8').decode('utf-8')", "not None: self._Url = root.find('Url').text else: self._Url = '' #", "setElementByKey(self, **kwargs): '''To set the ```self.the_dict``` according to the message", "value variable The 'incomingMessage' is of XML According to its", "if msgType in ['text', 'image', 'voice', 'video', 'location', 'link', 'event']:", "<Url><![CDATA[url]]></Url> </item> <item> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> </Articles> </xml>'''", "ValueError, \"MsgType '%s' not valid \" % MsgType 
for i", "['text', 'image', 'voice', 'video', 'music', 'news']: raise ValueError, \"It has", "Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] ''' if k ==", "en' For more information: please visit, http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF''' url = \"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\"", "self.root.find('CreateTime').text = str(int(time.time())) #print \"-----\" #print self._MsgType ## For text", "% name request = urllib2.Request(url,data=postData) request.get_method = lambda : 'POST'", "def loadToken(self, token=''): '''Load the token before using other functions'''", "message only elif self.MsgType == 'video': if root.find('MediaId') is not", "pass def initType(self, MsgType='text'): if MsgType not in ['text', 'image',", "not , 'return None' ''' default_url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&' url =", "to the offical URL If the image is valid, then", "digest the XML message passed from wechat server Make the", "tpl_voice = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[voice]]></MsgType> <Voice> <MediaId><![CDATA[media_id]]></MediaId> </Voice>", "self._CreateTime = root.find('CreateTime').text else: self._CreateTime = '1000000000' if root.find('MsgType') is", "For those tags in-common of normal message global commonTag commonTag", "<Description><![CDATA[description]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> </Articles> </xml>''' # Positive response class", "= \"https://api.weixin.qq.com/cgi-bin/groups/get?access_token=\" + self._token try: response = urllib2.urlopen(url) except Exception", "except Exception as e: print e raise e # package", "self.initType('location', incomingMessage) elif msgType == 'link': self.initType('link', incomingMessage) elif msgType", 
"ValueError, \"The value of the key 'articles' should be of", "recognition message if root.find(\"Recognition\") is not None: self.type = 'recognition'", "<MsgType><![CDATA[news]]></MsgType> <ArticleCount>2</ArticleCount> <Articles> <item> <Title><![CDATA[title1]]></Title> <Description><![CDATA[description1]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> <item>", "e ## For image message only elif self._MsgType == 'image':", "type :\"MsgType\" 2) check subclass message type if \"Voice Recognition\",", "= json.loads(the_json_tpl) if MsgType == 'text': pass def initType(self, MsgType='text'):", "not, 'None' will be returned. For more information, please visit:", "c) except: pass # Delete the unuseful elements in normalMapping", "i: self.MsgType = i break # Delete the common tags", "dumped is of dict format. We need to json.loads(the_dict_object) if", "messages if self.type == 'normal': if msgType == 'text': self.initType('text',", "multipart_encode({\"image1\": open(media_path,\"rb\")}) except Exception as e: #print e return None", "please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E5%85%B3%E6%B3%A8%E8%80%85%E5%88%97%E8%A1%A8 ''' url = \"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\" + self._token +", "+ \"&next_openid=\" + next_openid try: response = urllib2.urlopen(url) except Exception", "urllib2.Request(url,data=datagen,headers=headers) try: response = urllib2.urlopen(request) except Exception as e: print", "= \"https://api.weixin.qq.com/cgi-bin/groups/members/update?access_token=\" + self._token postData = '{\"openid\":\"%s\",\"to_groupid\":%s}' % (openid, groupid)", "elif self._MsgType == 'music': for k, v in kwargs.items(): try:", "visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E6.89.80.E6.9C.89.E5.88.86.E7.BB.84 ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/get?access_token=\" + self._token try: 
response", "= '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[video]]></MsgType> <Video> <MediaId><![CDATA[media_id]]></MediaId> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description>", "as e: print e raise e def dumpXML(self): # To", "self._token = token def createMenu(self, menu_format=''): '''Create menu, it needs", "__del__(self): pass #@property def getElementByTag(self, tag): '''To get element from", "For text message only if self.MsgType == 'text': if root.find('Content')", "k == 'Articles': # TODO to generate articles as #print", "url = \"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\" + self._token postData = '{\"group\": {\"name\": \"%s\"}", "u'click', u'name': u'\\u5938\\u6211\\u5e05', u'key': u'<KEY>', u'sub_button': []}]}]}} >>> flag2 =", "root.find('Content') is not None: self._Content = root.find('Content').text else: self._Content =", "self._Format = root.find('Format').text else: self._Format = '' # For video", "<FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[image]]></MsgType> <Image> <MediaId><![CDATA[media_id]]></MediaId> </Image> </xml>''' tpl_voice = '''<xml>", "id of type 'int'. If not, will return None. '''", "return None else: return a_dict['group']['id'] def getAllgroups(self): ''' A dict", "= '' if root.find('Url') is not None: self._Url = root.find('Url').text", "\"https://api.weixin.qq.com/cgi-bin/groups/members/update?access_token=\" + self._token postData = '{\"openid\":\"%s\",\"to_groupid\":%s}' % (openid, groupid) request", "'True' or False if updated or not. For more information,", "dict will be returned. 
If the ```next_openid``` does not exist,", "to the new XML object self.root.find(k).text = v except Exception", "self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\" + token try: response = urllib2.urlopen(url)", "its author. If for commercial, please inform the author for", "will be returned. If the ```next_openid``` does not exist, official", "service Usage: >>> mm = MenuManager() >>> mm.loadToken('something_the_api_token') >>> flag", "''' return [{\"title\": title, \"description\":description, \"url\":url, \"picurl\":picurl}] # to dump", "```token``` and ```open_id``` are valid. If not exists or not", "the incomingMessage has tag 'Recognition' then, it is a voice", "if k == 'musicurl': self.the_dict['music'][k] = v elif k ==", "raise e ## For voice message only elif self._MsgType ==", "else: # setattr(self,\"_\"+i, '') self.__init__(MsgType) #def setElementByTag(self, tag): def setElementByTag(self,", "+ m) except: pass # Delete the unuseful elements in", "message type ''' root = etree.fromstring(incomingMessage) msgType = root.find(\"MsgType\").text #", "JSON format the_json_tpl = globals()['json_' + self._MsgType].encode('utf-8').decode('utf-8') self.the_dict = json.loads(the_json_tpl)", "event messages if self.type == 'recognition': self.initType('voice', incomingMessage) # Construct", "to get the XML we need to reponse to wechat", "root.find('Precision').text def initType(self, MsgType='text', incomingMessage='<xml></xml>'): ''' To initialize message type", "= ['text', 'image', 'voice', 'video', 'music', 'news'] if MsgType not", "elif msgType == 'location': self.initType('location', incomingMessage) elif msgType == 'link':", "mapping global recognitionMapping recognitionMapping = { 'voice':['MediaId','Format','Recognition'], } def __init__(self,", "# By default, MsgType is set as 'text' MsgType =", "type to have ```self.the_dict``` self.__init__(MsgType) def setElementByKey(self, **kwargs): '''To set", 
"<MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[你好]]></Content> </xml>''' tpl_image = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[image]]></MsgType>", "that of 'normal message => voice' self._Recognition = root.find(\"Recognition\").text #", "as #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v elif k", "then its the elment of the key ```articles``` for the", "json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode'] != 0: return None else:", "json.loads(gotten) # means wrong appid or secret if a_dict.has_key('errcode'): return", "<item> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> <PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> </Articles> </xml>''' # Positive", "% (self._MsgType, k) ## For image message only elif self._MsgType", "urllib2.urlopen(request) except Exception as e: print e return None else:", "tag ''' try: gotten = getattr(self, \"_\" + tag) except:", "can ellipsis this since it is of 'text' by default", "else: self._Scale = '' if root.find('Label') is not None: self._Label", ">>> rc = RespondingContainer() >>> rc.initType('text') # Or we can", "type 'int'. 
If openid wrong or token invalid, 'None' will", "\"musicurl\":\"MUSIC_URL\", \"hqmusicurl\":\"HQ_MUSIC_URL\", \"thumb_media_id\":\"THUMB_MEDIA_ID\" } }''' json_news = '''{ \"touser\":\"OPENID\", \"msgtype\":\"news\",", "== 'thumb_media_id': self.the_dict['music'][k] = v else: self.the_dict[k] = v except", "''' MsgType_list = ['text', 'image', 'voice', 'video', 'location', 'link', 'event']", "incomingMessage='<xml></xml>'): # pre-set some common variables root = etree.fromstring(incomingMessage) #", ">>><xml> <ToUserName>the_wechat_client</ToUserName> <FromUserName>the_server</FromUserName> <CreateTime>1397808770</CreateTime> <MsgType>text</MsgType> <Content>Hello dude!</Content> </xml> >>> \"\"\"", "def createGroup(self, name=''): '''Create a determained group name. If created,", "\"\"\"Parsing Wechat messages for whose types are of : 'text',", "than that of 'normal message => voice' self._Recognition = root.find(\"Recognition\").text", "'image', 'voice', 'video', 'music', 'news'] if MsgType not in tpl_list:", "the message was accepted if j['errcode'] == 0: return True", "tpl_list: # if MsgType == i: # self._MsgType = MsgType", "'None' will be returned. ''' token = self._token url =", "message only elif self._MsgType == 'voice': # To set attribute", "MsgType # By default set the ```self.the_dict``` as from the", "'''Firstly load the access token, then use the functions below'''", "in normalMapping for k in normalMapping: if k !=self.MsgType: for", "i == 'CreateTime': # setattr(self,\"_\"+i, str(int(time.time()))) # else: # setattr(self,\"_\"+i,", "'video':['MediaId','ThumbMeiaId'], 'location':['Location_X','Location_Y','Scale', 'Label'], 'link':['Title','Description','Url'], } # For event message mapping", "more than that of 'normal message => voice' self._Recognition =", "and the menu format. 
The ```menu_format``` is of type string.", "= MenuManager() >>> mm.loadToken('something_the_api_token') >>> flag = mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string') >>> flag", "\"url\":url, \"picurl\":picurl}] # to dump the the dict as for", "wechat server takes it as '' by default If not,", "<Music> <Title><![CDATA[TITLE]]></Title> <Description><![CDATA[DESCRIPTION]]></Description> <MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl> <HQMusicUrl><![CDATA[HQ_MUSIC_Url]]></HQMusicUrl> <ThumbMediaId><![CDATA[media_id]]></ThumbMediaId> </Music> </xml>''' tpl_news =", "return [{\"title\": title, \"description\":description, \"url\":url, \"picurl\":picurl}] # to dump the", "For the parameter 'zh_CN', there are others: 'zh_TW, en' For", "more information: please visit, http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF''' url = \"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\" + self._token", "== 'video': if root.find('MediaId') is not None: self._MediaId = root.find('MediaId').text", "returned. If not, 'None' will be returned. 
''' token =", "'_' + m) except: pass # Delete the unuseful elements", "recognitionMapping = { 'voice':['MediaId','Format','Recognition'], } def __init__(self, incomingMessage='<xml></xml>'): # pre-set", "root.find('Ticket') is not None: self._Ticket = root.find('Ticket').text if root.find('Latitude') is", "e: print e raise e ## For news message only", "self.type == 'normal': if msgType == 'text': self.initType('text', incomingMessage) elif", "self.__init__(MsgType) def setElementByKey(self, **kwargs): '''To set the ```self.the_dict``` according to", ">>> '' \"\"\" # By default, MsgType is set as", "$~ python >>> holder = ParsingContainer() >>> hasattr(holder, \"_Content\") >>>", "= \"https://api.weixin.qq.com/cgi-bin/groups/update?access_token=\" + self._token postData = '{\"group\":{\"id\":%s,\"name\":\"%s\"}}' % (groupid, new_name)", "token = ''): self._media_type = media_type self._token = token def", "\"_Content\") >>> True >>> holder.getElementByTag('Content') >>> '' \"\"\" # By", "self._Url = '' # For event message only elif self.MsgType", "'''To digest the XML message passed from wechat server Make", "= { # The list presents the combined tag set", "passed from wechat server Make the value variable The 'incomingMessage'", "can then use ```dumpXML()``` to get the XML we need", "<PicUrl><![CDATA[picurl]]></PicUrl> <Url><![CDATA[url]]></Url> </item> </Articles> </xml>''' # Positive response class PositiveRespondingContainer(object):", "is of type 'int'. 
If openid wrong or token invalid,", "A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] ''' if k", ">>> True >>> holder.initType(MsgType='video') >>> hasattr(holder, \"_PicUrl\") >>> True >>>", "root.find(\"Recognition\") is not None: self.type = 'recognition' # Check if", "setElementByTag(self, tag): def setElementByTag(self, **kwargs): \"\"\" To package XML message", "j = json.loads(response.read()) # The above works #print j #", "has to have a ```self._Event``` for event message certainly if", "= v elif k == 'HQMusicUrl': self.root.find('Video').find('HQMusicUrl').text = v elif", "else: request = urllib2.Request(url,data=datagen,headers=headers) try: response = urllib2.urlopen(request) except Exception", "default its 'text' >>> hasattr(holder, \"_PicUrl\") >>> False >>> hasattr(holder,", "e #raise AttributeError, \"Message type '%s' has no attribute/tag '%s'\"", "json_text = '''{ \"touser\":\"OPENID\", \"msgtype\":\"text\", \"text\": { \"content\":\"Hello World\" }", "else: self._CreateTime = '1000000000' if root.find('MsgType') is not None: self._MsgType", "url = \"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\" + token request = urllib2.Request(url, messageString) request.get_method", "JSON. 
For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3 ''' token =", "been generated automatically :) >>> rc.setElementByTag(FromUserName='the_server', ToUserName='the_wechat_client',Content='Hello dude!') >>> tpl_out", "This is used with the function ```setElementByKey(touser='someone', msgtype='news', articles=packageArticle())``` '''", "## For text message only if self._MsgType == 'text': #", "name request = urllib2.Request(url,data=postData) request.get_method = lambda : 'POST' try:", "= urllib2.Request(url, menu_format) request.get_method = lambda : 'POST' try: response", "== 'voice': self.initType('voice', incomingMessage) elif msgType == 'video': self.initType('video', incomingMessage)", "only elif self._MsgType == 'article': # To set attribute value", "'video', 'location', 'link', 'event']: # Check if the incomingMessage has", "common variables root = etree.fromstring(incomingMessage) # The 5 ones in", "+ \"&lang=\" + lang try: a = urllib2.urlopen(url) except Exception", "normalMapping normalMapping = { 'text':['Content'], 'image':['PicUrl', 'MediaId'], 'voice':['MediaId','Format'], 'video':['MediaId','ThumbMeiaId'], 'location':['Location_X','Location_Y','Scale',", "file to the offical wechat server and get the response.", "self.root.getchildren(): # child_list += [str(child)] ### Attach 'tag' object to", "'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[voice]]></MsgType> <Voice> <MediaId><![CDATA[media_id]]></MediaId> </Voice> </xml>''' tpl_video", "\"Media type: '%s' not valid\" % media_type else: self._media_type =", "''' url = \"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\" + self._token postData = '{\"group\": {\"name\":", "= v if k == 'Articles': # TODO to generate", "urllib2.Request(url,data=postData) request.get_method = 
lambda : 'POST' try: response = urllib2.urlopen(request)", "json_news = '''{ \"touser\":\"OPENID\", \"msgtype\":\"news\", \"news\":{ \"articles\": [ { \"title\":\"Happy", "a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): return None else: return a_dict['group']['id']", "= urllib2.urlopen(request) except Exception as e: print e return False", "return None else: gotten = a.read() a_dict = json.loads(gotten) #", "return True else: return False else: return False class MediaManager(object):", "'MediaId': #print v #print etree.tostring(self.root) self.root.find('Image').find('MediaId').text = v else: try:", "MsgType='text'): if MsgType not in ['text', 'image', 'voice', 'video', 'music',", "created, then it will return the new group id of", "print e raise e def dumpXML(self): # To dump the", "set as 'text' MsgType = 'text' # Unique tages in", "'voice': self.initType('voice', incomingMessage) elif msgType == 'video': self.initType('video', incomingMessage) elif", "== 'MediaId': #print v #print etree.tostring(self.root) self.root.find('Voice').find('MediaId').text = v else:", "JSON loading def dumpDict(self): return self.the_dict json_text = '''{ \"touser\":\"OPENID\",", "elif self._MsgType == 'news': for k, v in kwargs.items(): try:", "if the incomingMessage has tag 'Recognition' then, it is a", "in a list already # if list, then its the", "None else: return a_dict['group']['id'] def getAllgroups(self): ''' A dict will", "sys.setdefaultencoding('utf-8') from lxml import etree import time import json import", "\" % MsgType for i in MsgType_list: if MsgType ==", "as e: print e return None else: gotten = a.read()", "etree.tostring(self.root) self.root.find('Video').find('MediaId').text = v elif k == 'Title': self.root.find('Video').find('Title').text =", "contains a dict. 
While construcing the JSON dumped, This is", "0: return True else: return False class MenuManager(object): '''To manage", "the 'text' XML format the_tpl = globals()['tpl_' + self._MsgType].encode('utf-8').decode('utf-8') self.root", "= v elif k == 'title': self.the_dict['video'][k] = v elif", "= self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/delete?access_token=\" + token try: response =", "createMenu(self, menu_format=''): '''Create menu, it needs a token and the", "def getAllgroups(self): ''' A dict will be returned. For more", "to json.loads(the_dict_object) if we want to pass the right reponse", "tages in all the mapping relationship # # For those", "# if MsgType == i: # self._MsgType = MsgType #", "urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request) except Exception as e: print", "can just ellipsis this operation since by default its 'text'", "}''' json_voice = '''{ \"touser\":\"OPENID\", \"msgtype\":\"voice\", \"voice\": { \"media_id\":\"MEDIA_ID\" }", "'' if root.find('Url') is not None: self._Url = root.find('Url').text else:", "Official wechat define that. 
Don't claim ''' ## For text", "check children class message type ''' root = etree.fromstring(incomingMessage) msgType", "child_list += [str(child)] ### Attach 'tag' object to class to", "visit: http://mp.weixin.qq.com/wiki/index.php?title=%E8%87%AA%E5%AE%9A%E4%B9%89%E8%8F%9C%E5%8D%95%E5%88%9B%E5%BB%BA%E6%8E%A5%E5%8F%A3 ''' token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/create?access_token=\" +", "# Or we can just ellipsis this operation since by", "we can then use ```dumpXML()``` to get the XML we", "not valid\" % media_type else: self._media_type = media_type url =", "print e return False else: a_dict = json.loads(response.read()) #print a_dict", "e return False else: j = json.loads(response.read()) # The above", "Wechat messages for whose types are of : 'text', 'image',", "'voice', 'video', 'location', 'link', 'event'] if MsgType not in MsgType_list:", "if root.find('FromUserName') is not None: self._FromUserName = root.find('FromUserName').text else: self._FromUserName", "of lower case. Official wechat define that. Don't claim '''", "u'\\u770b\\u7535\\u5f71', u'sub_button': []}, {u'type': u'click', u'name': u'\\u5938\\u6211\\u5e05', u'key': u'<KEY>', u'sub_button':", "= mm.deleteMenu() >>> flag2 True >>> mm.getMenu() >>> # nothing", "message only elif self.MsgType == 'event': # It has to", "\"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\" + self._token + \"&next_openid=\" + next_openid try: response =", "['text', 'image', 'voice', 'video', 'location', 'link', 'event']: # Check if", "will return the new group id of type 'int'. 
If", "self._Location_X = '' if root.find('Location_Y') is not None: self._Location_Y =", "= v elif k == 'msgtype': self.the_dict['msgtype'] = 'news' except", "message only elif self._MsgType == 'music': for k, v in", "# For voice message only elif self.MsgType == 'voice': if", "to its content this will assgin values to ```self.MsgType and", "is a voice event message elif root.find(\"Event\") is not None:", "'%s'\" % MsgType else: ## Load the template #for i", "None: self._Title = root.find('Title').text else: self._Title = '' if root.find('Description')", "Or we can ellipsis this since it is of 'text'", "assign/update value to the new XML object self.root.find(k).text = v", "# if list, then its the elment of the key", "value ### Get all the tags #child_list = [] #for", "For voice message only elif self.MsgType == 'voice': if root.find('MediaId')", "message into an object Usage: >>> setElementByTag(FromUserName='the_wechat_server',ToUserName='the_wechat_client',Content='Hello dude!') # In", "tpl_music global tpl_news tpl_text = u'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType>", "= root.find('Content').text else: self._Content = '' # For image message", "types of messages include: text, image, voice, video, music, news", "is unique to unique wechat public service. 
This function will", "root.find(\"MsgType\").text # Get message type based from the ```incomingMessage``` variable", "#tmp = \"Instance has no attribute _%s\" % tag #raise", "{u'button': [{u'type': u'click', u'name': u'\\u7b2c\\u4e00\\u94ae', u'key': u'V1001_TODAY_MUSIC', u'sub_button': []}, {u'type':", "the function ```setElementByKey(touser='someone', msgtype='news', articles=packageArticle())``` ''' return [{\"title\": title, \"description\":description,", "token try: response = urllib2.urlopen(url) except Exception as e: #", "#@property def getElementByTag(self, tag): '''To get element from the tag", "i: # self._MsgType = MsgType # ## the the template", "root as the 'text' XML format the_tpl = globals()['tpl_' +", "u'\\u7b2c\\u4e8c\\u94ae', u'key': u'V1001_TODAY_SINGER', u'sub_button': []}, {u'name': u'\\u7b2c\\u4e09\\u94ae', u'sub_button': [{u'url': u'http://www.soso.com/',", "self._Location_Y = root.find('Location_Y').text else: self._Location_Y = '' if root.find('Scale') is", "the the template # the_xml = globals()['tpl_'+i] # self.root =", "\"touser\":\"OPENID\", \"msgtype\":\"video\", \"video\": { \"media_id\":\"MEDIA_ID\", \"title\":\"TITLE\", \"description\":\"DESCRIPTION\" } }''' json_music", "\"\"\" def __init__(self, MsgType='text'): self._MsgType = MsgType # By default", "self._media_type register_openers() try: datagen, headers = multipart_encode({\"image1\": open(media_path,\"rb\")}) except Exception", "else: self._Location_Y = '' if root.find('Scale') is not None: self._Scale", "only elif self.MsgType == 'voice': if root.find('MediaId') is not None:", "image for k, v in kwargs.items(): if k == 'MediaId':", "= v ## assign/update value to the new XML object", "Day\", \"description\":\"Is Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" }, {", "i in child_list: # if i == 'CreateTime': # setattr(self,\"_\"+i,", "wechat # For more information, please visit : 
http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF global", "= v elif k == 'title': self.the_dict['music'][k] = v elif", "'&secret=' + appsecret try: a = urllib2.urlopen(url) except Exception as", "'msgtype': self.the_dict['msgtype'] = 'news' except Exception as e: print e", "## the the template # the_xml = globals()['tpl_'+i] # self.root", "self.root.find('Video').find('MediaId').text = v elif k == 'Title': self.root.find('Video').find('Title').text = v", "inform the author for authentication. Apr 2014 import sys reload(sys)", "= root.find('Location_X').text else: self._Location_X = '' if root.find('Location_Y') is not", "else: return False class MediaManager(object): '''There are four types of", "MsgType else: ## Load the template #for i in tpl_list:", "<MsgType><![CDATA[video]]></MsgType> <Video> <MediaId><![CDATA[media_id]]></MediaId> <Title><![CDATA[title]]></Title> <Description><![CDATA[description]]></Description> </Video> </xml>''' tpl_music = '''<xml>", "self.MsgType = i break # Delete the common tags for", "request = urllib2.Request(url, messageString) request.get_method = lambda : 'POST' try:", "v elif k == 'msgtype': self.the_dict['msgtype'] = 'news' except Exception", "= root.find('PicUrl').text else: self._PicUrl = '' if root.find('MediaId') is not", "voice, video, music, news The dumped is of dict format.", ":) >>> rc.setElementByTag(FromUserName='the_server', ToUserName='the_wechat_client',Content='Hello dude!') >>> tpl_out = rc.dumpXML() >>>", "= v elif k == 'description': self.the_dict['video'][k] = v else:", "= globals()['tpl_'+i] # self.root = etree.fromstring( the_xml ) # break", "we want to pass the right reponse back ''' def", ", 'return None' ''' default_url = 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&' url = default_url", "We need to json.loads(the_dict_object) if we want to pass the", "gotten: it means no menu at all ''' def __init__(self,", 
"elements in normalMapping for k in normalMapping: if k !=self.MsgType:", "profile, location, list. Usage: >>> sm = SubscriberManager() >>> sm.loadToken('<KEY>')", "the new group id of type 'int'. If not, will", "self._MsgType == 'article': # To set attribute value of the", "<MsgType>text</MsgType> <Content>Hello dude!</Content> </xml> >>> \"\"\" def __init__(self, MsgType='text'): self._MsgType", "root.find('MsgId') is not None: self._MsgId = root.find('MsgId').text else: self._MsgId =", "Positive response class PositiveRespondingContainer(object): '''Using wechat custom service API to", "to generate articles as #print v #print etree.tostring(self.root) self.root.find('Video').find('MediaId').text =", "returned. If the ```next_openid``` does not exist, official wechat server", "normalMapping for k in normalMapping: if k !=self.MsgType: for m", "return etree.tostring(self.root, encoding='utf-8',method='xml',pretty_print=True) # The down blow are the templates", "will return an article in a list which contains a", "text message only if self.MsgType == 'text': if root.find('Content') is", "self._MsgType == 'video': # To set attribute value of the", "} }''' json_image = '''{ \"touser\":\"OPENID\", \"msgtype\":\"image\", \"image\": { \"media_id\":\"MEDIA_ID\"", "</xml>''' # Positive response class PositiveRespondingContainer(object): '''Using wechat custom service", "root.find('PicUrl') is not None: self._PicUrl = root.find('PicUrl').text else: self._PicUrl =", "<NAME>. 
# Any distrubites of this copy should inform its", "import register_openers class ParsingContainer(object): \"\"\"Parsing Wechat messages for whose types", "recognition message mapping global recognitionMapping recognitionMapping = { 'voice':['MediaId','Format','Recognition'], }", "types are of : 'text', 'image', 'voice', 'video', 'location', 'link'", "exist, official wechat server takes it as '' by default", "lxml import etree import time import json import urllib import", "\"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\" + self._token postData = '{\"group\": {\"name\": \"%s\"} }' %", "\"&lang=\" + lang try: a = urllib2.urlopen(url) except Exception as", "return a dict if ```token``` and ```open_id``` are valid. If", "poster.encode import multipart_encode from poster.streaminghttp import register_openers class ParsingContainer(object): \"\"\"Parsing", "msgType == 'video': self.initType('video', incomingMessage) elif msgType == 'location': self.initType('location',", "to pass the right reponse back ''' def __init__(self, MsgType='text'):", ": 'POST' try: response = urllib2.urlopen(request) except Exception as e:", "works #print j # to check if the message was", "} # For event message mapping global eventMapping eventMapping =", "urllib2 # For media posting from poster.encode import multipart_encode from", "load the access token, then use the functions below''' self._token", "'''{ \"touser\":\"OPENID\", \"msgtype\":\"text\", \"text\": { \"content\":\"Hello World\" } }''' json_image", "tags #child_list = [] #for child in self.root.getchildren(): # child_list", "if \"Voice Recognition\", \"Event\", \"Normal\" 3) check children class message", "# For normal message mapping global normalMapping normalMapping = {", "from the ```incomingMessage``` variable if msgType in ['text', 'image', 'voice',", "k == 'description': self.the_dict['music'][k] = v elif k == 'hqmusicurl':", "self._token postData = '{\"group\": {\"name\": \"%s\"} }' % name request", "The 
list presents the combined tag set of the event", "the message to determained user. This returns a Boolean value'''", "event message only elif self.MsgType == 'event': # It has", "tag #raise AttributeError, tmp else: return gotten def digest(self, incomingMessage):", "% tag #raise AttributeError, tmp else: return gotten def digest(self,", "} }''' json_news = '''{ \"touser\":\"OPENID\", \"msgtype\":\"news\", \"news\":{ \"articles\": [", "which contains a dict. While construcing the JSON dumped, This", "'link': if root.find('Title') is not None: self._Title = root.find('Title').text else:", "type of list the ```v``` should be packaged in a", "of this more than that of 'normal message => voice'", "Exception as e: #print e return None #raise e else:", "else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): if a_dict['errcode'] == 0:", "For more information: please visit, http://mp.weixin.qq.com/wiki/index.php?title=%E8%8E%B7%E5%8F%96%E7%94%A8%E6%88%B7%E5%9F%BA%E6%9C%AC%E4%BF%A1%E6%81%AF''' url = \"https://api.weixin.qq.com/cgi-bin/user/info?access_token=\" +", "''' token = self._token url = \"https://api.weixin.qq.com/cgi-bin/menu/get?access_token=\"+ token try: response", "parameter 'zh_CN', there are others: 'zh_TW, en' For more information:", "ToUserName='the_wechat_client',Content='Hello dude!') >>> tpl_out = rc.dumpXML() >>> tpl_out >>><xml> <ToUserName>the_wechat_client</ToUserName>", "then a dict would be returned. If not, 'None' will", ": http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF global tpl_text global tpl_image global tpl_voice global tpl_video", "event message elif root.find(\"Event\") is not None: self.type = 'event'", "== 'image': # To set attribute value of the XML", "If not, 'None' will be returned. 
For more information, please", "u'sub_button': []}, {u'type': u'click', u'name': u'\\u5938\\u6211\\u5e05', u'key': u'<KEY>', u'sub_button': []}]}]}}", "return a_dict['groupid'] def updateGroupName(self, groupid='', new_name=''): '''Update the determained group", "initType(self, MsgType='text', incomingMessage='<xml></xml>'): ''' To initialize message type ''' MsgType_list", "# to dump the the dict as for later on", "\"msgtype\":\"news\", \"news\":{ \"articles\": [ { \"title\":\"Happy Day\", \"description\":\"Is Really A", "please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E6.9F.A5.E8.AF.A2.E6.89.80.E6.9C.89.E5.88.86.E7.BB.84 ''' url = \"https://api.weixin.qq.com/cgi-bin/groups/get?access_token=\" + self._token try:", "event message 'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision' ], } # For recognition message mapping", "API token for cusmter service or others. If ```appid``` and", "```v``` is type of list the ```v``` should be packaged", "json.loads(gotten) if a_dict.has_key('access_token'): return a_dict['access_token'] # means wrong appid or", "hasattr(holder, \"_PicUrl\") >>> True >>> holder.initType(MsgType='text') # Or we can", "v elif k == 'description': self.the_dict['music'][k] = v elif k", "'' # For video message only elif self.MsgType == 'video':", "to <NAME>. 
# Any distrubites of this copy should inform", "as ```initType(MsgType='text')``` Notice: all the kwargs 's key in this", "u'<KEY>', u'sub_button': []}]}]}} >>> flag2 = mm.deleteMenu() >>> flag2 True", "'' if root.find('Scale') is not None: self._Scale = root.find('Scale').text else:", "class message type ''' root = etree.fromstring(incomingMessage) msgType = root.find(\"MsgType\").text", "message type if \"Voice Recognition\", \"Event\", \"Normal\" 3) check children", "msgType in ['text', 'image', 'voice', 'video', 'location', 'link', 'event']: #", "'normal' message else: self.type = 'normal' # For normal messages", "v elif k == 'hqmusicurl': self.the_dict['music'][k] = v elif k", "return if valid. If ```token``` and ```next_openid``` are valid, then", "== i: self.MsgType = i break # Delete the common", "self.__init__(MsgType) #def setElementByTag(self, tag): def setElementByTag(self, **kwargs): \"\"\" To package", "{ \"title\":\"Happy Day\", \"description\":\"Is Really A Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\"", "MsgType == i: # self._MsgType = MsgType # ## the", "token def createMenu(self, menu_format=''): '''Create menu, it needs a token", "== 'touser': self.the_dict['touser'] = v elif k == 'msgtype': self.the_dict['msgtype']", "delattr(self, '_' + m) except: pass # Delete the unuseful", "v elif k == 'thumb_media_id': self.the_dict['music'][k] = v else: self.the_dict[k]", "'''Get a subscriber's group ID. 
The ID is of type", "Notice: all the kwargs 's key in this function should", "\"touser\":\"OPENID\", \"msgtype\":\"news\", \"news\":{ \"articles\": [ { \"title\":\"Happy Day\", \"description\":\"Is Really", "return self.the_dict json_text = '''{ \"touser\":\"OPENID\", \"msgtype\":\"text\", \"text\": { \"content\":\"Hello", "is type of list the ```v``` should be packaged in", "not valid \" % MsgType for i in MsgType_list: if", "JSON dumped, This is used with the function ```setElementByKey(touser='someone', msgtype='news',", "then use ```dumpXML()``` to get the XML we need to", ">>> rc.setElementByTag(FromUserName='the_server', ToUserName='the_wechat_client',Content='Hello dude!') >>> tpl_out = rc.dumpXML() >>> tpl_out", "'news'] if MsgType not in tpl_list: raise ValueError, \"Invalid responsing", "if moved or not. For more information please visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E7.A7.BB.E5.8A.A8.E7.94.A8.E6.88.B7.E5.88.86.E7.BB.84'''", "Happy Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" }, { \"title\":\"Happy Day\", \"description\":\"Is Really", "= '''{ \"touser\":\"OPENID\", \"msgtype\":\"image\", \"image\": { \"media_id\":\"MEDIA_ID\" } }''' json_voice", "all the tags #child_list = [] #for child in self.root.getchildren():", "encoding='utf-8',method='xml',pretty_print=True) # The down blow are the templates of all", "official wechat server takes it as '' by default If", "k == 'musicurl': self.the_dict['music'][k] = v elif k == 'title':", "= root.find('Format').text else: self._Format = '' # For video message", "e: print e return None else: gotten = a.read() a_dict", "return None else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): return None", "manage the subscriber groups, profile, location, list. 
Usage: >>> sm", "<CreateTime>12345678</CreateTime> <MsgType><![CDATA[voice]]></MsgType> <Voice> <MediaId><![CDATA[media_id]]></MediaId> </Voice> </xml>''' tpl_video = '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName>", "= urllib2.urlopen(url) except Exception as e: # its better to", "= v elif k == 'Description': self.root.find('Video').find('Description').text = v elif", "is not None: self._ToUserName = root.find('ToUserName').text else: self._ToUserName = ''", "not None: self._Label = root.find('Label').text else: self._Label = '' #", "root.find('Label') is not None: self._Label = root.find('Label').text else: self._Label =", "'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[news]]></MsgType> <ArticleCount>2</ArticleCount> <Articles> <item> <Title><![CDATA[title1]]></Title> <Description><![CDATA[description1]]></Description>", "''' url = \"https://api.weixin.qq.com/cgi-bin/user/get?access_token=\" + self._token + \"&next_openid=\" + next_openid", "image message only elif self.MsgType == 'image': if root.find('PicUrl') is", "as e: print e raise e # package article def", "_%s\" % tag #raise AttributeError, tmp else: return gotten def", "self.root.find('Video').find('MusicUrl').text = v elif k == 'HQMusicUrl': self.root.find('Video').find('HQMusicUrl').text = v", "3) check children class message type ''' root = etree.fromstring(incomingMessage)", "article message only elif self._MsgType == 'article': # To set", "v elif k == 'MusicUrl': self.root.find('Video').find('MusicUrl').text = v elif k", "'POST' try: response = urllib2.urlopen(request) except Exception as e: print", "v else: self.the_dict[k] = v except Exception as e: print", "+ token request = urllib2.Request(url, menu_format) request.get_method = lambda :", "template # the_xml = globals()['tpl_'+i] # self.root = etree.fromstring( the_xml", "token=''): self._token = token def loadToken(self, token=''): 
'''Firstly load the", "[]}, {u'name': u'\\u7b2c\\u4e09\\u94ae', u'sub_button': [{u'url': u'http://www.soso.com/', u'type': u'view', u'name': u'\\u641c\\u641c',", "incomingMessage) elif msgType == 'video': self.initType('video', incomingMessage) elif msgType ==", "groupid) request = urllib2.Request(url,data=postData) try: response = urllib2.urlopen(request) except Exception", "name. If created, then it will return the new group", "= urllib2.urlopen(url) except Exception as e: print e return False", "if root.find('Description') is not None: self._Description = root.find('Description').text else: self._Description", "next_openid try: response = urllib2.urlopen(url) except Exception as e: print", "= ParsingContainer() >>> hasattr(holder, \"_Content\") >>> True >>> holder.initType(MsgType='video') >>>", "= '{\"openid\":\"%s\"}' % openid request = urllib2.Request(url,data=postData) try: response =", "u'sub_button': []}]}]}} >>> flag2 = mm.deleteMenu() >>> flag2 True >>>", "set of the event message 'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision' ], } # For", "self._MsgType == 'news': for k, v in kwargs.items(): try: #", "The dumped is of dict format. We need to json.loads(the_dict_object)", "None: self._Description = root.find('Description').text else: self._Description = '' if root.find('Url')", "root.find('Title') is not None: self._Title = root.find('Title').text else: self._Title =", "not None: self.type = 'recognition' # Check if the incomingMessage", "root.find('Location_Y').text else: self._Location_Y = '' if root.find('Scale') is not None:", "Exception as e: print e return None else: a_dict =", "is of dict format. 
We need to json.loads(the_dict_object) if we", "is not None: self._Event = root.find('Event').text else: self._Event = ''", "server Make the value variable The 'incomingMessage' is of XML", "only elif self._MsgType == 'voice': for k, v in kwargs.items():", "##raise ValueError #tmp = \"Instance has no attribute _%s\" %", "elif msgType == 'voice': self.initType('voice', incomingMessage) elif msgType == 'video':", "= multipart_encode({\"image1\": open(media_path,\"rb\")}) except Exception as e: #print e return", "self._MsgType].encode('utf-8').decode('utf-8') self.the_dict = json.loads(the_json_tpl) if MsgType == 'text': pass def", "msgType == 'voice': self.initType('voice', incomingMessage) elif msgType == 'video': self.initType('video',", "not None: self._Title = root.find('Title').text else: self._Title = '' if", "'' \"\"\" # By default, MsgType is set as 'text'", "= '''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[music]]></MsgType> <Music> <Title><![CDATA[TITLE]]></Title> <Description><![CDATA[DESCRIPTION]]></Description> <MusicUrl><![CDATA[MUSIC_Url]]></MusicUrl>", "= root.find(\"Recognition\").text # For recognition messages if self.type == 'event':", "of type 'int'. 
If openid wrong or token invalid, 'None'", "None: self._Content = root.find('Content').text else: self._Content = '' # For", "don't need to set the 'CreateTime' since it has been", "are the templates of all the responsing message valid for", "flag2 = mm.deleteMenu() >>> flag2 True >>> mm.getMenu() >>> #", "of the event message 'event':['Event','EventKey','Ticket','Latitude','Longitude','Precision' ], } # For recognition", "not None: self._Location_X = root.find('Location_X').text else: self._Location_X = '' if", "# For location message only elif self.MsgType == 'location': if", "except Exception as e: #print e return None #raise e", "there are others: 'zh_TW, en' For more information: please visit,", "The 'incomingMessage' is of XML According to its content this", "self._MsgType = '' if root.find('MsgId') is not None: self._MsgId =", "'location': if root.find('Location_X') is not None: self._Location_X = root.find('Location_X').text else:", "messages if self.type == 'event': self.initType('event', incomingMessage) class RespondingContainer(object): \"\"\"Package", "According to its content this will assgin values to ```self.MsgType", "\"Normal\" 3) check children class message type ''' root =", "if root.find('Longitude') is not None: self._Longitude = root.find('Longitude').text if root.find('Precision')", "Post the file to the offical wechat server and get", "as : 'self._FromUserName' #for i in child_list: # if i", "'voice':['MediaId','Format','Recognition'], } def __init__(self, incomingMessage='<xml></xml>'): # pre-set some common variables", "the ```self.root``` has been assigned already return etree.tostring(self.root, encoding='utf-8',method='xml',pretty_print=True) #", "msgtype='news', articles=packageArticle())``` ''' return [{\"title\": title, \"description\":description, \"url\":url, \"picurl\":picurl}] #", "'news': for k, v in kwargs.items(): try: # here we", "for commercial, please inform the author for authentication. 
Apr 2014", "# releasing method def __del__(self): pass #@property def getElementByTag(self, tag):", "global tpl_image global tpl_voice global tpl_video global tpl_music global tpl_news", "mm = MenuManager() >>> mm.loadToken('something_the_api_token') >>> flag = mm.createMenu('the_menu_format_constructed_from_a_JSON_as_a_string') >>>", "== 'normal': if msgType == 'text': self.initType('text', incomingMessage) elif msgType", "== 0: return True else: return False else: return False", "return a_dict def createGroup(self, name=''): '''Create a determained group name.", "list. Usage: >>> sm = SubscriberManager() >>> sm.loadToken('<KEY>') >>> hisprofile", "msgType == 'text': self.initType('text', incomingMessage) elif msgType == 'image': self.initType('image',", "has no message type: '%s'\" % MsgType else: # pass", "if self.MsgType == 'text': if root.find('Content') is not None: self._Content", "then use the functions below''' self._token = token def getSubscriberProfile(self,", "incomingMessage) elif msgType == 'location': self.initType('location', incomingMessage) elif msgType ==", "visit: http://mp.weixin.qq.com/wiki/index.php?title=%E5%88%86%E7%BB%84%E7%AE%A1%E7%90%86%E6%8E%A5%E5%8F%A3#.E7.A7.BB.E5.8A.A8.E7.94.A8.E6.88.B7.E5.88.86.E7.BB.84''' url = \"https://api.weixin.qq.com/cgi-bin/groups/members/update?access_token=\" + self._token postData = '{\"openid\":\"%s\",\"to_groupid\":%s}'", "user. 
This returns a Boolean value''' url = \"https://api.weixin.qq.com/cgi-bin/message/custom/send?access_token=\" +", "for k in normalMapping: if k !=self.MsgType: for m in", "unuseful elements in eventMapping for k in eventMapping: for e", ">>> flag2 = mm.deleteMenu() >>> flag2 True >>> mm.getMenu() >>>", "\"picurl\":picurl}] # to dump the the dict as for later", "= a.read() a_dict = json.loads(gotten) if a_dict.has_key('access_token'): return a_dict['access_token'] #", "not None: self._ToUserName = root.find('ToUserName').text else: self._ToUserName = '' if", "self.the_dict['video'][k] = v else: self.the_dict[k] = v except Exception as", "just ellipsis this operation since by default its 'text' >>>", "For video message only elif self._MsgType == 'video': # To", "'_' + c) except: pass # Delete the unuseful elements", "except Exception as e: print e raise e ## For", "of all the responsing message valid for wechat # For", "e return None else: a_dict = json.loads(response.read()) if a_dict.has_key('errcode'): return", "list. A dict will be return if valid. If ```token```", "has # For text message only if self.MsgType == 'text':", "# Delete the common tags for c in commonTag: try:", "Day\", \"url\":\"URL\", \"picurl\":\"PIC_URL\" } ] } }''' class SubscriberManager(object): '''To", "of XML According to its content this will assgin values", "generated automatically :) >>> rc.setElementByTag(FromUserName='the_server', ToUserName='the_wechat_client',Content='Hello dude!') >>> tpl_out =", "= '' # For video message only elif self.MsgType ==", "others. If ```appid``` and ```appsecret``` are correct then a string", "= \"https://api.weixin.qq.com/cgi-bin/groups/getid?access_token=\"+ self._token postData = '{\"openid\":\"%s\"}' % openid request =", "will return None. 
''' url = \"https://api.weixin.qq.com/cgi-bin/groups/create?access_token=\" + self._token postData", "class SubscriberManager(object): '''To manage the subscriber groups, profile, location, list.", "a_dict def createGroup(self, name=''): '''Create a determained group name. If", "If the image is valid, then a_dict will be returned.", "message MsgType '%s'\" % MsgType else: ## Load the template", "no menu at all ''' def __init__(self, token=''): self._token =", "we don't need to set the 'CreateTime' since it has", "self._Location_Y = '' if root.find('Scale') is not None: self._Scale =", "post the message to determained user. This returns a Boolean", "for wechat # For more information, please visit : http://mp.weixin.qq.com/wiki/index.php?title=%E5%8F%91%E9%80%81%E8%A2%AB%E5%8A%A8%E5%93%8D%E5%BA%94%E6%B6%88%E6%81%AF", "message if root.find(\"Recognition\") is not None: self.type = 'recognition' #", "self._Description = root.find('Description').text else: self._Description = '' if root.find('Url') is", "for c in commonTag: try: delattr(self, '_' + c) except:", "'''Load the token before using other functions''' self._token = token", "class, need to declare the 'MsgType' For example, $~ python", "a list which contains a dict. While construcing the JSON", "\\n who sent messages to the public wechat service. Those", "lang='zh_CN') ''' def __init__(self, token=''): self._token = token def loadToken(self,", "m in normalMapping[k]: try: delattr(self, '_' + m) except: pass", "the public wechat service. Those 6 types of messages include:", "subscriber groups, profile, location, list. Usage: >>> sm = SubscriberManager()", "\"picurl\":\"PIC_URL\" } ] ''' if k == 'articles': if type(v)", "content this will assgin values to ```self.MsgType and etc..``` Logistics", "author for authentication. 
Apr 2014 import sys reload(sys) sys.setdefaultencoding('utf-8') from", "'%s'\" % (self._MsgType, k) ## For image message only elif", "def getSubscriberProfile(self, openid='', lang='zh_CN'): '''The open_id parameter is unique to", "For recognition message mapping global recognitionMapping recognitionMapping = { 'voice':['MediaId','Format','Recognition'],", "= i break # Delete the common tags for c", "is not None: self._EventKey = root.find('EventKey').text if root.find('Ticket') is not", "tpl_text = u'''<xml> <ToUserName><![CDATA[toUser]]></ToUserName> <FromUserName><![CDATA[fromUser]]></FromUserName> <CreateTime>12345678</CreateTime> <MsgType><![CDATA[text]]></MsgType> <Content><![CDATA[你好]]></Content> </xml>''' tpl_image", "To dump the XML we need # the ```self.root``` has", "MenuManager(object): '''To manage the bottom menu of the wechat service", "is not None: self._Location_X = root.find('Location_X').text else: self._Location_X = ''", "\"media_id\":\"MEDIA_ID\" } }''' json_video = '''{ \"touser\":\"OPENID\", \"msgtype\":\"video\", \"video\": {", ">>> flag2 True >>> mm.getMenu() >>> # nothing gotten: it", "return None else: return a_dict def getAPIToken(appid='', appsecret=''): '''Get wechat", "self._media_type = media_type url = \"http://file.api.weixin.qq.com/cgi-bin/media/upload?access_token=\" + self._token + \"&type=\"" ]
[]
[ "colors) class textcolor(Command): args = '[ model:str ] color:str self'", "a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) self.style['border'] = ('1px solid %s' % latex2htmlcolor(a['bordercolor'], a['model'],", "latex2htmlcolor('0,1,1') colors['magenta'] = latex2htmlcolor('1,0,1') colors['yellow'] = latex2htmlcolor('1,1,0') colors['white'] = latex2htmlcolor('1')", "arg.strip() else: try: red = green = blue = float(arg)", "tex): a = self.parse(tex) u = self.ownerDocument.userdata colors = u.getPath('packages/color/colors')", "self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class color(Environment): args = '[", "k) - k, 1 - m * (1 - k)", "colors[a['name']] = latex2htmlcolor(a['color'], a['model'], colors) class textcolor(Command): args = '[", "'named': return named.get(arg, '') if ',' in arg: parts =", "latex2htmlcolor('0.8,0.8,0.6') colors['middleyellow'] = latex2htmlcolor('1,1,0.2') colors['darkgray'] = latex2htmlcolor('0.5') colors['middlegray'] = latex2htmlcolor('0.7')", "('1px solid %s' % latex2htmlcolor(a['bordercolor'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors'))) class normalcolor(Command): pass", "a = self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) self.style['border'] =", "= latex2htmlcolor('0.8,0.8,0.6') colors['middleyellow'] = latex2htmlcolor('1,1,0.2') colors['darkgray'] = latex2htmlcolor('0.5') colors['middlegray'] =", "tex): a = self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class", "255), 255) blue = min(int(blue * 255), 255) # cmyk", "self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) self.style['border'] = ('1px 
solid", "{} if model == 'named': return named.get(arg, '') if ','", "green, blue = [int(255 * x) for x in [1", "pagecolor(Command): args = '[ model:str ] color:str' class colorbox(Command): args", "= latex2htmlcolor('0,1,0') colors['darkblue'] = latex2htmlcolor('0,0,0.8') colors['middleblue'] = latex2htmlcolor('0,0,0.9') colors['lightblue'] =", "= latex2htmlcolor('0,0,1') colors['cyan'] = latex2htmlcolor('0,1,1') colors['magenta'] = latex2htmlcolor('1,0,1') colors['yellow'] =", "colors['lightred'] = latex2htmlcolor('1,0,0') colors['darkgreen'] = latex2htmlcolor('0,0.6,0') colors['middlegreen'] = latex2htmlcolor('0,0.8,0') colors['lightgreen']", "colors['darkblue'] = latex2htmlcolor('0,0,0.8') colors['middleblue'] = latex2htmlcolor('0,0,0.9') colors['lightblue'] = latex2htmlcolor('0,0,1') colors['darkcyan']", "k = parts red, green, blue = [int(255 * x)", "args = 'name:str model:str color:str' def invoke(self, tex): a =", "u = self.ownerDocument.userdata colors = u.getPath('packages/color/colors') colors[a['name']] = latex2htmlcolor(a['color'], a['model'],", "ProcessOptions(options, document): colors = {} document.userdata.setPath('packages/color/colors', colors) colors['red'] = latex2htmlcolor('1,0,0')", "self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) self.style['border'] = ('1px solid %s'", "(int(red), int(green), int(blue)) class definecolor(Command): args = 'name:str model:str color:str'", "self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class fcolorbox(Command): args = '[", "x in [1 - c * (1 - k) -", "= latex2htmlcolor('0,0.6,0') colors['middlegreen'] = latex2htmlcolor('0,0.8,0') colors['lightgreen'] = latex2htmlcolor('0,1,0') colors['darkblue'] =", "- k) - k, 1 - y * (1 -", "len(parts) == 4: c, m, y, k = parts red,", "blue = [int(255 * x) for x in [1 -", "= latex2htmlcolor('0.8,0,0') 
colors['middlered'] = latex2htmlcolor('0.9,0,0') colors['lightred'] = latex2htmlcolor('1,0,0') colors['darkgreen'] =", "cmyk elif len(parts) == 4: c, m, y, k =", "named.get(arg, '') if ',' in arg: parts = [float(x) for", "[float(x) for x in arg.split(',')] # rgb if len(parts) ==", "= latex2htmlcolor('0.8,0.6,0.8') colors['middlemagenta'] = latex2htmlcolor('1,0,0.6') colors['darkyellow'] = latex2htmlcolor('0.8,0.8,0.6') colors['middleyellow'] =", "'[ model:str ] color:str' def invoke(self, tex): a = self.parse(tex)", "* (1 - k) - k, 1 - y *", "or {} if model == 'named': return named.get(arg, '') if", "self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class fcolorbox(Command): args =", "= [float(x) for x in arg.split(',')] # rgb if len(parts)", "blue = parts red = min(int(red * 255), 255) green", "green, blue = parts red = min(int(red * 255), 255)", "* 255), 255) # cmyk elif len(parts) == 4: c,", "',' in arg: parts = [float(x) for x in arg.split(',')]", "in arg: parts = [float(x) for x in arg.split(',')] #", "color:str' def invoke(self, tex): a = self.parse(tex) u = self.ownerDocument.userdata", "latex2htmlcolor('0,0.8,0.8') colors['darkmagenta'] = latex2htmlcolor('0.8,0.6,0.8') colors['middlemagenta'] = latex2htmlcolor('1,0,0.6') colors['darkyellow'] = latex2htmlcolor('0.8,0.8,0.6')", "= latex2htmlcolor('0,0.8,0.8') colors['darkmagenta'] = latex2htmlcolor('0.8,0.6,0.8') colors['middlemagenta'] = latex2htmlcolor('1,0,0.6') colors['darkyellow'] =", "= {} document.userdata.setPath('packages/color/colors', colors) colors['red'] = latex2htmlcolor('1,0,0') colors['green'] = latex2htmlcolor('0,1,0')", "= latex2htmlcolor('1,0,0.6') colors['darkyellow'] = latex2htmlcolor('0.8,0.8,0.6') colors['middleyellow'] = latex2htmlcolor('1,1,0.2') colors['darkgray'] =", "parts red, green, blue = [int(255 * x) for x", "y * (1 - k) - k]] else: return arg.strip()", "named or {} if model == 
'named': return named.get(arg, '')", "self.ownerDocument.userdata.getPath('packages/color/colors')) self.style['border'] = ('1px solid %s' % latex2htmlcolor(a['bordercolor'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')))", "- m * (1 - k) - k, 1 -", "latex2htmlcolor('0.9') colors['darkred'] = latex2htmlcolor('0.8,0,0') colors['middlered'] = latex2htmlcolor('0.9,0,0') colors['lightred'] = latex2htmlcolor('1,0,0')", "colors['darkgray'] = latex2htmlcolor('0.5') colors['middlegray'] = latex2htmlcolor('0.7') colors['lightgray'] = latex2htmlcolor('0.9') def", "named=None): named = named or {} if model == 'named':", "textcolor(Command): args = '[ model:str ] color:str self' def invoke(self,", "red = min(int(red * 255), 255) green = min(int(green *", "== 'named': return named.get(arg, '') if ',' in arg: parts", "a = self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class pagecolor(Command):", "latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) self.style['border'] = ('1px solid %s' % latex2htmlcolor(a['bordercolor'],", "latex2htmlcolor('0,0,0.9') colors['lightblue'] = latex2htmlcolor('0,0,1') colors['darkcyan'] = latex2htmlcolor('0.6,0.8,0.8') colors['middlecyan'] = latex2htmlcolor('0,0.8,0.8')", "a['model'], colors) class textcolor(Command): args = '[ model:str ] color:str", "colors = u.getPath('packages/color/colors') colors[a['name']] = latex2htmlcolor(a['color'], a['model'], colors) class textcolor(Command):", "int(green), int(blue)) class definecolor(Command): args = 'name:str model:str color:str' def", "- k, 1 - m * (1 - k) -", "= ('1px solid %s' % latex2htmlcolor(a['bordercolor'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors'))) class normalcolor(Command):", "= latex2htmlcolor('0.6,0.8,0.8') colors['middlecyan'] = latex2htmlcolor('0,0.8,0.8') colors['darkmagenta'] = 
latex2htmlcolor('0.8,0.6,0.8') colors['middlemagenta'] =", "k) - k, 1 - y * (1 - k)", "latex2htmlcolor('0.9') def latex2htmlcolor(arg, model='rgb', named=None): named = named or {}", "min(int(green * 255), 255) blue = min(int(blue * 255), 255)", "[int(255 * x) for x in [1 - c *", "latex2htmlcolor('0,0,1') colors['cyan'] = latex2htmlcolor('0,1,1') colors['magenta'] = latex2htmlcolor('1,0,1') colors['yellow'] = latex2htmlcolor('1,1,0')", "'[ model:str ] bordercolor:str color:str self' def invoke(self, tex): a", "self.ownerDocument.userdata.getPath('packages/color/colors')) class pagecolor(Command): args = '[ model:str ] color:str' class", "c * (1 - k) - k, 1 - m", "try: return named[arg] except KeyError: return arg.strip() return '#%.2X%.2X%.2X' %", "def latex2htmlcolor(arg, model='rgb', named=None): named = named or {} if", "rgb if len(parts) == 3: red, green, blue = parts", "color:str self' def invoke(self, tex): a = self.parse(tex) self.style['color'] =", "model:str color:str' def invoke(self, tex): a = self.parse(tex) u =", "colors['middlemagenta'] = latex2htmlcolor('1,0,0.6') colors['darkyellow'] = latex2htmlcolor('0.8,0.8,0.6') colors['middleyellow'] = latex2htmlcolor('1,1,0.2') colors['darkgray']", "x) for x in [1 - c * (1 -", "model:str ] color:str self' def invoke(self, tex): a = self.parse(tex)", "<filename>chirun/plastex/color/__init__.py from plasTeX import Command, Environment def ProcessOptions(options, document): colors", "3: red, green, blue = parts red = min(int(red *", "255), 255) green = min(int(green * 255), 255) blue =", "= self.parse(tex) u = self.ownerDocument.userdata colors = u.getPath('packages/color/colors') colors[a['name']] =", "colors['darkcyan'] = latex2htmlcolor('0.6,0.8,0.8') colors['middlecyan'] = latex2htmlcolor('0,0.8,0.8') colors['darkmagenta'] = latex2htmlcolor('0.8,0.6,0.8') colors['middlemagenta']", "a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class color(Environment): args = '[ model:str ] 
color:str'", "c, m, y, k = parts red, green, blue =", "255) blue = min(int(blue * 255), 255) # cmyk elif", "colors['lightgreen'] = latex2htmlcolor('0,1,0') colors['darkblue'] = latex2htmlcolor('0,0,0.8') colors['middleblue'] = latex2htmlcolor('0,0,0.9') colors['lightblue']", "a = self.parse(tex) u = self.ownerDocument.userdata colors = u.getPath('packages/color/colors') colors[a['name']]", "= min(int(blue * 255), 255) # cmyk elif len(parts) ==", "colors['white'] = latex2htmlcolor('1') colors['black'] = latex2htmlcolor('0') colors['gray'] = latex2htmlcolor('0.9') colors['darkred']", "a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class pagecolor(Command): args = '[ model:str ] color:str'", "class color(Environment): args = '[ model:str ] color:str' def invoke(self,", "tex): a = self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class", "(1 - k) - k]] else: return arg.strip() else: try:", "= latex2htmlcolor(a['color'], a['model'], colors) class textcolor(Command): args = '[ model:str", "= latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class fcolorbox(Command): args = '[ model:str", "colors['green'] = latex2htmlcolor('0,1,0') colors['blue'] = latex2htmlcolor('0,0,1') colors['cyan'] = latex2htmlcolor('0,1,1') colors['magenta']", "except KeyError: return arg.strip() return '#%.2X%.2X%.2X' % (int(red), int(green), int(blue))", "k, 1 - m * (1 - k) - k,", "- k) - k, 1 - m * (1 -", "green = blue = float(arg) except ValueError: try: return named[arg]", "colors['middlegreen'] = latex2htmlcolor('0,0.8,0') colors['lightgreen'] = latex2htmlcolor('0,1,0') colors['darkblue'] = latex2htmlcolor('0,0,0.8') colors['middleblue']", "255) # cmyk elif len(parts) == 4: c, m, y,", "= latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class pagecolor(Command): args = '[ 
model:str", "{} document.userdata.setPath('packages/color/colors', colors) colors['red'] = latex2htmlcolor('1,0,0') colors['green'] = latex2htmlcolor('0,1,0') colors['blue']", "arg.strip() return '#%.2X%.2X%.2X' % (int(red), int(green), int(blue)) class definecolor(Command): args", "latex2htmlcolor('0,1,0') colors['darkblue'] = latex2htmlcolor('0,0,0.8') colors['middleblue'] = latex2htmlcolor('0,0,0.9') colors['lightblue'] = latex2htmlcolor('0,0,1')", "'') if ',' in arg: parts = [float(x) for x", "fcolorbox(Command): args = '[ model:str ] bordercolor:str color:str self' def", "= latex2htmlcolor('0') colors['gray'] = latex2htmlcolor('0.9') colors['darkred'] = latex2htmlcolor('0.8,0,0') colors['middlered'] =", "document): colors = {} document.userdata.setPath('packages/color/colors', colors) colors['red'] = latex2htmlcolor('1,0,0') colors['green']", "model:str ] bordercolor:str color:str self' def invoke(self, tex): a =", "latex2htmlcolor('0.7') colors['lightgray'] = latex2htmlcolor('0.9') def latex2htmlcolor(arg, model='rgb', named=None): named =", "latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class color(Environment): args = '[ model:str ]", "self.ownerDocument.userdata colors = u.getPath('packages/color/colors') colors[a['name']] = latex2htmlcolor(a['color'], a['model'], colors) class", "# cmyk elif len(parts) == 4: c, m, y, k", "= latex2htmlcolor('1,0,0') colors['darkgreen'] = latex2htmlcolor('0,0.6,0') colors['middlegreen'] = latex2htmlcolor('0,0.8,0') colors['lightgreen'] =", "if len(parts) == 3: red, green, blue = parts red", "if model == 'named': return named.get(arg, '') if ',' in", "= latex2htmlcolor('0.5') colors['middlegray'] = latex2htmlcolor('0.7') colors['lightgray'] = latex2htmlcolor('0.9') def latex2htmlcolor(arg,", "x in arg.split(',')] # rgb if len(parts) == 3: red,", "float(arg) except ValueError: try: return named[arg] except KeyError: return arg.strip()", "colorbox(Command): args = '[ 
model:str ] color:str self' def invoke(self,", "= u.getPath('packages/color/colors') colors[a['name']] = latex2htmlcolor(a['color'], a['model'], colors) class textcolor(Command): args", "color:str self' def invoke(self, tex): a = self.parse(tex) self.style['background-color'] =", "[1 - c * (1 - k) - k, 1", "] color:str' def invoke(self, tex): a = self.parse(tex) self.style['color'] =", "except ValueError: try: return named[arg] except KeyError: return arg.strip() return", "self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class pagecolor(Command): args = '[", "colors['middleblue'] = latex2htmlcolor('0,0,0.9') colors['lightblue'] = latex2htmlcolor('0,0,1') colors['darkcyan'] = latex2htmlcolor('0.6,0.8,0.8') colors['middlecyan']", "colors['middlecyan'] = latex2htmlcolor('0,0.8,0.8') colors['darkmagenta'] = latex2htmlcolor('0.8,0.6,0.8') colors['middlemagenta'] = latex2htmlcolor('1,0,0.6') colors['darkyellow']", "1 - m * (1 - k) - k, 1", "colors['blue'] = latex2htmlcolor('0,0,1') colors['cyan'] = latex2htmlcolor('0,1,1') colors['magenta'] = latex2htmlcolor('1,0,1') colors['yellow']", "model='rgb', named=None): named = named or {} if model ==", "from plasTeX import Command, Environment def ProcessOptions(options, document): colors =", "self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class color(Environment): args =", "latex2htmlcolor('1,1,0.2') colors['darkgray'] = latex2htmlcolor('0.5') colors['middlegray'] = latex2htmlcolor('0.7') colors['lightgray'] = latex2htmlcolor('0.9')", "'[ model:str ] color:str self' def invoke(self, tex): a =", "* (1 - k) - k, 1 - m *", "model:str ] color:str' def invoke(self, tex): a = self.parse(tex) self.style['color']", "len(parts) == 3: red, green, blue = parts red =", "plasTeX import Command, Environment def ProcessOptions(options, document): colors = {}", 
"latex2htmlcolor('0,1,0') colors['blue'] = latex2htmlcolor('0,0,1') colors['cyan'] = latex2htmlcolor('0,1,1') colors['magenta'] = latex2htmlcolor('1,0,1')", "latex2htmlcolor('0,0.6,0') colors['middlegreen'] = latex2htmlcolor('0,0.8,0') colors['lightgreen'] = latex2htmlcolor('0,1,0') colors['darkblue'] = latex2htmlcolor('0,0,0.8')", "= parts red = min(int(red * 255), 255) green =", "y, k = parts red, green, blue = [int(255 *", "latex2htmlcolor('0,0.8,0') colors['lightgreen'] = latex2htmlcolor('0,1,0') colors['darkblue'] = latex2htmlcolor('0,0,0.8') colors['middleblue'] = latex2htmlcolor('0,0,0.9')", "named[arg] except KeyError: return arg.strip() return '#%.2X%.2X%.2X' % (int(red), int(green),", "= latex2htmlcolor('0.9') def latex2htmlcolor(arg, model='rgb', named=None): named = named or", "= float(arg) except ValueError: try: return named[arg] except KeyError: return", "try: red = green = blue = float(arg) except ValueError:", "= self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) self.style['border'] = ('1px", "k, 1 - y * (1 - k) - k]]", "self.ownerDocument.userdata.getPath('packages/color/colors')) class color(Environment): args = '[ model:str ] color:str' def", "latex2htmlcolor('0.9,0,0') colors['lightred'] = latex2htmlcolor('1,0,0') colors['darkgreen'] = latex2htmlcolor('0,0.6,0') colors['middlegreen'] = latex2htmlcolor('0,0.8,0')", "k]] else: return arg.strip() else: try: red = green =", "255) green = min(int(green * 255), 255) blue = min(int(blue", "min(int(blue * 255), 255) # cmyk elif len(parts) == 4:", "class definecolor(Command): args = 'name:str model:str color:str' def invoke(self, tex):", "= latex2htmlcolor('1,1,0.2') colors['darkgray'] = latex2htmlcolor('0.5') colors['middlegray'] = latex2htmlcolor('0.7') colors['lightgray'] =", "return arg.strip() else: try: red = green = blue =", "self.style['border'] = ('1px solid %s' % 
latex2htmlcolor(a['bordercolor'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors'))) class", "colors['lightblue'] = latex2htmlcolor('0,0,1') colors['darkcyan'] = latex2htmlcolor('0.6,0.8,0.8') colors['middlecyan'] = latex2htmlcolor('0,0.8,0.8') colors['darkmagenta']", "colors['middlered'] = latex2htmlcolor('0.9,0,0') colors['lightred'] = latex2htmlcolor('1,0,0') colors['darkgreen'] = latex2htmlcolor('0,0.6,0') colors['middlegreen']", "if ',' in arg: parts = [float(x) for x in", "red = green = blue = float(arg) except ValueError: try:", "4: c, m, y, k = parts red, green, blue", "import Command, Environment def ProcessOptions(options, document): colors = {} document.userdata.setPath('packages/color/colors',", "= self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class pagecolor(Command): args", "definecolor(Command): args = 'name:str model:str color:str' def invoke(self, tex): a", "for x in arg.split(',')] # rgb if len(parts) == 3:", "= latex2htmlcolor('0,0,0.9') colors['lightblue'] = latex2htmlcolor('0,0,1') colors['darkcyan'] = latex2htmlcolor('0.6,0.8,0.8') colors['middlecyan'] =", "'[ model:str ] color:str' class colorbox(Command): args = '[ model:str", "] color:str' class colorbox(Command): args = '[ model:str ] color:str", "def invoke(self, tex): a = self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'],", "= named or {} if model == 'named': return named.get(arg,", "= latex2htmlcolor('0,1,1') colors['magenta'] = latex2htmlcolor('1,0,1') colors['yellow'] = latex2htmlcolor('1,1,0') colors['white'] =", "= '[ model:str ] color:str' class colorbox(Command): args = '[", "args = '[ model:str ] color:str self' def invoke(self, tex):", "latex2htmlcolor(a['color'], a['model'], colors) class textcolor(Command): args = '[ model:str ]", "color:str' def invoke(self, tex): a = self.parse(tex) self.style['color'] = 
latex2htmlcolor(a['color'],", "latex2htmlcolor(arg, model='rgb', named=None): named = named or {} if model", "= '[ model:str ] color:str' def invoke(self, tex): a =", "tex): a = self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) self.style['border']", "else: return arg.strip() else: try: red = green = blue", "- c * (1 - k) - k, 1 -", "def invoke(self, tex): a = self.parse(tex) u = self.ownerDocument.userdata colors", "red, green, blue = parts red = min(int(red * 255),", "int(blue)) class definecolor(Command): args = 'name:str model:str color:str' def invoke(self,", "latex2htmlcolor('0,0,0.8') colors['middleblue'] = latex2htmlcolor('0,0,0.9') colors['lightblue'] = latex2htmlcolor('0,0,1') colors['darkcyan'] = latex2htmlcolor('0.6,0.8,0.8')", "latex2htmlcolor('0.8,0,0') colors['middlered'] = latex2htmlcolor('0.9,0,0') colors['lightred'] = latex2htmlcolor('1,0,0') colors['darkgreen'] = latex2htmlcolor('0,0.6,0')", "1 - y * (1 - k) - k]] else:", "invoke(self, tex): a = self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors'))", "= latex2htmlcolor('1,0,0') colors['green'] = latex2htmlcolor('0,1,0') colors['blue'] = latex2htmlcolor('0,0,1') colors['cyan'] =", "= blue = float(arg) except ValueError: try: return named[arg] except", "= self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class fcolorbox(Command): args", "return named[arg] except KeyError: return arg.strip() return '#%.2X%.2X%.2X' % (int(red),", "= latex2htmlcolor('1,1,0') colors['white'] = latex2htmlcolor('1') colors['black'] = latex2htmlcolor('0') colors['gray'] =", "args = '[ model:str ] bordercolor:str color:str self' def invoke(self,", "== 3: red, green, blue = parts red = min(int(red", "model:str ] color:str' class 
colorbox(Command): args = '[ model:str ]", "class pagecolor(Command): args = '[ model:str ] color:str' class colorbox(Command):", "green = min(int(green * 255), 255) blue = min(int(blue *", "= latex2htmlcolor('0,1,0') colors['blue'] = latex2htmlcolor('0,0,1') colors['cyan'] = latex2htmlcolor('0,1,1') colors['magenta'] =", "self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class pagecolor(Command): args =", "'name:str model:str color:str' def invoke(self, tex): a = self.parse(tex) u", "m, y, k = parts red, green, blue = [int(255", "= latex2htmlcolor('0,0,0.8') colors['middleblue'] = latex2htmlcolor('0,0,0.9') colors['lightblue'] = latex2htmlcolor('0,0,1') colors['darkcyan'] =", "= green = blue = float(arg) except ValueError: try: return", "= self.ownerDocument.userdata colors = u.getPath('packages/color/colors') colors[a['name']] = latex2htmlcolor(a['color'], a['model'], colors)", "255), 255) # cmyk elif len(parts) == 4: c, m,", "- y * (1 - k) - k]] else: return", "latex2htmlcolor('0.5') colors['middlegray'] = latex2htmlcolor('0.7') colors['lightgray'] = latex2htmlcolor('0.9') def latex2htmlcolor(arg, model='rgb',", "latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class fcolorbox(Command): args = '[ model:str ]", "blue = min(int(blue * 255), 255) # cmyk elif len(parts)", "= parts red, green, blue = [int(255 * x) for", "= '[ model:str ] bordercolor:str color:str self' def invoke(self, tex):", "def ProcessOptions(options, document): colors = {} document.userdata.setPath('packages/color/colors', colors) colors['red'] =", "= self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class color(Environment): args", "named = named or {} if model == 'named': return", "# rgb if len(parts) == 3: red, green, blue =", "Command, Environment def 
ProcessOptions(options, document): colors = {} document.userdata.setPath('packages/color/colors', colors)", "= latex2htmlcolor('1') colors['black'] = latex2htmlcolor('0') colors['gray'] = latex2htmlcolor('0.9') colors['darkred'] =", "red, green, blue = [int(255 * x) for x in", "colors['middleyellow'] = latex2htmlcolor('1,1,0.2') colors['darkgray'] = latex2htmlcolor('0.5') colors['middlegray'] = latex2htmlcolor('0.7') colors['lightgray']", "(1 - k) - k, 1 - m * (1", "latex2htmlcolor('1,1,0') colors['white'] = latex2htmlcolor('1') colors['black'] = latex2htmlcolor('0') colors['gray'] = latex2htmlcolor('0.9')", "return '#%.2X%.2X%.2X' % (int(red), int(green), int(blue)) class definecolor(Command): args =", "latex2htmlcolor('1') colors['black'] = latex2htmlcolor('0') colors['gray'] = latex2htmlcolor('0.9') colors['darkred'] = latex2htmlcolor('0.8,0,0')", "u.getPath('packages/color/colors') colors[a['name']] = latex2htmlcolor(a['color'], a['model'], colors) class textcolor(Command): args =", "= latex2htmlcolor('1,0,1') colors['yellow'] = latex2htmlcolor('1,1,0') colors['white'] = latex2htmlcolor('1') colors['black'] =", "self.ownerDocument.userdata.getPath('packages/color/colors')) class fcolorbox(Command): args = '[ model:str ] bordercolor:str color:str", "a = self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class color(Environment):", "latex2htmlcolor('1,0,0') colors['darkgreen'] = latex2htmlcolor('0,0.6,0') colors['middlegreen'] = latex2htmlcolor('0,0.8,0') colors['lightgreen'] = latex2htmlcolor('0,1,0')", "- k) - k]] else: return arg.strip() else: try: red", "else: try: red = green = blue = float(arg) except", "a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class fcolorbox(Command): args = '[ model:str ] bordercolor:str", "colors['middlegray'] = latex2htmlcolor('0.7') colors['lightgray'] = latex2htmlcolor('0.9') def latex2htmlcolor(arg, model='rgb', 
named=None):", "colors['cyan'] = latex2htmlcolor('0,1,1') colors['magenta'] = latex2htmlcolor('1,0,1') colors['yellow'] = latex2htmlcolor('1,1,0') colors['white']", "colors['yellow'] = latex2htmlcolor('1,1,0') colors['white'] = latex2htmlcolor('1') colors['black'] = latex2htmlcolor('0') colors['gray']", "min(int(red * 255), 255) green = min(int(green * 255), 255)", "colors['darkyellow'] = latex2htmlcolor('0.8,0.8,0.6') colors['middleyellow'] = latex2htmlcolor('1,1,0.2') colors['darkgray'] = latex2htmlcolor('0.5') colors['middlegray']", "= latex2htmlcolor('0.9') colors['darkred'] = latex2htmlcolor('0.8,0,0') colors['middlered'] = latex2htmlcolor('0.9,0,0') colors['lightred'] =", "latex2htmlcolor('0.8,0.6,0.8') colors['middlemagenta'] = latex2htmlcolor('1,0,0.6') colors['darkyellow'] = latex2htmlcolor('0.8,0.8,0.6') colors['middleyellow'] = latex2htmlcolor('1,1,0.2')", "- k, 1 - y * (1 - k) -", "latex2htmlcolor('0.6,0.8,0.8') colors['middlecyan'] = latex2htmlcolor('0,0.8,0.8') colors['darkmagenta'] = latex2htmlcolor('0.8,0.6,0.8') colors['middlemagenta'] = latex2htmlcolor('1,0,0.6')", "colors['black'] = latex2htmlcolor('0') colors['gray'] = latex2htmlcolor('0.9') colors['darkred'] = latex2htmlcolor('0.8,0,0') colors['middlered']", "colors['lightgray'] = latex2htmlcolor('0.9') def latex2htmlcolor(arg, model='rgb', named=None): named = named", "in arg.split(',')] # rgb if len(parts) == 3: red, green,", "= min(int(red * 255), 255) green = min(int(green * 255),", "colors['magenta'] = latex2htmlcolor('1,0,1') colors['yellow'] = latex2htmlcolor('1,1,0') colors['white'] = latex2htmlcolor('1') colors['black']", "class textcolor(Command): args = '[ model:str ] color:str self' def", "elif len(parts) == 4: c, m, y, k = parts", "bordercolor:str color:str self' def invoke(self, tex): a = self.parse(tex) self.style['background-color']", "= latex2htmlcolor('0.7') colors['lightgray'] = latex2htmlcolor('0.9') def latex2htmlcolor(arg, model='rgb', named=None): named", "- k]] else: 
return arg.strip() else: try: red = green", "k) - k]] else: return arg.strip() else: try: red =", "] bordercolor:str color:str self' def invoke(self, tex): a = self.parse(tex)", "document.userdata.setPath('packages/color/colors', colors) colors['red'] = latex2htmlcolor('1,0,0') colors['green'] = latex2htmlcolor('0,1,0') colors['blue'] =", "parts red = min(int(red * 255), 255) green = min(int(green", "colors['darkmagenta'] = latex2htmlcolor('0.8,0.6,0.8') colors['middlemagenta'] = latex2htmlcolor('1,0,0.6') colors['darkyellow'] = latex2htmlcolor('0.8,0.8,0.6') colors['middleyellow']", "= [int(255 * x) for x in [1 - c", "latex2htmlcolor('1,0,1') colors['yellow'] = latex2htmlcolor('1,1,0') colors['white'] = latex2htmlcolor('1') colors['black'] = latex2htmlcolor('0')", "KeyError: return arg.strip() return '#%.2X%.2X%.2X' % (int(red), int(green), int(blue)) class", "invoke(self, tex): a = self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors'))", "class colorbox(Command): args = '[ model:str ] color:str self' def", "colors['darkred'] = latex2htmlcolor('0.8,0,0') colors['middlered'] = latex2htmlcolor('0.9,0,0') colors['lightred'] = latex2htmlcolor('1,0,0') colors['darkgreen']", "self' def invoke(self, tex): a = self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'],", "colors) colors['red'] = latex2htmlcolor('1,0,0') colors['green'] = latex2htmlcolor('0,1,0') colors['blue'] = latex2htmlcolor('0,0,1')", "in [1 - c * (1 - k) - k,", "args = '[ model:str ] color:str' class colorbox(Command): args =", "Environment def ProcessOptions(options, document): colors = {} document.userdata.setPath('packages/color/colors', colors) colors['red']", "latex2htmlcolor('1,0,0') colors['green'] = latex2htmlcolor('0,1,0') colors['blue'] = latex2htmlcolor('0,0,1') colors['cyan'] = latex2htmlcolor('0,1,1')", "latex2htmlcolor('1,0,0.6') colors['darkyellow'] = latex2htmlcolor('0.8,0.8,0.6') 
colors['middleyellow'] = latex2htmlcolor('1,1,0.2') colors['darkgray'] = latex2htmlcolor('0.5')", "= latex2htmlcolor('0,0.8,0') colors['lightgreen'] = latex2htmlcolor('0,1,0') colors['darkblue'] = latex2htmlcolor('0,0,0.8') colors['middleblue'] =", "arg: parts = [float(x) for x in arg.split(',')] # rgb", "'#%.2X%.2X%.2X' % (int(red), int(green), int(blue)) class definecolor(Command): args = 'name:str", "return arg.strip() return '#%.2X%.2X%.2X' % (int(red), int(green), int(blue)) class definecolor(Command):", "color(Environment): args = '[ model:str ] color:str' def invoke(self, tex):", "colors = {} document.userdata.setPath('packages/color/colors', colors) colors['red'] = latex2htmlcolor('1,0,0') colors['green'] =", "= '[ model:str ] color:str self' def invoke(self, tex): a", "] color:str self' def invoke(self, tex): a = self.parse(tex) self.style['color']", "def invoke(self, tex): a = self.parse(tex) self.style['color'] = latex2htmlcolor(a['color'], a['model'],", "latex2htmlcolor('0') colors['gray'] = latex2htmlcolor('0.9') colors['darkred'] = latex2htmlcolor('0.8,0,0') colors['middlered'] = latex2htmlcolor('0.9,0,0')", "color:str' class colorbox(Command): args = '[ model:str ] color:str self'", "latex2htmlcolor('0,0,1') colors['darkcyan'] = latex2htmlcolor('0.6,0.8,0.8') colors['middlecyan'] = latex2htmlcolor('0,0.8,0.8') colors['darkmagenta'] = latex2htmlcolor('0.8,0.6,0.8')", "* (1 - k) - k]] else: return arg.strip() else:", "return named.get(arg, '') if ',' in arg: parts = [float(x)", "m * (1 - k) - k, 1 - y", "% (int(red), int(green), int(blue)) class definecolor(Command): args = 'name:str model:str", "] color:str self' def invoke(self, tex): a = self.parse(tex) self.style['background-color']", "ValueError: try: return named[arg] except KeyError: return arg.strip() return '#%.2X%.2X%.2X'", "a = self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class 
fcolorbox(Command):", "colors['gray'] = latex2htmlcolor('0.9') colors['darkred'] = latex2htmlcolor('0.8,0,0') colors['middlered'] = latex2htmlcolor('0.9,0,0') colors['lightred']", "* 255), 255) blue = min(int(blue * 255), 255) #", "colors['darkgreen'] = latex2htmlcolor('0,0.6,0') colors['middlegreen'] = latex2htmlcolor('0,0.8,0') colors['lightgreen'] = latex2htmlcolor('0,1,0') colors['darkblue']", "for x in [1 - c * (1 - k)", "class fcolorbox(Command): args = '[ model:str ] bordercolor:str color:str self'", "* 255), 255) green = min(int(green * 255), 255) blue", "parts = [float(x) for x in arg.split(',')] # rgb if", "(1 - k) - k, 1 - y * (1", "= latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) self.style['border'] = ('1px solid %s' %", "args = '[ model:str ] color:str' def invoke(self, tex): a", "= 'name:str model:str color:str' def invoke(self, tex): a = self.parse(tex)", "latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class pagecolor(Command): args = '[ model:str ]", "colors['red'] = latex2htmlcolor('1,0,0') colors['green'] = latex2htmlcolor('0,1,0') colors['blue'] = latex2htmlcolor('0,0,1') colors['cyan']", "= latex2htmlcolor('0.9,0,0') colors['lightred'] = latex2htmlcolor('1,0,0') colors['darkgreen'] = latex2htmlcolor('0,0.6,0') colors['middlegreen'] =", "self.parse(tex) u = self.ownerDocument.userdata colors = u.getPath('packages/color/colors') colors[a['name']] = latex2htmlcolor(a['color'],", "invoke(self, tex): a = self.parse(tex) u = self.ownerDocument.userdata colors =", "* x) for x in [1 - c * (1", "model == 'named': return named.get(arg, '') if ',' in arg:", "arg.split(',')] # rgb if len(parts) == 3: red, green, blue", "= min(int(green * 255), 255) blue = min(int(blue * 255),", "blue = float(arg) except ValueError: try: return named[arg] except KeyError:", "= latex2htmlcolor('0,0,1') colors['darkcyan'] = latex2htmlcolor('0.6,0.8,0.8') 
colors['middlecyan'] = latex2htmlcolor('0,0.8,0.8') colors['darkmagenta'] =", "self' def invoke(self, tex): a = self.parse(tex) self.style['background-color'] = latex2htmlcolor(a['color'],", "== 4: c, m, y, k = parts red, green,", "= latex2htmlcolor(a['color'], a['model'], self.ownerDocument.userdata.getPath('packages/color/colors')) class color(Environment): args = '[ model:str" ]
[ "print(\"\\033[0;35;40mTeste\\033[m\") print(\"\\033[0;36;40mTeste\\033[m\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;38;40mTeste\\033[m\") print(\" - - - Testando os", "- Testando os 30 - - -\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;31;40mTeste\\033[m\") print(\"\\033[0;32;40mTeste\\033[m\")", "print(\"\\033[0;38;40mTeste\\033[m\") print(\" - - - Testando os 1ª - -", "- - Testando os 1ª - - -\") print(\"\\033[0;30;47mTeste\\033[m\") print(\"\\033[1;30;47mTexto", "print(\"\\033[0;30;46mTurquesa\\033[m\") print(\"\\033[0;30;47mBranco\\033[m\") print(\"\\033[0;36;48mFundo Transparente\\033[m\") print(\" - - - Testando os", "print(\"\\033[0;30;41mVermelho\\033[m\") print(\"\\033[0;30;42mVerde\\033[m\") print(\"\\033[0;30;43mAmarelo\\033[m\") print(\"\\033[0;30;44mRoxo\\033[m\") print(\"\\033[0;30;45mLilás\\033[m\") print(\"\\033[0;30;46mTurquesa\\033[m\") print(\"\\033[0;30;47mBranco\\033[m\") print(\"\\033[0;36;48mFundo Transparente\\033[m\") print(\"", "print(\"\\033[0;30;43mAmarelo\\033[m\") print(\"\\033[0;30;44mRoxo\\033[m\") print(\"\\033[0;30;45mLilás\\033[m\") print(\"\\033[0;30;46mTurquesa\\033[m\") print(\"\\033[0;30;47mBranco\\033[m\") print(\"\\033[0;36;48mFundo Transparente\\033[m\") print(\" - -", "40 - - -\") print(\"\\033[0;37;40mPreto\\033[m\") print(\"\\033[0;30;41mVermelho\\033[m\") print(\"\\033[0;30;42mVerde\\033[m\") print(\"\\033[0;30;43mAmarelo\\033[m\") print(\"\\033[0;30;44mRoxo\\033[m\") print(\"\\033[0;30;45mLilás\\033[m\")", "print(\"\\033[0;33;40mTeste\\033[m\") print(\"\\033[0;34;40mTeste\\033[m\") print(\"\\033[0;35;40mTeste\\033[m\") print(\"\\033[0;36;40mTeste\\033[m\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;38;40mTeste\\033[m\") print(\" - - -", "Transparente\\033[m\") print(\" - - - Testando os 30 - -", "print('\\033[7;32;40mTeste\\033[m') print('\\033[7;30mTeste\\033[m') print(\" - - - Testando os 40 -", "print(\"\\033[0;36;48mFundo Transparente\\033[m\") print(\" - - - Testando os 30 
-", "print(\"\\033[0;30;47mTeste\\033[m\") print(\"\\033[1;30;47mTexto em Negrito\\033[m\") print(\"\\033[2;30;47mTeste\\033[m\") print(\"\\033[3;30;47mFonta Itálica\\033[m\") print(\"\\033[4;30;47mSublinhado\\033[m\") print(\"\\033[5;30;47mTeste\\033[m\") print(\"\\033[6;30;47mTeste\\033[m\")", "- -\") print(\"\\033[0;37;40mPreto\\033[m\") print(\"\\033[0;30;41mVermelho\\033[m\") print(\"\\033[0;30;42mVerde\\033[m\") print(\"\\033[0;30;43mAmarelo\\033[m\") print(\"\\033[0;30;44mRoxo\\033[m\") print(\"\\033[0;30;45mLilás\\033[m\") print(\"\\033[0;30;46mTurquesa\\033[m\") print(\"\\033[0;30;47mBranco\\033[m\")", "print(\" - - - Testando os 40 - - -\")", "-\") print(\"\\033[0;30;47mTeste\\033[m\") print(\"\\033[1;30;47mTexto em Negrito\\033[m\") print(\"\\033[2;30;47mTeste\\033[m\") print(\"\\033[3;30;47mFonta Itálica\\033[m\") print(\"\\033[4;30;47mSublinhado\\033[m\") print(\"\\033[5;30;47mTeste\\033[m\")", "- - - Testando os 40 - - -\") print(\"\\033[0;37;40mPreto\\033[m\")", "-\") print(\"\\033[0;37;40mPreto\\033[m\") print(\"\\033[0;30;41mVermelho\\033[m\") print(\"\\033[0;30;42mVerde\\033[m\") print(\"\\033[0;30;43mAmarelo\\033[m\") print(\"\\033[0;30;44mRoxo\\033[m\") print(\"\\033[0;30;45mLilás\\033[m\") print(\"\\033[0;30;46mTurquesa\\033[m\") print(\"\\033[0;30;47mBranco\\033[m\") print(\"\\033[0;36;48mFundo", "- - Testando os 30 - - -\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;31;40mTeste\\033[m\")", "print(\"\\033[0;30;47mBranco\\033[m\") print(\"\\033[0;36;48mFundo Transparente\\033[m\") print(\" - - - Testando os 30", "print('\\033[4;33;44mTeste\\033[m') print('\\033[1;35;43mTeste\\033[m') print('\\033[7;32;40mTeste\\033[m') print('\\033[7;30mTeste\\033[m') print(\" - - - Testando os", "print(\" - - - Testando os 1ª - - -\")", "print(\"\\033[1;30;47mTexto em Negrito\\033[m\") print(\"\\033[2;30;47mTeste\\033[m\") print(\"\\033[3;30;47mFonta Itálica\\033[m\") print(\"\\033[4;30;47mSublinhado\\033[m\") print(\"\\033[5;30;47mTeste\\033[m\") 
print(\"\\033[6;30;47mTeste\\033[m\") print(\"\\033[7;30;47mTeste\\033[m\")", "- - -\") print(\"\\033[0;37;40mPreto\\033[m\") print(\"\\033[0;30;41mVermelho\\033[m\") print(\"\\033[0;30;42mVerde\\033[m\") print(\"\\033[0;30;43mAmarelo\\033[m\") print(\"\\033[0;30;44mRoxo\\033[m\") print(\"\\033[0;30;45mLilás\\033[m\") print(\"\\033[0;30;46mTurquesa\\033[m\")", "- -\") print(\"\\033[0;30;47mTeste\\033[m\") print(\"\\033[1;30;47mTexto em Negrito\\033[m\") print(\"\\033[2;30;47mTeste\\033[m\") print(\"\\033[3;30;47mFonta Itálica\\033[m\") print(\"\\033[4;30;47mSublinhado\\033[m\")", "print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;31;40mTeste\\033[m\") print(\"\\033[0;32;40mTeste\\033[m\") print(\"\\033[0;33;40mTeste\\033[m\") print(\"\\033[0;34;40mTeste\\033[m\") print(\"\\033[0;35;40mTeste\\033[m\") print(\"\\033[0;36;40mTeste\\033[m\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;38;40mTeste\\033[m\") print(\"", "print(\"\\033[0;30;45mLilás\\033[m\") print(\"\\033[0;30;46mTurquesa\\033[m\") print(\"\\033[0;30;47mBranco\\033[m\") print(\"\\033[0;36;48mFundo Transparente\\033[m\") print(\" - - - Testando", "print(\"\\033[0;32;40mTeste\\033[m\") print(\"\\033[0;33;40mTeste\\033[m\") print(\"\\033[0;34;40mTeste\\033[m\") print(\"\\033[0;35;40mTeste\\033[m\") print(\"\\033[0;36;40mTeste\\033[m\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;38;40mTeste\\033[m\") print(\" - -", "-\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;31;40mTeste\\033[m\") print(\"\\033[0;32;40mTeste\\033[m\") print(\"\\033[0;33;40mTeste\\033[m\") print(\"\\033[0;34;40mTeste\\033[m\") print(\"\\033[0;35;40mTeste\\033[m\") print(\"\\033[0;36;40mTeste\\033[m\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;38;40mTeste\\033[m\")", "Testando os 1ª - - -\") print(\"\\033[0;30;47mTeste\\033[m\") print(\"\\033[1;30;47mTexto em Negrito\\033[m\")", "- - -\") print(\"\\033[0;30;47mTeste\\033[m\") print(\"\\033[1;30;47mTexto em Negrito\\033[m\") 
print(\"\\033[2;30;47mTeste\\033[m\") print(\"\\033[3;30;47mFonta Itálica\\033[m\")", "30 - - -\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;31;40mTeste\\033[m\") print(\"\\033[0;32;40mTeste\\033[m\") print(\"\\033[0;33;40mTeste\\033[m\") print(\"\\033[0;34;40mTeste\\033[m\") print(\"\\033[0;35;40mTeste\\033[m\")", "print(\"\\033[0;37;40mPreto\\033[m\") print(\"\\033[0;30;41mVermelho\\033[m\") print(\"\\033[0;30;42mVerde\\033[m\") print(\"\\033[0;30;43mAmarelo\\033[m\") print(\"\\033[0;30;44mRoxo\\033[m\") print(\"\\033[0;30;45mLilás\\033[m\") print(\"\\033[0;30;46mTurquesa\\033[m\") print(\"\\033[0;30;47mBranco\\033[m\") print(\"\\033[0;36;48mFundo Transparente\\033[m\")", "Testando os 40 - - -\") print(\"\\033[0;37;40mPreto\\033[m\") print(\"\\033[0;30;41mVermelho\\033[m\") print(\"\\033[0;30;42mVerde\\033[m\") print(\"\\033[0;30;43mAmarelo\\033[m\")", "print(\"\\033[0;34;40mTeste\\033[m\") print(\"\\033[0;35;40mTeste\\033[m\") print(\"\\033[0;36;40mTeste\\033[m\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;38;40mTeste\\033[m\") print(\" - - - Testando", "Testando os 30 - - -\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;31;40mTeste\\033[m\") print(\"\\033[0;32;40mTeste\\033[m\") print(\"\\033[0;33;40mTeste\\033[m\")", "print(\"\\033[0;31;40mTeste\\033[m\") print(\"\\033[0;32;40mTeste\\033[m\") print(\"\\033[0;33;40mTeste\\033[m\") print(\"\\033[0;34;40mTeste\\033[m\") print(\"\\033[0;35;40mTeste\\033[m\") print(\"\\033[0;36;40mTeste\\033[m\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;38;40mTeste\\033[m\") print(\" -", "<reponame>gabrieleliasdev/python-cev print('\\033[0;33;44mTeste\\033[m') print('\\033[4;33;44mTeste\\033[m') print('\\033[1;35;43mTeste\\033[m') print('\\033[7;32;40mTeste\\033[m') print('\\033[7;30mTeste\\033[m') print(\" - - -", "print(\"\\033[0;30;42mVerde\\033[m\") print(\"\\033[0;30;43mAmarelo\\033[m\") print(\"\\033[0;30;44mRoxo\\033[m\") print(\"\\033[0;30;45mLilás\\033[m\") 
print(\"\\033[0;30;46mTurquesa\\033[m\") print(\"\\033[0;30;47mBranco\\033[m\") print(\"\\033[0;36;48mFundo Transparente\\033[m\") print(\" -", "print('\\033[0;33;44mTeste\\033[m') print('\\033[4;33;44mTeste\\033[m') print('\\033[1;35;43mTeste\\033[m') print('\\033[7;32;40mTeste\\033[m') print('\\033[7;30mTeste\\033[m') print(\" - - - Testando", "print(\"\\033[0;30;44mRoxo\\033[m\") print(\"\\033[0;30;45mLilás\\033[m\") print(\"\\033[0;30;46mTurquesa\\033[m\") print(\"\\033[0;30;47mBranco\\033[m\") print(\"\\033[0;36;48mFundo Transparente\\033[m\") print(\" - - -", "- - - Testando os 30 - - -\") print(\"\\033[0;37;40mTeste\\033[m\")", "- -\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;31;40mTeste\\033[m\") print(\"\\033[0;32;40mTeste\\033[m\") print(\"\\033[0;33;40mTeste\\033[m\") print(\"\\033[0;34;40mTeste\\033[m\") print(\"\\033[0;35;40mTeste\\033[m\") print(\"\\033[0;36;40mTeste\\033[m\") print(\"\\033[0;37;40mTeste\\033[m\")", "- Testando os 40 - - -\") print(\"\\033[0;37;40mPreto\\033[m\") print(\"\\033[0;30;41mVermelho\\033[m\") print(\"\\033[0;30;42mVerde\\033[m\")", "os 40 - - -\") print(\"\\033[0;37;40mPreto\\033[m\") print(\"\\033[0;30;41mVermelho\\033[m\") print(\"\\033[0;30;42mVerde\\033[m\") print(\"\\033[0;30;43mAmarelo\\033[m\") print(\"\\033[0;30;44mRoxo\\033[m\")", "print(\" - - - Testando os 30 - - -\")", "os 1ª - - -\") print(\"\\033[0;30;47mTeste\\033[m\") print(\"\\033[1;30;47mTexto em Negrito\\033[m\") print(\"\\033[2;30;47mTeste\\033[m\")", "os 30 - - -\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;31;40mTeste\\033[m\") print(\"\\033[0;32;40mTeste\\033[m\") print(\"\\033[0;33;40mTeste\\033[m\") print(\"\\033[0;34;40mTeste\\033[m\")", "print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;38;40mTeste\\033[m\") print(\" - - - Testando os 1ª -", "- - - Testando os 1ª - - -\") print(\"\\033[0;30;47mTeste\\033[m\")", "- Testando os 1ª - - -\") print(\"\\033[0;30;47mTeste\\033[m\") print(\"\\033[1;30;47mTexto em", 
"print('\\033[7;30mTeste\\033[m') print(\" - - - Testando os 40 - -", "1ª - - -\") print(\"\\033[0;30;47mTeste\\033[m\") print(\"\\033[1;30;47mTexto em Negrito\\033[m\") print(\"\\033[2;30;47mTeste\\033[m\") print(\"\\033[3;30;47mFonta", "- - Testando os 40 - - -\") print(\"\\033[0;37;40mPreto\\033[m\") print(\"\\033[0;30;41mVermelho\\033[m\")", "print(\"\\033[0;36;40mTeste\\033[m\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;38;40mTeste\\033[m\") print(\" - - - Testando os 1ª", "em Negrito\\033[m\") print(\"\\033[2;30;47mTeste\\033[m\") print(\"\\033[3;30;47mFonta Itálica\\033[m\") print(\"\\033[4;30;47mSublinhado\\033[m\") print(\"\\033[5;30;47mTeste\\033[m\") print(\"\\033[6;30;47mTeste\\033[m\") print(\"\\033[7;30;47mTeste\\033[m\") print(\"\\033[7;38;47mTeste\\033[m\")", "print('\\033[1;35;43mTeste\\033[m') print('\\033[7;32;40mTeste\\033[m') print('\\033[7;30mTeste\\033[m') print(\" - - - Testando os 40", "- - -\") print(\"\\033[0;37;40mTeste\\033[m\") print(\"\\033[0;31;40mTeste\\033[m\") print(\"\\033[0;32;40mTeste\\033[m\") print(\"\\033[0;33;40mTeste\\033[m\") print(\"\\033[0;34;40mTeste\\033[m\") print(\"\\033[0;35;40mTeste\\033[m\") print(\"\\033[0;36;40mTeste\\033[m\")" ]
[ "<NAME>, <NAME>, <NAME> and the TurboGears community\" email = \"<EMAIL>\"", "url=\"http://www.turbogears.org/\" author= \"<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and", "widgets (ToscaWidgets2) * automatic JSON generation from your controllers *", "development and everything in between: * dynamic javascript powered widgets", "database driven applications. It provides a full range of tools", "(SQLAlchemy) The latest development version is available in the `TurboGears", "= \"<EMAIL>\" copyright = \"\"\"Copyright 2005-2020 <NAME>, <NAME>, <NAME>, <NAME>,", "back database development and everything in between: * dynamic javascript", "the `TurboGears Git repositories`_. .. _TurboGears Git repositories: https://github.com/TurboGears \"\"\"", "provides a full range of tools for front end javascript", "set of tools for everything you need to build dynamic,", "based templating * object or route based URL dispatching *", "tools for front end javascript develeopment, back database development and", "database development and everything in between: * dynamic javascript powered", "* automatic JSON generation from your controllers * powerful, designer", "tools for everything you need to build dynamic, database driven", "based URL dispatching * powerful Object Relational Mappers (SQLAlchemy) The", "* powerful, designer friendly XHTML based templating * object or", "automatic JSON generation from your controllers * powerful, designer friendly", "designer friendly XHTML based templating * object or route based", "<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and the TurboGears community\"", "(ToscaWidgets2) * automatic JSON generation from your controllers * powerful,", "powered widgets (ToscaWidgets2) * automatic JSON generation from your controllers", "information\"\"\" version = \"2.4.3\" description = \"Next generation TurboGears\" long_description=\"\"\"", "and everything in between: * dynamic javascript powered widgets (ToscaWidgets2)", "from your controllers 
* powerful, designer friendly XHTML based templating", "\"\"\"TurboGears project related information\"\"\" version = \"2.4.3\" description = \"Next", "flexible, full featured, and easy to use web framework. TurboGears", "<NAME>, <NAME>, <NAME>, <NAME>, <NAME> and the TurboGears community\" email", "2005-2020 <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and contributors\"\"\" license =", "or route based URL dispatching * powerful Object Relational Mappers", "latest development version is available in the `TurboGears Git repositories`_.", "\"Next generation TurboGears\" long_description=\"\"\" TurboGears brings together a best of", "<NAME>, <NAME> and the TurboGears community\" email = \"<EMAIL>\" copyright", "* powerful Object Relational Mappers (SQLAlchemy) The latest development version", "in between: * dynamic javascript powered widgets (ToscaWidgets2) * automatic", "dynamic javascript powered widgets (ToscaWidgets2) * automatic JSON generation from", "and well tested set of tools for everything you need", "project related information\"\"\" version = \"2.4.3\" description = \"Next generation", "development version is available in the `TurboGears Git repositories`_. ..", "best of breed python tools to create a flexible, full", ".. _TurboGears Git repositories: https://github.com/TurboGears \"\"\" url=\"http://www.turbogears.org/\" author= \"<NAME>, <NAME>,", "to build dynamic, database driven applications. It provides a full", "\"<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and the TurboGears", "and the TurboGears community\" email = \"<EMAIL>\" copyright = \"\"\"Copyright", "powerful Object Relational Mappers (SQLAlchemy) The latest development version is", "integrated and well tested set of tools for everything you", "everything you need to build dynamic, database driven applications. 
It", "object or route based URL dispatching * powerful Object Relational", "Git repositories: https://github.com/TurboGears \"\"\" url=\"http://www.turbogears.org/\" author= \"<NAME>, <NAME>, <NAME>, <NAME>,", "your controllers * powerful, designer friendly XHTML based templating *", "JSON generation from your controllers * powerful, designer friendly XHTML", "controllers * powerful, designer friendly XHTML based templating * object", "\"\"\"Copyright 2005-2020 <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and contributors\"\"\" license", "The latest development version is available in the `TurboGears Git", "tools to create a flexible, full featured, and easy to", "easy to use web framework. TurboGears 2 provides an integrated", "in the `TurboGears Git repositories`_. .. _TurboGears Git repositories: https://github.com/TurboGears", "copyright = \"\"\"Copyright 2005-2020 <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and", "dispatching * powerful Object Relational Mappers (SQLAlchemy) The latest development", "version = \"2.4.3\" description = \"Next generation TurboGears\" long_description=\"\"\" TurboGears", "brings together a best of breed python tools to create", "TurboGears community\" email = \"<EMAIL>\" copyright = \"\"\"Copyright 2005-2020 <NAME>,", "description = \"Next generation TurboGears\" long_description=\"\"\" TurboGears brings together a", "TurboGears brings together a best of breed python tools to", "end javascript develeopment, back database development and everything in between:", "_TurboGears Git repositories: https://github.com/TurboGears \"\"\" url=\"http://www.turbogears.org/\" author= \"<NAME>, <NAME>, <NAME>,", "<NAME>, <NAME>, <NAME>, <NAME>, <NAME> and contributors\"\"\" license = \"MIT\"", "applications. It provides a full range of tools for front", "javascript powered widgets (ToscaWidgets2) * automatic JSON generation from your", "dynamic, database driven applications. It provides a full range of", "full featured, and easy to use web framework. 
TurboGears 2", "<NAME> and the TurboGears community\" email = \"<EMAIL>\" copyright =", "Mappers (SQLAlchemy) The latest development version is available in the", "<NAME>, <NAME>, <NAME>, <NAME> and the TurboGears community\" email =", "email = \"<EMAIL>\" copyright = \"\"\"Copyright 2005-2020 <NAME>, <NAME>, <NAME>,", "framework. TurboGears 2 provides an integrated and well tested set", "available in the `TurboGears Git repositories`_. .. _TurboGears Git repositories:", "front end javascript develeopment, back database development and everything in", "2 provides an integrated and well tested set of tools", "develeopment, back database development and everything in between: * dynamic", "\"2.4.3\" description = \"Next generation TurboGears\" long_description=\"\"\" TurboGears brings together", "long_description=\"\"\" TurboGears brings together a best of breed python tools", "related information\"\"\" version = \"2.4.3\" description = \"Next generation TurboGears\"", "to use web framework. TurboGears 2 provides an integrated and", "for front end javascript develeopment, back database development and everything", "= \"\"\"Copyright 2005-2020 <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and contributors\"\"\"", "an integrated and well tested set of tools for everything", "* dynamic javascript powered widgets (ToscaWidgets2) * automatic JSON generation", "driven applications. 
It provides a full range of tools for", "repositories: https://github.com/TurboGears \"\"\" url=\"http://www.turbogears.org/\" author= \"<NAME>, <NAME>, <NAME>, <NAME>, <NAME>,", "well tested set of tools for everything you need to", "of tools for everything you need to build dynamic, database", "python tools to create a flexible, full featured, and easy", "to create a flexible, full featured, and easy to use", "a best of breed python tools to create a flexible,", "TurboGears 2 provides an integrated and well tested set of", "= \"Next generation TurboGears\" long_description=\"\"\" TurboGears brings together a best", "featured, and easy to use web framework. TurboGears 2 provides", "provides an integrated and well tested set of tools for", "Object Relational Mappers (SQLAlchemy) The latest development version is available", "Git repositories`_. .. _TurboGears Git repositories: https://github.com/TurboGears \"\"\" url=\"http://www.turbogears.org/\" author=", "community\" email = \"<EMAIL>\" copyright = \"\"\"Copyright 2005-2020 <NAME>, <NAME>,", "generation TurboGears\" long_description=\"\"\" TurboGears brings together a best of breed", "together a best of breed python tools to create a", "use web framework. TurboGears 2 provides an integrated and well", "templating * object or route based URL dispatching * powerful", "create a flexible, full featured, and easy to use web", "route based URL dispatching * powerful Object Relational Mappers (SQLAlchemy)", "tested set of tools for everything you need to build", "`TurboGears Git repositories`_. .. _TurboGears Git repositories: https://github.com/TurboGears \"\"\" url=\"http://www.turbogears.org/\"", "version is available in the `TurboGears Git repositories`_. .. _TurboGears", "a full range of tools for front end javascript develeopment,", "and easy to use web framework. TurboGears 2 provides an", "you need to build dynamic, database driven applications. 
It provides", "URL dispatching * powerful Object Relational Mappers (SQLAlchemy) The latest", "is available in the `TurboGears Git repositories`_. .. _TurboGears Git", "repositories`_. .. _TurboGears Git repositories: https://github.com/TurboGears \"\"\" url=\"http://www.turbogears.org/\" author= \"<NAME>,", "a flexible, full featured, and easy to use web framework.", "\"\"\" url=\"http://www.turbogears.org/\" author= \"<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>", "range of tools for front end javascript develeopment, back database", "javascript develeopment, back database development and everything in between: *", "Relational Mappers (SQLAlchemy) The latest development version is available in", "everything in between: * dynamic javascript powered widgets (ToscaWidgets2) *", "https://github.com/TurboGears \"\"\" url=\"http://www.turbogears.org/\" author= \"<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,", "friendly XHTML based templating * object or route based URL", "XHTML based templating * object or route based URL dispatching", "between: * dynamic javascript powered widgets (ToscaWidgets2) * automatic JSON", "powerful, designer friendly XHTML based templating * object or route", "web framework. TurboGears 2 provides an integrated and well tested", "the TurboGears community\" email = \"<EMAIL>\" copyright = \"\"\"Copyright 2005-2020", "= \"2.4.3\" description = \"Next generation TurboGears\" long_description=\"\"\" TurboGears brings", "generation from your controllers * powerful, designer friendly XHTML based", "of breed python tools to create a flexible, full featured,", "author= \"<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME> and the", "\"<EMAIL>\" copyright = \"\"\"Copyright 2005-2020 <NAME>, <NAME>, <NAME>, <NAME>, <NAME>", "need to build dynamic, database driven applications. 
It provides a", "It provides a full range of tools for front end", "of tools for front end javascript develeopment, back database development", "* object or route based URL dispatching * powerful Object", "build dynamic, database driven applications. It provides a full range", "for everything you need to build dynamic, database driven applications.", "TurboGears\" long_description=\"\"\" TurboGears brings together a best of breed python", "full range of tools for front end javascript develeopment, back", "breed python tools to create a flexible, full featured, and" ]
[ "except: pass ## U,V,H mean em = e.mean(axis=0) print('e mean", "pass ## U,V,H mean em = e.mean(axis=0) print('e mean done.')", "create ouputfolder try: os.mkdir(runpath+'/analysis') except: pass ## U,V,H mean em", "import sparse import time as tictoc from netCDF4 import Dataset", "= np.load(runpath+'/e_sub.npy')[skip:,:,:] print('run %i read.' % runfolder) ## create ouputfolder", "tictoc from netCDF4 import Dataset # OPTIONS runfolder = 15", "dict() all_var2export = ['em'] for v in all_var2export: exec('dic[v] ='+v)", "import print_function path = '/home/mkloewer/python/swm/' import os; os.chdir(path) # change", "as tictoc from netCDF4 import Dataset # OPTIONS runfolder =", "STORING dic = dict() all_var2export = ['em'] for v in", "__future__ import print_function path = '/home/mkloewer/python/swm/' import os; os.chdir(path) #", "all_var2export = ['em'] for v in all_var2export: exec('dic[v] ='+v) np.save(runpath+'/analysis/mean_e.npy',dic)", "done.') ## STORING dic = dict() all_var2export = ['em'] for", "['em'] for v in all_var2export: exec('dic[v] ='+v) np.save(runpath+'/analysis/mean_e.npy',dic) print('Everything stored.')", "np.load(runpath+'/e_sub.npy')[skip:,:,:] print('run %i read.' 
% runfolder) ## create ouputfolder try:", "as np from scipy import sparse import time as tictoc", "directory import numpy as np from scipy import sparse import", "print('e mean done.') ## STORING dic = dict() all_var2export =", "ouputfolder try: os.mkdir(runpath+'/analysis') except: pass ## U,V,H mean em =", "= e.mean(axis=0) print('e mean done.') ## STORING dic = dict()", "os; os.chdir(path) # change working directory import numpy as np", "mean em = e.mean(axis=0) print('e mean done.') ## STORING dic", "os.mkdir(runpath+'/analysis') except: pass ## U,V,H mean em = e.mean(axis=0) print('e", "mean done.') ## STORING dic = dict() all_var2export = ['em']", "import numpy as np from scipy import sparse import time", "e.mean(axis=0) print('e mean done.') ## STORING dic = dict() all_var2export", "path = '/home/mkloewer/python/swm/' import os; os.chdir(path) # change working directory", "from netCDF4 import Dataset # OPTIONS runfolder = 15 print('Calculating", "' + str(runfolder)) ## read data runpath = path+'data/run%04i' %", "5*365 e = np.load(runpath+'/e_sub.npy')[skip:,:,:] print('run %i read.' % runfolder) ##", "AS .NPY from __future__ import print_function path = '/home/mkloewer/python/swm/' import", "skip = 5*365 e = np.load(runpath+'/e_sub.npy')[skip:,:,:] print('run %i read.' 
%", "data runpath = path+'data/run%04i' % runfolder skip = 5*365 e", "path+'data/run%04i' % runfolder skip = 5*365 e = np.load(runpath+'/e_sub.npy')[skip:,:,:] print('run", "% runfolder skip = 5*365 e = np.load(runpath+'/e_sub.npy')[skip:,:,:] print('run %i", "% runfolder) ## create ouputfolder try: os.mkdir(runpath+'/analysis') except: pass ##", "means from run ' + str(runfolder)) ## read data runpath", "Dataset # OPTIONS runfolder = 15 print('Calculating subgrid-EKE means from", "## create ouputfolder try: os.mkdir(runpath+'/analysis') except: pass ## U,V,H mean", "str(runfolder)) ## read data runpath = path+'data/run%04i' % runfolder skip", "em = e.mean(axis=0) print('e mean done.') ## STORING dic =", "runpath = path+'data/run%04i' % runfolder skip = 5*365 e =", ".NPY from __future__ import print_function path = '/home/mkloewer/python/swm/' import os;", "AND EXPORT AS .NPY from __future__ import print_function path =", "CALCULATIONS AND EXPORT AS .NPY from __future__ import print_function path", "from scipy import sparse import time as tictoc from netCDF4", "runfolder) ## create ouputfolder try: os.mkdir(runpath+'/analysis') except: pass ## U,V,H", "dic = dict() all_var2export = ['em'] for v in all_var2export:", "read.' 
% runfolder) ## create ouputfolder try: os.mkdir(runpath+'/analysis') except: pass", "PRODUCE MEAN CALCULATIONS AND EXPORT AS .NPY from __future__ import", "netCDF4 import Dataset # OPTIONS runfolder = 15 print('Calculating subgrid-EKE", "= 15 print('Calculating subgrid-EKE means from run ' + str(runfolder))", "<filename>swm-master/swm-master/calc/mean_e_calc.py ## PRODUCE MEAN CALCULATIONS AND EXPORT AS .NPY from", "scipy import sparse import time as tictoc from netCDF4 import", "## read data runpath = path+'data/run%04i' % runfolder skip =", "import Dataset # OPTIONS runfolder = 15 print('Calculating subgrid-EKE means", "import os; os.chdir(path) # change working directory import numpy as", "runfolder = 15 print('Calculating subgrid-EKE means from run ' +", "change working directory import numpy as np from scipy import", "# change working directory import numpy as np from scipy", "import time as tictoc from netCDF4 import Dataset # OPTIONS", "os.chdir(path) # change working directory import numpy as np from", "time as tictoc from netCDF4 import Dataset # OPTIONS runfolder", "from run ' + str(runfolder)) ## read data runpath =", "sparse import time as tictoc from netCDF4 import Dataset #", "OPTIONS runfolder = 15 print('Calculating subgrid-EKE means from run '", "U,V,H mean em = e.mean(axis=0) print('e mean done.') ## STORING", "'/home/mkloewer/python/swm/' import os; os.chdir(path) # change working directory import numpy", "= 5*365 e = np.load(runpath+'/e_sub.npy')[skip:,:,:] print('run %i read.' % runfolder)", "EXPORT AS .NPY from __future__ import print_function path = '/home/mkloewer/python/swm/'", "print('Calculating subgrid-EKE means from run ' + str(runfolder)) ## read", "print('run %i read.' 
% runfolder) ## create ouputfolder try: os.mkdir(runpath+'/analysis')", "from __future__ import print_function path = '/home/mkloewer/python/swm/' import os; os.chdir(path)", "read data runpath = path+'data/run%04i' % runfolder skip = 5*365", "working directory import numpy as np from scipy import sparse", "%i read.' % runfolder) ## create ouputfolder try: os.mkdir(runpath+'/analysis') except:", "numpy as np from scipy import sparse import time as", "print_function path = '/home/mkloewer/python/swm/' import os; os.chdir(path) # change working", "= dict() all_var2export = ['em'] for v in all_var2export: exec('dic[v]", "## STORING dic = dict() all_var2export = ['em'] for v", "= '/home/mkloewer/python/swm/' import os; os.chdir(path) # change working directory import", "= path+'data/run%04i' % runfolder skip = 5*365 e = np.load(runpath+'/e_sub.npy')[skip:,:,:]", "np from scipy import sparse import time as tictoc from", "subgrid-EKE means from run ' + str(runfolder)) ## read data", "## PRODUCE MEAN CALCULATIONS AND EXPORT AS .NPY from __future__", "MEAN CALCULATIONS AND EXPORT AS .NPY from __future__ import print_function", "15 print('Calculating subgrid-EKE means from run ' + str(runfolder)) ##", "try: os.mkdir(runpath+'/analysis') except: pass ## U,V,H mean em = e.mean(axis=0)", "## U,V,H mean em = e.mean(axis=0) print('e mean done.') ##", "+ str(runfolder)) ## read data runpath = path+'data/run%04i' % runfolder", "# OPTIONS runfolder = 15 print('Calculating subgrid-EKE means from run", "run ' + str(runfolder)) ## read data runpath = path+'data/run%04i'", "runfolder skip = 5*365 e = np.load(runpath+'/e_sub.npy')[skip:,:,:] print('run %i read.'", "e = np.load(runpath+'/e_sub.npy')[skip:,:,:] print('run %i read.' % runfolder) ## create", "= ['em'] for v in all_var2export: exec('dic[v] ='+v) np.save(runpath+'/analysis/mean_e.npy',dic) print('Everything" ]
[ "words_file, self.board_size, 3) def solve(self, grid): \"\"\"Generate all solutions for", "by u. if word[1] != 'u': continue # Remove \"u\"", "= set() for start in range(len(string)): cur = self.trie letters", "s, cur_node, seen + [cur_sq])) if cur_node._is_word: if s[0] ==", "seen + [cur_sq])) if cur_node._is_word: if s[0] == 'q': #", "method to print a 4x4 boggle grid. Arguments: grid --", "to bottom right. \"\"\" for y in range(self.ylen): print('+' +", "the specified length limits are filtered out. Arguments: xlen --", "f = open(words_file) try: for word in f: if sys.version", "trie.Trie() word_count = 0 if words_file.endswith('gz'): import gzip f =", "pnode, seen = q.popleft() pnode_get_child = pnode.get_child if adjs: adj", "def _create_adjacency_matrix(xlim, ylim): adj_list = [[]] * (ylim * xlim)", "# Look at row below current cell. if y+1 <", "'q' + word[2:] root.insert(word) word_count += 1 finally: f.close() print('Loaded',", "< ylim: below = sq + xlim # Look to", "Pre-compute adjacency matrix. \"\"\" assert(xlen > 1) assert(ylen > 1)", "!= self.board_size: raise RuntimeError('invalid board') board = list(grid) trie =", "row that current cell is on. # Look to left", "* self.xlen line = ['| '] for x in range(self.xlen):", "\"\"\"Create and initialize BoggleSolver instance. This creates the internal trie", "\"\"\" found = set() for start in range(len(string)): cur =", "Look to right of current cell. if x+1 < xlim:", "1) # Look at row below current cell. if y+1", "valid substrings. Returns: List of substrings that are valid words.", "print('Loaded', word_count, 'words from file.') return root @staticmethod def _create_adjacency_matrix(xlim,", "# Skip words that are too long or too short.", "\"\"\"Generate all solutions for the given boggle grid. Arguments: grid", "acceptable boggle words. When an instance of this class is", "to upper left. 
if x-1 >= 0: adj.append(above - 1)", "root = trie.Trie() word_count = 0 if words_file.endswith('gz'): import gzip", "ylim, i) adj_list[i] = adj return adj_list @staticmethod def _calc_adjacency(xlim,", "@staticmethod def _calc_adjacency(xlim, ylim, sq): adj = [] y =", "= [None] * self.board_size count = 0 for l in", "Arguments: words_file -- Path of file containing words for reference.", "y * self.xlen line = ['| '] for x in", "class BoggleSolver(object): \"\"\" This class uses an external words file", "grid. Arguments: grid -- A string of X*Y characters representing", "right. if x+1 < xlim: adj.append(above + 1) # Look", "found @staticmethod def _load_dictionary(words_file, max_len, min_len): \"\"\"Private method to create", "found.add(''.join(letters[:count])) if not cur.has_children(): break return found @staticmethod def _load_dictionary(words_file,", "+= 1 cur = cur.get_child(l) if cur is None: break", "ylim, sq): adj = [] y = int(sq / xlim)", "word_len = len(word) if word_len > max_len or word_len <", "filtered out. Arguments: xlen -- X dimension (width) of board.", "to lower rigth. if x+1 < xlim: adj.append(below + 1)", "prefix + c q.append((cur_sq, s, cur_node, seen + [cur_sq])) if", "sys.version < '3': range = xrange class BoggleSolver(object): \"\"\" This", "or word_len < min_len: continue # Skip words that start", "index = y * xlim + x adj = BoggleSolver._calc_adjacency(xlim,", "invalid grid. \"\"\" if self.trie is None: raise RuntimeError('words file", "# Current cell index = y * xlim + x", "trie if sys.version < '3': range = xrange class BoggleSolver(object):", "<reponame>gammazero/pybogglesolver \"\"\" Module to generate solutions for Boggle grids. <NAME>", "upper left. if x-1 >= 0: adj.append(above - 1) #", "ylen if pre_compute_adj: self.adjacency = BoggleSolver._create_adjacency_matrix(xlen, ylen) else: self.adjacency =", "as a dictionary of acceptable boggle words. When an instance", "0: adj.append(above - 1) # Look above. 
adj.append(above) # Look", "= word.strip().decode(\"utf-8\") # Skip words that are too long or", "Look above. adj.append(above) # Look upper right. if x+1 <", "of file containing words for reference. Return: Count of words", "= pnode.get_child if adjs: adj = adjs[parent_sq] else: adj =", "a word are correctly found. Arguments: string -- The string", "+ [cur_sq])) if cur_node._is_word: if s[0] == 'q': # Rehydrate", "utility for testing that all substrings of a word are", "top left to bottom right. \"\"\" for y in range(self.ylen):", "== 'q': # Skip words starting with q not followed", "substrings that are valid words. \"\"\" found = set() for", "the boggle grid. None if given invalid grid. \"\"\" if", "'3': word = word.strip() else: word = word.strip().decode(\"utf-8\") # Skip", "board[cur_sq] cur_node = pnode_get_child(c) if cur_node is None: continue s", "reference. Return: Count of words inserted into trie. \"\"\" if", "Rehydrate q-words with 'u'. words.add('qu' + s[1:]) else: words.add(s) return", "= ylen self.board_size = xlen * ylen if pre_compute_adj: self.adjacency", "for valid substrings. Returns: List of substrings that are valid", "self.ylen = ylen self.board_size = xlen * ylen if pre_compute_adj:", "containing words for reference. Return: Count of words inserted into", "finding words. Arguments: words_file -- Path of file containing words", "word[0] == 'q': # Skip words starting with q not", "= 0 for l in string[start:]: letters[count] = l count", "(width) of board. ylen -- Y dimension (height) of board.", "continue s = prefix + c q.append((cur_sq, s, cur_node, seen", "4x4 boggle grid. Arguments: grid -- A string of X*Y", "= 'q' + word[2:] root.insert(word) word_count += 1 finally: f.close()", "dictionary...') root = trie.Trie() word_count = 0 if words_file.endswith('gz'): import", "xlim) # Look at row above current cell. if y-1", "creates the internal trie for fast word lookup letter-by-letter. 
Words", "found: ' + words_file) print('creating dictionary...') root = trie.Trie() word_count", "\"\"\" assert(xlen > 1) assert(ylen > 1) self.xlen = xlen", "if y+1 < ylim: below = sq + xlim #", "Y dimension (height) of board. pre_compute_adj -- Pre-compute adjacency matrix.", "\"\"\" for y in range(self.ylen): print('+' + '---+' * self.xlen)", "find_substrings(self, string): \"\"\"Find all valid substrings in the given string.", "an instance of this class is created, it sets up", "board = list(grid) trie = self.trie words = set() q", "RuntimeError('words file not found: ' + words_file) print('creating dictionary...') root", "int(sq / xlim) x = sq - (y * xlim)", "letters and words that are not within the specified length", "same row that current cell is on. # Look to", "grid -- A string of X*Y characters representing the letters", "if words_file.endswith('gz'): import gzip f = gzip.open(words_file) elif words_file.endswith('bz2'): import", "= sq - (y * xlim) # Look at row", "_calc_adjacency(xlim, ylim, sq): adj = [] y = int(sq /", "0 for l in string[start:]: letters[count] = l count +=", "+ 1) # Look at row below current cell. if", "dimension (width) of board. ylen -- Y dimension (height) of", "= xlen self.ylen = ylen self.board_size = xlen * ylen", "* self.xlen) def find_substrings(self, string): \"\"\"Find all valid substrings in", "in the given string. This method is not necessary for", "left to bottom right. \"\"\" for y in range(self.ylen): print('+'", "for testing that all substrings of a word are correctly", "print('+' + '---+' * self.xlen) def find_substrings(self, string): \"\"\"Find all", "adj_list @staticmethod def _calc_adjacency(xlim, ylim, sq): adj = [] y", "[None] * self.board_size count = 0 for l in string[start:]:", "word_count, 'words from file.') return root @staticmethod def _create_adjacency_matrix(xlim, ylim):", "1 cur = cur.get_child(l) if cur is None: break if", "grid. None if given invalid grid. 
\"\"\" if self.trie is", "self.adjacency = BoggleSolver._create_adjacency_matrix(xlen, ylen) else: self.adjacency = None self.trie =", "= y * xlim + x adj = BoggleSolver._calc_adjacency(xlim, ylim,", "valid boggle answers. The class' solve method can be used", "trie for finding words. Arguments: words_file -- Path of file", "current cell. if x-1 >= 0: adj.append(sq - 1) #", "# Look below. adj.append(below) # Look to lower rigth. if", "# Look to left of current cell. if x-1 >=", "s[0] == 'q': # Rehydrate q-words with 'u'. words.add('qu' +", "word[1] != 'u': continue # Remove \"u\" from q-words so", "for word in f: if sys.version < '3': word =", "raise RuntimeError('invalid board') board = list(grid) trie = self.trie words", "class uses an external words file as a dictionary of", "= BoggleSolver._create_adjacency_matrix(xlen, ylen) else: self.adjacency = None self.trie = BoggleSolver._load_dictionary(", "= adjs[parent_sq] else: adj = self._calc_adjacency(self.xlen, self.ylen, parent_sq) for cur_sq", "import os import sys import collections import trie if sys.version", "of current cell. if x-1 >= 0: adj.append(sq - 1)", "are filtered out. Arguments: xlen -- X dimension (width) of", "= l count += 1 cur = cur.get_child(l) if cur", "solver, but is a utility for testing that all substrings", "'q': # Skip words starting with q not followed by", "Return: Count of words inserted into trie. \"\"\" if not", "(height) of board. pre_compute_adj -- Pre-compute adjacency matrix. \"\"\" assert(xlen", "matrix. \"\"\" assert(xlen > 1) assert(ylen > 1) self.xlen =", "only the q is matched. word = 'q' + word[2:]", "file containing words for reference. Return: Count of words inserted", "words. 
When an instance of this class is created, it", "in string[start:]: letters[count] = l count += 1 cur =", "f = bz2.BZ2File(words_file) else: f = open(words_file) try: for word", "ylim): adj_list = [[]] * (ylim * xlim) for i", "2009 \"\"\" from __future__ import print_function import os import sys", "self.xlen = xlen self.ylen = ylen self.board_size = xlen *", "Look to lower rigth. if x+1 < xlim: adj.append(below +", "boggle grids. \"\"\" def __init__(self, words_file, xlen=4, ylen=4, pre_compute_adj=False): \"\"\"Create", "= xlen * ylen if pre_compute_adj: self.adjacency = BoggleSolver._create_adjacency_matrix(xlen, ylen)", "can be used repeatedly to generate solutions for different boggle", "is None: continue s = prefix + c q.append((cur_sq, s,", "self.trie letters = [None] * self.board_size count = 0 for", "self.board_size, 3) def solve(self, grid): \"\"\"Generate all solutions for the", "used repeatedly to generate solutions for different boggle grids. \"\"\"", "= self._calc_adjacency(self.xlen, self.ylen, parent_sq) for cur_sq in adj: if cur_sq", "grid): \"\"\"Generate all solutions for the given boggle grid. Arguments:", "== 'Q': line.append('Qu') line.append('| ') else: line.append(cell) line.append(' | ')", "l in string[start:]: letters[count] = l count += 1 cur", "on. # Look to left of current cell. if x-1", "all substrings of a word are correctly found. Arguments: string", "_load_dictionary(words_file, max_len, min_len): \"\"\"Private method to create the trie for", "['| '] for x in range(self.xlen): cell = grid[yi+x].upper() if", "import collections import trie if sys.version < '3': range =", "cur_node._is_word: if s[0] == 'q': # Rehydrate q-words with 'u'.", "out. Arguments: xlen -- X dimension (width) of board. ylen", "below current cell. if y+1 < ylim: below = sq", "range = xrange class BoggleSolver(object): \"\"\" This class uses an", "in range(ylim * xlim): # Current cell index = y", "trie for fast word lookup letter-by-letter. 
Words that begin with", "solve(self, grid): \"\"\"Generate all solutions for the given boggle grid.", "for init_sq in range(self.board_size): c = board[init_sq] q.append((init_sq, c, trie.get_child(c),", "for finding words. Arguments: words_file -- Path of file containing", "adjacency matrix. \"\"\" assert(xlen > 1) assert(ylen > 1) self.xlen", "valid words. \"\"\" found = set() for start in range(len(string)):", "start in range(len(string)): cur = self.trie letters = [None] *", "for different boggle grids. \"\"\" def __init__(self, words_file, xlen=4, ylen=4,", "\"\"\" if self.trie is None: raise RuntimeError('words file not loaded')", "+ '---+' * self.xlen) yi = y * self.xlen line", "return adj_list @staticmethod def _calc_adjacency(xlim, ylim, sq): adj = []", "if word[1] != 'u': continue # Remove \"u\" from q-words", "right. Returns: A list of words found in the boggle", "the letters in a boggle grid, from top left to", "> 1) assert(ylen > 1) self.xlen = xlen self.ylen =", "method is not necessary for the boggle solver, but is", "xlim: adj.append(sq + 1) # Look at row below current", "X dimension (width) of board. ylen -- Y dimension (height)", "but is a utility for testing that all substrings of", "of current cell. if x+1 < xlim: adj.append(sq + 1)", "[[]] * (ylim * xlim) for i in range(ylim *", "self.adjacency = None self.trie = BoggleSolver._load_dictionary( words_file, self.board_size, 3) def", "words. \"\"\" found = set() for start in range(len(string)): cur", "adj_list[i] = adj return adj_list @staticmethod def _calc_adjacency(xlim, ylim, sq):", "* xlim + x adj = BoggleSolver._calc_adjacency(xlim, ylim, i) adj_list[i]", "'u'. words.add('qu' + s[1:]) else: words.add(s) return words def show_grid(self,", "bottom right. 
Returns: A list of words found in the", "else: adj = self._calc_adjacency(self.xlen, self.ylen, parent_sq) for cur_sq in adj:", "cur_node = pnode_get_child(c) if cur_node is None: continue s =", "- (y * xlim) # Look at row above current", "self.xlen line = ['| '] for x in range(self.xlen): cell", "are correctly found. Arguments: string -- The string in which", "') print(''.join(line)) print('+' + '---+' * self.xlen) def find_substrings(self, string):", "string[start:]: letters[count] = l count += 1 cur = cur.get_child(l)", "too short. word_len = len(word) if word_len > max_len or", "boggle words. When an instance of this class is created,", "of board. ylen -- Y dimension (height) of board. pre_compute_adj", "Look upper right. if x+1 < xlim: adj.append(above + 1)", "import trie if sys.version < '3': range = xrange class", "+ word[2:] root.insert(word) word_count += 1 finally: f.close() print('Loaded', word_count,", "import gzip f = gzip.open(words_file) elif words_file.endswith('bz2'): import bz2 f", "of a word are correctly found. Arguments: string -- The", "and words that are not within the specified length limits", "c, trie.get_child(c), [init_sq])) while q: parent_sq, prefix, pnode, seen =", "else: word = word.strip().decode(\"utf-8\") # Skip words that are too", "limits are filtered out. Arguments: xlen -- X dimension (width)", "cell = grid[yi+x].upper() if cell == 'Q': line.append('Qu') line.append('| ')", "up an internal dictionary to look up valid boggle answers.", "cur._is_word: found.add(''.join(letters[:count])) if not cur.has_children(): break return found @staticmethod def", "rigth. if x+1 < xlim: adj.append(below + 1) return adj", "given string. This method is not necessary for the boggle", "substrings in the given string. This method is not necessary", "Words that begin with capital letters and words that are", "solve method can be used repeatedly to generate solutions for", "that are too long or too short. 
word_len = len(word)", "') else: line.append(cell) line.append(' | ') print(''.join(line)) print('+' + '---+'", "The class' solve method can be used repeatedly to generate", "grids. \"\"\" def __init__(self, words_file, xlen=4, ylen=4, pre_compute_adj=False): \"\"\"Create and", "created, it sets up an internal dictionary to look up", "if x-1 >= 0: adj.append(below - 1) # Look below.", "-- The string in which to search for valid substrings.", "found = set() for start in range(len(string)): cur = self.trie", "y-1 >= 0: above = sq - xlim # Look", "line.append(cell) line.append(' | ') print(''.join(line)) print('+' + '---+' * self.xlen)", "too long or too short. word_len = len(word) if word_len", "method to create the trie for finding words. Arguments: words_file", "not within the specified length limits are filtered out. Arguments:", "xlen self.ylen = ylen self.board_size = xlen * ylen if", "'] for x in range(self.xlen): cell = grid[yi+x].upper() if cell", "all solutions for the given boggle grid. Arguments: grid --", "self.trie is None: raise RuntimeError('words file not loaded') if len(grid)", "\"\"\" if not os.path.isfile(words_file): raise RuntimeError('words file not found: '", ">= 0: adj.append(below - 1) # Look below. adj.append(below) #", "self.adjacency for init_sq in range(self.board_size): c = board[init_sq] q.append((init_sq, c,", "* xlim) for i in range(ylim * xlim): # Current", "the given boggle grid. Arguments: grid -- A string of", "word.strip().decode(\"utf-8\") # Skip words that are too long or too", "of words found in the boggle grid. None if given", "class is created, it sets up an internal dictionary to", "words starting with q not followed by u. if word[1]", "from top left to bottom right. Returns: A list of", "is not necessary for the boggle solver, but is a", "x-1 >= 0: adj.append(below - 1) # Look below. 
adj.append(below)", "def show_grid(self, grid): \"\"\"Utility method to print a 4x4 boggle", "word[2:] root.insert(word) word_count += 1 finally: f.close() print('Loaded', word_count, 'words", "in a boggle grid, from top left to bottom right.", "Skip words that start with capital letter. if word[0].isupper(): continue", "are not within the specified length limits are filtered out.", "u. if word[1] != 'u': continue # Remove \"u\" from", "3) def solve(self, grid): \"\"\"Generate all solutions for the given", "adj.append(sq - 1) # Look to right of current cell.", "if s[0] == 'q': # Rehydrate q-words with 'u'. words.add('qu'", "-- Pre-compute adjacency matrix. \"\"\" assert(xlen > 1) assert(ylen >", "matched. word = 'q' + word[2:] root.insert(word) word_count += 1", "= set() q = collections.deque() adjs = self.adjacency for init_sq", "for fast word lookup letter-by-letter. Words that begin with capital", "with 'u'. words.add('qu' + s[1:]) else: words.add(s) return words def", "all valid substrings in the given string. This method is", "with capital letter. if word[0].isupper(): continue if word[0] == 'q':", "__init__(self, words_file, xlen=4, ylen=4, pre_compute_adj=False): \"\"\"Create and initialize BoggleSolver instance.", "an internal dictionary to look up valid boggle answers. The", "c = board[cur_sq] cur_node = pnode_get_child(c) if cur_node is None:", "min_len): \"\"\"Private method to create the trie for finding words.", "[] y = int(sq / xlim) x = sq -", "that only the q is matched. word = 'q' +", "[cur_sq])) if cur_node._is_word: if s[0] == 'q': # Rehydrate q-words", "= prefix + c q.append((cur_sq, s, cur_node, seen + [cur_sq]))", "a boggle grid, from top left to bottom right. Returns:", "f.close() print('Loaded', word_count, 'words from file.') return root @staticmethod def", "+ s[1:]) else: words.add(s) return words def show_grid(self, grid): \"\"\"Utility", "y+1 < ylim: below = sq + xlim # Look", "pre_compute_adj -- Pre-compute adjacency matrix. 
\"\"\" assert(xlen > 1) assert(ylen", "= int(sq / xlim) x = sq - (y *", "file as a dictionary of acceptable boggle words. When an", "raise RuntimeError('words file not found: ' + words_file) print('creating dictionary...')", "c q.append((cur_sq, s, cur_node, seen + [cur_sq])) if cur_node._is_word: if", "1 finally: f.close() print('Loaded', word_count, 'words from file.') return root", "instance. This creates the internal trie for fast word lookup", "the internal trie for fast word lookup letter-by-letter. Words that", "which to search for valid substrings. Returns: List of substrings", "boggle grid. None if given invalid grid. \"\"\" if self.trie", "l count += 1 cur = cur.get_child(l) if cur is", "word_count = 0 if words_file.endswith('gz'): import gzip f = gzip.open(words_file)", "f = gzip.open(words_file) elif words_file.endswith('bz2'): import bz2 f = bz2.BZ2File(words_file)", "upper right. if x+1 < xlim: adj.append(above + 1) #", "gzip.open(words_file) elif words_file.endswith('bz2'): import bz2 f = bz2.BZ2File(words_file) else: f", "< min_len: continue # Skip words that start with capital", "range(self.ylen): print('+' + '---+' * self.xlen) yi = y *", "= bz2.BZ2File(words_file) else: f = open(words_file) try: for word in", "string. This method is not necessary for the boggle solver,", "to generate solutions for different boggle grids. \"\"\" def __init__(self,", "adj.append(above - 1) # Look above. adj.append(above) # Look upper", "words.add('qu' + s[1:]) else: words.add(s) return words def show_grid(self, grid):", "of substrings that are valid words. \"\"\" found = set()", "string of 16 characters representing the letters in a boggle", "if given invalid grid. \"\"\" if self.trie is None: raise", "sq - xlim # Look to upper left. if x-1", "adj return adj_list @staticmethod def _calc_adjacency(xlim, ylim, sq): adj =", "characters representing the letters in a boggle grid, from top", "a dictionary of acceptable boggle words. 
When an instance of", "of acceptable boggle words. When an instance of this class", "internal trie for fast word lookup letter-by-letter. Words that begin", "= self.adjacency for init_sq in range(self.board_size): c = board[init_sq] q.append((init_sq,", "words = set() q = collections.deque() adjs = self.adjacency for", "cur_sq in adj: if cur_sq in seen: continue c =", "import sys import collections import trie if sys.version < '3':", "ylen self.board_size = xlen * ylen if pre_compute_adj: self.adjacency =", "set() q = collections.deque() adjs = self.adjacency for init_sq in", "= gzip.open(words_file) elif words_file.endswith('bz2'): import bz2 f = bz2.BZ2File(words_file) else:", "y = int(sq / xlim) x = sq - (y", "# Look upper right. if x+1 < xlim: adj.append(above +", "in seen: continue c = board[cur_sq] cur_node = pnode_get_child(c) if", "Current cell index = y * xlim + x adj", "'---+' * self.xlen) def find_substrings(self, string): \"\"\"Find all valid substrings", "grids. <NAME> 22 Dec. 2009 \"\"\" from __future__ import print_function", "xlim # Look to lower left. if x-1 >= 0:", "f: if sys.version < '3': word = word.strip() else: word", "min_len: continue # Skip words that start with capital letter.", "* ylen if pre_compute_adj: self.adjacency = BoggleSolver._create_adjacency_matrix(xlen, ylen) else: self.adjacency", "1) # Look above. adj.append(above) # Look upper right. if", "c = board[init_sq] q.append((init_sq, c, trie.get_child(c), [init_sq])) while q: parent_sq,", "'words from file.') return root @staticmethod def _create_adjacency_matrix(xlim, ylim): adj_list", "Look below. adj.append(below) # Look to lower rigth. if x+1", "letters in a boggle grid, from top left to bottom", "if adjs: adj = adjs[parent_sq] else: adj = self._calc_adjacency(self.xlen, self.ylen,", "solutions for different boggle grids. \"\"\" def __init__(self, words_file, xlen=4,", "to print a 4x4 boggle grid. 
Arguments: grid -- A", "self.xlen) yi = y * self.xlen line = ['| ']", "search for valid substrings. Returns: List of substrings that are", "if cur._is_word: found.add(''.join(letters[:count])) if not cur.has_children(): break return found @staticmethod", "Count of words inserted into trie. \"\"\" if not os.path.isfile(words_file):", "Look at row above current cell. if y-1 >= 0:", "import print_function import os import sys import collections import trie", "Boggle grids. <NAME> 22 Dec. 2009 \"\"\" from __future__ import", "long or too short. word_len = len(word) if word_len >", "with q not followed by u. if word[1] != 'u':", "< '3': range = xrange class BoggleSolver(object): \"\"\" This class", "current cell is on. # Look to left of current", "from q-words so that only the q is matched. word", "init_sq in range(self.board_size): c = board[init_sq] q.append((init_sq, c, trie.get_child(c), [init_sq]))", "word_len > max_len or word_len < min_len: continue # Skip", "the boggle solver, but is a utility for testing that", "start with capital letter. if word[0].isupper(): continue if word[0] ==", "+ c q.append((cur_sq, s, cur_node, seen + [cur_sq])) if cur_node._is_word:", "cur is None: break if cur._is_word: found.add(''.join(letters[:count])) if not cur.has_children():", "Arguments: xlen -- X dimension (width) of board. ylen --", "if pre_compute_adj: self.adjacency = BoggleSolver._create_adjacency_matrix(xlen, ylen) else: self.adjacency = None", "# Look above. adj.append(above) # Look upper right. if x+1", "q.append((cur_sq, s, cur_node, seen + [cur_sq])) if cur_node._is_word: if s[0]", "is created, it sets up an internal dictionary to look", "count = 0 for l in string[start:]: letters[count] = l", "cur.get_child(l) if cur is None: break if cur._is_word: found.add(''.join(letters[:count])) if", "board[init_sq] q.append((init_sq, c, trie.get_child(c), [init_sq])) while q: parent_sq, prefix, pnode,", "print a 4x4 boggle grid. 
Arguments: grid -- A string", "= self.trie letters = [None] * self.board_size count = 0", "return found @staticmethod def _load_dictionary(words_file, max_len, min_len): \"\"\"Private method to", "@staticmethod def _create_adjacency_matrix(xlim, ylim): adj_list = [[]] * (ylim *", "into trie. \"\"\" if not os.path.isfile(words_file): raise RuntimeError('words file not", "row above current cell. if y-1 >= 0: above =", "string of X*Y characters representing the letters in a boggle", "0: above = sq - xlim # Look to upper", "row below current cell. if y+1 < ylim: below =", "word = 'q' + word[2:] root.insert(word) word_count += 1 finally:", "xlim + x adj = BoggleSolver._calc_adjacency(xlim, ylim, i) adj_list[i] =", "\"\"\" from __future__ import print_function import os import sys import", "is None: raise RuntimeError('words file not loaded') if len(grid) !=", "collections import trie if sys.version < '3': range = xrange", "len(grid) != self.board_size: raise RuntimeError('invalid board') board = list(grid) trie", "def _calc_adjacency(xlim, ylim, sq): adj = [] y = int(sq", "self.board_size count = 0 for l in string[start:]: letters[count] =", "bottom right. \"\"\" for y in range(self.ylen): print('+' + '---+'", "Arguments: grid -- A string of 16 characters representing the", "are valid words. \"\"\" found = set() for start in", "finally: f.close() print('Loaded', word_count, 'words from file.') return root @staticmethod", "letter. if word[0].isupper(): continue if word[0] == 'q': # Skip", "adj.append(above) # Look upper right. if x+1 < xlim: adj.append(above", "to look up valid boggle answers. The class' solve method", "sq): adj = [] y = int(sq / xlim) x", "# Look to upper left. if x-1 >= 0: adj.append(above", "from file.') return root @staticmethod def _create_adjacency_matrix(xlim, ylim): adj_list =", "Path of file containing words for reference. 
Return: Count of", "adj = BoggleSolver._calc_adjacency(xlim, ylim, i) adj_list[i] = adj return adj_list", "xrange class BoggleSolver(object): \"\"\" This class uses an external words", "the q is matched. word = 'q' + word[2:] root.insert(word)", "open(words_file) try: for word in f: if sys.version < '3':", "from top left to bottom right. \"\"\" for y in", "= adj return adj_list @staticmethod def _calc_adjacency(xlim, ylim, sq): adj", "ylen -- Y dimension (height) of board. pre_compute_adj -- Pre-compute", "= BoggleSolver._load_dictionary( words_file, self.board_size, 3) def solve(self, grid): \"\"\"Generate all", "left to bottom right. Returns: A list of words found", "dictionary of acceptable boggle words. When an instance of this", "words_file.endswith('gz'): import gzip f = gzip.open(words_file) elif words_file.endswith('bz2'): import bz2", "a 4x4 boggle grid. Arguments: grid -- A string of", "words_file -- Path of file containing words for reference. Return:", "for Boggle grids. <NAME> 22 Dec. 2009 \"\"\" from __future__", "not found: ' + words_file) print('creating dictionary...') root = trie.Trie()", "line.append('| ') else: line.append(cell) line.append(' | ') print(''.join(line)) print('+' +", "< xlim: adj.append(sq + 1) # Look at row below", "board. ylen -- Y dimension (height) of board. pre_compute_adj --", "# Skip words that start with capital letter. if word[0].isupper():", "/ xlim) x = sq - (y * xlim) #", "= cur.get_child(l) if cur is None: break if cur._is_word: found.add(''.join(letters[:count]))", "trie. \"\"\" if not os.path.isfile(words_file): raise RuntimeError('words file not found:", "= q.popleft() pnode_get_child = pnode.get_child if adjs: adj = adjs[parent_sq]", "in range(self.xlen): cell = grid[yi+x].upper() if cell == 'Q': line.append('Qu')", "to right of current cell. if x+1 < xlim: adj.append(sq", "to create the trie for finding words. Arguments: words_file --", "different boggle grids. 
\"\"\" def __init__(self, words_file, xlen=4, ylen=4, pre_compute_adj=False):", "an external words file as a dictionary of acceptable boggle", "be used repeatedly to generate solutions for different boggle grids.", "that begin with capital letters and words that are not", "q-words with 'u'. words.add('qu' + s[1:]) else: words.add(s) return words", "that are valid words. \"\"\" found = set() for start", "words that are too long or too short. word_len =", "if len(grid) != self.board_size: raise RuntimeError('invalid board') board = list(grid)", "adjs: adj = adjs[parent_sq] else: adj = self._calc_adjacency(self.xlen, self.ylen, parent_sq)", "word[0].isupper(): continue if word[0] == 'q': # Skip words starting", "This method is not necessary for the boggle solver, but", "root @staticmethod def _create_adjacency_matrix(xlim, ylim): adj_list = [[]] * (ylim", "cur_node is None: continue s = prefix + c q.append((cur_sq,", "words inserted into trie. \"\"\" if not os.path.isfile(words_file): raise RuntimeError('words", "RuntimeError('invalid board') board = list(grid) trie = self.trie words =", "that all substrings of a word are correctly found. Arguments:", "words. Arguments: words_file -- Path of file containing words for", "dimension (height) of board. pre_compute_adj -- Pre-compute adjacency matrix. \"\"\"", "grid, from top left to bottom right. Returns: A list", "short. word_len = len(word) if word_len > max_len or word_len", "q-words so that only the q is matched. word =", "the trie for finding words. Arguments: words_file -- Path of", "(y * xlim) # Look at row above current cell.", "adj.append(above + 1) # Look at same row that current", "words found in the boggle grid. None if given invalid", "len(word) if word_len > max_len or word_len < min_len: continue", "cell. if x+1 < xlim: adj.append(sq + 1) # Look", "specified length limits are filtered out. Arguments: xlen -- X", "above current cell. 
if y-1 >= 0: above = sq", "adj.append(sq + 1) # Look at row below current cell.", "Returns: A list of words found in the boggle grid.", "collections.deque() adjs = self.adjacency for init_sq in range(self.board_size): c =", "0: adj.append(below - 1) # Look below. adj.append(below) # Look", "for cur_sq in adj: if cur_sq in seen: continue c", "1) self.xlen = xlen self.ylen = ylen self.board_size = xlen", "is matched. word = 'q' + word[2:] root.insert(word) word_count +=", "os import sys import collections import trie if sys.version <", "if x-1 >= 0: adj.append(sq - 1) # Look to", "parent_sq) for cur_sq in adj: if cur_sq in seen: continue", "def solve(self, grid): \"\"\"Generate all solutions for the given boggle", "def _load_dictionary(words_file, max_len, min_len): \"\"\"Private method to create the trie", "x in range(self.xlen): cell = grid[yi+x].upper() if cell == 'Q':", "at row below current cell. if y+1 < ylim: below", "line.append(' | ') print(''.join(line)) print('+' + '---+' * self.xlen) def", "A string of 16 characters representing the letters in a", "@staticmethod def _load_dictionary(words_file, max_len, min_len): \"\"\"Private method to create the", "+ '---+' * self.xlen) def find_substrings(self, string): \"\"\"Find all valid", "trie = self.trie words = set() q = collections.deque() adjs", "loaded') if len(grid) != self.board_size: raise RuntimeError('invalid board') board =", "x-1 >= 0: adj.append(sq - 1) # Look to right", "adjs[parent_sq] else: adj = self._calc_adjacency(self.xlen, self.ylen, parent_sq) for cur_sq in", "range(ylim * xlim): # Current cell index = y *", "pnode_get_child(c) if cur_node is None: continue s = prefix +", "cur.has_children(): break return found @staticmethod def _load_dictionary(words_file, max_len, min_len): \"\"\"Private", "line = ['| '] for x in range(self.xlen): cell =", "if sys.version < '3': range = xrange class BoggleSolver(object): \"\"\"", "# Look to right of current cell. 
if x+1 <", "if cell == 'Q': line.append('Qu') line.append('| ') else: line.append(cell) line.append('", "x+1 < xlim: adj.append(above + 1) # Look at same", "+ words_file) print('creating dictionary...') root = trie.Trie() word_count = 0", "| ') print(''.join(line)) print('+' + '---+' * self.xlen) def find_substrings(self,", "cell == 'Q': line.append('Qu') line.append('| ') else: line.append(cell) line.append(' |", "\"\"\" Module to generate solutions for Boggle grids. <NAME> 22", "xlen * ylen if pre_compute_adj: self.adjacency = BoggleSolver._create_adjacency_matrix(xlen, ylen) else:", "continue c = board[cur_sq] cur_node = pnode_get_child(c) if cur_node is", "# Rehydrate q-words with 'u'. words.add('qu' + s[1:]) else: words.add(s)", "right. \"\"\" for y in range(self.ylen): print('+' + '---+' *", "inserted into trie. \"\"\" if not os.path.isfile(words_file): raise RuntimeError('words file", "given boggle grid. Arguments: grid -- A string of 16", "within the specified length limits are filtered out. Arguments: xlen", "word_len < min_len: continue # Skip words that start with", "seen: continue c = board[cur_sq] cur_node = pnode_get_child(c) if cur_node", "so that only the q is matched. word = 'q'", "- 1) # Look above. adj.append(above) # Look upper right.", "self.ylen, parent_sq) for cur_sq in adj: if cur_sq in seen:", "= sq - xlim # Look to upper left. if", "of X*Y characters representing the letters in a boggle grid,", "xlim: adj.append(above + 1) # Look at same row that", "# Look to lower rigth. if x+1 < xlim: adj.append(below", "A string of X*Y characters representing the letters in a", "for the boggle solver, but is a utility for testing", "-- X dimension (width) of board. 
ylen -- Y dimension", "'Q': line.append('Qu') line.append('| ') else: line.append(cell) line.append(' | ') print(''.join(line))", "range(self.board_size): c = board[init_sq] q.append((init_sq, c, trie.get_child(c), [init_sq])) while q:", "\"\"\" This class uses an external words file as a", "+ x adj = BoggleSolver._calc_adjacency(xlim, ylim, i) adj_list[i] = adj", "= xrange class BoggleSolver(object): \"\"\" This class uses an external", "that are not within the specified length limits are filtered", "= BoggleSolver._calc_adjacency(xlim, ylim, i) adj_list[i] = adj return adj_list @staticmethod", "16 characters representing the letters in a boggle grid, from", "count += 1 cur = cur.get_child(l) if cur is None:", "else: self.adjacency = None self.trie = BoggleSolver._load_dictionary( words_file, self.board_size, 3)", "adjs = self.adjacency for init_sq in range(self.board_size): c = board[init_sq]", "a boggle grid, from top left to bottom right. \"\"\"", "xlim): # Current cell index = y * xlim +", "BoggleSolver._create_adjacency_matrix(xlen, ylen) else: self.adjacency = None self.trie = BoggleSolver._load_dictionary( words_file,", "list(grid) trie = self.trie words = set() q = collections.deque()", ">= 0: adj.append(above - 1) # Look above. adj.append(above) #", "adj = [] y = int(sq / xlim) x =", "raise RuntimeError('words file not loaded') if len(grid) != self.board_size: raise", "file not loaded') if len(grid) != self.board_size: raise RuntimeError('invalid board')", "file not found: ' + words_file) print('creating dictionary...') root =", "else: f = open(words_file) try: for word in f: if", "boggle grid, from top left to bottom right. \"\"\" for", "1) # Look at same row that current cell is", "# Look at same row that current cell is on.", "- xlim # Look to upper left. if x-1 >=", "'u': continue # Remove \"u\" from q-words so that only", "grid. \"\"\" if self.trie is None: raise RuntimeError('words file not", "substrings. 
Returns: List of substrings that are valid words. \"\"\"", "Look at row below current cell. if y+1 < ylim:", "xlen=4, ylen=4, pre_compute_adj=False): \"\"\"Create and initialize BoggleSolver instance. This creates", "repeatedly to generate solutions for different boggle grids. \"\"\" def", "parent_sq, prefix, pnode, seen = q.popleft() pnode_get_child = pnode.get_child if", "if cur_node is None: continue s = prefix + c", "adj.append(below - 1) # Look below. adj.append(below) # Look to", "cur_sq in seen: continue c = board[cur_sq] cur_node = pnode_get_child(c)", "file.') return root @staticmethod def _create_adjacency_matrix(xlim, ylim): adj_list = [[]]", "ylen=4, pre_compute_adj=False): \"\"\"Create and initialize BoggleSolver instance. This creates the", "with capital letters and words that are not within the", "if word[0].isupper(): continue if word[0] == 'q': # Skip words", "look up valid boggle answers. The class' solve method can", "gzip f = gzip.open(words_file) elif words_file.endswith('bz2'): import bz2 f =", "def find_substrings(self, string): \"\"\"Find all valid substrings in the given", "# Look to lower left. if x-1 >= 0: adj.append(below", "= None self.trie = BoggleSolver._load_dictionary( words_file, self.board_size, 3) def solve(self,", "q = collections.deque() adjs = self.adjacency for init_sq in range(self.board_size):", "A list of words found in the boggle grid. None", "x = sq - (y * xlim) # Look at", "boggle solver, but is a utility for testing that all", "not loaded') if len(grid) != self.board_size: raise RuntimeError('invalid board') board", "\"\"\"Utility method to print a 4x4 boggle grid. Arguments: grid", "pre_compute_adj=False): \"\"\"Create and initialize BoggleSolver instance. This creates the internal", "prefix, pnode, seen = q.popleft() pnode_get_child = pnode.get_child if adjs:", "+ xlim # Look to lower left. 
if x-1 >=", "in f: if sys.version < '3': word = word.strip() else:", "= open(words_file) try: for word in f: if sys.version <", "# Skip words starting with q not followed by u.", "for y in range(self.ylen): print('+' + '---+' * self.xlen) yi", "valid substrings in the given string. This method is not", "== 'q': # Rehydrate q-words with 'u'. words.add('qu' + s[1:])", "in which to search for valid substrings. Returns: List of", "in adj: if cur_sq in seen: continue c = board[cur_sq]", "current cell. if y-1 >= 0: above = sq -", "When an instance of this class is created, it sets", "[init_sq])) while q: parent_sq, prefix, pnode, seen = q.popleft() pnode_get_child", "bz2.BZ2File(words_file) else: f = open(words_file) try: for word in f:", "at same row that current cell is on. # Look", "BoggleSolver instance. This creates the internal trie for fast word", "generate solutions for different boggle grids. \"\"\" def __init__(self, words_file,", "in range(len(string)): cur = self.trie letters = [None] * self.board_size", "__future__ import print_function import os import sys import collections import", "to lower left. if x-1 >= 0: adj.append(below - 1)", "self.xlen) def find_substrings(self, string): \"\"\"Find all valid substrings in the", "solutions for the given boggle grid. Arguments: grid -- A", "-- A string of 16 characters representing the letters in", "string in which to search for valid substrings. Returns: List", "0: adj.append(sq - 1) # Look to right of current", "self._calc_adjacency(self.xlen, self.ylen, parent_sq) for cur_sq in adj: if cur_sq in", "lookup letter-by-letter. Words that begin with capital letters and words", "if x+1 < xlim: adj.append(sq + 1) # Look at", "i in range(ylim * xlim): # Current cell index =", "show_grid(self, grid): \"\"\"Utility method to print a 4x4 boggle grid.", "lower left. if x-1 >= 0: adj.append(below - 1) #", "initialize BoggleSolver instance. 
This creates the internal trie for fast", "trie.get_child(c), [init_sq])) while q: parent_sq, prefix, pnode, seen = q.popleft()", "break return found @staticmethod def _load_dictionary(words_file, max_len, min_len): \"\"\"Private method", "BoggleSolver(object): \"\"\" This class uses an external words file as", "line.append('Qu') line.append('| ') else: line.append(cell) line.append(' | ') print(''.join(line)) print('+'", "found. Arguments: string -- The string in which to search", "- 1) # Look to right of current cell. if", "a utility for testing that all substrings of a word", "* xlim): # Current cell index = y * xlim", "xlim) x = sq - (y * xlim) # Look", "not followed by u. if word[1] != 'u': continue #", "cell. if y+1 < ylim: below = sq + xlim", "* (ylim * xlim) for i in range(ylim * xlim):", "= pnode_get_child(c) if cur_node is None: continue s = prefix", "pnode.get_child if adjs: adj = adjs[parent_sq] else: adj = self._calc_adjacency(self.xlen,", "left. if x-1 >= 0: adj.append(below - 1) # Look", "if sys.version < '3': word = word.strip() else: word =", "for reference. Return: Count of words inserted into trie. \"\"\"", "to left of current cell. if x-1 >= 0: adj.append(sq", "seen = q.popleft() pnode_get_child = pnode.get_child if adjs: adj =", "xlim # Look to upper left. if x-1 >= 0:", "sets up an internal dictionary to look up valid boggle", "s = prefix + c q.append((cur_sq, s, cur_node, seen +", "= self.trie words = set() q = collections.deque() adjs =", "< '3': word = word.strip() else: word = word.strip().decode(\"utf-8\") #", "Remove \"u\" from q-words so that only the q is", "if cur_node._is_word: if s[0] == 'q': # Rehydrate q-words with", "words that are not within the specified length limits are", "it sets up an internal dictionary to look up valid", "boggle answers. The class' solve method can be used repeatedly", "Arguments: grid -- A string of X*Y characters representing the", "xlen -- X dimension (width) of board. ylen -- Y", "grid. 
Arguments: grid -- A string of 16 characters representing", "grid[yi+x].upper() if cell == 'Q': line.append('Qu') line.append('| ') else: line.append(cell)", "string -- The string in which to search for valid", "= word.strip() else: word = word.strip().decode(\"utf-8\") # Skip words that", "BoggleSolver._calc_adjacency(xlim, ylim, i) adj_list[i] = adj return adj_list @staticmethod def", "left. if x-1 >= 0: adj.append(above - 1) # Look", "board. pre_compute_adj -- Pre-compute adjacency matrix. \"\"\" assert(xlen > 1)", "None: raise RuntimeError('words file not loaded') if len(grid) != self.board_size:", "word lookup letter-by-letter. Words that begin with capital letters and", "This creates the internal trie for fast word lookup letter-by-letter.", "word.strip() else: word = word.strip().decode(\"utf-8\") # Skip words that are", "adj = self._calc_adjacency(self.xlen, self.ylen, parent_sq) for cur_sq in adj: if", "not cur.has_children(): break return found @staticmethod def _load_dictionary(words_file, max_len, min_len):", "x+1 < xlim: adj.append(sq + 1) # Look at row", "1) # Look below. adj.append(below) # Look to lower rigth.", "answers. The class' solve method can be used repeatedly to", "if not cur.has_children(): break return found @staticmethod def _load_dictionary(words_file, max_len,", "else: words.add(s) return words def show_grid(self, grid): \"\"\"Utility method to", "adj.append(below) # Look to lower rigth. if x+1 < xlim:", "< xlim: adj.append(above + 1) # Look at same row", "List of substrings that are valid words. \"\"\" found =", "+= 1 finally: f.close() print('Loaded', word_count, 'words from file.') return", "q.append((init_sq, c, trie.get_child(c), [init_sq])) while q: parent_sq, prefix, pnode, seen", "letters = [None] * self.board_size count = 0 for l", "Look at same row that current cell is on. #", "print(''.join(line)) print('+' + '---+' * self.xlen) def find_substrings(self, string): \"\"\"Find", "substrings of a word are correctly found. 
Arguments: string --", "words that start with capital letter. if word[0].isupper(): continue if", "Skip words that are too long or too short. word_len", "# Remove \"u\" from q-words so that only the q", "y in range(self.ylen): print('+' + '---+' * self.xlen) yi =", "self.board_size: raise RuntimeError('invalid board') board = list(grid) trie = self.trie", "Look to left of current cell. if x-1 >= 0:", "cell. if y-1 >= 0: above = sq - xlim", "elif words_file.endswith('bz2'): import bz2 f = bz2.BZ2File(words_file) else: f =", "if not os.path.isfile(words_file): raise RuntimeError('words file not found: ' +", "return root @staticmethod def _create_adjacency_matrix(xlim, ylim): adj_list = [[]] *", "BoggleSolver._load_dictionary( words_file, self.board_size, 3) def solve(self, grid): \"\"\"Generate all solutions", "not os.path.isfile(words_file): raise RuntimeError('words file not found: ' + words_file)", "continue # Skip words that start with capital letter. if", "this class is created, it sets up an internal dictionary", "y * xlim + x adj = BoggleSolver._calc_adjacency(xlim, ylim, i)", "if x+1 < xlim: adj.append(above + 1) # Look at", "letter-by-letter. Words that begin with capital letters and words that", "Dec. 2009 \"\"\" from __future__ import print_function import os import", "to bottom right. Returns: A list of words found in", "x-1 >= 0: adj.append(above - 1) # Look above. adj.append(above)", "words def show_grid(self, grid): \"\"\"Utility method to print a 4x4", "above = sq - xlim # Look to upper left.", "try: for word in f: if sys.version < '3': word", "for l in string[start:]: letters[count] = l count += 1", "This class uses an external words file as a dictionary", "pre_compute_adj: self.adjacency = BoggleSolver._create_adjacency_matrix(xlen, ylen) else: self.adjacency = None self.trie", "or too short. 
word_len = len(word) if word_len > max_len", "is None: break if cur._is_word: found.add(''.join(letters[:count])) if not cur.has_children(): break", "> 1) self.xlen = xlen self.ylen = ylen self.board_size =", "assert(xlen > 1) assert(ylen > 1) self.xlen = xlen self.ylen", "def __init__(self, words_file, xlen=4, ylen=4, pre_compute_adj=False): \"\"\"Create and initialize BoggleSolver", "= trie.Trie() word_count = 0 if words_file.endswith('gz'): import gzip f", "1) # Look to right of current cell. if x+1", "below. adj.append(below) # Look to lower rigth. if x+1 <", "print_function import os import sys import collections import trie if", "1) assert(ylen > 1) self.xlen = xlen self.ylen = ylen", "22 Dec. 2009 \"\"\" from __future__ import print_function import os", "<NAME> 22 Dec. 2009 \"\"\" from __future__ import print_function import", "words.add(s) return words def show_grid(self, grid): \"\"\"Utility method to print", "words for reference. Return: Count of words inserted into trie.", "if cur is None: break if cur._is_word: found.add(''.join(letters[:count])) if not", ">= 0: adj.append(sq - 1) # Look to right of", "q.popleft() pnode_get_child = pnode.get_child if adjs: adj = adjs[parent_sq] else:", "X*Y characters representing the letters in a boggle grid, from", "\"\"\"Find all valid substrings in the given string. This method", "= [] y = int(sq / xlim) x = sq", "instance of this class is created, it sets up an", "letters[count] = l count += 1 cur = cur.get_child(l) if", "RuntimeError('words file not loaded') if len(grid) != self.board_size: raise RuntimeError('invalid", "word in f: if sys.version < '3': word = word.strip()", "= [[]] * (ylim * xlim) for i in range(ylim", "cur_node, seen + [cur_sq])) if cur_node._is_word: if s[0] == 'q':", "that current cell is on. # Look to left of", "self.trie = BoggleSolver._load_dictionary( words_file, self.board_size, 3) def solve(self, grid): \"\"\"Generate", "and initialize BoggleSolver instance. 
This creates the internal trie for", "= ['| '] for x in range(self.xlen): cell = grid[yi+x].upper()", "= board[init_sq] q.append((init_sq, c, trie.get_child(c), [init_sq])) while q: parent_sq, prefix,", "ylim: below = sq + xlim # Look to lower", "of 16 characters representing the letters in a boggle grid,", "top left to bottom right. Returns: A list of words", "# Look at row above current cell. if y-1 >=", "is a utility for testing that all substrings of a", "root.insert(word) word_count += 1 finally: f.close() print('Loaded', word_count, 'words from", "None: continue s = prefix + c q.append((cur_sq, s, cur_node,", "uses an external words file as a dictionary of acceptable", "if self.trie is None: raise RuntimeError('words file not loaded') if", "Arguments: string -- The string in which to search for", "from __future__ import print_function import os import sys import collections", "if word[0] == 'q': # Skip words starting with q", "if x-1 >= 0: adj.append(above - 1) # Look above.", "capital letter. if word[0].isupper(): continue if word[0] == 'q': #", "\"u\" from q-words so that only the q is matched.", "representing the letters in a boggle grid, from top left", "in range(self.ylen): print('+' + '---+' * self.xlen) yi = y", "words file as a dictionary of acceptable boggle words. When", "string): \"\"\"Find all valid substrings in the given string. This", "grid): \"\"\"Utility method to print a 4x4 boggle grid. Arguments:", "necessary for the boggle solver, but is a utility for", "solutions for Boggle grids. <NAME> 22 Dec. 
2009 \"\"\" from", "word = word.strip() else: word = word.strip().decode(\"utf-8\") # Skip words", "if word_len > max_len or word_len < min_len: continue #", "for start in range(len(string)): cur = self.trie letters = [None]", "_create_adjacency_matrix(xlim, ylim): adj_list = [[]] * (ylim * xlim) for", "grid -- A string of 16 characters representing the letters", "0 if words_file.endswith('gz'): import gzip f = gzip.open(words_file) elif words_file.endswith('bz2'):", "if cur_sq in seen: continue c = board[cur_sq] cur_node =", "are too long or too short. word_len = len(word) if", "print('+' + '---+' * self.xlen) yi = y * self.xlen", "None if given invalid grid. \"\"\" if self.trie is None:", "for i in range(ylim * xlim): # Current cell index", "the given string. This method is not necessary for the", "-- Path of file containing words for reference. Return: Count", "found in the boggle grid. None if given invalid grid.", "correctly found. Arguments: string -- The string in which to", "at row above current cell. if y-1 >= 0: above", "* xlim) # Look at row above current cell. if", "max_len, min_len): \"\"\"Private method to create the trie for finding", "Skip words starting with q not followed by u. if", "-- Y dimension (height) of board. pre_compute_adj -- Pre-compute adjacency", "create the trie for finding words. Arguments: words_file -- Path", "self.trie words = set() q = collections.deque() adjs = self.adjacency", "- 1) # Look below. adj.append(below) # Look to lower", "s[1:]) else: words.add(s) return words def show_grid(self, grid): \"\"\"Utility method", "bz2 f = bz2.BZ2File(words_file) else: f = open(words_file) try: for", "None self.trie = BoggleSolver._load_dictionary( words_file, self.board_size, 3) def solve(self, grid):", "!= 'u': continue # Remove \"u\" from q-words so that", "given invalid grid. 
\"\"\" if self.trie is None: raise RuntimeError('words", "sys import collections import trie if sys.version < '3': range", "while q: parent_sq, prefix, pnode, seen = q.popleft() pnode_get_child =", "= list(grid) trie = self.trie words = set() q =", "that start with capital letter. if word[0].isupper(): continue if word[0]", ">= 0: above = sq - xlim # Look to", "ylen) else: self.adjacency = None self.trie = BoggleSolver._load_dictionary( words_file, self.board_size,", "Module to generate solutions for Boggle grids. <NAME> 22 Dec.", "current cell. if x+1 < xlim: adj.append(sq + 1) #", "testing that all substrings of a word are correctly found.", "'---+' * self.xlen) yi = y * self.xlen line =", "board') board = list(grid) trie = self.trie words = set()", "* self.board_size count = 0 for l in string[start:]: letters[count]", "= len(word) if word_len > max_len or word_len < min_len:", "to generate solutions for Boggle grids. <NAME> 22 Dec. 2009", "q is matched. word = 'q' + word[2:] root.insert(word) word_count", "continue if word[0] == 'q': # Skip words starting with", "Look to upper left. if x-1 >= 0: adj.append(above -", "\"\"\" def __init__(self, words_file, xlen=4, ylen=4, pre_compute_adj=False): \"\"\"Create and initialize", "continue # Remove \"u\" from q-words so that only the", "(ylim * xlim) for i in range(ylim * xlim): #", "below = sq + xlim # Look to lower left.", "self.board_size = xlen * ylen if pre_compute_adj: self.adjacency = BoggleSolver._create_adjacency_matrix(xlen,", "i) adj_list[i] = adj return adj_list @staticmethod def _calc_adjacency(xlim, ylim,", "print('creating dictionary...') root = trie.Trie() word_count = 0 if words_file.endswith('gz'):", "return words def show_grid(self, grid): \"\"\"Utility method to print a", "words_file, xlen=4, ylen=4, pre_compute_adj=False): \"\"\"Create and initialize BoggleSolver instance. This", "xlim) for i in range(ylim * xlim): # Current cell", "for the given boggle grid. 
Arguments: grid -- A string", "boggle grid, from top left to bottom right. Returns: A", "= y * self.xlen line = ['| '] for x", "boggle grid. Arguments: grid -- A string of 16 characters", "in range(self.board_size): c = board[init_sq] q.append((init_sq, c, trie.get_child(c), [init_sq])) while", "Look to lower left. if x-1 >= 0: adj.append(below -", "assert(ylen > 1) self.xlen = xlen self.ylen = ylen self.board_size", "set() for start in range(len(string)): cur = self.trie letters =", "x adj = BoggleSolver._calc_adjacency(xlim, ylim, i) adj_list[i] = adj return", "* self.xlen) yi = y * self.xlen line = ['|", "above. adj.append(above) # Look upper right. if x+1 < xlim:", "to search for valid substrings. Returns: List of substrings that", "not necessary for the boggle solver, but is a utility", "break if cur._is_word: found.add(''.join(letters[:count])) if not cur.has_children(): break return found", "of words inserted into trie. \"\"\" if not os.path.isfile(words_file): raise", "\"\"\"Private method to create the trie for finding words. Arguments:", "sq + xlim # Look to lower left. if x-1", "words_file) print('creating dictionary...') root = trie.Trie() word_count = 0 if", "cell is on. # Look to left of current cell.", "up valid boggle answers. The class' solve method can be", "= board[cur_sq] cur_node = pnode_get_child(c) if cur_node is None: continue", "None: break if cur._is_word: found.add(''.join(letters[:count])) if not cur.has_children(): break return", "grid, from top left to bottom right. \"\"\" for y", "internal dictionary to look up valid boggle answers. The class'", "pnode_get_child = pnode.get_child if adjs: adj = adjs[parent_sq] else: adj", "q not followed by u. if word[1] != 'u': continue", "word_count += 1 finally: f.close() print('Loaded', word_count, 'words from file.')", "if y-1 >= 0: above = sq - xlim #", "left of current cell. 
if x-1 >= 0: adj.append(sq -", "external words file as a dictionary of acceptable boggle words.", "starting with q not followed by u. if word[1] !=", "range(len(string)): cur = self.trie letters = [None] * self.board_size count", "right of current cell. if x+1 < xlim: adj.append(sq +", "cur = self.trie letters = [None] * self.board_size count =", "range(self.xlen): cell = grid[yi+x].upper() if cell == 'Q': line.append('Qu') line.append('|", "class' solve method can be used repeatedly to generate solutions", "begin with capital letters and words that are not within", "in the boggle grid. None if given invalid grid. \"\"\"", "word = word.strip().decode(\"utf-8\") # Skip words that are too long", "-- A string of X*Y characters representing the letters in", "= 0 if words_file.endswith('gz'): import gzip f = gzip.open(words_file) elif", "followed by u. if word[1] != 'u': continue # Remove", "current cell. if y+1 < ylim: below = sq +", "capital letters and words that are not within the specified", "q: parent_sq, prefix, pnode, seen = q.popleft() pnode_get_child = pnode.get_child", "length limits are filtered out. Arguments: xlen -- X dimension", "dictionary to look up valid boggle answers. The class' solve", "os.path.isfile(words_file): raise RuntimeError('words file not found: ' + words_file) print('creating", "of board. pre_compute_adj -- Pre-compute adjacency matrix. \"\"\" assert(xlen >", "fast word lookup letter-by-letter. Words that begin with capital letters", "adj_list = [[]] * (ylim * xlim) for i in", "words_file.endswith('bz2'): import bz2 f = bz2.BZ2File(words_file) else: f = open(words_file)", "+ 1) # Look at same row that current cell", "yi = y * self.xlen line = ['| '] for", "else: line.append(cell) line.append(' | ') print(''.join(line)) print('+' + '---+' *", "is on. # Look to left of current cell. if", "'q': # Rehydrate q-words with 'u'. 
words.add('qu' + s[1:]) else:", "cur = cur.get_child(l) if cur is None: break if cur._is_word:", "import bz2 f = bz2.BZ2File(words_file) else: f = open(words_file) try:", "= sq + xlim # Look to lower left. if", "method can be used repeatedly to generate solutions for different", "cell index = y * xlim + x adj =", "Returns: List of substrings that are valid words. \"\"\" found", "lower rigth. if x+1 < xlim: adj.append(below + 1) return", "' + words_file) print('creating dictionary...') root = trie.Trie() word_count =", "max_len or word_len < min_len: continue # Skip words that", "boggle grid. Arguments: grid -- A string of X*Y characters", "word are correctly found. Arguments: string -- The string in", "adj: if cur_sq in seen: continue c = board[cur_sq] cur_node", "cell. if x-1 >= 0: adj.append(sq - 1) # Look", "generate solutions for Boggle grids. <NAME> 22 Dec. 2009 \"\"\"", "list of words found in the boggle grid. None if", "'3': range = xrange class BoggleSolver(object): \"\"\" This class uses", "for x in range(self.xlen): cell = grid[yi+x].upper() if cell ==", "> max_len or word_len < min_len: continue # Skip words", "sys.version < '3': word = word.strip() else: word = word.strip().decode(\"utf-8\")", "= collections.deque() adjs = self.adjacency for init_sq in range(self.board_size): c", "sq - (y * xlim) # Look at row above", "adj = adjs[parent_sq] else: adj = self._calc_adjacency(self.xlen, self.ylen, parent_sq) for", "The string in which to search for valid substrings. Returns:", "= grid[yi+x].upper() if cell == 'Q': line.append('Qu') line.append('| ') else:", "of this class is created, it sets up an internal" ]
[ "PagerDuty when Placement group on one OSD is corrupted and", "@managed_service_required @skipif_ms_consumer @pytest.mark.polarion_id(\"OCS-2771\") def test_corrupt_pg_pd(measure_corrupt_pg): \"\"\" Test that there is", "when the corrupted ceph pool is removed. \"\"\" api =", "pool is removed. \"\"\" api = pagerduty.PagerDutyAPI() # get incidents", "( managed_service_required, skipif_ms_consumer, tier4, tier4a, ) from ocs_ci.ocs import constants", "scaled down incidents = measure_corrupt_pg.get(\"pagerduty_incidents\") target_label = constants.ALERT_CLUSTERERRORSTATE # TODO(fbalak):", "on one OSD is corrupted and that this incident is", "= measure_corrupt_pg.get(\"pagerduty_incidents\") target_label = constants.ALERT_CLUSTERERRORSTATE # TODO(fbalak): check the whole", "skipif_ms_consumer, tier4, tier4a, ) from ocs_ci.ocs import constants from ocs_ci.utility", "time when manager deployment was scaled down incidents = measure_corrupt_pg.get(\"pagerduty_incidents\")", "group on one OSD is corrupted and that this incident", "is cleared when the corrupted ceph pool is removed. 
\"\"\"", "pagerduty log = logging.getLogger(__name__) @tier4 @tier4a @managed_service_required @skipif_ms_consumer @pytest.mark.polarion_id(\"OCS-2771\") def", "manager deployment was scaled down incidents = measure_corrupt_pg.get(\"pagerduty_incidents\") target_label =", "in PagerDuty when Placement group on one OSD is corrupted", "one OSD is corrupted and that this incident is cleared", "down incidents = measure_corrupt_pg.get(\"pagerduty_incidents\") target_label = constants.ALERT_CLUSTERERRORSTATE # TODO(fbalak): check", "logging import pytest from ocs_ci.framework.testlib import ( managed_service_required, skipif_ms_consumer, tier4,", "pytest from ocs_ci.framework.testlib import ( managed_service_required, skipif_ms_consumer, tier4, tier4a, )", "test_corrupt_pg_pd(measure_corrupt_pg): \"\"\" Test that there is appropriate incident in PagerDuty", "incident alerts assert pagerduty.check_incident_list( summary=target_label, incidents=incidents, urgency=\"high\", ) api.check_incident_cleared( summary=target_label,", "import pytest from ocs_ci.framework.testlib import ( managed_service_required, skipif_ms_consumer, tier4, tier4a,", "was scaled down incidents = measure_corrupt_pg.get(\"pagerduty_incidents\") target_label = constants.ALERT_CLUSTERERRORSTATE #", "removed. 
\"\"\" api = pagerduty.PagerDutyAPI() # get incidents from time", "the whole string in summary and incident alerts assert pagerduty.check_incident_list(", "= logging.getLogger(__name__) @tier4 @tier4a @managed_service_required @skipif_ms_consumer @pytest.mark.polarion_id(\"OCS-2771\") def test_corrupt_pg_pd(measure_corrupt_pg): \"\"\"", "import pagerduty log = logging.getLogger(__name__) @tier4 @tier4a @managed_service_required @skipif_ms_consumer @pytest.mark.polarion_id(\"OCS-2771\")", "alerts assert pagerduty.check_incident_list( summary=target_label, incidents=incidents, urgency=\"high\", ) api.check_incident_cleared( summary=target_label, measure_end_time=measure_corrupt_pg.get(\"stop\"),", "that there is appropriate incident in PagerDuty when Placement group", "assert pagerduty.check_incident_list( summary=target_label, incidents=incidents, urgency=\"high\", ) api.check_incident_cleared( summary=target_label, measure_end_time=measure_corrupt_pg.get(\"stop\"), )", "from ocs_ci.ocs import constants from ocs_ci.utility import pagerduty log =", "is corrupted and that this incident is cleared when the", "import logging import pytest from ocs_ci.framework.testlib import ( managed_service_required, skipif_ms_consumer,", "pagerduty.PagerDutyAPI() # get incidents from time when manager deployment was", "corrupted ceph pool is removed. 
\"\"\" api = pagerduty.PagerDutyAPI() #", ") from ocs_ci.ocs import constants from ocs_ci.utility import pagerduty log", "whole string in summary and incident alerts assert pagerduty.check_incident_list( summary=target_label,", "tier4a, ) from ocs_ci.ocs import constants from ocs_ci.utility import pagerduty", "target_label = constants.ALERT_CLUSTERERRORSTATE # TODO(fbalak): check the whole string in", "check the whole string in summary and incident alerts assert", "incident in PagerDuty when Placement group on one OSD is", "from ocs_ci.framework.testlib import ( managed_service_required, skipif_ms_consumer, tier4, tier4a, ) from", "ocs_ci.ocs import constants from ocs_ci.utility import pagerduty log = logging.getLogger(__name__)", "in summary and incident alerts assert pagerduty.check_incident_list( summary=target_label, incidents=incidents, urgency=\"high\",", "= pagerduty.PagerDutyAPI() # get incidents from time when manager deployment", "tier4, tier4a, ) from ocs_ci.ocs import constants from ocs_ci.utility import", "appropriate incident in PagerDuty when Placement group on one OSD", "logging.getLogger(__name__) @tier4 @tier4a @managed_service_required @skipif_ms_consumer @pytest.mark.polarion_id(\"OCS-2771\") def test_corrupt_pg_pd(measure_corrupt_pg): \"\"\" Test", "= constants.ALERT_CLUSTERERRORSTATE # TODO(fbalak): check the whole string in summary", "ocs_ci.framework.testlib import ( managed_service_required, skipif_ms_consumer, tier4, tier4a, ) from ocs_ci.ocs", "when manager deployment was scaled down incidents = measure_corrupt_pg.get(\"pagerduty_incidents\") target_label", "and incident alerts assert pagerduty.check_incident_list( summary=target_label, incidents=incidents, urgency=\"high\", ) api.check_incident_cleared(", "the corrupted ceph pool is removed. 
\"\"\" api = pagerduty.PagerDutyAPI()", "is appropriate incident in PagerDuty when Placement group on one", "from ocs_ci.utility import pagerduty log = logging.getLogger(__name__) @tier4 @tier4a @managed_service_required", "incidents = measure_corrupt_pg.get(\"pagerduty_incidents\") target_label = constants.ALERT_CLUSTERERRORSTATE # TODO(fbalak): check the", "Test that there is appropriate incident in PagerDuty when Placement", "# TODO(fbalak): check the whole string in summary and incident", "is removed. \"\"\" api = pagerduty.PagerDutyAPI() # get incidents from", "that this incident is cleared when the corrupted ceph pool", "incidents from time when manager deployment was scaled down incidents", "<gh_stars>0 import logging import pytest from ocs_ci.framework.testlib import ( managed_service_required,", "incident is cleared when the corrupted ceph pool is removed.", "there is appropriate incident in PagerDuty when Placement group on", "@tier4 @tier4a @managed_service_required @skipif_ms_consumer @pytest.mark.polarion_id(\"OCS-2771\") def test_corrupt_pg_pd(measure_corrupt_pg): \"\"\" Test that", "@tier4a @managed_service_required @skipif_ms_consumer @pytest.mark.polarion_id(\"OCS-2771\") def test_corrupt_pg_pd(measure_corrupt_pg): \"\"\" Test that there", "def test_corrupt_pg_pd(measure_corrupt_pg): \"\"\" Test that there is appropriate incident in", "@pytest.mark.polarion_id(\"OCS-2771\") def test_corrupt_pg_pd(measure_corrupt_pg): \"\"\" Test that there is appropriate incident", "TODO(fbalak): check the whole string in summary and incident alerts", "managed_service_required, skipif_ms_consumer, tier4, tier4a, ) from ocs_ci.ocs import constants from", "# get incidents from time when manager deployment was scaled", "\"\"\" Test that there is appropriate incident in PagerDuty when", "cleared when the corrupted ceph pool is removed. 
\"\"\" api", "from time when manager deployment was scaled down incidents =", "log = logging.getLogger(__name__) @tier4 @tier4a @managed_service_required @skipif_ms_consumer @pytest.mark.polarion_id(\"OCS-2771\") def test_corrupt_pg_pd(measure_corrupt_pg):", "ocs_ci.utility import pagerduty log = logging.getLogger(__name__) @tier4 @tier4a @managed_service_required @skipif_ms_consumer", "get incidents from time when manager deployment was scaled down", "@skipif_ms_consumer @pytest.mark.polarion_id(\"OCS-2771\") def test_corrupt_pg_pd(measure_corrupt_pg): \"\"\" Test that there is appropriate", "corrupted and that this incident is cleared when the corrupted", "import constants from ocs_ci.utility import pagerduty log = logging.getLogger(__name__) @tier4", "OSD is corrupted and that this incident is cleared when", "summary and incident alerts assert pagerduty.check_incident_list( summary=target_label, incidents=incidents, urgency=\"high\", )", "this incident is cleared when the corrupted ceph pool is", "deployment was scaled down incidents = measure_corrupt_pg.get(\"pagerduty_incidents\") target_label = constants.ALERT_CLUSTERERRORSTATE", "constants.ALERT_CLUSTERERRORSTATE # TODO(fbalak): check the whole string in summary and", "\"\"\" api = pagerduty.PagerDutyAPI() # get incidents from time when", "ceph pool is removed. 
\"\"\" api = pagerduty.PagerDutyAPI() # get", "and that this incident is cleared when the corrupted ceph", "measure_corrupt_pg.get(\"pagerduty_incidents\") target_label = constants.ALERT_CLUSTERERRORSTATE # TODO(fbalak): check the whole string", "string in summary and incident alerts assert pagerduty.check_incident_list( summary=target_label, incidents=incidents,", "import ( managed_service_required, skipif_ms_consumer, tier4, tier4a, ) from ocs_ci.ocs import", "Placement group on one OSD is corrupted and that this", "when Placement group on one OSD is corrupted and that", "constants from ocs_ci.utility import pagerduty log = logging.getLogger(__name__) @tier4 @tier4a", "api = pagerduty.PagerDutyAPI() # get incidents from time when manager" ]
[ "of features int y[N]; // the response matrix[N,K] X; //", "observations int N2; // the number of test observations int", "training observations int N2; // the number of test observations", "seaborn as sns from cmdstanpy import CmdStanModel #%% load data", "matrix for the predicted values } parameters { // regression", "} transformed parameters { vector[N] linpred = alpha + X", "= sm.sample( data = mdl_data, show_progress = True, chains =", "int N; // the number of training observations int N2;", "'N2': ix_testing.sum().tolist(), 'K': feature_names.sum().tolist(), 'y': training_labels.values.tolist(), 'X': training_data.values.tolist(), 'new_X': testing_data.values.tolist(),", "= [\"alpha\"]) az.plot_forest(az_trace, var_names = [\"beta\"]) sample_pred = fit.stan_variable('y_pred') #", "beta; } model { alpha ~ cauchy(0, 10); // prior", "DataFrame print(fit.diagnose()) posterior = {k: fit_modif.stan_variable(k) for k in var_name_combi}", "data { int N; // the number of training observations", "= pd.read_csv(\"data/overfitting.csv\", index_col = 'case_id') data.columns data.info() feature_names = data.columns.str.startswith(\"var_\")", "as plt, seaborn as sns from cmdstanpy import CmdStanModel #%%", "coding: utf-8 -*- import numpy as np, pandas as pd,", "int N2; // the number of test observations int K;", "model mdl_data = { # problem with JSON dump =>", "vb = sm.variational(data = mdl_data) vb.variational_sample.columns = vb.variational_params_dict.keys() vb_name =", "mdl_data) vb.variational_sample.columns = vb.variational_params_dict.keys() vb_name = vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\", \"log_\"))] vb.variational_params_pd[var_name_array] vb.variational_sample[var_name_array]", "inference vb = sm.variational(data = mdl_data) vb.variational_sample.columns = vb.variational_params_dict.keys() vb_name", "labels = data[\"Target_Practice\"] ix_training = data.train == 1 training_data =", "as pd, arviz as az, 
prince, matplotlib.pyplot as plt, seaborn", "ix_testing = data.train == 0 testing_data = predictors[ix_testing] testing_labels =", "\"sqrt\", kde = True) pca = prince.PCA(n_components = 2, as_array", "iter_sampling = 50000, iter_warmup = 10000, thin = 5 )", "parameters fit.summary().loc[var_name_array] # pandas DataFrame print(fit.diagnose()) posterior = {k: fit_modif.stan_variable(k)", "ix_testing.sum().tolist(), 'K': feature_names.sum().tolist(), 'y': training_labels.values.tolist(), 'X': training_data.values.tolist(), 'new_X': testing_data.values.tolist(), }", "Roshan Sharma model mdl_data = { # problem with JSON", "load data data = pd.read_csv(\"data/overfitting.csv\", index_col = 'case_id') data.columns data.info()", "= data[\"Target_Practice\"] ix_training = data.train == 1 training_data = predictors[ix_training]", "as_array = False).fit(training_data) pca.plot_row_coordinates(training_data, color_labels = training_labels) pca.column_correlations(training_data).plot.scatter(x = 0,", "= [\"beta\"]) sample_pred = fit.stan_variable('y_pred') # <NAME> model: DOES NOT", "values } parameters { // regression parameters real alpha; vector[K]", "feature_names.sum().tolist(), 'y': training_labels.values.tolist(), 'X': training_data.values.tolist(), 'new_X': testing_data.values.tolist(), } modelfile =", "model matrix matrix[N2,K] new_X; // the matrix for the predicted", "-*- import numpy as np, pandas as pd, arviz as", "var_names = [\"beta\"]) sample_pred = fit.stan_variable('y_pred') # <NAME> model: DOES", "transformed parameters { vector[N] linpred = alpha + X *", "= True) pca = prince.PCA(n_components = 2, as_array = False).fit(training_data)", "data.columns data.info() feature_names = data.columns.str.startswith(\"var_\") predictors = data[data.columns[feature_names]] labels =", "vector[N2] y_pred = alpha + new_X * beta; } \"\"\")", "fit_modif.stan_variable(k) for k in var_name_combi} az_trace = az.from_cmdstanpy(fit) az.summary(az_trace).loc[var_name] #", "for the 
predicted values } parameters { // regression parameters", "# pandas DataFrame az.plot_trace(az_trace, var_names = [\"alpha\"]) az.plot_forest(az_trace, var_names =", "CmdStanModel(stan_file = modelfile) # maximum likelihood estimation optim = sm.optimize(data", "with open(modelfile, \"w\") as file: file.write(\"\"\" data { int N;", "cmdstanpy import CmdStanModel #%% load data data = pd.read_csv(\"data/overfitting.csv\", index_col", "= labels[ix_testing] sns.displot(training_data.values.flatten(), bins = \"sqrt\", kde = True) pca", "fit.stan_variable('y_pred') # <NAME> model: DOES NOT WORK yet # need", "= CmdStanModel(stan_file = modelfile) # maximum likelihood estimation optim =", "as sns from cmdstanpy import CmdStanModel #%% load data data", "<NAME> model: DOES NOT WORK yet # need to figure", "student_t(1, 0, 0.03); y ~ bernoulli_logit(linpred); } generated quantities {", "training_data = predictors[ix_training] training_labels = labels[ix_training] ix_testing = data.train ==", "} \"\"\") var_name_array = [\"alpha\"] + [f\"beta[{i+1}]\" for i in", "sns from cmdstanpy import CmdStanModel #%% load data data =", "values predicted by the model vector[N2] y_pred = alpha +", "data.columns.str.startswith(\"var_\") predictors = data[data.columns[feature_names]] labels = data[\"Target_Practice\"] ix_training = data.train", "fit.summary().loc[var_name_array] # pandas DataFrame print(fit.diagnose()) posterior = {k: fit_modif.stan_variable(k) for", "= predictors[ix_testing] testing_labels = labels[ix_testing] sns.displot(training_data.values.flatten(), bins = \"sqrt\", kde", "sm.sample( data = mdl_data, show_progress = True, chains = 4,", "// the response matrix[N,K] X; // the model matrix matrix[N2,K]", "data data = pd.read_csv(\"data/overfitting.csv\", index_col = 'case_id') data.columns data.info() feature_names", "== 0 testing_data = predictors[ix_testing] testing_labels = labels[ix_testing] sns.displot(training_data.values.flatten(), bins", "// the number of features 
int y[N]; // the response", "'new_X': testing_data.values.tolist(), } modelfile = \"OverfittingRoshanSharma.stan\" with open(modelfile, \"w\") as", "parameters { // regression parameters real alpha; vector[K] beta; }", "[\"alpha\"]) az.plot_forest(az_trace, var_names = [\"beta\"]) sample_pred = fit.stan_variable('y_pred') # <NAME>", "[f\"beta[{i+1}]\" for i in range(mdl_data[\"K\"])] var_name_combi = [\"alpha\", \"beta\"] sm", "# weird column name #%% Roshan Sharma model mdl_data =", "#%% Roshan Sharma model mdl_data = { # problem with", "sample_pred = fit.stan_variable('y_pred') # <NAME> model: DOES NOT WORK yet", "# -*- coding: utf-8 -*- import numpy as np, pandas", "matrix[N2,K] new_X; // the matrix for the predicted values }", "fit = sm.sample( data = mdl_data, show_progress = True, chains", "data.train == 1 training_data = predictors[ix_training] training_labels = labels[ix_training] ix_testing", "testing_data = predictors[ix_testing] testing_labels = labels[ix_testing] sns.displot(training_data.values.flatten(), bins = \"sqrt\",", "parameters { vector[N] linpred = alpha + X * beta;", "modelfile) # maximum likelihood estimation optim = sm.optimize(data = mdl_data).optimized_params_pd", "bins = \"sqrt\", kde = True) pca = prince.PCA(n_components =", "for the intercept following Gelman 2008 beta ~ student_t(1, 0,", "* beta; } model { alpha ~ cauchy(0, 10); //", "2, as_array = False).fit(training_data) pca.plot_row_coordinates(training_data, color_labels = training_labels) pca.column_correlations(training_data).plot.scatter(x =", "new_X * beta; } \"\"\") var_name_array = [\"alpha\"] + [f\"beta[{i+1}]\"", "pca.column_correlations(training_data).plot.scatter(x = 0, y = 1) # weird column name", "sm.variational(data = mdl_data) vb.variational_sample.columns = vb.variational_params_dict.keys() vb_name = vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\", \"log_\"))]", "= 10000, thin = 5 ) fit.draws().shape # iterations, chains,", 
"as az, prince, matplotlib.pyplot as plt, seaborn as sns from", "az, prince, matplotlib.pyplot as plt, seaborn as sns from cmdstanpy", "= sm.variational(data = mdl_data) vb.variational_sample.columns = vb.variational_params_dict.keys() vb_name = vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\",", "alpha + new_X * beta; } \"\"\") var_name_array = [\"alpha\"]", "= \"sqrt\", kde = True) pca = prince.PCA(n_components = 2,", "= alpha + X * beta; } model { alpha", "of training observations int N2; // the number of test", "observations int K; // the number of features int y[N];", "iterations, chains, parameters fit.summary().loc[var_name_array] # pandas DataFrame print(fit.diagnose()) posterior =", "vector[N] linpred = alpha + X * beta; } model", "* beta; } \"\"\") var_name_array = [\"alpha\"] + [f\"beta[{i+1}]\" for", "file.write(\"\"\" data { int N; // the number of training", "y ~ bernoulli_logit(linpred); } generated quantities { // y values", "// the model matrix matrix[N2,K] new_X; // the matrix for", "vb.variational_sample.columns = vb.variational_params_dict.keys() vb_name = vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\", \"log_\"))] vb.variational_params_pd[var_name_array] vb.variational_sample[var_name_array] #", "// regression parameters real alpha; vector[K] beta; } transformed parameters", "thin = 5 ) fit.draws().shape # iterations, chains, parameters fit.summary().loc[var_name_array]", "training_data.values.tolist(), 'new_X': testing_data.values.tolist(), } modelfile = \"OverfittingRoshanSharma.stan\" with open(modelfile, \"w\")", "50000, iter_warmup = 10000, thin = 5 ) fit.draws().shape #", "var_name_combi} az_trace = az.from_cmdstanpy(fit) az.summary(az_trace).loc[var_name] # pandas DataFrame az.plot_trace(az_trace, var_names", "-*- coding: utf-8 -*- import numpy as np, pandas as", "~ student_t(1, 0, 0.03); y ~ bernoulli_logit(linpred); } generated quantities", 
"open(modelfile, \"w\") as file: file.write(\"\"\" data { int N; //", "int y[N]; // the response matrix[N,K] X; // the model", "optim[optim.columns[~optim.columns.str.startswith(\"lp\")]] plt.plot(optim[var_name_array[1:]].values[0]) # variational inference vb = sm.variational(data = mdl_data)", "#%% load data data = pd.read_csv(\"data/overfitting.csv\", index_col = 'case_id') data.columns", "numpy as np, pandas as pd, arviz as az, prince,", "the model matrix matrix[N2,K] new_X; // the matrix for the", "az.plot_trace(az_trace, var_names = [\"alpha\"]) az.plot_forest(az_trace, var_names = [\"beta\"]) sample_pred =", "'y': training_labels.values.tolist(), 'X': training_data.values.tolist(), 'new_X': testing_data.values.tolist(), } modelfile = \"OverfittingRoshanSharma.stan\"", "labels[ix_testing] sns.displot(training_data.values.flatten(), bins = \"sqrt\", kde = True) pca =", "4, iter_sampling = 50000, iter_warmup = 10000, thin = 5", "type 'N': ix_training.sum().tolist(), 'N2': ix_testing.sum().tolist(), 'K': feature_names.sum().tolist(), 'y': training_labels.values.tolist(), 'X':", "linpred = alpha + X * beta; } model {", "pca = prince.PCA(n_components = 2, as_array = False).fit(training_data) pca.plot_row_coordinates(training_data, color_labels", "predictors[ix_testing] testing_labels = labels[ix_testing] sns.displot(training_data.values.flatten(), bins = \"sqrt\", kde =", "# Markov chain Monte Carlo fit = sm.sample( data =", "2008 beta ~ student_t(1, 0, 0.03); y ~ bernoulli_logit(linpred); }", "} generated quantities { // y values predicted by the", "modelfile = \"OverfittingRoshanSharma.stan\" with open(modelfile, \"w\") as file: file.write(\"\"\" data", "= modelfile) # maximum likelihood estimation optim = sm.optimize(data =", "= 0, y = 1) # weird column name #%%", "+ X * beta; } model { alpha ~ cauchy(0,", "kde = True) pca = prince.PCA(n_components = 2, as_array =", "DOES NOT WORK yet # need to figure out how", "NOT WORK yet # need to figure out how to", "in 
range(mdl_data[\"K\"])] var_name_combi = [\"alpha\", \"beta\"] sm = CmdStanModel(stan_file =", "yet # need to figure out how to marginalize all", "sm = CmdStanModel(stan_file = modelfile) # maximum likelihood estimation optim", "range(mdl_data[\"K\"])] var_name_combi = [\"alpha\", \"beta\"] sm = CmdStanModel(stan_file = modelfile)", "following Gelman 2008 beta ~ student_t(1, 0, 0.03); y ~", "DataFrame az.plot_trace(az_trace, var_names = [\"alpha\"]) az.plot_forest(az_trace, var_names = [\"beta\"]) sample_pred", "pca.plot_row_coordinates(training_data, color_labels = training_labels) pca.column_correlations(training_data).plot.scatter(x = 0, y = 1)", "= vb.variational_params_dict.keys() vb_name = vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\", \"log_\"))] vb.variational_params_pd[var_name_array] vb.variational_sample[var_name_array] # Markov", "sm.optimize(data = mdl_data).optimized_params_pd optim[optim.columns[~optim.columns.str.startswith(\"lp\")]] plt.plot(optim[var_name_array[1:]].values[0]) # variational inference vb =", "print(fit.diagnose()) posterior = {k: fit_modif.stan_variable(k) for k in var_name_combi} az_trace", "True, chains = 4, iter_sampling = 50000, iter_warmup = 10000,", "beta ~ student_t(1, 0, 0.03); y ~ bernoulli_logit(linpred); } generated", "// the number of test observations int K; // the", "beta; } transformed parameters { vector[N] linpred = alpha +", "of test observations int K; // the number of features", "pandas DataFrame print(fit.diagnose()) posterior = {k: fit_modif.stan_variable(k) for k in", "as np, pandas as pd, arviz as az, prince, matplotlib.pyplot", "\"w\") as file: file.write(\"\"\" data { int N; // the", "= 'case_id') data.columns data.info() feature_names = data.columns.str.startswith(\"var_\") predictors = data[data.columns[feature_names]]", "vb.variational_sample[var_name_array] # Markov chain Monte Carlo fit = sm.sample( data", "mdl_data).optimized_params_pd 
optim[optim.columns[~optim.columns.str.startswith(\"lp\")]] plt.plot(optim[var_name_array[1:]].values[0]) # variational inference vb = sm.variational(data =", "chain Monte Carlo fit = sm.sample( data = mdl_data, show_progress", "problem with JSON dump => cast to python native type", "// the number of training observations int N2; // the", "native type 'N': ix_training.sum().tolist(), 'N2': ix_testing.sum().tolist(), 'K': feature_names.sum().tolist(), 'y': training_labels.values.tolist(),", "# iterations, chains, parameters fit.summary().loc[var_name_array] # pandas DataFrame print(fit.diagnose()) posterior", "az_trace = az.from_cmdstanpy(fit) az.summary(az_trace).loc[var_name] # pandas DataFrame az.plot_trace(az_trace, var_names =", "sns.displot(training_data.values.flatten(), bins = \"sqrt\", kde = True) pca = prince.PCA(n_components", "the response matrix[N,K] X; // the model matrix matrix[N2,K] new_X;", "'case_id') data.columns data.info() feature_names = data.columns.str.startswith(\"var_\") predictors = data[data.columns[feature_names]] labels", "0, 0.03); y ~ bernoulli_logit(linpred); } generated quantities { //", "{k: fit_modif.stan_variable(k) for k in var_name_combi} az_trace = az.from_cmdstanpy(fit) az.summary(az_trace).loc[var_name]", "# variational inference vb = sm.variational(data = mdl_data) vb.variational_sample.columns =", "weird column name #%% Roshan Sharma model mdl_data = {", "= data[data.columns[feature_names]] labels = data[\"Target_Practice\"] ix_training = data.train == 1", "fit.draws().shape # iterations, chains, parameters fit.summary().loc[var_name_array] # pandas DataFrame print(fit.diagnose())", "cauchy(0, 10); // prior for the intercept following Gelman 2008", "{ # problem with JSON dump => cast to python", "0.03); y ~ bernoulli_logit(linpred); } generated quantities { // y", "index_col = 'case_id') data.columns data.info() feature_names = data.columns.str.startswith(\"var_\") predictors =", "Sharma model mdl_data = { # problem with 
JSON dump", "{ alpha ~ cauchy(0, 10); // prior for the intercept", "new_X; // the matrix for the predicted values } parameters", "} parameters { // regression parameters real alpha; vector[K] beta;", "{ vector[N] linpred = alpha + X * beta; }", "{ // y values predicted by the model vector[N2] y_pred", "+ [f\"beta[{i+1}]\" for i in range(mdl_data[\"K\"])] var_name_combi = [\"alpha\", \"beta\"]", "True) pca = prince.PCA(n_components = 2, as_array = False).fit(training_data) pca.plot_row_coordinates(training_data,", "number of features int y[N]; // the response matrix[N,K] X;", "number of training observations int N2; // the number of", "+ new_X * beta; } \"\"\") var_name_array = [\"alpha\"] +", "from cmdstanpy import CmdStanModel #%% load data data = pd.read_csv(\"data/overfitting.csv\",", "ix_training = data.train == 1 training_data = predictors[ix_training] training_labels =", "quantities { // y values predicted by the model vector[N2]", "0 testing_data = predictors[ix_testing] testing_labels = labels[ix_testing] sns.displot(training_data.values.flatten(), bins =", "int K; // the number of features int y[N]; //", "testing_labels = labels[ix_testing] sns.displot(training_data.values.flatten(), bins = \"sqrt\", kde = True)", "} model { alpha ~ cauchy(0, 10); // prior for", "data.train == 0 testing_data = predictors[ix_testing] testing_labels = labels[ix_testing] sns.displot(training_data.values.flatten(),", "training_labels.values.tolist(), 'X': training_data.values.tolist(), 'new_X': testing_data.values.tolist(), } modelfile = \"OverfittingRoshanSharma.stan\" with", "[\"beta\"]) sample_pred = fit.stan_variable('y_pred') # <NAME> model: DOES NOT WORK", "plt.plot(optim[var_name_array[1:]].values[0]) # variational inference vb = sm.variational(data = mdl_data) vb.variational_sample.columns", "predictors[ix_training] training_labels = labels[ix_training] ix_testing = data.train == 0 testing_data", "show_progress = True, chains = 4, iter_sampling = 50000, iter_warmup", 
"model vector[N2] y_pred = alpha + new_X * beta; }", "real alpha; vector[K] beta; } transformed parameters { vector[N] linpred", "= data.columns.str.startswith(\"var_\") predictors = data[data.columns[feature_names]] labels = data[\"Target_Practice\"] ix_training =", "generated quantities { // y values predicted by the model", "= data.train == 0 testing_data = predictors[ix_testing] testing_labels = labels[ix_testing]", "= sm.optimize(data = mdl_data).optimized_params_pd optim[optim.columns[~optim.columns.str.startswith(\"lp\")]] plt.plot(optim[var_name_array[1:]].values[0]) # variational inference vb", "data = mdl_data, show_progress = True, chains = 4, iter_sampling", ") fit.draws().shape # iterations, chains, parameters fit.summary().loc[var_name_array] # pandas DataFrame", "= False).fit(training_data) pca.plot_row_coordinates(training_data, color_labels = training_labels) pca.column_correlations(training_data).plot.scatter(x = 0, y", "prior for the intercept following Gelman 2008 beta ~ student_t(1,", "the number of training observations int N2; // the number", "matrix[N,K] X; // the model matrix matrix[N2,K] new_X; // the", "Gelman 2008 beta ~ student_t(1, 0, 0.03); y ~ bernoulli_logit(linpred);", "// y values predicted by the model vector[N2] y_pred =", "= { # problem with JSON dump => cast to", "in var_name_combi} az_trace = az.from_cmdstanpy(fit) az.summary(az_trace).loc[var_name] # pandas DataFrame az.plot_trace(az_trace,", "= 1) # weird column name #%% Roshan Sharma model", "likelihood estimation optim = sm.optimize(data = mdl_data).optimized_params_pd optim[optim.columns[~optim.columns.str.startswith(\"lp\")]] plt.plot(optim[var_name_array[1:]].values[0]) #", "X * beta; } model { alpha ~ cauchy(0, 10);", "matrix matrix[N2,K] new_X; // the matrix for the predicted values", "response matrix[N,K] X; // the model matrix matrix[N2,K] new_X; //", "matplotlib.pyplot as plt, seaborn as sns from cmdstanpy import CmdStanModel", "X; // the model matrix matrix[N2,K] 
new_X; // the matrix", "= 2, as_array = False).fit(training_data) pca.plot_row_coordinates(training_data, color_labels = training_labels) pca.column_correlations(training_data).plot.scatter(x", "pd.read_csv(\"data/overfitting.csv\", index_col = 'case_id') data.columns data.info() feature_names = data.columns.str.startswith(\"var_\") predictors", "file: file.write(\"\"\" data { int N; // the number of", "{ // regression parameters real alpha; vector[K] beta; } transformed", "= 50000, iter_warmup = 10000, thin = 5 ) fit.draws().shape", "= az.from_cmdstanpy(fit) az.summary(az_trace).loc[var_name] # pandas DataFrame az.plot_trace(az_trace, var_names = [\"alpha\"])", "# maximum likelihood estimation optim = sm.optimize(data = mdl_data).optimized_params_pd optim[optim.columns[~optim.columns.str.startswith(\"lp\")]]", "for k in var_name_combi} az_trace = az.from_cmdstanpy(fit) az.summary(az_trace).loc[var_name] # pandas", "= predictors[ix_training] training_labels = labels[ix_training] ix_testing = data.train == 0", "vb.variational_params_pd[var_name_array] vb.variational_sample[var_name_array] # Markov chain Monte Carlo fit = sm.sample(", "= alpha + new_X * beta; } \"\"\") var_name_array =", "posterior = {k: fit_modif.stan_variable(k) for k in var_name_combi} az_trace =", "# pandas DataFrame print(fit.diagnose()) posterior = {k: fit_modif.stan_variable(k) for k", "CmdStanModel #%% load data data = pd.read_csv(\"data/overfitting.csv\", index_col = 'case_id')", "# need to figure out how to marginalize all discrete", "== 1 training_data = predictors[ix_training] training_labels = labels[ix_training] ix_testing =", "~ cauchy(0, 10); // prior for the intercept following Gelman", "= data.train == 1 training_data = predictors[ix_training] training_labels = labels[ix_training]", "maximum likelihood estimation optim = sm.optimize(data = mdl_data).optimized_params_pd optim[optim.columns[~optim.columns.str.startswith(\"lp\")]] plt.plot(optim[var_name_array[1:]].values[0])", "pandas 
as pd, arviz as az, prince, matplotlib.pyplot as plt,", "// prior for the intercept following Gelman 2008 beta ~", "data = pd.read_csv(\"data/overfitting.csv\", index_col = 'case_id') data.columns data.info() feature_names =", "} modelfile = \"OverfittingRoshanSharma.stan\" with open(modelfile, \"w\") as file: file.write(\"\"\"", "data[data.columns[feature_names]] labels = data[\"Target_Practice\"] ix_training = data.train == 1 training_data", "var_names = [\"alpha\"]) az.plot_forest(az_trace, var_names = [\"beta\"]) sample_pred = fit.stan_variable('y_pred')", "K; // the number of features int y[N]; // the", "=> cast to python native type 'N': ix_training.sum().tolist(), 'N2': ix_testing.sum().tolist(),", "the predicted values } parameters { // regression parameters real", "az.plot_forest(az_trace, var_names = [\"beta\"]) sample_pred = fit.stan_variable('y_pred') # <NAME> model:", "regression parameters real alpha; vector[K] beta; } transformed parameters {", "vector[K] beta; } transformed parameters { vector[N] linpred = alpha", "alpha ~ cauchy(0, 10); // prior for the intercept following", "mdl_data, show_progress = True, chains = 4, iter_sampling = 50000,", "= \"OverfittingRoshanSharma.stan\" with open(modelfile, \"w\") as file: file.write(\"\"\" data {", "y = 1) # weird column name #%% Roshan Sharma", "= fit.stan_variable('y_pred') # <NAME> model: DOES NOT WORK yet #", "= training_labels) pca.column_correlations(training_data).plot.scatter(x = 0, y = 1) # weird", "N2; // the number of test observations int K; //", "prince.PCA(n_components = 2, as_array = False).fit(training_data) pca.plot_row_coordinates(training_data, color_labels = training_labels)", "Markov chain Monte Carlo fit = sm.sample( data = mdl_data,", "pd, arviz as az, prince, matplotlib.pyplot as plt, seaborn as", "feature_names = data.columns.str.startswith(\"var_\") predictors = data[data.columns[feature_names]] labels = data[\"Target_Practice\"] ix_training", "JSON dump => cast to python native 
type 'N': ix_training.sum().tolist(),", "0, y = 1) # weird column name #%% Roshan", "\"beta\"] sm = CmdStanModel(stan_file = modelfile) # maximum likelihood estimation", "'N': ix_training.sum().tolist(), 'N2': ix_testing.sum().tolist(), 'K': feature_names.sum().tolist(), 'y': training_labels.values.tolist(), 'X': training_data.values.tolist(),", "the number of test observations int K; // the number", "for i in range(mdl_data[\"K\"])] var_name_combi = [\"alpha\", \"beta\"] sm =", "color_labels = training_labels) pca.column_correlations(training_data).plot.scatter(x = 0, y = 1) #", "testing_data.values.tolist(), } modelfile = \"OverfittingRoshanSharma.stan\" with open(modelfile, \"w\") as file:", "column name #%% Roshan Sharma model mdl_data = { #", "\"OverfittingRoshanSharma.stan\" with open(modelfile, \"w\") as file: file.write(\"\"\" data { int", "1) # weird column name #%% Roshan Sharma model mdl_data", "number of test observations int K; // the number of", "1 training_data = predictors[ix_training] training_labels = labels[ix_training] ix_testing = data.train", "import CmdStanModel #%% load data data = pd.read_csv(\"data/overfitting.csv\", index_col =", "the matrix for the predicted values } parameters { //", "var_name_combi = [\"alpha\", \"beta\"] sm = CmdStanModel(stan_file = modelfile) #", "chains, parameters fit.summary().loc[var_name_array] # pandas DataFrame print(fit.diagnose()) posterior = {k:", "ix_training.sum().tolist(), 'N2': ix_testing.sum().tolist(), 'K': feature_names.sum().tolist(), 'y': training_labels.values.tolist(), 'X': training_data.values.tolist(), 'new_X':", "10000, thin = 5 ) fit.draws().shape # iterations, chains, parameters", "az.from_cmdstanpy(fit) az.summary(az_trace).loc[var_name] # pandas DataFrame az.plot_trace(az_trace, var_names = [\"alpha\"]) az.plot_forest(az_trace,", "# <NAME> model: DOES NOT WORK yet # need to", "estimation optim = sm.optimize(data = mdl_data).optimized_params_pd 
optim[optim.columns[~optim.columns.str.startswith(\"lp\")]] plt.plot(optim[var_name_array[1:]].values[0]) # variational", "alpha + X * beta; } model { alpha ~", "= [\"alpha\", \"beta\"] sm = CmdStanModel(stan_file = modelfile) # maximum", "= vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\", \"log_\"))] vb.variational_params_pd[var_name_array] vb.variational_sample[var_name_array] # Markov chain Monte Carlo", "\"log_\"))] vb.variational_params_pd[var_name_array] vb.variational_sample[var_name_array] # Markov chain Monte Carlo fit =", "optim = sm.optimize(data = mdl_data).optimized_params_pd optim[optim.columns[~optim.columns.str.startswith(\"lp\")]] plt.plot(optim[var_name_array[1:]].values[0]) # variational inference", "data.info() feature_names = data.columns.str.startswith(\"var_\") predictors = data[data.columns[feature_names]] labels = data[\"Target_Practice\"]", "= 5 ) fit.draws().shape # iterations, chains, parameters fit.summary().loc[var_name_array] #", "plt, seaborn as sns from cmdstanpy import CmdStanModel #%% load", "= [\"alpha\"] + [f\"beta[{i+1}]\" for i in range(mdl_data[\"K\"])] var_name_combi =", "vb_name = vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\", \"log_\"))] vb.variational_params_pd[var_name_array] vb.variational_sample[var_name_array] # Markov chain Monte", "= 4, iter_sampling = 50000, iter_warmup = 10000, thin =", "training_labels) pca.column_correlations(training_data).plot.scatter(x = 0, y = 1) # weird column", "test observations int K; // the number of features int", "prince, matplotlib.pyplot as plt, seaborn as sns from cmdstanpy import", "i in range(mdl_data[\"K\"])] var_name_combi = [\"alpha\", \"beta\"] sm = CmdStanModel(stan_file", "parameters real alpha; vector[K] beta; } transformed parameters { vector[N]", "features int y[N]; // the response matrix[N,K] X; // the", "utf-8 -*- import numpy as np, pandas as pd, arviz", "10); // prior for the 
intercept following Gelman 2008 beta", "model { alpha ~ cauchy(0, 10); // prior for the", "intercept following Gelman 2008 beta ~ student_t(1, 0, 0.03); y", "= mdl_data).optimized_params_pd optim[optim.columns[~optim.columns.str.startswith(\"lp\")]] plt.plot(optim[var_name_array[1:]].values[0]) # variational inference vb = sm.variational(data", "python native type 'N': ix_training.sum().tolist(), 'N2': ix_testing.sum().tolist(), 'K': feature_names.sum().tolist(), 'y':", "predicted values } parameters { // regression parameters real alpha;", "vb.variational_params_dict.keys() vb_name = vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\", \"log_\"))] vb.variational_params_pd[var_name_array] vb.variational_sample[var_name_array] # Markov chain", "= prince.PCA(n_components = 2, as_array = False).fit(training_data) pca.plot_row_coordinates(training_data, color_labels =", "y_pred = alpha + new_X * beta; } \"\"\") var_name_array", "chains = 4, iter_sampling = 50000, iter_warmup = 10000, thin", "= {k: fit_modif.stan_variable(k) for k in var_name_combi} az_trace = az.from_cmdstanpy(fit)", "with JSON dump => cast to python native type 'N':", "{ int N; // the number of training observations int", "bernoulli_logit(linpred); } generated quantities { // y values predicted by", "pandas DataFrame az.plot_trace(az_trace, var_names = [\"alpha\"]) az.plot_forest(az_trace, var_names = [\"beta\"])", "[\"alpha\"] + [f\"beta[{i+1}]\" for i in range(mdl_data[\"K\"])] var_name_combi = [\"alpha\",", "data[\"Target_Practice\"] ix_training = data.train == 1 training_data = predictors[ix_training] training_labels", "y[N]; // the response matrix[N,K] X; // the model matrix", "= mdl_data, show_progress = True, chains = 4, iter_sampling =", "variational inference vb = sm.variational(data = mdl_data) vb.variational_sample.columns = vb.variational_params_dict.keys()", "// the matrix for the predicted values } parameters {", "var_name_array = [\"alpha\"] + 
[f\"beta[{i+1}]\" for i in range(mdl_data[\"K\"])] var_name_combi", "'K': feature_names.sum().tolist(), 'y': training_labels.values.tolist(), 'X': training_data.values.tolist(), 'new_X': testing_data.values.tolist(), } modelfile", "'X': training_data.values.tolist(), 'new_X': testing_data.values.tolist(), } modelfile = \"OverfittingRoshanSharma.stan\" with open(modelfile,", "iter_warmup = 10000, thin = 5 ) fit.draws().shape # iterations,", "predictors = data[data.columns[feature_names]] labels = data[\"Target_Practice\"] ix_training = data.train ==", "5 ) fit.draws().shape # iterations, chains, parameters fit.summary().loc[var_name_array] # pandas", "the model vector[N2] y_pred = alpha + new_X * beta;", "~ bernoulli_logit(linpred); } generated quantities { // y values predicted", "False).fit(training_data) pca.plot_row_coordinates(training_data, color_labels = training_labels) pca.column_correlations(training_data).plot.scatter(x = 0, y =", "cast to python native type 'N': ix_training.sum().tolist(), 'N2': ix_testing.sum().tolist(), 'K':", "[\"alpha\", \"beta\"] sm = CmdStanModel(stan_file = modelfile) # maximum likelihood", "labels[ix_training] ix_testing = data.train == 0 testing_data = predictors[ix_testing] testing_labels", "vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\", \"log_\"))] vb.variational_params_pd[var_name_array] vb.variational_sample[var_name_array] # Markov chain Monte Carlo fit", "arviz as az, prince, matplotlib.pyplot as plt, seaborn as sns", "need to figure out how to marginalize all discrete params", "by the model vector[N2] y_pred = alpha + new_X *", "to python native type 'N': ix_training.sum().tolist(), 'N2': ix_testing.sum().tolist(), 'K': feature_names.sum().tolist(),", "k in var_name_combi} az_trace = az.from_cmdstanpy(fit) az.summary(az_trace).loc[var_name] # pandas DataFrame", "Monte Carlo fit = sm.sample( data = mdl_data, show_progress =", "# problem with JSON dump => cast to python native", 
"y values predicted by the model vector[N2] y_pred = alpha", "\"\"\") var_name_array = [\"alpha\"] + [f\"beta[{i+1}]\" for i in range(mdl_data[\"K\"])]", "mdl_data = { # problem with JSON dump => cast", "np, pandas as pd, arviz as az, prince, matplotlib.pyplot as", "the number of features int y[N]; // the response matrix[N,K]", "as file: file.write(\"\"\" data { int N; // the number", "= True, chains = 4, iter_sampling = 50000, iter_warmup =", "= mdl_data) vb.variational_sample.columns = vb.variational_params_dict.keys() vb_name = vb.variational_params_pd.columns[~vb.variational_params_pd.columns.str.startswith((\"lp\", \"log_\"))] vb.variational_params_pd[var_name_array]", "the intercept following Gelman 2008 beta ~ student_t(1, 0, 0.03);", "Carlo fit = sm.sample( data = mdl_data, show_progress = True,", "az.summary(az_trace).loc[var_name] # pandas DataFrame az.plot_trace(az_trace, var_names = [\"alpha\"]) az.plot_forest(az_trace, var_names", "model: DOES NOT WORK yet # need to figure out", "import numpy as np, pandas as pd, arviz as az,", "= labels[ix_training] ix_testing = data.train == 0 testing_data = predictors[ix_testing]", "WORK yet # need to figure out how to marginalize", "predicted by the model vector[N2] y_pred = alpha + new_X", "training_labels = labels[ix_training] ix_testing = data.train == 0 testing_data =", "dump => cast to python native type 'N': ix_training.sum().tolist(), 'N2':", "alpha; vector[K] beta; } transformed parameters { vector[N] linpred =", "beta; } \"\"\") var_name_array = [\"alpha\"] + [f\"beta[{i+1}]\" for i", "name #%% Roshan Sharma model mdl_data = { # problem", "N; // the number of training observations int N2; //" ]
[ "value def parse(self, text): \"\"\" Parse a string field containing", "# Daily update def array_splitter(self, value): return [ x[1:-1] for", "Fire and forget loader for materials - will queue a", "cache_file: res = json.load(cache_file) self.queue.put( { 'mats': res } )", "'mats': m._materials } ) except: self.queue.put( { 'error': 'Failed to", "return int(value) elif self.floatRe.match(value): return float(value) elif self.arrayRe.match(value): return self.array_splitter(value)", "queue a 'mats' event or an 'error' event if the", "appropriate type (maybe should look at using ast instead) \"\"\"", "Daily update def array_splitter(self, value): return [ x[1:-1] for x", "= requests.get(\"https://docs.google.com/spreadsheets/u/0/d/1g0y7inyvQopJ93jP5YIu3n0veX0ng8DraJXAvZk6pS4/export?format=tsv&id=1g0y7inyvQopJ93jP5YIu3n0veX0ng8DraJXAvZk6pS4&gid=0\") res = self.parse(r.text) if res: with open(self.filename, \"wt\")", "v[fields[k]] = self.detect(values[k]) res.append(v) return res def run(self): try: if", "for materials - will queue a 'mats' event or an", "needs a refresh. 
class MatsLoader(threading.Thread):
    """Fire-and-forget loader for materials.

    Puts a ``{'mats': ...}`` event on the queue on success, or an
    ``{'error': ...}`` event if the load fails.  Runs as a daemon thread
    so it never blocks interpreter shutdown.
    """

    def __init__(self, filename, queue):
        """filename is the file to async load.
        queue is the queue to report the results into.
        """
        threading.Thread.__init__(self)
        self.queue = queue
        self.filename = filename
        self.daemon = True

    def run(self):
        """Load the materials file and report the result on the queue."""
        try:
            m = mats.Materials(self.filename)
            self.queue.put({'mats': m._materials})
        # Catch Exception rather than using a bare except so that
        # SystemExit/KeyboardInterrupt still propagate; the failure is
        # reported to the consumer instead of dying silently.
        except Exception:
            self.queue.put(
                {'error': 'Failed to load materials ' + str(sys.exc_info()[0])}
            )
class MatsLoaderRemote(threading.Thread):
    """Fire-and-forget loader for materials from a remote spreadsheet.

    Puts a ``{'mats': ...}`` event on the queue on success, or an
    ``{'error': ...}`` event if the load fails.  Runs as a daemon thread.
    Only fetches the remote TSV when the local JSON cache is stale.
    """

    # Maximum age of the local cache before a remote refresh (daily update).
    CACHE_MAX_AGE = 24 * 3600

    def __init__(self, filename, queue):
        """filename is the cache file - we only read the remote file
        if the cache is old (or missing).
        queue is the queue to report the results into.
        """
        threading.Thread.__init__(self)
        self.filename = filename
        self.queue = queue
        self.daemon = True
        # Patterns used by detect() to coerce TSV cell strings into types.
        self.integerRe = re.compile(r'^-?\d+$')
        self.floatRe = re.compile(r'^-?\d+(\.\d+)?$')
        self.arrayRe = re.compile(r'^\[.*\]$')

    def need_refresh(self):
        """Return True if the local cache needs a refresh."""
        if not os.path.exists(self.filename):
            return True
        mtime = os.path.getmtime(self.filename)
        return mtime < time.time() - self.CACHE_MAX_AGE

    def array_splitter(self, value):
        """Split a '["a", "b"]'-style literal into a list of strings."""
        # Strip the surrounding brackets, then the quote around each element.
        return [x[1:-1] for x in value[1:-1].split(", ")]

    def detect(self, value):
        """Look at a data value and convert it into an appropriate type
        (maybe should look at using ast instead).
        """
        # integerRe is checked first so "42" becomes an int, not a float.
        if self.integerRe.match(value):
            return int(value)
        if self.floatRe.match(value):
            return float(value)
        if self.arrayRe.match(value):
            return self.array_splitter(value)
        return value

    def parse(self, text):
        """Parse a string containing a whole TSV file into a list of
        dicts. Mainly split out so we can test.
        """
        lines = text.replace("\r", "").split("\n")
        fields = lines[0].split("\t")
        res = []
        for entry in lines[1:]:
            values = entry.split("\t")
            # Skip short rows (e.g. the trailing blank line of the file).
            if len(values) < len(fields):
                continue
            res.append({field: self.detect(value)
                        for field, value in zip(fields, values)})
        return res

    def run(self):
        """Refresh the cache from the remote TSV if it is stale, otherwise
        load the cached JSON; report the result on the queue either way.
        """
        try:
            if self.need_refresh():
                r = requests.get("https://docs.google.com/spreadsheets/u/0/d/1g0y7inyvQopJ93jP5YIu3n0veX0ng8DraJXAvZk6pS4/export?format=tsv&id=1g0y7inyvQopJ93jP5YIu3n0veX0ng8DraJXAvZk6pS4&gid=0")
                res = self.parse(r.text)
                if res:
                    with open(self.filename, "wt") as cache_file:
                        json.dump(res, cache_file)
                    self.queue.put({'mats': res})
                    debug("Async remote mats loader from tsv is completed {} entries".format(len(res)))
                else:
                    error("Async remote mats loader failed - zero records")
            else:
                with open(self.filename, "rt") as cache_file:
                    res = json.load(cache_file)
                self.queue.put({'mats': res})
                debug("loader from cache is completed {} entries".format(len(res)))
        # Exception (not a bare except) so SystemExit/KeyboardInterrupt
        # propagate; the traceback is forwarded for diagnosis.
        except Exception:
            self.queue.put(
                {'error': 'Failed to load tsv materials ' + str(sys.exc_info()[0]) + ' ' + traceback.format_exc()}
            )
class LuoxiaPipeline(object):
    """Writes each scraped chapter's text to
    books/<title>/<bookname>/<titlename>.txt.
    """

    def process_item(self, item, spider):
        """Append the chapter text to its book's text file and pass the
        item on to the next pipeline stage.

        Expects the item to carry 'title', 'bookname', 'titlename'
        and 'text'.
        """
        path = "books/%s/%s/" % (item['title'], item['bookname'])
        # exist_ok avoids the check-then-create race of the old
        # os.path.exists() + os.makedirs() pair.
        os.makedirs(path, exist_ok=True)
        # Append mode: chapters of one book arrive across multiple items.
        with open(path + item['titlename'] + '.txt', 'a', encoding='utf-8') as f:
            f.write(item['text'])
        return item
class LuoxiaImagePipeline(ImagesPipeline):
    """Downloads a book's images, grouping the files by title and book name."""

    def get_media_requests(self, item, info):
        """Emit one download request per image URL, tagging each request
        with the item's title/bookname so file_path() can group the files.
        """
        for url in item['image_urls']:
            # Fresh meta dict per request: scrapy mutates request.meta.
            yield Request(url, meta={'title': item['title'],
                                     'bookname': item['bookname']})

    def item_completed(self, results, item, info):
        """Store the paths of the successfully downloaded images on the item."""
        item['images'] = [x for ok, x in results if ok]
        return item

    def file_path(self, request, response=None, info=None):
        """Create one directory per book for all of its images and return
        this image's path relative to IMAGES_STORE.
        """
        from urllib.parse import urlparse

        title = request.meta['title']
        bookname = request.meta['bookname']
        book_dir = os.path.join(settings.IMAGES_STORE, title + '/' + bookname)
        # exist_ok avoids the check-then-create race.
        os.makedirs(book_dir, exist_ok=True)
        # Take the extension from the URL *path* (the old split('.')[-1]
        # never raised, so its except branch was dead, and it returned the
        # whole URL tail - including query strings - when no dot was present).
        ext_name = os.path.splitext(urlparse(request.url).path)[1].lstrip('.') or 'jpg'
        # Relative path returned to the images pipeline.
        return '%s/%s/%s.%s' % (title, bookname, bookname, ext_name)
meta={'title': item['title'],", "= item['bookname'] titlename = item['titlename'] text = item['text'] path =", "encoding='utf-8') as f: f.write(text) return item class LuoxiaImagePipeline(ImagesPipeline): def get_media_requests(self,", "to the ITEM_PIPELINES setting # See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html import os from", "return item def file_path(self, request, response=None, info=None): # 为每本书创建一个目录,存放她自己所有的图片 title", "try: ext_name = request.url.split(\".\")[-1] except: ext_name = 'jpg' # 返回的相对路径", "'a', encoding='utf-8') as f: f.write(text) return item class LuoxiaImagePipeline(ImagesPipeline): def", "# 从连接中提取扩展名 try: ext_name = request.url.split(\".\")[-1] except: ext_name = 'jpg'", "item_completed(self, results, item, info): # 将下载完成后的图片路径设置到item中 item['images'] = [x for", "request.meta['bookname'] book_dir = os.path.join(settings.IMAGES_STORE, title +'/'+ bookname) if not os.path.exists(book_dir):", "url in item['image_urls']: yield Request(url, meta={'title': item['title'], 'bookname': item['bookname']}) def", "item['bookname']}) def item_completed(self, results, item, info): # 将下载完成后的图片路径设置到item中 item['images'] =", "f.write(text) return item class LuoxiaImagePipeline(ImagesPipeline): def get_media_requests(self, item, info): for", "item['images'] = [x for ok, x in results if ok]", "item['titlename'] text = item['text'] path = \"books/%s/%s/\" % (title, bookname)", "item['title'] bookname = item['bookname'] titlename = item['titlename'] text = item['text']", "titlename = item['titlename'] text = item['text'] path = \"books/%s/%s/\" %", "text = item['text'] path = \"books/%s/%s/\" % (title, bookname) if", "your item pipelines here # # Don't forget to add", "if ok] return item def file_path(self, request, response=None, info=None): #", "title= item['title'] bookname = item['bookname'] titlename = item['titlename'] text =", "= item['text'] path = \"books/%s/%s/\" % (title, bookname) if not", "bookname = request.meta['bookname'] 
book_dir = os.path.join(settings.IMAGES_STORE, title +'/'+ bookname) if", "settings class LuoxiaPipeline(object): def process_item(self, item, spider): title= item['title'] bookname", "from luoxia import settings class LuoxiaPipeline(object): def process_item(self, item, spider):", "item, spider): title= item['title'] bookname = item['bookname'] titlename = item['titlename']", "item pipelines here # # Don't forget to add your", "except: ext_name = 'jpg' # 返回的相对路径 return '%s/%s/%s.%s' % (title,", "# # Don't forget to add your pipeline to the", "your pipeline to the ITEM_PIPELINES setting # See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html import", "item['text'] path = \"books/%s/%s/\" % (title, bookname) if not os.path.exists(path):", "f: f.write(text) return item class LuoxiaImagePipeline(ImagesPipeline): def get_media_requests(self, item, info):", "return item class LuoxiaImagePipeline(ImagesPipeline): def get_media_requests(self, item, info): for url", "spider): title= item['title'] bookname = item['bookname'] titlename = item['titlename'] text", "as f: f.write(text) return item class LuoxiaImagePipeline(ImagesPipeline): def get_media_requests(self, item,", "ext_name = request.url.split(\".\")[-1] except: ext_name = 'jpg' # 返回的相对路径 return", "= item['titlename'] text = item['text'] path = \"books/%s/%s/\" % (title,", "not os.path.exists(path): os.makedirs(path) with open(path+titlename+'.txt', 'a', encoding='utf-8') as f: f.write(text)", "luoxia import settings class LuoxiaPipeline(object): def process_item(self, item, spider): title=", "# Don't forget to add your pipeline to the ITEM_PIPELINES", "-*- # Define your item pipelines here # # Don't", "yield Request(url, meta={'title': item['title'], 'bookname': item['bookname']}) def item_completed(self, results, item,", "title +'/'+ bookname) if not os.path.exists(book_dir): os.makedirs(book_dir) # 从连接中提取扩展名 try:" ]
[ "model train_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': train_data}, y=train_labels, batch_size=100, num_epochs=None, shuffle=True", "tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier, train_spec, eval_spec) if args.current_host == args.hosts[0]: mnist_classifier.export_savedmodel(args.sm_model_dir, serving_input_fn)", "if mode == tf.estimator.ModeKeys.TRAIN: optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op = optimizer.minimize(", "Output Tensor Shape: [batch_size, 7, 7, 64] pool2 = tf.layers.max_pooling2d(inputs=conv2,", "the License. \"\"\"Convolutional Neural Network Estimator for MNIST, built with", "under the default bucket. parser.add_argument('--model_dir', type=str) parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train',", "28, 32] # Output Tensor Shape: [batch_size, 14, 14, 32]", "the License. A copy of # the License is located", "and by the # `logging_hook`. 'probabilities': tf.nn.softmax(logits, name='softmax_tensor') } if", "inputs=pool1, filters=64, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling Layer", "for the specific # language governing permissions and limitations under", "Add `softmax_tensor` to the graph. It is used for PREDICT", "# Input Layer # Reshape X to 4-D tensor: [batch_size,", "[batch_size, 1024] dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu) # Add dropout", "# model_dir is always passed in from SageMaker. # By", "Set up logging for predictions # Log the values in", "14, 14, 64] conv2 = tf.layers.conv2d( inputs=pool1, filters=64, kernel_size=[5, 5],", "is added to preserve width and height. 
# Input Tensor", "if __name__ == '__main__': args, _ = _parse_args() train_data, train_labels", "return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions) # Calculate Loss (for both TRAIN and", "'train_data.npy')) y_train = np.load(os.path.join(base_dir, 'train_labels.npy')) return x_train, y_train def _load_testing_data(base_dir):", "element will be kept dropout = tf.layers.dropout( inputs=dense, rate=0.4, training=mode", "return tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops) def _load_training_data(base_dir): x_train = np.load(os.path.join(base_dir,", "def serving_input_fn(): inputs = {'x': tf.placeholder(tf.float32, [None, 784])} return tf.estimator.export.ServingInputReceiver(inputs,", "def _parse_args(): parser = argparse.ArgumentParser() # Data, model, and output", "tf.layers.dense(inputs=dropout, units=10) predictions = { # Generate predictions (for PREDICT", "Train the model train_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': train_data}, y=train_labels, batch_size=100,", "train_labels = _load_training_data(args.train) eval_data, eval_labels = _load_testing_data(args.train) # Create the", "limitations under the License. 
\"\"\"Convolutional Neural Network Estimator for MNIST,", "strides=2) # Flatten tensor into a batch of vectors #", "64] pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2) # Flatten tensor", "_load_training_data(base_dir): x_train = np.load(os.path.join(base_dir, 'train_data.npy')) y_train = np.load(os.path.join(base_dir, 'train_labels.npy')) return", "parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts', type=list, default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host', type=str, default=os.environ.get('SM_CURRENT_HOST')) return", "distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS", "parser = argparse.ArgumentParser() # Data, model, and output directories. #", "# Output Tensor Shape: [batch_size, 10] logits = tf.layers.dense(inputs=dropout, units=10)", "# Add dropout operation; 0.6 probability that element will be", "ANY KIND, either express or implied. See the License for", "Training Op (for TRAIN mode) if mode == tf.estimator.ModeKeys.TRAIN: optimizer", "(for both TRAIN and EVAL modes) loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits)", "`softmax_tensor` to the graph. It is used for PREDICT and", ") train_spec = tf.estimator.TrainSpec(train_input_fn, max_steps=20000) eval_spec = tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier, train_spec,", "2.0 (the \"License\"). You # may not use this file", "evaluation metrics (for EVAL mode) eval_metric_ops = { 'accuracy': tf.metrics.accuracy(", "the \"license\" file accompanying this file. 
This file is #", "pool_size=[2, 2], strides=2) # Flatten tensor into a batch of", "14, 32] pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2) # Convolutional", "as np import tensorflow as tf def cnn_model_fn(features, labels, mode):", "Computes 32 features using a 5x5 filter with ReLU activation.", "print results eval_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': eval_data}, y=eval_labels, num_epochs=1, shuffle=False", "logits = tf.layers.dense(inputs=dropout, units=10) predictions = { # Generate predictions", "predictions # Log the values in the 'Softmax' tensor with", "kept dropout = tf.layers.dropout( inputs=dense, rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN) #", "A copy of # the License is located at #", "__future__ import absolute_import, division, print_function import argparse import json import", "pixels, and have one color channel input_layer = tf.reshape(features['x'], [-1,", "parser.add_argument('--model_dir', type=str) parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts', type=list,", "the default bucket. parser.add_argument('--model_dir', type=str) parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train', type=str,", "Estimator for MNIST, built with tf.layers.\"\"\" from __future__ import absolute_import,", "[batch_size, 28, 28, 32] conv1 = tf.layers.conv2d( inputs=input_layer, filters=32, kernel_size=[5,", "Shape: [batch_size, 1024] dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu) # Add", "Reserved. 
# # Licensed under the Apache License, Version 2.0", "# Convolutional Layer #2 # Computes 64 features using a", "32] # Output Tensor Shape: [batch_size, 14, 14, 64] conv2", "= np.load(os.path.join(base_dir, 'train_data.npy')) y_train = np.load(os.path.join(base_dir, 'train_labels.npy')) return x_train, y_train", "[batch_size, 10] logits = tf.layers.dense(inputs=dropout, units=10) predictions = { #", "= tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier, train_spec, eval_spec) if args.current_host == args.hosts[0]: mnist_classifier.export_savedmodel(args.sm_model_dir,", "be kept dropout = tf.layers.dropout( inputs=dense, rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN)", "predictions=predictions) # Calculate Loss (for both TRAIN and EVAL modes)", "Tensor Shape: [batch_size, 7, 7, 64] # Output Tensor Shape:", "Computes 64 features using a 5x5 filter. # Padding is", "name='softmax_tensor') } if mode == tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions) #", "in from SageMaker. # By default this is a S3", "preserve width and height. # Input Tensor Shape: [batch_size, 14,", "type=list, default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host', type=str, default=os.environ.get('SM_CURRENT_HOST')) return parser.parse_known_args() def serving_input_fn(): inputs", "num_epochs=1, shuffle=False ) train_spec = tf.estimator.TrainSpec(train_input_fn, max_steps=20000) eval_spec = tf.estimator.EvalSpec(eval_input_fn)", "License is located at # # http://aws.amazon.com/apache2.0/ # # or", "= np.load(os.path.join(base_dir, 'eval_data.npy')) y_test = np.load(os.path.join(base_dir, 'eval_labels.npy')) return x_test, y_test", "color channel input_layer = tf.reshape(features['x'], [-1, 28, 28, 1]) #", "Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# #", "32] conv1 = tf.layers.conv2d( inputs=input_layer, filters=32, kernel_size=[5, 5], padding='same', activation=tf.nn.relu", "operation; 0.6 probability that element will be kept dropout =", ") # Pooling Layer #1 # First max pooling layer", "mode=mode, loss=loss, eval_metric_ops=eval_metric_ops) def _load_training_data(base_dir): x_train = np.load(os.path.join(base_dir, 'train_data.npy')) y_train", "2x2 filter and stride of 2 # Input Tensor Shape:", "[batch_size, 14, 14, 32] pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2)", "with tf.layers.\"\"\" from __future__ import absolute_import, division, print_function import argparse", "input_layer = tf.reshape(features['x'], [-1, 28, 28, 1]) # Convolutional Layer", "kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling Layer #2 #", "[batch_size, 7, 7, 64] pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2)", "# Generate predictions (for PREDICT and EVAL mode) 'classes': tf.argmax(input=logits,", "file. This file is # distributed on an \"AS IS\"", "7 * 64]) # Dense Layer # Densely connected layer", "channels] # MNIST images are 28x28 pixels, and have one", "= tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) # Configure the Training Op (for TRAIN", "= tf.estimator.TrainSpec(train_input_fn, max_steps=20000) eval_spec = tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier, train_spec, eval_spec) if", "optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op = optimizer.minimize( loss=loss, global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode=mode,", "specific # language governing permissions and limitations under the License.", "= tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50) # Train the model train_input_fn = tf.estimator.inputs.numpy_input_fn(", "Inc. or its affiliates. All Rights Reserved. 
# # Licensed", "\"\"\"Model function for CNN.\"\"\" # Input Layer # Reshape X", "stride of 2 # Input Tensor Shape: [batch_size, 14, 14,", "Tensor Shape: [batch_size, 14, 14, 32] pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2,", "tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op = optimizer.minimize( loss=loss, global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op)", "filter with ReLU activation. # Padding is added to preserve", "max pooling layer with a 2x2 filter and stride of", "Shape: [batch_size, 14, 14, 64] # Output Tensor Shape: [batch_size,", "# MNIST images are 28x28 pixels, and have one color", "TRAIN mode) if mode == tf.estimator.ModeKeys.TRAIN: optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op", "activation. # Padding is added to preserve width and height.", "# Padding is added to preserve width and height. #", "activation=tf.nn.relu ) # Pooling Layer #1 # First max pooling", "tensors_to_log = {'probabilities': 'softmax_tensor'} logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50) # Train", "governing permissions and limitations under the License. \"\"\"Convolutional Neural Network", "file except in compliance with the License. A copy of", "2 # Input Tensor Shape: [batch_size, 28, 28, 32] #", "Shape: [batch_size, 28, 28, 32] # Output Tensor Shape: [batch_size,", "height. # Input Tensor Shape: [batch_size, 14, 14, 32] #", "model, and output directories. # model_dir is always passed in", "Add dropout operation; 0.6 probability that element will be kept", "# Input Tensor Shape: [batch_size, 1024] # Output Tensor Shape:", "for PREDICT and by the # `logging_hook`. 
'probabilities': tf.nn.softmax(logits, name='softmax_tensor')", "First max pooling layer with a 2x2 filter and stride", "is # distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES", "pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2) # Flatten tensor into", "IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND,", "tf.nn.softmax(logits, name='softmax_tensor') } if mode == tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)", "# `logging_hook`. 'probabilities': tf.nn.softmax(logits, name='softmax_tensor') } if mode == tf.estimator.ModeKeys.PREDICT:", "# ANY KIND, either express or implied. See the License", "= tf.reshape(features['x'], [-1, 28, 28, 1]) # Convolutional Layer #1", "kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling Layer #1 #", "# Output Tensor Shape: [batch_size, 1024] dense = tf.layers.dense(inputs=pool2_flat, units=1024,", "batch_size=100, num_epochs=None, shuffle=True ) # Evaluate the model and print", "model_dir=args.model_dir) # Set up logging for predictions # Log the", "the License for the specific # language governing permissions and", "# Output Tensor Shape: [batch_size, 28, 28, 32] conv1 =", "EVAL modes) loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) # Configure the Training", "the 'Softmax' tensor with label 'probabilities' tensors_to_log = {'probabilities': 'softmax_tensor'}", "Log the values in the 'Softmax' tensor with label 'probabilities'", "* 64] pool2_flat = tf.reshape(pool2, [-1, 7 * 7 *", "dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu) # Add dropout operation; 0.6", "Pooling Layer #1 # First max pooling layer with a", "tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir) # Set up logging for predictions # Log", "# Input Tensor Shape: [batch_size, 28, 28, 1] # Output", "64]) # Dense Layer # Densely connected layer with 1024", "the graph. 
It is used for PREDICT and by the", "padding='same', activation=tf.nn.relu ) # Pooling Layer #1 # First max", "# Computes 64 features using a 5x5 filter. # Padding", "# distributed on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR", "7 * 64] # Output Tensor Shape: [batch_size, 1024] dense", "= tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op = optimizer.minimize( loss=loss, global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode=mode, loss=loss,", "Create the Estimator mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir) # Set up", "loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) # Configure the Training Op (for", "x={'x': train_data}, y=train_labels, batch_size=100, num_epochs=None, shuffle=True ) # Evaluate the", "Shape: [batch_size, 7, 7, 64] # Output Tensor Shape: [batch_size,", "tf.reshape(features['x'], [-1, 28, 28, 1]) # Convolutional Layer #1 #", "and print results eval_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': eval_data}, y=eval_labels, num_epochs=1,", "os import numpy as np import tensorflow as tf def", "5], padding='same', activation=tf.nn.relu ) # Pooling Layer #2 # Second", "= tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir) # Set up logging for predictions #", "tf.layers.dropout( inputs=dense, rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN) # Logits layer #", "a S3 path under the default bucket. parser.add_argument('--model_dir', type=str) parser.add_argument('--sm-model-dir',", "All Rights Reserved. # # Licensed under the Apache License,", "CONDITIONS OF # ANY KIND, either express or implied. See", "7 * 7 * 64] pool2_flat = tf.reshape(pool2, [-1, 7", "to preserve width and height. 
# Input Tensor Shape: [batch_size,", "will be kept dropout = tf.layers.dropout( inputs=dense, rate=0.4, training=mode ==", "'eval_labels.npy')) return x_test, y_test def _parse_args(): parser = argparse.ArgumentParser() #", "Tensor Shape: [batch_size, 1024] # Output Tensor Shape: [batch_size, 10]", "tf.estimator.ModeKeys.TRAIN: optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op = optimizer.minimize( loss=loss, global_step=tf.train.get_global_step()) return", "parser.add_argument('--current-host', type=str, default=os.environ.get('SM_CURRENT_HOST')) return parser.parse_known_args() def serving_input_fn(): inputs = {'x':", "is used for PREDICT and by the # `logging_hook`. 'probabilities':", "# Output Tensor Shape: [batch_size, 14, 14, 64] conv2 =", "import numpy as np import tensorflow as tf def cnn_model_fn(features,", "0.6 probability that element will be kept dropout = tf.layers.dropout(", "serving_input_fn(): inputs = {'x': tf.placeholder(tf.float32, [None, 784])} return tf.estimator.export.ServingInputReceiver(inputs, inputs)", "strides=2) # Convolutional Layer #2 # Computes 64 features using", "not use this file except in compliance with the License.", "connected layer with 1024 neurons # Input Tensor Shape: [batch_size,", "_parse_args(): parser = argparse.ArgumentParser() # Data, model, and output directories.", "Shape: [batch_size, 14, 14, 32] # Output Tensor Shape: [batch_size,", "tf.estimator.EstimatorSpec(mode=mode, predictions=predictions) # Calculate Loss (for both TRAIN and EVAL", "[batch_size, 14, 14, 64] # Output Tensor Shape: [batch_size, 7,", "mode == tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions) # Calculate Loss (for", "Configure the Training Op (for TRAIN mode) if mode ==", "Tensor Shape: [batch_size, 7, 7, 64] pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2,", "PREDICT and by the # `logging_hook`. 
'probabilities': tf.nn.softmax(logits, name='softmax_tensor') }", "[batch_size, 7, 7, 64] # Output Tensor Shape: [batch_size, 7", "parser.add_argument('--hosts', type=list, default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host', type=str, default=os.environ.get('SM_CURRENT_HOST')) return parser.parse_known_args() def serving_input_fn():", "np import tensorflow as tf def cnn_model_fn(features, labels, mode): \"\"\"Model", "images are 28x28 pixels, and have one color channel input_layer", "and stride of 2 # Input Tensor Shape: [batch_size, 14,", "train_data, train_labels = _load_training_data(args.train) eval_data, eval_labels = _load_testing_data(args.train) # Create", "7, 64] # Output Tensor Shape: [batch_size, 7 * 7", "2 # Input Tensor Shape: [batch_size, 14, 14, 64] #", "file accompanying this file. This file is # distributed on", "that element will be kept dropout = tf.layers.dropout( inputs=dense, rate=0.4,", "Op (for TRAIN mode) if mode == tf.estimator.ModeKeys.TRAIN: optimizer =", "== tf.estimator.ModeKeys.TRAIN) # Logits layer # Input Tensor Shape: [batch_size,", "is a S3 path under the default bucket. parser.add_argument('--model_dir', type=str)", "WARRANTIES OR CONDITIONS OF # ANY KIND, either express or", "14, 32] # Output Tensor Shape: [batch_size, 14, 14, 64]", "= np.load(os.path.join(base_dir, 'train_labels.npy')) return x_train, y_train def _load_testing_data(base_dir): x_test =", "Layer # Reshape X to 4-D tensor: [batch_size, width, height,", "inputs=dense, rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN) # Logits layer # Input", "used for PREDICT and by the # `logging_hook`. 
'probabilities': tf.nn.softmax(logits,", "a batch of vectors # Input Tensor Shape: [batch_size, 7,", "for CNN.\"\"\" # Input Layer # Reshape X to 4-D", "= tf.estimator.inputs.numpy_input_fn( x={'x': train_data}, y=train_labels, batch_size=100, num_epochs=None, shuffle=True ) #", "= tf.reshape(pool2, [-1, 7 * 7 * 64]) # Dense", "return parser.parse_known_args() def serving_input_fn(): inputs = {'x': tf.placeholder(tf.float32, [None, 784])}", "of 2 # Input Tensor Shape: [batch_size, 28, 28, 32]", "tf def cnn_model_fn(features, labels, mode): \"\"\"Model function for CNN.\"\"\" #", "eval_labels = _load_testing_data(args.train) # Create the Estimator mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn,", "print_function import argparse import json import os import numpy as", "tensor with label 'probabilities' tensors_to_log = {'probabilities': 'softmax_tensor'} logging_hook =", "Tensor Shape: [batch_size, 14, 14, 32] # Output Tensor Shape:", "return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op) # Add evaluation metrics (for EVAL", "optimizer.minimize( loss=loss, global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op) # Add evaluation", "(for PREDICT and EVAL mode) 'classes': tf.argmax(input=logits, axis=1), # Add", "y_train def _load_testing_data(base_dir): x_test = np.load(os.path.join(base_dir, 'eval_data.npy')) y_test = np.load(os.path.join(base_dir,", "train_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': train_data}, y=train_labels, batch_size=100, num_epochs=None, shuffle=True )", "# Licensed under the Apache License, Version 2.0 (the \"License\").", "channel input_layer = tf.reshape(features['x'], [-1, 28, 28, 1]) # Convolutional", "= {'probabilities': 'softmax_tensor'} logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50) # Train the", "Output Tensor Shape: [batch_size, 7 * 7 * 64] pool2_flat", "cnn_model_fn(features, labels, mode): 
\"\"\"Model function for CNN.\"\"\" # Input Layer", "this file except in compliance with the License. A copy", "2], strides=2) # Convolutional Layer #2 # Computes 64 features", "# Logits layer # Input Tensor Shape: [batch_size, 1024] #", "output directories. # model_dir is always passed in from SageMaker.", "filter and stride of 2 # Input Tensor Shape: [batch_size,", "with the License. A copy of # the License is", "== tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions) # Calculate Loss (for both", "BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either", "is always passed in from SageMaker. # By default this", "x_test, y_test def _parse_args(): parser = argparse.ArgumentParser() # Data, model,", "\"License\"). You # may not use this file except in", "is located at # # http://aws.amazon.com/apache2.0/ # # or in", "32] pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2) # Convolutional Layer", "= optimizer.minimize( loss=loss, global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op) # Add", "def _load_training_data(base_dir): x_train = np.load(os.path.join(base_dir, 'train_data.npy')) y_train = np.load(os.path.join(base_dir, 'train_labels.npy'))", "Output Tensor Shape: [batch_size, 10] logits = tf.layers.dense(inputs=dropout, units=10) predictions", "License, Version 2.0 (the \"License\"). You # may not use", "or its affiliates. All Rights Reserved. # # Licensed under", "tf.layers.conv2d( inputs=input_layer, filters=32, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling", "up logging for predictions # Log the values in the", "'classes': tf.argmax(input=logits, axis=1), # Add `softmax_tensor` to the graph. 
It", "Logits layer # Input Tensor Shape: [batch_size, 1024] # Output", "_load_testing_data(base_dir): x_test = np.load(os.path.join(base_dir, 'eval_data.npy')) y_test = np.load(os.path.join(base_dir, 'eval_labels.npy')) return", "pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2) # Convolutional Layer #2", "(for EVAL mode) eval_metric_ops = { 'accuracy': tf.metrics.accuracy( labels=labels, predictions=predictions['classes'])}", "1024] dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu) # Add dropout operation;", "Network Estimator for MNIST, built with tf.layers.\"\"\" from __future__ import", "x_train, y_train def _load_testing_data(base_dir): x_test = np.load(os.path.join(base_dir, 'eval_data.npy')) y_test =", "KIND, either express or implied. See the License for the", "License for the specific # language governing permissions and limitations", "always passed in from SageMaker. # By default this is", "Shape: [batch_size, 7 * 7 * 64] pool2_flat = tf.reshape(pool2,", "tf.metrics.accuracy( labels=labels, predictions=predictions['classes'])} return tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops) def _load_training_data(base_dir):", "tf.estimator.TrainSpec(train_input_fn, max_steps=20000) eval_spec = tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier, train_spec, eval_spec) if args.current_host", "for predictions # Log the values in the 'Softmax' tensor", "eval_data, eval_labels = _load_testing_data(args.train) # Create the Estimator mnist_classifier =", "= tf.layers.conv2d( inputs=input_layer, filters=32, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) #", "# language governing permissions and limitations under the License. \"\"\"Convolutional", "Input Tensor Shape: [batch_size, 7 * 7 * 64] #", "# Add `softmax_tensor` to the graph. 
It is used for", "stride of 2 # Input Tensor Shape: [batch_size, 28, 28,", "Output Tensor Shape: [batch_size, 1024] dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu)", "Add evaluation metrics (for EVAL mode) eval_metric_ops = { 'accuracy':", "Input Tensor Shape: [batch_size, 28, 28, 32] # Output Tensor", "into a batch of vectors # Input Tensor Shape: [batch_size,", "# By default this is a S3 path under the", "64] # Output Tensor Shape: [batch_size, 1024] dense = tf.layers.dense(inputs=pool2_flat,", "* 7 * 64] # Output Tensor Shape: [batch_size, 1024]", "copy of # the License is located at # #", "with a 2x2 filter and stride of 2 # Input", "= tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2) # Convolutional Layer #2 #", "filters=64, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling Layer #2", "type=str, default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts', type=list, default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host', type=str, default=os.environ.get('SM_CURRENT_HOST')) return parser.parse_known_args()", "on an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF", "# Evaluate the model and print results eval_input_fn = tf.estimator.inputs.numpy_input_fn(", "Apache License, Version 2.0 (the \"License\"). You # may not", "results eval_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': eval_data}, y=eval_labels, num_epochs=1, shuffle=False )", "28, 28, 32] conv1 = tf.layers.conv2d( inputs=input_layer, filters=32, kernel_size=[5, 5],", "the specific # language governing permissions and limitations under the", "License. A copy of # the License is located at", "probability that element will be kept dropout = tf.layers.dropout( inputs=dense,", "eval_metric_ops=eval_metric_ops) def _load_training_data(base_dir): x_train = np.load(os.path.join(base_dir, 'train_data.npy')) y_train = np.load(os.path.join(base_dir,", "(the \"License\"). 
You # may not use this file except", "width and height. # Input Tensor Shape: [batch_size, 14, 14,", "division, print_function import argparse import json import os import numpy", "# Pooling Layer #1 # First max pooling layer with", "Loss (for both TRAIN and EVAL modes) loss = tf.losses.sparse_softmax_cross_entropy(labels=labels,", "use this file except in compliance with the License. A", "tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op) # Add evaluation metrics (for EVAL mode)", "EVAL mode) 'classes': tf.argmax(input=logits, axis=1), # Add `softmax_tensor` to the", "tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions) # Calculate Loss (for both TRAIN", "numpy as np import tensorflow as tf def cnn_model_fn(features, labels,", "http://aws.amazon.com/apache2.0/ # # or in the \"license\" file accompanying this", "training=mode == tf.estimator.ModeKeys.TRAIN) # Logits layer # Input Tensor Shape:", "Shape: [batch_size, 10] logits = tf.layers.dense(inputs=dropout, units=10) predictions = {", "vectors # Input Tensor Shape: [batch_size, 7, 7, 64] #", "[batch_size, 7 * 7 * 64] pool2_flat = tf.reshape(pool2, [-1,", "# Output Tensor Shape: [batch_size, 7, 7, 64] pool2 =", "train_spec = tf.estimator.TrainSpec(train_input_fn, max_steps=20000) eval_spec = tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier, train_spec, eval_spec)", "tf.argmax(input=logits, axis=1), # Add `softmax_tensor` to the graph. It is", "32 features using a 5x5 filter with ReLU activation. 
#", "y_train = np.load(os.path.join(base_dir, 'train_labels.npy')) return x_train, y_train def _load_testing_data(base_dir): x_test", "'softmax_tensor'} logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50) # Train the model train_input_fn", "layer with 1024 neurons # Input Tensor Shape: [batch_size, 7", "Estimator mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir) # Set up logging for", "implied. See the License for the specific # language governing", "and output directories. # model_dir is always passed in from", "with label 'probabilities' tensors_to_log = {'probabilities': 'softmax_tensor'} logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log,", "may not use this file except in compliance with the", "default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts', type=list, default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host', type=str, default=os.environ.get('SM_CURRENT_HOST')) return parser.parse_known_args() def", "this file. This file is # distributed on an \"AS", "tf.reshape(pool2, [-1, 7 * 7 * 64]) # Dense Layer", "Second max pooling layer with a 2x2 filter and stride", "the License is located at # # http://aws.amazon.com/apache2.0/ # #", "eval_data}, y=eval_labels, num_epochs=1, shuffle=False ) train_spec = tf.estimator.TrainSpec(train_input_fn, max_steps=20000) eval_spec", "10] logits = tf.layers.dense(inputs=dropout, units=10) predictions = { # Generate", "Calculate Loss (for both TRAIN and EVAL modes) loss =", "inputs=input_layer, filters=32, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling Layer", "Layer #1 # Computes 32 features using a 5x5 filter", "by the # `logging_hook`. 'probabilities': tf.nn.softmax(logits, name='softmax_tensor') } if mode", "the model train_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': train_data}, y=train_labels, batch_size=100, num_epochs=None,", "default bucket. 
parser.add_argument('--model_dir', type=str) parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAINING'))", "Input Layer # Reshape X to 4-D tensor: [batch_size, width,", "It is used for PREDICT and by the # `logging_hook`.", "rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN) # Logits layer # Input Tensor", "# Input Tensor Shape: [batch_size, 14, 14, 64] # Output", "Dense Layer # Densely connected layer with 1024 neurons #", "np.load(os.path.join(base_dir, 'train_labels.npy')) return x_train, y_train def _load_testing_data(base_dir): x_test = np.load(os.path.join(base_dir,", "WITHOUT WARRANTIES OR CONDITIONS OF # ANY KIND, either express", "7, 7, 64] # Output Tensor Shape: [batch_size, 7 *", "as tf def cnn_model_fn(features, labels, mode): \"\"\"Model function for CNN.\"\"\"", "# # Licensed under the Apache License, Version 2.0 (the", "14, 14, 32] pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2) #", "14, 64] conv2 = tf.layers.conv2d( inputs=pool1, filters=64, kernel_size=[5, 5], padding='same',", "= _load_testing_data(args.train) # Create the Estimator mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir)", "and height. # Input Tensor Shape: [batch_size, 28, 28, 1]", "a 5x5 filter with ReLU activation. # Padding is added", "MNIST images are 28x28 pixels, and have one color channel", "License. 
\"\"\"Convolutional Neural Network Estimator for MNIST, built with tf.layers.\"\"\"", "# Second max pooling layer with a 2x2 filter and", "MNIST, built with tf.layers.\"\"\" from __future__ import absolute_import, division, print_function", "model and print results eval_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': eval_data}, y=eval_labels,", "and EVAL mode) 'classes': tf.argmax(input=logits, axis=1), # Add `softmax_tensor` to", ") # Evaluate the model and print results eval_input_fn =", "# Flatten tensor into a batch of vectors # Input", "have one color channel input_layer = tf.reshape(features['x'], [-1, 28, 28,", "__name__ == '__main__': args, _ = _parse_args() train_data, train_labels =", "TRAIN and EVAL modes) loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) # Configure", "Tensor Shape: [batch_size, 28, 28, 32] # Output Tensor Shape:", "#2 # Second max pooling layer with a 2x2 filter", "absolute_import, division, print_function import argparse import json import os import", "Evaluate the model and print results eval_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x':", "passed in from SageMaker. 
# By default this is a", "Tensor Shape: [batch_size, 14, 14, 64] # Output Tensor Shape:", "Layer #2 # Second max pooling layer with a 2x2", "# Log the values in the 'Softmax' tensor with label", "units=10) predictions = { # Generate predictions (for PREDICT and", "np.load(os.path.join(base_dir, 'eval_labels.npy')) return x_test, y_test def _parse_args(): parser = argparse.ArgumentParser()", "are 28x28 pixels, and have one color channel input_layer =", "pool_size=[2, 2], strides=2) # Convolutional Layer #2 # Computes 64", "7, 7, 64] pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2) #", "tf.layers.\"\"\" from __future__ import absolute_import, division, print_function import argparse import", "# # http://aws.amazon.com/apache2.0/ # # or in the \"license\" file", "dropout = tf.layers.dropout( inputs=dense, rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN) # Logits", "_ = _parse_args() train_data, train_labels = _load_training_data(args.train) eval_data, eval_labels =", "and height. # Input Tensor Shape: [batch_size, 14, 14, 32]", "28, 28, 1] # Output Tensor Shape: [batch_size, 28, 28,", "# Input Tensor Shape: [batch_size, 7 * 7 * 64]", "eval_metric_ops = { 'accuracy': tf.metrics.accuracy( labels=labels, predictions=predictions['classes'])} return tf.estimator.EstimatorSpec( mode=mode,", "Data, model, and output directories. # model_dir is always passed", "parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts', type=list, default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host',", "Padding is added to preserve width and height. 
# Input", "'probabilities' tensors_to_log = {'probabilities': 'softmax_tensor'} logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50) #", "def cnn_model_fn(features, labels, mode): \"\"\"Model function for CNN.\"\"\" # Input", "and limitations under the License. \"\"\"Convolutional Neural Network Estimator for", "# # or in the \"license\" file accompanying this file.", "language governing permissions and limitations under the License. \"\"\"Convolutional Neural", "{ 'accuracy': tf.metrics.accuracy( labels=labels, predictions=predictions['classes'])} return tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops)", "64 features using a 5x5 filter. # Padding is added", "= argparse.ArgumentParser() # Data, model, and output directories. # model_dir", "the Training Op (for TRAIN mode) if mode == tf.estimator.ModeKeys.TRAIN:", "tf.estimator.inputs.numpy_input_fn( x={'x': eval_data}, y=eval_labels, num_epochs=1, shuffle=False ) train_spec = tf.estimator.TrainSpec(train_input_fn,", "filter. # Padding is added to preserve width and height.", "accompanying this file. 
This file is # distributed on an", "pool2_flat = tf.reshape(pool2, [-1, 7 * 7 * 64]) #", "the model and print results eval_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': eval_data},", "Convolutional Layer #1 # Computes 32 features using a 5x5", "loss=loss, train_op=train_op) # Add evaluation metrics (for EVAL mode) eval_metric_ops", "Shape: [batch_size, 14, 14, 64] conv2 = tf.layers.conv2d( inputs=pool1, filters=64,", "conv1 = tf.layers.conv2d( inputs=input_layer, filters=32, kernel_size=[5, 5], padding='same', activation=tf.nn.relu )", "Generate predictions (for PREDICT and EVAL mode) 'classes': tf.argmax(input=logits, axis=1),", "tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) # Configure the Training Op (for TRAIN mode)", "= _parse_args() train_data, train_labels = _load_training_data(args.train) eval_data, eval_labels = _load_testing_data(args.train)", "Rights Reserved. # # Licensed under the Apache License, Version", "Shape: [batch_size, 7, 7, 64] pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2],", "Shape: [batch_size, 1024] # Output Tensor Shape: [batch_size, 10] logits", "1024] # Output Tensor Shape: [batch_size, 10] logits = tf.layers.dense(inputs=dropout,", "[batch_size, 1024] # Output Tensor Shape: [batch_size, 10] logits =", "either express or implied. See the License for the specific", "Tensor Shape: [batch_size, 7 * 7 * 64] # Output", "dropout operation; 0.6 probability that element will be kept dropout", "tensor: [batch_size, width, height, channels] # MNIST images are 28x28", "graph. It is used for PREDICT and by the #", "import os import numpy as np import tensorflow as tf", "[-1, 28, 28, 1]) # Convolutional Layer #1 # Computes", "Output Tensor Shape: [batch_size, 28, 28, 32] conv1 = tf.layers.conv2d(", "5x5 filter. # Padding is added to preserve width and", "using a 5x5 filter with ReLU activation. 
# Padding is", "for MNIST, built with tf.layers.\"\"\" from __future__ import absolute_import, division,", "Version 2.0 (the \"License\"). You # may not use this", "X to 4-D tensor: [batch_size, width, height, channels] # MNIST", "28, 28, 1]) # Convolutional Layer #1 # Computes 32", "units=1024, activation=tf.nn.relu) # Add dropout operation; 0.6 probability that element", "bucket. parser.add_argument('--model_dir', type=str) parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts',", "modes) loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) # Configure the Training Op", "def _load_testing_data(base_dir): x_test = np.load(os.path.join(base_dir, 'eval_data.npy')) y_test = np.load(os.path.join(base_dir, 'eval_labels.npy'))", "ReLU activation. # Padding is added to preserve width and", "<reponame>jpmarques19/tensorflwo-test<filename>aws_sagemaker_studio/frameworks/tensorflow_mnist/mnist.py<gh_stars>1-10 # Copyright 2020 Amazon.com, Inc. or its affiliates. All", "\"license\" file accompanying this file. This file is # distributed", "S3 path under the default bucket. 
parser.add_argument('--model_dir', type=str) parser.add_argument('--sm-model-dir', type=str,", "default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts', type=list, default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host', type=str, default=os.environ.get('SM_CURRENT_HOST'))", "return x_test, y_test def _parse_args(): parser = argparse.ArgumentParser() # Data,", "_parse_args() train_data, train_labels = _load_training_data(args.train) eval_data, eval_labels = _load_testing_data(args.train) #", "type=str, default=os.environ.get('SM_CURRENT_HOST')) return parser.parse_known_args() def serving_input_fn(): inputs = {'x': tf.placeholder(tf.float32,", "labels, mode): \"\"\"Model function for CNN.\"\"\" # Input Layer #", "'probabilities': tf.nn.softmax(logits, name='softmax_tensor') } if mode == tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec(mode=mode,", "predictions=predictions['classes'])} return tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops) def _load_training_data(base_dir): x_train =", "max_steps=20000) eval_spec = tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier, train_spec, eval_spec) if args.current_host ==", "file is # distributed on an \"AS IS\" BASIS, WITHOUT", "28, 1] # Output Tensor Shape: [batch_size, 28, 28, 32]", "64] # Output Tensor Shape: [batch_size, 7 * 7 *", "and stride of 2 # Input Tensor Shape: [batch_size, 28,", "This file is # distributed on an \"AS IS\" BASIS,", "'__main__': args, _ = _parse_args() train_data, train_labels = _load_training_data(args.train) eval_data,", "axis=1), # Add `softmax_tensor` to the graph. It is used", "`logging_hook`. 
'probabilities': tf.nn.softmax(logits, name='softmax_tensor') } if mode == tf.estimator.ModeKeys.PREDICT: return", "y_test = np.load(os.path.join(base_dir, 'eval_labels.npy')) return x_test, y_test def _parse_args(): parser", "from __future__ import absolute_import, division, print_function import argparse import json", "= tf.estimator.inputs.numpy_input_fn( x={'x': eval_data}, y=eval_labels, num_epochs=1, shuffle=False ) train_spec =", "# Train the model train_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': train_data}, y=train_labels,", "14, 64] # Output Tensor Shape: [batch_size, 7, 7, 64]", "tensorflow as tf def cnn_model_fn(features, labels, mode): \"\"\"Model function for", "tf.estimator.ModeKeys.TRAIN) # Logits layer # Input Tensor Shape: [batch_size, 1024]", "Licensed under the Apache License, Version 2.0 (the \"License\"). You", "height. # Input Tensor Shape: [batch_size, 28, 28, 1] #", "inputs = {'x': tf.placeholder(tf.float32, [None, 784])} return tf.estimator.export.ServingInputReceiver(inputs, inputs) if", "= tf.layers.dropout( inputs=dense, rate=0.4, training=mode == tf.estimator.ModeKeys.TRAIN) # Logits layer", "an \"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF #", "# Convolutional Layer #1 # Computes 32 features using a", "# Dense Layer # Densely connected layer with 1024 neurons", "train_data}, y=train_labels, batch_size=100, num_epochs=None, shuffle=True ) # Evaluate the model", "28x28 pixels, and have one color channel input_layer = tf.reshape(features['x'],", "Tensor Shape: [batch_size, 7 * 7 * 64] pool2_flat =", "neurons # Input Tensor Shape: [batch_size, 7 * 7 *", "tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50) # Train the model train_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x':", "np.load(os.path.join(base_dir, 'eval_data.npy')) y_test = np.load(os.path.join(base_dir, 'eval_labels.npy')) return x_test, y_test def", "= _load_training_data(args.train) eval_data, eval_labels = 
_load_testing_data(args.train) # Create the Estimator", "added to preserve width and height. # Input Tensor Shape:", "= tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2) # Flatten tensor into a", "of # the License is located at # # http://aws.amazon.com/apache2.0/", "# the License is located at # # http://aws.amazon.com/apache2.0/ #", "json import os import numpy as np import tensorflow as", "Tensor Shape: [batch_size, 10] logits = tf.layers.dense(inputs=dropout, units=10) predictions =", "path under the default bucket. parser.add_argument('--model_dir', type=str) parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR'))", "logits=logits) # Configure the Training Op (for TRAIN mode) if", "built with tf.layers.\"\"\" from __future__ import absolute_import, division, print_function import", "_load_testing_data(args.train) # Create the Estimator mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir) #", "= np.load(os.path.join(base_dir, 'eval_labels.npy')) return x_test, y_test def _parse_args(): parser =", "Output Tensor Shape: [batch_size, 14, 14, 64] conv2 = tf.layers.conv2d(", "padding='same', activation=tf.nn.relu ) # Pooling Layer #2 # Second max", "argparse.ArgumentParser() # Data, model, and output directories. 
# model_dir is", "_load_training_data(args.train) eval_data, eval_labels = _load_testing_data(args.train) # Create the Estimator mnist_classifier", "default this is a S3 path under the default bucket.", "metrics (for EVAL mode) eval_metric_ops = { 'accuracy': tf.metrics.accuracy( labels=labels,", "loss=loss, global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op) # Add evaluation metrics", "train_op=train_op) # Add evaluation metrics (for EVAL mode) eval_metric_ops =", "Layer #2 # Computes 64 features using a 5x5 filter.", "mode) 'classes': tf.argmax(input=logits, axis=1), # Add `softmax_tensor` to the graph.", "Neural Network Estimator for MNIST, built with tf.layers.\"\"\" from __future__", "Tensor Shape: [batch_size, 1024] dense = tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu) #", "# Create the Estimator mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir) # Set", "7 * 7 * 64] # Output Tensor Shape: [batch_size,", "== '__main__': args, _ = _parse_args() train_data, train_labels = _load_training_data(args.train)", "activation=tf.nn.relu ) # Pooling Layer #2 # Second max pooling", "in the 'Softmax' tensor with label 'probabilities' tensors_to_log = {'probabilities':", "function for CNN.\"\"\" # Input Layer # Reshape X to", "# Set up logging for predictions # Log the values", "SageMaker. 
# By default this is a S3 path under", "1024 neurons # Input Tensor Shape: [batch_size, 7 * 7", "= tf.layers.conv2d( inputs=pool1, filters=64, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) #", "labels=labels, predictions=predictions['classes'])} return tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops) def _load_training_data(base_dir): x_train", "4-D tensor: [batch_size, width, height, channels] # MNIST images are", "width, height, channels] # MNIST images are 28x28 pixels, and", "# Computes 32 features using a 5x5 filter with ReLU", "model_dir is always passed in from SageMaker. # By default", "height, channels] # MNIST images are 28x28 pixels, and have", "in the \"license\" file accompanying this file. This file is", "permissions and limitations under the License. \"\"\"Convolutional Neural Network Estimator", "[batch_size, 14, 14, 32] # Output Tensor Shape: [batch_size, 14,", "tensor into a batch of vectors # Input Tensor Shape:", "1]) # Convolutional Layer #1 # Computes 32 features using", "at # # http://aws.amazon.com/apache2.0/ # # or in the \"license\"", "num_epochs=None, shuffle=True ) # Evaluate the model and print results", "[batch_size, 7 * 7 * 64] # Output Tensor Shape:", "5], padding='same', activation=tf.nn.relu ) # Pooling Layer #1 # First", "tf.estimator.export.ServingInputReceiver(inputs, inputs) if __name__ == '__main__': args, _ = _parse_args()", "with 1024 neurons # Input Tensor Shape: [batch_size, 7 *", "# Input Tensor Shape: [batch_size, 7, 7, 64] # Output", "in compliance with the License. 
A copy of # the", "both TRAIN and EVAL modes) loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) #", "layer # Input Tensor Shape: [batch_size, 1024] # Output Tensor", "= { 'accuracy': tf.metrics.accuracy( labels=labels, predictions=predictions['classes'])} return tf.estimator.EstimatorSpec( mode=mode, loss=loss,", "# Reshape X to 4-D tensor: [batch_size, width, height, channels]", "Convolutional Layer #2 # Computes 64 features using a 5x5", "every_n_iter=50) # Train the model train_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': train_data},", "shuffle=True ) # Evaluate the model and print results eval_input_fn", "x={'x': eval_data}, y=eval_labels, num_epochs=1, shuffle=False ) train_spec = tf.estimator.TrainSpec(train_input_fn, max_steps=20000)", "EVAL mode) eval_metric_ops = { 'accuracy': tf.metrics.accuracy( labels=labels, predictions=predictions['classes'])} return", "import absolute_import, division, print_function import argparse import json import os", "28, 1]) # Convolutional Layer #1 # Computes 32 features", ") # Pooling Layer #2 # Second max pooling layer", "32] # Output Tensor Shape: [batch_size, 14, 14, 32] pool1", "of vectors # Input Tensor Shape: [batch_size, 7, 7, 64]", "# Output Tensor Shape: [batch_size, 7 * 7 * 64]", "import json import os import numpy as np import tensorflow", "\"\"\"Convolutional Neural Network Estimator for MNIST, built with tf.layers.\"\"\" from", "# Data, model, and output directories. # model_dir is always", "under the Apache License, Version 2.0 (the \"License\"). 
You #", "import tensorflow as tf def cnn_model_fn(features, labels, mode): \"\"\"Model function", "a 2x2 filter and stride of 2 # Input Tensor", "# Add evaluation metrics (for EVAL mode) eval_metric_ops = {", "7, 64] pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2) # Flatten", "x_train = np.load(os.path.join(base_dir, 'train_data.npy')) y_train = np.load(os.path.join(base_dir, 'train_labels.npy')) return x_train,", "shuffle=False ) train_spec = tf.estimator.TrainSpec(train_input_fn, max_steps=20000) eval_spec = tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier,", "5x5 filter with ReLU activation. # Padding is added to", "#2 # Computes 64 features using a 5x5 filter. #", "under the License. \"\"\"Convolutional Neural Network Estimator for MNIST, built", "loss=loss, eval_metric_ops=eval_metric_ops) def _load_training_data(base_dir): x_train = np.load(os.path.join(base_dir, 'train_data.npy')) y_train =", "if mode == tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions) # Calculate Loss", "Input Tensor Shape: [batch_size, 1024] # Output Tensor Shape: [batch_size,", "type=str) parser.add_argument('--sm-model-dir', type=str, default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts', type=list, default=json.loads(os.environ.get('SM_HOSTS')))", "predictions (for PREDICT and EVAL mode) 'classes': tf.argmax(input=logits, axis=1), #", "global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op) # Add evaluation metrics (for", "{'probabilities': 'softmax_tensor'} logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50) # Train the model", "mode): \"\"\"Model function for CNN.\"\"\" # Input Layer # Reshape", "# http://aws.amazon.com/apache2.0/ # # or in the \"license\" file accompanying", "See 
the License for the specific # language governing permissions", "return tf.estimator.export.ServingInputReceiver(inputs, inputs) if __name__ == '__main__': args, _ =", "filters=32, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling Layer #1", "Reshape X to 4-D tensor: [batch_size, width, height, channels] #", "features using a 5x5 filter with ReLU activation. # Padding", "pooling layer with a 2x2 filter and stride of 2", "return x_train, y_train def _load_testing_data(base_dir): x_test = np.load(os.path.join(base_dir, 'eval_data.npy')) y_test", "of 2 # Input Tensor Shape: [batch_size, 14, 14, 64]", "np.load(os.path.join(base_dir, 'train_data.npy')) y_train = np.load(os.path.join(base_dir, 'train_labels.npy')) return x_train, y_train def", "[-1, 7 * 7 * 64]) # Dense Layer #", "# Output Tensor Shape: [batch_size, 14, 14, 32] pool1 =", "{'x': tf.placeholder(tf.float32, [None, 784])} return tf.estimator.export.ServingInputReceiver(inputs, inputs) if __name__ ==", "y=train_labels, batch_size=100, num_epochs=None, shuffle=True ) # Evaluate the model and", "[batch_size, width, height, channels] # MNIST images are 28x28 pixels,", "tf.estimator.inputs.numpy_input_fn( x={'x': train_data}, y=train_labels, batch_size=100, num_epochs=None, shuffle=True ) # Evaluate", "Tensor Shape: [batch_size, 28, 28, 1] # Output Tensor Shape:", "7 * 7 * 64]) # Dense Layer # Densely", "'eval_data.npy')) y_test = np.load(os.path.join(base_dir, 'eval_labels.npy')) return x_test, y_test def _parse_args():", "# or in the \"license\" file accompanying this file. This", "argparse import json import os import numpy as np import", "one color channel input_layer = tf.reshape(features['x'], [-1, 28, 28, 1])", "activation=tf.nn.relu) # Add dropout operation; 0.6 probability that element will", "[batch_size, 28, 28, 32] # Output Tensor Shape: [batch_size, 14,", "the # `logging_hook`. 
'probabilities': tf.nn.softmax(logits, name='softmax_tensor') } if mode ==", "tf.layers.conv2d( inputs=pool1, filters=64, kernel_size=[5, 5], padding='same', activation=tf.nn.relu ) # Pooling", "Tensor Shape: [batch_size, 28, 28, 32] conv1 = tf.layers.conv2d( inputs=input_layer,", "== tf.estimator.ModeKeys.TRAIN: optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op = optimizer.minimize( loss=loss, global_step=tf.train.get_global_step())", "Densely connected layer with 1024 neurons # Input Tensor Shape:", "tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2) # Flatten tensor into a batch", "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF # ANY", "Layer # Densely connected layer with 1024 neurons # Input", "train_op = optimizer.minimize( loss=loss, global_step=tf.train.get_global_step()) return tf.estimator.EstimatorSpec(mode=mode, loss=loss, train_op=train_op) #", "[batch_size, 28, 28, 1] # Output Tensor Shape: [batch_size, 28,", "and EVAL modes) loss = tf.losses.sparse_softmax_cross_entropy(labels=labels, logits=logits) # Configure the", "except in compliance with the License. A copy of #", "64] conv2 = tf.layers.conv2d( inputs=pool1, filters=64, kernel_size=[5, 5], padding='same', activation=tf.nn.relu", "= tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu) # Add dropout operation; 0.6 probability", "64] pool2_flat = tf.reshape(pool2, [-1, 7 * 7 * 64])", "to the graph. 
It is used for PREDICT and by", "# Configure the Training Op (for TRAIN mode) if mode", "# Densely connected layer with 1024 neurons # Input Tensor", "the Estimator mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir) # Set up logging", "'accuracy': tf.metrics.accuracy( labels=labels, predictions=predictions['classes'])} return tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops) def", "PREDICT and EVAL mode) 'classes': tf.argmax(input=logits, axis=1), # Add `softmax_tensor`", "logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50) # Train the model train_input_fn =", "to 4-D tensor: [batch_size, width, height, channels] # MNIST images", "import argparse import json import os import numpy as np", "14, 14, 32] # Output Tensor Shape: [batch_size, 14, 14,", "# Pooling Layer #2 # Second max pooling layer with", "values in the 'Softmax' tensor with label 'probabilities' tensors_to_log =", "1] # Output Tensor Shape: [batch_size, 28, 28, 32] conv1", "or in the \"license\" file accompanying this file. This file", "inputs) if __name__ == '__main__': args, _ = _parse_args() train_data,", "Shape: [batch_size, 28, 28, 32] conv1 = tf.layers.conv2d( inputs=input_layer, filters=32,", "784])} return tf.estimator.export.ServingInputReceiver(inputs, inputs) if __name__ == '__main__': args, _", "default=os.environ.get('SM_CURRENT_HOST')) return parser.parse_known_args() def serving_input_fn(): inputs = {'x': tf.placeholder(tf.float32, [None,", "eval_spec = tf.estimator.EvalSpec(eval_input_fn) tf.estimator.train_and_evaluate(mnist_classifier, train_spec, eval_spec) if args.current_host == args.hosts[0]:", "* 64]) # Dense Layer # Densely connected layer with", "layer with a 2x2 filter and stride of 2 #", "14, 14, 64] # Output Tensor Shape: [batch_size, 7, 7,", "OF # ANY KIND, either express or implied. 
See the", "64] # Output Tensor Shape: [batch_size, 7, 7, 64] pool2", "Output Tensor Shape: [batch_size, 14, 14, 32] pool1 = tf.layers.max_pooling2d(inputs=conv1,", "'train_labels.npy')) return x_train, y_train def _load_testing_data(base_dir): x_test = np.load(os.path.join(base_dir, 'eval_data.npy'))", "x_test = np.load(os.path.join(base_dir, 'eval_data.npy')) y_test = np.load(os.path.join(base_dir, 'eval_labels.npy')) return x_test,", "from SageMaker. # By default this is a S3 path", "7 * 64] pool2_flat = tf.reshape(pool2, [-1, 7 * 7", "y=eval_labels, num_epochs=1, shuffle=False ) train_spec = tf.estimator.TrainSpec(train_input_fn, max_steps=20000) eval_spec =", "tf.layers.dense(inputs=pool2_flat, units=1024, activation=tf.nn.relu) # Add dropout operation; 0.6 probability that", "Shape: [batch_size, 7 * 7 * 64] # Output Tensor", "parser.parse_known_args() def serving_input_fn(): inputs = {'x': tf.placeholder(tf.float32, [None, 784])} return", "Layer #1 # First max pooling layer with a 2x2", "# Calculate Loss (for both TRAIN and EVAL modes) loss", "y_test def _parse_args(): parser = argparse.ArgumentParser() # Data, model, and", "* 7 * 64]) # Dense Layer # Densely connected", "= tf.layers.dense(inputs=dropout, units=10) predictions = { # Generate predictions (for", "CNN.\"\"\" # Input Layer # Reshape X to 4-D tensor:", "} if mode == tf.estimator.ModeKeys.PREDICT: return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions) # Calculate", "* 64] # Output Tensor Shape: [batch_size, 1024] dense =", "the values in the 'Softmax' tensor with label 'probabilities' tensors_to_log", "mode) eval_metric_ops = { 'accuracy': tf.metrics.accuracy( labels=labels, predictions=predictions['classes'])} return tf.estimator.EstimatorSpec(", "preserve width and height. # Input Tensor Shape: [batch_size, 28,", "features using a 5x5 filter. 
# Padding is added to", "= {'x': tf.placeholder(tf.float32, [None, 784])} return tf.estimator.export.ServingInputReceiver(inputs, inputs) if __name__", "with ReLU activation. # Padding is added to preserve width", "* 7 * 64] pool2_flat = tf.reshape(pool2, [-1, 7 *", "eval_input_fn = tf.estimator.inputs.numpy_input_fn( x={'x': eval_data}, y=eval_labels, num_epochs=1, shuffle=False ) train_spec", "express or implied. See the License for the specific #", "Input Tensor Shape: [batch_size, 14, 14, 64] # Output Tensor", "(for TRAIN mode) if mode == tf.estimator.ModeKeys.TRAIN: optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001)", "this is a S3 path under the default bucket. parser.add_argument('--model_dir',", "# Input Tensor Shape: [batch_size, 28, 28, 32] # Output", "[batch_size, 14, 14, 64] conv2 = tf.layers.conv2d( inputs=pool1, filters=64, kernel_size=[5,", "# First max pooling layer with a 2x2 filter and", "'Softmax' tensor with label 'probabilities' tensors_to_log = {'probabilities': 'softmax_tensor'} logging_hook", "width and height. # Input Tensor Shape: [batch_size, 28, 28,", "its affiliates. All Rights Reserved. # # Licensed under the", "a 5x5 filter. # Padding is added to preserve width", "default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host', type=str, default=os.environ.get('SM_CURRENT_HOST')) return parser.parse_known_args() def serving_input_fn(): inputs =", "# may not use this file except in compliance with", "Shape: [batch_size, 14, 14, 32] pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2],", "or implied. See the License for the specific # language", "directories. # model_dir is always passed in from SageMaker. 
#", "Shape: [batch_size, 28, 28, 1] # Output Tensor Shape: [batch_size,", "batch of vectors # Input Tensor Shape: [batch_size, 7, 7,", "predictions = { # Generate predictions (for PREDICT and EVAL", "28, 32] conv1 = tf.layers.conv2d( inputs=input_layer, filters=32, kernel_size=[5, 5], padding='same',", "# Input Tensor Shape: [batch_size, 14, 14, 32] # Output", "You # may not use this file except in compliance", "Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.", "compliance with the License. A copy of # the License", "using a 5x5 filter. # Padding is added to preserve", "tf.estimator.EstimatorSpec( mode=mode, loss=loss, eval_metric_ops=eval_metric_ops) def _load_training_data(base_dir): x_train = np.load(os.path.join(base_dir, 'train_data.npy'))", "and have one color channel input_layer = tf.reshape(features['x'], [-1, 28,", "args, _ = _parse_args() train_data, train_labels = _load_training_data(args.train) eval_data, eval_labels", "2020 Amazon.com, Inc. or its affiliates. All Rights Reserved. 
#", "mode) if mode == tf.estimator.ModeKeys.TRAIN: optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op =", "label 'probabilities' tensors_to_log = {'probabilities': 'softmax_tensor'} logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log, every_n_iter=50)", "[None, 784])} return tf.estimator.export.ServingInputReceiver(inputs, inputs) if __name__ == '__main__': args,", "conv2 = tf.layers.conv2d( inputs=pool1, filters=64, kernel_size=[5, 5], padding='same', activation=tf.nn.relu )", "#1 # First max pooling layer with a 2x2 filter", "28, 28, 32] # Output Tensor Shape: [batch_size, 14, 14,", "Input Tensor Shape: [batch_size, 7, 7, 64] # Output Tensor", "mode == tf.estimator.ModeKeys.TRAIN: optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.001) train_op = optimizer.minimize( loss=loss,", "= { # Generate predictions (for PREDICT and EVAL mode)", "located at # # http://aws.amazon.com/apache2.0/ # # or in the", "tf.placeholder(tf.float32, [None, 784])} return tf.estimator.export.ServingInputReceiver(inputs, inputs) if __name__ == '__main__':", "type=str, default=os.environ.get('SM_MODEL_DIR')) parser.add_argument('--train', type=str, default=os.environ.get('SM_CHANNEL_TRAINING')) parser.add_argument('--hosts', type=list, default=json.loads(os.environ.get('SM_HOSTS'))) parser.add_argument('--current-host', type=str,", "affiliates. All Rights Reserved. # # Licensed under the Apache", "2], strides=2) # Flatten tensor into a batch of vectors", "the Apache License, Version 2.0 (the \"License\"). You # may", "#1 # Computes 32 features using a 5x5 filter with", "Tensor Shape: [batch_size, 14, 14, 64] conv2 = tf.layers.conv2d( inputs=pool1,", "Input Tensor Shape: [batch_size, 14, 14, 32] # Output Tensor", "# Copyright 2020 Amazon.com, Inc. or its affiliates. 
All Rights", "By default this is a S3 path under the default", "Flatten tensor into a batch of vectors # Input Tensor", "{ # Generate predictions (for PREDICT and EVAL mode) 'classes':", "logging for predictions # Log the values in the 'Softmax'", "tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2) # Convolutional Layer #2 # Computes", "Input Tensor Shape: [batch_size, 28, 28, 1] # Output Tensor", "Pooling Layer #2 # Second max pooling layer with a", "mnist_classifier = tf.estimator.Estimator(model_fn=cnn_model_fn, model_dir=args.model_dir) # Set up logging for predictions", "OR CONDITIONS OF # ANY KIND, either express or implied." ]
[ "from autoware_planning_msgs.msg import StopReasonArray from case_converter import pascal2snake from geometry_msgs.msg", "StopReasonArray, self._options.topic_name, self._on_stop_reasons, 1 ) self._pub_pose_map = {} self._idx_map =", "if name not in self._pose_map: self._pose_map[name] = {} pose_id =", "2.0 (the \"License\"); # you may not use this file", "if pose_topic_name not in self._pub_pose_map: self._pub_pose_map[pose_topic_name] = self.create_publisher( PoseStamped, topic_ns", "import math import sys from autoware_planning_msgs.msg import StopReasonArray from case_converter", "= index.Index() return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1) def _get_nearest_pose_id(self, name, pose, th_dist):", "self._pub_pose_map = {} self._idx_map = {} self._pose_map = {} self._self_pose_listener", "return pose_id @staticmethod def calc_distance2d(pose1, pose2): p1 = pose1.position p2", "self._register_pose(snake_case_stop_reason, pose.pose) pose_topic_name = \"{snake_case_stop_reason}_{pose_id}\".format(**locals()) topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" if pose_topic_name", "rclpy.node import Node from rtree import index from self_pose_listener import", "\"/autoware_debug_tools/stop_reason2pose/\" self._pub_pose_map[snake_case_stop_reason] = self.create_publisher( PoseStamped, topic_ns + snake_case_stop_reason, 1 )", "name, pose): if name not in self._pose_map: self._pose_map[name] = {}", "{} pose_id = len(self._pose_map[name]) + 1 self._pose_map[name][pose_id] = pose self._idx_map[name].insert(pose_id,", "governing permissions and # limitations under the License. 
import argparse", "poses = [stop_factor.stop_pose for stop_factor in stop_reason.stop_factors] if not poses:", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "def _register_pose(self, name, pose): if name not in self._pose_map: self._pose_map[name]", "[pose.position.x, pose.position.y, pose.position.x, pose.position.y] def main(args): rclpy.init() parser = argparse.ArgumentParser()", "= self.create_publisher( PoseStamped, topic_ns + pose_topic_name, 1 ) self._pub_pose_map[pose_topic_name].publish(pose) #", "stop_reason in msg.stop_reasons: snake_case_stop_reason = pascal2snake(stop_reason.reason) if len(stop_reason.stop_factors) == 0:", "= msg.header pose.pose = stop_factor.stop_pose # Get nearest pose th_dist", "nearest_pose = PoseStamped() nearest_pose.header = msg.header nearest_pose.pose = self._get_nearest_pose_in_array( stop_reason,", "self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) def _get_nearest_pose_in_array(self, stop_reason, self_pose): poses = [stop_factor.stop_pose for stop_factor", "name not in self._pose_map: return None return self._pose_map[name][pose_id] def _update_pose(self,", "if not nearest_pose_ids: return None nearest_pose_id = nearest_pose_ids[0] nearest_pose =", "= pascal2snake(stop_reason.reason) if len(stop_reason.stop_factors) == 0: self.get_logger().warn(\"stop_factor is null\") return", "p1 = pose1.position p2 = pose2.position return math.hypot(p1.x - p2.x,", "from rclpy.node import Node from rtree import index from self_pose_listener", "pose2.position return math.hypot(p1.x - p2.x, p1.y - p2.y) @staticmethod def", "use this file except in compliance with the License. 
#", "self_pose), poses) nearest_idx = np.argmin(distances) return poses[nearest_idx] def _find_nearest_pose_id(self, name,", "PoseStamped, topic_ns + pose_topic_name, 1 ) self._pub_pose_map[pose_topic_name].publish(pose) # Publish nearest", "self.create_publisher( PoseStamped, topic_ns + snake_case_stop_reason, 1 ) self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) def _get_nearest_pose_in_array(self,", "return math.hypot(p1.x - p2.x, p1.y - p2.y) @staticmethod def pose2boundingbox(pose):", "{} self._pose_map = {} self._self_pose_listener = SelfPoseListener() self.timer = self.create_timer((1.0", "= np.argmin(distances) return poses[nearest_idx] def _find_nearest_pose_id(self, name, pose): if name", "self._get_nearest_pose_in_array( stop_reason, self._self_pose_listener.self_pose ) if nearest_pose.pose: if snake_case_stop_reason not in", "> th_dist: return None return nearest_pose_id def _get_pose(self, name, pose_id):", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "IV, Inc. # # Licensed under the Apache License, Version", "License. 
# You may obtain a copy of the License", "nearest_pose = self._get_pose(name, nearest_pose_id) if not nearest_pose: return None dist", "self._self_pose_listener.self_pose ) if nearest_pose.pose: if snake_case_stop_reason not in self._pub_pose_map: topic_ns", "self._pub_pose_map: self._pub_pose_map[pose_topic_name] = self.create_publisher( PoseStamped, topic_ns + pose_topic_name, 1 )", "pose_id): self._pose_map[name][id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) def _register_pose(self, name, pose):", "under the License is distributed on an \"AS IS\" BASIS,", "_get_nearest_pose_in_array(self, stop_reason, self_pose): poses = [stop_factor.stop_pose for stop_factor in stop_reason.stop_factors]", "License for the specific language governing permissions and # limitations", "calc_distance2d(pose1, pose2): p1 = pose1.position p2 = pose2.position return math.hypot(p1.x", "1 ) self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) def _get_nearest_pose_in_array(self, stop_reason, self_pose): poses = [stop_factor.stop_pose", "self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) return pose_id @staticmethod def calc_distance2d(pose1, pose2): p1 =", "topic_ns + snake_case_stop_reason, 1 ) self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) def _get_nearest_pose_in_array(self, stop_reason, self_pose):", "pose.position.x, pose.position.y] def main(args): rclpy.init() parser = argparse.ArgumentParser() parser.add_argument(\"topic_name\", type=str)", "dist > th_dist: return None return nearest_pose_id def _get_pose(self, name,", "if not nearest_pose: return None dist = StopReason2PoseNode.calc_distance2d(pose, nearest_pose) if", "self._pub_pose_map[snake_case_stop_reason] = self.create_publisher( PoseStamped, topic_ns + snake_case_stop_reason, 1 ) self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose)", "in compliance with the License. 
# You may obtain a", "not nearest_pose: return None dist = StopReason2PoseNode.calc_distance2d(pose, nearest_pose) if dist", "software # distributed under the License is distributed on an", "None nearest_pose_id = nearest_pose_ids[0] nearest_pose = self._get_pose(name, nearest_pose_id) if not", "index from self_pose_listener import SelfPoseListener class StopReason2PoseNode(Node): def __init__(self, options):", "self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) def _register_pose(self, name, pose): if name not in", "{} self._idx_map = {} self._pose_map = {} self._self_pose_listener = SelfPoseListener()", "np.argmin(distances) return poses[nearest_idx] def _find_nearest_pose_id(self, name, pose): if name not", "PoseStamped import numpy as np import rclpy from rclpy.node import", "rclpy from rclpy.node import Node from rtree import index from", "dist = StopReason2PoseNode.calc_distance2d(pose, nearest_pose) if dist > th_dist: return None", "import rclpy from rclpy.node import Node from rtree import index", "+ 1 self._pose_map[name][pose_id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) return pose_id @staticmethod", "def __init__(self, options): super().__init__(\"stop_reason2pose_node\") self._options = options self._sub_pose = self.create_subscription(", "if nearest_pose.pose: if snake_case_stop_reason not in self._pub_pose_map: topic_ns = \"/autoware_debug_tools/stop_reason2pose/\"", "pose2boundingbox(pose): return [pose.position.x, pose.position.y, pose.position.x, pose.position.y] def main(args): rclpy.init() parser", "snake_case_stop_reason not in self._pub_pose_map: topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" self._pub_pose_map[snake_case_stop_reason] = self.create_publisher(", "= parser.parse_args(args) stop_reason2pose_node = StopReason2PoseNode(ns) rclpy.spin(stop_reason2pose_node) stop_reason2pose_node.destroy_node() rclpy.shutdown() if __name__", "= msg.header 
nearest_pose.pose = self._get_nearest_pose_in_array( stop_reason, self._self_pose_listener.self_pose ) if nearest_pose.pose:", "th_dist = 1.0 nearest_pose_id = self._get_nearest_pose_id( snake_case_stop_reason, pose.pose, th_dist )", "= pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) def _register_pose(self, name, pose): if name", "np import rclpy from rclpy.node import Node from rtree import", "def _get_nearest_pose_in_array(self, stop_reason, self_pose): poses = [stop_factor.stop_pose for stop_factor in", "= map(lambda p: StopReason2PoseNode.calc_distance2d(p, self_pose), poses) nearest_idx = np.argmin(distances) return", "self.timer = self.create_timer((1.0 / 100), self._self_pose_listener.get_current_pose) def _on_stop_reasons(self, msg): for", "= PoseStamped() nearest_pose.header = msg.header nearest_pose.pose = self._get_nearest_pose_in_array( stop_reason, self._self_pose_listener.self_pose", "python3 # Copyright 2020 Tier IV, Inc. # # Licensed", "self_pose): poses = [stop_factor.stop_pose for stop_factor in stop_reason.stop_factors] if not", "None dist = StopReason2PoseNode.calc_distance2d(pose, nearest_pose) if dist > th_dist: return", "th_dist): nearest_pose_ids = list(self._find_nearest_pose_id(name, pose)) if not nearest_pose_ids: return None", "\"/autoware_debug_tools/stop_reason2pose/\" if pose_topic_name not in self._pub_pose_map: self._pub_pose_map[pose_topic_name] = self.create_publisher( PoseStamped,", "if snake_case_stop_reason not in self._pub_pose_map: topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" self._pub_pose_map[snake_case_stop_reason] =", "import Node from rtree import index from self_pose_listener import SelfPoseListener", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "not nearest_pose_ids: return None nearest_pose_id = nearest_pose_ids[0] nearest_pose = self._get_pose(name,", "ANY KIND, either express or implied. 
# See the License", "See the License for the specific language governing permissions and", "#! /usr/bin/env python3 # Copyright 2020 Tier IV, Inc. #", "[stop_factor.stop_pose for stop_factor in stop_reason.stop_factors] if not poses: return None", "= StopReason2PoseNode.calc_distance2d(pose, nearest_pose) if dist > th_dist: return None return", "self_pose_listener import SelfPoseListener class StopReason2PoseNode(Node): def __init__(self, options): super().__init__(\"stop_reason2pose_node\") self._options", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "to in writing, software # distributed under the License is", "StopReasonArray from case_converter import pascal2snake from geometry_msgs.msg import PoseStamped import", "in self._pub_pose_map: topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" self._pub_pose_map[snake_case_stop_reason] = self.create_publisher( PoseStamped, topic_ns", "nearest_pose.pose: if snake_case_stop_reason not in self._pub_pose_map: topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" self._pub_pose_map[snake_case_stop_reason]", "# See the License for the specific language governing permissions", "argparse.ArgumentParser() parser.add_argument(\"topic_name\", type=str) ns = parser.parse_args(args) stop_reason2pose_node = StopReason2PoseNode(ns) rclpy.spin(stop_reason2pose_node)", "= pose1.position p2 = pose2.position return math.hypot(p1.x - p2.x, p1.y", "def _get_pose(self, name, pose_id): if name not in self._pose_map: return", "or agreed to in writing, software # distributed under the", ") self._pub_pose_map[pose_topic_name].publish(pose) # Publish nearest stop_reason without number nearest_pose =", "distances = map(lambda p: StopReason2PoseNode.calc_distance2d(p, self_pose), poses) nearest_idx = np.argmin(distances)", "required by applicable law or agreed to in writing, 
software", "pose): if name not in self._idx_map: self._idx_map[name] = index.Index() return", "options): super().__init__(\"stop_reason2pose_node\") self._options = options self._sub_pose = self.create_subscription( StopReasonArray, self._options.topic_name,", "Copyright 2020 Tier IV, Inc. # # Licensed under the", "+ snake_case_stop_reason, 1 ) self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) def _get_nearest_pose_in_array(self, stop_reason, self_pose): poses", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "pose.pose) pose_topic_name = \"{snake_case_stop_reason}_{pose_id}\".format(**locals()) topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" if pose_topic_name not", "with the License. # You may obtain a copy of", "2020 Tier IV, Inc. # # Licensed under the Apache", "permissions and # limitations under the License. import argparse import", "import numpy as np import rclpy from rclpy.node import Node", "stop_reason.stop_factors: pose = PoseStamped() pose.header = msg.header pose.pose = stop_factor.stop_pose", "nearest pose th_dist = 1.0 nearest_pose_id = self._get_nearest_pose_id( snake_case_stop_reason, pose.pose,", "pose.pose, th_dist ) if nearest_pose_id: self._update_pose(snake_case_stop_reason, pose.pose, nearest_pose_id) pose_id =", "nearest_idx = np.argmin(distances) return poses[nearest_idx] def _find_nearest_pose_id(self, name, pose): if", "compliance with the License. 
# You may obtain a copy", "agreed to in writing, software # distributed under the License", "= self.create_timer((1.0 / 100), self._self_pose_listener.get_current_pose) def _on_stop_reasons(self, msg): for stop_reason", "None return nearest_pose_id def _get_pose(self, name, pose_id): if name not", "not in self._pose_map: self._pose_map[name] = {} pose_id = len(self._pose_map[name]) +", "distributed under the License is distributed on an \"AS IS\"", ") if nearest_pose.pose: if snake_case_stop_reason not in self._pub_pose_map: topic_ns =", "index.Index() return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1) def _get_nearest_pose_id(self, name, pose, th_dist): nearest_pose_ids", "name, pose_id): if name not in self._pose_map: return None return", "pose.pose = stop_factor.stop_pose # Get nearest pose th_dist = 1.0", "is null\") return for stop_factor in stop_reason.stop_factors: pose = PoseStamped()", "the License. import argparse import math import sys from autoware_planning_msgs.msg", "self._pose_map: self._pose_map[name] = {} pose_id = len(self._pose_map[name]) + 1 self._pose_map[name][pose_id]", "express or implied. # See the License for the specific", "except in compliance with the License. 
# You may obtain", "pose): if name not in self._pose_map: self._pose_map[name] = {} pose_id", "p2 = pose2.position return math.hypot(p1.x - p2.x, p1.y - p2.y)", "stop_factor.stop_pose # Get nearest pose th_dist = 1.0 nearest_pose_id =", "1 ) self._pub_pose_map = {} self._idx_map = {} self._pose_map =", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "if nearest_pose_id: self._update_pose(snake_case_stop_reason, pose.pose, nearest_pose_id) pose_id = nearest_pose_id else: pose_id", "nearest_pose_id def _get_pose(self, name, pose_id): if name not in self._pose_map:", "not use this file except in compliance with the License.", "return [pose.position.x, pose.position.y, pose.position.x, pose.position.y] def main(args): rclpy.init() parser =", "writing, software # distributed under the License is distributed on", "rtree import index from self_pose_listener import SelfPoseListener class StopReason2PoseNode(Node): def", "you may not use this file except in compliance with", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "nearest_pose_ids[0] nearest_pose = self._get_pose(name, nearest_pose_id) if not nearest_pose: return None", "rclpy.init() parser = argparse.ArgumentParser() parser.add_argument(\"topic_name\", type=str) ns = parser.parse_args(args) stop_reason2pose_node", "self._pose_map[name][pose_id] def _update_pose(self, name, pose, pose_id): self._pose_map[name][id] = pose self._idx_map[name].insert(pose_id,", "msg.header pose.pose = stop_factor.stop_pose # Get nearest pose th_dist =", "pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) return pose_id @staticmethod def calc_distance2d(pose1, pose2): p1", "CONDITIONS OF ANY KIND, either express or implied. # See", "pose, th_dist): nearest_pose_ids = list(self._find_nearest_pose_id(name, pose)) if not nearest_pose_ids: return", "Tier IV, Inc. 
# # Licensed under the Apache License,", "1 self._pose_map[name][pose_id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) return pose_id @staticmethod def", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "def pose2boundingbox(pose): return [pose.position.x, pose.position.y, pose.position.x, pose.position.y] def main(args): rclpy.init()", "self._pose_map = {} self._self_pose_listener = SelfPoseListener() self.timer = self.create_timer((1.0 /", "= self.create_subscription( StopReasonArray, self._options.topic_name, self._on_stop_reasons, 1 ) self._pub_pose_map = {}", "if len(stop_reason.stop_factors) == 0: self.get_logger().warn(\"stop_factor is null\") return for stop_factor", "= SelfPoseListener() self.timer = self.create_timer((1.0 / 100), self._self_pose_listener.get_current_pose) def _on_stop_reasons(self,", "parser.add_argument(\"topic_name\", type=str) ns = parser.parse_args(args) stop_reason2pose_node = StopReason2PoseNode(ns) rclpy.spin(stop_reason2pose_node) stop_reason2pose_node.destroy_node()", "self._pub_pose_map: topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" self._pub_pose_map[snake_case_stop_reason] = self.create_publisher( PoseStamped, topic_ns +", "100), self._self_pose_listener.get_current_pose) def _on_stop_reasons(self, msg): for stop_reason in msg.stop_reasons: snake_case_stop_reason", "# limitations under the License. 
import argparse import math import", "parser.parse_args(args) stop_reason2pose_node = StopReason2PoseNode(ns) rclpy.spin(stop_reason2pose_node) stop_reason2pose_node.destroy_node() rclpy.shutdown() if __name__ ==", "= {} self._pose_map = {} self._self_pose_listener = SelfPoseListener() self.timer =", ") self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) def _get_nearest_pose_in_array(self, stop_reason, self_pose): poses = [stop_factor.stop_pose for", "def _get_nearest_pose_id(self, name, pose, th_dist): nearest_pose_ids = list(self._find_nearest_pose_id(name, pose)) if", "options self._sub_pose = self.create_subscription( StopReasonArray, self._options.topic_name, self._on_stop_reasons, 1 ) self._pub_pose_map", "OR CONDITIONS OF ANY KIND, either express or implied. #", "import index from self_pose_listener import SelfPoseListener class StopReason2PoseNode(Node): def __init__(self,", "the License is distributed on an \"AS IS\" BASIS, #", "return nearest_pose_id def _get_pose(self, name, pose_id): if name not in", "SelfPoseListener class StopReason2PoseNode(Node): def __init__(self, options): super().__init__(\"stop_reason2pose_node\") self._options = options", "pose th_dist = 1.0 nearest_pose_id = self._get_nearest_pose_id( snake_case_stop_reason, pose.pose, th_dist", "# Get nearest pose th_dist = 1.0 nearest_pose_id = self._get_nearest_pose_id(", "in self._idx_map: self._idx_map[name] = index.Index() return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1) def _get_nearest_pose_id(self,", "PoseStamped() nearest_pose.header = msg.header nearest_pose.pose = self._get_nearest_pose_in_array( stop_reason, self._self_pose_listener.self_pose )", "= PoseStamped() pose.header = msg.header pose.pose = stop_factor.stop_pose # Get", "snake_case_stop_reason, 1 ) self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) def _get_nearest_pose_in_array(self, stop_reason, self_pose): poses =", "= pose self._idx_map[name].insert(pose_id, 
StopReason2PoseNode.pose2boundingbox(pose)) return pose_id @staticmethod def calc_distance2d(pose1, pose2):", "len(self._pose_map[name]) + 1 self._pose_map[name][pose_id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) return pose_id", "law or agreed to in writing, software # distributed under", "# Copyright 2020 Tier IV, Inc. # # Licensed under", "math import sys from autoware_planning_msgs.msg import StopReasonArray from case_converter import", "Get nearest pose th_dist = 1.0 nearest_pose_id = self._get_nearest_pose_id( snake_case_stop_reason,", "self._options = options self._sub_pose = self.create_subscription( StopReasonArray, self._options.topic_name, self._on_stop_reasons, 1", "may obtain a copy of the License at # #", "def _update_pose(self, name, pose, pose_id): self._pose_map[name][id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose))", "= len(self._pose_map[name]) + 1 self._pose_map[name][pose_id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) return", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Inc. 
# # Licensed under the Apache License, Version 2.0", "nearest_pose_id = self._get_nearest_pose_id( snake_case_stop_reason, pose.pose, th_dist ) if nearest_pose_id: self._update_pose(snake_case_stop_reason,", "return for stop_factor in stop_reason.stop_factors: pose = PoseStamped() pose.header =", "without number nearest_pose = PoseStamped() nearest_pose.header = msg.header nearest_pose.pose =", "= 1.0 nearest_pose_id = self._get_nearest_pose_id( snake_case_stop_reason, pose.pose, th_dist ) if", "return None return nearest_pose_id def _get_pose(self, name, pose_id): if name", "pose_id): if name not in self._pose_map: return None return self._pose_map[name][pose_id]", "def main(args): rclpy.init() parser = argparse.ArgumentParser() parser.add_argument(\"topic_name\", type=str) ns =", "may not use this file except in compliance with the", "import sys from autoware_planning_msgs.msg import StopReasonArray from case_converter import pascal2snake", "pascal2snake from geometry_msgs.msg import PoseStamped import numpy as np import", "= self._get_pose(name, nearest_pose_id) if not nearest_pose: return None dist =", "_update_pose(self, name, pose, pose_id): self._pose_map[name][id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) def", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "stop_reason, self_pose): poses = [stop_factor.stop_pose for stop_factor in stop_reason.stop_factors] if", "this file except in compliance with the License. 
# You", "case_converter import pascal2snake from geometry_msgs.msg import PoseStamped import numpy as", "for stop_factor in stop_reason.stop_factors: pose = PoseStamped() pose.header = msg.header", "1.0 nearest_pose_id = self._get_nearest_pose_id( snake_case_stop_reason, pose.pose, th_dist ) if nearest_pose_id:", "topic_ns + pose_topic_name, 1 ) self._pub_pose_map[pose_topic_name].publish(pose) # Publish nearest stop_reason", "stop_reason.stop_factors] if not poses: return None distances = map(lambda p:", "nearest_pose_id: self._update_pose(snake_case_stop_reason, pose.pose, nearest_pose_id) pose_id = nearest_pose_id else: pose_id =", "pose_id = self._register_pose(snake_case_stop_reason, pose.pose) pose_topic_name = \"{snake_case_stop_reason}_{pose_id}\".format(**locals()) topic_ns = \"/autoware_debug_tools/stop_reason2pose/\"", "= pose2.position return math.hypot(p1.x - p2.x, p1.y - p2.y) @staticmethod", "nearest_pose_id) if not nearest_pose: return None dist = StopReason2PoseNode.calc_distance2d(pose, nearest_pose)", "def _on_stop_reasons(self, msg): for stop_reason in msg.stop_reasons: snake_case_stop_reason = pascal2snake(stop_reason.reason)", "p: StopReason2PoseNode.calc_distance2d(p, self_pose), poses) nearest_idx = np.argmin(distances) return poses[nearest_idx] def", "import SelfPoseListener class StopReason2PoseNode(Node): def __init__(self, options): super().__init__(\"stop_reason2pose_node\") self._options =", "if dist > th_dist: return None return nearest_pose_id def _get_pose(self,", "pose.position.y, pose.position.x, pose.position.y] def main(args): rclpy.init() parser = argparse.ArgumentParser() parser.add_argument(\"topic_name\",", "pose.header = msg.header pose.pose = stop_factor.stop_pose # Get nearest pose", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "self._get_nearest_pose_id( snake_case_stop_reason, pose.pose, th_dist ) if nearest_pose_id: self._update_pose(snake_case_stop_reason, pose.pose, 
nearest_pose_id)", "# # Licensed under the Apache License, Version 2.0 (the", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "from rtree import index from self_pose_listener import SelfPoseListener class StopReason2PoseNode(Node):", "pose.pose, nearest_pose_id) pose_id = nearest_pose_id else: pose_id = self._register_pose(snake_case_stop_reason, pose.pose)", "topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" if pose_topic_name not in self._pub_pose_map: self._pub_pose_map[pose_topic_name] =", "msg.header nearest_pose.pose = self._get_nearest_pose_in_array( stop_reason, self._self_pose_listener.self_pose ) if nearest_pose.pose: if", "number nearest_pose = PoseStamped() nearest_pose.header = msg.header nearest_pose.pose = self._get_nearest_pose_in_array(", "len(stop_reason.stop_factors) == 0: self.get_logger().warn(\"stop_factor is null\") return for stop_factor in", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "pose_topic_name, 1 ) self._pub_pose_map[pose_topic_name].publish(pose) # Publish nearest stop_reason without number", "StopReason2PoseNode.calc_distance2d(p, self_pose), poses) nearest_idx = np.argmin(distances) return poses[nearest_idx] def _find_nearest_pose_id(self,", "from case_converter import pascal2snake from geometry_msgs.msg import PoseStamped import numpy", "= self._get_nearest_pose_in_array( stop_reason, self._self_pose_listener.self_pose ) if nearest_pose.pose: if snake_case_stop_reason not", "return None nearest_pose_id = nearest_pose_ids[0] nearest_pose = self._get_pose(name, nearest_pose_id) if", "- p2.x, p1.y - p2.y) @staticmethod def pose2boundingbox(pose): return [pose.position.x,", "language governing permissions and # limitations under the License. 
import", "StopReason2PoseNode.calc_distance2d(pose, nearest_pose) if dist > th_dist: return None return nearest_pose_id", "name not in self._pose_map: self._pose_map[name] = {} pose_id = len(self._pose_map[name])", "in stop_reason.stop_factors: pose = PoseStamped() pose.header = msg.header pose.pose =", "autoware_planning_msgs.msg import StopReasonArray from case_converter import pascal2snake from geometry_msgs.msg import", "self.get_logger().warn(\"stop_factor is null\") return for stop_factor in stop_reason.stop_factors: pose =", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "argparse import math import sys from autoware_planning_msgs.msg import StopReasonArray from", "pose2): p1 = pose1.position p2 = pose2.position return math.hypot(p1.x -", "= \"/autoware_debug_tools/stop_reason2pose/\" self._pub_pose_map[snake_case_stop_reason] = self.create_publisher( PoseStamped, topic_ns + snake_case_stop_reason, 1", "nearest_pose_id = nearest_pose_ids[0] nearest_pose = self._get_pose(name, nearest_pose_id) if not nearest_pose:", "pose_id = len(self._pose_map[name]) + 1 self._pose_map[name][pose_id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose))", "{} self._self_pose_listener = SelfPoseListener() self.timer = self.create_timer((1.0 / 100), self._self_pose_listener.get_current_pose)", "poses) nearest_idx = np.argmin(distances) return poses[nearest_idx] def _find_nearest_pose_id(self, name, pose):", "if not poses: return None distances = map(lambda p: StopReason2PoseNode.calc_distance2d(p,", "= self.create_publisher( PoseStamped, topic_ns + snake_case_stop_reason, 1 ) self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) def", "or implied. 
# See the License for the specific language", "== 0: self.get_logger().warn(\"stop_factor is null\") return for stop_factor in stop_reason.stop_factors:", "None distances = map(lambda p: StopReason2PoseNode.calc_distance2d(p, self_pose), poses) nearest_idx =", "return None dist = StopReason2PoseNode.calc_distance2d(pose, nearest_pose) if dist > th_dist:", "type=str) ns = parser.parse_args(args) stop_reason2pose_node = StopReason2PoseNode(ns) rclpy.spin(stop_reason2pose_node) stop_reason2pose_node.destroy_node() rclpy.shutdown()", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "self._sub_pose = self.create_subscription( StopReasonArray, self._options.topic_name, self._on_stop_reasons, 1 ) self._pub_pose_map =", "for stop_reason in msg.stop_reasons: snake_case_stop_reason = pascal2snake(stop_reason.reason) if len(stop_reason.stop_factors) ==", "return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1) def _get_nearest_pose_id(self, name, pose, th_dist): nearest_pose_ids =", "nearest_pose_id) pose_id = nearest_pose_id else: pose_id = self._register_pose(snake_case_stop_reason, pose.pose) pose_topic_name", "in self._pose_map: self._pose_map[name] = {} pose_id = len(self._pose_map[name]) + 1", "@staticmethod def calc_distance2d(pose1, pose2): p1 = pose1.position p2 = pose2.position", "self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1) def _get_nearest_pose_id(self, name, pose, th_dist): nearest_pose_ids = list(self._find_nearest_pose_id(name,", "import PoseStamped import numpy as np import rclpy from rclpy.node", "pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) def _register_pose(self, name, pose): if name not", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "self._idx_map = {} self._pose_map = {} self._self_pose_listener = SelfPoseListener() self.timer", "# 
Publish nearest stop_reason without number nearest_pose = PoseStamped() nearest_pose.header", "topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" self._pub_pose_map[snake_case_stop_reason] = self.create_publisher( PoseStamped, topic_ns + snake_case_stop_reason,", "= {} pose_id = len(self._pose_map[name]) + 1 self._pose_map[name][pose_id] = pose", "name, pose, pose_id): self._pose_map[name][id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) def _register_pose(self,", "/ 100), self._self_pose_listener.get_current_pose) def _on_stop_reasons(self, msg): for stop_reason in msg.stop_reasons:", "from geometry_msgs.msg import PoseStamped import numpy as np import rclpy", "stop_reason2pose_node = StopReason2PoseNode(ns) rclpy.spin(stop_reason2pose_node) stop_reason2pose_node.destroy_node() rclpy.shutdown() if __name__ == \"__main__\":", "1 ) self._pub_pose_map[pose_topic_name].publish(pose) # Publish nearest stop_reason without number nearest_pose", "limitations under the License. 
import argparse import math import sys", "(the \"License\"); # you may not use this file except", "# you may not use this file except in compliance", "class StopReason2PoseNode(Node): def __init__(self, options): super().__init__(\"stop_reason2pose_node\") self._options = options self._sub_pose", "name, pose): if name not in self._idx_map: self._idx_map[name] = index.Index()", "in stop_reason.stop_factors] if not poses: return None distances = map(lambda", "self._self_pose_listener = SelfPoseListener() self.timer = self.create_timer((1.0 / 100), self._self_pose_listener.get_current_pose) def", "th_dist ) if nearest_pose_id: self._update_pose(snake_case_stop_reason, pose.pose, nearest_pose_id) pose_id = nearest_pose_id", "_register_pose(self, name, pose): if name not in self._pose_map: self._pose_map[name] =", "Node from rtree import index from self_pose_listener import SelfPoseListener class", "# # Unless required by applicable law or agreed to", "import pascal2snake from geometry_msgs.msg import PoseStamped import numpy as np", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Version 2.0 (the \"License\"); # you may not use this", "self._pub_pose_map[pose_topic_name] = self.create_publisher( PoseStamped, topic_ns + pose_topic_name, 1 ) self._pub_pose_map[pose_topic_name].publish(pose)", "= self._get_nearest_pose_id( snake_case_stop_reason, pose.pose, th_dist ) if nearest_pose_id: self._update_pose(snake_case_stop_reason, pose.pose,", "not poses: return None distances = map(lambda p: StopReason2PoseNode.calc_distance2d(p, self_pose),", "pose)) if not nearest_pose_ids: return None nearest_pose_id = nearest_pose_ids[0] nearest_pose", "self._idx_map[name] = index.Index() return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1) def _get_nearest_pose_id(self, name, pose,", "and # limitations under the License. 
import argparse import math", "= self._register_pose(snake_case_stop_reason, pose.pose) pose_topic_name = \"{snake_case_stop_reason}_{pose_id}\".format(**locals()) topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" if", "ns = parser.parse_args(args) stop_reason2pose_node = StopReason2PoseNode(ns) rclpy.spin(stop_reason2pose_node) stop_reason2pose_node.destroy_node() rclpy.shutdown() if", "self.create_timer((1.0 / 100), self._self_pose_listener.get_current_pose) def _on_stop_reasons(self, msg): for stop_reason in", "not in self._idx_map: self._idx_map[name] = index.Index() return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1) def", "implied. # See the License for the specific language governing", "self._idx_map: self._idx_map[name] = index.Index() return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1) def _get_nearest_pose_id(self, name,", "SelfPoseListener() self.timer = self.create_timer((1.0 / 100), self._self_pose_listener.get_current_pose) def _on_stop_reasons(self, msg):", "self.create_publisher( PoseStamped, topic_ns + pose_topic_name, 1 ) self._pub_pose_map[pose_topic_name].publish(pose) # Publish", "under the Apache License, Version 2.0 (the \"License\"); # you", "= {} self._idx_map = {} self._pose_map = {} self._self_pose_listener =", "PoseStamped, topic_ns + snake_case_stop_reason, 1 ) self._pub_pose_map[snake_case_stop_reason].publish(nearest_pose) def _get_nearest_pose_in_array(self, stop_reason,", "= \"{snake_case_stop_reason}_{pose_id}\".format(**locals()) topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" if pose_topic_name not in self._pub_pose_map:", "None return self._pose_map[name][pose_id] def _update_pose(self, name, pose, pose_id): self._pose_map[name][id] =", "self._options.topic_name, self._on_stop_reasons, 1 ) self._pub_pose_map = {} self._idx_map = {}", "p2.x, p1.y - p2.y) @staticmethod def pose2boundingbox(pose): return [pose.position.x, pose.position.y,", "by applicable law 
or agreed to in writing, software #", "th_dist: return None return nearest_pose_id def _get_pose(self, name, pose_id): if", "StopReason2PoseNode.pose2boundingbox(pose)) return pose_id @staticmethod def calc_distance2d(pose1, pose2): p1 = pose1.position", "import StopReasonArray from case_converter import pascal2snake from geometry_msgs.msg import PoseStamped", "_get_nearest_pose_id(self, name, pose, th_dist): nearest_pose_ids = list(self._find_nearest_pose_id(name, pose)) if not", "return None return self._pose_map[name][pose_id] def _update_pose(self, name, pose, pose_id): self._pose_map[name][id]", "poses: return None distances = map(lambda p: StopReason2PoseNode.calc_distance2d(p, self_pose), poses)", "= {} self._self_pose_listener = SelfPoseListener() self.timer = self.create_timer((1.0 / 100),", "main(args): rclpy.init() parser = argparse.ArgumentParser() parser.add_argument(\"topic_name\", type=str) ns = parser.parse_args(args)", "in self._pose_map: return None return self._pose_map[name][pose_id] def _update_pose(self, name, pose,", "PoseStamped() pose.header = msg.header pose.pose = stop_factor.stop_pose # Get nearest", "under the License. 
import argparse import math import sys from", "= nearest_pose_ids[0] nearest_pose = self._get_pose(name, nearest_pose_id) if not nearest_pose: return", "StopReason2PoseNode(Node): def __init__(self, options): super().__init__(\"stop_reason2pose_node\") self._options = options self._sub_pose =", "= [stop_factor.stop_pose for stop_factor in stop_reason.stop_factors] if not poses: return", "nearest_pose_ids: return None nearest_pose_id = nearest_pose_ids[0] nearest_pose = self._get_pose(name, nearest_pose_id)", "as np import rclpy from rclpy.node import Node from rtree", "= nearest_pose_id else: pose_id = self._register_pose(snake_case_stop_reason, pose.pose) pose_topic_name = \"{snake_case_stop_reason}_{pose_id}\".format(**locals())", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "Unless required by applicable law or agreed to in writing,", "msg): for stop_reason in msg.stop_reasons: snake_case_stop_reason = pascal2snake(stop_reason.reason) if len(stop_reason.stop_factors)", "snake_case_stop_reason = pascal2snake(stop_reason.reason) if len(stop_reason.stop_factors) == 0: self.get_logger().warn(\"stop_factor is null\")", "= options self._sub_pose = self.create_subscription( StopReasonArray, self._options.topic_name, self._on_stop_reasons, 1 )", "name not in self._idx_map: self._idx_map[name] = index.Index() return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose), 1)", "= list(self._find_nearest_pose_id(name, pose)) if not nearest_pose_ids: return None nearest_pose_id =", "self._pub_pose_map[pose_topic_name].publish(pose) # Publish nearest stop_reason without number nearest_pose = PoseStamped()", "the specific language governing permissions and # limitations under the", "= StopReason2PoseNode(ns) rclpy.spin(stop_reason2pose_node) stop_reason2pose_node.destroy_node() rclpy.shutdown() if __name__ == \"__main__\": main(sys.argv[1:])", "self._on_stop_reasons, 1 ) self._pub_pose_map = {} self._idx_map = {} self._pose_map", "parser = 
argparse.ArgumentParser() parser.add_argument(\"topic_name\", type=str) ns = parser.parse_args(args) stop_reason2pose_node =", "numpy as np import rclpy from rclpy.node import Node from", "applicable law or agreed to in writing, software # distributed", "return None distances = map(lambda p: StopReason2PoseNode.calc_distance2d(p, self_pose), poses) nearest_idx", "null\") return for stop_factor in stop_reason.stop_factors: pose = PoseStamped() pose.header", "nearest_pose_ids = list(self._find_nearest_pose_id(name, pose)) if not nearest_pose_ids: return None nearest_pose_id", "pose1.position p2 = pose2.position return math.hypot(p1.x - p2.x, p1.y -", "__init__(self, options): super().__init__(\"stop_reason2pose_node\") self._options = options self._sub_pose = self.create_subscription( StopReasonArray,", "= argparse.ArgumentParser() parser.add_argument(\"topic_name\", type=str) ns = parser.parse_args(args) stop_reason2pose_node = StopReason2PoseNode(ns)", "pascal2snake(stop_reason.reason) if len(stop_reason.stop_factors) == 0: self.get_logger().warn(\"stop_factor is null\") return for", "self._pose_map: return None return self._pose_map[name][pose_id] def _update_pose(self, name, pose, pose_id):", "def calc_distance2d(pose1, pose2): p1 = pose1.position p2 = pose2.position return", "in writing, software # distributed under the License is distributed", "self._pose_map[name] = {} pose_id = len(self._pose_map[name]) + 1 self._pose_map[name][pose_id] =", "stop_reason without number nearest_pose = PoseStamped() nearest_pose.header = msg.header nearest_pose.pose", "nearest_pose) if dist > th_dist: return None return nearest_pose_id def", "poses[nearest_idx] def _find_nearest_pose_id(self, name, pose): if name not in self._idx_map:", "self._update_pose(snake_case_stop_reason, pose.pose, nearest_pose_id) pose_id = nearest_pose_id else: pose_id = self._register_pose(snake_case_stop_reason,", "in self._pub_pose_map: self._pub_pose_map[pose_topic_name] = self.create_publisher( 
PoseStamped, topic_ns + pose_topic_name, 1", "snake_case_stop_reason, pose.pose, th_dist ) if nearest_pose_id: self._update_pose(snake_case_stop_reason, pose.pose, nearest_pose_id) pose_id", "nearest_pose.header = msg.header nearest_pose.pose = self._get_nearest_pose_in_array( stop_reason, self._self_pose_listener.self_pose ) if", "@staticmethod def pose2boundingbox(pose): return [pose.position.x, pose.position.y, pose.position.x, pose.position.y] def main(args):", "pose.position.y] def main(args): rclpy.init() parser = argparse.ArgumentParser() parser.add_argument(\"topic_name\", type=str) ns", "math.hypot(p1.x - p2.x, p1.y - p2.y) @staticmethod def pose2boundingbox(pose): return", "else: pose_id = self._register_pose(snake_case_stop_reason, pose.pose) pose_topic_name = \"{snake_case_stop_reason}_{pose_id}\".format(**locals()) topic_ns =", "sys from autoware_planning_msgs.msg import StopReasonArray from case_converter import pascal2snake from", "if name not in self._pose_map: return None return self._pose_map[name][pose_id] def", "_get_pose(self, name, pose_id): if name not in self._pose_map: return None", "nearest stop_reason without number nearest_pose = PoseStamped() nearest_pose.header = msg.header", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "return self._pose_map[name][pose_id] def _update_pose(self, name, pose, pose_id): self._pose_map[name][id] = pose", "stop_factor in stop_reason.stop_factors: pose = PoseStamped() pose.header = msg.header pose.pose", "License, Version 2.0 (the \"License\"); # you may not use", "# You may obtain a copy of the License at", "return poses[nearest_idx] def _find_nearest_pose_id(self, name, pose): if name not in", "for stop_factor in stop_reason.stop_factors] if not poses: return None distances", "self._pose_map[name][pose_id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) return pose_id @staticmethod def calc_distance2d(pose1,", 
"\"{snake_case_stop_reason}_{pose_id}\".format(**locals()) topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" if pose_topic_name not in self._pub_pose_map: self._pub_pose_map[pose_topic_name]", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "stop_factor in stop_reason.stop_factors] if not poses: return None distances =", "= stop_factor.stop_pose # Get nearest pose th_dist = 1.0 nearest_pose_id", "not in self._pub_pose_map: topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" self._pub_pose_map[snake_case_stop_reason] = self.create_publisher( PoseStamped,", "name, pose, th_dist): nearest_pose_ids = list(self._find_nearest_pose_id(name, pose)) if not nearest_pose_ids:", "msg.stop_reasons: snake_case_stop_reason = pascal2snake(stop_reason.reason) if len(stop_reason.stop_factors) == 0: self.get_logger().warn(\"stop_factor is", "pose = PoseStamped() pose.header = msg.header pose.pose = stop_factor.stop_pose #", "pose_topic_name not in self._pub_pose_map: self._pub_pose_map[pose_topic_name] = self.create_publisher( PoseStamped, topic_ns +", "the License for the specific language governing permissions and #", "Apache License, Version 2.0 (the \"License\"); # you may not", "_on_stop_reasons(self, msg): for stop_reason in msg.stop_reasons: snake_case_stop_reason = pascal2snake(stop_reason.reason) if", "either express or implied. 
# See the License for the", "nearest_pose: return None dist = StopReason2PoseNode.calc_distance2d(pose, nearest_pose) if dist >", "0: self.get_logger().warn(\"stop_factor is null\") return for stop_factor in stop_reason.stop_factors: pose", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "map(lambda p: StopReason2PoseNode.calc_distance2d(p, self_pose), poses) nearest_idx = np.argmin(distances) return poses[nearest_idx]", "p1.y - p2.y) @staticmethod def pose2boundingbox(pose): return [pose.position.x, pose.position.y, pose.position.x,", "nearest_pose.pose = self._get_nearest_pose_in_array( stop_reason, self._self_pose_listener.self_pose ) if nearest_pose.pose: if snake_case_stop_reason", "/usr/bin/env python3 # Copyright 2020 Tier IV, Inc. # #", "if name not in self._idx_map: self._idx_map[name] = index.Index() return self._idx_map[name].nearest(StopReason2PoseNode.pose2boundingbox(pose),", "self._pose_map[name][id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) def _register_pose(self, name, pose): if", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "nearest_pose_id else: pose_id = self._register_pose(snake_case_stop_reason, pose.pose) pose_topic_name = \"{snake_case_stop_reason}_{pose_id}\".format(**locals()) topic_ns", ") if nearest_pose_id: self._update_pose(snake_case_stop_reason, pose.pose, nearest_pose_id) pose_id = nearest_pose_id else:", "def _find_nearest_pose_id(self, name, pose): if name not in self._idx_map: self._idx_map[name]", "stop_reason, self._self_pose_listener.self_pose ) if nearest_pose.pose: if snake_case_stop_reason not in self._pub_pose_map:", "list(self._find_nearest_pose_id(name, pose)) if not nearest_pose_ids: return None nearest_pose_id = nearest_pose_ids[0]", "= \"/autoware_debug_tools/stop_reason2pose/\" if pose_topic_name not in self._pub_pose_map: self._pub_pose_map[pose_topic_name] = self.create_publisher(", "- p2.y) 
@staticmethod def pose2boundingbox(pose): return [pose.position.x, pose.position.y, pose.position.x, pose.position.y]", "not in self._pub_pose_map: self._pub_pose_map[pose_topic_name] = self.create_publisher( PoseStamped, topic_ns + pose_topic_name,", "from self_pose_listener import SelfPoseListener class StopReason2PoseNode(Node): def __init__(self, options): super().__init__(\"stop_reason2pose_node\")", "\"License\"); # you may not use this file except in", "_find_nearest_pose_id(self, name, pose): if name not in self._idx_map: self._idx_map[name] =", ") self._pub_pose_map = {} self._idx_map = {} self._pose_map = {}", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "+ pose_topic_name, 1 ) self._pub_pose_map[pose_topic_name].publish(pose) # Publish nearest stop_reason without", "# distributed under the License is distributed on an \"AS", "geometry_msgs.msg import PoseStamped import numpy as np import rclpy from", "# Unless required by applicable law or agreed to in", "StopReason2PoseNode.pose2boundingbox(pose)) def _register_pose(self, name, pose): if name not in self._pose_map:", "pose_topic_name = \"{snake_case_stop_reason}_{pose_id}\".format(**locals()) topic_ns = \"/autoware_debug_tools/stop_reason2pose/\" if pose_topic_name not in", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "pose_id @staticmethod def calc_distance2d(pose1, pose2): p1 = pose1.position p2 =", "pose_id = nearest_pose_id else: pose_id = self._register_pose(snake_case_stop_reason, pose.pose) pose_topic_name =", "You may obtain a copy of the License at #", "Publish nearest stop_reason without number nearest_pose = PoseStamped() nearest_pose.header =", "p2.y) @staticmethod def pose2boundingbox(pose): return [pose.position.x, pose.position.y, pose.position.x, pose.position.y] def", "License. 
import argparse import math import sys from autoware_planning_msgs.msg import", "self.create_subscription( StopReasonArray, self._options.topic_name, self._on_stop_reasons, 1 ) self._pub_pose_map = {} self._idx_map", "1) def _get_nearest_pose_id(self, name, pose, th_dist): nearest_pose_ids = list(self._find_nearest_pose_id(name, pose))", "not in self._pose_map: return None return self._pose_map[name][pose_id] def _update_pose(self, name,", "super().__init__(\"stop_reason2pose_node\") self._options = options self._sub_pose = self.create_subscription( StopReasonArray, self._options.topic_name, self._on_stop_reasons,", "in msg.stop_reasons: snake_case_stop_reason = pascal2snake(stop_reason.reason) if len(stop_reason.stop_factors) == 0: self.get_logger().warn(\"stop_factor", "pose, pose_id): self._pose_map[name][id] = pose self._idx_map[name].insert(pose_id, StopReason2PoseNode.pose2boundingbox(pose)) def _register_pose(self, name,", "self._get_pose(name, nearest_pose_id) if not nearest_pose: return None dist = StopReason2PoseNode.calc_distance2d(pose,", "the Apache License, Version 2.0 (the \"License\"); # you may", "self._self_pose_listener.get_current_pose) def _on_stop_reasons(self, msg): for stop_reason in msg.stop_reasons: snake_case_stop_reason =", "import argparse import math import sys from autoware_planning_msgs.msg import StopReasonArray" ]
[ "won't close such a loop. .. code-block:: python class MyTest(aiounittest.AsyncTestCase):", "one. But if, for some reasons, you want to provide", "import aiounittest class MyTest(aiounittest.AsyncTestCase): async def test_await_async_add(self): ret = await", "python import aiounittest class MyTest(aiounittest.AsyncTestCase): async def test_await_async_add(self): ret =", "well. AsyncTestCase can run: - test of synchronous code (:code:`unittest.TestCase`)", "other test frameworks and runners (eg. `pytest`, `nose`) as well.", "name): attr = super().__getattribute__(name) if name.startswith('test_') and asyncio.iscoroutinefunction(attr): return async_test(attr,", "self.my_loop = asyncio.get_event_loop() return self.my_loop ''' return None def __getattribute__(self,", "self.assertEqual(ret, 6) async def test_await_async_fail(self): with self.assertRaises(Exception) as e: await", "loop for the test It is called before each test,", "The usage is the same as :code:`unittest.TestCase`. It works with", "function. The usage is the same as :code:`unittest.TestCase`. It works", "override it. Note that :code:`AsyncTestCase` won't close such a loop.", "usage is the same as :code:`unittest.TestCase`. It works with other", "default implementation is a receommended one. But if, for some", "MyTest(aiounittest.AsyncTestCase): async def test_await_async_add(self): ret = await async_add(1, 5) self.assertEqual(ret,", "bother about this method, the default implementation is a receommended", "import asyncio async def async_add(x, y, delay=0.1): await asyncio.sleep(delay) return", "as default, leaving asyncio clean. .. note:: In the most", "e: await async_one() ''' def get_event_loop(self): ''' Method provides an", "brand new event loop everytime. After completion, the loop is", "await asyncio.sleep(delay) return x + y async def async_one(): await", "you want to provide your own event loop just override", "as well. 
AsyncTestCase can run: - test of synchronous code", "allows to test asynchoronus function. The usage is the same", "recreated, set as default, leaving asyncio clean. .. note:: In", "= await async_add(1, 5) self.assertEqual(ret, 6) async def test_await_async_fail(self): with", "AsyncTestCase can run: - test of synchronous code (:code:`unittest.TestCase`) -", "works with other test frameworks and runners (eg. `pytest`, `nose`)", "async_one(): await async_nested_exc() async def async_nested_exc(): await asyncio.sleep(0.1) raise Exception('Test')", "async_test class AsyncTestCase(unittest.TestCase): ''' AsyncTestCase allows to test asynchoronus function.", "def test_await_async_fail(self): with self.assertRaises(Exception) as e: await async_one() ''' def", "as :code:`unittest.TestCase`. It works with other test frameworks and runners", "(eg. `pytest`, `nose`) as well. AsyncTestCase can run: - test", "AsyncTestCase allows to test asynchoronus function. The usage is the", "= super().__getattribute__(name) if name.startswith('test_') and asyncio.iscoroutinefunction(attr): return async_test(attr, loop=self.get_event_loop()) else:", "from .helpers import async_test class AsyncTestCase(unittest.TestCase): ''' AsyncTestCase allows to", "completion, the loop is closed and then recreated, set as", "is closed and then recreated, set as default, leaving asyncio", "AsyncTestCase(unittest.TestCase): ''' AsyncTestCase allows to test asynchoronus function. The usage", "def test_await_async_add(self): ret = await async_add(1, 5) self.assertEqual(ret, 6) async", "But if, for some reasons, you want to provide your", "It works with other test frameworks and runners (eg. `pytest`,", "asynchoronus function. The usage is the same as :code:`unittest.TestCase`. It", "called before each test, by default :code:`aiounittest.AsyncTestCase` creates the brand", "import asyncio import unittest from .helpers import async_test class AsyncTestCase(unittest.TestCase):", "is a receommended one. 
But if, for some reasons, you", "from` (Python 3.4) Code to test: .. code-block:: python import", "most common cases you don't have to bother about this", ":code:`async`/:code:`await` (Python 3.5+) and :code:`asyncio.coroutine`/:code:`yield from` (Python 3.4) Code to", "(Python 3.5+) and :code:`asyncio.coroutine`/:code:`yield from` (Python 3.4) Code to test:", "It is called before each test, by default :code:`aiounittest.AsyncTestCase` creates", "Code to test: .. code-block:: python import asyncio async def", "MyTest(aiounittest.AsyncTestCase): def get_event_loop(self): self.my_loop = asyncio.get_event_loop() return self.my_loop ''' return", "code (:code:`unittest.TestCase`) - test of asynchronous code, supports syntax with", "loop everytime. After completion, the loop is closed and then", "set as default, leaving asyncio clean. .. note:: In the", "await async_add(1, 5) self.assertEqual(ret, 6) async def test_await_async_fail(self): with self.assertRaises(Exception)", "own event loop just override it. Note that :code:`AsyncTestCase` won't", "__getattribute__(self, name): attr = super().__getattribute__(name) if name.startswith('test_') and asyncio.iscoroutinefunction(attr): return", "to test: .. code-block:: python import asyncio async def async_add(x,", "None def __getattribute__(self, name): attr = super().__getattribute__(name) if name.startswith('test_') and", "test, by default :code:`aiounittest.AsyncTestCase` creates the brand new event loop", "provides an event loop for the test It is called", "def __getattribute__(self, name): attr = super().__getattribute__(name) if name.startswith('test_') and asyncio.iscoroutinefunction(attr):", "asyncio.sleep(delay) return x + y async def async_one(): await async_nested_exc()", "closed and then recreated, set as default, leaving asyncio clean.", ".. 
code-block:: python import aiounittest class MyTest(aiounittest.AsyncTestCase): async def test_await_async_add(self):", "6) async def test_await_async_fail(self): with self.assertRaises(Exception) as e: await async_one()", "that :code:`AsyncTestCase` won't close such a loop. .. code-block:: python", "python import asyncio async def async_add(x, y, delay=0.1): await asyncio.sleep(delay)", "event loop for the test It is called before each", "''' def get_event_loop(self): ''' Method provides an event loop for", "def async_nested_exc(): await asyncio.sleep(0.1) raise Exception('Test') Tests: .. code-block:: python", "if, for some reasons, you want to provide your own", "loop just override it. Note that :code:`AsyncTestCase` won't close such", "implementation is a receommended one. But if, for some reasons,", "y, delay=0.1): await asyncio.sleep(delay) return x + y async def", "event loop everytime. After completion, the loop is closed and", "about this method, the default implementation is a receommended one.", "attr = super().__getattribute__(name) if name.startswith('test_') and asyncio.iscoroutinefunction(attr): return async_test(attr, loop=self.get_event_loop())", "is called before each test, by default :code:`aiounittest.AsyncTestCase` creates the", "asynchronous code, supports syntax with :code:`async`/:code:`await` (Python 3.5+) and :code:`asyncio.coroutine`/:code:`yield", "is the same as :code:`unittest.TestCase`. It works with other test", "test of synchronous code (:code:`unittest.TestCase`) - test of asynchronous code,", "class MyTest(aiounittest.AsyncTestCase): async def test_await_async_add(self): ret = await async_add(1, 5)", "a receommended one. But if, for some reasons, you want", "= asyncio.get_event_loop() return self.my_loop ''' return None def __getattribute__(self, name):", "await async_nested_exc() async def async_nested_exc(): await asyncio.sleep(0.1) raise Exception('Test') Tests:", "Exception('Test') Tests: .. 
code-block:: python import aiounittest class MyTest(aiounittest.AsyncTestCase): async", "async def async_one(): await async_nested_exc() async def async_nested_exc(): await asyncio.sleep(0.1)", "loop is closed and then recreated, set as default, leaving", "async_add(1, 5) self.assertEqual(ret, 6) async def test_await_async_fail(self): with self.assertRaises(Exception) as", "can run: - test of synchronous code (:code:`unittest.TestCase`) - test", "async def test_await_async_fail(self): with self.assertRaises(Exception) as e: await async_one() '''", "get_event_loop(self): ''' Method provides an event loop for the test", "raise Exception('Test') Tests: .. code-block:: python import aiounittest class MyTest(aiounittest.AsyncTestCase):", ":code:`aiounittest.AsyncTestCase` creates the brand new event loop everytime. After completion,", "to bother about this method, the default implementation is a", "your own event loop just override it. Note that :code:`AsyncTestCase`", "run: - test of synchronous code (:code:`unittest.TestCase`) - test of", "it. Note that :code:`AsyncTestCase` won't close such a loop. ..", "python class MyTest(aiounittest.AsyncTestCase): def get_event_loop(self): self.my_loop = asyncio.get_event_loop() return self.my_loop", "unittest from .helpers import async_test class AsyncTestCase(unittest.TestCase): ''' AsyncTestCase allows", "you don't have to bother about this method, the default", "import unittest from .helpers import async_test class AsyncTestCase(unittest.TestCase): ''' AsyncTestCase", "to provide your own event loop just override it. 
Note", "if name.startswith('test_') and asyncio.iscoroutinefunction(attr): return async_test(attr, loop=self.get_event_loop()) else: return attr", "self.assertRaises(Exception) as e: await async_one() ''' def get_event_loop(self): ''' Method", "of asynchronous code, supports syntax with :code:`async`/:code:`await` (Python 3.5+) and", "code-block:: python import asyncio async def async_add(x, y, delay=0.1): await", "test_await_async_add(self): ret = await async_add(1, 5) self.assertEqual(ret, 6) async def", "and then recreated, set as default, leaving asyncio clean. ..", "3.4) Code to test: .. code-block:: python import asyncio async", "def get_event_loop(self): self.my_loop = asyncio.get_event_loop() return self.my_loop ''' return None", "don't have to bother about this method, the default implementation", "super().__getattribute__(name) if name.startswith('test_') and asyncio.iscoroutinefunction(attr): return async_test(attr, loop=self.get_event_loop()) else: return", "await async_one() ''' def get_event_loop(self): ''' Method provides an event", "creates the brand new event loop everytime. After completion, the", "by default :code:`aiounittest.AsyncTestCase` creates the brand new event loop everytime.", "import async_test class AsyncTestCase(unittest.TestCase): ''' AsyncTestCase allows to test asynchoronus", "asyncio import unittest from .helpers import async_test class AsyncTestCase(unittest.TestCase): '''", "def get_event_loop(self): ''' Method provides an event loop for the", "- test of asynchronous code, supports syntax with :code:`async`/:code:`await` (Python", "(Python 3.4) Code to test: .. 
code-block:: python import asyncio", "5) self.assertEqual(ret, 6) async def test_await_async_fail(self): with self.assertRaises(Exception) as e:", "''' Method provides an event loop for the test It", "the loop is closed and then recreated, set as default,", "async def async_add(x, y, delay=0.1): await asyncio.sleep(delay) return x +", "an event loop for the test It is called before", "asyncio.get_event_loop() return self.my_loop ''' return None def __getattribute__(self, name): attr", "+ y async def async_one(): await async_nested_exc() async def async_nested_exc():", "async_add(x, y, delay=0.1): await asyncio.sleep(delay) return x + y async", "close such a loop. .. code-block:: python class MyTest(aiounittest.AsyncTestCase): def", "supports syntax with :code:`async`/:code:`await` (Python 3.5+) and :code:`asyncio.coroutine`/:code:`yield from` (Python", ".. code-block:: python class MyTest(aiounittest.AsyncTestCase): def get_event_loop(self): self.my_loop = asyncio.get_event_loop()", "`nose`) as well. AsyncTestCase can run: - test of synchronous", "with :code:`async`/:code:`await` (Python 3.5+) and :code:`asyncio.coroutine`/:code:`yield from` (Python 3.4) Code", "Tests: .. code-block:: python import aiounittest class MyTest(aiounittest.AsyncTestCase): async def", "and runners (eg. `pytest`, `nose`) as well. AsyncTestCase can run:", "code, supports syntax with :code:`async`/:code:`await` (Python 3.5+) and :code:`asyncio.coroutine`/:code:`yield from`", "def async_one(): await async_nested_exc() async def async_nested_exc(): await asyncio.sleep(0.1) raise", "ret = await async_add(1, 5) self.assertEqual(ret, 6) async def test_await_async_fail(self):", "async_one() ''' def get_event_loop(self): ''' Method provides an event loop", "provide your own event loop just override it. 
Note that", "class MyTest(aiounittest.AsyncTestCase): def get_event_loop(self): self.my_loop = asyncio.get_event_loop() return self.my_loop '''", "for some reasons, you want to provide your own event", "return self.my_loop ''' return None def __getattribute__(self, name): attr =", "return x + y async def async_one(): await async_nested_exc() async", "test asynchoronus function. The usage is the same as :code:`unittest.TestCase`.", "runners (eg. `pytest`, `nose`) as well. AsyncTestCase can run: -", "async_nested_exc(): await asyncio.sleep(0.1) raise Exception('Test') Tests: .. code-block:: python import", "asyncio.sleep(0.1) raise Exception('Test') Tests: .. code-block:: python import aiounittest class", "Note that :code:`AsyncTestCase` won't close such a loop. .. code-block::", "and :code:`asyncio.coroutine`/:code:`yield from` (Python 3.4) Code to test: .. code-block::", "''' AsyncTestCase allows to test asynchoronus function. The usage is", ".. code-block:: python import asyncio async def async_add(x, y, delay=0.1):", "of synchronous code (:code:`unittest.TestCase`) - test of asynchronous code, supports", "cases you don't have to bother about this method, the", "this method, the default implementation is a receommended one. But", "aiounittest class MyTest(aiounittest.AsyncTestCase): async def test_await_async_add(self): ret = await async_add(1,", "- test of synchronous code (:code:`unittest.TestCase`) - test of asynchronous", "(:code:`unittest.TestCase`) - test of asynchronous code, supports syntax with :code:`async`/:code:`await`", "default, leaving asyncio clean. .. note:: In the most common", "just override it. Note that :code:`AsyncTestCase` won't close such a", "have to bother about this method, the default implementation is", "the test It is called before each test, by default", "with self.assertRaises(Exception) as e: await async_one() ''' def get_event_loop(self): '''", "reasons, you want to provide your own event loop just", "loop. .. 
code-block:: python class MyTest(aiounittest.AsyncTestCase): def get_event_loop(self): self.my_loop =", "3.5+) and :code:`asyncio.coroutine`/:code:`yield from` (Python 3.4) Code to test: ..", "y async def async_one(): await async_nested_exc() async def async_nested_exc(): await", "Method provides an event loop for the test It is", "some reasons, you want to provide your own event loop", "before each test, by default :code:`aiounittest.AsyncTestCase` creates the brand new", ".helpers import async_test class AsyncTestCase(unittest.TestCase): ''' AsyncTestCase allows to test", ":code:`unittest.TestCase`. It works with other test frameworks and runners (eg.", "a loop. .. code-block:: python class MyTest(aiounittest.AsyncTestCase): def get_event_loop(self): self.my_loop", "receommended one. But if, for some reasons, you want to", "each test, by default :code:`aiounittest.AsyncTestCase` creates the brand new event", "test frameworks and runners (eg. `pytest`, `nose`) as well. AsyncTestCase", "x + y async def async_one(): await async_nested_exc() async def", "as e: await async_one() ''' def get_event_loop(self): ''' Method provides", "same as :code:`unittest.TestCase`. It works with other test frameworks and", "test It is called before each test, by default :code:`aiounittest.AsyncTestCase`", "class AsyncTestCase(unittest.TestCase): ''' AsyncTestCase allows to test asynchoronus function. The", "def async_add(x, y, delay=0.1): await asyncio.sleep(delay) return x + y", "clean. .. note:: In the most common cases you don't", "delay=0.1): await asyncio.sleep(delay) return x + y async def async_one():", "self.my_loop ''' return None def __getattribute__(self, name): attr = super().__getattribute__(name)", "such a loop. .. code-block:: python class MyTest(aiounittest.AsyncTestCase): def get_event_loop(self):", "want to provide your own event loop just override it.", "the same as :code:`unittest.TestCase`. 
It works with other test frameworks", "syntax with :code:`async`/:code:`await` (Python 3.5+) and :code:`asyncio.coroutine`/:code:`yield from` (Python 3.4)", "get_event_loop(self): self.my_loop = asyncio.get_event_loop() return self.my_loop ''' return None def", "default :code:`aiounittest.AsyncTestCase` creates the brand new event loop everytime. After", "test: .. code-block:: python import asyncio async def async_add(x, y,", "new event loop everytime. After completion, the loop is closed", ":code:`asyncio.coroutine`/:code:`yield from` (Python 3.4) Code to test: .. code-block:: python", "await asyncio.sleep(0.1) raise Exception('Test') Tests: .. code-block:: python import aiounittest", "test of asynchronous code, supports syntax with :code:`async`/:code:`await` (Python 3.5+)", "the most common cases you don't have to bother about", "async_nested_exc() async def async_nested_exc(): await asyncio.sleep(0.1) raise Exception('Test') Tests: ..", "''' return None def __getattribute__(self, name): attr = super().__getattribute__(name) if", "common cases you don't have to bother about this method,", ".. note:: In the most common cases you don't have", "code-block:: python import aiounittest class MyTest(aiounittest.AsyncTestCase): async def test_await_async_add(self): ret", "for the test It is called before each test, by", "async def async_nested_exc(): await asyncio.sleep(0.1) raise Exception('Test') Tests: .. code-block::", "return None def __getattribute__(self, name): attr = super().__getattribute__(name) if name.startswith('test_')", "everytime. After completion, the loop is closed and then recreated,", "async def test_await_async_add(self): ret = await async_add(1, 5) self.assertEqual(ret, 6)", "with other test frameworks and runners (eg. `pytest`, `nose`) as", "frameworks and runners (eg. `pytest`, `nose`) as well. 
AsyncTestCase can", "test_await_async_fail(self): with self.assertRaises(Exception) as e: await async_one() ''' def get_event_loop(self):", "asyncio async def async_add(x, y, delay=0.1): await asyncio.sleep(delay) return x", "`pytest`, `nose`) as well. AsyncTestCase can run: - test of", "to test asynchoronus function. The usage is the same as", "synchronous code (:code:`unittest.TestCase`) - test of asynchronous code, supports syntax", "asyncio clean. .. note:: In the most common cases you", "method, the default implementation is a receommended one. But if,", "the brand new event loop everytime. After completion, the loop", ":code:`AsyncTestCase` won't close such a loop. .. code-block:: python class", "After completion, the loop is closed and then recreated, set", "then recreated, set as default, leaving asyncio clean. .. note::", "code-block:: python class MyTest(aiounittest.AsyncTestCase): def get_event_loop(self): self.my_loop = asyncio.get_event_loop() return", "In the most common cases you don't have to bother", "leaving asyncio clean. .. note:: In the most common cases", "the default implementation is a receommended one. But if, for", "event loop just override it. Note that :code:`AsyncTestCase` won't close", "note:: In the most common cases you don't have to" ]
[ "color): t.pendown() t.pensize(1) t.color(color) t.begin_fill() for counter in range(2): t.forward(horizontal)", "t.speed(0) t.setup(988,520) t.goto(494,260) t.pendown() for counter in range(7): t.setheading(-90) rectangle(40,988,'#B22234')", "as t def rectangle(horizontal, vertical, color): t.pendown() t.pensize(1) t.color(color) t.begin_fill()", "oneangle/3.5 bigangle = oneangle - smallangle t.color(color) t.pendown() t.begin_fill() t.penup()", "t.penup() gotoy = 222 t.speed(0) t.setup(988,520) t.goto(494,260) t.pendown() for counter", "oneangle - smallangle t.color(color) t.pendown() t.begin_fill() t.penup() for counter in", "smallangle t.color(color) t.pendown() t.begin_fill() t.penup() for counter in range(points): t.forward(length)", "28 for counter in range(6): star(9,5,'white') t.setheading(0) t.forward(84) t.penup() t.hideturtle()", "rectangle(horizontal, vertical, color): t.pendown() t.pensize(1) t.color(color) t.begin_fill() for counter in", "counter in range(7): t.setheading(-90) rectangle(40,988,'#B22234') t.setheading(-90) t.forward(80) t.penup() t.setheading(0) t.goto(-494,260)", "sumangle/points smallangle = oneangle/3.5 bigangle = oneangle - smallangle t.color(color)", "t.forward(80) t.penup() t.setheading(0) t.goto(-494,260) t.pendown() rectangle(494,280,'#3C3B6E') t.goto(-474,245) for counter in", "range(4): for counter in range(6): star(9,5,'white') t.setheading(0) t.forward(84) t.penup() t.goto(-434,gotoy)", "= gotoy - 28 for counter in range(6): star(9,5,'white') t.setheading(0)", "t.forward(horizontal) t.right(90) t.forward(vertical) t.right(90) t.end_fill() t.penup() def star(length, points, color):", "gotoy - 28 for counter in range(6): star(9,5,'white') t.setheading(0) t.forward(84)", "t.color(color) t.pendown() t.begin_fill() t.penup() for counter in range(points): t.forward(length) t.left(smallangle)", "def star(length, points, color): sumangle = ((points*2)-2) * 180 oneangle", "gotoy = gotoy - 28 for counter in range(6): 
star(9,5,'white')", "t.penup() def star(length, points, color): sumangle = ((points*2)-2) * 180", "t.begin_fill() t.penup() for counter in range(points): t.forward(length) t.left(smallangle) t.forward(length) t.left(bigangle)", "for counter in range(6): star(9,5,'white') t.setheading(0) t.forward(84) t.penup() t.goto(-434,gotoy) gotoy", "in range(points): t.forward(length) t.left(smallangle) t.forward(length) t.left(bigangle) t.end_fill() t.penup() gotoy =", "for counter in range(2): t.forward(horizontal) t.right(90) t.forward(vertical) t.right(90) t.end_fill() t.penup()", "range(5): star(9,5,'white') t.setheading(0) t.forward(84) t.goto(-476,gotoy) gotoy = gotoy - 28", "t.penup() for counter in range(points): t.forward(length) t.left(smallangle) t.forward(length) t.left(bigangle) t.end_fill()", "t.penup() t.setheading(0) t.goto(-494,260) t.pendown() rectangle(494,280,'#3C3B6E') t.goto(-474,245) for counter in range(4):", "- smallangle t.color(color) t.pendown() t.begin_fill() t.penup() for counter in range(points):", "star(9,5,'white') t.setheading(0) t.forward(84) t.penup() t.goto(-434,gotoy) gotoy = gotoy - 28", "= oneangle - smallangle t.color(color) t.pendown() t.begin_fill() t.penup() for counter", "t.pendown() for counter in range(5): star(9,5,'white') t.setheading(0) t.forward(84) t.goto(-476,gotoy) gotoy", "* 180 oneangle = sumangle/points smallangle = oneangle/3.5 bigangle =", "= ((points*2)-2) * 180 oneangle = sumangle/points smallangle = oneangle/3.5", "t.forward(length) t.left(bigangle) t.end_fill() t.penup() gotoy = 222 t.speed(0) t.setup(988,520) t.goto(494,260)", "t.right(90) t.forward(vertical) t.right(90) t.end_fill() t.penup() def star(length, points, color): sumangle", "for counter in range(5): star(9,5,'white') t.setheading(0) t.forward(84) t.goto(-476,gotoy) gotoy =", "t.pendown() rectangle(494,280,'#3C3B6E') t.goto(-474,245) for counter in range(4): for counter in", "t.color(color) t.begin_fill() for counter in range(2): t.forward(horizontal) 
t.right(90) t.forward(vertical) t.right(90)", "rectangle(40,988,'#B22234') t.setheading(-90) t.forward(80) t.penup() t.setheading(0) t.goto(-494,260) t.pendown() rectangle(494,280,'#3C3B6E') t.goto(-474,245) for", "((points*2)-2) * 180 oneangle = sumangle/points smallangle = oneangle/3.5 bigangle", "points, color): sumangle = ((points*2)-2) * 180 oneangle = sumangle/points", "t.end_fill() t.penup() gotoy = 222 t.speed(0) t.setup(988,520) t.goto(494,260) t.pendown() for", "= oneangle/3.5 bigangle = oneangle - smallangle t.color(color) t.pendown() t.begin_fill()", "t.pendown() t.begin_fill() t.penup() for counter in range(points): t.forward(length) t.left(smallangle) t.forward(length)", "t.goto(-494,260) t.pendown() rectangle(494,280,'#3C3B6E') t.goto(-474,245) for counter in range(4): for counter", "t.setheading(-90) t.forward(80) t.penup() t.setheading(0) t.goto(-494,260) t.pendown() rectangle(494,280,'#3C3B6E') t.goto(-474,245) for counter", "for counter in range(4): for counter in range(6): star(9,5,'white') t.setheading(0)", "for counter in range(7): t.setheading(-90) rectangle(40,988,'#B22234') t.setheading(-90) t.forward(80) t.penup() t.setheading(0)", "gotoy = 222 t.speed(0) t.setup(988,520) t.goto(494,260) t.pendown() for counter in", "= sumangle/points smallangle = oneangle/3.5 bigangle = oneangle - smallangle", "counter in range(points): t.forward(length) t.left(smallangle) t.forward(length) t.left(bigangle) t.end_fill() t.penup() gotoy", "t.pendown() t.pensize(1) t.color(color) t.begin_fill() for counter in range(2): t.forward(horizontal) t.right(90)", "star(9,5,'white') t.setheading(0) t.forward(84) t.goto(-476,gotoy) gotoy = gotoy - 28 for", "in range(2): t.forward(horizontal) t.right(90) t.forward(vertical) t.right(90) t.end_fill() t.penup() def star(length,", "t.setheading(0) t.goto(-494,260) t.pendown() rectangle(494,280,'#3C3B6E') t.goto(-474,245) for counter in range(4): for", "counter in range(2): t.forward(horizontal) t.right(90) 
t.forward(vertical) t.right(90) t.end_fill() t.penup() def", "star(length, points, color): sumangle = ((points*2)-2) * 180 oneangle =", "t.goto(494,260) t.pendown() for counter in range(7): t.setheading(-90) rectangle(40,988,'#B22234') t.setheading(-90) t.forward(80)", "counter in range(5): star(9,5,'white') t.setheading(0) t.forward(84) t.goto(-476,gotoy) gotoy = gotoy", "counter in range(4): for counter in range(6): star(9,5,'white') t.setheading(0) t.forward(84)", "in range(6): star(9,5,'white') t.setheading(0) t.forward(84) t.penup() t.goto(-434,gotoy) gotoy = gotoy", "t.setup(988,520) t.goto(494,260) t.pendown() for counter in range(7): t.setheading(-90) rectangle(40,988,'#B22234') t.setheading(-90)", "in range(4): for counter in range(6): star(9,5,'white') t.setheading(0) t.forward(84) t.penup()", "t.forward(vertical) t.right(90) t.end_fill() t.penup() def star(length, points, color): sumangle =", "t.left(bigangle) t.end_fill() t.penup() gotoy = 222 t.speed(0) t.setup(988,520) t.goto(494,260) t.pendown()", "in range(7): t.setheading(-90) rectangle(40,988,'#B22234') t.setheading(-90) t.forward(80) t.penup() t.setheading(0) t.goto(-494,260) t.pendown()", "222 t.speed(0) t.setup(988,520) t.goto(494,260) t.pendown() for counter in range(7): t.setheading(-90)", "t.end_fill() t.penup() def star(length, points, color): sumangle = ((points*2)-2) *", "range(points): t.forward(length) t.left(smallangle) t.forward(length) t.left(bigangle) t.end_fill() t.penup() gotoy = 222", "t.setheading(0) t.forward(84) t.penup() t.goto(-434,gotoy) gotoy = gotoy - 28 t.pendown()", "t def rectangle(horizontal, vertical, color): t.pendown() t.pensize(1) t.color(color) t.begin_fill() for", "def rectangle(horizontal, vertical, color): t.pendown() t.pensize(1) t.color(color) t.begin_fill() for counter", "vertical, color): t.pendown() t.pensize(1) t.color(color) t.begin_fill() for counter in range(2):", "= gotoy - 28 t.pendown() for counter in range(5): star(9,5,'white')", 
"t.goto(-434,gotoy) gotoy = gotoy - 28 t.pendown() for counter in", "t.forward(84) t.penup() t.goto(-434,gotoy) gotoy = gotoy - 28 t.pendown() for", "import turtle as t def rectangle(horizontal, vertical, color): t.pendown() t.pensize(1)", "color): sumangle = ((points*2)-2) * 180 oneangle = sumangle/points smallangle", "smallangle = oneangle/3.5 bigangle = oneangle - smallangle t.color(color) t.pendown()", "for counter in range(points): t.forward(length) t.left(smallangle) t.forward(length) t.left(bigangle) t.end_fill() t.penup()", "sumangle = ((points*2)-2) * 180 oneangle = sumangle/points smallangle =", "- 28 t.pendown() for counter in range(5): star(9,5,'white') t.setheading(0) t.forward(84)", "range(2): t.forward(horizontal) t.right(90) t.forward(vertical) t.right(90) t.end_fill() t.penup() def star(length, points,", "t.penup() t.goto(-434,gotoy) gotoy = gotoy - 28 t.pendown() for counter", "t.right(90) t.end_fill() t.penup() def star(length, points, color): sumangle = ((points*2)-2)", "t.left(smallangle) t.forward(length) t.left(bigangle) t.end_fill() t.penup() gotoy = 222 t.speed(0) t.setup(988,520)", "turtle as t def rectangle(horizontal, vertical, color): t.pendown() t.pensize(1) t.color(color)", "t.goto(-474,245) for counter in range(4): for counter in range(6): star(9,5,'white')", "t.forward(84) t.goto(-476,gotoy) gotoy = gotoy - 28 for counter in", "in range(5): star(9,5,'white') t.setheading(0) t.forward(84) t.goto(-476,gotoy) gotoy = gotoy -", "t.begin_fill() for counter in range(2): t.forward(horizontal) t.right(90) t.forward(vertical) t.right(90) t.end_fill()", "t.setheading(-90) rectangle(40,988,'#B22234') t.setheading(-90) t.forward(80) t.penup() t.setheading(0) t.goto(-494,260) t.pendown() rectangle(494,280,'#3C3B6E') t.goto(-474,245)", "180 oneangle = sumangle/points smallangle = oneangle/3.5 bigangle = oneangle", "<reponame>Code-Master1234/Turtle_Flags_File_Hub<gh_stars>0 import turtle as t def rectangle(horizontal, vertical, color): 
t.pendown()", "counter in range(6): star(9,5,'white') t.setheading(0) t.forward(84) t.penup() t.goto(-434,gotoy) gotoy =", "t.pendown() for counter in range(7): t.setheading(-90) rectangle(40,988,'#B22234') t.setheading(-90) t.forward(80) t.penup()", "t.forward(length) t.left(smallangle) t.forward(length) t.left(bigangle) t.end_fill() t.penup() gotoy = 222 t.speed(0)", "t.goto(-476,gotoy) gotoy = gotoy - 28 for counter in range(6):", "- 28 for counter in range(6): star(9,5,'white') t.setheading(0) t.forward(84) t.penup()", "= 222 t.speed(0) t.setup(988,520) t.goto(494,260) t.pendown() for counter in range(7):", "range(6): star(9,5,'white') t.setheading(0) t.forward(84) t.penup() t.goto(-434,gotoy) gotoy = gotoy -", "t.pensize(1) t.color(color) t.begin_fill() for counter in range(2): t.forward(horizontal) t.right(90) t.forward(vertical)", "gotoy = gotoy - 28 t.pendown() for counter in range(5):", "t.setheading(0) t.forward(84) t.goto(-476,gotoy) gotoy = gotoy - 28 for counter", "bigangle = oneangle - smallangle t.color(color) t.pendown() t.begin_fill() t.penup() for", "range(7): t.setheading(-90) rectangle(40,988,'#B22234') t.setheading(-90) t.forward(80) t.penup() t.setheading(0) t.goto(-494,260) t.pendown() rectangle(494,280,'#3C3B6E')", "gotoy - 28 t.pendown() for counter in range(5): star(9,5,'white') t.setheading(0)", "28 t.pendown() for counter in range(5): star(9,5,'white') t.setheading(0) t.forward(84) t.goto(-476,gotoy)", "oneangle = sumangle/points smallangle = oneangle/3.5 bigangle = oneangle -", "rectangle(494,280,'#3C3B6E') t.goto(-474,245) for counter in range(4): for counter in range(6):" ]
[ "total = start.value zero = False while end: total +=", "8, 9, 10, -19, 10, -18, 20, 25] s3 =", "if total == 0: zero = True start = end", "25] s3 = [2, 3, -5, 10, 10, -5, -5,", "= end.next if not zero and not new: new =", "break end = end.next if not zero and not new:", "root = None while start: end = start.next total =", "== \"__main__\": s1 = [6, -6, 8, 4, -12, 9,", "start.next total = start.value zero = False while end: total", "a linked list, remove consecutive nodes that sums up to", "and not new: new = Node(start.value) root = new elif", "linked list, remove consecutive nodes that sums up to zero", "# https://www.careercup.com/question?id=5717797377146880 from util import * def remove_zero_sum(head): start =", "= None root = None while start: end = start.next", "-12, 9, 8, -8] s2 = [4, 6 - 10,", "0: zero = True start = end break end =", "consecutive nodes that sums up to zero # https://www.careercup.com/question?id=5717797377146880 from", "5, -5] samples = [s1,s2,s3] for sample in samples: head", "# Given a linked list, remove consecutive nodes that sums", "10, -18, 20, 25] s3 = [2, 3, -5, 10,", "= start.next return root if __name__ == \"__main__\": s1 =", "-6, 8, 4, -12, 9, 8, -8] s2 = [4,", "= new elif not zero and new: new.next = Node(start.value)", "util import * def remove_zero_sum(head): start = head new =", "-5] samples = [s1,s2,s3] for sample in samples: head =", "zero = False while end: total += end.value if total", "-5, 20, 5, -5] samples = [s1,s2,s3] for sample in", "= [6, -6, 8, 4, -12, 9, 8, -8] s2", "elif not zero and new: new.next = Node(start.value) start =", "3, -5, 10, 10, -5, -5, 20, 5, -5] samples", "None root = None while start: end = start.next total", "that sums up to zero # https://www.careercup.com/question?id=5717797377146880 from util import", "start.next return root if __name__ == \"__main__\": s1 = [6,", "= True start = end break end = end.next if", "end = end.next if not zero and not new: new", "not new: new = Node(start.value) 
root = new elif not", "Node(start.value) start = start.next return root if __name__ == \"__main__\":", "s1 = [6, -6, 8, 4, -12, 9, 8, -8]", "-5, -5, 20, 5, -5] samples = [s1,s2,s3] for sample", "def remove_zero_sum(head): start = head new = None root =", "s3 = [2, 3, -5, 10, 10, -5, -5, 20,", "-5, 10, 10, -5, -5, 20, 5, -5] samples =", "9, 8, -8] s2 = [4, 6 - 10, 8,", "8, -8] s2 = [4, 6 - 10, 8, 9,", "False while end: total += end.value if total == 0:", "import * def remove_zero_sum(head): start = head new = None", "= Node(start.value) root = new elif not zero and new:", "= end break end = end.next if not zero and", "-19, 10, -18, 20, 25] s3 = [2, 3, -5,", "= [4, 6 - 10, 8, 9, 10, -19, 10,", "20, 5, -5] samples = [s1,s2,s3] for sample in samples:", "start = end break end = end.next if not zero", "= Node(start.value) start = start.next return root if __name__ ==", "remove_zero_sum(head): start = head new = None root = None", "new = None root = None while start: end =", "-8] s2 = [4, 6 - 10, 8, 9, 10,", "while start: end = start.next total = start.value zero =", "10, -19, 10, -18, 20, 25] s3 = [2, 3,", "root if __name__ == \"__main__\": s1 = [6, -6, 8,", "if __name__ == \"__main__\": s1 = [6, -6, 8, 4,", "and new: new.next = Node(start.value) start = start.next return root", "from util import * def remove_zero_sum(head): start = head new", "zero # https://www.careercup.com/question?id=5717797377146880 from util import * def remove_zero_sum(head): start", "[s1,s2,s3] for sample in samples: head = create_linked_list(sample) print(linked_list_to_list(head)) result", "4, -12, 9, 8, -8] s2 = [4, 6 -", "= [s1,s2,s3] for sample in samples: head = create_linked_list(sample) print(linked_list_to_list(head))", "nodes that sums up to zero # https://www.careercup.com/question?id=5717797377146880 from util", "True start = end break end = end.next if not", "end = start.next total = start.value zero = False while", "not zero and new: new.next = Node(start.value) start = 
start.next", "start: end = start.next total = start.value zero = False", "total += end.value if total == 0: zero = True", "10, 8, 9, 10, -19, 10, -18, 20, 25] s3", "10, -5, -5, 20, 5, -5] samples = [s1,s2,s3] for", "while end: total += end.value if total == 0: zero", "= None while start: end = start.next total = start.value", "zero = True start = end break end = end.next", "list, remove consecutive nodes that sums up to zero #", "= start.value zero = False while end: total += end.value", "samples = [s1,s2,s3] for sample in samples: head = create_linked_list(sample)", "= False while end: total += end.value if total ==", "in samples: head = create_linked_list(sample) print(linked_list_to_list(head)) result = remove_zero_sum(head) print(linked_list_to_list(result))", "new = Node(start.value) root = new elif not zero and", "new: new = Node(start.value) root = new elif not zero", "root = new elif not zero and new: new.next =", "10, 10, -5, -5, 20, 5, -5] samples = [s1,s2,s3]", "end: total += end.value if total == 0: zero =", "start.value zero = False while end: total += end.value if", "not zero and not new: new = Node(start.value) root =", "+= end.value if total == 0: zero = True start", "== 0: zero = True start = end break end", "-18, 20, 25] s3 = [2, 3, -5, 10, 10,", "zero and not new: new = Node(start.value) root = new", "for sample in samples: head = create_linked_list(sample) print(linked_list_to_list(head)) result =", "Given a linked list, remove consecutive nodes that sums up", "https://www.careercup.com/question?id=5717797377146880 from util import * def remove_zero_sum(head): start = head", "total == 0: zero = True start = end break", "end.next if not zero and not new: new = Node(start.value)", "samples: head = create_linked_list(sample) print(linked_list_to_list(head)) result = remove_zero_sum(head) print(linked_list_to_list(result)) print(\"\\n\")", "<reponame>bryanlimy/technical-interview<filename>linked-list/delete_zero_sum_nodes.py # Given a linked list, 
remove consecutive nodes that", "sample in samples: head = create_linked_list(sample) print(linked_list_to_list(head)) result = remove_zero_sum(head)", "new.next = Node(start.value) start = start.next return root if __name__", "Node(start.value) root = new elif not zero and new: new.next", "remove consecutive nodes that sums up to zero # https://www.careercup.com/question?id=5717797377146880", "None while start: end = start.next total = start.value zero", "20, 25] s3 = [2, 3, -5, 10, 10, -5,", "__name__ == \"__main__\": s1 = [6, -6, 8, 4, -12,", "return root if __name__ == \"__main__\": s1 = [6, -6,", "= [2, 3, -5, 10, 10, -5, -5, 20, 5,", "s2 = [4, 6 - 10, 8, 9, 10, -19,", "= start.next total = start.value zero = False while end:", "if not zero and not new: new = Node(start.value) root", "new elif not zero and new: new.next = Node(start.value) start", "[6, -6, 8, 4, -12, 9, 8, -8] s2 =", "6 - 10, 8, 9, 10, -19, 10, -18, 20,", "end.value if total == 0: zero = True start =", "start = start.next return root if __name__ == \"__main__\": s1", "start = head new = None root = None while", "to zero # https://www.careercup.com/question?id=5717797377146880 from util import * def remove_zero_sum(head):", "8, 4, -12, 9, 8, -8] s2 = [4, 6", "- 10, 8, 9, 10, -19, 10, -18, 20, 25]", "[4, 6 - 10, 8, 9, 10, -19, 10, -18,", "9, 10, -19, 10, -18, 20, 25] s3 = [2,", "up to zero # https://www.careercup.com/question?id=5717797377146880 from util import * def", "head new = None root = None while start: end", "end break end = end.next if not zero and not", "sums up to zero # https://www.careercup.com/question?id=5717797377146880 from util import *", "zero and new: new.next = Node(start.value) start = start.next return", "[2, 3, -5, 10, 10, -5, -5, 20, 5, -5]", "= head new = None root = None while start:", "new: new.next = Node(start.value) start = start.next return root if", "* def remove_zero_sum(head): start = head new = None root", "\"__main__\": s1 = [6, -6, 8, 4, -12, 9, 8," ]
[ "account_name, maps_account_create_params) def list_accounts(client, resource_group_name=None): # Retrieve accounts via subscription", "client_denied_terms = 'You must agree to the License and Privacy", "tags=None, force=None): terms = 'By creating an Azure Maps account,", "resource_group_name=None): # Retrieve accounts via subscription if resource_group_name is None:", "subscription if resource_group_name is None: return client.list_by_subscription() # Retrieve accounts", "passed in. if not force: option = prompt_y_n(hint) if not", "# -------------------------------------------------------------------------------------------- from knack.log import get_logger from knack.prompting import prompt_y_n", "= MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=instance.sku, tags=instance.tags) # Update fields with new parameter", "Statement to create an account.' # Show ToS message to", "# Submit query sku = Sku(name=sku_name) maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=sku,", "from azure.mgmt.maps.models import ( MapsAccountCreateParameters, Sku) ACCOUNT_LOCATION = 'global' logger", "the MIT License. See License.txt in the project root for", "user, if --force parameter is not passed in. if not", "the ' \\ '\\nLicense (https://azure.microsoft.com/support/legal/) and ' \\ '\\nPrivacy Statement", "License and Privacy Statement to create an account.' 
# Show", "option: raise CLIError(client_denied_terms) # Submit query sku = Sku(name=sku_name) maps_account_create_params", "account_name, sku_name='S0', tags=None, force=None): terms = 'By creating an Azure", "fields with new parameter values if sku_name: maps_account_create_params.sku.name = sku_name", "and agree to the ' \\ '\\nLicense (https://azure.microsoft.com/support/legal/) and '", "= 'global' logger = get_logger(__name__) def create_account(client, resource_group_name, account_name, sku_name='S0',", "CLIError(client_denied_terms) # Submit query sku = Sku(name=sku_name) maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION,", "def create_account(client, resource_group_name, account_name, sku_name='S0', tags=None, force=None): terms = 'By", "to the user logger.warning(terms) # Prompt yes/no for the user,", "Sku(name=sku_name) maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=sku, tags=tags) return client.create_or_update(resource_group_name, account_name, maps_account_create_params)", "root for license information. # -------------------------------------------------------------------------------------------- from knack.log import get_logger", "'By creating an Azure Maps account, you agree that you", "sku = Sku(name=sku_name) maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=sku, tags=tags) return client.create_or_update(resource_group_name,", "tags=instance.tags) # Update fields with new parameter values if sku_name:", "prompt_y_n(hint) if not option: raise CLIError(client_denied_terms) # Submit query sku", "create_account(client, resource_group_name, account_name, sku_name='S0', tags=None, force=None): terms = 'By creating", "See License.txt in the project root for license information. 
#", "accounts via subscription if resource_group_name is None: return client.list_by_subscription() #", "to the ' \\ '\\nLicense (https://azure.microsoft.com/support/legal/) and ' \\ '\\nPrivacy", "list_accounts(client, resource_group_name=None): # Retrieve accounts via subscription if resource_group_name is", "Update fields with new parameter values if sku_name: maps_account_create_params.sku.name =", "the user, if --force parameter is not passed in. if", "def generic_update_account(instance, sku_name=None, tags=None): # Pre-populate with old instance maps_account_create_params", "if resource_group_name is None: return client.list_by_subscription() # Retrieve accounts via", "'\\nPrivacy Statement (https://privacy.microsoft.com/privacystatement).' hint = 'Please select.' client_denied_terms = 'You", "yes/no for the user, if --force parameter is not passed", "account, you agree that you have read and agree to", "option = prompt_y_n(hint) if not option: raise CLIError(client_denied_terms) # Submit", "get_logger(__name__) def create_account(client, resource_group_name, account_name, sku_name='S0', tags=None, force=None): terms =", "# Prompt yes/no for the user, if --force parameter is", "azure.mgmt.maps.models import ( MapsAccountCreateParameters, Sku) ACCOUNT_LOCATION = 'global' logger =", "and Privacy Statement to create an account.' # Show ToS", "values if sku_name: maps_account_create_params.sku.name = sku_name if tags: maps_account_create_params.tags =", "\\ '\\nLicense (https://azure.microsoft.com/support/legal/) and ' \\ '\\nPrivacy Statement (https://privacy.microsoft.com/privacystatement).' hint", "parameter is not passed in. 
if not force: option =", "sku_name='S0', tags=None, force=None): terms = 'By creating an Azure Maps", "instance maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=instance.sku, tags=instance.tags) # Update fields with", "tags=tags) return client.create_or_update(resource_group_name, account_name, maps_account_create_params) def list_accounts(client, resource_group_name=None): # Retrieve", "Statement (https://privacy.microsoft.com/privacystatement).' hint = 'Please select.' client_denied_terms = 'You must", "resource_group_name is None: return client.list_by_subscription() # Retrieve accounts via resource", "creating an Azure Maps account, you agree that you have", "= get_logger(__name__) def create_account(client, resource_group_name, account_name, sku_name='S0', tags=None, force=None): terms", "sku_name=None, tags=None): # Pre-populate with old instance maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION,", "import prompt_y_n from knack.util import CLIError from azure.mgmt.maps.models import (", "return client.create_or_update(resource_group_name, account_name, maps_account_create_params) def list_accounts(client, resource_group_name=None): # Retrieve accounts", "# Retrieve accounts via resource group return client.list_by_resource_group(resource_group_name) def generic_update_account(instance,", "from knack.util import CLIError from azure.mgmt.maps.models import ( MapsAccountCreateParameters, Sku)", "ToS message to the user logger.warning(terms) # Prompt yes/no for", "def list_accounts(client, resource_group_name=None): # Retrieve accounts via subscription if resource_group_name", "# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed", "an Azure Maps account, you agree that you have read", "' \\ '\\nLicense (https://azure.microsoft.com/support/legal/) and ' \\ '\\nPrivacy Statement (https://privacy.microsoft.com/privacystatement).'", "MIT License. 
See License.txt in the project root for license", "= prompt_y_n(hint) if not option: raise CLIError(client_denied_terms) # Submit query", "agree that you have read and agree to the '", "with old instance maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=instance.sku, tags=instance.tags) # Update", "that you have read and agree to the ' \\", "reserved. # Licensed under the MIT License. See License.txt in", "is not passed in. if not force: option = prompt_y_n(hint)", "= 'You must agree to the License and Privacy Statement", "the project root for license information. # -------------------------------------------------------------------------------------------- from knack.log", "'global' logger = get_logger(__name__) def create_account(client, resource_group_name, account_name, sku_name='S0', tags=None,", "logger.warning(terms) # Prompt yes/no for the user, if --force parameter", "Retrieve accounts via resource group return client.list_by_resource_group(resource_group_name) def generic_update_account(instance, sku_name=None,", "accounts via resource group return client.list_by_resource_group(resource_group_name) def generic_update_account(instance, sku_name=None, tags=None):", "in. if not force: option = prompt_y_n(hint) if not option:", "the License and Privacy Statement to create an account.' #", "Show ToS message to the user logger.warning(terms) # Prompt yes/no", "= 'By creating an Azure Maps account, you agree that", "client.create_or_update(resource_group_name, account_name, maps_account_create_params) def list_accounts(client, resource_group_name=None): # Retrieve accounts via", "knack.util import CLIError from azure.mgmt.maps.models import ( MapsAccountCreateParameters, Sku) ACCOUNT_LOCATION", "Corporation. All rights reserved. # Licensed under the MIT License.", "# Licensed under the MIT License. 
See License.txt in the", "user logger.warning(terms) # Prompt yes/no for the user, if --force", "terms = 'By creating an Azure Maps account, you agree", "-------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. #", "project root for license information. # -------------------------------------------------------------------------------------------- from knack.log import", "select.' client_denied_terms = 'You must agree to the License and", "' \\ '\\nPrivacy Statement (https://privacy.microsoft.com/privacystatement).' hint = 'Please select.' client_denied_terms", "( MapsAccountCreateParameters, Sku) ACCOUNT_LOCATION = 'global' logger = get_logger(__name__) def", "must agree to the License and Privacy Statement to create", "None: return client.list_by_subscription() # Retrieve accounts via resource group return", "if not option: raise CLIError(client_denied_terms) # Submit query sku =", "generic_update_account(instance, sku_name=None, tags=None): # Pre-populate with old instance maps_account_create_params =", "MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=sku, tags=tags) return client.create_or_update(resource_group_name, account_name, maps_account_create_params) def list_accounts(client, resource_group_name=None):", "License.txt in the project root for license information. # --------------------------------------------------------------------------------------------", "'Please select.' client_denied_terms = 'You must agree to the License", "Privacy Statement to create an account.' # Show ToS message", "message to the user logger.warning(terms) # Prompt yes/no for the", "# Show ToS message to the user logger.warning(terms) # Prompt", "in the project root for license information. 
# -------------------------------------------------------------------------------------------- from", "tags=None): # Pre-populate with old instance maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=instance.sku,", "# Update fields with new parameter values if sku_name: maps_account_create_params.sku.name", "hint = 'Please select.' client_denied_terms = 'You must agree to", "parameter values if sku_name: maps_account_create_params.sku.name = sku_name if tags: maps_account_create_params.tags", "via subscription if resource_group_name is None: return client.list_by_subscription() # Retrieve", "not passed in. if not force: option = prompt_y_n(hint) if", "client.list_by_subscription() # Retrieve accounts via resource group return client.list_by_resource_group(resource_group_name) def", "if sku_name: maps_account_create_params.sku.name = sku_name if tags: maps_account_create_params.tags = tags", "License. See License.txt in the project root for license information.", "# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved.", "via resource group return client.list_by_resource_group(resource_group_name) def generic_update_account(instance, sku_name=None, tags=None): #", "have read and agree to the ' \\ '\\nLicense (https://azure.microsoft.com/support/legal/)", "resource_group_name, account_name, sku_name='S0', tags=None, force=None): terms = 'By creating an", "force: option = prompt_y_n(hint) if not option: raise CLIError(client_denied_terms) #", "import CLIError from azure.mgmt.maps.models import ( MapsAccountCreateParameters, Sku) ACCOUNT_LOCATION =", "if --force parameter is not passed in. 
if not force:", "sku=sku, tags=tags) return client.create_or_update(resource_group_name, account_name, maps_account_create_params) def list_accounts(client, resource_group_name=None): #", "query sku = Sku(name=sku_name) maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=sku, tags=tags) return", "--force parameter is not passed in. if not force: option", "create an account.' # Show ToS message to the user", "Sku) ACCOUNT_LOCATION = 'global' logger = get_logger(__name__) def create_account(client, resource_group_name,", "maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=sku, tags=tags) return client.create_or_update(resource_group_name, account_name, maps_account_create_params) def", "prompt_y_n from knack.util import CLIError from azure.mgmt.maps.models import ( MapsAccountCreateParameters,", "= MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=sku, tags=tags) return client.create_or_update(resource_group_name, account_name, maps_account_create_params) def list_accounts(client,", "to the License and Privacy Statement to create an account.'", "(https://privacy.microsoft.com/privacystatement).' hint = 'Please select.' client_denied_terms = 'You must agree", "knack.prompting import prompt_y_n from knack.util import CLIError from azure.mgmt.maps.models import", "read and agree to the ' \\ '\\nLicense (https://azure.microsoft.com/support/legal/) and", "license information. # -------------------------------------------------------------------------------------------- from knack.log import get_logger from knack.prompting", "information. # -------------------------------------------------------------------------------------------- from knack.log import get_logger from knack.prompting import", "an account.' 
# Show ToS message to the user logger.warning(terms)", "from knack.prompting import prompt_y_n from knack.util import CLIError from azure.mgmt.maps.models", "and ' \\ '\\nPrivacy Statement (https://privacy.microsoft.com/privacystatement).' hint = 'Please select.'", "under the MIT License. See License.txt in the project root", "Retrieve accounts via subscription if resource_group_name is None: return client.list_by_subscription()", "agree to the ' \\ '\\nLicense (https://azure.microsoft.com/support/legal/) and ' \\", "'You must agree to the License and Privacy Statement to", "logger = get_logger(__name__) def create_account(client, resource_group_name, account_name, sku_name='S0', tags=None, force=None):", "(c) Microsoft Corporation. All rights reserved. # Licensed under the", "All rights reserved. # Licensed under the MIT License. See", "maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=instance.sku, tags=instance.tags) # Update fields with new", "Azure Maps account, you agree that you have read and", "if not force: option = prompt_y_n(hint) if not option: raise", "you have read and agree to the ' \\ '\\nLicense", "maps_account_create_params.sku.name = sku_name if tags: maps_account_create_params.tags = tags return maps_account_create_params", "new parameter values if sku_name: maps_account_create_params.sku.name = sku_name if tags:", "= 'Please select.' 
client_denied_terms = 'You must agree to the", "sku=instance.sku, tags=instance.tags) # Update fields with new parameter values if", "return client.list_by_subscription() # Retrieve accounts via resource group return client.list_by_resource_group(resource_group_name)", "client.list_by_resource_group(resource_group_name) def generic_update_account(instance, sku_name=None, tags=None): # Pre-populate with old instance", "Prompt yes/no for the user, if --force parameter is not", "maps_account_create_params) def list_accounts(client, resource_group_name=None): # Retrieve accounts via subscription if", "Microsoft Corporation. All rights reserved. # Licensed under the MIT", "Licensed under the MIT License. See License.txt in the project", "agree to the License and Privacy Statement to create an", "\\ '\\nPrivacy Statement (https://privacy.microsoft.com/privacystatement).' hint = 'Please select.' client_denied_terms =", "you agree that you have read and agree to the", "force=None): terms = 'By creating an Azure Maps account, you", "to create an account.' # Show ToS message to the", "not force: option = prompt_y_n(hint) if not option: raise CLIError(client_denied_terms)", "rights reserved. # Licensed under the MIT License. See License.txt", "is None: return client.list_by_subscription() # Retrieve accounts via resource group", "# Pre-populate with old instance maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=instance.sku, tags=instance.tags)", "for license information. 
# -------------------------------------------------------------------------------------------- from knack.log import get_logger from", "Pre-populate with old instance maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=instance.sku, tags=instance.tags) #", "= Sku(name=sku_name) maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=sku, tags=tags) return client.create_or_update(resource_group_name, account_name,", "import get_logger from knack.prompting import prompt_y_n from knack.util import CLIError", "not option: raise CLIError(client_denied_terms) # Submit query sku = Sku(name=sku_name)", "(https://azure.microsoft.com/support/legal/) and ' \\ '\\nPrivacy Statement (https://privacy.microsoft.com/privacystatement).' hint = 'Please", "for the user, if --force parameter is not passed in.", "CLIError from azure.mgmt.maps.models import ( MapsAccountCreateParameters, Sku) ACCOUNT_LOCATION = 'global'", "old instance maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=instance.sku, tags=instance.tags) # Update fields", "import ( MapsAccountCreateParameters, Sku) ACCOUNT_LOCATION = 'global' logger = get_logger(__name__)", "knack.log import get_logger from knack.prompting import prompt_y_n from knack.util import", "with new parameter values if sku_name: maps_account_create_params.sku.name = sku_name if", "raise CLIError(client_denied_terms) # Submit query sku = Sku(name=sku_name) maps_account_create_params =", "MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=instance.sku, tags=instance.tags) # Update fields with new parameter values", "account.' 
# Show ToS message to the user logger.warning(terms) #", "# Retrieve accounts via subscription if resource_group_name is None: return", "from knack.log import get_logger from knack.prompting import prompt_y_n from knack.util", "MapsAccountCreateParameters, Sku) ACCOUNT_LOCATION = 'global' logger = get_logger(__name__) def create_account(client,", "resource group return client.list_by_resource_group(resource_group_name) def generic_update_account(instance, sku_name=None, tags=None): # Pre-populate", "ACCOUNT_LOCATION = 'global' logger = get_logger(__name__) def create_account(client, resource_group_name, account_name,", "sku_name: maps_account_create_params.sku.name = sku_name if tags: maps_account_create_params.tags = tags return", "Maps account, you agree that you have read and agree", "-------------------------------------------------------------------------------------------- from knack.log import get_logger from knack.prompting import prompt_y_n from", "Submit query sku = Sku(name=sku_name) maps_account_create_params = MapsAccountCreateParameters(location=ACCOUNT_LOCATION, sku=sku, tags=tags)", "get_logger from knack.prompting import prompt_y_n from knack.util import CLIError from", "group return client.list_by_resource_group(resource_group_name) def generic_update_account(instance, sku_name=None, tags=None): # Pre-populate with", "'\\nLicense (https://azure.microsoft.com/support/legal/) and ' \\ '\\nPrivacy Statement (https://privacy.microsoft.com/privacystatement).' hint =", "Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under", "return client.list_by_resource_group(resource_group_name) def generic_update_account(instance, sku_name=None, tags=None): # Pre-populate with old", "the user logger.warning(terms) # Prompt yes/no for the user, if" ]
[ "no cont feat, seq=50, shuffle=True\") ## B_full_feature_v2 no cont_feat, with", "for loss computation. loss='log_loss', #'log_loss', 'softmax' max_seq_length=50, cont_feat_len=85, use_cont_feat=False, init_item_emb=False,", "pred_file = os.path.join(data_path, r'inter_test.tsv') final_pred_file = os.path.join(data_path, r'final_test.tsv') user_vocab =", "batch normalization is disable else 0.001 epochs=100000, EARLY_STOP=40000, batch_size=400, show_step=5000,", "SUMModel as SeqModel #from recommenders.models.deeprec.models.sequential.nextitnet import NextItNetModel from recommenders.models.deeprec.io.sequential_iterator import", "##5k: 0.8793 ##10k: 0.8884 ##15k: 0.8898 ##20k: 0.8923 ##25k: 0.8908", "with a positive instance for training valid_num_ngs = 9 #", "final_pred_file = os.path.join(data_path, r'final_test.tsv') user_vocab = os.path.join(data_path, r'user_vocab.pkl') item_vocab =", "cost for training is {0:.2f} mins'.format(train_time.interval/60.0)) ### model = model.fit(test_file,", "0.8904 ##70k: 0.8814 ##75k: 0.8896 ##80k: 0.8871 ##85k: 0.8920 ##", "NextItNetIterator print(\"System version: {}\".format(sys.version)) print(\"Tensorflow version: {}\".format(tf.__version__)) yaml_file = '/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml'", "r'final_test_output.txt') train_num_ngs = 9 # number of negative instances with", "print(os.path.abspath(data_path)) ## the path where I enter the cmd #", "enter the cmd # for test train_file = os.path.join(data_path, r'train_instances.txt')", "r'item_vocab.pkl') cate_vocab = os.path.join(data_path, r'category_vocab.pkl') output_file = os.path.join(data_path, r'inter_test_output.txt') submit_file", "data_preprocessing, _create_vocab from recommenders.datasets.download_utils import maybe_download from recommenders.models.deeprec.models.sequential.sli_rec import SLI_RECModel", "recommenders.utils.timer import Timer from recommenders.utils.constants import SEED from 
recommenders.models.deeprec.deeprec_utils import", "r'user_vocab.pkl') item_vocab = os.path.join(data_path, r'item_vocab.pkl') cate_vocab = os.path.join(data_path, r'category_vocab.pkl') output_file", "rather than using our demo Amazon dataset. hparams = prepare_hparams(yaml_file,", "# for test train_file = os.path.join(data_path, r'train_instances.txt') valid_file = os.path.join(data_path,", "recommenders.models.deeprec.models.sequential.gru4rec import GRU4RecModel as SeqModel # from recommenders.models.deeprec.models.sequential.sum import SUMModel", "recommenders.models.deeprec.models.sequential.sum import SUMModel as SeqModel #from recommenders.models.deeprec.models.sequential.nextitnet import NextItNetModel from", "[train_file, valid_file], user_vocab, item_vocab, cate_vocab ) ### NOTE: ### remember", "\"model\", 'best_model')) res_syn = model.run_eval(test_file, num_ngs=9) print(res_syn) model.predict(pred_file, output_file) model.predict(final_pred_file,", "as tf tf.get_logger().setLevel('ERROR') # only show error messages from recommenders.utils.timer", "##5k: 0.8778 ##10k: 0.8827 ##20k: 0.8848 ##25k: 0.8824 ##35k: 0.8878", "## B_v4: 0.8937 print(\"Job:B_full_feature_v2, with BN, no cont feat, seq=50,", "B_v2, epoch=50k, seq=100') ## ASVD: 0.867497 ## GRU: 0.877529 ##", "recommenders.models.deeprec.models.sequential.asvd import A2SVDModel as SeqModel # from recommenders.models.deeprec.models.sequential.caser import CaserModel", "instances for each positive instance for loss computation. 
loss='log_loss', #'log_loss',", "provides the number of negative instances for each positive instance", "error messages from recommenders.utils.timer import Timer from recommenders.utils.constants import SEED", "SeqModel # from recommenders.models.deeprec.models.sequential.caser import CaserModel as SeqModel # from", "= os.path.join(\"tests\", \"resources\", \"deeprec\", \"slirec\") # data_path = '/home/jialia/wsdm/seq_datasets/B_full_feature_v2' data_path", "# set to 0.01 if batch normalization is disable else", "the user_vocab, item_vocab and cate_vocab files, if you are using", "import SUMModel as SeqModel #from recommenders.models.deeprec.models.sequential.nextitnet import NextItNetModel from recommenders.models.deeprec.io.sequential_iterator", "output_file = os.path.join(data_path, r'inter_test_output.txt') submit_file = os.path.join(data_path, r'final_test_output.txt') train_num_ngs =", "import NextItNetModel from recommenders.models.deeprec.io.sequential_iterator import SequentialIterator #from recommenders.models.deeprec.io.nextitnet_iterator import NextItNetIterator", "eval_metric='auc') print('Time cost for training is {0:.2f} mins'.format(train_time.interval/60.0)) ### model", "model = model.fit(train_file, valid_file, valid_num_ngs=9, eval_metric='auc') print('Time cost for training", "os.path.join(data_path, r'inter_test_output.txt') submit_file = os.path.join(data_path, r'final_test_output.txt') train_num_ngs = 9 #", "SeqModel # from recommenders.models.deeprec.models.sequential.gru4rec import GRU4RecModel as SeqModel # from", "item_vocab and cate_vocab files, if you are using your own", "= SEED # Set None for non-deterministic result # data_path", "#from recommenders.models.deeprec.io.nextitnet_iterator import NextItNetIterator print(\"System version: {}\".format(sys.version)) print(\"Tensorflow version: {}\".format(tf.__version__))", "continue training = 20k, seq=50') # print('Job finished. 
B_v2, epoch=50k,", "##85k: 0.8920 ## with shuffle: ##5k: 0.8793 ##10k: 0.8884 ##15k:", "0.8920 ## with shuffle: ##5k: 0.8793 ##10k: 0.8884 ##15k: 0.8898", "use `_create_vocab(train_file, user_vocab, item_vocab, cate_vocab)` to generate the user_vocab, item_vocab", "model.fit(train_file, valid_file, valid_num_ngs=9, eval_metric='auc') print('Time cost for training is {0:.2f}", "##20k: 0.8848 ##25k: 0.8824 ##35k: 0.8878 ##40k: 0.8903 ##45k: 0.8876", "model = model.fit(test_file, test_file, valid_num_ngs=9, eval_metric='auc') ##-- quick test model.load_model(os.path.join(data_path,", "negative instances with a positive instance for training valid_num_ngs =", "= model.fit(train_file, valid_file, valid_num_ngs=9, eval_metric='auc') print('Time cost for training is", ") print(hparams.values) input_creator = SequentialIterator model = SeqModel(hparams, input_creator, seed=RANDOM_SEED)", "##-- quick test model.load_model(os.path.join(data_path, \"model\", 'best_model')) res_syn = model.run_eval(test_file, num_ngs=9)", "with BN ##5k: 0.8778 ##10k: 0.8827 ##20k: 0.8848 ##25k: 0.8824", "= sys.argv[1] print(os.path.abspath(data_path)) ## the path where I enter the", "0.8894 ##65k: 0.8904 ##70k: 0.8814 ##75k: 0.8896 ##80k: 0.8871 ##85k:", "import SLI_RECModel as SeqModel # from recommenders.models.deeprec.models.sequential.asvd import A2SVDModel as", "= os.path.join(data_path, r'user_vocab.pkl') item_vocab = os.path.join(data_path, r'item_vocab.pkl') cate_vocab = os.path.join(data_path,", "## SLi-Rec: 0.892736 ## B_v4: 0.8937 print(\"Job:B_full_feature_v2, with BN, no", "res_syn = model.run_eval(test_file, num_ngs=9) print(res_syn) model.predict(pred_file, output_file) model.predict(final_pred_file, submit_file) #", "True learning_rate=0.001, # set to 0.01 if batch normalization is", "version: {}\".format(sys.version)) print(\"Tensorflow version: {}\".format(tf.__version__)) yaml_file = '/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml' RANDOM_SEED =", 
"#'log_loss', 'softmax' max_seq_length=50, cont_feat_len=85, use_cont_feat=False, init_item_emb=False, shuffle=True ) print(hparams.values) input_creator", "import Timer from recommenders.utils.constants import SEED from recommenders.models.deeprec.deeprec_utils import (", "= '/home/jialia/wsdm/seq_datasets/B_full_feature_v2' data_path = sys.argv[1] print(os.path.abspath(data_path)) ## the path where", "a positive instance for testing _create_vocab( [train_file, valid_file], user_vocab, item_vocab,", "SeqModel # from recommenders.models.deeprec.models.sequential.sum import SUMModel as SeqModel #from recommenders.models.deeprec.models.sequential.nextitnet", "loss computation. loss='log_loss', #'log_loss', 'softmax' max_seq_length=50, cont_feat_len=85, use_cont_feat=False, init_item_emb=False, shuffle=True", "test_file = os.path.join(data_path, r'valid.tsv') pred_file = os.path.join(data_path, r'inter_test.tsv') final_pred_file =", "= 20k, seq=50') # print('Job finished. B_v2, epoch=50k, seq=100') ##", "embed_l2=0., layer_l2=0., enable_BN=True, ##-- True learning_rate=0.001, # set to 0.01", "finished. 
B, continue training = 20k, seq=50') # print('Job finished.", "##45k: 0.8876 ##50k: 0.8925 ##55k: 0.8903 ##60k: 0.8894 ##65k: 0.8904", "prepare_hparams ) from recommenders.datasets.amazon_reviews import download_and_extract, data_preprocessing, _create_vocab from recommenders.datasets.download_utils", "B_v4: 0.8937 print(\"Job:B_full_feature_v2, with BN, no cont feat, seq=50, shuffle=True\")", "recommenders.datasets.amazon_reviews import download_and_extract, data_preprocessing, _create_vocab from recommenders.datasets.download_utils import maybe_download from", "= os.path.join(data_path, r'inter_test_output.txt') submit_file = os.path.join(data_path, r'final_test_output.txt') train_num_ngs = 9", "\"summary/\"), user_vocab=user_vocab, item_vocab=item_vocab, cate_vocab=cate_vocab, need_sample=False, train_num_ngs=train_num_ngs, # provides the number", "print(\"Tensorflow version: {}\".format(tf.__version__)) yaml_file = '/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml' RANDOM_SEED = SEED #", "train_time: model = model.fit(train_file, valid_file, valid_num_ngs=9, eval_metric='auc') print('Time cost for", "user_dropout=False, embed_l2=0., layer_l2=0., enable_BN=True, ##-- True learning_rate=0.001, # set to", "negative instances with a positive instance for testing _create_vocab( [train_file,", "\"model/\"), SUMMARIES_DIR=os.path.join(data_path, \"summary/\"), user_vocab=user_vocab, item_vocab=item_vocab, cate_vocab=cate_vocab, need_sample=False, train_num_ngs=train_num_ngs, # provides", "import SequentialIterator #from recommenders.models.deeprec.io.nextitnet_iterator import NextItNetIterator print(\"System version: {}\".format(sys.version)) print(\"Tensorflow", "is disable else 0.001 epochs=100000, EARLY_STOP=40000, batch_size=400, show_step=5000, MODEL_DIR=os.path.join(data_path, \"model/\"),", "0.8903 ##60k: 0.8894 ##65k: 0.8904 ##70k: 0.8814 ##75k: 0.8896 ##80k:", "user_vocab = os.path.join(data_path, r'user_vocab.pkl') item_vocab = 
os.path.join(data_path, r'item_vocab.pkl') cate_vocab =", "for each positive instance for loss computation. loss='log_loss', #'log_loss', 'softmax'", "model.run_eval(test_file, num_ngs=9) print(res_syn) model.predict(pred_file, output_file) model.predict(final_pred_file, submit_file) # print('Job finished.", "tf.get_logger().setLevel('ERROR') # only show error messages from recommenders.utils.timer import Timer", "B, continue training = 20k, seq=50') # print('Job finished. B_v2,", "0.8793 ##10k: 0.8884 ##15k: 0.8898 ##20k: 0.8923 ##25k: 0.8908 ##30k:", "shuffle=True ) print(hparams.values) input_creator = SequentialIterator model = SeqModel(hparams, input_creator,", "0.8898 ##20k: 0.8923 ##25k: 0.8908 ##30k: 0.8895 ##35k: 0.8888 ##40k:", "a positive instance for validation test_num_ngs = 9 # number", "valid_num_ngs = 9 # number of negative instances with a", "for testing _create_vocab( [train_file, valid_file], user_vocab, item_vocab, cate_vocab ) ###", "instance for validation test_num_ngs = 9 # number of negative", "positive instance for testing _create_vocab( [train_file, valid_file], user_vocab, item_vocab, cate_vocab", "negative instances for each positive instance for loss computation. 
loss='log_loss',", "to use `_create_vocab(train_file, user_vocab, item_vocab, cate_vocab)` to generate the user_vocab,", "need_sample=False, train_num_ngs=train_num_ngs, # provides the number of negative instances for", "SequentialIterator model = SeqModel(hparams, input_creator, seed=RANDOM_SEED) # model.load_model(os.path.join(data_path, \"model_20220118_20k_0.8923\", 'step_20000'))", "##35k: 0.8888 ##40k: 0.8913 ##45k: 0.8909 ##50k: 0.8876 ##65k: 0.8881", "for training is {0:.2f} mins'.format(train_time.interval/60.0)) ### model = model.fit(test_file, test_file,", "os.path.join(data_path, r'final_test.tsv') user_vocab = os.path.join(data_path, r'user_vocab.pkl') item_vocab = os.path.join(data_path, r'item_vocab.pkl')", "Timer from recommenders.utils.constants import SEED from recommenders.models.deeprec.deeprec_utils import ( prepare_hparams", "None for non-deterministic result # data_path = os.path.join(\"tests\", \"resources\", \"deeprec\",", "feat, seq=50, shuffle=True\") ## B_full_feature_v2 no cont_feat, with BN ##5k:", "# print('Job finished. B_v2, epoch=50k, seq=100') ## ASVD: 0.867497 ##", "output_file) model.predict(final_pred_file, submit_file) # print('Job finished. 
B, continue training =", "remember to use `_create_vocab(train_file, user_vocab, item_vocab, cate_vocab)` to generate the", "with shuffle: ##5k: 0.8793 ##10k: 0.8884 ##15k: 0.8898 ##20k: 0.8923", "for test train_file = os.path.join(data_path, r'train_instances.txt') valid_file = os.path.join(data_path, r'valid_instances.txt')", "BN, no cont feat, seq=50, shuffle=True\") ## B_full_feature_v2 no cont_feat,", "SEED # Set None for non-deterministic result # data_path =", "### model = model.fit(test_file, test_file, valid_num_ngs=9, eval_metric='auc') ##-- quick test", "##75k: 0.8896 ##80k: 0.8871 ##85k: 0.8920 ## with shuffle: ##5k:", "_create_vocab( [train_file, valid_file], user_vocab, item_vocab, cate_vocab ) ### NOTE: ###", "as SeqModel # from recommenders.models.deeprec.models.sequential.asvd import A2SVDModel as SeqModel #", "num_ngs=9) print(res_syn) model.predict(pred_file, output_file) model.predict(final_pred_file, submit_file) # print('Job finished. B,", "number of negative instances with a positive instance for testing", "## ASVD: 0.867497 ## GRU: 0.877529 ## SLi-Rec: 0.892736 ##", "import sys import os from tempfile import TemporaryDirectory import numpy", "## B_full_feature_v2 no cont_feat, with BN ##5k: 0.8778 ##10k: 0.8827", "'/home/jialia/wsdm/seq_datasets/B_full_feature_v2' data_path = sys.argv[1] print(os.path.abspath(data_path)) ## the path where I", "number of negative instances with a positive instance for training", "os.path.join(data_path, r'valid.tsv') pred_file = os.path.join(data_path, r'inter_test.tsv') final_pred_file = os.path.join(data_path, r'final_test.tsv')", "generate the user_vocab, item_vocab and cate_vocab files, if you are", "os.path.join(data_path, r'category_vocab.pkl') output_file = os.path.join(data_path, r'inter_test_output.txt') submit_file = os.path.join(data_path, r'final_test_output.txt')", "`_create_vocab(train_file, user_vocab, item_vocab, cate_vocab)` to generate the user_vocab, item_vocab and", "= 
os.path.join(data_path, r'train_instances.txt') valid_file = os.path.join(data_path, r'valid_instances.txt') test_file = os.path.join(data_path,", "MODEL_DIR=os.path.join(data_path, \"model/\"), SUMMARIES_DIR=os.path.join(data_path, \"summary/\"), user_vocab=user_vocab, item_vocab=item_vocab, cate_vocab=cate_vocab, need_sample=False, train_num_ngs=train_num_ngs, #", "# Set None for non-deterministic result # data_path = os.path.join(\"tests\",", "with Timer() as train_time: model = model.fit(train_file, valid_file, valid_num_ngs=9, eval_metric='auc')", "Set None for non-deterministic result # data_path = os.path.join(\"tests\", \"resources\",", "print('Job finished. B_v2, epoch=50k, seq=100') ## ASVD: 0.867497 ## GRU:", "test_num_ngs = 9 # number of negative instances with a", "instances with a positive instance for training valid_num_ngs = 9", "# from recommenders.models.deeprec.models.sequential.gru4rec import GRU4RecModel as SeqModel # from recommenders.models.deeprec.models.sequential.sum", "where I enter the cmd # for test train_file =", "# only show error messages from recommenders.utils.timer import Timer from", "if you are using your own dataset rather than using", "messages from recommenders.utils.timer import Timer from recommenders.utils.constants import SEED from", "import TemporaryDirectory import numpy as np import tensorflow.compat.v1 as tf", "recommenders.models.deeprec.models.sequential.nextitnet import NextItNetModel from recommenders.models.deeprec.io.sequential_iterator import SequentialIterator #from recommenders.models.deeprec.io.nextitnet_iterator import", "model.predict(final_pred_file, submit_file) # print('Job finished. 
B, continue training = 20k,", "{}\".format(tf.__version__)) yaml_file = '/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml' RANDOM_SEED = SEED # Set None", "max_seq_length=50, cont_feat_len=85, use_cont_feat=False, init_item_emb=False, shuffle=True ) print(hparams.values) input_creator = SequentialIterator", "A2SVDModel as SeqModel # from recommenders.models.deeprec.models.sequential.caser import CaserModel as SeqModel", "finished. B_v2, epoch=50k, seq=100') ## ASVD: 0.867497 ## GRU: 0.877529", "\"deeprec\", \"slirec\") # data_path = '/home/jialia/wsdm/seq_datasets/B_full_feature_v2' data_path = sys.argv[1] print(os.path.abspath(data_path))", "= SeqModel(hparams, input_creator, seed=RANDOM_SEED) # model.load_model(os.path.join(data_path, \"model_20220118_20k_0.8923\", 'step_20000')) with Timer()", "tensorflow.compat.v1 as tf tf.get_logger().setLevel('ERROR') # only show error messages from", "eval_metric='auc') ##-- quick test model.load_model(os.path.join(data_path, \"model\", 'best_model')) res_syn = model.run_eval(test_file,", "0.8908 ##30k: 0.8895 ##35k: 0.8888 ##40k: 0.8913 ##45k: 0.8909 ##50k:", "SeqModel(hparams, input_creator, seed=RANDOM_SEED) # model.load_model(os.path.join(data_path, \"model_20220118_20k_0.8923\", 'step_20000')) with Timer() as", "recommenders.models.deeprec.io.nextitnet_iterator import NextItNetIterator print(\"System version: {}\".format(sys.version)) print(\"Tensorflow version: {}\".format(tf.__version__)) yaml_file", "= '/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml' RANDOM_SEED = SEED # Set None for non-deterministic", "loss='log_loss', #'log_loss', 'softmax' max_seq_length=50, cont_feat_len=85, use_cont_feat=False, init_item_emb=False, shuffle=True ) print(hparams.values)", "to generate the user_vocab, item_vocab and cate_vocab files, if you", "cate_vocab = os.path.join(data_path, r'category_vocab.pkl') output_file = os.path.join(data_path, r'inter_test_output.txt') submit_file =", 
"item_vocab, cate_vocab)` to generate the user_vocab, item_vocab and cate_vocab files,", "##50k: 0.8925 ##55k: 0.8903 ##60k: 0.8894 ##65k: 0.8904 ##70k: 0.8814", "cate_vocab files, if you are using your own dataset rather", "0.8824 ##35k: 0.8878 ##40k: 0.8903 ##45k: 0.8876 ##50k: 0.8925 ##55k:", "from recommenders.models.deeprec.models.sequential.sum import SUMModel as SeqModel #from recommenders.models.deeprec.models.sequential.nextitnet import NextItNetModel", "cmd # for test train_file = os.path.join(data_path, r'train_instances.txt') valid_file =", "= SequentialIterator model = SeqModel(hparams, input_creator, seed=RANDOM_SEED) # model.load_model(os.path.join(data_path, \"model_20220118_20k_0.8923\",", "0.8878 ##40k: 0.8903 ##45k: 0.8876 ##50k: 0.8925 ##55k: 0.8903 ##60k:", "##55k: 0.8903 ##60k: 0.8894 ##65k: 0.8904 ##70k: 0.8814 ##75k: 0.8896", "r'train_instances.txt') valid_file = os.path.join(data_path, r'valid_instances.txt') test_file = os.path.join(data_path, r'valid.tsv') pred_file", "### remember to use `_create_vocab(train_file, user_vocab, item_vocab, cate_vocab)` to generate", "= prepare_hparams(yaml_file, # user_dropout=False, embed_l2=0., layer_l2=0., enable_BN=True, ##-- True learning_rate=0.001,", "os.path.join(data_path, r'final_test_output.txt') train_num_ngs = 9 # number of negative instances", "= os.path.join(data_path, r'final_test_output.txt') train_num_ngs = 9 # number of negative", "seq=50, shuffle=True\") ## B_full_feature_v2 no cont_feat, with BN ##5k: 0.8778", "non-deterministic result # data_path = os.path.join(\"tests\", \"resources\", \"deeprec\", \"slirec\") #", "r'valid_instances.txt') test_file = os.path.join(data_path, r'valid.tsv') pred_file = os.path.join(data_path, r'inter_test.tsv') final_pred_file", "instance for testing _create_vocab( [train_file, valid_file], user_vocab, item_vocab, cate_vocab )", "recommenders.models.deeprec.models.sequential.caser import CaserModel as SeqModel # from 
recommenders.models.deeprec.models.sequential.gru4rec import GRU4RecModel", "0.867497 ## GRU: 0.877529 ## SLi-Rec: 0.892736 ## B_v4: 0.8937", "0.8903 ##45k: 0.8876 ##50k: 0.8925 ##55k: 0.8903 ##60k: 0.8894 ##65k:", "import SEED from recommenders.models.deeprec.deeprec_utils import ( prepare_hparams ) from recommenders.datasets.amazon_reviews", "result # data_path = os.path.join(\"tests\", \"resources\", \"deeprec\", \"slirec\") # data_path", "testing _create_vocab( [train_file, valid_file], user_vocab, item_vocab, cate_vocab ) ### NOTE:", "as SeqModel # from recommenders.models.deeprec.models.sequential.caser import CaserModel as SeqModel #", "only show error messages from recommenders.utils.timer import Timer from recommenders.utils.constants", "if batch normalization is disable else 0.001 epochs=100000, EARLY_STOP=40000, batch_size=400,", "import NextItNetIterator print(\"System version: {}\".format(sys.version)) print(\"Tensorflow version: {}\".format(tf.__version__)) yaml_file =", "# from recommenders.models.deeprec.models.sequential.caser import CaserModel as SeqModel # from recommenders.models.deeprec.models.sequential.gru4rec", "disable else 0.001 epochs=100000, EARLY_STOP=40000, batch_size=400, show_step=5000, MODEL_DIR=os.path.join(data_path, \"model/\"), SUMMARIES_DIR=os.path.join(data_path,", "# from recommenders.models.deeprec.models.sequential.sum import SUMModel as SeqModel #from recommenders.models.deeprec.models.sequential.nextitnet import", "### NOTE: ### remember to use `_create_vocab(train_file, user_vocab, item_vocab, cate_vocab)`", "instances with a positive instance for validation test_num_ngs = 9", "import os from tempfile import TemporaryDirectory import numpy as np", "SLI_RECModel as SeqModel # from recommenders.models.deeprec.models.sequential.asvd import A2SVDModel as SeqModel", "from recommenders.datasets.amazon_reviews import download_and_extract, data_preprocessing, _create_vocab from recommenders.datasets.download_utils import 
maybe_download", "dataset rather than using our demo Amazon dataset. hparams =", "model.predict(pred_file, output_file) model.predict(final_pred_file, submit_file) # print('Job finished. B, continue training", "download_and_extract, data_preprocessing, _create_vocab from recommenders.datasets.download_utils import maybe_download from recommenders.models.deeprec.models.sequential.sli_rec import", "you are using your own dataset rather than using our", "I enter the cmd # for test train_file = os.path.join(data_path,", "our demo Amazon dataset. hparams = prepare_hparams(yaml_file, # user_dropout=False, embed_l2=0.,", "0.8871 ##85k: 0.8920 ## with shuffle: ##5k: 0.8793 ##10k: 0.8884", "data_path = os.path.join(\"tests\", \"resources\", \"deeprec\", \"slirec\") # data_path = '/home/jialia/wsdm/seq_datasets/B_full_feature_v2'", "from recommenders.models.deeprec.models.sequential.asvd import A2SVDModel as SeqModel # from recommenders.models.deeprec.models.sequential.caser import", "own dataset rather than using our demo Amazon dataset. hparams", "from recommenders.datasets.download_utils import maybe_download from recommenders.models.deeprec.models.sequential.sli_rec import SLI_RECModel as SeqModel", "= 9 # number of negative instances with a positive", "= model.run_eval(test_file, num_ngs=9) print(res_syn) model.predict(pred_file, output_file) model.predict(final_pred_file, submit_file) # print('Job", "show error messages from recommenders.utils.timer import Timer from recommenders.utils.constants import", "# number of negative instances with a positive instance for", "as np import tensorflow.compat.v1 as tf tf.get_logger().setLevel('ERROR') # only show", "validation test_num_ngs = 9 # number of negative instances with", "computation. 
loss='log_loss', #'log_loss', 'softmax' max_seq_length=50, cont_feat_len=85, use_cont_feat=False, init_item_emb=False, shuffle=True )", "import GRU4RecModel as SeqModel # from recommenders.models.deeprec.models.sequential.sum import SUMModel as", "import CaserModel as SeqModel # from recommenders.models.deeprec.models.sequential.gru4rec import GRU4RecModel as", "and cate_vocab files, if you are using your own dataset", "#from recommenders.models.deeprec.models.sequential.nextitnet import NextItNetModel from recommenders.models.deeprec.io.sequential_iterator import SequentialIterator #from recommenders.models.deeprec.io.nextitnet_iterator", "##30k: 0.8895 ##35k: 0.8888 ##40k: 0.8913 ##45k: 0.8909 ##50k: 0.8876", "##25k: 0.8824 ##35k: 0.8878 ##40k: 0.8903 ##45k: 0.8876 ##50k: 0.8925", "version: {}\".format(tf.__version__)) yaml_file = '/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml' RANDOM_SEED = SEED # Set", "from recommenders.utils.constants import SEED from recommenders.models.deeprec.deeprec_utils import ( prepare_hparams )", "valid_num_ngs=9, eval_metric='auc') ##-- quick test model.load_model(os.path.join(data_path, \"model\", 'best_model')) res_syn =", "9 # number of negative instances with a positive instance", "model.load_model(os.path.join(data_path, \"model_20220118_20k_0.8923\", 'step_20000')) with Timer() as train_time: model = model.fit(train_file,", "Timer() as train_time: model = model.fit(train_file, valid_file, valid_num_ngs=9, eval_metric='auc') print('Time", "seed=RANDOM_SEED) # model.load_model(os.path.join(data_path, \"model_20220118_20k_0.8923\", 'step_20000')) with Timer() as train_time: model", "## GRU: 0.877529 ## SLi-Rec: 0.892736 ## B_v4: 0.8937 print(\"Job:B_full_feature_v2,", "SLi-Rec: 0.892736 ## B_v4: 0.8937 print(\"Job:B_full_feature_v2, with BN, no cont", "##15k: 0.8898 ##20k: 0.8923 ##25k: 0.8908 ##30k: 0.8895 ##35k: 0.8888", "os from tempfile import TemporaryDirectory import numpy as np import", 
"batch_size=400, show_step=5000, MODEL_DIR=os.path.join(data_path, \"model/\"), SUMMARIES_DIR=os.path.join(data_path, \"summary/\"), user_vocab=user_vocab, item_vocab=item_vocab, cate_vocab=cate_vocab, need_sample=False,", "input_creator, seed=RANDOM_SEED) # model.load_model(os.path.join(data_path, \"model_20220118_20k_0.8923\", 'step_20000')) with Timer() as train_time:", "tf tf.get_logger().setLevel('ERROR') # only show error messages from recommenders.utils.timer import", "for validation test_num_ngs = 9 # number of negative instances", "using your own dataset rather than using our demo Amazon", "##65k: 0.8904 ##70k: 0.8814 ##75k: 0.8896 ##80k: 0.8871 ##85k: 0.8920", "0.892736 ## B_v4: 0.8937 print(\"Job:B_full_feature_v2, with BN, no cont feat,", "item_vocab, cate_vocab ) ### NOTE: ### remember to use `_create_vocab(train_file,", "0.8848 ##25k: 0.8824 ##35k: 0.8878 ##40k: 0.8903 ##45k: 0.8876 ##50k:", "demo Amazon dataset. hparams = prepare_hparams(yaml_file, # user_dropout=False, embed_l2=0., layer_l2=0.,", "from recommenders.utils.timer import Timer from recommenders.utils.constants import SEED from recommenders.models.deeprec.deeprec_utils", "test model.load_model(os.path.join(data_path, \"model\", 'best_model')) res_syn = model.run_eval(test_file, num_ngs=9) print(res_syn) model.predict(pred_file,", "##25k: 0.8908 ##30k: 0.8895 ##35k: 0.8888 ##40k: 0.8913 ##45k: 0.8909", "##60k: 0.8894 ##65k: 0.8904 ##70k: 0.8814 ##75k: 0.8896 ##80k: 0.8871", "## with shuffle: ##5k: 0.8793 ##10k: 0.8884 ##15k: 0.8898 ##20k:", "sys.argv[1] print(os.path.abspath(data_path)) ## the path where I enter the cmd", "Amazon dataset. 
hparams = prepare_hparams(yaml_file, # user_dropout=False, embed_l2=0., layer_l2=0., enable_BN=True,", "the number of negative instances for each positive instance for", "positive instance for validation test_num_ngs = 9 # number of", "of negative instances with a positive instance for training valid_num_ngs", "SEED from recommenders.models.deeprec.deeprec_utils import ( prepare_hparams ) from recommenders.datasets.amazon_reviews import", "0.8895 ##35k: 0.8888 ##40k: 0.8913 ##45k: 0.8909 ##50k: 0.8876 ##65k:", "valid_file = os.path.join(data_path, r'valid_instances.txt') test_file = os.path.join(data_path, r'valid.tsv') pred_file =", "= model.fit(test_file, test_file, valid_num_ngs=9, eval_metric='auc') ##-- quick test model.load_model(os.path.join(data_path, \"model\",", "training = 20k, seq=50') # print('Job finished. B_v2, epoch=50k, seq=100')", "ASVD: 0.867497 ## GRU: 0.877529 ## SLi-Rec: 0.892736 ## B_v4:", "of negative instances with a positive instance for validation test_num_ngs", "number of negative instances for each positive instance for loss", "valid_num_ngs=9, eval_metric='auc') print('Time cost for training is {0:.2f} mins'.format(train_time.interval/60.0)) ###", "to 0.01 if batch normalization is disable else 0.001 epochs=100000,", "0.8925 ##55k: 0.8903 ##60k: 0.8894 ##65k: 0.8904 ##70k: 0.8814 ##75k:", "# data_path = os.path.join(\"tests\", \"resources\", \"deeprec\", \"slirec\") # data_path =", "hparams = prepare_hparams(yaml_file, # user_dropout=False, embed_l2=0., layer_l2=0., enable_BN=True, ##-- True", "print('Job finished. 
B, continue training = 20k, seq=50') # print('Job", "# data_path = '/home/jialia/wsdm/seq_datasets/B_full_feature_v2' data_path = sys.argv[1] print(os.path.abspath(data_path)) ## the", "show_step=5000, MODEL_DIR=os.path.join(data_path, \"model/\"), SUMMARIES_DIR=os.path.join(data_path, \"summary/\"), user_vocab=user_vocab, item_vocab=item_vocab, cate_vocab=cate_vocab, need_sample=False, train_num_ngs=train_num_ngs,", "number of negative instances with a positive instance for validation", "'softmax' max_seq_length=50, cont_feat_len=85, use_cont_feat=False, init_item_emb=False, shuffle=True ) print(hparams.values) input_creator =", "import maybe_download from recommenders.models.deeprec.models.sequential.sli_rec import SLI_RECModel as SeqModel # from", ") ### NOTE: ### remember to use `_create_vocab(train_file, user_vocab, item_vocab,", "_create_vocab from recommenders.datasets.download_utils import maybe_download from recommenders.models.deeprec.models.sequential.sli_rec import SLI_RECModel as", "layer_l2=0., enable_BN=True, ##-- True learning_rate=0.001, # set to 0.01 if", "0.8778 ##10k: 0.8827 ##20k: 0.8848 ##25k: 0.8824 ##35k: 0.8878 ##40k:", "0.01 if batch normalization is disable else 0.001 epochs=100000, EARLY_STOP=40000,", "\"slirec\") # data_path = '/home/jialia/wsdm/seq_datasets/B_full_feature_v2' data_path = sys.argv[1] print(os.path.abspath(data_path)) ##", "0.001 epochs=100000, EARLY_STOP=40000, batch_size=400, show_step=5000, MODEL_DIR=os.path.join(data_path, \"model/\"), SUMMARIES_DIR=os.path.join(data_path, \"summary/\"), user_vocab=user_vocab,", "user_vocab, item_vocab and cate_vocab files, if you are using your", "maybe_download from recommenders.models.deeprec.models.sequential.sli_rec import SLI_RECModel as SeqModel # from recommenders.models.deeprec.models.sequential.asvd", "print(\"System version: {}\".format(sys.version)) print(\"Tensorflow version: {}\".format(tf.__version__)) yaml_file = 
'/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml' RANDOM_SEED", "r'inter_test_output.txt') submit_file = os.path.join(data_path, r'final_test_output.txt') train_num_ngs = 9 # number", "submit_file) # print('Job finished. B, continue training = 20k, seq=50')", "yaml_file = '/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml' RANDOM_SEED = SEED # Set None for", "os.path.join(data_path, r'inter_test.tsv') final_pred_file = os.path.join(data_path, r'final_test.tsv') user_vocab = os.path.join(data_path, r'user_vocab.pkl')", "positive instance for training valid_num_ngs = 9 # number of", "of negative instances with a positive instance for testing _create_vocab(", "tempfile import TemporaryDirectory import numpy as np import tensorflow.compat.v1 as", "recommenders.models.deeprec.io.sequential_iterator import SequentialIterator #from recommenders.models.deeprec.io.nextitnet_iterator import NextItNetIterator print(\"System version: {}\".format(sys.version))", "##20k: 0.8923 ##25k: 0.8908 ##30k: 0.8895 ##35k: 0.8888 ##40k: 0.8913", "the path where I enter the cmd # for test", "# user_dropout=False, embed_l2=0., layer_l2=0., enable_BN=True, ##-- True learning_rate=0.001, # set", "use_cont_feat=False, init_item_emb=False, shuffle=True ) print(hparams.values) input_creator = SequentialIterator model =", "import A2SVDModel as SeqModel # from recommenders.models.deeprec.models.sequential.caser import CaserModel as", "quick test model.load_model(os.path.join(data_path, \"model\", 'best_model')) res_syn = model.run_eval(test_file, num_ngs=9) print(res_syn)", "0.8884 ##15k: 0.8898 ##20k: 0.8923 ##25k: 0.8908 ##30k: 0.8895 ##35k:", "enable_BN=True, ##-- True learning_rate=0.001, # set to 0.01 if batch", "r'valid.tsv') pred_file = os.path.join(data_path, r'inter_test.tsv') final_pred_file = os.path.join(data_path, r'final_test.tsv') user_vocab", "cont_feat, with BN ##5k: 0.8778 ##10k: 0.8827 ##20k: 0.8848 ##25k:", "epoch=50k, seq=100') ## ASVD: 
0.867497 ## GRU: 0.877529 ## SLi-Rec:", "os.path.join(data_path, r'item_vocab.pkl') cate_vocab = os.path.join(data_path, r'category_vocab.pkl') output_file = os.path.join(data_path, r'inter_test_output.txt')", "with a positive instance for validation test_num_ngs = 9 #", "training is {0:.2f} mins'.format(train_time.interval/60.0)) ### model = model.fit(test_file, test_file, valid_num_ngs=9,", "instances with a positive instance for testing _create_vocab( [train_file, valid_file],", "\"model_20220118_20k_0.8923\", 'step_20000')) with Timer() as train_time: model = model.fit(train_file, valid_file,", "r'category_vocab.pkl') output_file = os.path.join(data_path, r'inter_test_output.txt') submit_file = os.path.join(data_path, r'final_test_output.txt') train_num_ngs", "import download_and_extract, data_preprocessing, _create_vocab from recommenders.datasets.download_utils import maybe_download from recommenders.models.deeprec.models.sequential.sli_rec", "os.path.join(data_path, r'valid_instances.txt') test_file = os.path.join(data_path, r'valid.tsv') pred_file = os.path.join(data_path, r'inter_test.tsv')", "import tensorflow.compat.v1 as tf tf.get_logger().setLevel('ERROR') # only show error messages", "recommenders.models.deeprec.models.sequential.sli_rec import SLI_RECModel as SeqModel # from recommenders.models.deeprec.models.sequential.asvd import A2SVDModel", "as SeqModel # from recommenders.models.deeprec.models.sequential.gru4rec import GRU4RecModel as SeqModel #", "recommenders.models.deeprec.deeprec_utils import ( prepare_hparams ) from recommenders.datasets.amazon_reviews import download_and_extract, data_preprocessing,", "as SeqModel #from recommenders.models.deeprec.models.sequential.nextitnet import NextItNetModel from recommenders.models.deeprec.io.sequential_iterator import SequentialIterator", "os.path.join(data_path, r'user_vocab.pkl') item_vocab = os.path.join(data_path, r'item_vocab.pkl') cate_vocab = os.path.join(data_path, r'category_vocab.pkl')", 
"init_item_emb=False, shuffle=True ) print(hparams.values) input_creator = SequentialIterator model = SeqModel(hparams,", "input_creator = SequentialIterator model = SeqModel(hparams, input_creator, seed=RANDOM_SEED) # model.load_model(os.path.join(data_path,", "GRU4RecModel as SeqModel # from recommenders.models.deeprec.models.sequential.sum import SUMModel as SeqModel", "as train_time: model = model.fit(train_file, valid_file, valid_num_ngs=9, eval_metric='auc') print('Time cost", "= os.path.join(data_path, r'valid.tsv') pred_file = os.path.join(data_path, r'inter_test.tsv') final_pred_file = os.path.join(data_path,", "{}\".format(sys.version)) print(\"Tensorflow version: {}\".format(tf.__version__)) yaml_file = '/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml' RANDOM_SEED = SEED", "0.8827 ##20k: 0.8848 ##25k: 0.8824 ##35k: 0.8878 ##40k: 0.8903 ##45k:", "= os.path.join(data_path, r'item_vocab.pkl') cate_vocab = os.path.join(data_path, r'category_vocab.pkl') output_file = os.path.join(data_path,", "normalization is disable else 0.001 epochs=100000, EARLY_STOP=40000, batch_size=400, show_step=5000, MODEL_DIR=os.path.join(data_path,", "files, if you are using your own dataset rather than", "seq=100') ## ASVD: 0.867497 ## GRU: 0.877529 ## SLi-Rec: 0.892736", "user_vocab=user_vocab, item_vocab=item_vocab, cate_vocab=cate_vocab, need_sample=False, train_num_ngs=train_num_ngs, # provides the number of", "r'final_test.tsv') user_vocab = os.path.join(data_path, r'user_vocab.pkl') item_vocab = os.path.join(data_path, r'item_vocab.pkl') cate_vocab", "from recommenders.models.deeprec.deeprec_utils import ( prepare_hparams ) from recommenders.datasets.amazon_reviews import download_and_extract,", "{0:.2f} mins'.format(train_time.interval/60.0)) ### model = model.fit(test_file, test_file, valid_num_ngs=9, eval_metric='auc') ##--", "recommenders.utils.constants import SEED from recommenders.models.deeprec.deeprec_utils import ( prepare_hparams ) from", 
"cate_vocab)` to generate the user_vocab, item_vocab and cate_vocab files, if", "'step_20000')) with Timer() as train_time: model = model.fit(train_file, valid_file, valid_num_ngs=9,", "print(hparams.values) input_creator = SequentialIterator model = SeqModel(hparams, input_creator, seed=RANDOM_SEED) #", "0.8876 ##50k: 0.8925 ##55k: 0.8903 ##60k: 0.8894 ##65k: 0.8904 ##70k:", "data_path = sys.argv[1] print(os.path.abspath(data_path)) ## the path where I enter", "##10k: 0.8884 ##15k: 0.8898 ##20k: 0.8923 ##25k: 0.8908 ##30k: 0.8895", "training valid_num_ngs = 9 # number of negative instances with", "user_vocab, item_vocab, cate_vocab)` to generate the user_vocab, item_vocab and cate_vocab", "no cont_feat, with BN ##5k: 0.8778 ##10k: 0.8827 ##20k: 0.8848", "0.877529 ## SLi-Rec: 0.892736 ## B_v4: 0.8937 print(\"Job:B_full_feature_v2, with BN,", "0.8814 ##75k: 0.8896 ##80k: 0.8871 ##85k: 0.8920 ## with shuffle:", "shuffle=True\") ## B_full_feature_v2 no cont_feat, with BN ##5k: 0.8778 ##10k:", "instance for training valid_num_ngs = 9 # number of negative", "positive instance for loss computation. loss='log_loss', #'log_loss', 'softmax' max_seq_length=50, cont_feat_len=85,", "train_num_ngs = 9 # number of negative instances with a", "as SeqModel # from recommenders.models.deeprec.models.sequential.sum import SUMModel as SeqModel #from", "train_num_ngs=train_num_ngs, # provides the number of negative instances for each", "print(res_syn) model.predict(pred_file, output_file) model.predict(final_pred_file, submit_file) # print('Job finished. B, continue", "# print('Job finished. 
B, continue training = 20k, seq=50') #", "0.8937 print(\"Job:B_full_feature_v2, with BN, no cont feat, seq=50, shuffle=True\") ##", "a positive instance for training valid_num_ngs = 9 # number", "EARLY_STOP=40000, batch_size=400, show_step=5000, MODEL_DIR=os.path.join(data_path, \"model/\"), SUMMARIES_DIR=os.path.join(data_path, \"summary/\"), user_vocab=user_vocab, item_vocab=item_vocab, cate_vocab=cate_vocab,", "RANDOM_SEED = SEED # Set None for non-deterministic result #", "os.path.join(data_path, r'train_instances.txt') valid_file = os.path.join(data_path, r'valid_instances.txt') test_file = os.path.join(data_path, r'valid.tsv')", "GRU: 0.877529 ## SLi-Rec: 0.892736 ## B_v4: 0.8937 print(\"Job:B_full_feature_v2, with", "dataset. hparams = prepare_hparams(yaml_file, # user_dropout=False, embed_l2=0., layer_l2=0., enable_BN=True, ##--", "SeqModel #from recommenders.models.deeprec.models.sequential.nextitnet import NextItNetModel from recommenders.models.deeprec.io.sequential_iterator import SequentialIterator #from", "with a positive instance for testing _create_vocab( [train_file, valid_file], user_vocab,", "##80k: 0.8871 ##85k: 0.8920 ## with shuffle: ##5k: 0.8793 ##10k:", "BN ##5k: 0.8778 ##10k: 0.8827 ##20k: 0.8848 ##25k: 0.8824 ##35k:", "import ( prepare_hparams ) from recommenders.datasets.amazon_reviews import download_and_extract, data_preprocessing, _create_vocab", "0.8923 ##25k: 0.8908 ##30k: 0.8895 ##35k: 0.8888 ##40k: 0.8913 ##45k:", "test_file, valid_num_ngs=9, eval_metric='auc') ##-- quick test model.load_model(os.path.join(data_path, \"model\", 'best_model')) res_syn", "= os.path.join(data_path, r'inter_test.tsv') final_pred_file = os.path.join(data_path, r'final_test.tsv') user_vocab = os.path.join(data_path,", "your own dataset rather than using our demo Amazon dataset.", ") from recommenders.datasets.amazon_reviews import download_and_extract, data_preprocessing, _create_vocab from recommenders.datasets.download_utils import", 
"print(\"Job:B_full_feature_v2, with BN, no cont feat, seq=50, shuffle=True\") ## B_full_feature_v2", "##35k: 0.8878 ##40k: 0.8903 ##45k: 0.8876 ##50k: 0.8925 ##55k: 0.8903", "np import tensorflow.compat.v1 as tf tf.get_logger().setLevel('ERROR') # only show error", "\"resources\", \"deeprec\", \"slirec\") # data_path = '/home/jialia/wsdm/seq_datasets/B_full_feature_v2' data_path = sys.argv[1]", "shuffle: ##5k: 0.8793 ##10k: 0.8884 ##15k: 0.8898 ##20k: 0.8923 ##25k:", "from recommenders.models.deeprec.models.sequential.caser import CaserModel as SeqModel # from recommenders.models.deeprec.models.sequential.gru4rec import", "train_file = os.path.join(data_path, r'train_instances.txt') valid_file = os.path.join(data_path, r'valid_instances.txt') test_file =", "set to 0.01 if batch normalization is disable else 0.001", "prepare_hparams(yaml_file, # user_dropout=False, embed_l2=0., layer_l2=0., enable_BN=True, ##-- True learning_rate=0.001, #", "than using our demo Amazon dataset. hparams = prepare_hparams(yaml_file, #", "submit_file = os.path.join(data_path, r'final_test_output.txt') train_num_ngs = 9 # number of", "recommenders.datasets.download_utils import maybe_download from recommenders.models.deeprec.models.sequential.sli_rec import SLI_RECModel as SeqModel #", "else 0.001 epochs=100000, EARLY_STOP=40000, batch_size=400, show_step=5000, MODEL_DIR=os.path.join(data_path, \"model/\"), SUMMARIES_DIR=os.path.join(data_path, \"summary/\"),", "'/home/jialia/wsdm/src/recommenders/examples/wsdm2022/sli_rec_B.yaml' RANDOM_SEED = SEED # Set None for non-deterministic result", "'best_model')) res_syn = model.run_eval(test_file, num_ngs=9) print(res_syn) model.predict(pred_file, output_file) model.predict(final_pred_file, submit_file)", "cont feat, seq=50, shuffle=True\") ## B_full_feature_v2 no cont_feat, with BN", "print('Time cost for training is {0:.2f} mins'.format(train_time.interval/60.0)) ### model =", "each positive instance for loss computation. 
loss='log_loss', #'log_loss', 'softmax' max_seq_length=50,", "# provides the number of negative instances for each positive", "negative instances with a positive instance for validation test_num_ngs =", "data_path = '/home/jialia/wsdm/seq_datasets/B_full_feature_v2' data_path = sys.argv[1] print(os.path.abspath(data_path)) ## the path", "item_vocab=item_vocab, cate_vocab=cate_vocab, need_sample=False, train_num_ngs=train_num_ngs, # provides the number of negative", "CaserModel as SeqModel # from recommenders.models.deeprec.models.sequential.gru4rec import GRU4RecModel as SeqModel", "with BN, no cont feat, seq=50, shuffle=True\") ## B_full_feature_v2 no", "using our demo Amazon dataset. hparams = prepare_hparams(yaml_file, # user_dropout=False,", "from recommenders.models.deeprec.io.sequential_iterator import SequentialIterator #from recommenders.models.deeprec.io.nextitnet_iterator import NextItNetIterator print(\"System version:", "##-- True learning_rate=0.001, # set to 0.01 if batch normalization", "valid_file, valid_num_ngs=9, eval_metric='auc') print('Time cost for training is {0:.2f} mins'.format(train_time.interval/60.0))", "## the path where I enter the cmd # for", "= os.path.join(data_path, r'category_vocab.pkl') output_file = os.path.join(data_path, r'inter_test_output.txt') submit_file = os.path.join(data_path,", "the cmd # for test train_file = os.path.join(data_path, r'train_instances.txt') valid_file", "instance for loss computation. 
loss='log_loss', #'log_loss', 'softmax' max_seq_length=50, cont_feat_len=85, use_cont_feat=False,", "TemporaryDirectory import numpy as np import tensorflow.compat.v1 as tf tf.get_logger().setLevel('ERROR')", "are using your own dataset rather than using our demo", "= os.path.join(data_path, r'final_test.tsv') user_vocab = os.path.join(data_path, r'user_vocab.pkl') item_vocab = os.path.join(data_path,", "# from recommenders.models.deeprec.models.sequential.asvd import A2SVDModel as SeqModel # from recommenders.models.deeprec.models.sequential.caser", "numpy as np import tensorflow.compat.v1 as tf tf.get_logger().setLevel('ERROR') # only", "from recommenders.models.deeprec.models.sequential.sli_rec import SLI_RECModel as SeqModel # from recommenders.models.deeprec.models.sequential.asvd import", "valid_file], user_vocab, item_vocab, cate_vocab ) ### NOTE: ### remember to", "B_full_feature_v2 no cont_feat, with BN ##5k: 0.8778 ##10k: 0.8827 ##20k:", "model.load_model(os.path.join(data_path, \"model\", 'best_model')) res_syn = model.run_eval(test_file, num_ngs=9) print(res_syn) model.predict(pred_file, output_file)", "cont_feat_len=85, use_cont_feat=False, init_item_emb=False, shuffle=True ) print(hparams.values) input_creator = SequentialIterator model", "learning_rate=0.001, # set to 0.01 if batch normalization is disable", "is {0:.2f} mins'.format(train_time.interval/60.0)) ### model = model.fit(test_file, test_file, valid_num_ngs=9, eval_metric='auc')", "mins'.format(train_time.interval/60.0)) ### model = model.fit(test_file, test_file, valid_num_ngs=9, eval_metric='auc') ##-- quick", "##70k: 0.8814 ##75k: 0.8896 ##80k: 0.8871 ##85k: 0.8920 ## with", "path where I enter the cmd # for test train_file", "# model.load_model(os.path.join(data_path, \"model_20220118_20k_0.8923\", 'step_20000')) with Timer() as train_time: model =", "model.fit(test_file, test_file, valid_num_ngs=9, eval_metric='auc') ##-- quick test model.load_model(os.path.join(data_path, \"model\", 
'best_model'))", "test train_file = os.path.join(data_path, r'train_instances.txt') valid_file = os.path.join(data_path, r'valid_instances.txt') test_file", "item_vocab = os.path.join(data_path, r'item_vocab.pkl') cate_vocab = os.path.join(data_path, r'category_vocab.pkl') output_file =", "SeqModel # from recommenders.models.deeprec.models.sequential.asvd import A2SVDModel as SeqModel # from", "= os.path.join(data_path, r'valid_instances.txt') test_file = os.path.join(data_path, r'valid.tsv') pred_file = os.path.join(data_path,", "SequentialIterator #from recommenders.models.deeprec.io.nextitnet_iterator import NextItNetIterator print(\"System version: {}\".format(sys.version)) print(\"Tensorflow version:", "SUMMARIES_DIR=os.path.join(data_path, \"summary/\"), user_vocab=user_vocab, item_vocab=item_vocab, cate_vocab=cate_vocab, need_sample=False, train_num_ngs=train_num_ngs, # provides the", "user_vocab, item_vocab, cate_vocab ) ### NOTE: ### remember to use", "r'inter_test.tsv') final_pred_file = os.path.join(data_path, r'final_test.tsv') user_vocab = os.path.join(data_path, r'user_vocab.pkl') item_vocab", "( prepare_hparams ) from recommenders.datasets.amazon_reviews import download_and_extract, data_preprocessing, _create_vocab from", "os.path.join(\"tests\", \"resources\", \"deeprec\", \"slirec\") # data_path = '/home/jialia/wsdm/seq_datasets/B_full_feature_v2' data_path =", "cate_vocab=cate_vocab, need_sample=False, train_num_ngs=train_num_ngs, # provides the number of negative instances", "for non-deterministic result # data_path = os.path.join(\"tests\", \"resources\", \"deeprec\", \"slirec\")", "20k, seq=50') # print('Job finished. 
B_v2, epoch=50k, seq=100') ## ASVD:", "for training valid_num_ngs = 9 # number of negative instances", "from recommenders.models.deeprec.models.sequential.gru4rec import GRU4RecModel as SeqModel # from recommenders.models.deeprec.models.sequential.sum import", "from tempfile import TemporaryDirectory import numpy as np import tensorflow.compat.v1", "##10k: 0.8827 ##20k: 0.8848 ##25k: 0.8824 ##35k: 0.8878 ##40k: 0.8903", "epochs=100000, EARLY_STOP=40000, batch_size=400, show_step=5000, MODEL_DIR=os.path.join(data_path, \"model/\"), SUMMARIES_DIR=os.path.join(data_path, \"summary/\"), user_vocab=user_vocab, item_vocab=item_vocab,", "##40k: 0.8903 ##45k: 0.8876 ##50k: 0.8925 ##55k: 0.8903 ##60k: 0.8894", "NOTE: ### remember to use `_create_vocab(train_file, user_vocab, item_vocab, cate_vocab)` to", "import numpy as np import tensorflow.compat.v1 as tf tf.get_logger().setLevel('ERROR') #", "sys import os from tempfile import TemporaryDirectory import numpy as", "seq=50') # print('Job finished. B_v2, epoch=50k, seq=100') ## ASVD: 0.867497", "0.8896 ##80k: 0.8871 ##85k: 0.8920 ## with shuffle: ##5k: 0.8793", "NextItNetModel from recommenders.models.deeprec.io.sequential_iterator import SequentialIterator #from recommenders.models.deeprec.io.nextitnet_iterator import NextItNetIterator print(\"System", "of negative instances for each positive instance for loss computation.", "cate_vocab ) ### NOTE: ### remember to use `_create_vocab(train_file, user_vocab,", "model = SeqModel(hparams, input_creator, seed=RANDOM_SEED) # model.load_model(os.path.join(data_path, \"model_20220118_20k_0.8923\", 'step_20000')) with" ]
[ "classes to represent a C type. All of them classes", "like \"char\" or \"int\".\"\"\" def __init__(self, name, signed, longs): super(CtypesSimple,", "and self.restype.destination.name == \"void\" ): # we will provide a", "we will provide a means of converting this to a", "0): \"c_float\", (\"double\", True, 0): \"c_double\", (\"double\", True, 1): \"c_longdouble\",", "def __repr__(self): return '<Ctype (%s) \"%s\">' % (type(self).__name__, self.py_string()) def", "automagically returns it as an int. # Instead, convert to", "structs = [] enums = [] typedefs = [] errors", "= variety # \"struct\" or \"union\" self.members = members if", "in self.errors: visitor.visit_error(error, cls) class CtypesSimple(CtypesType): \"\"\"Represents a builtin type,", "\"c_int * 4\". \"\"\" import warnings __docformat__ = \"restructuredtext\" ctypes_type_map", "\"\"\" ctypesgen.ctypedescs contains classes to represent a C type. All", "(\"short\", True, 0): \"c_short\", (\"short\", False, 0): \"c_ushort\", (\"float\", True,", "\"c_ubyte\", (\"short\", True, 0): \"c_short\", (\"short\", False, 0): \"c_ushort\", (\"float\",", "name, signed, longs): super(CtypesSimple, self).__init__() self.name = name self.signed =", "in self.argtypes: a.visit(visitor) super(CtypesFunction, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"CFUNCTYPE(UNCHECKED(%s),", "if self.enumerators == None: self.opaque = True else: self.opaque =", "\"c_ulonglong\", (\"size_t\", True, 0): \"c_size_t\", (\"apr_int64_t\", True, 0): \"c_int64\", (\"off64_t\",", "CtypesNoErrorCheck() # Don't allow POINTER(None) (c_void_p) as a restype... 
causes", "__init__(self, name, signed, longs): super(CtypesSimple, self).__init__() self.name = name self.signed", "CtypesSpecial(\"String\") self.argtypes = [remove_function_pointer(p) for p in parameters] self.variadic =", "True, 0): \"None\", (\"int\", True, 0): \"c_int\", (\"int\", False, 0):", "visitor): visitor.visit_enum(self) super(CtypesEnum, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"enum_%s\" %", "ignore_can_be_ctype=None): return \"CFUNCTYPE(UNCHECKED(%s), %s)\" % ( self.restype.py_string(), \", \".join([a.py_string() for", "True, 0): \"c_int16\", (\"__int16\", True, 0): \"c_int16\", (\"int32_t\", True, 0):", "#!/usr/bin/env python \"\"\" ctypesgen.ctypedescs contains classes to represent a C", "True, 0): \"c_int16\", (\"int32_t\", True, 0): \"c_int32\", (\"__int32\", True, 0):", "self.name = name self.signed = signed self.longs = longs def", "if type(self.tag) == int or not self.tag: if type(self.tag) ==", "0): \"None\", (\"int\", True, 0): \"c_int\", (\"int\", False, 0): \"c_uint\",", "type(self.restype.destination) == CtypesSimple and self.restype.destination.name == \"void\" ): # we", "the py_string method. str(ctype) returns a string which, when evaluated", "error(self, message, cls=None): self.errors.append((message, cls)) def visit(self, visitor): for error,", "base, count): super(CtypesArray, self).__init__() self.base = base self.count = count", "a string which, when evaluated in the wrapper at runtime,", "visitor.visit_error(error, cls) class CtypesSimple(CtypesType): \"\"\"Represents a builtin type, like \"char\"", "Requires definition in preamble (\"ssize_t\", True, 0): \"c_ptrdiff_t\", # Requires", "parameters. 
def remove_function_pointer(t): if type(t) == CtypesPointer and type(t.destination) ==", "if self.count is None: return \"POINTER(%s)\" % self.base.py_string() if type(self.base)", "created using: >>> ctype = CtypesArray(CtypesSimple(\"int\",True,0),4) str(ctype) would evaluate to", "\"c_uint\", (\"int\", True, 1): \"c_long\", (\"int\", False, 1): \"c_ulong\", (\"char\",", "self.destination.visit(visitor) super(CtypesPointer, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"POINTER(%s)\" % self.destination.py_string()", "self).__init__() self.base = base self.count = count def visit(self, visitor):", "types.add((self.variety, self.tag)) return types def visit(self, visitor): visitor.visit_struct(self) if not", "self).__init__() self.tag = tag self.enumerators = enumerators if not self.tag:", "super(CtypesPointer, self).__init__() self.destination = destination self.qualifiers = qualifiers def visit(self,", "\"c_int64\", (\"uint8_t\", True, 0): \"c_uint8\", (\"uint16_t\", True, 0): \"c_uint16\", (\"uint32_t\",", "target): self.target = target def py_string(self, ignore_can_be_ctype=None): return \"lambda v,*a", "def visit(self, visitor): for error, cls in self.errors: visitor.visit_error(error, cls)", "parser module. The most important method of CtypesType and its", "__docformat__ = \"restructuredtext\" ctypes_type_map = { # typename signed longs", "not self.tag: self.tag = anonymous_enum_tag() self.anonymous = True else: self.anonymous", "self.opaque = True else: self.opaque = False self.src = src", "self.signed = signed self.longs = longs def py_string(self, ignore_can_be_ctype=None): return", "tag self.attrib = attrib self.variety = variety # \"struct\" or", "of ctypesgen, CtypesType and its subclasses are completely independent of", "super(CtypesType, self).__init__() self.errors = [] def __repr__(self): return '<Ctype (%s)", "a ctypes type, # you can make it any arbitrary", "it as an int. # Instead, convert to POINTER(c_void). 
c_void", "CtypesTypedef(CtypesType): \"\"\"Represents a type defined by a typedef.\"\"\" def __init__(self,", "ignore_can_be_ctype=None): return self.name class CtypesTypedef(CtypesType): \"\"\"Represents a type defined by", "ctypes_type_map_python_builtin = { (\"int\", True, 2): \"c_longlong\", (\"int\", False, 2):", "% self.base.py_string() if type(self.base) == CtypesArray: return \"(%s) * int(%s)\"", "[remove_function_pointer(p) for p in parameters] self.variadic = variadic self.attrib =", "1 return last_tagnum def fmt_anonymous_struct_tag(num): return \"anon_%d\" % num def", "ExpressionNodes. There may be # ExpressionNode objects in array count", "= [] typedefs = [] errors = [] identifiers =", "ctype = CtypesArray(CtypesSimple(\"int\",True,0),4) str(ctype) would evaluate to \"c_int * 4\".", "= True else: self.anonymous = False if self.enumerators == None:", "\"c_uint8\", (\"uint16_t\", True, 0): \"c_uint16\", (\"uint32_t\", True, 0): \"c_uint32\", (\"uint64_t\",", "0): \"c_void_p\", } # This protocol is used for walking", "pass def visit_identifier(self, identifier): # This one comes from inside", "(self.base.py_string(), self.count.py_string(False)) class CtypesNoErrorCheck(object): def py_string(self, ignore_can_be_ctype=None): return \"None\" def", "\"char\" or \"int\".\"\"\" def __init__(self, name, signed, longs): super(CtypesSimple, self).__init__()", "a C type. All of them classes are subclasses of", "objects in array count expressions. 
pass def visit_type_and_collect_info(ctype): class Visitor(CtypesTypeVisitor):", "else: self.anonymous = False if self.members == None: self.opaque =", "ctype in self.members: ctype.visit(visitor) super(CtypesStruct, self).visit(visitor) def get_subtypes(self): if self.opaque:", "super(CtypesSimple, self).__init__() self.name = name self.signed = signed self.longs =", "{ (\"int\", True, 2): \"c_longlong\", (\"int\", False, 2): \"c_ulonglong\", (\"size_t\",", "(\"wchar_t\", True, 0): \"c_wchar\", (\"ptrdiff_t\", True, 0): \"c_ptrdiff_t\", # Requires", "four integers could be created using: >>> ctype = CtypesArray(CtypesSimple(\"int\",True,0),4)", "self.count.visit(visitor) super(CtypesArray, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): if self.count is None:", "\"%s * int(%s)\" % (self.base.py_string(), self.count.py_string(False)) class CtypesNoErrorCheck(object): def py_string(self,", "\"c_float\", (\"double\", True, 0): \"c_double\", (\"double\", True, 1): \"c_longdouble\", (\"int8_t\",", "= CtypesPointerCast(CtypesSpecial(\"c_void_p\")) # Return \"String\" instead of \"POINTER(c_char)\" if self.restype.py_string()", "representing an array of four integers could be created using:", "CtypesType and its subclasses are completely independent of the parser", "self.enumerators == None: self.opaque = True else: self.opaque = False", "visit_enum(self, enum): pass def visit_typedef(self, name): pass def visit_error(self, error,", "CtypesSpecial(\"c_char_p\") else: self.restype = CtypesSpecial(\"String\") self.argtypes = [remove_function_pointer(p) for p", "variety, members, src=None): super(CtypesStruct, self).__init__() self.tag = tag self.attrib =", "versions of ctypesgen, CtypesType and its subclasses are completely independent", "% (self.base.py_string(), self.count.py_string(False)) class CtypesNoErrorCheck(object): def py_string(self, ignore_can_be_ctype=None): return \"None\"", "self.restype = restype self.errcheck = CtypesNoErrorCheck() # 
Don't allow POINTER(None)", "def get_required_types(self): types = super(CtypesStruct, self).get_required_types() types.add((self.variety, self.tag)) return types", "(\"short\", False, 0): \"c_ushort\", (\"float\", True, 0): \"c_float\", (\"double\", True,", "def error(self, message, cls=None): self.errors.append((message, cls)) def visit(self, visitor): for", "else: return t class CtypesType(object): def __init__(self): super(CtypesType, self).__init__() self.errors", "1 return \"anon_%d\" % last_tagnum class CtypesEnum(CtypesType): def __init__(self, tag,", "(\"apr_int64_t\", True, 0): \"c_int64\", (\"off64_t\", True, 0): \"c_int64\", (\"apr_uint64_t\", True,", "def py_string(self, ignore_can_be_ctype=None): return self.name class CtypesBitfield(CtypesType): def __init__(self, base,", ": cast(v, {})\".format(self.target.py_string()) class CtypesFunction(CtypesType): def __init__(self, restype, parameters, variadic,", "None: return \"POINTER(%s)\" % self.base.py_string() if type(self.base) == CtypesArray: return", "evaluated in the wrapper at runtime, results in a ctypes", "True else: self.anonymous = False if self.enumerators == None: self.opaque", "\"c_int\", (\"int\", False, 0): \"c_uint\", (\"int\", True, 1): \"c_long\", (\"int\",", "== CtypesFunction: return t.destination elif type(t) == CtypesPointer: t.destination =", "self.destination.py_string() class CtypesArray(CtypesType): def __init__(self, base, count): super(CtypesArray, self).__init__() self.base", "% (type(self).__name__, self.py_string()) def error(self, message, cls=None): self.errors.append((message, cls)) def", "= remove_function_pointer(t.destination) return t else: return t class CtypesType(object): def", "__init__(self, base, bitfield): super(CtypesBitfield, self).__init__() self.base = base self.bitfield =", "self.base = base self.bitfield = bitfield def visit(self, visitor): self.base.visit(visitor)", "True, 0): \"c_float\", (\"double\", True, 0): \"c_double\", (\"double\", True, 1):", 
"super(CtypesArray, self).__init__() self.base = base self.count = count def visit(self,", "super(CtypesStruct, self).__init__() self.tag = tag self.attrib = attrib self.variety =", "% num def anonymous_struct_tag(): return fmt_anonymous_struct_tag(anonymous_struct_tagnum()) class CtypesStruct(CtypesType): def __init__(self,", "4\". \"\"\" import warnings __docformat__ = \"restructuredtext\" ctypes_type_map = {", "__nonzero__ = __bool__ class CtypesPointerCast(object): def __init__(self, target): self.target =", "POINTER(None) (c_void_p) as a restype... causes errors # when ctypes", "0): \"c_int16\", (\"__int16\", True, 0): \"c_int16\", (\"int32_t\", True, 0): \"c_int32\",", "(\"_Bool\", True, 0): \"c_bool\", } ctypes_type_map_python_builtin = { (\"int\", True,", "CtypesType. Unlike in previous versions of ctypesgen, CtypesType and its", "subclasses of CtypesType. Unlike in previous versions of ctypesgen, CtypesType", "completely independent of the parser module. The most important method", "pass def visit_type_and_collect_info(ctype): class Visitor(CtypesTypeVisitor): def visit_struct(self, struct): structs.append(struct) def", "\"c_int16\", (\"__int16\", True, 0): \"c_int16\", (\"int32_t\", True, 0): \"c_int32\", (\"__int32\",", "a builtin type, like \"char\" or \"int\".\"\"\" def __init__(self, name,", "to POINTER(c_void). 
c_void is not a ctypes type, # you", "errors.append((error, cls)) def visit_identifier(self, identifier): identifiers.append(identifier) structs = [] enums", "class CtypesSimple(CtypesType): \"\"\"Represents a builtin type, like \"char\" or \"int\".\"\"\"", "(\"int\", False, 1): \"c_ulong\", (\"char\", True, 0): \"c_char\", (\"char\", False,", "attrib def visit(self, visitor): self.restype.visit(visitor) for a in self.argtypes: a.visit(visitor)", "= False self.src = src def visit(self, visitor): visitor.visit_enum(self) super(CtypesEnum,", "(\"float\", True, 0): \"c_float\", (\"double\", True, 0): \"c_double\", (\"double\", True,", "== \"POINTER(c_char)\": if \"const\" in self.restype.qualifiers: self.restype = CtypesSpecial(\"c_char_p\") else:", "are completely independent of the parser module. The most important", "CtypesSpecial(CtypesType): def __init__(self, name): super(CtypesSpecial, self).__init__() self.name = name def", "causes errors # when ctypes automagically returns it as an", "\"%s\">' % (type(self).__name__, self.py_string()) def error(self, message, cls=None): self.errors.append((message, cls))", "make it any arbitrary type. 
if ( type(self.restype) == CtypesPointer", "visitor.visit_typedef(self.name) super(CtypesTypedef, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.name class CtypesBitfield(CtypesType):", "self.errors.append((message, cls)) def visit(self, visitor): for error, cls in self.errors:", "\"c_int8\", (\"int16_t\", True, 0): \"c_int16\", (\"__int16\", True, 0): \"c_int16\", (\"int32_t\",", "True, 0): \"c_int32\", (\"__int32\", True, 0): \"c_int32\", (\"int64_t\", True, 0):", "return \"lambda v,*a : cast(v, {})\".format(self.target.py_string()) class CtypesFunction(CtypesType): def __init__(self,", "\"c_long\", (\"int\", False, 1): \"c_ulong\", (\"char\", True, 0): \"c_char\", (\"char\",", "type defined by a typedef.\"\"\" def __init__(self, name): super(CtypesTypedef, self).__init__()", "CtypesPointerCast(CtypesSpecial(\"c_void_p\")) # Return \"String\" instead of \"POINTER(c_char)\" if self.restype.py_string() ==", "int. # Instead, convert to POINTER(c_void). c_void is not a", "All of them classes are subclasses of CtypesType. Unlike in", "type trees. class CtypesTypeVisitor(object): def visit_struct(self, struct): pass def visit_enum(self,", "is used for walking type trees. class CtypesTypeVisitor(object): def visit_struct(self,", "and type(t.destination) == CtypesFunction: return t.destination elif type(t) == CtypesPointer:", "== CtypesSimple and self.restype.destination.name == \"void\" ): # we will", "0): \"c_uint32\", (\"uint64_t\", True, 0): \"c_uint64\", (\"_Bool\", True, 0): \"c_bool\",", "python \"\"\" ctypesgen.ctypedescs contains classes to represent a C type.", "0 def anonymous_struct_tagnum(): global last_tagnum last_tagnum += 1 return last_tagnum", "is the py_string method. str(ctype) returns a string which, when", "funtion pointer; needed for typedefs # and function parameters. 
def", "self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.base.py_string() class CtypesPointer(CtypesType): def __init__(self,", ">>> ctype = CtypesArray(CtypesSimple(\"int\",True,0),4) str(ctype) would evaluate to \"c_int *", "or not self.tag: if type(self.tag) == int: self.tag = fmt_anonymous_struct_tag(self.tag)", "= CtypesSpecial(\"c_char_p\") else: self.restype = CtypesSpecial(\"String\") self.argtypes = [remove_function_pointer(p) for", "= False self.src = src def get_required_types(self): types = super(CtypesStruct,", "self.base.py_string() if type(self.base) == CtypesArray: return \"(%s) * int(%s)\" %", "'<Ctype (%s) \"%s\">' % (type(self).__name__, self.py_string()) def error(self, message, cls=None):", "enum): pass def visit_typedef(self, name): pass def visit_error(self, error, cls):", "(\"ptrdiff_t\", True, 0): \"c_ptrdiff_t\", # Requires definition in preamble (\"ssize_t\",", "definition in preamble (\"ssize_t\", True, 0): \"c_ptrdiff_t\", # Requires definition", "CtypesFunction(CtypesType): def __init__(self, restype, parameters, variadic, attrib=dict()): super(CtypesFunction, self).__init__() self.restype", "class CtypesArray(CtypesType): def __init__(self, base, count): super(CtypesArray, self).__init__() self.base =", "typename signed longs (\"void\", True, 0): \"None\", (\"int\", True, 0):", "0): \"c_uint64\", (\"wchar_t\", True, 0): \"c_wchar\", (\"ptrdiff_t\", True, 0): \"c_ptrdiff_t\",", "This one comes from inside ExpressionNodes. 
There may be #", "in self.members]) def py_string(self, ignore_can_be_ctype=None): return \"%s_%s\" % (self.variety, self.tag)", "type(self.base) == CtypesArray: return \"(%s) * int(%s)\" % (self.base.py_string(), self.count.py_string(False))", "attrib self.variety = variety # \"struct\" or \"union\" self.members =", "= [] identifiers = [] v = Visitor() ctype.visit(v) return", "self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.name class CtypesBitfield(CtypesType): def __init__(self,", "the parser module. The most important method of CtypesType and", "\"c_void_p\", } # This protocol is used for walking type", "return False __nonzero__ = __bool__ class CtypesPointerCast(object): def __init__(self, target):", "get_subtypes(self): if self.opaque: return set() else: return set([m[1] for m", "name): super(CtypesSpecial, self).__init__() self.name = name def py_string(self, ignore_can_be_ctype=None): return", "super(CtypesTypedef, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.name class CtypesBitfield(CtypesType): def", "visit_typedef(self, name): pass def visit_error(self, error, cls): pass def visit_identifier(self,", "self.count: self.count.visit(visitor) super(CtypesArray, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): if self.count is", "type, like \"char\" or \"int\".\"\"\" def __init__(self, name, signed, longs):", "is not a ctypes type, # you can make it", "signed self.longs = longs def py_string(self, ignore_can_be_ctype=None): return ctypes_type_map[(self.name, self.signed,", "== int: self.tag = fmt_anonymous_struct_tag(self.tag) else: self.tag = anonymous_struct_tag() self.anonymous", "= [] errors = [] identifiers = [] v =", "protocol is used for walking type trees. 
class CtypesTypeVisitor(object): def", "visit_type_and_collect_info(ctype): class Visitor(CtypesTypeVisitor): def visit_struct(self, struct): structs.append(struct) def visit_enum(self, enum):", "visitor): if self.destination: self.destination.visit(visitor) super(CtypesPointer, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return", "variadic, attrib=dict()): super(CtypesFunction, self).__init__() self.restype = restype self.errcheck = CtypesNoErrorCheck()", "__init__(self, destination, qualifiers): super(CtypesPointer, self).__init__() self.destination = destination self.qualifiers =", "signed longs (\"void\", True, 0): \"None\", (\"int\", True, 0): \"c_int\",", "visitor): self.base.visit(visitor) super(CtypesBitfield, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.base.py_string() class", "# Requires definition in preamble (\"ssize_t\", True, 0): \"c_ptrdiff_t\", #", "if ( type(self.restype) == CtypesPointer and type(self.restype.destination) == CtypesSimple and", "\"c_ushort\", (\"float\", True, 0): \"c_float\", (\"double\", True, 0): \"c_double\", (\"double\",", "(\"int8_t\", True, 0): \"c_int8\", (\"__int8\", True, 0): \"c_int8\", (\"int16_t\", True,", "level of indirection from funtion pointer; needed for typedefs #", "= bitfield def visit(self, visitor): self.base.visit(visitor) super(CtypesBitfield, self).visit(visitor) def py_string(self,", "0): \"c_double\", (\"double\", True, 1): \"c_longdouble\", (\"int8_t\", True, 0): \"c_int8\",", "it any arbitrary type. if ( type(self.restype) == CtypesPointer and", "and function parameters. 
def remove_function_pointer(t): if type(t) == CtypesPointer and", "= target def py_string(self, ignore_can_be_ctype=None): return \"lambda v,*a : cast(v,", "previous versions of ctypesgen, CtypesType and its subclasses are completely", "class CtypesStruct(CtypesType): def __init__(self, tag, attrib, variety, members, src=None): super(CtypesStruct,", "tag, attrib, variety, members, src=None): super(CtypesStruct, self).__init__() self.tag = tag", "not self.opaque: for name, ctype in self.members: ctype.visit(visitor) super(CtypesStruct, self).visit(visitor)", "(\"__int64\", True, 0): \"c_int64\", (\"uint8_t\", True, 0): \"c_uint8\", (\"uint16_t\", True,", "# Return \"String\" instead of \"POINTER(c_char)\" if self.restype.py_string() == \"POINTER(c_char)\":", "p in parameters] self.variadic = variadic self.attrib = attrib def", "type object. For example, a CtypesType representing an array of", "(\"char\", True, 0): \"c_char\", (\"char\", False, 0): \"c_ubyte\", (\"short\", True,", "\"c_longlong\", (\"int\", False, 2): \"c_ulonglong\", (\"size_t\", True, 0): \"c_size_t\", (\"apr_int64_t\",", "\"c_int64\", (\"off64_t\", True, 0): \"c_int64\", (\"apr_uint64_t\", True, 0): \"c_uint64\", (\"wchar_t\",", "in self.argtypes]), ) last_tagnum = 0 def anonymous_struct_tagnum(): global last_tagnum", "True, 0): \"c_ptrdiff_t\", # Requires definition in preamble (\"va_list\", True,", "(\"va_list\", True, 0): \"c_void_p\", } # This protocol is used", "# Remove one level of indirection from funtion pointer; needed", "you can make it any arbitrary type. 
if ( type(self.restype)", "visit_struct(self, struct): structs.append(struct) def visit_enum(self, enum): enums.append(enum) def visit_typedef(self, typedef):", "self.base.visit(visitor) super(CtypesBitfield, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.base.py_string() class CtypesPointer(CtypesType):", "last_tagnum += 1 return last_tagnum def fmt_anonymous_struct_tag(num): return \"anon_%d\" %", "CtypesPointer: t.destination = remove_function_pointer(t.destination) return t else: return t class", "Remove one level of indirection from funtion pointer; needed for", "visit_error(self, error, cls): errors.append((error, cls)) def visit_identifier(self, identifier): identifiers.append(identifier) structs", "% self.destination.py_string() class CtypesArray(CtypesType): def __init__(self, base, count): super(CtypesArray, self).__init__()", "if self.count: self.count.visit(visitor) super(CtypesArray, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): if self.count", "== CtypesPointer and type(t.destination) == CtypesFunction: return t.destination elif type(t)", "base self.bitfield = bitfield def visit(self, visitor): self.base.visit(visitor) super(CtypesBitfield, self).visit(visitor)", "def anonymous_enum_tag(): global last_tagnum last_tagnum += 1 return \"anon_%d\" %", "a means of converting this to a c_void_p self.restype =", "if type(self.base) == CtypesArray: return \"(%s) * int(%s)\" % (self.base.py_string(),", "== CtypesArray: return \"(%s) * int(%s)\" % (self.base.py_string(), self.count.py_string(False)) else:", "as a restype... causes errors # when ctypes automagically returns", "POINTER(c_void). c_void is not a ctypes type, # you can", "def visit(self, visitor): if self.destination: self.destination.visit(visitor) super(CtypesPointer, self).visit(visitor) def py_string(self,", "to represent a C type. 
All of them classes are", "self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"CFUNCTYPE(UNCHECKED(%s), %s)\" % ( self.restype.py_string(),", "= { # typename signed longs (\"void\", True, 0): \"None\",", "bitfield): super(CtypesBitfield, self).__init__() self.base = base self.bitfield = bitfield def", "of four integers could be created using: >>> ctype =", "(\"uint64_t\", True, 0): \"c_uint64\", (\"_Bool\", True, 0): \"c_bool\", } ctypes_type_map_python_builtin", "runtime, results in a ctypes type object. For example, a", "def py_string(self, ignore_can_be_ctype=None): return \"POINTER(%s)\" % self.destination.py_string() class CtypesArray(CtypesType): def", "== CtypesPointer and type(self.restype.destination) == CtypesSimple and self.restype.destination.name == \"void\"", "def py_string(self, ignore_can_be_ctype=None): return \"%s_%s\" % (self.variety, self.tag) last_tagnum =", "message, cls=None): self.errors.append((message, cls)) def visit(self, visitor): for error, cls", "def py_string(self, ignore_can_be_ctype=None): return self.name class CtypesTypedef(CtypesType): \"\"\"Represents a type", "cls): pass def visit_identifier(self, identifier): # This one comes from", "%s)\" % ( self.restype.py_string(), \", \".join([a.py_string() for a in self.argtypes]),", "\"c_char\", (\"char\", False, 0): \"c_ubyte\", (\"short\", True, 0): \"c_short\", (\"short\",", "\"c_ulong\", (\"char\", True, 0): \"c_char\", (\"char\", False, 0): \"c_ubyte\", (\"short\",", "be created using: >>> ctype = CtypesArray(CtypesSimple(\"int\",True,0),4) str(ctype) would evaluate", "t.destination elif type(t) == CtypesPointer: t.destination = remove_function_pointer(t.destination) return t", "array of four integers could be created using: >>> ctype", "self).__init__() self.name = name def py_string(self, ignore_can_be_ctype=None): return self.name class", "returns a string which, when evaluated in the wrapper at", "def anonymous_struct_tag(): return 
fmt_anonymous_struct_tag(anonymous_struct_tagnum()) class CtypesStruct(CtypesType): def __init__(self, tag, attrib,", "self.name = name def py_string(self, ignore_can_be_ctype=None): return self.name class CtypesTypedef(CtypesType):", "CtypesTypeVisitor(object): def visit_struct(self, struct): pass def visit_enum(self, enum): pass def", "method of CtypesType and its subclasses is the py_string method.", "def visit_error(self, error, cls): errors.append((error, cls)) def visit_identifier(self, identifier): identifiers.append(identifier)", "src=None): super(CtypesEnum, self).__init__() self.tag = tag self.enumerators = enumerators if", "def visit_struct(self, struct): pass def visit_enum(self, enum): pass def visit_typedef(self,", "else: return \"%s * int(%s)\" % (self.base.py_string(), self.count.py_string(False)) class CtypesNoErrorCheck(object):", "True else: self.anonymous = False if self.members == None: self.opaque", "them classes are subclasses of CtypesType. Unlike in previous versions", "[] enums = [] typedefs = [] errors = []", "self.destination: self.destination.visit(visitor) super(CtypesPointer, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"POINTER(%s)\" %", "in array count expressions. 
pass def visit_type_and_collect_info(ctype): class Visitor(CtypesTypeVisitor): def", "import warnings __docformat__ = \"restructuredtext\" ctypes_type_map = { # typename", "CtypesPointer(CtypesType): def __init__(self, destination, qualifiers): super(CtypesPointer, self).__init__() self.destination = destination", "visit(self, visitor): self.restype.visit(visitor) for a in self.argtypes: a.visit(visitor) super(CtypesFunction, self).visit(visitor)", "return fmt_anonymous_struct_tag(anonymous_struct_tagnum()) class CtypesStruct(CtypesType): def __init__(self, tag, attrib, variety, members,", "in parameters] self.variadic = variadic self.attrib = attrib def visit(self,", "class CtypesNoErrorCheck(object): def py_string(self, ignore_can_be_ctype=None): return \"None\" def __bool__(self): return", "self.variety = variety # \"struct\" or \"union\" self.members = members", "self.errors: visitor.visit_typedef(self.name) super(CtypesTypedef, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.name class", "\"(%s) * int(%s)\" % (self.base.py_string(), self.count.py_string(False)) else: return \"%s *", "= [remove_function_pointer(p) for p in parameters] self.variadic = variadic self.attrib", "errors = [] identifiers = [] v = Visitor() ctype.visit(v)", "count): super(CtypesArray, self).__init__() self.base = base self.count = count def", "array count expressions. 
pass def visit_type_and_collect_info(ctype): class Visitor(CtypesTypeVisitor): def visit_struct(self,", "a.visit(visitor) super(CtypesFunction, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"CFUNCTYPE(UNCHECKED(%s), %s)\" %", "CtypesPointer and type(t.destination) == CtypesFunction: return t.destination elif type(t) ==", "0): \"c_char\", (\"char\", False, 0): \"c_ubyte\", (\"short\", True, 0): \"c_short\",", "return \"CFUNCTYPE(UNCHECKED(%s), %s)\" % ( self.restype.py_string(), \", \".join([a.py_string() for a", "False self.src = src def get_required_types(self): types = super(CtypesStruct, self).get_required_types()", "return \"POINTER(%s)\" % self.base.py_string() if type(self.base) == CtypesArray: return \"(%s)", "= False if self.enumerators == None: self.opaque = True else:", "self.anonymous = False if self.enumerators == None: self.opaque = True", "at runtime, results in a ctypes type object. For example,", "its subclasses is the py_string method. str(ctype) returns a string", "\"\"\"Represents a builtin type, like \"char\" or \"int\".\"\"\" def __init__(self,", "def py_string(self, ignore_can_be_ctype=None): if self.count is None: return \"POINTER(%s)\" %", "arbitrary type. if ( type(self.restype) == CtypesPointer and type(self.restype.destination) ==", "not a ctypes type, # you can make it any", "()) self.errcheck = CtypesPointerCast(CtypesSpecial(\"c_void_p\")) # Return \"String\" instead of \"POINTER(c_char)\"", "ctypesgen.ctypedescs contains classes to represent a C type. 
All of", "typedefs = [] errors = [] identifiers = [] v", "0): \"c_ubyte\", (\"short\", True, 0): \"c_short\", (\"short\", False, 0): \"c_ushort\",", "super(CtypesSpecial, self).__init__() self.name = name def py_string(self, ignore_can_be_ctype=None): return self.name", "fmt_anonymous_struct_tag(num): return \"anon_%d\" % num def anonymous_struct_tag(): return fmt_anonymous_struct_tag(anonymous_struct_tagnum()) class", "name, ctype in self.members: ctype.visit(visitor) super(CtypesStruct, self).visit(visitor) def get_subtypes(self): if", "= tag self.enumerators = enumerators if not self.tag: self.tag =", "not self.tag: if type(self.tag) == int: self.tag = fmt_anonymous_struct_tag(self.tag) else:", "function parameters. def remove_function_pointer(t): if type(t) == CtypesPointer and type(t.destination)", "0 def anonymous_enum_tag(): global last_tagnum last_tagnum += 1 return \"anon_%d\"", "1): \"c_ulong\", (\"char\", True, 0): \"c_char\", (\"char\", False, 0): \"c_ubyte\",", "errors, identifiers # Remove one level of indirection from funtion", "visit(self, visitor): visitor.visit_struct(self) if not self.opaque: for name, ctype in", "= 0 def anonymous_enum_tag(): global last_tagnum last_tagnum += 1 return", "variadic self.attrib = attrib def visit(self, visitor): self.restype.visit(visitor) for a", "cls)) def visit_identifier(self, identifier): identifiers.append(identifier) structs = [] enums =", "[] identifiers = [] v = Visitor() ctype.visit(v) return structs,", "ignore_can_be_ctype=None): return ctypes_type_map[(self.name, self.signed, self.longs)] class CtypesSpecial(CtypesType): def __init__(self, name):", "qualifiers def visit(self, visitor): if self.destination: self.destination.visit(visitor) super(CtypesPointer, self).visit(visitor) def", "self.name = name def visit(self, visitor): if not self.errors: visitor.visit_typedef(self.name)", "True, 0): \"c_int8\", (\"__int8\", True, 0): \"c_int8\", (\"int16_t\", True, 0):", "return structs, enums, typedefs, 
errors, identifiers # Remove one level", "def remove_function_pointer(t): if type(t) == CtypesPointer and type(t.destination) == CtypesFunction:", "= CtypesSpecial(\"String\") self.argtypes = [remove_function_pointer(p) for p in parameters] self.variadic", "def visit_struct(self, struct): structs.append(struct) def visit_enum(self, enum): enums.append(enum) def visit_typedef(self,", "== CtypesPointer: t.destination = remove_function_pointer(t.destination) return t else: return t", "def py_string(self, ignore_can_be_ctype=None): return \"CFUNCTYPE(UNCHECKED(%s), %s)\" % ( self.restype.py_string(), \",", "= True else: self.opaque = False self.src = src def", "__init__(self, tag, enumerators, src=None): super(CtypesEnum, self).__init__() self.tag = tag self.enumerators", "[] def __repr__(self): return '<Ctype (%s) \"%s\">' % (type(self).__name__, self.py_string())", "self.members = members if type(self.tag) == int or not self.tag:", "set([m[1] for m in self.members]) def py_string(self, ignore_can_be_ctype=None): return \"%s_%s\"", "longs (\"void\", True, 0): \"None\", (\"int\", True, 0): \"c_int\", (\"int\",", "True, 0): \"c_uint64\", (\"_Bool\", True, 0): \"c_bool\", } ctypes_type_map_python_builtin =", "CtypesFunction: return t.destination elif type(t) == CtypesPointer: t.destination = remove_function_pointer(t.destination)", "self).__init__() self.name = name self.signed = signed self.longs = longs", "identifiers # Remove one level of indirection from funtion pointer;", "( self.restype.py_string(), \", \".join([a.py_string() for a in self.argtypes]), ) last_tagnum", "(\"double\", True, 0): \"c_double\", (\"double\", True, 1): \"c_longdouble\", (\"int8_t\", True,", "of \"POINTER(c_char)\" if self.restype.py_string() == \"POINTER(c_char)\": if \"const\" in self.restype.qualifiers:", "count def visit(self, visitor): self.base.visit(visitor) if self.count: self.count.visit(visitor) super(CtypesArray, self).visit(visitor)", "\"lambda v,*a : cast(v, 
{})\".format(self.target.py_string()) class CtypesFunction(CtypesType): def __init__(self, restype,", "t else: return t class CtypesType(object): def __init__(self): super(CtypesType, self).__init__()", "True, 0): \"c_uint64\", (\"wchar_t\", True, 0): \"c_wchar\", (\"ptrdiff_t\", True, 0):", "visit(self, visitor): if self.destination: self.destination.visit(visitor) super(CtypesPointer, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None):", "self.restype.py_string() == \"POINTER(c_char)\": if \"const\" in self.restype.qualifiers: self.restype = CtypesSpecial(\"c_char_p\")", "= src def visit(self, visitor): visitor.visit_enum(self) super(CtypesEnum, self).visit(visitor) def py_string(self,", "ignore_can_be_ctype=None): return self.base.py_string() class CtypesPointer(CtypesType): def __init__(self, destination, qualifiers): super(CtypesPointer,", "True, 0): \"c_int8\", (\"int16_t\", True, 0): \"c_int16\", (\"__int16\", True, 0):", "(type(self).__name__, self.py_string()) def error(self, message, cls=None): self.errors.append((message, cls)) def visit(self,", "self.argtypes]), ) last_tagnum = 0 def anonymous_struct_tagnum(): global last_tagnum last_tagnum", "\"restructuredtext\" ctypes_type_map = { # typename signed longs (\"void\", True,", "= [] enums = [] typedefs = [] errors =", "name self.signed = signed self.longs = longs def py_string(self, ignore_can_be_ctype=None):", "Requires definition in preamble (\"va_list\", True, 0): \"c_void_p\", } #", "warnings __docformat__ = \"restructuredtext\" ctypes_type_map = { # typename signed", "\"c_short\", (\"short\", False, 0): \"c_ushort\", (\"float\", True, 0): \"c_float\", (\"double\",", "True, 1): \"c_longdouble\", (\"int8_t\", True, 0): \"c_int8\", (\"__int8\", True, 0):", "0): \"c_int\", (\"int\", False, 0): \"c_uint\", (\"int\", True, 1): \"c_long\",", "enums, typedefs, errors, identifiers # Remove one level of indirection", "self.tag = anonymous_struct_tag() self.anonymous = True else: self.anonymous 
= False", "remove_function_pointer(t): if type(t) == CtypesPointer and type(t.destination) == CtypesFunction: return", "def visit_error(self, error, cls): pass def visit_identifier(self, identifier): # This", "def py_string(self, ignore_can_be_ctype=None): return self.base.py_string() class CtypesPointer(CtypesType): def __init__(self, destination,", "True, 0): \"c_bool\", } ctypes_type_map_python_builtin = { (\"int\", True, 2):", "visitor): self.restype.visit(visitor) for a in self.argtypes: a.visit(visitor) super(CtypesFunction, self).visit(visitor) def", "\"c_int64\", (\"apr_uint64_t\", True, 0): \"c_uint64\", (\"wchar_t\", True, 0): \"c_wchar\", (\"ptrdiff_t\",", "base self.count = count def visit(self, visitor): self.base.visit(visitor) if self.count:", "def py_string(self, ignore_can_be_ctype=None): return \"lambda v,*a : cast(v, {})\".format(self.target.py_string()) class", "def py_string(self, ignore_can_be_ctype=None): return \"None\" def __bool__(self): return False __nonzero__", "= [] def __repr__(self): return '<Ctype (%s) \"%s\">' % (type(self).__name__,", "self.tag = anonymous_enum_tag() self.anonymous = True else: self.anonymous = False", "True, 0): \"c_int64\", (\"off64_t\", True, 0): \"c_int64\", (\"apr_uint64_t\", True, 0):", "__init__(self, name): super(CtypesTypedef, self).__init__() self.name = name def visit(self, visitor):", "False self.src = src def visit(self, visitor): visitor.visit_enum(self) super(CtypesEnum, self).visit(visitor)", "str(ctype) returns a string which, when evaluated in the wrapper", "wrapper at runtime, results in a ctypes type object. 
For", "cls=None): self.errors.append((message, cls)) def visit(self, visitor): for error, cls in", "\"\"\"Represents a type defined by a typedef.\"\"\" def __init__(self, name):", "self.name class CtypesTypedef(CtypesType): \"\"\"Represents a type defined by a typedef.\"\"\"", "qualifiers): super(CtypesPointer, self).__init__() self.destination = destination self.qualifiers = qualifiers def", "__bool__ class CtypesPointerCast(object): def __init__(self, target): self.target = target def", "global last_tagnum last_tagnum += 1 return \"anon_%d\" % last_tagnum class", "= anonymous_enum_tag() self.anonymous = True else: self.anonymous = False if", "anonymous_struct_tagnum(): global last_tagnum last_tagnum += 1 return last_tagnum def fmt_anonymous_struct_tag(num):", "for error, cls in self.errors: visitor.visit_error(error, cls) class CtypesSimple(CtypesType): \"\"\"Represents", "\"\"\" import warnings __docformat__ = \"restructuredtext\" ctypes_type_map = { #", "(\"uint8_t\", True, 0): \"c_uint8\", (\"uint16_t\", True, 0): \"c_uint16\", (\"uint32_t\", True,", "def __init__(self, tag, attrib, variety, members, src=None): super(CtypesStruct, self).__init__() self.tag", "(\"off64_t\", True, 0): \"c_int64\", (\"apr_uint64_t\", True, 0): \"c_uint64\", (\"wchar_t\", True,", "means of converting this to a c_void_p self.restype = CtypesPointer(CtypesSpecial(\"c_ubyte\"),", "object. 
For example, a CtypesType representing an array of four", "return t.destination elif type(t) == CtypesPointer: t.destination = remove_function_pointer(t.destination) return", "ignore_can_be_ctype=None): return \"lambda v,*a : cast(v, {})\".format(self.target.py_string()) class CtypesFunction(CtypesType): def", "class CtypesFunction(CtypesType): def __init__(self, restype, parameters, variadic, attrib=dict()): super(CtypesFunction, self).__init__()", "def visit_type_and_collect_info(ctype): class Visitor(CtypesTypeVisitor): def visit_struct(self, struct): structs.append(struct) def visit_enum(self,", "= qualifiers def visit(self, visitor): if self.destination: self.destination.visit(visitor) super(CtypesPointer, self).visit(visitor)", "def visit_typedef(self, typedef): typedefs.append(typedef) def visit_error(self, error, cls): errors.append((error, cls))", "CtypesBitfield(CtypesType): def __init__(self, base, bitfield): super(CtypesBitfield, self).__init__() self.base = base", "py_string(self, ignore_can_be_ctype=None): return \"None\" def __bool__(self): return False __nonzero__ =", "(\"int\", True, 0): \"c_int\", (\"int\", False, 0): \"c_uint\", (\"int\", True,", "0): \"c_int64\", (\"apr_uint64_t\", True, 0): \"c_uint64\", (\"wchar_t\", True, 0): \"c_wchar\",", "CtypesNoErrorCheck(object): def py_string(self, ignore_can_be_ctype=None): return \"None\" def __bool__(self): return False", "[] typedefs = [] errors = [] identifiers = []", "pointer; needed for typedefs # and function parameters. def remove_function_pointer(t):", "self.restype = CtypesSpecial(\"c_char_p\") else: self.restype = CtypesSpecial(\"String\") self.argtypes = [remove_function_pointer(p)", "its subclasses are completely independent of the parser module. 
The", "else: self.restype = CtypesSpecial(\"String\") self.argtypes = [remove_function_pointer(p) for p in", "visit(self, visitor): self.base.visit(visitor) super(CtypesBitfield, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.base.py_string()", "from funtion pointer; needed for typedefs # and function parameters.", "type(t.destination) == CtypesFunction: return t.destination elif type(t) == CtypesPointer: t.destination", "): # we will provide a means of converting this", "a in self.argtypes: a.visit(visitor) super(CtypesFunction, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return", "class CtypesTypeVisitor(object): def visit_struct(self, struct): pass def visit_enum(self, enum): pass", "if not self.errors: visitor.visit_typedef(self.name) super(CtypesTypedef, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return", "super(CtypesFunction, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"CFUNCTYPE(UNCHECKED(%s), %s)\" % (", "self.attrib = attrib self.variety = variety # \"struct\" or \"union\"", "True else: self.opaque = False self.src = src def visit(self,", "def anonymous_struct_tagnum(): global last_tagnum last_tagnum += 1 return last_tagnum def", "(\"char\", False, 0): \"c_ubyte\", (\"short\", True, 0): \"c_short\", (\"short\", False,", "enums = [] typedefs = [] errors = [] identifiers", "cls)) def visit(self, visitor): for error, cls in self.errors: visitor.visit_error(error,", "= base self.count = count def visit(self, visitor): self.base.visit(visitor) if", "self.base.visit(visitor) if self.count: self.count.visit(visitor) super(CtypesArray, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): if", "\"String\" instead of \"POINTER(c_char)\" if self.restype.py_string() == \"POINTER(c_char)\": if \"const\"", "self.opaque = False self.src = src def get_required_types(self): types =", "a type defined by a typedef.\"\"\" def __init__(self, name): 
super(CtypesTypedef,", "contains classes to represent a C type. All of them", "for name, ctype in self.members: ctype.visit(visitor) super(CtypesStruct, self).visit(visitor) def get_subtypes(self):", "name def visit(self, visitor): if not self.errors: visitor.visit_typedef(self.name) super(CtypesTypedef, self).visit(visitor)", "def visit(self, visitor): visitor.visit_struct(self) if not self.opaque: for name, ctype", "super(CtypesBitfield, self).__init__() self.base = base self.bitfield = bitfield def visit(self,", "super(CtypesFunction, self).__init__() self.restype = restype self.errcheck = CtypesNoErrorCheck() # Don't", "fmt_anonymous_struct_tag(self.tag) else: self.tag = anonymous_struct_tag() self.anonymous = True else: self.anonymous", "= restype self.errcheck = CtypesNoErrorCheck() # Don't allow POINTER(None) (c_void_p)", "(\"int\", True, 1): \"c_long\", (\"int\", False, 1): \"c_ulong\", (\"char\", True,", "\"c_int8\", (\"__int8\", True, 0): \"c_int8\", (\"int16_t\", True, 0): \"c_int16\", (\"__int16\",", "visit_identifier(self, identifier): identifiers.append(identifier) structs = [] enums = [] typedefs", "def visit_identifier(self, identifier): identifiers.append(identifier) structs = [] enums = []", "self.restype.qualifiers: self.restype = CtypesSpecial(\"c_char_p\") else: self.restype = CtypesSpecial(\"String\") self.argtypes =", "self.tag: if type(self.tag) == int: self.tag = fmt_anonymous_struct_tag(self.tag) else: self.tag", "= variadic self.attrib = attrib def visit(self, visitor): self.restype.visit(visitor) for", "ctypes_type_map[(self.name, self.signed, self.longs)] class CtypesSpecial(CtypesType): def __init__(self, name): super(CtypesSpecial, self).__init__()", "or \"union\" self.members = members if type(self.tag) == int or", "visit_identifier(self, identifier): # This one comes from inside ExpressionNodes. 
There", "The most important method of CtypesType and its subclasses is", "False, 0): \"c_uint\", (\"int\", True, 1): \"c_long\", (\"int\", False, 1):", "for m in self.members]) def py_string(self, ignore_can_be_ctype=None): return \"%s_%s\" %", "and its subclasses are completely independent of the parser module.", "self).__init__() self.tag = tag self.attrib = attrib self.variety = variety", "a CtypesType representing an array of four integers could be", "True, 0): \"c_int64\", (\"uint8_t\", True, 0): \"c_uint8\", (\"uint16_t\", True, 0):", "% (self.base.py_string(), self.count.py_string(False)) else: return \"%s * int(%s)\" % (self.base.py_string(),", "pass def visit_typedef(self, name): pass def visit_error(self, error, cls): pass", "self.src = src def get_required_types(self): types = super(CtypesStruct, self).get_required_types() types.add((self.variety,", "self.qualifiers = qualifiers def visit(self, visitor): if self.destination: self.destination.visit(visitor) super(CtypesPointer,", "when evaluated in the wrapper at runtime, results in a", "example, a CtypesType representing an array of four integers could", "signed, longs): super(CtypesSimple, self).__init__() self.name = name self.signed = signed", "True, 0): \"c_wchar\", (\"ptrdiff_t\", True, 0): \"c_ptrdiff_t\", # Requires definition", "for p in parameters] self.variadic = variadic self.attrib = attrib", "last_tagnum += 1 return \"anon_%d\" % last_tagnum class CtypesEnum(CtypesType): def", "if self.restype.py_string() == \"POINTER(c_char)\": if \"const\" in self.restype.qualifiers: self.restype =", "\"c_int64\", (\"__int64\", True, 0): \"c_int64\", (\"uint8_t\", True, 0): \"c_uint8\", (\"uint16_t\",", "typedefs, errors, identifiers # Remove one level of indirection from", "type. 
if ( type(self.restype) == CtypesPointer and type(self.restype.destination) == CtypesSimple", "self.restype.destination.name == \"void\" ): # we will provide a means", "= attrib def visit(self, visitor): self.restype.visit(visitor) for a in self.argtypes:", "+= 1 return last_tagnum def fmt_anonymous_struct_tag(num): return \"anon_%d\" % num", "== int or not self.tag: if type(self.tag) == int: self.tag", "\"c_int32\", (\"int64_t\", True, 0): \"c_int64\", (\"__int64\", True, 0): \"c_int64\", (\"uint8_t\",", "target def py_string(self, ignore_can_be_ctype=None): return \"lambda v,*a : cast(v, {})\".format(self.target.py_string())", "is None: return \"POINTER(%s)\" % self.base.py_string() if type(self.base) == CtypesArray:", "super(CtypesArray, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): if self.count is None: return", "returns it as an int. # Instead, convert to POINTER(c_void).", "= CtypesArray(CtypesSimple(\"int\",True,0),4) str(ctype) would evaluate to \"c_int * 4\". \"\"\"", "return t else: return t class CtypesType(object): def __init__(self): super(CtypesType,", "True, 0): \"c_int64\", (\"apr_uint64_t\", True, 0): \"c_uint64\", (\"wchar_t\", True, 0):", "# This one comes from inside ExpressionNodes. 
There may be", "= 0 def anonymous_struct_tagnum(): global last_tagnum last_tagnum += 1 return", "bitfield def visit(self, visitor): self.base.visit(visitor) super(CtypesBitfield, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None):", "type(self.tag) == int: self.tag = fmt_anonymous_struct_tag(self.tag) else: self.tag = anonymous_struct_tag()", "ctypesgen, CtypesType and its subclasses are completely independent of the", "cls in self.errors: visitor.visit_error(error, cls) class CtypesSimple(CtypesType): \"\"\"Represents a builtin", "py_string(self, ignore_can_be_ctype=None): return self.name class CtypesTypedef(CtypesType): \"\"\"Represents a type defined", "self.bitfield = bitfield def visit(self, visitor): self.base.visit(visitor) super(CtypesBitfield, self).visit(visitor) def", "= enumerators if not self.tag: self.tag = anonymous_enum_tag() self.anonymous =", "def __init__(self, base, bitfield): super(CtypesBitfield, self).__init__() self.base = base self.bitfield", "type. All of them classes are subclasses of CtypesType. Unlike", "from inside ExpressionNodes. There may be # ExpressionNode objects in", "self.members]) def py_string(self, ignore_can_be_ctype=None): return \"%s_%s\" % (self.variety, self.tag) last_tagnum", "preamble (\"ssize_t\", True, 0): \"c_ptrdiff_t\", # Requires definition in preamble", "and type(self.restype.destination) == CtypesSimple and self.restype.destination.name == \"void\" ): #", "int(%s)\" % (self.base.py_string(), self.count.py_string(False)) else: return \"%s * int(%s)\" %", "= signed self.longs = longs def py_string(self, ignore_can_be_ctype=None): return ctypes_type_map[(self.name,", "py_string method. str(ctype) returns a string which, when evaluated in", "subclasses is the py_string method. 
str(ctype) returns a string which,", "return \"%s * int(%s)\" % (self.base.py_string(), self.count.py_string(False)) class CtypesNoErrorCheck(object): def", "def visit(self, visitor): visitor.visit_enum(self) super(CtypesEnum, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return", "for typedefs # and function parameters. def remove_function_pointer(t): if type(t)", "would evaluate to \"c_int * 4\". \"\"\" import warnings __docformat__", "longs def py_string(self, ignore_can_be_ctype=None): return ctypes_type_map[(self.name, self.signed, self.longs)] class CtypesSpecial(CtypesType):", "destination, qualifiers): super(CtypesPointer, self).__init__() self.destination = destination self.qualifiers = qualifiers", "self.count is None: return \"POINTER(%s)\" % self.base.py_string() if type(self.base) ==", "= True else: self.anonymous = False if self.members == None:", "\"%s_%s\" % (self.variety, self.tag) last_tagnum = 0 def anonymous_enum_tag(): global", "inside ExpressionNodes. There may be # ExpressionNode objects in array", "True, 0): \"c_short\", (\"short\", False, 0): \"c_ushort\", (\"float\", True, 0):", "} ctypes_type_map_python_builtin = { (\"int\", True, 2): \"c_longlong\", (\"int\", False,", "ExpressionNode objects in array count expressions. 
pass def visit_type_and_collect_info(ctype): class", "if type(t) == CtypesPointer and type(t.destination) == CtypesFunction: return t.destination", "def __init__(self, restype, parameters, variadic, attrib=dict()): super(CtypesFunction, self).__init__() self.restype =", "CtypesPointer and type(self.restype.destination) == CtypesSimple and self.restype.destination.name == \"void\" ):", "self.argtypes: a.visit(visitor) super(CtypesFunction, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"CFUNCTYPE(UNCHECKED(%s), %s)\"", "\"c_int32\", (\"__int32\", True, 0): \"c_int32\", (\"int64_t\", True, 0): \"c_int64\", (\"__int64\",", "1): \"c_long\", (\"int\", False, 1): \"c_ulong\", (\"char\", True, 0): \"c_char\",", "visitor.visit_struct(self) if not self.opaque: for name, ctype in self.members: ctype.visit(visitor)", "last_tagnum last_tagnum += 1 return \"anon_%d\" % last_tagnum class CtypesEnum(CtypesType):", "True, 2): \"c_longlong\", (\"int\", False, 2): \"c_ulonglong\", (\"size_t\", True, 0):", "self).__init__() self.base = base self.bitfield = bitfield def visit(self, visitor):", "(\"apr_uint64_t\", True, 0): \"c_uint64\", (\"wchar_t\", True, 0): \"c_wchar\", (\"ptrdiff_t\", True,", "visit_typedef(self, typedef): typedefs.append(typedef) def visit_error(self, error, cls): errors.append((error, cls)) def", "True, 0): \"c_uint32\", (\"uint64_t\", True, 0): \"c_uint64\", (\"_Bool\", True, 0):", "return self.base.py_string() class CtypesPointer(CtypesType): def __init__(self, destination, qualifiers): super(CtypesPointer, self).__init__()", "ignore_can_be_ctype=None): return \"None\" def __bool__(self): return False __nonzero__ = __bool__", "True, 0): \"c_size_t\", (\"apr_int64_t\", True, 0): \"c_int64\", (\"off64_t\", True, 0):", "self.count.py_string(False)) else: return \"%s * int(%s)\" % (self.base.py_string(), self.count.py_string(False)) class", "% (self.variety, self.tag) last_tagnum = 0 def anonymous_enum_tag(): global last_tagnum", "def 
visit_enum(self, enum): pass def visit_typedef(self, name): pass def visit_error(self,", "return set() else: return set([m[1] for m in self.members]) def", "ignore_can_be_ctype=None): return self.name class CtypesBitfield(CtypesType): def __init__(self, base, bitfield): super(CtypesBitfield,", "(\"__int16\", True, 0): \"c_int16\", (\"int32_t\", True, 0): \"c_int32\", (\"__int32\", True,", "cls): errors.append((error, cls)) def visit_identifier(self, identifier): identifiers.append(identifier) structs = []", "of CtypesType. Unlike in previous versions of ctypesgen, CtypesType and", "integers could be created using: >>> ctype = CtypesArray(CtypesSimple(\"int\",True,0),4) str(ctype)", "super(CtypesStruct, self).get_required_types() types.add((self.variety, self.tag)) return types def visit(self, visitor): visitor.visit_struct(self)", "self.opaque: for name, ctype in self.members: ctype.visit(visitor) super(CtypesStruct, self).visit(visitor) def", "0): \"c_uint16\", (\"uint32_t\", True, 0): \"c_uint32\", (\"uint64_t\", True, 0): \"c_uint64\",", "0): \"c_short\", (\"short\", False, 0): \"c_ushort\", (\"float\", True, 0): \"c_float\",", "self.count = count def visit(self, visitor): self.base.visit(visitor) if self.count: self.count.visit(visitor)", "string which, when evaluated in the wrapper at runtime, results", "convert to POINTER(c_void). 
c_void is not a ctypes type, #", "in previous versions of ctypesgen, CtypesType and its subclasses are", "\"POINTER(%s)\" % self.destination.py_string() class CtypesArray(CtypesType): def __init__(self, base, count): super(CtypesArray,", "m in self.members]) def py_string(self, ignore_can_be_ctype=None): return \"%s_%s\" % (self.variety,", "self.src = src def visit(self, visitor): visitor.visit_enum(self) super(CtypesEnum, self).visit(visitor) def", "False if self.enumerators == None: self.opaque = True else: self.opaque", "\"POINTER(c_char)\": if \"const\" in self.restype.qualifiers: self.restype = CtypesSpecial(\"c_char_p\") else: self.restype", "typedef): typedefs.append(typedef) def visit_error(self, error, cls): errors.append((error, cls)) def visit_identifier(self,", "str(ctype) would evaluate to \"c_int * 4\". \"\"\" import warnings", "tag, enumerators, src=None): super(CtypesEnum, self).__init__() self.tag = tag self.enumerators =", "errors # when ctypes automagically returns it as an int.", "(\"int\", False, 0): \"c_uint\", (\"int\", True, 1): \"c_long\", (\"int\", False,", "CtypesPointerCast(object): def __init__(self, target): self.target = target def py_string(self, ignore_can_be_ctype=None):", "def __init__(self, destination, qualifiers): super(CtypesPointer, self).__init__() self.destination = destination self.qualifiers", "attrib=dict()): super(CtypesFunction, self).__init__() self.restype = restype self.errcheck = CtypesNoErrorCheck() #", "attrib, variety, members, src=None): super(CtypesStruct, self).__init__() self.tag = tag self.attrib", "__init__(self, base, count): super(CtypesArray, self).__init__() self.base = base self.count =", "\"POINTER(%s)\" % self.base.py_string() if type(self.base) == CtypesArray: return \"(%s) *", "= Visitor() ctype.visit(v) return structs, enums, typedefs, errors, identifiers #", "False, 0): \"c_ubyte\", (\"short\", True, 0): \"c_short\", (\"short\", False, 0):", "\"None\" def __bool__(self): return False __nonzero__ 
= __bool__ class CtypesPointerCast(object):", "0): \"c_int64\", (\"off64_t\", True, 0): \"c_int64\", (\"apr_uint64_t\", True, 0): \"c_uint64\",", "self).__init__() self.name = name def visit(self, visitor): if not self.errors:", "0): \"c_int64\", (\"uint8_t\", True, 0): \"c_uint8\", (\"uint16_t\", True, 0): \"c_uint16\",", "\"const\" in self.restype.qualifiers: self.restype = CtypesSpecial(\"c_char_p\") else: self.restype = CtypesSpecial(\"String\")", "self.tag = tag self.attrib = attrib self.variety = variety #", "name def py_string(self, ignore_can_be_ctype=None): return self.name class CtypesTypedef(CtypesType): \"\"\"Represents a", "def __init__(self, target): self.target = target def py_string(self, ignore_can_be_ctype=None): return", "\"c_uint64\", (\"wchar_t\", True, 0): \"c_wchar\", (\"ptrdiff_t\", True, 0): \"c_ptrdiff_t\", #", "anonymous_struct_tag(): return fmt_anonymous_struct_tag(anonymous_struct_tagnum()) class CtypesStruct(CtypesType): def __init__(self, tag, attrib, variety,", "preamble (\"va_list\", True, 0): \"c_void_p\", } # This protocol is", "Instead, convert to POINTER(c_void). c_void is not a ctypes type,", "last_tagnum def fmt_anonymous_struct_tag(num): return \"anon_%d\" % num def anonymous_struct_tag(): return", "def __init__(self): super(CtypesType, self).__init__() self.errors = [] def __repr__(self): return", "\"CFUNCTYPE(UNCHECKED(%s), %s)\" % ( self.restype.py_string(), \", \".join([a.py_string() for a in", "module. 
The most important method of CtypesType and its subclasses", "(\"int\", False, 2): \"c_ulonglong\", (\"size_t\", True, 0): \"c_size_t\", (\"apr_int64_t\", True,", "global last_tagnum last_tagnum += 1 return last_tagnum def fmt_anonymous_struct_tag(num): return", "py_string(self, ignore_can_be_ctype=None): return self.base.py_string() class CtypesPointer(CtypesType): def __init__(self, destination, qualifiers):", "= { (\"int\", True, 2): \"c_longlong\", (\"int\", False, 2): \"c_ulonglong\",", "of indirection from funtion pointer; needed for typedefs # and", "def get_subtypes(self): if self.opaque: return set() else: return set([m[1] for", "\"c_wchar\", (\"ptrdiff_t\", True, 0): \"c_ptrdiff_t\", # Requires definition in preamble", "variety # \"struct\" or \"union\" self.members = members if type(self.tag)", "0): \"c_int32\", (\"__int32\", True, 0): \"c_int32\", (\"int64_t\", True, 0): \"c_int64\",", "return last_tagnum def fmt_anonymous_struct_tag(num): return \"anon_%d\" % num def anonymous_struct_tag():", "if not self.tag: self.tag = anonymous_enum_tag() self.anonymous = True else:", "self.errors = [] def __repr__(self): return '<Ctype (%s) \"%s\">' %", "return self.name class CtypesBitfield(CtypesType): def __init__(self, base, bitfield): super(CtypesBitfield, self).__init__()", "self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): if self.count is None: return \"POINTER(%s)\"", "__init__(self, tag, attrib, variety, members, src=None): super(CtypesStruct, self).__init__() self.tag =", "self.members: ctype.visit(visitor) super(CtypesStruct, self).visit(visitor) def get_subtypes(self): if self.opaque: return set()", "__init__(self, name): super(CtypesSpecial, self).__init__() self.name = name def py_string(self, ignore_can_be_ctype=None):", "self.restype.visit(visitor) for a in self.argtypes: a.visit(visitor) super(CtypesFunction, self).visit(visitor) def py_string(self,", "count expressions. 
pass def visit_type_and_collect_info(ctype): class Visitor(CtypesTypeVisitor): def visit_struct(self, struct):", "represent a C type. All of them classes are subclasses", "self.tag = fmt_anonymous_struct_tag(self.tag) else: self.tag = anonymous_struct_tag() self.anonymous = True", "# \"struct\" or \"union\" self.members = members if type(self.tag) ==", "self.longs)] class CtypesSpecial(CtypesType): def __init__(self, name): super(CtypesSpecial, self).__init__() self.name =", "allow POINTER(None) (c_void_p) as a restype... causes errors # when", "self.anonymous = False if self.members == None: self.opaque = True", "False, 2): \"c_ulonglong\", (\"size_t\", True, 0): \"c_size_t\", (\"apr_int64_t\", True, 0):", "= destination self.qualifiers = qualifiers def visit(self, visitor): if self.destination:", "class CtypesPointer(CtypesType): def __init__(self, destination, qualifiers): super(CtypesPointer, self).__init__() self.destination =", "restype self.errcheck = CtypesNoErrorCheck() # Don't allow POINTER(None) (c_void_p) as", "cls) class CtypesSimple(CtypesType): \"\"\"Represents a builtin type, like \"char\" or", "to \"c_int * 4\". \"\"\" import warnings __docformat__ = \"restructuredtext\"", "classes are subclasses of CtypesType. Unlike in previous versions of", "# Don't allow POINTER(None) (c_void_p) as a restype... causes errors", "py_string(self, ignore_can_be_ctype=None): return \"POINTER(%s)\" % self.destination.py_string() class CtypesArray(CtypesType): def __init__(self,", "an int. # Instead, convert to POINTER(c_void). c_void is not", "# Instead, convert to POINTER(c_void). c_void is not a ctypes", "self.argtypes = [remove_function_pointer(p) for p in parameters] self.variadic = variadic", "anonymous_enum_tag() self.anonymous = True else: self.anonymous = False if self.enumerators", "when ctypes automagically returns it as an int. 
# Instead,", "True, 0): \"c_double\", (\"double\", True, 1): \"c_longdouble\", (\"int8_t\", True, 0):", "0): \"c_int64\", (\"__int64\", True, 0): \"c_int64\", (\"uint8_t\", True, 0): \"c_uint8\",", "CtypesType(object): def __init__(self): super(CtypesType, self).__init__() self.errors = [] def __repr__(self):", "= count def visit(self, visitor): self.base.visit(visitor) if self.count: self.count.visit(visitor) super(CtypesArray,", "in preamble (\"ssize_t\", True, 0): \"c_ptrdiff_t\", # Requires definition in", "ignore_can_be_ctype=None): if self.count is None: return \"POINTER(%s)\" % self.base.py_string() if", "a typedef.\"\"\" def __init__(self, name): super(CtypesTypedef, self).__init__() self.name = name", "return \"None\" def __bool__(self): return False __nonzero__ = __bool__ class", "def visit(self, visitor): self.restype.visit(visitor) for a in self.argtypes: a.visit(visitor) super(CtypesFunction,", "return types def visit(self, visitor): visitor.visit_struct(self) if not self.opaque: for", "results in a ctypes type object. For example, a CtypesType", "enumerators if not self.tag: self.tag = anonymous_enum_tag() self.anonymous = True", "0): \"c_int32\", (\"int64_t\", True, 0): \"c_int64\", (\"__int64\", True, 0): \"c_int64\",", "ctype.visit(v) return structs, enums, typedefs, errors, identifiers # Remove one", "# and function parameters. def remove_function_pointer(t): if type(t) == CtypesPointer", "(c_void_p) as a restype... causes errors # when ctypes automagically", "typedef.\"\"\" def __init__(self, name): super(CtypesTypedef, self).__init__() self.name = name def", "\"c_ptrdiff_t\", # Requires definition in preamble (\"va_list\", True, 0): \"c_void_p\",", "ctypes automagically returns it as an int. 
# Instead, convert", "self.restype.py_string(), \", \".join([a.py_string() for a in self.argtypes]), ) last_tagnum =", "True, 0): \"c_char\", (\"char\", False, 0): \"c_ubyte\", (\"short\", True, 0):", "def __init__(self, tag, enumerators, src=None): super(CtypesEnum, self).__init__() self.tag = tag", "builtin type, like \"char\" or \"int\".\"\"\" def __init__(self, name, signed,", "self.longs = longs def py_string(self, ignore_can_be_ctype=None): return ctypes_type_map[(self.name, self.signed, self.longs)]", "src def get_required_types(self): types = super(CtypesStruct, self).get_required_types() types.add((self.variety, self.tag)) return", "the wrapper at runtime, results in a ctypes type object.", "0): \"c_ushort\", (\"float\", True, 0): \"c_float\", (\"double\", True, 0): \"c_double\",", "\"c_double\", (\"double\", True, 1): \"c_longdouble\", (\"int8_t\", True, 0): \"c_int8\", (\"__int8\",", "self.destination = destination self.qualifiers = qualifiers def visit(self, visitor): if", "super(CtypesStruct, self).visit(visitor) def get_subtypes(self): if self.opaque: return set() else: return", "provide a means of converting this to a c_void_p self.restype", "def visit_enum(self, enum): enums.append(enum) def visit_typedef(self, typedef): typedefs.append(typedef) def visit_error(self,", "types = super(CtypesStruct, self).get_required_types() types.add((self.variety, self.tag)) return types def visit(self,", "1): \"c_longdouble\", (\"int8_t\", True, 0): \"c_int8\", (\"__int8\", True, 0): \"c_int8\",", "self).visit(visitor) def get_subtypes(self): if self.opaque: return set() else: return set([m[1]", "pass def visit_error(self, error, cls): pass def visit_identifier(self, identifier): #", "Return \"String\" instead of \"POINTER(c_char)\" if self.restype.py_string() == \"POINTER(c_char)\": if", "of converting this to a c_void_p self.restype = CtypesPointer(CtypesSpecial(\"c_ubyte\"), ())", "of them classes are subclasses of CtypesType. 
Unlike in previous", "self.base.py_string() class CtypesPointer(CtypesType): def __init__(self, destination, qualifiers): super(CtypesPointer, self).__init__() self.destination", "Visitor() ctype.visit(v) return structs, enums, typedefs, errors, identifiers # Remove", "= name def py_string(self, ignore_can_be_ctype=None): return self.name class CtypesTypedef(CtypesType): \"\"\"Represents", "are subclasses of CtypesType. Unlike in previous versions of ctypesgen,", "= __bool__ class CtypesPointerCast(object): def __init__(self, target): self.target = target", "( type(self.restype) == CtypesPointer and type(self.restype.destination) == CtypesSimple and self.restype.destination.name", "identifiers.append(identifier) structs = [] enums = [] typedefs = []", "for a in self.argtypes: a.visit(visitor) super(CtypesFunction, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None):", "True, 0): \"c_int\", (\"int\", False, 0): \"c_uint\", (\"int\", True, 1):", "types def visit(self, visitor): visitor.visit_struct(self) if not self.opaque: for name,", "a ctypes type object. For example, a CtypesType representing an", "visit_struct(self, struct): pass def visit_enum(self, enum): pass def visit_typedef(self, name):", "return \"anon_%d\" % num def anonymous_struct_tag(): return fmt_anonymous_struct_tag(anonymous_struct_tagnum()) class CtypesStruct(CtypesType):", "== None: self.opaque = True else: self.opaque = False self.src", "self).get_required_types() types.add((self.variety, self.tag)) return types def visit(self, visitor): visitor.visit_struct(self) if", "visit(self, visitor): for error, cls in self.errors: visitor.visit_error(error, cls) class", "py_string(self, ignore_can_be_ctype=None): return \"%s_%s\" % (self.variety, self.tag) last_tagnum = 0", "if type(self.tag) == int: self.tag = fmt_anonymous_struct_tag(self.tag) else: self.tag =", "# when ctypes automagically returns it as an int. 
#", "if \"const\" in self.restype.qualifiers: self.restype = CtypesSpecial(\"c_char_p\") else: self.restype =", "visit_enum(self, enum): enums.append(enum) def visit_typedef(self, typedef): typedefs.append(typedef) def visit_error(self, error,", "self).__init__() self.errors = [] def __repr__(self): return '<Ctype (%s) \"%s\">'", "def __init__(self, base, count): super(CtypesArray, self).__init__() self.base = base self.count", "py_string(self, ignore_can_be_ctype=None): return self.name class CtypesBitfield(CtypesType): def __init__(self, base, bitfield):", "identifier): # This one comes from inside ExpressionNodes. There may", "c_void_p self.restype = CtypesPointer(CtypesSpecial(\"c_ubyte\"), ()) self.errcheck = CtypesPointerCast(CtypesSpecial(\"c_void_p\")) # Return", "return ctypes_type_map[(self.name, self.signed, self.longs)] class CtypesSpecial(CtypesType): def __init__(self, name): super(CtypesSpecial,", "else: self.opaque = False self.src = src def visit(self, visitor):", "py_string(self, ignore_can_be_ctype=None): return \"lambda v,*a : cast(v, {})\".format(self.target.py_string()) class CtypesFunction(CtypesType):", "True, 0): \"c_ptrdiff_t\", # Requires definition in preamble (\"ssize_t\", True,", "int(%s)\" % (self.base.py_string(), self.count.py_string(False)) class CtypesNoErrorCheck(object): def py_string(self, ignore_can_be_ctype=None): return", "visitor): visitor.visit_struct(self) if not self.opaque: for name, ctype in self.members:", "\"POINTER(c_char)\" if self.restype.py_string() == \"POINTER(c_char)\": if \"const\" in self.restype.qualifiers: self.restype", "t class CtypesType(object): def __init__(self): super(CtypesType, self).__init__() self.errors = []", "True, 0): \"c_int64\", (\"__int64\", True, 0): \"c_int64\", (\"uint8_t\", True, 0):", "(\"int32_t\", True, 0): \"c_int32\", (\"__int32\", True, 0): \"c_int32\", (\"int64_t\", True,", "one comes from inside ExpressionNodes. 
There may be # ExpressionNode", "or \"int\".\"\"\" def __init__(self, name, signed, longs): super(CtypesSimple, self).__init__() self.name", "ctypes type object. For example, a CtypesType representing an array", "__repr__(self): return '<Ctype (%s) \"%s\">' % (type(self).__name__, self.py_string()) def error(self,", "restype... causes errors # when ctypes automagically returns it as", "False if self.members == None: self.opaque = True else: self.opaque", "if self.opaque: return set() else: return set([m[1] for m in", "in self.restype.qualifiers: self.restype = CtypesSpecial(\"c_char_p\") else: self.restype = CtypesSpecial(\"String\") self.argtypes", "= [] v = Visitor() ctype.visit(v) return structs, enums, typedefs,", "else: return set([m[1] for m in self.members]) def py_string(self, ignore_can_be_ctype=None):", "CtypesType representing an array of four integers could be created", "= longs def py_string(self, ignore_can_be_ctype=None): return ctypes_type_map[(self.name, self.signed, self.longs)] class", "visitor): if not self.errors: visitor.visit_typedef(self.name) super(CtypesTypedef, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None):", "\"struct\" or \"union\" self.members = members if type(self.tag) == int", "self.name class CtypesBitfield(CtypesType): def __init__(self, base, bitfield): super(CtypesBitfield, self).__init__() self.base", "None: self.opaque = True else: self.opaque = False self.src =", "\"c_uint16\", (\"uint32_t\", True, 0): \"c_uint32\", (\"uint64_t\", True, 0): \"c_uint64\", (\"_Bool\",", "last_tagnum last_tagnum += 1 return last_tagnum def fmt_anonymous_struct_tag(num): return \"anon_%d\"", "a c_void_p self.restype = CtypesPointer(CtypesSpecial(\"c_ubyte\"), ()) self.errcheck = CtypesPointerCast(CtypesSpecial(\"c_void_p\")) #", "% last_tagnum class CtypesEnum(CtypesType): def __init__(self, tag, enumerators, src=None): super(CtypesEnum,", "= \"restructuredtext\" ctypes_type_map = { # typename signed longs (\"void\",", 
"cast(v, {})\".format(self.target.py_string()) class CtypesFunction(CtypesType): def __init__(self, restype, parameters, variadic, attrib=dict()):", "\"c_uint32\", (\"uint64_t\", True, 0): \"c_uint64\", (\"_Bool\", True, 0): \"c_bool\", }", "= base self.bitfield = bitfield def visit(self, visitor): self.base.visit(visitor) super(CtypesBitfield,", "identifiers = [] v = Visitor() ctype.visit(v) return structs, enums,", "== \"void\" ): # we will provide a means of", "t.destination = remove_function_pointer(t.destination) return t else: return t class CtypesType(object):", "a in self.argtypes]), ) last_tagnum = 0 def anonymous_struct_tagnum(): global", "self.anonymous = True else: self.anonymous = False if self.enumerators ==", "enums.append(enum) def visit_typedef(self, typedef): typedefs.append(typedef) def visit_error(self, error, cls): errors.append((error,", "self.members == None: self.opaque = True else: self.opaque = False", "used for walking type trees. class CtypesTypeVisitor(object): def visit_struct(self, struct):", "src def visit(self, visitor): visitor.visit_enum(self) super(CtypesEnum, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None):", "True, 0): \"c_int32\", (\"int64_t\", True, 0): \"c_int64\", (\"__int64\", True, 0):", "super(CtypesEnum, self).__init__() self.tag = tag self.enumerators = enumerators if not", "defined by a typedef.\"\"\" def __init__(self, name): super(CtypesTypedef, self).__init__() self.name", "self.errcheck = CtypesNoErrorCheck() # Don't allow POINTER(None) (c_void_p) as a", "{ # typename signed longs (\"void\", True, 0): \"None\", (\"int\",", "\"c_bool\", } ctypes_type_map_python_builtin = { (\"int\", True, 2): \"c_longlong\", (\"int\",", "class CtypesBitfield(CtypesType): def __init__(self, base, bitfield): super(CtypesBitfield, self).__init__() self.base =", "% ( self.restype.py_string(), \", \".join([a.py_string() for a in self.argtypes]), )", "remove_function_pointer(t.destination) return t else: return t class 
CtypesType(object): def __init__(self):", "class CtypesSpecial(CtypesType): def __init__(self, name): super(CtypesSpecial, self).__init__() self.name = name", "self.count.py_string(False)) class CtypesNoErrorCheck(object): def py_string(self, ignore_can_be_ctype=None): return \"None\" def __bool__(self):", "__bool__(self): return False __nonzero__ = __bool__ class CtypesPointerCast(object): def __init__(self,", "typedefs # and function parameters. def remove_function_pointer(t): if type(t) ==", "# you can make it any arbitrary type. if (", "return \"(%s) * int(%s)\" % (self.base.py_string(), self.count.py_string(False)) else: return \"%s", "(\"void\", True, 0): \"None\", (\"int\", True, 0): \"c_int\", (\"int\", False,", "super(CtypesBitfield, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.base.py_string() class CtypesPointer(CtypesType): def", "important method of CtypesType and its subclasses is the py_string", "This protocol is used for walking type trees. class CtypesTypeVisitor(object):", "will provide a means of converting this to a c_void_p", "self.py_string()) def error(self, message, cls=None): self.errors.append((message, cls)) def visit(self, visitor):", "\"c_ptrdiff_t\", # Requires definition in preamble (\"ssize_t\", True, 0): \"c_ptrdiff_t\",", "and its subclasses is the py_string method. 
str(ctype) returns a", "__init__(self, target): self.target = target def py_string(self, ignore_can_be_ctype=None): return \"lambda", "last_tagnum = 0 def anonymous_struct_tagnum(): global last_tagnum last_tagnum += 1", "\"None\", (\"int\", True, 0): \"c_int\", (\"int\", False, 0): \"c_uint\", (\"int\",", "def __init__(self, name): super(CtypesSpecial, self).__init__() self.name = name def py_string(self,", "False, 0): \"c_ushort\", (\"float\", True, 0): \"c_float\", (\"double\", True, 0):", "if self.destination: self.destination.visit(visitor) super(CtypesPointer, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"POINTER(%s)\"", "self.tag: self.tag = anonymous_enum_tag() self.anonymous = True else: self.anonymous =", "def visit(self, visitor): if not self.errors: visitor.visit_typedef(self.name) super(CtypesTypedef, self).visit(visitor) def", "py_string(self, ignore_can_be_ctype=None): if self.count is None: return \"POINTER(%s)\" % self.base.py_string()", "(self.base.py_string(), self.count.py_string(False)) else: return \"%s * int(%s)\" % (self.base.py_string(), self.count.py_string(False))", "if not self.opaque: for name, ctype in self.members: ctype.visit(visitor) super(CtypesStruct,", "(\"int16_t\", True, 0): \"c_int16\", (\"__int16\", True, 0): \"c_int16\", (\"int32_t\", True,", "CtypesArray(CtypesType): def __init__(self, base, count): super(CtypesArray, self).__init__() self.base = base", "__init__(self, restype, parameters, variadic, attrib=dict()): super(CtypesFunction, self).__init__() self.restype = restype", "self.restype = CtypesSpecial(\"String\") self.argtypes = [remove_function_pointer(p) for p in parameters]", "= super(CtypesStruct, self).get_required_types() types.add((self.variety, self.tag)) return types def visit(self, visitor):", "self.errors: visitor.visit_error(error, cls) class CtypesSimple(CtypesType): \"\"\"Represents a builtin type, like", "c_void is not a ctypes type, # you can make", "self.opaque: return 
set() else: return set([m[1] for m in self.members])", "can make it any arbitrary type. if ( type(self.restype) ==", "structs, enums, typedefs, errors, identifiers # Remove one level of", "self.anonymous = True else: self.anonymous = False if self.members ==", "self.enumerators = enumerators if not self.tag: self.tag = anonymous_enum_tag() self.anonymous", "else: self.anonymous = False if self.enumerators == None: self.opaque =", "ctype.visit(visitor) super(CtypesStruct, self).visit(visitor) def get_subtypes(self): if self.opaque: return set() else:", "error, cls): pass def visit_identifier(self, identifier): # This one comes", "visit(self, visitor): if not self.errors: visitor.visit_typedef(self.name) super(CtypesTypedef, self).visit(visitor) def py_string(self,", "return \"anon_%d\" % last_tagnum class CtypesEnum(CtypesType): def __init__(self, tag, enumerators,", "Visitor(CtypesTypeVisitor): def visit_struct(self, struct): structs.append(struct) def visit_enum(self, enum): enums.append(enum) def", "class CtypesType(object): def __init__(self): super(CtypesType, self).__init__() self.errors = [] def", "class Visitor(CtypesTypeVisitor): def visit_struct(self, struct): structs.append(struct) def visit_enum(self, enum): enums.append(enum)", "trees. class CtypesTypeVisitor(object): def visit_struct(self, struct): pass def visit_enum(self, enum):", "[] v = Visitor() ctype.visit(v) return structs, enums, typedefs, errors,", "return self.name class CtypesTypedef(CtypesType): \"\"\"Represents a type defined by a", "identifier): identifiers.append(identifier) structs = [] enums = [] typedefs =", "expressions. 
pass def visit_type_and_collect_info(ctype): class Visitor(CtypesTypeVisitor): def visit_struct(self, struct): structs.append(struct)", "return \"POINTER(%s)\" % self.destination.py_string() class CtypesArray(CtypesType): def __init__(self, base, count):", "} # This protocol is used for walking type trees.", "def py_string(self, ignore_can_be_ctype=None): return ctypes_type_map[(self.name, self.signed, self.longs)] class CtypesSpecial(CtypesType): def", "= attrib self.variety = variety # \"struct\" or \"union\" self.members", "if self.members == None: self.opaque = True else: self.opaque =", "= src def get_required_types(self): types = super(CtypesStruct, self).get_required_types() types.add((self.variety, self.tag))", "get_required_types(self): types = super(CtypesStruct, self).get_required_types() types.add((self.variety, self.tag)) return types def", "def visit(self, visitor): self.base.visit(visitor) super(CtypesBitfield, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return", "def fmt_anonymous_struct_tag(num): return \"anon_%d\" % num def anonymous_struct_tag(): return fmt_anonymous_struct_tag(anonymous_struct_tagnum())", "name): super(CtypesTypedef, self).__init__() self.name = name def visit(self, visitor): if", "independent of the parser module. The most important method of", "0): \"c_ptrdiff_t\", # Requires definition in preamble (\"va_list\", True, 0):", "def visit(self, visitor): self.base.visit(visitor) if self.count: self.count.visit(visitor) super(CtypesArray, self).visit(visitor) def", "enum): enums.append(enum) def visit_typedef(self, typedef): typedefs.append(typedef) def visit_error(self, error, cls):", "# ExpressionNode objects in array count expressions. 
pass def visit_type_and_collect_info(ctype):", "super(CtypesTypedef, self).__init__() self.name = name def visit(self, visitor): if not", "0): \"c_int8\", (\"__int8\", True, 0): \"c_int8\", (\"int16_t\", True, 0): \"c_int16\",", "def visit_typedef(self, name): pass def visit_error(self, error, cls): pass def", "There may be # ExpressionNode objects in array count expressions.", "visit(self, visitor): self.base.visit(visitor) if self.count: self.count.visit(visitor) super(CtypesArray, self).visit(visitor) def py_string(self,", "0): \"c_int16\", (\"int32_t\", True, 0): \"c_int32\", (\"__int32\", True, 0): \"c_int32\",", "self.variadic = variadic self.attrib = attrib def visit(self, visitor): self.restype.visit(visitor)", "+= 1 return \"anon_%d\" % last_tagnum class CtypesEnum(CtypesType): def __init__(self,", "False __nonzero__ = __bool__ class CtypesPointerCast(object): def __init__(self, target): self.target", "self.errcheck = CtypesPointerCast(CtypesSpecial(\"c_void_p\")) # Return \"String\" instead of \"POINTER(c_char)\" if", "self.tag)) return types def visit(self, visitor): visitor.visit_struct(self) if not self.opaque:", "most important method of CtypesType and its subclasses is the", "set() else: return set([m[1] for m in self.members]) def py_string(self,", "which, when evaluated in the wrapper at runtime, results in", "last_tagnum = 0 def anonymous_enum_tag(): global last_tagnum last_tagnum += 1", "in self.members: ctype.visit(visitor) super(CtypesStruct, self).visit(visitor) def get_subtypes(self): if self.opaque: return", "by a typedef.\"\"\" def __init__(self, name): super(CtypesTypedef, self).__init__() self.name =", "Don't allow POINTER(None) (c_void_p) as a restype... causes errors #", "C type. All of them classes are subclasses of CtypesType.", "class CtypesPointerCast(object): def __init__(self, target): self.target = target def py_string(self,", "a restype... 
causes errors # when ctypes automagically returns it", "(\"ssize_t\", True, 0): \"c_ptrdiff_t\", # Requires definition in preamble (\"va_list\",", "\".join([a.py_string() for a in self.argtypes]), ) last_tagnum = 0 def", "type, # you can make it any arbitrary type. if", "subclasses are completely independent of the parser module. The most", "False, 1): \"c_ulong\", (\"char\", True, 0): \"c_char\", (\"char\", False, 0):", "restype, parameters, variadic, attrib=dict()): super(CtypesFunction, self).__init__() self.restype = restype self.errcheck", "parameters, variadic, attrib=dict()): super(CtypesFunction, self).__init__() self.restype = restype self.errcheck =", "instead of \"POINTER(c_char)\" if self.restype.py_string() == \"POINTER(c_char)\": if \"const\" in", "int: self.tag = fmt_anonymous_struct_tag(self.tag) else: self.tag = anonymous_struct_tag() self.anonymous =", "one level of indirection from funtion pointer; needed for typedefs", "num def anonymous_struct_tag(): return fmt_anonymous_struct_tag(anonymous_struct_tagnum()) class CtypesStruct(CtypesType): def __init__(self, tag,", "[] errors = [] identifiers = [] v = Visitor()", "src=None): super(CtypesStruct, self).__init__() self.tag = tag self.attrib = attrib self.variety", "any arbitrary type. if ( type(self.restype) == CtypesPointer and type(self.restype.destination)", "= anonymous_struct_tag() self.anonymous = True else: self.anonymous = False if", "tag self.enumerators = enumerators if not self.tag: self.tag = anonymous_enum_tag()", "ctypes_type_map = { # typename signed longs (\"void\", True, 0):", "* int(%s)\" % (self.base.py_string(), self.count.py_string(False)) else: return \"%s * int(%s)\"", "walking type trees. 
class CtypesTypeVisitor(object): def visit_struct(self, struct): pass def", ") last_tagnum = 0 def anonymous_struct_tagnum(): global last_tagnum last_tagnum +=", "(\"uint16_t\", True, 0): \"c_uint16\", (\"uint32_t\", True, 0): \"c_uint32\", (\"uint64_t\", True,", "ignore_can_be_ctype=None): return \"POINTER(%s)\" % self.destination.py_string() class CtypesArray(CtypesType): def __init__(self, base,", "Unlike in previous versions of ctypesgen, CtypesType and its subclasses", "pass def visit_enum(self, enum): pass def visit_typedef(self, name): pass def", "using: >>> ctype = CtypesArray(CtypesSimple(\"int\",True,0),4) str(ctype) would evaluate to \"c_int", "\", \".join([a.py_string() for a in self.argtypes]), ) last_tagnum = 0", "parameters] self.variadic = variadic self.attrib = attrib def visit(self, visitor):", "__init__(self): super(CtypesType, self).__init__() self.errors = [] def __repr__(self): return '<Ctype", "True, 1): \"c_long\", (\"int\", False, 1): \"c_ulong\", (\"char\", True, 0):", "= name self.signed = signed self.longs = longs def py_string(self,", "type(self.tag) == int or not self.tag: if type(self.tag) == int:", "last_tagnum class CtypesEnum(CtypesType): def __init__(self, tag, enumerators, src=None): super(CtypesEnum, self).__init__()", "self.signed, self.longs)] class CtypesSpecial(CtypesType): def __init__(self, name): super(CtypesSpecial, self).__init__() self.name", "enumerators, src=None): super(CtypesEnum, self).__init__() self.tag = tag self.enumerators = enumerators", "True, 0): \"c_uint8\", (\"uint16_t\", True, 0): \"c_uint16\", (\"uint32_t\", True, 0):", "(\"int64_t\", True, 0): \"c_int64\", (\"__int64\", True, 0): \"c_int64\", (\"uint8_t\", True,", "return t class CtypesType(object): def __init__(self): super(CtypesType, self).__init__() self.errors =", "members if type(self.tag) == int or not self.tag: if type(self.tag)", "0): \"c_wchar\", (\"ptrdiff_t\", True, 0): \"c_ptrdiff_t\", # Requires definition in", "v = Visitor() 
ctype.visit(v) return structs, enums, typedefs, errors, identifiers", "\"c_uint64\", (\"_Bool\", True, 0): \"c_bool\", } ctypes_type_map_python_builtin = { (\"int\",", "def __init__(self, name, signed, longs): super(CtypesSimple, self).__init__() self.name = name", "visitor): self.base.visit(visitor) if self.count: self.count.visit(visitor) super(CtypesArray, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None):", "CtypesSimple(CtypesType): \"\"\"Represents a builtin type, like \"char\" or \"int\".\"\"\" def", "be # ExpressionNode objects in array count expressions. pass def", "type(t) == CtypesPointer: t.destination = remove_function_pointer(t.destination) return t else: return", "CtypesArray: return \"(%s) * int(%s)\" % (self.base.py_string(), self.count.py_string(False)) else: return", "(\"__int32\", True, 0): \"c_int32\", (\"int64_t\", True, 0): \"c_int64\", (\"__int64\", True,", "(\"int\", True, 2): \"c_longlong\", (\"int\", False, 2): \"c_ulonglong\", (\"size_t\", True,", "in a ctypes type object. 
For example, a CtypesType representing", "definition in preamble (\"va_list\", True, 0): \"c_void_p\", } # This", "# Requires definition in preamble (\"va_list\", True, 0): \"c_void_p\", }", "in preamble (\"va_list\", True, 0): \"c_void_p\", } # This protocol", "py_string(self, ignore_can_be_ctype=None): return ctypes_type_map[(self.name, self.signed, self.longs)] class CtypesSpecial(CtypesType): def __init__(self,", "= tag self.attrib = attrib self.variety = variety # \"struct\"", "visitor): for error, cls in self.errors: visitor.visit_error(error, cls) class CtypesSimple(CtypesType):", "= CtypesPointer(CtypesSpecial(\"c_ubyte\"), ()) self.errcheck = CtypesPointerCast(CtypesSpecial(\"c_void_p\")) # Return \"String\" instead", "else: self.opaque = False self.src = src def get_required_types(self): types", "0): \"c_uint8\", (\"uint16_t\", True, 0): \"c_uint16\", (\"uint32_t\", True, 0): \"c_uint32\",", "self.base = base self.count = count def visit(self, visitor): self.base.visit(visitor)", "\"void\" ): # we will provide a means of converting", "error, cls): errors.append((error, cls)) def visit_identifier(self, identifier): identifiers.append(identifier) structs =", "destination self.qualifiers = qualifiers def visit(self, visitor): if self.destination: self.destination.visit(visitor)", "anonymous_enum_tag(): global last_tagnum last_tagnum += 1 return \"anon_%d\" % last_tagnum", "longs): super(CtypesSimple, self).__init__() self.name = name self.signed = signed self.longs", "CtypesPointer(CtypesSpecial(\"c_ubyte\"), ()) self.errcheck = CtypesPointerCast(CtypesSpecial(\"c_void_p\")) # Return \"String\" instead of", "= fmt_anonymous_struct_tag(self.tag) else: self.tag = anonymous_struct_tag() self.anonymous = True else:", "self.opaque = False self.src = src def visit(self, visitor): visitor.visit_enum(self)", "visit_error(self, error, cls): pass def visit_identifier(self, identifier): # This one", "# we will provide a means of converting this to", "ctypes type, # you 
can make it any arbitrary type.", "for walking type trees. class CtypesTypeVisitor(object): def visit_struct(self, struct): pass", "comes from inside ExpressionNodes. There may be # ExpressionNode objects", "self.restype = CtypesPointer(CtypesSpecial(\"c_ubyte\"), ()) self.errcheck = CtypesPointerCast(CtypesSpecial(\"c_void_p\")) # Return \"String\"", "\"anon_%d\" % last_tagnum class CtypesEnum(CtypesType): def __init__(self, tag, enumerators, src=None):", "CtypesType and its subclasses is the py_string method. str(ctype) returns", "0): \"c_size_t\", (\"apr_int64_t\", True, 0): \"c_int64\", (\"off64_t\", True, 0): \"c_int64\",", "self).__init__() self.destination = destination self.qualifiers = qualifiers def visit(self, visitor):", "ignore_can_be_ctype=None): return \"%s_%s\" % (self.variety, self.tag) last_tagnum = 0 def", "self.target = target def py_string(self, ignore_can_be_ctype=None): return \"lambda v,*a :", "{})\".format(self.target.py_string()) class CtypesFunction(CtypesType): def __init__(self, restype, parameters, variadic, attrib=dict()): super(CtypesFunction,", "True, 0): \"c_void_p\", } # This protocol is used for", "self.tag) last_tagnum = 0 def anonymous_enum_tag(): global last_tagnum last_tagnum +=", "CtypesStruct(CtypesType): def __init__(self, tag, attrib, variety, members, src=None): super(CtypesStruct, self).__init__()", "= CtypesNoErrorCheck() # Don't allow POINTER(None) (c_void_p) as a restype...", "self.tag = tag self.enumerators = enumerators if not self.tag: self.tag", "return \"%s_%s\" % (self.variety, self.tag) last_tagnum = 0 def anonymous_enum_tag():", "may be # ExpressionNode objects in array count expressions. 
pass", "an array of four integers could be created using: >>>", "indirection from funtion pointer; needed for typedefs # and function", "\"c_size_t\", (\"apr_int64_t\", True, 0): \"c_int64\", (\"off64_t\", True, 0): \"c_int64\", (\"apr_uint64_t\",", "(\"double\", True, 1): \"c_longdouble\", (\"int8_t\", True, 0): \"c_int8\", (\"__int8\", True,", "(\"size_t\", True, 0): \"c_size_t\", (\"apr_int64_t\", True, 0): \"c_int64\", (\"off64_t\", True,", "elif type(t) == CtypesPointer: t.destination = remove_function_pointer(t.destination) return t else:", "* int(%s)\" % (self.base.py_string(), self.count.py_string(False)) class CtypesNoErrorCheck(object): def py_string(self, ignore_can_be_ctype=None):", "(\"__int8\", True, 0): \"c_int8\", (\"int16_t\", True, 0): \"c_int16\", (\"__int16\", True,", "(%s) \"%s\">' % (type(self).__name__, self.py_string()) def error(self, message, cls=None): self.errors.append((message,", "True, 0): \"c_uint16\", (\"uint32_t\", True, 0): \"c_uint32\", (\"uint64_t\", True, 0):", "self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"POINTER(%s)\" % self.destination.py_string() class CtypesArray(CtypesType):", "members, src=None): super(CtypesStruct, self).__init__() self.tag = tag self.attrib = attrib", "CtypesArray(CtypesSimple(\"int\",True,0),4) str(ctype) would evaluate to \"c_int * 4\". 
\"\"\" import", "For example, a CtypesType representing an array of four integers", "2): \"c_longlong\", (\"int\", False, 2): \"c_ulonglong\", (\"size_t\", True, 0): \"c_size_t\",", "struct): pass def visit_enum(self, enum): pass def visit_typedef(self, name): pass", "in the wrapper at runtime, results in a ctypes type", "\"int\".\"\"\" def __init__(self, name, signed, longs): super(CtypesSimple, self).__init__() self.name =", "# typename signed longs (\"void\", True, 0): \"None\", (\"int\", True,", "0): \"c_int8\", (\"int16_t\", True, 0): \"c_int16\", (\"__int16\", True, 0): \"c_int16\",", "0): \"c_uint\", (\"int\", True, 1): \"c_long\", (\"int\", False, 1): \"c_ulong\",", "def __init__(self, name): super(CtypesTypedef, self).__init__() self.name = name def visit(self,", "\"c_int16\", (\"int32_t\", True, 0): \"c_int32\", (\"__int32\", True, 0): \"c_int32\", (\"int64_t\",", "2): \"c_ulonglong\", (\"size_t\", True, 0): \"c_size_t\", (\"apr_int64_t\", True, 0): \"c_int64\",", "error, cls in self.errors: visitor.visit_error(error, cls) class CtypesSimple(CtypesType): \"\"\"Represents a", "= members if type(self.tag) == int or not self.tag: if", "\"anon_%d\" % num def anonymous_struct_tag(): return fmt_anonymous_struct_tag(anonymous_struct_tagnum()) class CtypesStruct(CtypesType): def", "0): \"c_ptrdiff_t\", # Requires definition in preamble (\"ssize_t\", True, 0):", "could be created using: >>> ctype = CtypesArray(CtypesSimple(\"int\",True,0),4) str(ctype) would", "super(CtypesPointer, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"POINTER(%s)\" % self.destination.py_string() class", "def __bool__(self): return False __nonzero__ = __bool__ class CtypesPointerCast(object): def", "as an int. # Instead, convert to POINTER(c_void). c_void is", "visit(self, visitor): visitor.visit_enum(self) super(CtypesEnum, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"enum_%s\"", "* 4\". 
\"\"\" import warnings __docformat__ = \"restructuredtext\" ctypes_type_map =", "type(self.restype) == CtypesPointer and type(self.restype.destination) == CtypesSimple and self.restype.destination.name ==", "0): \"c_bool\", } ctypes_type_map_python_builtin = { (\"int\", True, 2): \"c_longlong\",", "name): pass def visit_error(self, error, cls): pass def visit_identifier(self, identifier):", "\"c_longdouble\", (\"int8_t\", True, 0): \"c_int8\", (\"__int8\", True, 0): \"c_int8\", (\"int16_t\",", "visitor.visit_enum(self) super(CtypesEnum, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return \"enum_%s\" % self.tag", "for a in self.argtypes]), ) last_tagnum = 0 def anonymous_struct_tagnum():", "CtypesSimple and self.restype.destination.name == \"void\" ): # we will provide", "not self.errors: visitor.visit_typedef(self.name) super(CtypesTypedef, self).visit(visitor) def py_string(self, ignore_can_be_ctype=None): return self.name", "converting this to a c_void_p self.restype = CtypesPointer(CtypesSpecial(\"c_ubyte\"), ()) self.errcheck", "class CtypesTypedef(CtypesType): \"\"\"Represents a type defined by a typedef.\"\"\" def", "to a c_void_p self.restype = CtypesPointer(CtypesSpecial(\"c_ubyte\"), ()) self.errcheck = CtypesPointerCast(CtypesSpecial(\"c_void_p\"))", "int or not self.tag: if type(self.tag) == int: self.tag =", "of CtypesType and its subclasses is the py_string method. 
str(ctype)", "this to a c_void_p self.restype = CtypesPointer(CtypesSpecial(\"c_ubyte\"), ()) self.errcheck =", "\"union\" self.members = members if type(self.tag) == int or not", "self).__init__() self.restype = restype self.errcheck = CtypesNoErrorCheck() # Don't allow", "True else: self.opaque = False self.src = src def get_required_types(self):", "0): \"c_uint64\", (\"_Bool\", True, 0): \"c_bool\", } ctypes_type_map_python_builtin = {", "return '<Ctype (%s) \"%s\">' % (type(self).__name__, self.py_string()) def error(self, message,", "needed for typedefs # and function parameters. def remove_function_pointer(t): if", "py_string(self, ignore_can_be_ctype=None): return \"CFUNCTYPE(UNCHECKED(%s), %s)\" % ( self.restype.py_string(), \", \".join([a.py_string()", "v,*a : cast(v, {})\".format(self.target.py_string()) class CtypesFunction(CtypesType): def __init__(self, restype, parameters,", "return set([m[1] for m in self.members]) def py_string(self, ignore_can_be_ctype=None): return", "self.attrib = attrib def visit(self, visitor): self.restype.visit(visitor) for a in", "type(t) == CtypesPointer and type(t.destination) == CtypesFunction: return t.destination elif", "= name def visit(self, visitor): if not self.errors: visitor.visit_typedef(self.name) super(CtypesTypedef,", "evaluate to \"c_int * 4\". \"\"\" import warnings __docformat__ =", "else: self.tag = anonymous_struct_tag() self.anonymous = True else: self.anonymous =", "= False if self.members == None: self.opaque = True else:", "of the parser module. 
The most important method of CtypesType", "typedefs.append(typedef) def visit_error(self, error, cls): errors.append((error, cls)) def visit_identifier(self, identifier):", "CtypesEnum(CtypesType): def __init__(self, tag, enumerators, src=None): super(CtypesEnum, self).__init__() self.tag =", "base, bitfield): super(CtypesBitfield, self).__init__() self.base = base self.bitfield = bitfield", "anonymous_struct_tag() self.anonymous = True else: self.anonymous = False if self.members", "class CtypesEnum(CtypesType): def __init__(self, tag, enumerators, src=None): super(CtypesEnum, self).__init__() self.tag", "struct): structs.append(struct) def visit_enum(self, enum): enums.append(enum) def visit_typedef(self, typedef): typedefs.append(typedef)", "(\"uint32_t\", True, 0): \"c_uint32\", (\"uint64_t\", True, 0): \"c_uint64\", (\"_Bool\", True,", "# This protocol is used for walking type trees. class", "(self.variety, self.tag) last_tagnum = 0 def anonymous_enum_tag(): global last_tagnum last_tagnum", "method. str(ctype) returns a string which, when evaluated in the", "structs.append(struct) def visit_enum(self, enum): enums.append(enum) def visit_typedef(self, typedef): typedefs.append(typedef) def", "def visit_identifier(self, identifier): # This one comes from inside ExpressionNodes.", "fmt_anonymous_struct_tag(anonymous_struct_tagnum()) class CtypesStruct(CtypesType): def __init__(self, tag, attrib, variety, members, src=None):" ]
[ "constants import Screen import cursors class Text: def __init__(self, text):", "self._y = randint(self._r, Screen.height - self._r) self._r = randint(self.min_r, min(Screen.width,", "* len(self._text) - self._padding_len def _x_text_center_position(self): return (Screen.width - self._count_text_len())", "r): self._r = r @property def col(self): return self._col @col.setter", "f\"Score: {self.score}\", (Screen.bg - 2) % 16) class Circle: def", "pyxel.circ(x, y, self._r, self._col) class ReachCircle(Circle): def __init__(self): super().__init__() self.min_r", "self._col = (Screen.bg - 1) % 16 def zero(self): self._r", "def draw(self): pyxel.text(self._padding_right, self._padding_top, f\"Score: {self.score}\", (Screen.bg - 2) %", "= text self._symbol_len = 3 self._padding_len = 1 def _count_text_len(self):", "% 16 def zero(self): self._r = 0 def increase(self, size=1):", "x(self): return self._x @property def y(self): return self._y def respawn(self):", "def respawn(self): self._x = randint(self._r, Screen.width - self._r) self._y =", "% 16) class Circle: def __init__(self): self._r = 0 self._col", "def r(self): return self._r @r.setter def r(self, r): self._r =", "self._symbol_len = 3 self._padding_len = 1 def _count_text_len(self): return (", "= r @property def col(self): return self._col @col.setter def col(self,", "zero(self): self._r = 0 def increase(self, size=1): self._r += size", "def __init__(self): self._r = 0 self._col = (Screen.bg - 1)", "self._text, 2) class Score: def __init__(self, padding_right=2, padding_top=2): self._padding_right =", "draw(self, x, y): pyxel.circ(x, y, self._r, self._col) class ReachCircle(Circle): def", "def reduce(self): self.score -= 1 def draw(self): pyxel.text(self._padding_right, self._padding_top, f\"Score:", "x, y): pyxel.circ(x, y, self._r, self._col) class ReachCircle(Circle): def __init__(self):", "def _x_text_center_position(self): return (Screen.width - self._count_text_len()) // 2 def draw(self):", "import Screen 
import cursors class Text: def __init__(self, text): self._text", "y(self): return self._y def respawn(self): self._x = randint(self._r, Screen.width -", "= randint(self._r, Screen.width - self._r) self._y = randint(self._r, Screen.height -", "ReachCircle(Circle): def __init__(self): super().__init__() self.min_r = 10 self.respawn() @property def", "self._r) self._r = randint(self.min_r, min(Screen.width, Screen.height) // 2) - 4", "- self._r) self._r = randint(self.min_r, min(Screen.width, Screen.height) // 2) -", "1) % 16 def zero(self): self._r = 0 def increase(self,", "- 1) % 16 def zero(self): self._r = 0 def", "self._padding_top = padding_top self.score = 0 def increase(self): self.score +=", "return self._x @property def y(self): return self._y def respawn(self): self._x", "= randint(self.min_r, min(Screen.width, Screen.height) // 2) - 4 def draw(self):", "Screen import cursors class Text: def __init__(self, text): self._text =", "def draw(self): pyxel.text(self._x_text_center_position(), 0, self._text, 2) class Score: def __init__(self,", "return self._col @col.setter def col(self, color): self._col = color def", "def draw(self, x, y): pyxel.circ(x, y, self._r, self._col) class ReachCircle(Circle):", "+ self._padding_len ) * len(self._text) - self._padding_len def _x_text_center_position(self): return", "+= 1 def reduce(self): self.score -= 1 def draw(self): pyxel.text(self._padding_right,", "= padding_top self.score = 0 def increase(self): self.score += 1", "def __init__(self, padding_right=2, padding_top=2): self._padding_right = padding_right self._padding_top = padding_top", "= (Screen.bg - 1) % 16 def zero(self): self._r =", "return ( self._symbol_len + self._padding_len ) * len(self._text) - self._padding_len", "self._padding_len def _x_text_center_position(self): return (Screen.width - self._count_text_len()) // 2 def", "randint(self.min_r, min(Screen.width, Screen.height) // 2) - 4 def draw(self): pyxel.circb(self._x,", "- 2) % 16) class Circle: def 
__init__(self): self._r =", "col(self): return self._col @col.setter def col(self, color): self._col = color", "2) % 16) class Circle: def __init__(self): self._r = 0", "self._x = randint(self._r, Screen.width - self._r) self._y = randint(self._r, Screen.height", "randint import pyxel from constants import Screen import cursors class", "self._r = 0 def increase(self, size=1): self._r += size @property", "0 def increase(self, size=1): self._r += size @property def r(self):", "2) class Score: def __init__(self, padding_right=2, padding_top=2): self._padding_right = padding_right", "padding_right=2, padding_top=2): self._padding_right = padding_right self._padding_top = padding_top self.score =", "@property def y(self): return self._y def respawn(self): self._x = randint(self._r,", "def increase(self, size=1): self._r += size @property def r(self): return", "r @property def col(self): return self._col @col.setter def col(self, color):", "0 self._col = (Screen.bg - 1) % 16 def zero(self):", "{self.score}\", (Screen.bg - 2) % 16) class Circle: def __init__(self):", "import pyxel from constants import Screen import cursors class Text:", "Text: def __init__(self, text): self._text = text self._symbol_len = 3", "16) class Circle: def __init__(self): self._r = 0 self._col =", "self.score = 0 def increase(self): self.score += 1 def reduce(self):", "1 def draw(self): pyxel.text(self._padding_right, self._padding_top, f\"Score: {self.score}\", (Screen.bg - 2)", "Screen.height) // 2) - 4 def draw(self): pyxel.circb(self._x, self._y, self._r,", "increase(self): self.score += 1 def reduce(self): self.score -= 1 def", "_x_text_center_position(self): return (Screen.width - self._count_text_len()) // 2 def draw(self): pyxel.text(self._x_text_center_position(),", "def col(self): return self._col @col.setter def col(self, color): self._col =", "def r(self, r): self._r = r @property def col(self): return", "def increase(self): self.score += 1 def reduce(self): self.score -= 1", 
"self._r = 0 self._col = (Screen.bg - 1) % 16", "size=1): self._r += size @property def r(self): return self._r @r.setter", "increase(self, size=1): self._r += size @property def r(self): return self._r", "pyxel from constants import Screen import cursors class Text: def", "randint(self._r, Screen.width - self._r) self._y = randint(self._r, Screen.height - self._r)", "= 0 def increase(self): self.score += 1 def reduce(self): self.score", "self._padding_len = 1 def _count_text_len(self): return ( self._symbol_len + self._padding_len", "2 def draw(self): pyxel.text(self._x_text_center_position(), 0, self._text, 2) class Score: def", "r(self): return self._r @r.setter def r(self, r): self._r = r", "return (Screen.width - self._count_text_len()) // 2 def draw(self): pyxel.text(self._x_text_center_position(), 0,", "def _count_text_len(self): return ( self._symbol_len + self._padding_len ) * len(self._text)", "class Circle: def __init__(self): self._r = 0 self._col = (Screen.bg", "random import randint import pyxel from constants import Screen import", "import randint import pyxel from constants import Screen import cursors", "pyxel.text(self._x_text_center_position(), 0, self._text, 2) class Score: def __init__(self, padding_right=2, padding_top=2):", "__init__(self, text): self._text = text self._symbol_len = 3 self._padding_len =", "cursors class Text: def __init__(self, text): self._text = text self._symbol_len", "text): self._text = text self._symbol_len = 3 self._padding_len = 1", "= color def draw(self, x, y): pyxel.circ(x, y, self._r, self._col)", "draw(self): pyxel.text(self._x_text_center_position(), 0, self._text, 2) class Score: def __init__(self, padding_right=2,", "= 10 self.respawn() @property def x(self): return self._x @property def", "padding_top=2): self._padding_right = padding_right self._padding_top = padding_top self.score = 0", "- self._r) self._y = randint(self._r, Screen.height - self._r) self._r =", "+= size @property def r(self): return 
self._r @r.setter def r(self,", "self._padding_top, f\"Score: {self.score}\", (Screen.bg - 2) % 16) class Circle:", "Screen.width - self._r) self._y = randint(self._r, Screen.height - self._r) self._r", "0, self._text, 2) class Score: def __init__(self, padding_right=2, padding_top=2): self._padding_right", "3 self._padding_len = 1 def _count_text_len(self): return ( self._symbol_len +", "16 def zero(self): self._r = 0 def increase(self, size=1): self._r", "@r.setter def r(self, r): self._r = r @property def col(self):", "return self._y def respawn(self): self._x = randint(self._r, Screen.width - self._r)", "// 2 def draw(self): pyxel.text(self._x_text_center_position(), 0, self._text, 2) class Score:", "def x(self): return self._x @property def y(self): return self._y def", "self._padding_right = padding_right self._padding_top = padding_top self.score = 0 def", "self._r @r.setter def r(self, r): self._r = r @property def", "self.respawn() @property def x(self): return self._x @property def y(self): return", "padding_right self._padding_top = padding_top self.score = 0 def increase(self): self.score", "= 3 self._padding_len = 1 def _count_text_len(self): return ( self._symbol_len", "class ReachCircle(Circle): def __init__(self): super().__init__() self.min_r = 10 self.respawn() @property", "self._count_text_len()) // 2 def draw(self): pyxel.text(self._x_text_center_position(), 0, self._text, 2) class", "self._col) class ReachCircle(Circle): def __init__(self): super().__init__() self.min_r = 10 self.respawn()", "<gh_stars>0 from random import randint import pyxel from constants import", "size @property def r(self): return self._r @r.setter def r(self, r):", "self._col @col.setter def col(self, color): self._col = color def draw(self,", "(Screen.bg - 2) % 16) class Circle: def __init__(self): self._r", "y): pyxel.circ(x, y, self._r, self._col) class ReachCircle(Circle): def __init__(self): super().__init__()", "__init__(self): super().__init__() self.min_r = 10 
self.respawn() @property def x(self): return", "@col.setter def col(self, color): self._col = color def draw(self, x,", "r(self, r): self._r = r @property def col(self): return self._col", "def col(self, color): self._col = color def draw(self, x, y):", "__init__(self): self._r = 0 self._col = (Screen.bg - 1) %", "// 2) - 4 def draw(self): pyxel.circb(self._x, self._y, self._r, self._col)", "randint(self._r, Screen.height - self._r) self._r = randint(self.min_r, min(Screen.width, Screen.height) //", "= padding_right self._padding_top = padding_top self.score = 0 def increase(self):", "10 self.respawn() @property def x(self): return self._x @property def y(self):", "import cursors class Text: def __init__(self, text): self._text = text", "class Text: def __init__(self, text): self._text = text self._symbol_len =", "def __init__(self): super().__init__() self.min_r = 10 self.respawn() @property def x(self):", "( self._symbol_len + self._padding_len ) * len(self._text) - self._padding_len def", "(Screen.bg - 1) % 16 def zero(self): self._r = 0", "self._padding_len ) * len(self._text) - self._padding_len def _x_text_center_position(self): return (Screen.width", "draw(self): pyxel.text(self._padding_right, self._padding_top, f\"Score: {self.score}\", (Screen.bg - 2) % 16)", "Screen.height - self._r) self._r = randint(self.min_r, min(Screen.width, Screen.height) // 2)", "self._text = text self._symbol_len = 3 self._padding_len = 1 def", "super().__init__() self.min_r = 10 self.respawn() @property def x(self): return self._x", "y, self._r, self._col) class ReachCircle(Circle): def __init__(self): super().__init__() self.min_r =", "1 def reduce(self): self.score -= 1 def draw(self): pyxel.text(self._padding_right, self._padding_top,", "(Screen.width - self._count_text_len()) // 2 def draw(self): pyxel.text(self._x_text_center_position(), 0, self._text,", "self.score -= 1 def draw(self): pyxel.text(self._padding_right, self._padding_top, f\"Score: {self.score}\", 
(Screen.bg", "col(self, color): self._col = color def draw(self, x, y): pyxel.circ(x,", "1 def _count_text_len(self): return ( self._symbol_len + self._padding_len ) *", "text self._symbol_len = 3 self._padding_len = 1 def _count_text_len(self): return", "= randint(self._r, Screen.height - self._r) self._r = randint(self.min_r, min(Screen.width, Screen.height)", "@property def r(self): return self._r @r.setter def r(self, r): self._r", "self._x @property def y(self): return self._y def respawn(self): self._x =", "self._r = randint(self.min_r, min(Screen.width, Screen.height) // 2) - 4 def", "Score: def __init__(self, padding_right=2, padding_top=2): self._padding_right = padding_right self._padding_top =", "pyxel.text(self._padding_right, self._padding_top, f\"Score: {self.score}\", (Screen.bg - 2) % 16) class", "- self._padding_len def _x_text_center_position(self): return (Screen.width - self._count_text_len()) // 2", "padding_top self.score = 0 def increase(self): self.score += 1 def", "def zero(self): self._r = 0 def increase(self, size=1): self._r +=", "@property def col(self): return self._col @col.setter def col(self, color): self._col", "__init__(self, padding_right=2, padding_top=2): self._padding_right = padding_right self._padding_top = padding_top self.score", "self._r += size @property def r(self): return self._r @r.setter def", "= 0 self._col = (Screen.bg - 1) % 16 def", "return self._r @r.setter def r(self, r): self._r = r @property", "0 def increase(self): self.score += 1 def reduce(self): self.score -=", "Circle: def __init__(self): self._r = 0 self._col = (Screen.bg -", "self._col = color def draw(self, x, y): pyxel.circ(x, y, self._r,", "self._r, self._col) class ReachCircle(Circle): def __init__(self): super().__init__() self.min_r = 10", "_count_text_len(self): return ( self._symbol_len + self._padding_len ) * len(self._text) -", "from constants import Screen import cursors class Text: def __init__(self,", "self._r) self._y = randint(self._r, 
Screen.height - self._r) self._r = randint(self.min_r,", "self._y def respawn(self): self._x = randint(self._r, Screen.width - self._r) self._y", ") * len(self._text) - self._padding_len def _x_text_center_position(self): return (Screen.width -", "- self._count_text_len()) // 2 def draw(self): pyxel.text(self._x_text_center_position(), 0, self._text, 2)", "self._r = r @property def col(self): return self._col @col.setter def", "def y(self): return self._y def respawn(self): self._x = randint(self._r, Screen.width", "from random import randint import pyxel from constants import Screen", "color def draw(self, x, y): pyxel.circ(x, y, self._r, self._col) class", "= 1 def _count_text_len(self): return ( self._symbol_len + self._padding_len )", "-= 1 def draw(self): pyxel.text(self._padding_right, self._padding_top, f\"Score: {self.score}\", (Screen.bg -", "respawn(self): self._x = randint(self._r, Screen.width - self._r) self._y = randint(self._r,", "= 0 def increase(self, size=1): self._r += size @property def", "reduce(self): self.score -= 1 def draw(self): pyxel.text(self._padding_right, self._padding_top, f\"Score: {self.score}\",", "self.score += 1 def reduce(self): self.score -= 1 def draw(self):", "self._symbol_len + self._padding_len ) * len(self._text) - self._padding_len def _x_text_center_position(self):", "color): self._col = color def draw(self, x, y): pyxel.circ(x, y,", "self.min_r = 10 self.respawn() @property def x(self): return self._x @property", "len(self._text) - self._padding_len def _x_text_center_position(self): return (Screen.width - self._count_text_len()) //", "@property def x(self): return self._x @property def y(self): return self._y", "def __init__(self, text): self._text = text self._symbol_len = 3 self._padding_len", "min(Screen.width, Screen.height) // 2) - 4 def draw(self): pyxel.circb(self._x, self._y,", "class Score: def __init__(self, padding_right=2, padding_top=2): self._padding_right = padding_right self._padding_top" ]
[ "request type, checking for key value. \"\"\" regex_result = self.utils.get_search_regex(", "from manage \"\"\" def __init__(self, logger): self.TAG = self.__class__.__name__ self.logger", "str(err) self.messenger.mark_as_failed() else: self.messenger.code_type = Constants.USER_ERROR self.messenger.set_message = 'wrong request'", "regex_result: try: call_path_list = regex_result.groups()[0].split('/') call_path_list = [x for x", "self.utils.get_search_regex( url_path, Constants.RE_ADMIN) if regex_result: try: call_path_list = regex_result.groups()[0].split('/') call_path_list", "self.messenger.code_type = Constants.BACKUP_SCHEDULED self.messenger.set_message = 'backup scheduled' except Exception as", "self.messenger.code_type = Constants.USER_ERROR self.messenger.set_message = 'wrong request' self.messenger.mark_as_failed() return self.messenger", "[x for x in call_path_list if x != ''] #", "# All nodes if len(call_path_list) == 1 and call_path_list[0] ==", "Constants from pulzarutils.messenger import Messenger from pulzarcore.core_db import DB class", "call_path_list if x != ''] # All nodes if len(call_path_list)", "Constants.RE_ADMIN) if regex_result: try: call_path_list = regex_result.groups()[0].split('/') call_path_list = [x", "\"\"\"Handle admin operations from manage \"\"\" def __init__(self, logger): self.TAG", "== 1 and call_path_list[0] == 'start_backup': db_backup = DB(Constants.DB_BACKUP) db_backup.update_or_insert_value(", "Constants.BACKUP_SCHEDULED self.messenger.set_message = 'backup scheduled' except Exception as err: self.logger.exception('{}:{}'.format(self.TAG,", "in call_path_list if x != ''] # All nodes if", "operations from manage \"\"\" def __init__(self, logger): self.TAG = self.__class__.__name__", "self.messenger.code_type = Constants.PULZAR_ERROR self.messenger.set_message = str(err) self.messenger.mark_as_failed() else: self.messenger.code_type =", "for x in call_path_list if x != ''] # All", "self.messenger.set_message = 'backup 
scheduled' except Exception as err: self.logger.exception('{}:{}'.format(self.TAG, err))", "Exception as err: self.logger.exception('{}:{}'.format(self.TAG, err)) self.messenger.code_type = Constants.PULZAR_ERROR self.messenger.set_message =", "self.TAG = self.__class__.__name__ self.logger = logger self.utils = Utils() self.messenger", "b'1') self.messenger.code_type = Constants.BACKUP_SCHEDULED self.messenger.set_message = 'backup scheduled' except Exception", "url_path): \"\"\"Get request type, checking for key value. \"\"\" regex_result", "<reponame>cleve/varidb<filename>app/volume/admin_process.py from pulzarutils.utils import Utils from pulzarutils.utils import Constants from", "type, checking for key value. \"\"\" regex_result = self.utils.get_search_regex( url_path,", "class AdminProcess: \"\"\"Handle admin operations from manage \"\"\" def __init__(self,", "import Messenger from pulzarcore.core_db import DB class AdminProcess: \"\"\"Handle admin", "= Utils() self.messenger = Messenger() self.mark_of_local_verification = b'varidb_execute_file_verification' def process_request(self,", "== 'start_backup': db_backup = DB(Constants.DB_BACKUP) db_backup.update_or_insert_value( self.mark_of_local_verification, b'1') self.messenger.code_type =", "except Exception as err: self.logger.exception('{}:{}'.format(self.TAG, err)) self.messenger.code_type = Constants.PULZAR_ERROR self.messenger.set_message", "Utils from pulzarutils.utils import Constants from pulzarutils.messenger import Messenger from", "scheduled' except Exception as err: self.logger.exception('{}:{}'.format(self.TAG, err)) self.messenger.code_type = Constants.PULZAR_ERROR", "DB(Constants.DB_BACKUP) db_backup.update_or_insert_value( self.mark_of_local_verification, b'1') self.messenger.code_type = Constants.BACKUP_SCHEDULED self.messenger.set_message = 'backup", "if regex_result: try: call_path_list = regex_result.groups()[0].split('/') call_path_list = [x for", "manage \"\"\" def __init__(self, 
logger): self.TAG = self.__class__.__name__ self.logger =", "self.messenger = Messenger() self.mark_of_local_verification = b'varidb_execute_file_verification' def process_request(self, url_path): \"\"\"Get", "= Messenger() self.mark_of_local_verification = b'varidb_execute_file_verification' def process_request(self, url_path): \"\"\"Get request", "admin operations from manage \"\"\" def __init__(self, logger): self.TAG =", "= Constants.PULZAR_ERROR self.messenger.set_message = str(err) self.messenger.mark_as_failed() else: self.messenger.code_type = Constants.USER_ERROR", "b'varidb_execute_file_verification' def process_request(self, url_path): \"\"\"Get request type, checking for key", "from pulzarutils.messenger import Messenger from pulzarcore.core_db import DB class AdminProcess:", "checking for key value. \"\"\" regex_result = self.utils.get_search_regex( url_path, Constants.RE_ADMIN)", "nodes if len(call_path_list) == 1 and call_path_list[0] == 'start_backup': db_backup", "self.messenger.mark_as_failed() else: self.messenger.code_type = Constants.USER_ERROR self.messenger.set_message = 'wrong request' self.messenger.mark_as_failed()", "if len(call_path_list) == 1 and call_path_list[0] == 'start_backup': db_backup =", "self.utils = Utils() self.messenger = Messenger() self.mark_of_local_verification = b'varidb_execute_file_verification' def", "Constants.PULZAR_ERROR self.messenger.set_message = str(err) self.messenger.mark_as_failed() else: self.messenger.code_type = Constants.USER_ERROR self.messenger.set_message", "call_path_list = regex_result.groups()[0].split('/') call_path_list = [x for x in call_path_list", "= DB(Constants.DB_BACKUP) db_backup.update_or_insert_value( self.mark_of_local_verification, b'1') self.messenger.code_type = Constants.BACKUP_SCHEDULED self.messenger.set_message =", "self.messenger.set_message = str(err) self.messenger.mark_as_failed() else: self.messenger.code_type = Constants.USER_ERROR self.messenger.set_message =", 
"process_request(self, url_path): \"\"\"Get request type, checking for key value. \"\"\"", "DB class AdminProcess: \"\"\"Handle admin operations from manage \"\"\" def", "= self.__class__.__name__ self.logger = logger self.utils = Utils() self.messenger =", "value. \"\"\" regex_result = self.utils.get_search_regex( url_path, Constants.RE_ADMIN) if regex_result: try:", "= 'backup scheduled' except Exception as err: self.logger.exception('{}:{}'.format(self.TAG, err)) self.messenger.code_type", "= Constants.BACKUP_SCHEDULED self.messenger.set_message = 'backup scheduled' except Exception as err:", "pulzarutils.messenger import Messenger from pulzarcore.core_db import DB class AdminProcess: \"\"\"Handle", "logger self.utils = Utils() self.messenger = Messenger() self.mark_of_local_verification = b'varidb_execute_file_verification'", "import Utils from pulzarutils.utils import Constants from pulzarutils.messenger import Messenger", "from pulzarcore.core_db import DB class AdminProcess: \"\"\"Handle admin operations from", "Messenger from pulzarcore.core_db import DB class AdminProcess: \"\"\"Handle admin operations", "\"\"\"Get request type, checking for key value. 
\"\"\" regex_result =", "import Constants from pulzarutils.messenger import Messenger from pulzarcore.core_db import DB", "self.__class__.__name__ self.logger = logger self.utils = Utils() self.messenger = Messenger()", "def __init__(self, logger): self.TAG = self.__class__.__name__ self.logger = logger self.utils", "call_path_list = [x for x in call_path_list if x !=", "if x != ''] # All nodes if len(call_path_list) ==", "logger): self.TAG = self.__class__.__name__ self.logger = logger self.utils = Utils()", "\"\"\" regex_result = self.utils.get_search_regex( url_path, Constants.RE_ADMIN) if regex_result: try: call_path_list", "regex_result = self.utils.get_search_regex( url_path, Constants.RE_ADMIN) if regex_result: try: call_path_list =", "call_path_list[0] == 'start_backup': db_backup = DB(Constants.DB_BACKUP) db_backup.update_or_insert_value( self.mark_of_local_verification, b'1') self.messenger.code_type", "url_path, Constants.RE_ADMIN) if regex_result: try: call_path_list = regex_result.groups()[0].split('/') call_path_list =", "= str(err) self.messenger.mark_as_failed() else: self.messenger.code_type = Constants.USER_ERROR self.messenger.set_message = 'wrong", "as err: self.logger.exception('{}:{}'.format(self.TAG, err)) self.messenger.code_type = Constants.PULZAR_ERROR self.messenger.set_message = str(err)", "import DB class AdminProcess: \"\"\"Handle admin operations from manage \"\"\"", "pulzarcore.core_db import DB class AdminProcess: \"\"\"Handle admin operations from manage", "''] # All nodes if len(call_path_list) == 1 and call_path_list[0]", "pulzarutils.utils import Utils from pulzarutils.utils import Constants from pulzarutils.messenger import", "try: call_path_list = regex_result.groups()[0].split('/') call_path_list = [x for x in", "__init__(self, logger): self.TAG = self.__class__.__name__ self.logger = logger self.utils =", "= [x for x in call_path_list if x != '']", "AdminProcess: \"\"\"Handle admin operations from manage \"\"\" def 
__init__(self, logger):", "= b'varidb_execute_file_verification' def process_request(self, url_path): \"\"\"Get request type, checking for", "and call_path_list[0] == 'start_backup': db_backup = DB(Constants.DB_BACKUP) db_backup.update_or_insert_value( self.mark_of_local_verification, b'1')", "self.mark_of_local_verification, b'1') self.messenger.code_type = Constants.BACKUP_SCHEDULED self.messenger.set_message = 'backup scheduled' except", "else: self.messenger.code_type = Constants.USER_ERROR self.messenger.set_message = 'wrong request' self.messenger.mark_as_failed() return", "All nodes if len(call_path_list) == 1 and call_path_list[0] == 'start_backup':", "err: self.logger.exception('{}:{}'.format(self.TAG, err)) self.messenger.code_type = Constants.PULZAR_ERROR self.messenger.set_message = str(err) self.messenger.mark_as_failed()", "def process_request(self, url_path): \"\"\"Get request type, checking for key value.", "\"\"\" def __init__(self, logger): self.TAG = self.__class__.__name__ self.logger = logger", "Messenger() self.mark_of_local_verification = b'varidb_execute_file_verification' def process_request(self, url_path): \"\"\"Get request type,", "for key value. 
\"\"\" regex_result = self.utils.get_search_regex( url_path, Constants.RE_ADMIN) if", "'backup scheduled' except Exception as err: self.logger.exception('{}:{}'.format(self.TAG, err)) self.messenger.code_type =", "1 and call_path_list[0] == 'start_backup': db_backup = DB(Constants.DB_BACKUP) db_backup.update_or_insert_value( self.mark_of_local_verification,", "db_backup.update_or_insert_value( self.mark_of_local_verification, b'1') self.messenger.code_type = Constants.BACKUP_SCHEDULED self.messenger.set_message = 'backup scheduled'", "err)) self.messenger.code_type = Constants.PULZAR_ERROR self.messenger.set_message = str(err) self.messenger.mark_as_failed() else: self.messenger.code_type", "pulzarutils.utils import Constants from pulzarutils.messenger import Messenger from pulzarcore.core_db import", "self.logger.exception('{}:{}'.format(self.TAG, err)) self.messenger.code_type = Constants.PULZAR_ERROR self.messenger.set_message = str(err) self.messenger.mark_as_failed() else:", "regex_result.groups()[0].split('/') call_path_list = [x for x in call_path_list if x", "= logger self.utils = Utils() self.messenger = Messenger() self.mark_of_local_verification =", "x in call_path_list if x != ''] # All nodes", "len(call_path_list) == 1 and call_path_list[0] == 'start_backup': db_backup = DB(Constants.DB_BACKUP)", "= regex_result.groups()[0].split('/') call_path_list = [x for x in call_path_list if", "db_backup = DB(Constants.DB_BACKUP) db_backup.update_or_insert_value( self.mark_of_local_verification, b'1') self.messenger.code_type = Constants.BACKUP_SCHEDULED self.messenger.set_message", "'start_backup': db_backup = DB(Constants.DB_BACKUP) db_backup.update_or_insert_value( self.mark_of_local_verification, b'1') self.messenger.code_type = Constants.BACKUP_SCHEDULED", "from pulzarutils.utils import Constants from pulzarutils.messenger import Messenger from pulzarcore.core_db", "self.logger = logger self.utils = Utils() self.messenger = Messenger() 
self.mark_of_local_verification", "from pulzarutils.utils import Utils from pulzarutils.utils import Constants from pulzarutils.messenger", "!= ''] # All nodes if len(call_path_list) == 1 and", "self.mark_of_local_verification = b'varidb_execute_file_verification' def process_request(self, url_path): \"\"\"Get request type, checking", "Utils() self.messenger = Messenger() self.mark_of_local_verification = b'varidb_execute_file_verification' def process_request(self, url_path):", "key value. \"\"\" regex_result = self.utils.get_search_regex( url_path, Constants.RE_ADMIN) if regex_result:", "x != ''] # All nodes if len(call_path_list) == 1", "= self.utils.get_search_regex( url_path, Constants.RE_ADMIN) if regex_result: try: call_path_list = regex_result.groups()[0].split('/')" ]
[ "has_worked: logging.error(\"Nothing has been tested!\") snapshot_stack.delete() # depending on number", "we are reverted # completely (only one profile was run)", "= False profiles = get_viable_profiles(options.target, options.datastream, options.benchmark_id) if len(profiles) >", "options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'final', options.datastream, options.benchmark_id, runner=runner) snapshot_stack.revert(delete=False) if", "has been tested!\") snapshot_stack.delete() # depending on number of profiles", "(multiple profiles) or we are reverted # completely (only one", "reverted (multiple profiles) or we are reverted # completely (only", "and generated reports is the result. \"\"\" dom = ssg_test_suite.virt.connect_domain(options.hypervisor,", "for profile in profiles: logging.info(\"Evaluation of profile {0}.\".format(profile)) has_worked =", "dom = ssg_test_suite.virt.connect_domain(options.hypervisor, options.domain_name) if dom is None: sys.exit(1) snapshot_stack", "to be reverted (multiple profiles) or we are reverted #", "print_function import atexit import logging import sys import ssg_test_suite.oscap import", "tested!\") snapshot_stack.delete() # depending on number of profiles we have", "# depending on number of profiles we have either \"origin\"", "get_viable_profiles from ssg_test_suite.virt import SnapshotStack logging.getLogger(__name__).addHandler(logging.NullHandler()) def perform_profile_check(options): \"\"\"Perform profile", "= options.remediate_using ssg_test_suite.oscap.run_profile(domain_ip, profile, 'initial', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile,", "import sys import ssg_test_suite.oscap import ssg_test_suite.virt from ssg_test_suite.rule import get_viable_profiles", "runner = options.remediate_using ssg_test_suite.oscap.run_profile(domain_ip, profile, 'initial', options.datastream, options.benchmark_id, runner=runner) 
ssg_test_suite.oscap.run_profile(domain_ip,", "\"\"\"Perform profile check. Iterate over profiles in datastream and perform", "is the result. \"\"\" dom = ssg_test_suite.virt.connect_domain(options.hypervisor, options.domain_name) if dom", "profile, 'remediation', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'final', options.datastream, options.benchmark_id,", "options.domain_name) if dom is None: sys.exit(1) snapshot_stack = SnapshotStack(dom) atexit.register(snapshot_stack.clear)", "number of profiles we have either \"origin\" snapshot # still", "ssg_test_suite.virt.connect_domain(options.hypervisor, options.domain_name) if dom is None: sys.exit(1) snapshot_stack = SnapshotStack(dom)", "Also perform remediation run. Return value not defined, textual output", "{0}.\".format(profile)) has_worked = True runner = options.remediate_using ssg_test_suite.oscap.run_profile(domain_ip, profile, 'initial',", "perform_profile_check(options): \"\"\"Perform profile check. Iterate over profiles in datastream and", "import SnapshotStack logging.getLogger(__name__).addHandler(logging.NullHandler()) def perform_profile_check(options): \"\"\"Perform profile check. 
Iterate over", "Iterate over profiles in datastream and perform scanning of unaltered", "not defined, textual output and generated reports is the result.", "of profile {0}.\".format(profile)) has_worked = True runner = options.remediate_using ssg_test_suite.oscap.run_profile(domain_ip,", "be reverted (multiple profiles) or we are reverted # completely", "have either \"origin\" snapshot # still to be reverted (multiple", "> 1: snapshot_stack.create('profile') for profile in profiles: logging.info(\"Evaluation of profile", "options.datastream, options.benchmark_id) if len(profiles) > 1: snapshot_stack.create('profile') for profile in", "snapshot_stack.revert(delete=False) if not has_worked: logging.error(\"Nothing has been tested!\") snapshot_stack.delete() #", "import print_function import atexit import logging import sys import ssg_test_suite.oscap", "len(profiles) > 1: snapshot_stack.create('profile') for profile in profiles: logging.info(\"Evaluation of", "# still to be reverted (multiple profiles) or we are", "SnapshotStack(dom) atexit.register(snapshot_stack.clear) snapshot_stack.create('origin') ssg_test_suite.virt.start_domain(dom) domain_ip = ssg_test_suite.virt.determine_ip(dom) has_worked = False", "import atexit import logging import sys import ssg_test_suite.oscap import ssg_test_suite.virt", "check. 
Iterate over profiles in datastream and perform scanning of", "sys import ssg_test_suite.oscap import ssg_test_suite.virt from ssg_test_suite.rule import get_viable_profiles from", "True runner = options.remediate_using ssg_test_suite.oscap.run_profile(domain_ip, profile, 'initial', options.datastream, options.benchmark_id, runner=runner)", "options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'remediation', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip,", "profiles = get_viable_profiles(options.target, options.datastream, options.benchmark_id) if len(profiles) > 1: snapshot_stack.create('profile')", "from ssg_test_suite.rule import get_viable_profiles from ssg_test_suite.virt import SnapshotStack logging.getLogger(__name__).addHandler(logging.NullHandler()) def", "textual output and generated reports is the result. \"\"\" dom", "ssg_test_suite.rule import get_viable_profiles from ssg_test_suite.virt import SnapshotStack logging.getLogger(__name__).addHandler(logging.NullHandler()) def perform_profile_check(options):", "remediation run. 
Return value not defined, textual output and generated", "'remediation', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'final', options.datastream, options.benchmark_id, runner=runner)", "Return value not defined, textual output and generated reports is", "= get_viable_profiles(options.target, options.datastream, options.benchmark_id) if len(profiles) > 1: snapshot_stack.create('profile') for", "profile in profiles: logging.info(\"Evaluation of profile {0}.\".format(profile)) has_worked = True", "options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'remediation', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile,", "= ssg_test_suite.virt.determine_ip(dom) has_worked = False profiles = get_viable_profiles(options.target, options.datastream, options.benchmark_id)", "#!/usr/bin/env python2 from __future__ import print_function import atexit import logging", "\"origin\" snapshot # still to be reverted (multiple profiles) or", "runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'final', options.datastream, options.benchmark_id, runner=runner) snapshot_stack.revert(delete=False) if not", "from __future__ import print_function import atexit import logging import sys", "import get_viable_profiles from ssg_test_suite.virt import SnapshotStack logging.getLogger(__name__).addHandler(logging.NullHandler()) def perform_profile_check(options): \"\"\"Perform", "we have either \"origin\" snapshot # still to be reverted", "over profiles in datastream and perform scanning of unaltered VM", "dom is None: sys.exit(1) snapshot_stack = SnapshotStack(dom) atexit.register(snapshot_stack.clear) snapshot_stack.create('origin') ssg_test_suite.virt.start_domain(dom)", "still to be reverted (multiple profiles) or we are reverted", "snapshot_stack.delete() # depending on number of profiles we have either", "__future__ 
import print_function import atexit import logging import sys import", "perform scanning of unaltered VM using every profile according to", "defined, textual output and generated reports is the result. \"\"\"", "import ssg_test_suite.virt from ssg_test_suite.rule import get_viable_profiles from ssg_test_suite.virt import SnapshotStack", "profiles we have either \"origin\" snapshot # still to be", "on number of profiles we have either \"origin\" snapshot #", "SnapshotStack logging.getLogger(__name__).addHandler(logging.NullHandler()) def perform_profile_check(options): \"\"\"Perform profile check. Iterate over profiles", "ssg_test_suite.oscap.run_profile(domain_ip, profile, 'remediation', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'final', options.datastream,", "profiles in datastream and perform scanning of unaltered VM using", "profile, 'initial', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'remediation', options.datastream, options.benchmark_id,", "in datastream and perform scanning of unaltered VM using every", "not has_worked: logging.error(\"Nothing has been tested!\") snapshot_stack.delete() # depending on", "'final', options.datastream, options.benchmark_id, runner=runner) snapshot_stack.revert(delete=False) if not has_worked: logging.error(\"Nothing has", "get_viable_profiles(options.target, options.datastream, options.benchmark_id) if len(profiles) > 1: snapshot_stack.create('profile') for profile", "if len(profiles) > 1: snapshot_stack.create('profile') for profile in profiles: logging.info(\"Evaluation", "ssg_test_suite.oscap.run_profile(domain_ip, profile, 'initial', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'remediation', options.datastream,", "ssg_test_suite.oscap.run_profile(domain_ip, profile, 'final', options.datastream, options.benchmark_id, 
runner=runner) snapshot_stack.revert(delete=False) if not has_worked:", "profile {0}.\".format(profile)) has_worked = True runner = options.remediate_using ssg_test_suite.oscap.run_profile(domain_ip, profile,", "been tested!\") snapshot_stack.delete() # depending on number of profiles we", "depending on number of profiles we have either \"origin\" snapshot", "generated reports is the result. \"\"\" dom = ssg_test_suite.virt.connect_domain(options.hypervisor, options.domain_name)", "def perform_profile_check(options): \"\"\"Perform profile check. Iterate over profiles in datastream", "None: sys.exit(1) snapshot_stack = SnapshotStack(dom) atexit.register(snapshot_stack.clear) snapshot_stack.create('origin') ssg_test_suite.virt.start_domain(dom) domain_ip =", "ssg_test_suite.oscap import ssg_test_suite.virt from ssg_test_suite.rule import get_viable_profiles from ssg_test_suite.virt import", "according to input. Also perform remediation run. Return value not", "result. \"\"\" dom = ssg_test_suite.virt.connect_domain(options.hypervisor, options.domain_name) if dom is None:", "False profiles = get_viable_profiles(options.target, options.datastream, options.benchmark_id) if len(profiles) > 1:", "has_worked = False profiles = get_viable_profiles(options.target, options.datastream, options.benchmark_id) if len(profiles)", "reports is the result. \"\"\" dom = ssg_test_suite.virt.connect_domain(options.hypervisor, options.domain_name) if", "of profiles we have either \"origin\" snapshot # still to", "ssg_test_suite.virt.start_domain(dom) domain_ip = ssg_test_suite.virt.determine_ip(dom) has_worked = False profiles = get_viable_profiles(options.target,", "1: snapshot_stack.create('profile') for profile in profiles: logging.info(\"Evaluation of profile {0}.\".format(profile))", "profile check. 
Iterate over profiles in datastream and perform scanning", "value not defined, textual output and generated reports is the", "snapshot_stack.create('origin') ssg_test_suite.virt.start_domain(dom) domain_ip = ssg_test_suite.virt.determine_ip(dom) has_worked = False profiles =", "if dom is None: sys.exit(1) snapshot_stack = SnapshotStack(dom) atexit.register(snapshot_stack.clear) snapshot_stack.create('origin')", "\"\"\" dom = ssg_test_suite.virt.connect_domain(options.hypervisor, options.domain_name) if dom is None: sys.exit(1)", "has_worked = True runner = options.remediate_using ssg_test_suite.oscap.run_profile(domain_ip, profile, 'initial', options.datastream,", "ssg_test_suite.virt from ssg_test_suite.rule import get_viable_profiles from ssg_test_suite.virt import SnapshotStack logging.getLogger(__name__).addHandler(logging.NullHandler())", "either \"origin\" snapshot # still to be reverted (multiple profiles)", "profiles) or we are reverted # completely (only one profile", "options.remediate_using ssg_test_suite.oscap.run_profile(domain_ip, profile, 'initial', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'remediation',", "is None: sys.exit(1) snapshot_stack = SnapshotStack(dom) atexit.register(snapshot_stack.clear) snapshot_stack.create('origin') ssg_test_suite.virt.start_domain(dom) domain_ip", "runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'remediation', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'final',", "options.datastream, options.benchmark_id, runner=runner) snapshot_stack.revert(delete=False) if not has_worked: logging.error(\"Nothing has been", "atexit.register(snapshot_stack.clear) snapshot_stack.create('origin') ssg_test_suite.virt.start_domain(dom) domain_ip = ssg_test_suite.virt.determine_ip(dom) has_worked = False profiles", "using every profile according to input. 
Also perform remediation run.", "if not has_worked: logging.error(\"Nothing has been tested!\") snapshot_stack.delete() # depending", "or we are reverted # completely (only one profile was", "snapshot # still to be reverted (multiple profiles) or we", "options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'final', options.datastream, options.benchmark_id, runner=runner) snapshot_stack.revert(delete=False)", "ssg_test_suite.virt import SnapshotStack logging.getLogger(__name__).addHandler(logging.NullHandler()) def perform_profile_check(options): \"\"\"Perform profile check. Iterate", "import ssg_test_suite.oscap import ssg_test_suite.virt from ssg_test_suite.rule import get_viable_profiles from ssg_test_suite.virt", "in profiles: logging.info(\"Evaluation of profile {0}.\".format(profile)) has_worked = True runner", "snapshot_stack = SnapshotStack(dom) atexit.register(snapshot_stack.clear) snapshot_stack.create('origin') ssg_test_suite.virt.start_domain(dom) domain_ip = ssg_test_suite.virt.determine_ip(dom) has_worked", "of unaltered VM using every profile according to input. Also", "VM using every profile according to input. Also perform remediation", "sys.exit(1) snapshot_stack = SnapshotStack(dom) atexit.register(snapshot_stack.clear) snapshot_stack.create('origin') ssg_test_suite.virt.start_domain(dom) domain_ip = ssg_test_suite.virt.determine_ip(dom)", "to input. Also perform remediation run. Return value not defined,", "python2 from __future__ import print_function import atexit import logging import", "every profile according to input. Also perform remediation run. Return", "perform remediation run. Return value not defined, textual output and", "run. 
Return value not defined, textual output and generated reports", "snapshot_stack.create('profile') for profile in profiles: logging.info(\"Evaluation of profile {0}.\".format(profile)) has_worked", "runner=runner) snapshot_stack.revert(delete=False) if not has_worked: logging.error(\"Nothing has been tested!\") snapshot_stack.delete()", "the result. \"\"\" dom = ssg_test_suite.virt.connect_domain(options.hypervisor, options.domain_name) if dom is", "logging.getLogger(__name__).addHandler(logging.NullHandler()) def perform_profile_check(options): \"\"\"Perform profile check. Iterate over profiles in", "output and generated reports is the result. \"\"\" dom =", "and perform scanning of unaltered VM using every profile according", "unaltered VM using every profile according to input. Also perform", "ssg_test_suite.virt.determine_ip(dom) has_worked = False profiles = get_viable_profiles(options.target, options.datastream, options.benchmark_id) if", "logging.info(\"Evaluation of profile {0}.\".format(profile)) has_worked = True runner = options.remediate_using", "profiles: logging.info(\"Evaluation of profile {0}.\".format(profile)) has_worked = True runner =", "domain_ip = ssg_test_suite.virt.determine_ip(dom) has_worked = False profiles = get_viable_profiles(options.target, options.datastream,", "import logging import sys import ssg_test_suite.oscap import ssg_test_suite.virt from ssg_test_suite.rule", "input. Also perform remediation run. 
Return value not defined, textual", "scanning of unaltered VM using every profile according to input.", "= ssg_test_suite.virt.connect_domain(options.hypervisor, options.domain_name) if dom is None: sys.exit(1) snapshot_stack =", "from ssg_test_suite.virt import SnapshotStack logging.getLogger(__name__).addHandler(logging.NullHandler()) def perform_profile_check(options): \"\"\"Perform profile check.", "options.benchmark_id, runner=runner) snapshot_stack.revert(delete=False) if not has_worked: logging.error(\"Nothing has been tested!\")", "options.benchmark_id) if len(profiles) > 1: snapshot_stack.create('profile') for profile in profiles:", "= SnapshotStack(dom) atexit.register(snapshot_stack.clear) snapshot_stack.create('origin') ssg_test_suite.virt.start_domain(dom) domain_ip = ssg_test_suite.virt.determine_ip(dom) has_worked =", "= True runner = options.remediate_using ssg_test_suite.oscap.run_profile(domain_ip, profile, 'initial', options.datastream, options.benchmark_id,", "atexit import logging import sys import ssg_test_suite.oscap import ssg_test_suite.virt from", "profile, 'final', options.datastream, options.benchmark_id, runner=runner) snapshot_stack.revert(delete=False) if not has_worked: logging.error(\"Nothing", "logging.error(\"Nothing has been tested!\") snapshot_stack.delete() # depending on number of", "logging import sys import ssg_test_suite.oscap import ssg_test_suite.virt from ssg_test_suite.rule import", "'initial', options.datastream, options.benchmark_id, runner=runner) ssg_test_suite.oscap.run_profile(domain_ip, profile, 'remediation', options.datastream, options.benchmark_id, runner=runner)", "profile according to input. Also perform remediation run. Return value", "datastream and perform scanning of unaltered VM using every profile" ]
[ "wtforms import fields, widgets class ReferencePropertyField(fields.SelectFieldBase): \"\"\" A field for", "a one-argument callable, this callable will be passed model instance", "x: x elif isinstance(get_label, basestring): self.get_label = operator.attrgetter(get_label) else: self.get_label", "not None: for obj in self.query: if str(self.data.key()) == str(obj.key()):", "\"\"\" A field for ``db.StringListProperty``. The list items are rendered", "label associated with each option. If a one-argument callable, this", "If a one-argument callable, this callable will be passed model", "``db.ReferenceProperty``. The list items are rendered in a select. :param", "get_label: If a string, use this attribute on the model", "used. :param allow_blank: If set to true, a blank choice", "if self._formdata is not None: for obj in self.query: if", "to override the default blank option's label. \"\"\" widget =", "None: self.get_label = lambda x: x elif isinstance(get_label, basestring): self.get_label", "widgets class ReferencePropertyField(fields.SelectFieldBase): \"\"\" A field for ``db.ReferenceProperty``. The list", "def iter_choices(self): if self.allow_blank: yield (u'__None', self.blank_text, self.data is None)", "str(self.data.key()) == str(obj.key()): break else: raise ValueError(self.gettext(u'Not a valid choice'))", "allow_blank=False, blank_text=u'', **kwargs): super(ReferencePropertyField, self).__init__(label, validators, **kwargs) if label_attr is", "self._data def _set_data(self, data): self._data = data self._formdata = None", "generate the default query to make the list of items.", "form): if not self.allow_blank or self.data is not None: for", "fields, widgets class ReferencePropertyField(fields.SelectFieldBase): \"\"\" A field for ``db.ReferenceProperty``. The", "= self.get_label(obj) yield (key, label, self.data and ( self.data.key( )", "self.query: key = str(obj.key()) label = self.get_label(obj) yield (key, label,", "label text. 
Otherwise, the model object's `__str__` or `__unicode__` will", "**kwargs): super(ReferencePropertyField, self).__init__(label, validators, **kwargs) if label_attr is not None:", "class GeoPtPropertyField(fields.TextField): def process_formdata(self, valuelist): if valuelist: try: lat, lon", "if str(obj.key()) == self._formdata: self._set_data(obj) break return self._data def _set_data(self,", "process_formdata(self, valuelist): if valuelist: try: lat, lon = valuelist[0].split(',') self.data", "decimal import operator import warnings from wtforms import fields, widgets", "self.data is None) for obj in self.query: key = str(obj.key())", "key = str(obj.key()) label = self.get_label(obj) yield (key, label, self.data", "will be used. :param allow_blank: If set to true, a", "= None data = property(_get_data, _set_data) def iter_choices(self): if self.allow_blank:", "passed model instance and expected to return the label text.", "allow_blank self.blank_text = blank_text self._set_data(None) if reference_class is not None:", "self.get_label = get_label self.allow_blank = allow_blank self.blank_text = blank_text self._set_data(None)", "a valid list')) class GeoPtPropertyField(fields.TextField): def process_formdata(self, valuelist): if valuelist:", "rendered in a textarea. \"\"\" def _value(self): if self.raw_data: return", "the model object's `__str__` or `__unicode__` will be used. :param", ":param allow_blank: If set to true, a blank choice will", "The list items are rendered in a select. :param reference_class:", "a valid choice')) class StringListPropertyField(fields.TextAreaField): \"\"\" A field for ``db.StringListProperty``.", "instead.', DeprecationWarning) self.get_label = operator.attrgetter(label_attr) elif get_label is None: self.get_label", "before validation. 
:param get_label: If a string, use this attribute", "unicode(\"\\n\".join(self.data)) or u'' def process_formdata(self, valuelist): if valuelist: try: self.data", "% (decimal.Decimal(lat.strip()), decimal.Decimal(lon.strip()),) except (decimal.InvalidOperation, ValueError): raise ValueError(u'Not a valid", "StringListPropertyField(fields.TextAreaField): \"\"\" A field for ``db.StringListProperty``. The list items are", "or self.data is not None: for obj in self.query: if", "basestring): self.get_label = operator.attrgetter(get_label) else: self.get_label = get_label self.allow_blank =", "a select. :param reference_class: A db.Model class which will be", "for ``db.ReferenceProperty``. The list items are rendered in a select.", "allow `None` to be chosen. :param blank_text: Use this to", "removed in WTForms 1.1, use get_label= instead.', DeprecationWarning) self.get_label =", "self.blank_text = blank_text self._set_data(None) if reference_class is not None: self.query", "1.1, use get_label= instead.', DeprecationWarning) self.get_label = operator.attrgetter(label_attr) elif get_label", "self.allow_blank or self.data is not None: for obj in self.query:", "one-argument callable, this callable will be passed model instance and", "be used to generate the default query to make the", "blank choice will be added to the top of the", "of items. If this is not specified, The `query` property", "If a string, use this attribute on the model class", "on the model class as the label associated with each", "raise ValueError(self.gettext(u'Not a valid list')) class GeoPtPropertyField(fields.TextField): def process_formdata(self, valuelist):", "added to the top of the list to allow `None`", "def _set_data(self, data): self._data = data self._formdata = None data", "in WTForms 1.1, use get_label= instead.', DeprecationWarning) self.get_label = operator.attrgetter(label_attr)", "A field for ``db.ReferenceProperty``. 
The list items are rendered in", "== obj.key() ) ) def process_formdata(self, valuelist): if valuelist: if", "`None` to be chosen. :param blank_text: Use this to override", "callable will be passed model instance and expected to return", "isinstance(get_label, basestring): self.get_label = operator.attrgetter(get_label) else: self.get_label = get_label self.allow_blank", "choice will be added to the top of the list", "_set_data(self, data): self._data = data self._formdata = None data =", "\"\"\" widget = widgets.Select() def __init__(self, label=None, validators=None, reference_class=None, label_attr=None,", "if valuelist: try: self.data = valuelist[0].splitlines() except ValueError: raise ValueError(self.gettext(u'Not", "== self._formdata: self._set_data(obj) break return self._data def _set_data(self, data): self._data", "= valuelist[0].split(',') self.data = u'%s,%s' % (decimal.Decimal(lat.strip()), decimal.Decimal(lon.strip()),) except (decimal.InvalidOperation,", "label. \"\"\" widget = widgets.Select() def __init__(self, label=None, validators=None, reference_class=None,", "None data = property(_get_data, _set_data) def iter_choices(self): if self.allow_blank: yield", "use get_label= instead.', DeprecationWarning) self.get_label = operator.attrgetter(label_attr) elif get_label is", "DeprecationWarning) self.get_label = operator.attrgetter(label_attr) elif get_label is None: self.get_label =", "list items are rendered in a textarea. \"\"\" def _value(self):", "expected to return the label text. Otherwise, the model object's", "get_label is None: self.get_label = lambda x: x elif isinstance(get_label,", "for obj in self.query: if str(obj.key()) == self._formdata: self._set_data(obj) break", "'__None': self.data = None else: self._data = None self._formdata =", "None: warnings.warn('label_attr= will be removed in WTForms 1.1, use get_label=", "items are rendered in a select. 
:param reference_class: A db.Model", "string, use this attribute on the model class as the", "the top of the list to allow `None` to be", "not None: for obj in self.query: if str(obj.key()) == self._formdata:", "to true, a blank choice will be added to the", "be chosen. :param blank_text: Use this to override the default", "to generate the default query to make the list of", "valuelist[0] def pre_validate(self, form): if not self.allow_blank or self.data is", "lambda x: x elif isinstance(get_label, basestring): self.get_label = operator.attrgetter(get_label) else:", "to the top of the list to allow `None` to", "= u'%s,%s' % (decimal.Decimal(lat.strip()), decimal.Decimal(lon.strip()),) except (decimal.InvalidOperation, ValueError): raise ValueError(u'Not", "import fields, widgets class ReferencePropertyField(fields.SelectFieldBase): \"\"\" A field for ``db.ReferenceProperty``.", "which will be used to generate the default query to", "this attribute on the model class as the label associated", "else: raise ValueError(self.gettext(u'Not a valid choice')) class StringListPropertyField(fields.TextAreaField): \"\"\" A", "WTForms 1.1, use get_label= instead.', DeprecationWarning) self.get_label = operator.attrgetter(label_attr) elif", "data): self._data = data self._formdata = None data = property(_get_data,", "= property(_get_data, _set_data) def iter_choices(self): if self.allow_blank: yield (u'__None', self.blank_text,", "not self.allow_blank or self.data is not None: for obj in", "list to allow `None` to be chosen. 
:param blank_text: Use", "self._formdata: self._set_data(obj) break return self._data def _set_data(self, data): self._data =", "(u'__None', self.blank_text, self.data is None) for obj in self.query: key", "return self._data def _set_data(self, data): self._data = data self._formdata =", "str(obj.key()): break else: raise ValueError(self.gettext(u'Not a valid choice')) class StringListPropertyField(fields.TextAreaField):", "def process_formdata(self, valuelist): if valuelist: try: lat, lon = valuelist[0].split(',')", "decimal.Decimal(lon.strip()),) except (decimal.InvalidOperation, ValueError): raise ValueError(u'Not a valid coordinate location')", "this to override the default blank option's label. \"\"\" widget", "not None: warnings.warn('label_attr= will be removed in WTForms 1.1, use", "self._data = None self._formdata = valuelist[0] def pre_validate(self, form): if", "default blank option's label. \"\"\" widget = widgets.Select() def __init__(self,", "blank_text: Use this to override the default blank option's label.", "ValueError(self.gettext(u'Not a valid list')) class GeoPtPropertyField(fields.TextField): def process_formdata(self, valuelist): if", "self.raw_data[0] else: return self.data and unicode(\"\\n\".join(self.data)) or u'' def process_formdata(self,", "blank option's label. \"\"\" widget = widgets.Select() def __init__(self, label=None,", "= str(obj.key()) label = self.get_label(obj) yield (key, label, self.data and", "overridden before validation. :param get_label: If a string, use this", "model object's `__str__` or `__unicode__` will be used. :param allow_blank:", "self.data and unicode(\"\\n\".join(self.data)) or u'' def process_formdata(self, valuelist): if valuelist:", "break else: raise ValueError(self.gettext(u'Not a valid choice')) class StringListPropertyField(fields.TextAreaField): \"\"\"", "list items are rendered in a select. 
:param reference_class: A", "self.data and ( self.data.key( ) == obj.key() ) ) def", ":param get_label: If a string, use this attribute on the", "the list to allow `None` to be chosen. :param blank_text:", "process_formdata(self, valuelist): if valuelist: if valuelist[0] == '__None': self.data =", "if self.allow_blank: yield (u'__None', self.blank_text, self.data is None) for obj", "break return self._data def _set_data(self, data): self._data = data self._formdata", "self.query: if str(self.data.key()) == str(obj.key()): break else: raise ValueError(self.gettext(u'Not a", "self.data = None else: self._data = None self._formdata = valuelist[0]", "yield (key, label, self.data and ( self.data.key( ) == obj.key()", "if valuelist: try: lat, lon = valuelist[0].split(',') self.data = u'%s,%s'", "get_label self.allow_blank = allow_blank self.blank_text = blank_text self._set_data(None) if reference_class", "is not None: for obj in self.query: if str(self.data.key()) ==", "obj in self.query: if str(obj.key()) == self._formdata: self._set_data(obj) break return", "validation. :param get_label: If a string, use this attribute on", "a blank choice will be added to the top of", "and ( self.data.key( ) == obj.key() ) ) def process_formdata(self,", "process_formdata(self, valuelist): if valuelist: try: self.data = valuelist[0].splitlines() except ValueError:", "reference_class=None, label_attr=None, get_label=None, allow_blank=False, blank_text=u'', **kwargs): super(ReferencePropertyField, self).__init__(label, validators, **kwargs)", "(key, label, self.data and ( self.data.key( ) == obj.key() )", ") ) def process_formdata(self, valuelist): if valuelist: if valuelist[0] ==", "self._formdata = valuelist[0] def pre_validate(self, form): if not self.allow_blank or", "in a textarea. 
\"\"\" def _value(self): if self.raw_data: return self.raw_data[0]", "reference_class.all() def _get_data(self): if self._formdata is not None: for obj", "if not self.allow_blank or self.data is not None: for obj", "as the label associated with each option. If a one-argument", "None: for obj in self.query: if str(self.data.key()) == str(obj.key()): break", "be removed in WTForms 1.1, use get_label= instead.', DeprecationWarning) self.get_label", "valuelist: try: self.data = valuelist[0].splitlines() except ValueError: raise ValueError(self.gettext(u'Not a", "label=None, validators=None, reference_class=None, label_attr=None, get_label=None, allow_blank=False, blank_text=u'', **kwargs): super(ReferencePropertyField, self).__init__(label,", "model class as the label associated with each option. If", "field for ``db.ReferenceProperty``. The list items are rendered in a", "label_attr=None, get_label=None, allow_blank=False, blank_text=u'', **kwargs): super(ReferencePropertyField, self).__init__(label, validators, **kwargs) if", "= get_label self.allow_blank = allow_blank self.blank_text = blank_text self._set_data(None) if", "If set to true, a blank choice will be added", "try: self.data = valuelist[0].splitlines() except ValueError: raise ValueError(self.gettext(u'Not a valid", "attribute on the model class as the label associated with", "lon = valuelist[0].split(',') self.data = u'%s,%s' % (decimal.Decimal(lat.strip()), decimal.Decimal(lon.strip()),) except", "``db.StringListProperty``. The list items are rendered in a textarea. \"\"\"", "make the list of items. If this is not specified,", "will be used to generate the default query to make", "valuelist[0].split(',') self.data = u'%s,%s' % (decimal.Decimal(lat.strip()), decimal.Decimal(lon.strip()),) except (decimal.InvalidOperation, ValueError):", "label, self.data and ( self.data.key( ) == obj.key() ) )", "to be chosen. :param blank_text: Use this to override the", "a textarea. 
\"\"\" def _value(self): if self.raw_data: return self.raw_data[0] else:", "the list of items. If this is not specified, The", "= blank_text self._set_data(None) if reference_class is not None: self.query =", "warnings from wtforms import fields, widgets class ReferencePropertyField(fields.SelectFieldBase): \"\"\" A", "be passed model instance and expected to return the label", "not None: self.query = reference_class.all() def _get_data(self): if self._formdata is", "reference_class is not None: self.query = reference_class.all() def _get_data(self): if", "widget = widgets.Select() def __init__(self, label=None, validators=None, reference_class=None, label_attr=None, get_label=None,", "if self.raw_data: return self.raw_data[0] else: return self.data and unicode(\"\\n\".join(self.data)) or", "( self.data.key( ) == obj.key() ) ) def process_formdata(self, valuelist):", "blank_text self._set_data(None) if reference_class is not None: self.query = reference_class.all()", "instance and expected to return the label text. 
Otherwise, the", "blank_text=u'', **kwargs): super(ReferencePropertyField, self).__init__(label, validators, **kwargs) if label_attr is not", "this is not specified, The `query` property must be overridden", "import operator import warnings from wtforms import fields, widgets class", "and unicode(\"\\n\".join(self.data)) or u'' def process_formdata(self, valuelist): if valuelist: try:", "def _get_data(self): if self._formdata is not None: for obj in", "for obj in self.query: key = str(obj.key()) label = self.get_label(obj)", "label = self.get_label(obj) yield (key, label, self.data and ( self.data.key(", "If this is not specified, The `query` property must be", "data self._formdata = None data = property(_get_data, _set_data) def iter_choices(self):", ") == obj.key() ) ) def process_formdata(self, valuelist): if valuelist:", "= None self._formdata = valuelist[0] def pre_validate(self, form): if not", "import decimal import operator import warnings from wtforms import fields,", "will be added to the top of the list to", "def process_formdata(self, valuelist): if valuelist: try: self.data = valuelist[0].splitlines() except", "class StringListPropertyField(fields.TextAreaField): \"\"\" A field for ``db.StringListProperty``. The list items", "items. If this is not specified, The `query` property must", "rendered in a select. 
:param reference_class: A db.Model class which", "A db.Model class which will be used to generate the", "self.get_label = operator.attrgetter(get_label) else: self.get_label = get_label self.allow_blank = allow_blank", "data = property(_get_data, _set_data) def iter_choices(self): if self.allow_blank: yield (u'__None',", ":param blank_text: Use this to override the default blank option's", "set to true, a blank choice will be added to", "in self.query: if str(obj.key()) == self._formdata: self._set_data(obj) break return self._data", "if valuelist[0] == '__None': self.data = None else: self._data =", "valuelist): if valuelist: try: lat, lon = valuelist[0].split(',') self.data =", "_set_data) def iter_choices(self): if self.allow_blank: yield (u'__None', self.blank_text, self.data is", "ReferencePropertyField(fields.SelectFieldBase): \"\"\" A field for ``db.ReferenceProperty``. The list items are", "class which will be used to generate the default query", "reference_class: A db.Model class which will be used to generate", "None: self.query = reference_class.all() def _get_data(self): if self._formdata is not", "not specified, The `query` property must be overridden before validation.", "obj.key() ) ) def process_formdata(self, valuelist): if valuelist: if valuelist[0]", "valuelist): if valuelist: if valuelist[0] == '__None': self.data = None", "for obj in self.query: if str(self.data.key()) == str(obj.key()): break else:", "def _value(self): if self.raw_data: return self.raw_data[0] else: return self.data and", "warnings.warn('label_attr= will be removed in WTForms 1.1, use get_label= instead.',", "class as the label associated with each option. 
If a", "valuelist): if valuelist: try: self.data = valuelist[0].splitlines() except ValueError: raise", "operator.attrgetter(label_attr) elif get_label is None: self.get_label = lambda x: x", "operator.attrgetter(get_label) else: self.get_label = get_label self.allow_blank = allow_blank self.blank_text =", "None: for obj in self.query: if str(obj.key()) == self._formdata: self._set_data(obj)", "= lambda x: x elif isinstance(get_label, basestring): self.get_label = operator.attrgetter(get_label)", "self.data = valuelist[0].splitlines() except ValueError: raise ValueError(self.gettext(u'Not a valid list'))", "The `query` property must be overridden before validation. :param get_label:", "choice')) class StringListPropertyField(fields.TextAreaField): \"\"\" A field for ``db.StringListProperty``. The list", "return self.raw_data[0] else: return self.data and unicode(\"\\n\".join(self.data)) or u'' def", "A field for ``db.StringListProperty``. The list items are rendered in", "import warnings from wtforms import fields, widgets class ReferencePropertyField(fields.SelectFieldBase): \"\"\"", "lat, lon = valuelist[0].split(',') self.data = u'%s,%s' % (decimal.Decimal(lat.strip()), decimal.Decimal(lon.strip()),)", "used to generate the default query to make the list", "get_label=None, allow_blank=False, blank_text=u'', **kwargs): super(ReferencePropertyField, self).__init__(label, validators, **kwargs) if label_attr", "= operator.attrgetter(label_attr) elif get_label is None: self.get_label = lambda x:", "query to make the list of items. If this is", "self._formdata is not None: for obj in self.query: if str(obj.key())", "self.get_label = operator.attrgetter(label_attr) elif get_label is None: self.get_label = lambda", "None self._formdata = valuelist[0] def pre_validate(self, form): if not self.allow_blank", "is None) for obj in self.query: key = str(obj.key()) label", "to return the label text. 
Otherwise, the model object's `__str__`", "is not None: warnings.warn('label_attr= will be removed in WTForms 1.1,", "\"\"\" A field for ``db.ReferenceProperty``. The list items are rendered", "The list items are rendered in a textarea. \"\"\" def", "`query` property must be overridden before validation. :param get_label: If", "self).__init__(label, validators, **kwargs) if label_attr is not None: warnings.warn('label_attr= will", "chosen. :param blank_text: Use this to override the default blank", "default query to make the list of items. If this", "self.query = reference_class.all() def _get_data(self): if self._formdata is not None:", "label_attr is not None: warnings.warn('label_attr= will be removed in WTForms", "elif isinstance(get_label, basestring): self.get_label = operator.attrgetter(get_label) else: self.get_label = get_label", "of the list to allow `None` to be chosen. :param", "pre_validate(self, form): if not self.allow_blank or self.data is not None:", "self.allow_blank = allow_blank self.blank_text = blank_text self._set_data(None) if reference_class is", "= reference_class.all() def _get_data(self): if self._formdata is not None: for", "elif get_label is None: self.get_label = lambda x: x elif", "self._set_data(None) if reference_class is not None: self.query = reference_class.all() def", "__init__(self, label=None, validators=None, reference_class=None, label_attr=None, get_label=None, allow_blank=False, blank_text=u'', **kwargs): super(ReferencePropertyField,", "u'%s,%s' % (decimal.Decimal(lat.strip()), decimal.Decimal(lon.strip()),) except (decimal.InvalidOperation, ValueError): raise ValueError(u'Not a", "valuelist: try: lat, lon = valuelist[0].split(',') self.data = u'%s,%s' %", "is not None: for obj in self.query: if str(obj.key()) ==", "with each option. 
If a one-argument callable, this callable will", "valuelist[0].splitlines() except ValueError: raise ValueError(self.gettext(u'Not a valid list')) class GeoPtPropertyField(fields.TextField):", "be added to the top of the list to allow", "self._formdata = None data = property(_get_data, _set_data) def iter_choices(self): if", "valuelist: if valuelist[0] == '__None': self.data = None else: self._data", "to make the list of items. If this is not", "if str(self.data.key()) == str(obj.key()): break else: raise ValueError(self.gettext(u'Not a valid", "= valuelist[0] def pre_validate(self, form): if not self.allow_blank or self.data", "def __init__(self, label=None, validators=None, reference_class=None, label_attr=None, get_label=None, allow_blank=False, blank_text=u'', **kwargs):", "is None: self.get_label = lambda x: x elif isinstance(get_label, basestring):", "the model class as the label associated with each option.", "the label text. Otherwise, the model object's `__str__` or `__unicode__`", "select. :param reference_class: A db.Model class which will be used", "Otherwise, the model object's `__str__` or `__unicode__` will be used.", "str(obj.key()) == self._formdata: self._set_data(obj) break return self._data def _set_data(self, data):", "callable, this callable will be passed model instance and expected", "option's label. \"\"\" widget = widgets.Select() def __init__(self, label=None, validators=None,", "this callable will be passed model instance and expected to", "`__unicode__` will be used. :param allow_blank: If set to true,", "or u'' def process_formdata(self, valuelist): if valuelist: try: self.data =", "to allow `None` to be chosen. 
:param blank_text: Use this", "return self.data and unicode(\"\\n\".join(self.data)) or u'' def process_formdata(self, valuelist): if", "_value(self): if self.raw_data: return self.raw_data[0] else: return self.data and unicode(\"\\n\".join(self.data))", "= data self._formdata = None data = property(_get_data, _set_data) def", ":param reference_class: A db.Model class which will be used to", "(decimal.Decimal(lat.strip()), decimal.Decimal(lon.strip()),) except (decimal.InvalidOperation, ValueError): raise ValueError(u'Not a valid coordinate", "return the label text. Otherwise, the model object's `__str__` or", "property(_get_data, _set_data) def iter_choices(self): if self.allow_blank: yield (u'__None', self.blank_text, self.data", "true, a blank choice will be added to the top", "option. If a one-argument callable, this callable will be passed", "else: self._data = None self._formdata = valuelist[0] def pre_validate(self, form):", "list of items. If this is not specified, The `query`", "valid list')) class GeoPtPropertyField(fields.TextField): def process_formdata(self, valuelist): if valuelist: try:", "will be removed in WTForms 1.1, use get_label= instead.', DeprecationWarning)", "self.get_label = lambda x: x elif isinstance(get_label, basestring): self.get_label =", "str(obj.key()) label = self.get_label(obj) yield (key, label, self.data and (", "None) for obj in self.query: key = str(obj.key()) label =", "validators=None, reference_class=None, label_attr=None, get_label=None, allow_blank=False, blank_text=u'', **kwargs): super(ReferencePropertyField, self).__init__(label, validators,", "`__str__` or `__unicode__` will be used. :param allow_blank: If set", ") def process_formdata(self, valuelist): if valuelist: if valuelist[0] == '__None':", "specified, The `query` property must be overridden before validation. 
:param", "validators, **kwargs) if label_attr is not None: warnings.warn('label_attr= will be", "GeoPtPropertyField(fields.TextField): def process_formdata(self, valuelist): if valuelist: try: lat, lon =", "<filename>lib/wtforms/ext/appengine/fields.py import decimal import operator import warnings from wtforms import", "object's `__str__` or `__unicode__` will be used. :param allow_blank: If", "except ValueError: raise ValueError(self.gettext(u'Not a valid list')) class GeoPtPropertyField(fields.TextField): def", "None else: self._data = None self._formdata = valuelist[0] def pre_validate(self,", "**kwargs) if label_attr is not None: warnings.warn('label_attr= will be removed", "field for ``db.StringListProperty``. The list items are rendered in a", "in self.query: if str(self.data.key()) == str(obj.key()): break else: raise ValueError(self.gettext(u'Not", "\"\"\" def _value(self): if self.raw_data: return self.raw_data[0] else: return self.data", "property must be overridden before validation. :param get_label: If a", "Use this to override the default blank option's label. 
\"\"\"", "raise ValueError(self.gettext(u'Not a valid choice')) class StringListPropertyField(fields.TextAreaField): \"\"\" A field", "ValueError(self.gettext(u'Not a valid choice')) class StringListPropertyField(fields.TextAreaField): \"\"\" A field for", "operator import warnings from wtforms import fields, widgets class ReferencePropertyField(fields.SelectFieldBase):", "from wtforms import fields, widgets class ReferencePropertyField(fields.SelectFieldBase): \"\"\" A field", "obj in self.query: if str(self.data.key()) == str(obj.key()): break else: raise", "in self.query: key = str(obj.key()) label = self.get_label(obj) yield (key,", "get_label= instead.', DeprecationWarning) self.get_label = operator.attrgetter(label_attr) elif get_label is None:", "= operator.attrgetter(get_label) else: self.get_label = get_label self.allow_blank = allow_blank self.blank_text", "self.allow_blank: yield (u'__None', self.blank_text, self.data is None) for obj in", "override the default blank option's label. \"\"\" widget = widgets.Select()", "the label associated with each option. If a one-argument callable,", "self.blank_text, self.data is None) for obj in self.query: key =", "widgets.Select() def __init__(self, label=None, validators=None, reference_class=None, label_attr=None, get_label=None, allow_blank=False, blank_text=u'',", "list')) class GeoPtPropertyField(fields.TextField): def process_formdata(self, valuelist): if valuelist: try: lat,", "if label_attr is not None: warnings.warn('label_attr= will be removed in", "self.raw_data: return self.raw_data[0] else: return self.data and unicode(\"\\n\".join(self.data)) or u''", "use this attribute on the model class as the label", "db.Model class which will be used to generate the default", "yield (u'__None', self.blank_text, self.data is None) for obj in self.query:", "valid choice')) class StringListPropertyField(fields.TextAreaField): \"\"\" A field for ``db.StringListProperty``. The", "for ``db.StringListProperty``. 
The list items are rendered in a textarea.", "will be passed model instance and expected to return the", "the default blank option's label. \"\"\" widget = widgets.Select() def", "the default query to make the list of items. If", "self.data is not None: for obj in self.query: if str(self.data.key())", "ValueError: raise ValueError(self.gettext(u'Not a valid list')) class GeoPtPropertyField(fields.TextField): def process_formdata(self,", "= None else: self._data = None self._formdata = valuelist[0] def", "= valuelist[0].splitlines() except ValueError: raise ValueError(self.gettext(u'Not a valid list')) class", "associated with each option. If a one-argument callable, this callable", "and expected to return the label text. Otherwise, the model", "self.data = u'%s,%s' % (decimal.Decimal(lat.strip()), decimal.Decimal(lon.strip()),) except (decimal.InvalidOperation, ValueError): raise", "must be overridden before validation. :param get_label: If a string,", "each option. If a one-argument callable, this callable will be", "else: return self.data and unicode(\"\\n\".join(self.data)) or u'' def process_formdata(self, valuelist):", "obj in self.query: key = str(obj.key()) label = self.get_label(obj) yield", "if valuelist: if valuelist[0] == '__None': self.data = None else:", "_get_data(self): if self._formdata is not None: for obj in self.query:", "allow_blank: If set to true, a blank choice will be", "items are rendered in a textarea. \"\"\" def _value(self): if", "are rendered in a select. :param reference_class: A db.Model class", "= widgets.Select() def __init__(self, label=None, validators=None, reference_class=None, label_attr=None, get_label=None, allow_blank=False,", "self._data = data self._formdata = None data = property(_get_data, _set_data)", "a string, use this attribute on the model class as", "be used. 
:param allow_blank: If set to true, a blank", "valuelist[0] == '__None': self.data = None else: self._data = None", "else: self.get_label = get_label self.allow_blank = allow_blank self.blank_text = blank_text", "self.get_label(obj) yield (key, label, self.data and ( self.data.key( ) ==", "textarea. \"\"\" def _value(self): if self.raw_data: return self.raw_data[0] else: return", "iter_choices(self): if self.allow_blank: yield (u'__None', self.blank_text, self.data is None) for", "def pre_validate(self, form): if not self.allow_blank or self.data is not", "are rendered in a textarea. \"\"\" def _value(self): if self.raw_data:", "class ReferencePropertyField(fields.SelectFieldBase): \"\"\" A field for ``db.ReferenceProperty``. The list items", "is not specified, The `query` property must be overridden before", "in a select. :param reference_class: A db.Model class which will", "be overridden before validation. :param get_label: If a string, use", "if reference_class is not None: self.query = reference_class.all() def _get_data(self):", "u'' def process_formdata(self, valuelist): if valuelist: try: self.data = valuelist[0].splitlines()", "== str(obj.key()): break else: raise ValueError(self.gettext(u'Not a valid choice')) class", "text. 
Otherwise, the model object's `__str__` or `__unicode__` will be", "def process_formdata(self, valuelist): if valuelist: if valuelist[0] == '__None': self.data", "self._set_data(obj) break return self._data def _set_data(self, data): self._data = data", "x elif isinstance(get_label, basestring): self.get_label = operator.attrgetter(get_label) else: self.get_label =", "self.data.key( ) == obj.key() ) ) def process_formdata(self, valuelist): if", "= allow_blank self.blank_text = blank_text self._set_data(None) if reference_class is not", "super(ReferencePropertyField, self).__init__(label, validators, **kwargs) if label_attr is not None: warnings.warn('label_attr=", "top of the list to allow `None` to be chosen.", "or `__unicode__` will be used. :param allow_blank: If set to", "try: lat, lon = valuelist[0].split(',') self.data = u'%s,%s' % (decimal.Decimal(lat.strip()),", "is not None: self.query = reference_class.all() def _get_data(self): if self._formdata", "self.query: if str(obj.key()) == self._formdata: self._set_data(obj) break return self._data def", "model instance and expected to return the label text. Otherwise,", "== '__None': self.data = None else: self._data = None self._formdata" ]
[ "np import pandas as pd import random def simulate_games(num_players=4, domino_size=12,", "mode game_modes = [] if collect_data: for select in range(0,", "domino_size + _ + num_games + .xlsx This spreadsheet is", "is off: len(players) must equal num_players Returns a tuple of", "= pd.DataFrame(columns=column_names) current_index = 0 for game_num in range(0, num_games):", "collecting data, data is stored into the dataframe if collect_data:", "np.ndarray((num_players, num_games)) wins = np.ndarray((num_players, num_games)) full_data = pd.DataFrame(columns=column_names) current_index", "is as follows: PlayData/data + num_players + _ + domino_size", "= \"PlayData/data\" + str(num_players) + \"_\" + str(domino_size) + \"_\"", "respective arrays for player_num in range(0, num_players): scores[player_num, game_num] =", "+ num_players + _ + domino_size + _ + num_games", "in range(0, num_games): #Randomize players if in collect_data mode game_modes", "file_name=file_name) #If collecting data, data is stored into the dataframe", "use The format for the file name for this is", "= 0 #Calculates performance of the players score_averages = np.ndarray((num_players))", "score_averages = np.ndarray((num_players)) win_percentage = np.ndarray((num_players)) for player_num in range(0,", "= np.ndarray((num_players, num_games)) wins = np.ndarray((num_players, num_games)) full_data = pd.DataFrame(columns=column_names)", "wins[player_num, game_num] = 1 else: wins[player_num, game_num] = 0 #Calculates", "game_num in range(0, num_games): #Randomize players if in collect_data mode", "file_name=\"PlayData/data4_12_250\"): \"\"\" Runs the mexican train game repeatedly with different", "used in testing and training the neural net. 
If collect_data", "= 0 for game_num in range(0, num_games): #Randomize players if", "data_collection=collect_data, data_index=current_index, file_name=file_name) #If collecting data, data is stored into", "np.mean(wins[player_num, :]) #If collecting data, prints data to a .xlsx", "this is as follows: PlayData/data + num_players + _ +", "of players to generate data to be used in testing", "into a .xlsx file for later use The format for", "for select in range(0, num_players): game_modes.append(random.choice(modes)) else: game_modes = modes", "results[1] == player_num: wins[player_num, game_num] = 1 else: wins[player_num, game_num]", "player_num in range(0, num_players): score_averages[player_num] = np.mean(scores[player_num, :]) win_percentage[player_num] =", "Runs the mexican train game repeatedly with different combinations of", "data to a .xlsx file if collect_data: filename = \"PlayData/data\"", "returns them as well if debug: print(score_averages) if debug: print(win_percentage)", "off\") modes = players #Simulates num_games of games scores =", "the neural net. If collect_data is on, the play data", "== player_num: wins[player_num, game_num] = 1 else: wins[player_num, game_num] =", "to the players \"\"\" #Sets column names for building dataframe", "stored into a .xlsx file for later use The format", "player_num in range(0, num_players): scores[player_num, game_num] = results[0][player_num] if results[1]", "in order from the parameter players. When collect_data is off:", "each game from: [\"Random\", \"Greedy\", \"Probability\"] If collect_data is off,", "players \"\"\" #Sets column names for building dataframe later on", "the game with the default params if unchanged. 
If collect_data", "in range(0, num_players): score_averages[player_num] = np.mean(scores[player_num, :]) win_percentage[player_num] = np.mean(wins[player_num,", "with different combinations of players to generate data to be", "of games scores = np.ndarray((num_players, num_games)) wins = np.ndarray((num_players, num_games))", "num_players): score_averages[player_num] = np.mean(scores[player_num, :]) win_percentage[player_num] = np.mean(wins[player_num, :]) #If", "the players \"\"\" #Sets column names for building dataframe later", "a .xlsx file if collect_data: filename = \"PlayData/data\" + str(num_players)", "as follows: PlayData/data + num_players + _ + domino_size +", "prints data to a .xlsx file if collect_data: filename =", "_ + domino_size + _ + num_games + .xlsx This", "lists: (score_averages, win_percentage) corresponding to the players \"\"\" #Sets column", "of the players score_averages = np.ndarray((num_players)) win_percentage = np.ndarray((num_players)) for", "the file name for this is as follows: PlayData/data +", "is off, the players are selected in order from the", "player values modes = [] if collect_data: modes = [\"Random\",", "\"Sheet1\") writer.save() #Prints results and returns them as well if", "select in range(0, num_players): game_modes.append(random.choice(modes)) else: game_modes = modes #Run", "0 #Calculates performance of the players score_averages = np.ndarray((num_players)) win_percentage", "training the neural net. 
If collect_data is on, the play", "#If collecting data, data is stored into the dataframe if", "building dataframe later on column_names = [\"round_number\", \"turn_number\", \"player_number\", \"play\",", "arrays for player_num in range(0, num_players): scores[player_num, game_num] = results[0][player_num]", "game_modes = [] if collect_data: for select in range(0, num_players):", "must equal num_players Returns a tuple of lists: (score_averages, win_percentage)", "in range(0, num_players): scores[player_num, game_num] = results[0][player_num] if results[1] ==", "num_players when collect_data is off\") modes = players #Simulates num_games", "[\"round_number\", \"turn_number\", \"player_number\", \"play\", \"t_num\", \"hand\", \"unknown\", \"potential_plays\", \"points\"] #Depending", "= pd.ExcelWriter(filename) full_data.to_excel(writer, \"Sheet1\") writer.save() #Prints results and returns them", "to generate data to be used in testing and training", "\"points\"] #Depending on mode of use, sets players and checks", "score_averages[player_num] = np.mean(scores[player_num, :]) win_percentage[player_num] = np.mean(wins[player_num, :]) #If collecting", "#Sets column names for building dataframe later on column_names =", "when collect_data is off\") modes = players #Simulates num_games of", "for game_num in range(0, num_games): #Randomize players if in collect_data", "column names for building dataframe later on column_names = [\"round_number\",", "len(players) == num_players: raise RuntimeError(\"len(players) must equal num_players when collect_data", "the players are selected randomly each game from: [\"Random\", \"Greedy\",", "in testing and training the neural net. If collect_data is", "import pandas as pd import random def simulate_games(num_players=4, domino_size=12, num_games=250,", "RuntimeError(\"len(players) must equal num_players when collect_data is off\") modes =", "testing and training the neural net. 
If collect_data is on,", "else: game_modes = modes #Run game with parameters results =", "well if debug: print(score_averages) if debug: print(win_percentage) return score_averages, win_percentage", "and wins are recorded into their respective arrays for player_num", "import mtrain import numpy as np import pandas as pd", "players to generate data to be used in testing and", "equal num_players Returns a tuple of lists: (score_averages, win_percentage) corresponding", "\"_\" + str(domino_size) + \"_\" + str(num_games) + \".xlsx\" writer", "net. If collect_data is on, the play data is retrieved", ":]) #If collecting data, prints data to a .xlsx file", "players. When collect_data is off: len(players) must equal num_players Returns", "collect_data: for select in range(0, num_players): game_modes.append(random.choice(modes)) else: game_modes =", "be used in testing and training the neural net. If", "not len(players) == num_players: raise RuntimeError(\"len(players) must equal num_players when", "+ \".xlsx\" writer = pd.ExcelWriter(filename) full_data.to_excel(writer, \"Sheet1\") writer.save() #Prints results", "The format for the file name for this is as", "a tuple of lists: (score_averages, win_percentage) corresponding to the players", "to be used when training the neural net. 
This script", "0 for game_num in range(0, num_games): #Randomize players if in", "pandas as pd import random def simulate_games(num_players=4, domino_size=12, num_games=250, collect_data=True,", "scores = np.ndarray((num_players, num_games)) wins = np.ndarray((num_players, num_games)) full_data =", "results = mtrain.mexicantrain(num_players, domino_size, debug=debug, modes=game_modes, data_collection=collect_data, data_index=current_index, file_name=file_name) #If", "domino_size, debug=debug, modes=game_modes, data_collection=collect_data, data_index=current_index, file_name=file_name) #If collecting data, data", "is on, the play data is retrieved and stored into", "+ num_games + .xlsx This spreadsheet is to be used", "+ str(num_games) + \".xlsx\" writer = pd.ExcelWriter(filename) full_data.to_excel(writer, \"Sheet1\") writer.save()", "neural net. This script has no required parameters, and will", "str(num_games) + \".xlsx\" writer = pd.ExcelWriter(filename) full_data.to_excel(writer, \"Sheet1\") writer.save() #Prints", "the neural net. This script has no required parameters, and", "+ str(domino_size) + \"_\" + str(num_games) + \".xlsx\" writer =", "neural net. 
If collect_data is on, the play data is", "import random def simulate_games(num_players=4, domino_size=12, num_games=250, collect_data=True, debug=False, players=[\"Random\", \"Greedy\",", "data, prints data to a .xlsx file if collect_data: filename", "(score_averages, win_percentage) corresponding to the players \"\"\" #Sets column names", "corresponding to the players \"\"\" #Sets column names for building", ".xlsx file if collect_data: filename = \"PlayData/data\" + str(num_players) +", "pd.ExcelWriter(filename) full_data.to_excel(writer, \"Sheet1\") writer.save() #Prints results and returns them as", "as pd import random def simulate_games(num_players=4, domino_size=12, num_games=250, collect_data=True, debug=False,", "is retrieved and stored into a .xlsx file for later", "tuple of lists: (score_averages, win_percentage) corresponding to the players \"\"\"", "performance of the players score_averages = np.ndarray((num_players)) win_percentage = np.ndarray((num_players))", "scores[player_num, game_num] = results[0][player_num] if results[1] == player_num: wins[player_num, game_num]", "modes = players #Simulates num_games of games scores = np.ndarray((num_players,", "win_percentage[player_num] = np.mean(wins[player_num, :]) #If collecting data, prints data to", "to be used in testing and training the neural net.", "If collect_data is on, the players are selected randomly each", "on column_names = [\"round_number\", \"turn_number\", \"player_number\", \"play\", \"t_num\", \"hand\", \"unknown\",", "If collect_data is on, the play data is retrieved and", "and will run the game with the default params if", "= results[2].index[-1] + 1 full_data = pd.concat([full_data, results[2]]) #Scores and", "debug=False, players=[\"Random\", \"Greedy\", \"Probability\", \"Neural\"], file_name=\"PlayData/data4_12_250\"): \"\"\" Runs the mexican", "is to be used when training the neural net. 
This", "games scores = np.ndarray((num_players, num_games)) wins = np.ndarray((num_players, num_games)) full_data", "from: [\"Random\", \"Greedy\", \"Probability\"] If collect_data is off, the players", "sets players and checks validity of player values modes =", "the mexican train game repeatedly with different combinations of players", "and training the neural net. If collect_data is on, the", "game from: [\"Random\", \"Greedy\", \"Probability\"] If collect_data is off, the", "collect_data is off, the players are selected in order from", "for building dataframe later on column_names = [\"round_number\", \"turn_number\", \"player_number\",", "import numpy as np import pandas as pd import random", "file for later use The format for the file name", "+ \"_\" + str(num_games) + \".xlsx\" writer = pd.ExcelWriter(filename) full_data.to_excel(writer,", "writer.save() #Prints results and returns them as well if debug:", "filename = \"PlayData/data\" + str(num_players) + \"_\" + str(domino_size) +", "\"Greedy\", \"Probability\"] else: if not len(players) == num_players: raise RuntimeError(\"len(players)", "net. 
This script has no required parameters, and will run", "= [\"Random\", \"Greedy\", \"Probability\"] else: if not len(players) == num_players:", "+ 1 full_data = pd.concat([full_data, results[2]]) #Scores and wins are", "on, the play data is retrieved and stored into a", "collect_data: current_index = results[2].index[-1] + 1 full_data = pd.concat([full_data, results[2]])", "PlayData/data + num_players + _ + domino_size + _ +", "current_index = results[2].index[-1] + 1 full_data = pd.concat([full_data, results[2]]) #Scores", "wins are recorded into their respective arrays for player_num in", "\"Probability\"] else: if not len(players) == num_players: raise RuntimeError(\"len(players) must", "range(0, num_players): scores[player_num, game_num] = results[0][player_num] if results[1] == player_num:", "When collect_data is off: len(players) must equal num_players Returns a", "of use, sets players and checks validity of player values", "the play data is retrieved and stored into a .xlsx", "file name for this is as follows: PlayData/data + num_players", "num_players Returns a tuple of lists: (score_averages, win_percentage) corresponding to", "for the file name for this is as follows: PlayData/data", "in collect_data mode game_modes = [] if collect_data: for select", "\"unknown\", \"potential_plays\", \"points\"] #Depending on mode of use, sets players", "win_percentage = np.ndarray((num_players)) for player_num in range(0, num_players): score_averages[player_num] =", "the parameter players. When collect_data is off: len(players) must equal", "pd import random def simulate_games(num_players=4, domino_size=12, num_games=250, collect_data=True, debug=False, players=[\"Random\",", "writer = pd.ExcelWriter(filename) full_data.to_excel(writer, \"Sheet1\") writer.save() #Prints results and returns", "with the default params if unchanged. 
If collect_data is on,", "= np.ndarray((num_players)) for player_num in range(0, num_players): score_averages[player_num] = np.mean(scores[player_num,", "in range(0, num_players): game_modes.append(random.choice(modes)) else: game_modes = modes #Run game", "mexican train game repeatedly with different combinations of players to", "#Prints results and returns them as well if debug: print(score_averages)", "file if collect_data: filename = \"PlayData/data\" + str(num_players) + \"_\"", ":]) win_percentage[player_num] = np.mean(wins[player_num, :]) #If collecting data, prints data", "np.mean(scores[player_num, :]) win_percentage[player_num] = np.mean(wins[player_num, :]) #If collecting data, prints", "num_games=250, collect_data=True, debug=False, players=[\"Random\", \"Greedy\", \"Probability\", \"Neural\"], file_name=\"PlayData/data4_12_250\"): \"\"\" Runs", "[] if collect_data: for select in range(0, num_players): game_modes.append(random.choice(modes)) else:", "data to be used in testing and training the neural", "combinations of players to generate data to be used in", "selected in order from the parameter players. When collect_data is", "num_games of games scores = np.ndarray((num_players, num_games)) wins = np.ndarray((num_players,", "default params if unchanged. 
If collect_data is on, the players", "#Run game with parameters results = mtrain.mexicantrain(num_players, domino_size, debug=debug, modes=game_modes,", "will run the game with the default params if unchanged.", "and checks validity of player values modes = [] if", "pd.concat([full_data, results[2]]) #Scores and wins are recorded into their respective", "num_games + .xlsx This spreadsheet is to be used when", "has no required parameters, and will run the game with", "\"PlayData/data\" + str(num_players) + \"_\" + str(domino_size) + \"_\" +", "\"t_num\", \"hand\", \"unknown\", \"potential_plays\", \"points\"] #Depending on mode of use,", "\"turn_number\", \"player_number\", \"play\", \"t_num\", \"hand\", \"unknown\", \"potential_plays\", \"points\"] #Depending on", "def simulate_games(num_players=4, domino_size=12, num_games=250, collect_data=True, debug=False, players=[\"Random\", \"Greedy\", \"Probability\", \"Neural\"],", "dataframe later on column_names = [\"round_number\", \"turn_number\", \"player_number\", \"play\", \"t_num\",", "np.ndarray((num_players)) win_percentage = np.ndarray((num_players)) for player_num in range(0, num_players): score_averages[player_num]", "on mode of use, sets players and checks validity of", "wins[player_num, game_num] = 0 #Calculates performance of the players score_averages", "into the dataframe if collect_data: current_index = results[2].index[-1] + 1", "mode of use, sets players and checks validity of player", "If collect_data is off, the players are selected in order", "collect_data is off: len(players) must equal num_players Returns a tuple", "full_data.to_excel(writer, \"Sheet1\") writer.save() #Prints results and returns them as well", "if collect_data: for select in range(0, num_players): game_modes.append(random.choice(modes)) else: game_modes", "str(domino_size) + \"_\" + str(num_games) + \".xlsx\" writer = pd.ExcelWriter(filename)", "if collect_data: filename = \"PlayData/data\" + str(num_players) + \"_\" +", "when 
training the neural net. This script has no required", "#Calculates performance of the players score_averages = np.ndarray((num_players)) win_percentage =", "run the game with the default params if unchanged. If", "parameters results = mtrain.mexicantrain(num_players, domino_size, debug=debug, modes=game_modes, data_collection=collect_data, data_index=current_index, file_name=file_name)", "mtrain import numpy as np import pandas as pd import", "mtrain.mexicantrain(num_players, domino_size, debug=debug, modes=game_modes, data_collection=collect_data, data_index=current_index, file_name=file_name) #If collecting data,", "#Depending on mode of use, sets players and checks validity", "randomly each game from: [\"Random\", \"Greedy\", \"Probability\"] If collect_data is", "with parameters results = mtrain.mexicantrain(num_players, domino_size, debug=debug, modes=game_modes, data_collection=collect_data, data_index=current_index,", "their respective arrays for player_num in range(0, num_players): scores[player_num, game_num]", "range(0, num_games): #Randomize players if in collect_data mode game_modes =", "players if in collect_data mode game_modes = [] if collect_data:", "= pd.concat([full_data, results[2]]) #Scores and wins are recorded into their", "name for this is as follows: PlayData/data + num_players +", "results[2].index[-1] + 1 full_data = pd.concat([full_data, results[2]]) #Scores and wins", "random def simulate_games(num_players=4, domino_size=12, num_games=250, collect_data=True, debug=False, players=[\"Random\", \"Greedy\", \"Probability\",", "1 full_data = pd.concat([full_data, results[2]]) #Scores and wins are recorded", "retrieved and stored into a .xlsx file for later use", "num_games): #Randomize players if in collect_data mode game_modes = []", "\"Probability\"] If collect_data is off, the players are selected in", "of lists: (score_averages, win_percentage) corresponding to the players \"\"\" #Sets", "len(players) must equal num_players Returns a 
tuple of lists: (score_averages,", "format for the file name for this is as follows:", "later on column_names = [\"round_number\", \"turn_number\", \"player_number\", \"play\", \"t_num\", \"hand\",", "range(0, num_players): game_modes.append(random.choice(modes)) else: game_modes = modes #Run game with", "validity of player values modes = [] if collect_data: modes", "players #Simulates num_games of games scores = np.ndarray((num_players, num_games)) wins", "#If collecting data, prints data to a .xlsx file if", "\"hand\", \"unknown\", \"potential_plays\", \"points\"] #Depending on mode of use, sets", "recorded into their respective arrays for player_num in range(0, num_players):", "raise RuntimeError(\"len(players) must equal num_players when collect_data is off\") modes", "game_modes = modes #Run game with parameters results = mtrain.mexicantrain(num_players,", "collecting data, prints data to a .xlsx file if collect_data:", "pd.DataFrame(columns=column_names) current_index = 0 for game_num in range(0, num_games): #Randomize", "used when training the neural net. This script has no", "training the neural net. This script has no required parameters,", "+ .xlsx This spreadsheet is to be used when training", "else: if not len(players) == num_players: raise RuntimeError(\"len(players) must equal", "off: len(players) must equal num_players Returns a tuple of lists:", "data, data is stored into the dataframe if collect_data: current_index", "them as well if debug: print(score_averages) if debug: print(win_percentage) return", "collect_data is on, the players are selected randomly each game", "values modes = [] if collect_data: modes = [\"Random\", \"Greedy\",", "1 else: wins[player_num, game_num] = 0 #Calculates performance of the", "be used when training the neural net. 
This script has", "\"player_number\", \"play\", \"t_num\", \"hand\", \"unknown\", \"potential_plays\", \"points\"] #Depending on mode", "modes = [\"Random\", \"Greedy\", \"Probability\"] else: if not len(players) ==", "full_data = pd.DataFrame(columns=column_names) current_index = 0 for game_num in range(0,", ".xlsx file for later use The format for the file", "data_index=current_index, file_name=file_name) #If collecting data, data is stored into the", "= results[0][player_num] if results[1] == player_num: wins[player_num, game_num] = 1", "for player_num in range(0, num_players): scores[player_num, game_num] = results[0][player_num] if", "is on, the players are selected randomly each game from:", "game_modes.append(random.choice(modes)) else: game_modes = modes #Run game with parameters results", "else: wins[player_num, game_num] = 0 #Calculates performance of the players", "numpy as np import pandas as pd import random def", "play data is retrieved and stored into a .xlsx file", "\"\"\" #Sets column names for building dataframe later on column_names", "game with the default params if unchanged. 
If collect_data is", "use, sets players and checks validity of player values modes", "if in collect_data mode game_modes = [] if collect_data: for", "later use The format for the file name for this", "current_index = 0 for game_num in range(0, num_games): #Randomize players", "domino_size=12, num_games=250, collect_data=True, debug=False, players=[\"Random\", \"Greedy\", \"Probability\", \"Neural\"], file_name=\"PlayData/data4_12_250\"): \"\"\"", "different combinations of players to generate data to be used", "players=[\"Random\", \"Greedy\", \"Probability\", \"Neural\"], file_name=\"PlayData/data4_12_250\"): \"\"\" Runs the mexican train", "for player_num in range(0, num_players): score_averages[player_num] = np.mean(scores[player_num, :]) win_percentage[player_num]", "data is retrieved and stored into a .xlsx file for", "This spreadsheet is to be used when training the neural", "results[0][player_num] if results[1] == player_num: wins[player_num, game_num] = 1 else:", "[\"Random\", \"Greedy\", \"Probability\"] else: if not len(players) == num_players: raise", "num_players): game_modes.append(random.choice(modes)) else: game_modes = modes #Run game with parameters", "full_data = pd.concat([full_data, results[2]]) #Scores and wins are recorded into", "win_percentage) corresponding to the players \"\"\" #Sets column names for", "of player values modes = [] if collect_data: modes =", "modes=game_modes, data_collection=collect_data, data_index=current_index, file_name=file_name) #If collecting data, data is stored", "parameter players. 
When collect_data is off: len(players) must equal num_players", "\"Neural\"], file_name=\"PlayData/data4_12_250\"): \"\"\" Runs the mexican train game repeatedly with", "player_num: wins[player_num, game_num] = 1 else: wins[player_num, game_num] = 0", "\"\"\" Runs the mexican train game repeatedly with different combinations", "results and returns them as well if debug: print(score_averages) if", "are selected randomly each game from: [\"Random\", \"Greedy\", \"Probability\"] If", "= np.mean(wins[player_num, :]) #If collecting data, prints data to a", "num_players): scores[player_num, game_num] = results[0][player_num] if results[1] == player_num: wins[player_num,", "\"Probability\", \"Neural\"], file_name=\"PlayData/data4_12_250\"): \"\"\" Runs the mexican train game repeatedly", "off, the players are selected in order from the parameter", "a .xlsx file for later use The format for the", "= players #Simulates num_games of games scores = np.ndarray((num_players, num_games))", "players are selected randomly each game from: [\"Random\", \"Greedy\", \"Probability\"]", "is stored into the dataframe if collect_data: current_index = results[2].index[-1]", "for this is as follows: PlayData/data + num_players + _", "modes = [] if collect_data: modes = [\"Random\", \"Greedy\", \"Probability\"]", "results[2]]) #Scores and wins are recorded into their respective arrays", "game_num] = results[0][player_num] if results[1] == player_num: wins[player_num, game_num] =", "\"potential_plays\", \"points\"] #Depending on mode of use, sets players and", "= np.mean(scores[player_num, :]) win_percentage[player_num] = np.mean(wins[player_num, :]) #If collecting data,", "+ _ + num_games + .xlsx This spreadsheet is to", "repeatedly with different combinations of players to generate data to", "if not len(players) == num_players: raise RuntimeError(\"len(players) must equal num_players", "as well if debug: print(score_averages) if debug: print(win_percentage) return score_averages,", "[] if 
collect_data: modes = [\"Random\", \"Greedy\", \"Probability\"] else: if", "collect_data mode game_modes = [] if collect_data: for select in", "debug=debug, modes=game_modes, data_collection=collect_data, data_index=current_index, file_name=file_name) #If collecting data, data is", "np.ndarray((num_players, num_games)) full_data = pd.DataFrame(columns=column_names) current_index = 0 for game_num", "+ str(num_players) + \"_\" + str(domino_size) + \"_\" + str(num_games)", "players and checks validity of player values modes = []", "_ + num_games + .xlsx This spreadsheet is to be", "num_games)) full_data = pd.DataFrame(columns=column_names) current_index = 0 for game_num in", "players score_averages = np.ndarray((num_players)) win_percentage = np.ndarray((num_players)) for player_num in", "are recorded into their respective arrays for player_num in range(0,", "= np.ndarray((num_players, num_games)) full_data = pd.DataFrame(columns=column_names) current_index = 0 for", "generate data to be used in testing and training the", "as np import pandas as pd import random def simulate_games(num_players=4,", "the default params if unchanged. 
If collect_data is on, the", "is off\") modes = players #Simulates num_games of games scores", "Returns a tuple of lists: (score_averages, win_percentage) corresponding to the", "into their respective arrays for player_num in range(0, num_players): scores[player_num,", ".xlsx This spreadsheet is to be used when training the", "\"Greedy\", \"Probability\"] If collect_data is off, the players are selected", "checks validity of player values modes = [] if collect_data:", "follows: PlayData/data + num_players + _ + domino_size + _", "\"_\" + str(num_games) + \".xlsx\" writer = pd.ExcelWriter(filename) full_data.to_excel(writer, \"Sheet1\")", "collect_data=True, debug=False, players=[\"Random\", \"Greedy\", \"Probability\", \"Neural\"], file_name=\"PlayData/data4_12_250\"): \"\"\" Runs the", "+ domino_size + _ + num_games + .xlsx This spreadsheet", "on, the players are selected randomly each game from: [\"Random\",", "+ \"_\" + str(domino_size) + \"_\" + str(num_games) + \".xlsx\"", "This script has no required parameters, and will run the", "selected randomly each game from: [\"Random\", \"Greedy\", \"Probability\"] If collect_data", "are selected in order from the parameter players. 
When collect_data", "game with parameters results = mtrain.mexicantrain(num_players, domino_size, debug=debug, modes=game_modes, data_collection=collect_data,", "collect_data is on, the play data is retrieved and stored", "= 1 else: wins[player_num, game_num] = 0 #Calculates performance of", "the dataframe if collect_data: current_index = results[2].index[-1] + 1 full_data", "\"play\", \"t_num\", \"hand\", \"unknown\", \"potential_plays\", \"points\"] #Depending on mode of", "= np.ndarray((num_players)) win_percentage = np.ndarray((num_players)) for player_num in range(0, num_players):", "modes #Run game with parameters results = mtrain.mexicantrain(num_players, domino_size, debug=debug,", "simulate_games(num_players=4, domino_size=12, num_games=250, collect_data=True, debug=False, players=[\"Random\", \"Greedy\", \"Probability\", \"Neural\"], file_name=\"PlayData/data4_12_250\"):", "required parameters, and will run the game with the default", "= [\"round_number\", \"turn_number\", \"player_number\", \"play\", \"t_num\", \"hand\", \"unknown\", \"potential_plays\", \"points\"]", "collect_data is off\") modes = players #Simulates num_games of games", "no required parameters, and will run the game with the", "[\"Random\", \"Greedy\", \"Probability\"] If collect_data is off, the players are", "if unchanged. If collect_data is on, the players are selected", "unchanged. If collect_data is on, the players are selected randomly", "+ _ + domino_size + _ + num_games + .xlsx", "= modes #Run game with parameters results = mtrain.mexicantrain(num_players, domino_size,", "names for building dataframe later on column_names = [\"round_number\", \"turn_number\",", "parameters, and will run the game with the default params", "from the parameter players. 
When collect_data is off: len(players) must", "= [] if collect_data: for select in range(0, num_players): game_modes.append(random.choice(modes))", "\"Greedy\", \"Probability\", \"Neural\"], file_name=\"PlayData/data4_12_250\"): \"\"\" Runs the mexican train game", "train game repeatedly with different combinations of players to generate", "collect_data: filename = \"PlayData/data\" + str(num_players) + \"_\" + str(domino_size)", "num_players + _ + domino_size + _ + num_games +", "script has no required parameters, and will run the game", "the players score_averages = np.ndarray((num_players)) win_percentage = np.ndarray((num_players)) for player_num", "and stored into a .xlsx file for later use The", "if results[1] == player_num: wins[player_num, game_num] = 1 else: wins[player_num,", "game_num] = 1 else: wins[player_num, game_num] = 0 #Calculates performance", "column_names = [\"round_number\", \"turn_number\", \"player_number\", \"play\", \"t_num\", \"hand\", \"unknown\", \"potential_plays\",", "the players are selected in order from the parameter players.", "equal num_players when collect_data is off\") modes = players #Simulates", "to a .xlsx file if collect_data: filename = \"PlayData/data\" +", "range(0, num_players): score_averages[player_num] = np.mean(scores[player_num, :]) win_percentage[player_num] = np.mean(wins[player_num, :])", "players are selected in order from the parameter players. 
When", "must equal num_players when collect_data is off\") modes = players", "collect_data: modes = [\"Random\", \"Greedy\", \"Probability\"] else: if not len(players)", "#Simulates num_games of games scores = np.ndarray((num_players, num_games)) wins =", "num_games)) wins = np.ndarray((num_players, num_games)) full_data = pd.DataFrame(columns=column_names) current_index =", "#Randomize players if in collect_data mode game_modes = [] if", "if collect_data: modes = [\"Random\", \"Greedy\", \"Probability\"] else: if not", "== num_players: raise RuntimeError(\"len(players) must equal num_players when collect_data is", "stored into the dataframe if collect_data: current_index = results[2].index[-1] +", "= [] if collect_data: modes = [\"Random\", \"Greedy\", \"Probability\"] else:", "= mtrain.mexicantrain(num_players, domino_size, debug=debug, modes=game_modes, data_collection=collect_data, data_index=current_index, file_name=file_name) #If collecting", "\".xlsx\" writer = pd.ExcelWriter(filename) full_data.to_excel(writer, \"Sheet1\") writer.save() #Prints results and", "str(num_players) + \"_\" + str(domino_size) + \"_\" + str(num_games) +", "params if unchanged. If collect_data is on, the players are", "for later use The format for the file name for", "order from the parameter players. 
When collect_data is off: len(players)", "dataframe if collect_data: current_index = results[2].index[-1] + 1 full_data =", "wins = np.ndarray((num_players, num_games)) full_data = pd.DataFrame(columns=column_names) current_index = 0", "#Scores and wins are recorded into their respective arrays for", "num_players: raise RuntimeError(\"len(players) must equal num_players when collect_data is off\")", "if collect_data: current_index = results[2].index[-1] + 1 full_data = pd.concat([full_data,", "game_num] = 0 #Calculates performance of the players score_averages =", "np.ndarray((num_players)) for player_num in range(0, num_players): score_averages[player_num] = np.mean(scores[player_num, :])", "and returns them as well if debug: print(score_averages) if debug:", "data is stored into the dataframe if collect_data: current_index =", "game repeatedly with different combinations of players to generate data", "spreadsheet is to be used when training the neural net." ]
[ "__all__ = (\"DottedMarkupLanguageException\", \"DecodeError\") class DottedMarkupLanguageException(Exception): \"\"\"Base class for all", "pass class DecodeError(DottedMarkupLanguageException): \"\"\"Raised when there is an error decoding", "all exceptions in this module.\"\"\" pass class DecodeError(DottedMarkupLanguageException): \"\"\"Raised when", "\"\"\"Base class for all exceptions in this module.\"\"\" pass class", "<reponame>RGBCube/dml __all__ = (\"DottedMarkupLanguageException\", \"DecodeError\") class DottedMarkupLanguageException(Exception): \"\"\"Base class for", "DecodeError(DottedMarkupLanguageException): \"\"\"Raised when there is an error decoding a string.\"\"\"", "class for all exceptions in this module.\"\"\" pass class DecodeError(DottedMarkupLanguageException):", "class DecodeError(DottedMarkupLanguageException): \"\"\"Raised when there is an error decoding a", "exceptions in this module.\"\"\" pass class DecodeError(DottedMarkupLanguageException): \"\"\"Raised when there", "\"\"\"Raised when there is an error decoding a string.\"\"\" pass", "class DottedMarkupLanguageException(Exception): \"\"\"Base class for all exceptions in this module.\"\"\"", "this module.\"\"\" pass class DecodeError(DottedMarkupLanguageException): \"\"\"Raised when there is an", "module.\"\"\" pass class DecodeError(DottedMarkupLanguageException): \"\"\"Raised when there is an error", "= (\"DottedMarkupLanguageException\", \"DecodeError\") class DottedMarkupLanguageException(Exception): \"\"\"Base class for all exceptions", "for all exceptions in this module.\"\"\" pass class DecodeError(DottedMarkupLanguageException): \"\"\"Raised", "in this module.\"\"\" pass class DecodeError(DottedMarkupLanguageException): \"\"\"Raised when there is", "(\"DottedMarkupLanguageException\", \"DecodeError\") class DottedMarkupLanguageException(Exception): \"\"\"Base class for all exceptions in", "DottedMarkupLanguageException(Exception): \"\"\"Base class for all exceptions in this 
module.\"\"\" pass", "\"DecodeError\") class DottedMarkupLanguageException(Exception): \"\"\"Base class for all exceptions in this" ]
[ "in [\"xmin\", \"ymin\", \"xmax\", \"ymax\"]] instances.append( {\"category_id\": CLASS_NAMES.index(cls), \"bbox\": bbox,", "fvcore.common.file_io import PathManager import os import numpy as np import", "jpeg_file, \"image_id\": fileid, \"height\": int(tree.findall(\"./size/height\")[0].text), \"width\": int(tree.findall(\"./size/width\")[0].text), } instances =", "\"images\", fileid + \".jpg\") tree = ET.parse(anno_file) r = {", "ap.add_argument(\"--split\", default=\"train\") ap.add_argument(\"--samples\", type=int, default=10) ap.add_argument(\"--scale\", type=float, default=1.0) args =", "instances.append( {\"category_id\": CLASS_NAMES.index(cls), \"bbox\": bbox, \"bbox_mode\": BoxMode.XYXY_ABS} ) r[\"annotations\"] =", "= obj.find(\"name\").text bbox = obj.find(\"bndbox\") bbox = [float(bbox.find(x).text) for x", "# # Below code is inspired on # https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py #", "return dicts def register_licenseplates_voc(name, dirname, split): DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split))", "= f\"licenseplates_{args.split}\" register_licenseplates_voc(dataset_name, \"datasets/licenseplates\", args.split) dataset_dicts = DatasetCatalog.get(dataset_name) for d", "import PathManager import os import numpy as np import xml.etree.ElementTree", "for fileid in fileids: anno_file = os.path.join(dirname, \"annotations\", fileid +", "to Detectron2 format. Args: dirname: Contain \"annotations\", \"images\" split (str):", "fileids: anno_file = os.path.join(dirname, \"annotations\", fileid + \".xml\") jpeg_file =", "np.loadtxt(f, dtype=np.str) dicts = [] for fileid in fileids: anno_file", "Detectron2 # Licensed under the Apache 2.0 license. # --------------------------------------------------------", "tree.findall(\"object\"): cls = obj.find(\"name\").text bbox = obj.find(\"bndbox\") bbox = [float(bbox.find(x).text)", "detection annotations to Detectron2 format. 
Args: dirname: Contain \"annotations\", \"images\"", "{\"category_id\": CLASS_NAMES.index(cls), \"bbox\": bbox, \"bbox_mode\": BoxMode.XYXY_ABS} ) r[\"annotations\"] = instances", "ET.parse(anno_file) r = { \"file_name\": jpeg_file, \"image_id\": fileid, \"height\": int(tree.findall(\"./size/height\")[0].text),", "# -------------------------------------------------------- from fvcore.common.file_io import PathManager import os import numpy", "args = ap.parse_args() dataset_name = f\"licenseplates_{args.split}\" register_licenseplates_voc(dataset_name, \"datasets/licenseplates\", args.split) dataset_dicts", "int(tree.findall(\"./size/height\")[0].text), \"width\": int(tree.findall(\"./size/width\")[0].text), } instances = [] for obj in", "Licensed under the Apache 2.0 license. # -------------------------------------------------------- from fvcore.common.file_io", "default=10) ap.add_argument(\"--scale\", type=float, default=1.0) args = ap.parse_args() dataset_name = f\"licenseplates_{args.split}\"", "x in [\"xmin\", \"ymin\", \"xmax\", \"ymax\"]] instances.append( {\"category_id\": CLASS_NAMES.index(cls), \"bbox\":", "f\"licenseplates_{args.split}\" register_licenseplates_voc(dataset_name, \"datasets/licenseplates\", args.split) dataset_dicts = DatasetCatalog.get(dataset_name) for d in", "\"\"\" with PathManager.open(os.path.join(dirname, split + \".txt\")) as f: fileids =", "tree = ET.parse(anno_file) r = { \"file_name\": jpeg_file, \"image_id\": fileid,", "split)) MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES, dirname=dirname, split=split) if __name__ == \"__main__\": import random", "2.0 license. # -------------------------------------------------------- from fvcore.common.file_io import PathManager import os", "license. 
# -------------------------------------------------------- from fvcore.common.file_io import PathManager import os import", "\"test\" \"\"\" with PathManager.open(os.path.join(dirname, split + \".txt\")) as f: fileids", "__name__ == \"__main__\": import random import cv2 from detectron2.utils.visualizer import", "= argparse.ArgumentParser() ap.add_argument(\"--split\", default=\"train\") ap.add_argument(\"--samples\", type=int, default=10) ap.add_argument(\"--scale\", type=float, default=1.0)", "= ap.parse_args() dataset_name = f\"licenseplates_{args.split}\" register_licenseplates_voc(dataset_name, \"datasets/licenseplates\", args.split) dataset_dicts =", "random.sample(dataset_dicts, args.samples): img = cv2.imread(d[\"file_name\"]) visualizer = Visualizer(img[:, :, ::-1],", "in tree.findall(\"object\"): cls = obj.find(\"name\").text bbox = obj.find(\"bndbox\") bbox =", "= np.loadtxt(f, dtype=np.str) dicts = [] for fileid in fileids:", "= DatasetCatalog.get(dataset_name) for d in random.sample(dataset_dicts, args.samples): img = cv2.imread(d[\"file_name\"])", "if __name__ == \"__main__\": import random import cv2 from detectron2.utils.visualizer", "CLASS_NAMES.index(cls), \"bbox\": bbox, \"bbox_mode\": BoxMode.XYXY_ABS} ) r[\"annotations\"] = instances dicts.append(r)", "PathManager.open(os.path.join(dirname, split + \".txt\")) as f: fileids = np.loadtxt(f, dtype=np.str)", "# Below code is inspired on # https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py # --------------------------------------------------------", "type=int, default=10) ap.add_argument(\"--scale\", type=float, default=1.0) args = ap.parse_args() dataset_name =", "# https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py # -------------------------------------------------------- # Detectron2 # Licensed under the", "argparse.ArgumentParser() ap.add_argument(\"--split\", default=\"train\") 
ap.add_argument(\"--samples\", type=int, default=10) ap.add_argument(\"--scale\", type=float, default=1.0) args", "Load licenseplates VOC detection annotations to Detectron2 format. Args: dirname:", "[ \"license_plate\", ] def load_voc_instances(dirname: str, split: str): \"\"\" Load", "def register_licenseplates_voc(name, dirname, split): DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split)) MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES, dirname=dirname,", "command line arguments ap = argparse.ArgumentParser() ap.add_argument(\"--split\", default=\"train\") ap.add_argument(\"--samples\", type=int,", "= os.path.join(dirname, \"images\", fileid + \".jpg\") tree = ET.parse(anno_file) r", "def load_voc_instances(dirname: str, split: str): \"\"\" Load licenseplates VOC detection", "metadata=MetadataCatalog.get(dataset_name), scale=args.scale) vis = visualizer.draw_dataset_dict(d) cv2.imshow(dataset_name, vis.get_image()[:, :, ::-1]) #", "\".jpg\") tree = ET.parse(anno_file) r = { \"file_name\": jpeg_file, \"image_id\":", "DatasetCatalog, MetadataCatalog __all__ = [\"register_licenseplates_voc\"] CLASS_NAMES = [ \"license_plate\", ]", "for d in random.sample(dataset_dicts, args.samples): img = cv2.imread(d[\"file_name\"]) visualizer =", "xml.etree.ElementTree as ET from detectron2.structures import BoxMode from detectron2.data import", "default=1.0) args = ap.parse_args() dataset_name = f\"licenseplates_{args.split}\" register_licenseplates_voc(dataset_name, \"datasets/licenseplates\", args.split)", ":, ::-1]) # Exit? 
Press ESC if cv2.waitKey(0) & 0xFF", "\"annotations\", \"images\" split (str): one of \"train\", \"test\" \"\"\" with", "visualizer = Visualizer(img[:, :, ::-1], metadata=MetadataCatalog.get(dataset_name), scale=args.scale) vis = visualizer.draw_dataset_dict(d)", "} instances = [] for obj in tree.findall(\"object\"): cls =", "one of \"train\", \"test\" \"\"\" with PathManager.open(os.path.join(dirname, split + \".txt\"))", "argparse # Parse command line arguments ap = argparse.ArgumentParser() ap.add_argument(\"--split\",", "ET from detectron2.structures import BoxMode from detectron2.data import DatasetCatalog, MetadataCatalog", "# Parse command line arguments ap = argparse.ArgumentParser() ap.add_argument(\"--split\", default=\"train\")", "on # https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py # -------------------------------------------------------- # Detectron2 # Licensed under", "import numpy as np import xml.etree.ElementTree as ET from detectron2.structures", "import BoxMode from detectron2.data import DatasetCatalog, MetadataCatalog __all__ = [\"register_licenseplates_voc\"]", "MetadataCatalog __all__ = [\"register_licenseplates_voc\"] CLASS_NAMES = [ \"license_plate\", ] def", "format. 
Args: dirname: Contain \"annotations\", \"images\" split (str): one of", "+ \".jpg\") tree = ET.parse(anno_file) r = { \"file_name\": jpeg_file,", "= os.path.join(dirname, \"annotations\", fileid + \".xml\") jpeg_file = os.path.join(dirname, \"images\",", "dirname=dirname, split=split) if __name__ == \"__main__\": import random import cv2", "CLASS_NAMES = [ \"license_plate\", ] def load_voc_instances(dirname: str, split: str):", "obj.find(\"name\").text bbox = obj.find(\"bndbox\") bbox = [float(bbox.find(x).text) for x in", "import Visualizer import argparse # Parse command line arguments ap", "= [float(bbox.find(x).text) for x in [\"xmin\", \"ymin\", \"xmax\", \"ymax\"]] instances.append(", "\"bbox\": bbox, \"bbox_mode\": BoxMode.XYXY_ABS} ) r[\"annotations\"] = instances dicts.append(r) return", "detectron2.utils.visualizer import Visualizer import argparse # Parse command line arguments", "Apache 2.0 license. # -------------------------------------------------------- from fvcore.common.file_io import PathManager import", "the Apache 2.0 license. 
# -------------------------------------------------------- from fvcore.common.file_io import PathManager", "= ET.parse(anno_file) r = { \"file_name\": jpeg_file, \"image_id\": fileid, \"height\":", "r = { \"file_name\": jpeg_file, \"image_id\": fileid, \"height\": int(tree.findall(\"./size/height\")[0].text), \"width\":", "Visualizer import argparse # Parse command line arguments ap =", "= [] for fileid in fileids: anno_file = os.path.join(dirname, \"annotations\",", "as np import xml.etree.ElementTree as ET from detectron2.structures import BoxMode", "[float(bbox.find(x).text) for x in [\"xmin\", \"ymin\", \"xmax\", \"ymax\"]] instances.append( {\"category_id\":", "DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split)) MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES, dirname=dirname, split=split) if __name__ ==", "[\"register_licenseplates_voc\"] CLASS_NAMES = [ \"license_plate\", ] def load_voc_instances(dirname: str, split:", "ap.add_argument(\"--samples\", type=int, default=10) ap.add_argument(\"--scale\", type=float, default=1.0) args = ap.parse_args() dataset_name", "vis = visualizer.draw_dataset_dict(d) cv2.imshow(dataset_name, vis.get_image()[:, :, ::-1]) # Exit? Press", "Exit? 
Press ESC if cv2.waitKey(0) & 0xFF == 27: break", "BoxMode from detectron2.data import DatasetCatalog, MetadataCatalog __all__ = [\"register_licenseplates_voc\"] CLASS_NAMES", "= instances dicts.append(r) return dicts def register_licenseplates_voc(name, dirname, split): DatasetCatalog.register(name,", "bbox = [float(bbox.find(x).text) for x in [\"xmin\", \"ymin\", \"xmax\", \"ymax\"]]", "= [ \"license_plate\", ] def load_voc_instances(dirname: str, split: str): \"\"\"", "as ET from detectron2.structures import BoxMode from detectron2.data import DatasetCatalog,", "[\"xmin\", \"ymin\", \"xmax\", \"ymax\"]] instances.append( {\"category_id\": CLASS_NAMES.index(cls), \"bbox\": bbox, \"bbox_mode\":", "anno_file = os.path.join(dirname, \"annotations\", fileid + \".xml\") jpeg_file = os.path.join(dirname,", "\"\"\" Load licenseplates VOC detection annotations to Detectron2 format. Args:", "visualizer.draw_dataset_dict(d) cv2.imshow(dataset_name, vis.get_image()[:, :, ::-1]) # Exit? Press ESC if", "with PathManager.open(os.path.join(dirname, split + \".txt\")) as f: fileids = np.loadtxt(f,", "\"train\", \"test\" \"\"\" with PathManager.open(os.path.join(dirname, split + \".txt\")) as f:", "] def load_voc_instances(dirname: str, split: str): \"\"\" Load licenseplates VOC", "\"__main__\": import random import cv2 from detectron2.utils.visualizer import Visualizer import", "VOC detection annotations to Detectron2 format. Args: dirname: Contain \"annotations\",", "licenseplates VOC detection annotations to Detectron2 format. 
Args: dirname: Contain", "os.path.join(dirname, \"images\", fileid + \".jpg\") tree = ET.parse(anno_file) r =", "BoxMode.XYXY_ABS} ) r[\"annotations\"] = instances dicts.append(r) return dicts def register_licenseplates_voc(name,", "import argparse # Parse command line arguments ap = argparse.ArgumentParser()", "detectron2.data import DatasetCatalog, MetadataCatalog __all__ = [\"register_licenseplates_voc\"] CLASS_NAMES = [", "+ \".txt\")) as f: fileids = np.loadtxt(f, dtype=np.str) dicts =", "r[\"annotations\"] = instances dicts.append(r) return dicts def register_licenseplates_voc(name, dirname, split):", "-------------------------------------------------------- from fvcore.common.file_io import PathManager import os import numpy as", "from detectron2.utils.visualizer import Visualizer import argparse # Parse command line", "split=split) if __name__ == \"__main__\": import random import cv2 from", "for x in [\"xmin\", \"ymin\", \"xmax\", \"ymax\"]] instances.append( {\"category_id\": CLASS_NAMES.index(cls),", "dicts.append(r) return dicts def register_licenseplates_voc(name, dirname, split): DatasetCatalog.register(name, lambda: load_voc_instances(dirname,", "dirname, split): DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split)) MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES, dirname=dirname, split=split) if", "random import cv2 from detectron2.utils.visualizer import Visualizer import argparse #", "dataset_name = f\"licenseplates_{args.split}\" register_licenseplates_voc(dataset_name, \"datasets/licenseplates\", args.split) dataset_dicts = DatasetCatalog.get(dataset_name) for", "obj in tree.findall(\"object\"): cls = obj.find(\"name\").text bbox = obj.find(\"bndbox\") bbox", "detectron2.structures import BoxMode from detectron2.data import DatasetCatalog, MetadataCatalog __all__ =", "\"xmax\", \"ymax\"]] instances.append( {\"category_id\": CLASS_NAMES.index(cls), \"bbox\": bbox, \"bbox_mode\": BoxMode.XYXY_ABS} )", "line arguments ap 
= argparse.ArgumentParser() ap.add_argument(\"--split\", default=\"train\") ap.add_argument(\"--samples\", type=int, default=10)", ") r[\"annotations\"] = instances dicts.append(r) return dicts def register_licenseplates_voc(name, dirname,", "d in random.sample(dataset_dicts, args.samples): img = cv2.imread(d[\"file_name\"]) visualizer = Visualizer(img[:,", "\"ymin\", \"xmax\", \"ymax\"]] instances.append( {\"category_id\": CLASS_NAMES.index(cls), \"bbox\": bbox, \"bbox_mode\": BoxMode.XYXY_ABS}", "Args: dirname: Contain \"annotations\", \"images\" split (str): one of \"train\",", "bbox = obj.find(\"bndbox\") bbox = [float(bbox.find(x).text) for x in [\"xmin\",", "in fileids: anno_file = os.path.join(dirname, \"annotations\", fileid + \".xml\") jpeg_file", "import cv2 from detectron2.utils.visualizer import Visualizer import argparse # Parse", "::-1]) # Exit? Press ESC if cv2.waitKey(0) & 0xFF ==", "load_voc_instances(dirname: str, split: str): \"\"\" Load licenseplates VOC detection annotations", "is inspired on # https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py # -------------------------------------------------------- # Detectron2 #", "\".txt\")) as f: fileids = np.loadtxt(f, dtype=np.str) dicts = []", "== \"__main__\": import random import cv2 from detectron2.utils.visualizer import Visualizer", "annotations to Detectron2 format. 
Args: dirname: Contain \"annotations\", \"images\" split", "DatasetCatalog.get(dataset_name) for d in random.sample(dataset_dicts, args.samples): img = cv2.imread(d[\"file_name\"]) visualizer", "[] for obj in tree.findall(\"object\"): cls = obj.find(\"name\").text bbox =", "import os import numpy as np import xml.etree.ElementTree as ET", "ap.parse_args() dataset_name = f\"licenseplates_{args.split}\" register_licenseplates_voc(dataset_name, \"datasets/licenseplates\", args.split) dataset_dicts = DatasetCatalog.get(dataset_name)", "__all__ = [\"register_licenseplates_voc\"] CLASS_NAMES = [ \"license_plate\", ] def load_voc_instances(dirname:", "os import numpy as np import xml.etree.ElementTree as ET from", "lambda: load_voc_instances(dirname, split)) MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES, dirname=dirname, split=split) if __name__ == \"__main__\":", "code is inspired on # https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py # -------------------------------------------------------- # Detectron2", "import xml.etree.ElementTree as ET from detectron2.structures import BoxMode from detectron2.data", "PathManager import os import numpy as np import xml.etree.ElementTree as", "\".xml\") jpeg_file = os.path.join(dirname, \"images\", fileid + \".jpg\") tree =", "[] for fileid in fileids: anno_file = os.path.join(dirname, \"annotations\", fileid", "fileid in fileids: anno_file = os.path.join(dirname, \"annotations\", fileid + \".xml\")", "of \"train\", \"test\" \"\"\" with PathManager.open(os.path.join(dirname, split + \".txt\")) as", "args.samples): img = cv2.imread(d[\"file_name\"]) visualizer = Visualizer(img[:, :, ::-1], metadata=MetadataCatalog.get(dataset_name),", "dicts = [] for fileid in fileids: anno_file = os.path.join(dirname,", "fileid + \".xml\") jpeg_file = os.path.join(dirname, \"images\", fileid + \".jpg\")", ":, ::-1], metadata=MetadataCatalog.get(dataset_name), scale=args.scale) vis = 
visualizer.draw_dataset_dict(d) cv2.imshow(dataset_name, vis.get_image()[:, :,", "::-1], metadata=MetadataCatalog.get(dataset_name), scale=args.scale) vis = visualizer.draw_dataset_dict(d) cv2.imshow(dataset_name, vis.get_image()[:, :, ::-1])", "split + \".txt\")) as f: fileids = np.loadtxt(f, dtype=np.str) dicts", "obj.find(\"bndbox\") bbox = [float(bbox.find(x).text) for x in [\"xmin\", \"ymin\", \"xmax\",", "# Licensed under the Apache 2.0 license. # -------------------------------------------------------- from", "instances = [] for obj in tree.findall(\"object\"): cls = obj.find(\"name\").text", "split (str): one of \"train\", \"test\" \"\"\" with PathManager.open(os.path.join(dirname, split", "cls = obj.find(\"name\").text bbox = obj.find(\"bndbox\") bbox = [float(bbox.find(x).text) for", "\"annotations\", fileid + \".xml\") jpeg_file = os.path.join(dirname, \"images\", fileid +", "= obj.find(\"bndbox\") bbox = [float(bbox.find(x).text) for x in [\"xmin\", \"ymin\",", "import random import cv2 from detectron2.utils.visualizer import Visualizer import argparse", "= Visualizer(img[:, :, ::-1], metadata=MetadataCatalog.get(dataset_name), scale=args.scale) vis = visualizer.draw_dataset_dict(d) cv2.imshow(dataset_name,", "= cv2.imread(d[\"file_name\"]) visualizer = Visualizer(img[:, :, ::-1], metadata=MetadataCatalog.get(dataset_name), scale=args.scale) vis", "inspired on # https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py # -------------------------------------------------------- # Detectron2 # Licensed", "import DatasetCatalog, MetadataCatalog __all__ = [\"register_licenseplates_voc\"] CLASS_NAMES = [ \"license_plate\",", "\"ymax\"]] instances.append( {\"category_id\": CLASS_NAMES.index(cls), \"bbox\": bbox, \"bbox_mode\": BoxMode.XYXY_ABS} ) r[\"annotations\"]", "Parse command line arguments ap = argparse.ArgumentParser() ap.add_argument(\"--split\", default=\"train\") ap.add_argument(\"--samples\",", "args.split) 
dataset_dicts = DatasetCatalog.get(dataset_name) for d in random.sample(dataset_dicts, args.samples): img", "instances dicts.append(r) return dicts def register_licenseplates_voc(name, dirname, split): DatasetCatalog.register(name, lambda:", "cv2.imshow(dataset_name, vis.get_image()[:, :, ::-1]) # Exit? Press ESC if cv2.waitKey(0)", "dicts def register_licenseplates_voc(name, dirname, split): DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split)) MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES,", "# -------------------------------------------------------- # Detectron2 # Licensed under the Apache 2.0", "register_licenseplates_voc(dataset_name, \"datasets/licenseplates\", args.split) dataset_dicts = DatasetCatalog.get(dataset_name) for d in random.sample(dataset_dicts,", "\"license_plate\", ] def load_voc_instances(dirname: str, split: str): \"\"\" Load licenseplates", "cv2.imread(d[\"file_name\"]) visualizer = Visualizer(img[:, :, ::-1], metadata=MetadataCatalog.get(dataset_name), scale=args.scale) vis =", "load_voc_instances(dirname, split)) MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES, dirname=dirname, split=split) if __name__ == \"__main__\": import", "ap.add_argument(\"--scale\", type=float, default=1.0) args = ap.parse_args() dataset_name = f\"licenseplates_{args.split}\" register_licenseplates_voc(dataset_name,", "\"images\" split (str): one of \"train\", \"test\" \"\"\" with PathManager.open(os.path.join(dirname,", "vis.get_image()[:, :, ::-1]) # Exit? 
Press ESC if cv2.waitKey(0) &", "split): DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split)) MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES, dirname=dirname, split=split) if __name__", "dataset_dicts = DatasetCatalog.get(dataset_name) for d in random.sample(dataset_dicts, args.samples): img =", "\"file_name\": jpeg_file, \"image_id\": fileid, \"height\": int(tree.findall(\"./size/height\")[0].text), \"width\": int(tree.findall(\"./size/width\")[0].text), } instances", "from detectron2.structures import BoxMode from detectron2.data import DatasetCatalog, MetadataCatalog __all__", "under the Apache 2.0 license. # -------------------------------------------------------- from fvcore.common.file_io import", "fileid + \".jpg\") tree = ET.parse(anno_file) r = { \"file_name\":", "\"image_id\": fileid, \"height\": int(tree.findall(\"./size/height\")[0].text), \"width\": int(tree.findall(\"./size/width\")[0].text), } instances = []", "\"width\": int(tree.findall(\"./size/width\")[0].text), } instances = [] for obj in tree.findall(\"object\"):", "register_licenseplates_voc(name, dirname, split): DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split)) MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES, dirname=dirname, split=split)", "MetadataCatalog.get(name).set(thing_classes=CLASS_NAMES, dirname=dirname, split=split) if __name__ == \"__main__\": import random import", "arguments ap = argparse.ArgumentParser() ap.add_argument(\"--split\", default=\"train\") ap.add_argument(\"--samples\", type=int, default=10) ap.add_argument(\"--scale\",", "img = cv2.imread(d[\"file_name\"]) visualizer = Visualizer(img[:, :, ::-1], metadata=MetadataCatalog.get(dataset_name), scale=args.scale)", "Press ESC if cv2.waitKey(0) & 0xFF == 27: break cv2.destroyAllWindows()", "str, split: str): \"\"\" Load licenseplates VOC detection annotations to", "# Detectron2 # Licensed under the Apache 2.0 license. 
#", "os.path.join(dirname, \"annotations\", fileid + \".xml\") jpeg_file = os.path.join(dirname, \"images\", fileid", "Contain \"annotations\", \"images\" split (str): one of \"train\", \"test\" \"\"\"", "dtype=np.str) dicts = [] for fileid in fileids: anno_file =", "f: fileids = np.loadtxt(f, dtype=np.str) dicts = [] for fileid", "int(tree.findall(\"./size/width\")[0].text), } instances = [] for obj in tree.findall(\"object\"): cls", "\"bbox_mode\": BoxMode.XYXY_ABS} ) r[\"annotations\"] = instances dicts.append(r) return dicts def", "cv2 from detectron2.utils.visualizer import Visualizer import argparse # Parse command", "{ \"file_name\": jpeg_file, \"image_id\": fileid, \"height\": int(tree.findall(\"./size/height\")[0].text), \"width\": int(tree.findall(\"./size/width\")[0].text), }", "Below code is inspired on # https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py # -------------------------------------------------------- #", "ap = argparse.ArgumentParser() ap.add_argument(\"--split\", default=\"train\") ap.add_argument(\"--samples\", type=int, default=10) ap.add_argument(\"--scale\", type=float,", "in random.sample(dataset_dicts, args.samples): img = cv2.imread(d[\"file_name\"]) visualizer = Visualizer(img[:, :,", "dirname: Contain \"annotations\", \"images\" split (str): one of \"train\", \"test\"", "Detectron2 format. Args: dirname: Contain \"annotations\", \"images\" split (str): one", "np import xml.etree.ElementTree as ET from detectron2.structures import BoxMode from", "= { \"file_name\": jpeg_file, \"image_id\": fileid, \"height\": int(tree.findall(\"./size/height\")[0].text), \"width\": int(tree.findall(\"./size/width\")[0].text),", "scale=args.scale) vis = visualizer.draw_dataset_dict(d) cv2.imshow(dataset_name, vis.get_image()[:, :, ::-1]) # Exit?", "# Exit? 
Press ESC if cv2.waitKey(0) & 0xFF == 27:", "\"height\": int(tree.findall(\"./size/height\")[0].text), \"width\": int(tree.findall(\"./size/width\")[0].text), } instances = [] for obj", "type=float, default=1.0) args = ap.parse_args() dataset_name = f\"licenseplates_{args.split}\" register_licenseplates_voc(dataset_name, \"datasets/licenseplates\",", "= [\"register_licenseplates_voc\"] CLASS_NAMES = [ \"license_plate\", ] def load_voc_instances(dirname: str,", "str): \"\"\" Load licenseplates VOC detection annotations to Detectron2 format.", "fileids = np.loadtxt(f, dtype=np.str) dicts = [] for fileid in", "for obj in tree.findall(\"object\"): cls = obj.find(\"name\").text bbox = obj.find(\"bndbox\")", "fileid, \"height\": int(tree.findall(\"./size/height\")[0].text), \"width\": int(tree.findall(\"./size/width\")[0].text), } instances = [] for", "default=\"train\") ap.add_argument(\"--samples\", type=int, default=10) ap.add_argument(\"--scale\", type=float, default=1.0) args = ap.parse_args()", "= [] for obj in tree.findall(\"object\"): cls = obj.find(\"name\").text bbox", "\"datasets/licenseplates\", args.split) dataset_dicts = DatasetCatalog.get(dataset_name) for d in random.sample(dataset_dicts, args.samples):", "= visualizer.draw_dataset_dict(d) cv2.imshow(dataset_name, vis.get_image()[:, :, ::-1]) # Exit? 
Press ESC", "+ \".xml\") jpeg_file = os.path.join(dirname, \"images\", fileid + \".jpg\") tree", "as f: fileids = np.loadtxt(f, dtype=np.str) dicts = [] for", "############################################################################## # # Below code is inspired on # https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py", "-------------------------------------------------------- # Detectron2 # Licensed under the Apache 2.0 license.", "Visualizer(img[:, :, ::-1], metadata=MetadataCatalog.get(dataset_name), scale=args.scale) vis = visualizer.draw_dataset_dict(d) cv2.imshow(dataset_name, vis.get_image()[:,", "numpy as np import xml.etree.ElementTree as ET from detectron2.structures import", "(str): one of \"train\", \"test\" \"\"\" with PathManager.open(os.path.join(dirname, split +", "split: str): \"\"\" Load licenseplates VOC detection annotations to Detectron2", "from fvcore.common.file_io import PathManager import os import numpy as np", "bbox, \"bbox_mode\": BoxMode.XYXY_ABS} ) r[\"annotations\"] = instances dicts.append(r) return dicts", "from detectron2.data import DatasetCatalog, MetadataCatalog __all__ = [\"register_licenseplates_voc\"] CLASS_NAMES =", "jpeg_file = os.path.join(dirname, \"images\", fileid + \".jpg\") tree = ET.parse(anno_file)", "https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/pascal_voc.py # -------------------------------------------------------- # Detectron2 # Licensed under the Apache" ]
[ "as fin: result = fin.read() end = time.time() print(\"%.1f images/s\"", "end = time.time() print(\"%.1f images/s\" % (10000 / (end -", "glob.glob('/mnt/lustre/chenyuntao1/datasets/imagenet/train/*/*') random.shuffle(filelist) begin = time.time() for i, f in enumerate(filelist):", "time import random filelist = glob.glob('/mnt/lustre/chenyuntao1/datasets/imagenet/train/*/*') random.shuffle(filelist) begin = time.time()", "fin: result = fin.read() end = time.time() print(\"%.1f images/s\" %", "= glob.glob('/mnt/lustre/chenyuntao1/datasets/imagenet/train/*/*') random.shuffle(filelist) begin = time.time() for i, f in", "i, f in enumerate(filelist): if i == 10000: break with", "open(f, \"rb\") as fin: result = fin.read() end = time.time()", "10000: break with open(f, \"rb\") as fin: result = fin.read()", "\"rb\") as fin: result = fin.read() end = time.time() print(\"%.1f", "= fin.read() end = time.time() print(\"%.1f images/s\" % (10000 /", "time.time() for i, f in enumerate(filelist): if i == 10000:", "f in enumerate(filelist): if i == 10000: break with open(f,", "import time import random filelist = glob.glob('/mnt/lustre/chenyuntao1/datasets/imagenet/train/*/*') random.shuffle(filelist) begin =", "filelist = glob.glob('/mnt/lustre/chenyuntao1/datasets/imagenet/train/*/*') random.shuffle(filelist) begin = time.time() for i, f", "enumerate(filelist): if i == 10000: break with open(f, \"rb\") as", "with open(f, \"rb\") as fin: result = fin.read() end =", "i == 10000: break with open(f, \"rb\") as fin: result", "break with open(f, \"rb\") as fin: result = fin.read() end", "= time.time() print(\"%.1f images/s\" % (10000 / (end - begin)))", "== 10000: break with open(f, \"rb\") as fin: result =", "import glob import time import random filelist = glob.glob('/mnt/lustre/chenyuntao1/datasets/imagenet/train/*/*') random.shuffle(filelist)", "random.shuffle(filelist) begin = time.time() for i, f in enumerate(filelist): if", "result = fin.read() end = time.time() print(\"%.1f 
images/s\" % (10000", "if i == 10000: break with open(f, \"rb\") as fin:", "fin.read() end = time.time() print(\"%.1f images/s\" % (10000 / (end", "glob import time import random filelist = glob.glob('/mnt/lustre/chenyuntao1/datasets/imagenet/train/*/*') random.shuffle(filelist) begin", "for i, f in enumerate(filelist): if i == 10000: break", "import random filelist = glob.glob('/mnt/lustre/chenyuntao1/datasets/imagenet/train/*/*') random.shuffle(filelist) begin = time.time() for", "begin = time.time() for i, f in enumerate(filelist): if i", "in enumerate(filelist): if i == 10000: break with open(f, \"rb\")", "random filelist = glob.glob('/mnt/lustre/chenyuntao1/datasets/imagenet/train/*/*') random.shuffle(filelist) begin = time.time() for i,", "= time.time() for i, f in enumerate(filelist): if i ==" ]
[ "simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet boundary condition with", "\"\"\" super().__init__( V, value, subdomain_marker, subdomain_id, method='geometric' ) self.simulation =", "%r for %s' % (value, var_name)) @register_boundary_condition('CodedValue') class CodedDirichletBoundary(BoundaryConditionCreator): description", "self.func_space = simulation.data['V%s' % var_name] # Make a dolfin Expression", "Returns the ds measure of the subdomain \"\"\" return self.simulation.data['ds'](self.subdomain_id)", ") field = simulation.fields[vardef.split('/')[0]] # The expression value is updated", "update our flux to make the total sum to zero", ") self.simulation = simulation self._value = value self.subdomain_marker = subdomain_marker", "for Dirichlet boundary conditions \"\"\" super().__init__( V, value, subdomain_marker, subdomain_id,", "coded value \"\"\" self.simulation = simulation if var_name[-1].isdigit(): # A", "inp_dict.get_value('function', required_type='any') field.register_dependent_field(self) self.flux = dolfin.Constant(1.0) # Create the bc", "(C) 2015-2019 <NAME> # SPDX-License-Identifier: Apache-2.0 import dolfin from .", "class OcellarisDirichletBC(dolfin.DirichletBC): def __init__( self, simulation, V, value, subdomain_marker, subdomain_id,", "else: description = 'coded value boundary condition for %s' %", "inp_dict.get_value('function', required_type='any') description = 'boundary condititon for %s' % var_name", "vardef ] else: expr = verify_field_variable_definition(simulation, vardef, description) if expr.ufl_shape", "self.func_space = simulation.data['V%s' % var_name] # Get the field function", "field is changed inp_dict.get_value('function', required_type='any') field.register_dependent_field(self) self.flux = dolfin.Constant(1.0) #", "description ) field = simulation.fields[vardef.split('/')[0]] # The expression value is", "\"u\" was given. 
Look up \"Vu\" self.func_space = simulation.data['V%s' %", "= 'A C++ coded Dirichlet condition' def __init__(self, simulation, var_name,", "else: # A var_name like \"u\" was given. Look up", "simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with coded", "\"\"\" Add a Dirichlet condition to this variable \"\"\" if", "def ds(self): \"\"\" Returns the ds measure of the subdomain", "'%s%d' % (var_name, d) self.register_dirichlet_condition( name, value[d], subdomains, subdomain_id )", "constant value Dirichlet condition' def __init__(self, simulation, var_name, inp_dict, subdomains,", "condition to this variable \"\"\" if not isinstance(value, (float, int)):", "expr = CodedExpression(simulation, code, description) self.register_dirichlet_condition(var_name, expr, subdomains, subdomain_id) def", "flux mesh = simulation.data['mesh'] self.area = dolfin.assemble(self.flux * bc.ds()(domain=mesh)) self.region_names", "RK substeps \"\"\" if self._updater: self._updater( self.simulation.timestep, self.simulation.time, self.simulation.dt )", "dolfin from . 
import register_boundary_condition, BoundaryConditionCreator from ocellaris.utils import (", "that compensates for non-zero total flux of a known velocity", "((simulation.ndim,), expr.ufl_shape) exprs = [expr[d] for d in range(simulation.ndim)] else:", "'%s%d' % (var_name, d) self.register_dirichlet_condition( name, exprs[d], subdomains, subdomain_id )", "code = inp_dict.get_value('cpp_code', required_type='any') if isinstance(code, list): assert len(code) ==", "value boundary condition for %s' % var_name expr = CodedExpression(simulation,", "Expression object that runs the code string code = inp_dict.get_value('code',", "a number' % value, ) df_value = dolfin.Constant(value) # Store", "and for all RK substeps \"\"\" if self._updater: self._updater( self.simulation.timestep,", "expr.ufl_shape == ( simulation.ndim, ), 'Expected shape %r got %r'", "then update the flux mesh = simulation.data['mesh'] self.area = dolfin.assemble(self.flux", "C++ coded value for %s' % var_name) @register_boundary_condition('FieldFunction') class FieldFunctionDirichletBoundary(BoundaryConditionCreator):", "exprs = [ verify_field_variable_definition(simulation, vd, description) for vd in vardef", "a copy with a new function space. Used when converting", "SPDX-License-Identifier: Apache-2.0 import dolfin from . 
import register_boundary_condition, BoundaryConditionCreator from", "range(simulation.ndim)] else: exprs = [expr] # Register BCs if len(exprs)", "\"\"\" Dirichlet condition with constant value \"\"\" self.simulation = simulation", "object vardef = inp_dict.get_value('function', required_type='any') description = 'boundary condititon for", "Store the boundary condition for use in the solver \"\"\"", "a segregated solver (default) to BCs for a coupled solver", "() bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id )", "BoundaryConditionCreator from ocellaris.utils import ( CodedExpression, OcellarisCppExpression, OcellarisError, verify_field_variable_definition, )", "values from a field function' def __init__(self, simulation, var_name, inp_dict,", "'The value %r is not a number' % value, )", "for %s' % var_name) @register_boundary_condition('FieldVelocityValve') class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description = 'A", "= '%s%d' % (var_name, d) self.register_dirichlet_condition( name, exprs[d], subdomains, subdomain_id", "a Dirichlet condition to this variable \"\"\" if not isinstance(value,", "description = 'A C++ coded Dirichlet condition' def __init__(self, simulation,", "mesh = simulation.data['mesh'] self.area = dolfin.assemble(self.flux * bc.ds()(domain=mesh)) self.region_names =", "this variable \"\"\" if not isinstance(value, (float, int)): raise OcellarisError(", "= simulation.data['V%s' % var_name] # Make a dolfin Expression object", "the boundary condition for use in the solver bc =", "simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with C++", "\"\"\" regions = self.simulation.data['boundary'] mesh = self.simulation.data['mesh'] n = dolfin.FacetNormal(mesh)", "else: exprs = [expr] # Register BCs if len(exprs) >", "from a field function' def __init__(self, simulation, var_name, inp_dict, subdomains,", "self.register_dirichlet_condition(var_name, 
value, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, value, subdomains, subdomain_id):", "every timestep and for all RK substeps \"\"\" if self._updater:", "var_name] # Make a dolfin Expression object that runs the", "the region area, then update the flux mesh = simulation.data['mesh']", "field function expression object vardef = inp_dict.get_value('function', required_type='any') description =", "Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name] value =", "copy with a new function space. Used when converting from", "% var_name[:-1]] else: # A var_name like \"u\" was given.", "== len(self.region_names) # FIXME: assumes n is pointing outwards along", "isinstance(vardef, list): assert len(vardef) == simulation.ndim exprs = [ verify_field_variable_definition(simulation,", "value, ) df_value = dolfin.Constant(value) # Store the boundary condition", "a field function \"\"\" self.simulation = simulation # A var_name", "self._value, self.subdomain_marker, self.subdomain_id ) def update(self): \"\"\" Update the time", "self.simulation.data['ds'](self.subdomain_id) def copy_and_change_function_space(self, V): \"\"\" Return a copy with a", "like \"u\" was given. 
Look up \"Vu\" self.func_space = simulation.data['V%s'", "d in range(simulation.ndim)] else: exprs = [expr] # Register BCs", "def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): \"\"\" Store the boundary", "a dolfin Expression object that runs the code string code", "is pointing outwards along the axis in the positive #", "for a segregated solver (default) to BCs for a coupled", "= OcellarisCppExpression( self.simulation, cpp_code, description, P, return_updater=True ) bc =", "self.simulation.log.info(' Field velocity valve for %s' % var_name) # Compute", "simulation.ndim for d in range(simulation.ndim): name = '%s%d' % (var_name,", "of a known velocity field' def __init__(self, simulation, var_name, inp_dict,", "subdomains, subdomain_id): \"\"\" Dirichlet condition with constant value \"\"\" self.simulation", "region.ds() flux += dolfin.assemble(f) count += 1 assert count ==", "BCs for a coupled solver \"\"\" return OcellarisDirichletBC( self.simulation, V,", "def __repr__(self): return '<OcellarisDirichletBC on subdomain %d>' % self.subdomain_id @register_boundary_condition('ConstantValue')", "self.register_dirichlet_condition( name, exprs[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition( var_name, exprs[0],", "the axis in the positive # direction in this boundary", "dolfin.dot(self.velocity, n) * region.ds() flux += dolfin.assemble(f) count += 1", "= simulation if var_name[-1].isdigit(): # A var_name like \"u0\" was", "= [expr] # Register BCs if len(exprs) > 1: for", "subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Coded", "code string code = inp_dict.get_value('cpp_code', required_type='any') if isinstance(code, list): assert", "use in the solver \"\"\" description = 'boundary condititon for", "name, value[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, value, subdomains, subdomain_id)", 
"self.register_dirichlet_condition(name, expr, subdomains, subdomain_id) else: description = 'coded value boundary", "'boundary condititon for %s' % var_name P = self.func_space.ufl_element().degree() expr,", "other parameters used in the BC. This is used every", "condition with C++ coded value \"\"\" self.simulation = simulation if", "V): \"\"\" Return a copy with a new function space.", "\"\"\" return OcellarisDirichletBC( self.simulation, V, self._value, self.subdomain_marker, self.subdomain_id ) def", "(var_name, d) sub_code = inp_dict.get_value('cpp_code/%d' % d, required_type='string') self.register_dirichlet_condition( name,", "verify_field_variable_definition(simulation, vd, description) for vd in vardef ] else: expr", "self.func_space = simulation.data['V%s' % var_name[:-1]] # Get the field function", "for %s' % var_name, 'The value %r is not a", "var_name if isinstance(vardef, list): assert len(vardef) == simulation.ndim exprs =", "self.func_space.ufl_element().degree() expr, updater = OcellarisCppExpression( self.simulation, cpp_code, description, P, return_updater=True", "if region.name in self.region_names: f = dolfin.dot(self.velocity, n) * region.ds()", "return '<OcellarisDirichletBC on subdomain %d>' % self.subdomain_id @register_boundary_condition('ConstantValue') class ConstantDirichletBoundary(BoundaryConditionCreator):", "return self.simulation.data['ds'](self.subdomain_id) def copy_and_change_function_space(self, V): \"\"\" Return a copy with", "else: expr = verify_field_variable_definition(simulation, vardef, description) if expr.ufl_shape != ():", "exprs = [expr] # Register BCs if len(exprs) > 1:", "\"\"\" if not isinstance(value, (float, int)): raise OcellarisError( 'Error in", "for %s' % name sub_code = inp_dict.get_value('code/%d' % d, required_type='string')", "given. 
Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]] else:", ") bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id, updater=updater,", "in self.region_names: f = dolfin.dot(self.velocity, n) * region.ds() flux +=", "class for Dirichlet boundary conditions \"\"\" super().__init__( V, value, subdomain_marker,", "= simulation self._value = value self.subdomain_marker = subdomain_marker self.subdomain_id =", ") bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' C++ coded value", "given. Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name] value", "Dirichlet condition to this variable \"\"\" if not isinstance(value, (float,", "var_name, 'The value %r is not a number' % value,", "subdomain_id) def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): \"\"\" Store the", "subdomain_id): \"\"\" Dirichlet condition with C++ coded value \"\"\" self.simulation", "= '%s%d' % (var_name, d) sub_code = inp_dict.get_value('cpp_code/%d' % d,", "updater=updater, ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' C++ coded", "required_type='any') description = 'boundary condititon for %s' % var_name if", "+= 1 assert count == len(self.region_names) # FIXME: assumes n", "new function space. 
Used when converting from BCs for a", "on subdomain %d>' % self.subdomain_id @register_boundary_condition('ConstantValue') class ConstantDirichletBoundary(BoundaryConditionCreator): description =", "is not a number' % value, ) df_value = dolfin.Constant(value)", "OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id, updater=updater, ) bcs =", "subdomain \"\"\" return self.simulation.data['ds'](self.subdomain_id) def copy_and_change_function_space(self, V): \"\"\" Return a", "OcellarisCppExpression, OcellarisError, verify_field_variable_definition, ) class OcellarisDirichletBC(dolfin.DirichletBC): def __init__( self, simulation,", "bc = OcellarisDirichletBC( self.simulation, self.func_space, self.flux, subdomains, subdomain_id ) bcs", "= simulation.data['V%s' % var_name[:-1]] else: # A var_name like \"u\"", "'boundary condititon for %s' % var_name if isinstance(vardef, list): assert", "simulation.ndim, ), 'Expected shape %r got %r' % ((simulation.ndim,), expr.ufl_shape)", "Copyright (C) 2015-2019 <NAME> # SPDX-License-Identifier: Apache-2.0 import dolfin from", "= self.simulation.data['boundary'] mesh = self.simulation.data['mesh'] n = dolfin.FacetNormal(mesh) flux =", "df_value, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info('", ") def __repr__(self): return '<OcellarisDirichletBC on subdomain %d>' % self.subdomain_id", "if var_name[-1].isdigit(): # A var_name like \"u0\" was given. Look", "function space. Used when converting from BCs for a segregated", "\"u0\" was given. 
Look up \"Vu\" self.func_space = simulation.data['V%s' %", "boundary condition for %s' % name sub_code = inp_dict.get_value('code/%d' %", "OcellarisCppExpression( self.simulation, cpp_code, description, P, return_updater=True ) bc = OcellarisDirichletBC(", "Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name] # Get", "1 assert count == len(self.region_names) # FIXME: assumes n is", "\"\"\" return self._value def ds(self): \"\"\" Returns the ds measure", "'%s%d' % (var_name, d) description = 'coded value boundary condition", "the code string code = inp_dict.get_value('cpp_code', required_type='any') if isinstance(code, list):", "self.register_dirichlet_condition( name, sub_code, subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, code, subdomains,", "value self.subdomain_marker = subdomain_marker self.subdomain_id = subdomain_id self._updater = updater", "the boundary condition for use in the solver \"\"\" assert", "required_type='any') if isinstance(value, list): assert len(value) == simulation.ndim for d", "self.func_space = simulation.data['V%s' % var_name[:-1]] else: # A var_name like", "bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Constant value %r for %s' % (value,", "value is updated as the field is changed inp_dict.get_value('function', required_type='any')", "value, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, value, subdomains, subdomain_id): \"\"\"", "updater=None ): \"\"\" A simple storage class for Dirichlet boundary", "description) self.register_dirichlet_condition(name, expr, subdomains, subdomain_id) else: description = 'coded value", "= self.func_space.ufl_element().degree() expr, updater = OcellarisCppExpression( self.simulation, cpp_code, description, P,", "description = 'A coded Dirichlet condition' def __init__(self, simulation, var_name,", "CodedExpression, OcellarisCppExpression, OcellarisError, verify_field_variable_definition, ) class 
OcellarisDirichletBC(dolfin.DirichletBC): def __init__( self,", "%s' % var_name) @register_boundary_condition('FieldFunction') class FieldFunctionDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet", "required_type='string') expr = CodedExpression(simulation, sub_code, description) self.register_dirichlet_condition(name, expr, subdomains, subdomain_id)", "value for %s' % var_name) @register_boundary_condition('CppCodedValue') class CppCodedDirichletBoundary(BoundaryConditionCreator): description =", "in the BC. This is used every timestep and for", "conditions \"\"\" super().__init__( V, value, subdomain_marker, subdomain_id, method='geometric' ) self.simulation", "subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field velocity", "subdomains, subdomain_id): \"\"\" Dirichlet condition with C++ coded value \"\"\"", "field has changed, update our flux to make the total", "len(self.region_names) # FIXME: assumes n is pointing outwards along the", "ds measure of the subdomain \"\"\" return self.simulation.data['ds'](self.subdomain_id) def copy_and_change_function_space(self,", "known velocity field' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id):", "required_type='any') description = 'boundary condititon for %s' % var_name self.velocity", "self.area = dolfin.assemble(self.flux * bc.ds()(domain=mesh)) self.region_names = inp_dict.get_value('regions', required_type='list(string)') self.update()", ") def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): \"\"\" Store the", ") def update(self): \"\"\" Update the time and other parameters", "self.register_dirichlet_condition( var_name, exprs[0], subdomains, subdomain_id ) def register_dirichlet_condition(self, var_name, expr,", "self.subdomain_marker, self.subdomain_id ) def update(self): \"\"\" Update the time and", "), 'Expected shape %r got %r' % ((simulation.ndim,), 
expr.ufl_shape) exprs", "self._value = value self.subdomain_marker = subdomain_marker self.subdomain_id = subdomain_id self._updater", "count = 0 for region in regions: if region.name in", "simulation.data['V%s' % var_name] # Get the field function expression object", "for %s' % var_name self.velocity = verify_field_variable_definition( simulation, vardef, description", "self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' C++ coded value for %s' %", "name, exprs[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition( var_name, exprs[0], subdomains,", "cpp_code, description, P, return_updater=True ) bc = OcellarisDirichletBC( self.simulation, self.func_space,", "d) sub_code = inp_dict.get_value('cpp_code/%d' % d, required_type='string') self.register_dirichlet_condition( name, sub_code,", "simulation.data['V%s' % var_name[:-1]] # Get the field function expression object", "for use in the solver \"\"\" assert expr.ufl_shape == ()", "inp_dict.get_value('code/%d' % d, required_type='string') expr = CodedExpression(simulation, sub_code, description) self.register_dirichlet_condition(name,", "): \"\"\" A simple storage class for Dirichlet boundary conditions", "subdomain_id self._updater = updater def func(self): \"\"\" The boundary value", "function \"\"\" return self._value def ds(self): \"\"\" Returns the ds", "\"\"\" Return a copy with a new function space. 
Used", "condition that compensates for non-zero total flux of a known", "with constant value \"\"\" self.simulation = simulation if var_name[-1].isdigit(): #", "vd, description) for vd in vardef ] else: expr =", "is updated as the field is changed inp_dict.get_value('function', required_type='any') field.register_dependent_field(self)", "flux to make the total sum to zero \"\"\" regions", "subdomains, subdomain_id ) else: self.register_dirichlet_condition( var_name, exprs[0], subdomains, subdomain_id )", "\"\"\" return self.simulation.data['ds'](self.subdomain_id) def copy_and_change_function_space(self, V): \"\"\" Return a copy", "along the axis in the positive # direction in this", "f = dolfin.dot(self.velocity, n) * region.ds() flux += dolfin.assemble(f) count", "self._updater: self._updater( self.simulation.timestep, self.simulation.time, self.simulation.dt ) def __repr__(self): return '<OcellarisDirichletBC", "subdomain_id ): \"\"\" Store the boundary condition for use in", "total flux of a known velocity field' def __init__(self, simulation,", "substeps \"\"\" if self._updater: self._updater( self.simulation.timestep, self.simulation.time, self.simulation.dt ) def", "changed inp_dict.get_value('function', required_type='any') field.register_dependent_field(self) self.flux = dolfin.Constant(1.0) # Create the", "object that runs the code string code = inp_dict.get_value('code', required_type='any')", "up \"Vu\" self.func_space = simulation.data['V%s' % var_name] # Make a", "subdomain_id ) def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): \"\"\" Store", "string code = inp_dict.get_value('code', required_type='any') if isinstance(code, list): assert len(code)", "'boundary condititon for %s' % var_name self.velocity = verify_field_variable_definition( simulation,", "\"\"\" Dirichlet boundary condition with value from a field function", "subdomain_marker, subdomain_id, method='geometric' ) self.simulation = simulation self._value = 
value", "expr.ufl_shape) exprs = [expr[d] for d in range(simulation.ndim)] else: exprs", "name, sub_code, subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, code, subdomains, subdomain_id)", "self.subdomain_id = subdomain_id self._updater = updater def func(self): \"\"\" The", "__init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with", "to BCs for a coupled solver \"\"\" return OcellarisDirichletBC( self.simulation,", "value %r is not a number' % value, ) df_value", "self.simulation.log.info(' Field function value for %s' % var_name) @register_boundary_condition('FieldVelocityValve') class", "\"\"\" A simple storage class for Dirichlet boundary conditions \"\"\"", "for use in the solver bc = OcellarisDirichletBC( self.simulation, self.func_space,", "# Copyright (C) 2015-2019 <NAME> # SPDX-License-Identifier: Apache-2.0 import dolfin", "description = 'boundary condititon for %s' % var_name if isinstance(vardef,", "subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Coded value", "% var_name if isinstance(vardef, list): assert len(vardef) == simulation.ndim exprs", "name = '%s%d' % (var_name, d) description = 'coded value", "[expr[d] for d in range(simulation.ndim)] else: exprs = [expr] #", "Dirichlet boundary conditions \"\"\" super().__init__( V, value, subdomain_marker, subdomain_id, method='geometric'", "= 0 count = 0 for region in regions: if", "subdomain_id): \"\"\" Store the boundary condition for use in the", "dolfin.Constant(1.0) # Create the bc = OcellarisDirichletBC( self.simulation, self.func_space, self.flux,", "for use in the solver \"\"\" bc = OcellarisDirichletBC( self.simulation,", "( simulation.ndim, ), 'Expected shape %r got %r' % ((simulation.ndim,),", "simulation.data['mesh'] self.area = dolfin.assemble(self.flux * bc.ds()(domain=mesh)) self.region_names = inp_dict.get_value('regions', 
required_type='list(string)')", "This is used every timestep and for all RK substeps", "with a new function space. Used when converting from BCs", "from . import register_boundary_condition, BoundaryConditionCreator from ocellaris.utils import ( CodedExpression,", "\"Vu\" self.func_space = simulation.data['V%s' % var_name] # Make a dolfin", "object that runs the code string code = inp_dict.get_value('cpp_code', required_type='any')", "value Dirichlet condition' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id):", "expr.ufl_shape == () bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains,", "description = 'boundary condititon for %s' % var_name self.velocity =", "isinstance(code, list): assert len(code) == simulation.ndim for d in range(simulation.ndim):", "= simulation.data['V%s' % var_name[:-1]] # Get the field function expression", "import ( CodedExpression, OcellarisCppExpression, OcellarisError, verify_field_variable_definition, ) class OcellarisDirichletBC(dolfin.DirichletBC): def", "Field velocity valve for %s' % var_name) # Compute the", "bc.ds()(domain=mesh)) self.region_names = inp_dict.get_value('regions', required_type='list(string)') self.update() def update(self, timestep_number=None, t=None,", "subdomain_id, method='geometric' ) self.simulation = simulation self._value = value self.subdomain_marker", "d, required_type='string') expr = CodedExpression(simulation, sub_code, description) self.register_dirichlet_condition(name, expr, subdomains,", "prescribed constant value Dirichlet condition' def __init__(self, simulation, var_name, inp_dict,", "self.subdomain_id ) def update(self): \"\"\" Update the time and other", "@register_boundary_condition('FieldFunction') class FieldFunctionDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition with values", "self._updater( self.simulation.timestep, self.simulation.time, self.simulation.dt ) def __repr__(self): return 
'<OcellarisDirichletBC on", "self.simulation.dt ) def __repr__(self): return '<OcellarisDirichletBC on subdomain %d>' %", "simulation.data['V%s' % var_name] value = inp_dict.get_value('value', required_type='any') if isinstance(value, list):", "'coded value boundary condition for %s' % var_name expr =", "% name sub_code = inp_dict.get_value('code/%d' % d, required_type='string') expr =", "a new function space. Used when converting from BCs for", "a field function' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id):", "# FIXME: assumes n is pointing outwards along the axis", "\"Vu\" self.func_space = simulation.data['V%s' % var_name] value = inp_dict.get_value('value', required_type='any')", "for use in the solver \"\"\" description = 'boundary condititon", "% var_name] value = inp_dict.get_value('value', required_type='any') if isinstance(value, list): assert", "subdomain_id): \"\"\" Dirichlet condition with coded value \"\"\" self.simulation =", "'A C++ coded Dirichlet condition' def __init__(self, simulation, var_name, inp_dict,", "subdomain_id) def register_dirichlet_condition( self, var_name, cpp_code, subdomains, subdomain_id ): \"\"\"", "to make the total sum to zero \"\"\" regions =", "used in the BC. This is used every timestep and", "= dolfin.FacetNormal(mesh) flux = 0 count = 0 for region", "= 'A Dirichlet condition with values from a field function'", "subdomain %d>' % self.subdomain_id @register_boundary_condition('ConstantValue') class ConstantDirichletBoundary(BoundaryConditionCreator): description = 'A", "% var_name] # Make a dolfin Expression object that runs", "# The expression value is updated as the field is", "simple storage class for Dirichlet boundary conditions \"\"\" super().__init__( V,", "if isinstance(value, list): assert len(value) == simulation.ndim for d in", "# SPDX-License-Identifier: Apache-2.0 import dolfin from . 
import register_boundary_condition, BoundaryConditionCreator", "inp_dict.get_value('cpp_code/%d' % d, required_type='string') self.register_dirichlet_condition( name, sub_code, subdomains, subdomain_id )", "var_name P = self.func_space.ufl_element().degree() expr, updater = OcellarisCppExpression( self.simulation, cpp_code,", "var_name, exprs[0], subdomains, subdomain_id ) def register_dirichlet_condition(self, var_name, expr, subdomains,", "import register_boundary_condition, BoundaryConditionCreator from ocellaris.utils import ( CodedExpression, OcellarisCppExpression, OcellarisError,", "in ConstantValue BC for %s' % var_name, 'The value %r", "in the solver \"\"\" bc = OcellarisDirichletBC( self.simulation, self.func_space, expr,", "Compute the region area, then update the flux mesh =", "(float, int)): raise OcellarisError( 'Error in ConstantValue BC for %s'", "(var_name, d) description = 'coded value boundary condition for %s'", "count += 1 assert count == len(self.region_names) # FIXME: assumes", "\"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]] else: # A var_name", "subdomain_id) def register_dirichlet_condition(self, var_name, value, subdomains, subdomain_id): \"\"\" Add a", "description = 'A Dirichlet condition that compensates for non-zero total", "number' % value, ) df_value = dolfin.Constant(value) # Store the", "self.flux = dolfin.Constant(1.0) # Create the bc = OcellarisDirichletBC( self.simulation,", "\"\"\" Returns the ds measure of the subdomain \"\"\" return", "BCs for a segregated solver (default) to BCs for a", "= self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field function value for %s'", "like \"u0\" should be given. 
Look up \"Vu\" self.func_space =", "condititon for %s' % var_name if isinstance(vardef, list): assert len(vardef)", "Constant value %r for %s' % (value, var_name)) @register_boundary_condition('CodedValue') class", "\"\"\" self.simulation = simulation # A var_name like \"u0\" should", "self.func_space, df_value, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc)", "FieldFunctionDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition with values from a", "# A var_name like \"u0\" should be given. Look up", "[]).append(bc) self.simulation.log.info(' Field velocity valve for %s' % var_name) #", "subdomain_id ) else: self.register_dirichlet_condition( var_name, exprs[0], subdomains, subdomain_id ) def", ") class OcellarisDirichletBC(dolfin.DirichletBC): def __init__( self, simulation, V, value, subdomain_marker,", "function \"\"\" self.simulation = simulation # A var_name like \"u0\"", "%s' % var_name expr = CodedExpression(simulation, code, description) self.register_dirichlet_condition(var_name, expr,", "len(code) == simulation.ndim for d in range(simulation.ndim): name = '%s%d'", "the ds measure of the subdomain \"\"\" return self.simulation.data['ds'](self.subdomain_id) def", "if self._updater: self._updater( self.simulation.timestep, self.simulation.time, self.simulation.dt ) def __repr__(self): return", "with value from a field function \"\"\" self.simulation = simulation", "updated as the field is changed inp_dict.get_value('function', required_type='any') field.register_dependent_field(self) self.flux", "function' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet", "(var_name, d) self.register_dirichlet_condition( name, exprs[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition(", "inp_dict.get_value('code', required_type='any') if isinstance(code, list): assert len(code) == simulation.ndim for", "t=None, dt=None): 
\"\"\" The main field has changed, update our", "solver bc = OcellarisDirichletBC( self.simulation, self.func_space, df_value, subdomains, subdomain_id )", "parameters used in the BC. This is used every timestep", "exprs = [expr[d] for d in range(simulation.ndim)] else: exprs =", "given. Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]] #", "+= dolfin.assemble(f) count += 1 assert count == len(self.region_names) #", "in range(simulation.ndim): name = '%s%d' % (var_name, d) self.register_dirichlet_condition( name,", "len(value) == simulation.ndim for d in range(simulation.ndim): name = '%s%d'", "% var_name expr = CodedExpression(simulation, code, description) self.register_dirichlet_condition(var_name, expr, subdomains,", "= OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id ) bcs =", "condition for use in the solver \"\"\" assert expr.ufl_shape ==", "the field function expression object vardef = inp_dict.get_value('function', required_type='any') description", "the BC. 
This is used every timestep and for all", "self._value def ds(self): \"\"\" Returns the ds measure of the", "subdomains, subdomain_id): \"\"\" Dirichlet boundary condition with value from a", "converting from BCs for a segregated solver (default) to BCs", "= verify_field_variable_definition(simulation, vardef, description) if expr.ufl_shape != (): assert expr.ufl_shape", "self.simulation.timestep, self.simulation.time, self.simulation.dt ) def __repr__(self): return '<OcellarisDirichletBC on subdomain", "self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Constant value %r for %s' %", "solver \"\"\" assert expr.ufl_shape == () bc = OcellarisDirichletBC( self.simulation,", "var_name[:-1]] # Get the field function expression object vardef =", "self.simulation, self.func_space, expr, subdomains, subdomain_id, updater=updater, ) bcs = self.simulation.data['dirichlet_bcs']", "for %s' % var_name) @register_boundary_condition('FieldFunction') class FieldFunctionDirichletBoundary(BoundaryConditionCreator): description = 'A", "\"\"\" description = 'boundary condititon for %s' % var_name P", "self.flux, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info('", "isinstance(value, (float, int)): raise OcellarisError( 'Error in ConstantValue BC for", "field' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet", "'A Dirichlet condition that compensates for non-zero total flux of", "Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name] # Make", "C++ coded Dirichlet condition' def __init__(self, simulation, var_name, inp_dict, subdomains,", "for %s' % var_name) # Compute the region area, then", "!= (): assert expr.ufl_shape == ( simulation.ndim, ), 'Expected shape", "inp_dict.get_value('value', required_type='any') if isinstance(value, list): assert len(value) == simulation.ndim for", "is used 
every timestep and for all RK substeps \"\"\"", "'coded value boundary condition for %s' % name sub_code =", "= 'boundary condititon for %s' % var_name if isinstance(vardef, list):", "n is pointing outwards along the axis in the positive", "description) self.register_dirichlet_condition(var_name, expr, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, expr, subdomains,", "code string code = inp_dict.get_value('code', required_type='any') if isinstance(code, list): assert", "= inp_dict.get_value('cpp_code/%d' % d, required_type='string') self.register_dirichlet_condition( name, sub_code, subdomains, subdomain_id", "for %s' % var_name P = self.func_space.ufl_element().degree() expr, updater =", "total sum to zero \"\"\" regions = self.simulation.data['boundary'] mesh =", "field = simulation.fields[vardef.split('/')[0]] # The expression value is updated as", "var_name) @register_boundary_condition('CppCodedValue') class CppCodedDirichletBoundary(BoundaryConditionCreator): description = 'A C++ coded Dirichlet", "subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, value, subdomains, subdomain_id) def register_dirichlet_condition(self,", "<NAME> # SPDX-License-Identifier: Apache-2.0 import dolfin from . import register_boundary_condition,", "subdomain_id) else: description = 'coded value boundary condition for %s'", "self.simulation.time, self.simulation.dt ) def __repr__(self): return '<OcellarisDirichletBC on subdomain %d>'", "= inp_dict.get_value('value', required_type='any') if isinstance(value, list): assert len(value) == simulation.ndim", "condition with constant value \"\"\" self.simulation = simulation if var_name[-1].isdigit():", "to zero \"\"\" regions = self.simulation.data['boundary'] mesh = self.simulation.data['mesh'] n", "self.velocity = verify_field_variable_definition( simulation, vardef, description ) field = simulation.fields[vardef.split('/')[0]]", "space. 
Used when converting from BCs for a segregated solver", "\"\"\" The boundary value derivative function \"\"\" return self._value def", "time and other parameters used in the BC. This is", "condition with values from a field function' def __init__(self, simulation,", "zero \"\"\" regions = self.simulation.data['boundary'] mesh = self.simulation.data['mesh'] n =", "self.simulation, self.func_space, df_value, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name,", "condition with value from a field function \"\"\" self.simulation =", "%s' % var_name self.velocity = verify_field_variable_definition( simulation, vardef, description )", "% var_name[:-1]] # Get the field function expression object vardef", "a coupled solver \"\"\" return OcellarisDirichletBC( self.simulation, V, self._value, self.subdomain_marker,", "update(self): \"\"\" Update the time and other parameters used in", "var_name)) @register_boundary_condition('CodedValue') class CodedDirichletBoundary(BoundaryConditionCreator): description = 'A coded Dirichlet condition'", "super().__init__( V, value, subdomain_marker, subdomain_id, method='geometric' ) self.simulation = simulation", "inp_dict.get_value('regions', required_type='list(string)') self.update() def update(self, timestep_number=None, t=None, dt=None): \"\"\" The", "d) self.register_dirichlet_condition( name, value[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, value,", "condititon for %s' % var_name self.velocity = verify_field_variable_definition( simulation, vardef,", "self.simulation.log.info(' Coded value for %s' % var_name) @register_boundary_condition('CppCodedValue') class CppCodedDirichletBoundary(BoundaryConditionCreator):", ") else: self.register_dirichlet_condition(var_name, value, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, value,", "a known velocity field' def __init__(self, simulation, var_name, inp_dict, subdomains,", "# 
Create the bc = OcellarisDirichletBC( self.simulation, self.func_space, self.flux, subdomains,", "= self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Constant value %r for %s'", "def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet boundary", "when converting from BCs for a segregated solver (default) to", "self.simulation.log.info(' Constant value %r for %s' % (value, var_name)) @register_boundary_condition('CodedValue')", "regions = self.simulation.data['boundary'] mesh = self.simulation.data['mesh'] n = dolfin.FacetNormal(mesh) flux", "sub_code, description) self.register_dirichlet_condition(name, expr, subdomains, subdomain_id) else: description = 'coded", "required_type='any') field.register_dependent_field(self) self.flux = dolfin.Constant(1.0) # Create the bc =", "description = 'coded value boundary condition for %s' % name", "value from a field function \"\"\" self.simulation = simulation if", "__init__( self, simulation, V, value, subdomain_marker, subdomain_id, updater=None ): \"\"\"", "Update the time and other parameters used in the BC.", "= self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field velocity valve for %s'", "] else: expr = verify_field_variable_definition(simulation, vardef, description) if expr.ufl_shape !=", "boundary condition with value from a field function \"\"\" self.simulation", "self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field velocity valve for %s' %", "cpp_code, subdomains, subdomain_id ): \"\"\" Store the boundary condition for", "BCs if len(exprs) > 1: for d in range(simulation.ndim): name", "ds(self): \"\"\" Returns the ds measure of the subdomain \"\"\"", "self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field function value for %s' %", "bc = 
OcellarisDirichletBC( self.simulation, self.func_space, df_value, subdomains, subdomain_id ) bcs", "use in the solver \"\"\" bc = OcellarisDirichletBC( self.simulation, self.func_space,", "region area, then update the flux mesh = simulation.data['mesh'] self.area", "= 0 for region in regions: if region.name in self.region_names:", "subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): \"\"\" Store", "boundary condition for use in the solver \"\"\" bc =", "dolfin.assemble(f) count += 1 assert count == len(self.region_names) # FIXME:", "int)): raise OcellarisError( 'Error in ConstantValue BC for %s' %", "expr, subdomains, subdomain_id, updater=updater, ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc)", "boundary condition for use in the solver \"\"\" assert expr.ufl_shape", "axis in the positive # direction in this boundary region", "the boundary condition for use in the solver \"\"\" bc", "Return a copy with a new function space. 
Used when", "V, self._value, self.subdomain_marker, self.subdomain_id ) def update(self): \"\"\" Update the", "Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]] # Get", "the solver \"\"\" description = 'boundary condititon for %s' %", "\"\"\" The main field has changed, update our flux to", "Expression object that runs the code string code = inp_dict.get_value('cpp_code',", "(value, var_name)) @register_boundary_condition('CodedValue') class CodedDirichletBoundary(BoundaryConditionCreator): description = 'A coded Dirichlet", "subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field function", "Field function value for %s' % var_name) @register_boundary_condition('FieldVelocityValve') class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator):", "self.simulation.log.info(' C++ coded value for %s' % var_name) @register_boundary_condition('FieldFunction') class", "CppCodedDirichletBoundary(BoundaryConditionCreator): description = 'A C++ coded Dirichlet condition' def __init__(self,", "class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition that compensates for", "Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]] else: #", "bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Constant value %r for", "% var_name) @register_boundary_condition('CppCodedValue') class CppCodedDirichletBoundary(BoundaryConditionCreator): description = 'A C++ coded", "% var_name) @register_boundary_condition('FieldFunction') class FieldFunctionDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition", "OcellarisError, verify_field_variable_definition, ) class OcellarisDirichletBC(dolfin.DirichletBC): def __init__( self, simulation, V,", "OcellarisDirichletBC(dolfin.DirichletBC): def __init__( self, simulation, V, value, 
subdomain_marker, subdomain_id, updater=None", "our flux to make the total sum to zero \"\"\"", "value = inp_dict.get_value('value', required_type='any') if isinstance(value, list): assert len(value) ==", "value from a field function \"\"\" self.simulation = simulation #", "runs the code string code = inp_dict.get_value('code', required_type='any') if isinstance(code,", ") bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field function value", "self.simulation.data['boundary'] mesh = self.simulation.data['mesh'] n = dolfin.FacetNormal(mesh) flux = 0", ") else: self.register_dirichlet_condition(var_name, code, subdomains, subdomain_id) def register_dirichlet_condition( self, var_name,", "0 for region in regions: if region.name in self.region_names: f", "ConstantDirichletBoundary(BoundaryConditionCreator): description = 'A prescribed constant value Dirichlet condition' def", "simulation.ndim exprs = [ verify_field_variable_definition(simulation, vd, description) for vd in", "Create the bc = OcellarisDirichletBC( self.simulation, self.func_space, self.flux, subdomains, subdomain_id", "Add a Dirichlet condition to this variable \"\"\" if not", "bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Coded value for %s' % var_name) @register_boundary_condition('CppCodedValue')", "self.register_dirichlet_condition( name, value[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, value, subdomains,", "Coded value for %s' % var_name) @register_boundary_condition('CppCodedValue') class CppCodedDirichletBoundary(BoundaryConditionCreator): description", "assert expr.ufl_shape == ( simulation.ndim, ), 'Expected shape %r got", "var_name, value, subdomains, subdomain_id): \"\"\" Add a Dirichlet condition to", "code, description) self.register_dirichlet_condition(var_name, expr, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, expr,", "A var_name like 
\"u0\" was given. Look up \"Vu\" self.func_space", "register_dirichlet_condition( self, var_name, cpp_code, subdomains, subdomain_id ): \"\"\" Store the", "\"\"\" Store the boundary condition for use in the solver", "= dolfin.Constant(value) # Store the boundary condition for use in", "inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet boundary condition with value from", "'<OcellarisDirichletBC on subdomain %d>' % self.subdomain_id @register_boundary_condition('ConstantValue') class ConstantDirichletBoundary(BoundaryConditionCreator): description", "field function \"\"\" self.simulation = simulation # A var_name like", "= dolfin.Constant(1.0) # Create the bc = OcellarisDirichletBC( self.simulation, self.func_space,", "= dolfin.assemble(self.flux * bc.ds()(domain=mesh)) self.region_names = inp_dict.get_value('regions', required_type='list(string)') self.update() def", "the total sum to zero \"\"\" regions = self.simulation.data['boundary'] mesh", "inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with constant value \"\"\"", "was given. 
Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]]", "flux = 0 count = 0 for region in regions:", "else: self.register_dirichlet_condition(var_name, value, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, value, subdomains,", "bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id, updater=updater, )", "= self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Coded value for %s' %", "= dolfin.dot(self.velocity, n) * region.ds() flux += dolfin.assemble(f) count +=", "description = 'coded value boundary condition for %s' % var_name", "expr.ufl_shape != (): assert expr.ufl_shape == ( simulation.ndim, ), 'Expected", "%r got %r' % ((simulation.ndim,), expr.ufl_shape) exprs = [expr[d] for", "up \"Vu\" self.func_space = simulation.data['V%s' % var_name] value = inp_dict.get_value('value',", "the field is changed inp_dict.get_value('function', required_type='any') field.register_dependent_field(self) self.flux = dolfin.Constant(1.0)", "assert len(vardef) == simulation.ndim exprs = [ verify_field_variable_definition(simulation, vd, description)", "compensates for non-zero total flux of a known velocity field'", "\"\"\" Update the time and other parameters used in the", "with coded value \"\"\" self.simulation = simulation if var_name[-1].isdigit(): #", "self.update() def update(self, timestep_number=None, t=None, dt=None): \"\"\" The main field", "= verify_field_variable_definition( simulation, vardef, description ) field = simulation.fields[vardef.split('/')[0]] #", ") bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Constant value %r", "if len(exprs) > 1: for d in range(simulation.ndim): name =", "simulation.fields[vardef.split('/')[0]] # The expression value is updated as the field", "subdomains, subdomain_id ): \"\"\" Store the boundary condition for use", "% var_name, 'The 
value %r is not a number' %", "'A coded Dirichlet condition' def __init__(self, simulation, var_name, inp_dict, subdomains,", "Dirichlet condition with constant value \"\"\" self.simulation = simulation if", "inp_dict.get_value('cpp_code', required_type='any') if isinstance(code, list): assert len(code) == simulation.ndim for", "% ((simulation.ndim,), expr.ufl_shape) exprs = [expr[d] for d in range(simulation.ndim)]", "def __init__( self, simulation, V, value, subdomain_marker, subdomain_id, updater=None ):", "expression object vardef = inp_dict.get_value('function', required_type='any') description = 'boundary condititon", "= 'coded value boundary condition for %s' % name sub_code", "# Compute the region area, then update the flux mesh", "= simulation.data['mesh'] self.area = dolfin.assemble(self.flux * bc.ds()(domain=mesh)) self.region_names = inp_dict.get_value('regions',", "outwards along the axis in the positive # direction in", "== simulation.ndim exprs = [ verify_field_variable_definition(simulation, vd, description) for vd", "\"u0\" should be given. 
Look up \"Vu\" self.func_space = simulation.data['V%s'", "in vardef ] else: expr = verify_field_variable_definition(simulation, vardef, description) if", "@register_boundary_condition('ConstantValue') class ConstantDirichletBoundary(BoundaryConditionCreator): description = 'A prescribed constant value Dirichlet", "self.region_names = inp_dict.get_value('regions', required_type='list(string)') self.update() def update(self, timestep_number=None, t=None, dt=None):", "% self.subdomain_id @register_boundary_condition('ConstantValue') class ConstantDirichletBoundary(BoundaryConditionCreator): description = 'A prescribed constant", "bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field function value for", "= subdomain_marker self.subdomain_id = subdomain_id self._updater = updater def func(self):", "%d>' % self.subdomain_id @register_boundary_condition('ConstantValue') class ConstantDirichletBoundary(BoundaryConditionCreator): description = 'A prescribed", "with values from a field function' def __init__(self, simulation, var_name,", "field function \"\"\" self.simulation = simulation if var_name[-1].isdigit(): # A", "flux += dolfin.assemble(f) count += 1 assert count == len(self.region_names)", "boundary condition for %s' % var_name expr = CodedExpression(simulation, code,", "self.func_space, expr, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc)", "if isinstance(vardef, list): assert len(vardef) == simulation.ndim exprs = [", "from ocellaris.utils import ( CodedExpression, OcellarisCppExpression, OcellarisError, verify_field_variable_definition, ) class", "% var_name) # Compute the region area, then update the", "in range(simulation.ndim): name = '%s%d' % (var_name, d) description =", "variable \"\"\" if not isinstance(value, (float, int)): raise OcellarisError( 'Error", "var_name expr = CodedExpression(simulation, code, description) 
self.register_dirichlet_condition(var_name, expr, subdomains, subdomain_id)", "self, simulation, V, value, subdomain_marker, subdomain_id, updater=None ): \"\"\" A", "= 'A prescribed constant value Dirichlet condition' def __init__(self, simulation,", "value, subdomain_marker, subdomain_id, method='geometric' ) self.simulation = simulation self._value =", "region in regions: if region.name in self.region_names: f = dolfin.dot(self.velocity,", ". import register_boundary_condition, BoundaryConditionCreator from ocellaris.utils import ( CodedExpression, OcellarisCppExpression,", "boundary condition for use in the solver \"\"\" description =", "condition for %s' % name sub_code = inp_dict.get_value('code/%d' % d,", "OcellarisDirichletBC( self.simulation, V, self._value, self.subdomain_marker, self.subdomain_id ) def update(self): \"\"\"", "return_updater=True ) bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id,", "self.func_space, expr, subdomains, subdomain_id, updater=updater, ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name,", "% (var_name, d) self.register_dirichlet_condition( name, exprs[d], subdomains, subdomain_id ) else:", "changed, update our flux to make the total sum to", "code, subdomains, subdomain_id) def register_dirichlet_condition( self, var_name, cpp_code, subdomains, subdomain_id", "from a field function \"\"\" self.simulation = simulation # A", "self.func_space, self.flux, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc)", "condition with coded value \"\"\" self.simulation = simulation if var_name[-1].isdigit():", "subdomains, subdomain_id, updater=updater, ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info('", "A var_name like \"u\" was given. 
Look up \"Vu\" self.func_space", "self.region_names: f = dolfin.dot(self.velocity, n) * region.ds() flux += dolfin.assemble(f)", "the code string code = inp_dict.get_value('code', required_type='any') if isinstance(code, list):", "%s' % var_name P = self.func_space.ufl_element().degree() expr, updater = OcellarisCppExpression(", "simulation.data['V%s' % var_name[:-1]] else: # A var_name like \"u\" was", "expr, updater = OcellarisCppExpression( self.simulation, cpp_code, description, P, return_updater=True )", "Dirichlet boundary condition with value from a field function \"\"\"", "class ConstantDirichletBoundary(BoundaryConditionCreator): description = 'A prescribed constant value Dirichlet condition'", "% value, ) df_value = dolfin.Constant(value) # Store the boundary", "\"\"\" assert expr.ufl_shape == () bc = OcellarisDirichletBC( self.simulation, self.func_space,", "self.simulation = simulation if var_name[-1].isdigit(): # A var_name like \"u0\"", "self.simulation.data['mesh'] n = dolfin.FacetNormal(mesh) flux = 0 count = 0", "dolfin.FacetNormal(mesh) flux = 0 count = 0 for region in", "value, subdomains, subdomain_id): \"\"\" Add a Dirichlet condition to this", "pointing outwards along the axis in the positive # direction", "has changed, update our flux to make the total sum", "used every timestep and for all RK substeps \"\"\" if", "self.simulation, V, self._value, self.subdomain_marker, self.subdomain_id ) def update(self): \"\"\" Update", "for all RK substeps \"\"\" if self._updater: self._updater( self.simulation.timestep, self.simulation.time,", "code = inp_dict.get_value('code', required_type='any') if isinstance(code, list): assert len(code) ==", "var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with constant value", "velocity field' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\"", "list): assert len(vardef) == simulation.ndim exprs = [ verify_field_variable_definition(simulation, vd,", 
"Dirichlet condition with coded value \"\"\" self.simulation = simulation if", "subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, code, subdomains, subdomain_id) def register_dirichlet_condition(", "the solver bc = OcellarisDirichletBC( self.simulation, self.func_space, df_value, subdomains, subdomain_id", "the bc = OcellarisDirichletBC( self.simulation, self.func_space, self.flux, subdomains, subdomain_id )", "Dirichlet condition' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\"", "in the solver bc = OcellarisDirichletBC( self.simulation, self.func_space, df_value, subdomains,", "expr, subdomains, subdomain_id): \"\"\" Store the boundary condition for use", "var_name, cpp_code, subdomains, subdomain_id ): \"\"\" Store the boundary condition", "name sub_code = inp_dict.get_value('code/%d' % d, required_type='string') expr = CodedExpression(simulation,", "valve for %s' % var_name) # Compute the region area,", "in regions: if region.name in self.region_names: f = dolfin.dot(self.velocity, n)", "Apache-2.0 import dolfin from . 
import register_boundary_condition, BoundaryConditionCreator from ocellaris.utils", "self.simulation = simulation # A var_name like \"u0\" should be", "% var_name) @register_boundary_condition('FieldVelocityValve') class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition", "[]).append(bc) self.simulation.log.info(' Coded value for %s' % var_name) @register_boundary_condition('CppCodedValue') class", "description) for vd in vardef ] else: expr = verify_field_variable_definition(simulation,", "bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' C++ coded value for", "[]).append(bc) self.simulation.log.info(' C++ coded value for %s' % var_name) @register_boundary_condition('FieldFunction')", "list): assert len(value) == simulation.ndim for d in range(simulation.ndim): name", "[]).append(bc) self.simulation.log.info(' Field function value for %s' % var_name) @register_boundary_condition('FieldVelocityValve')", "= CodedExpression(simulation, code, description) self.register_dirichlet_condition(var_name, expr, subdomains, subdomain_id) def register_dirichlet_condition(self,", "%s' % (value, var_name)) @register_boundary_condition('CodedValue') class CodedDirichletBoundary(BoundaryConditionCreator): description = 'A", "len(exprs) > 1: for d in range(simulation.ndim): name = '%s%d'", "BC for %s' % var_name, 'The value %r is not", "the solver \"\"\" assert expr.ufl_shape == () bc = OcellarisDirichletBC(", "Make a dolfin Expression object that runs the code string", "updater def func(self): \"\"\" The boundary value derivative function \"\"\"", "condition for use in the solver bc = OcellarisDirichletBC( self.simulation,", "range(simulation.ndim): name = '%s%d' % (var_name, d) description = 'coded", "value derivative function \"\"\" return self._value def ds(self): \"\"\" Returns", "class FieldFunctionDirichletBoundary(BoundaryConditionCreator): description = 'A 
Dirichlet condition with values from", "(var_name, d) self.register_dirichlet_condition( name, value[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name,", "% var_name P = self.func_space.ufl_element().degree() expr, updater = OcellarisCppExpression( self.simulation,", "Get the field function expression object vardef = inp_dict.get_value('function', required_type='any')", "(): assert expr.ufl_shape == ( simulation.ndim, ), 'Expected shape %r", "simulation if var_name[-1].isdigit(): # A var_name like \"u0\" was given.", "to this variable \"\"\" if not isinstance(value, (float, int)): raise", "runs the code string code = inp_dict.get_value('cpp_code', required_type='any') if isinstance(code,", "solver \"\"\" bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id", "self.simulation = simulation self._value = value self.subdomain_marker = subdomain_marker self.subdomain_id", "all RK substeps \"\"\" if self._updater: self._updater( self.simulation.timestep, self.simulation.time, self.simulation.dt", "be given. 
Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]]", "name = '%s%d' % (var_name, d) self.register_dirichlet_condition( name, exprs[d], subdomains,", "'Expected shape %r got %r' % ((simulation.ndim,), expr.ufl_shape) exprs =", "for region in regions: if region.name in self.region_names: f =", "assert len(value) == simulation.ndim for d in range(simulation.ndim): name =", "verify_field_variable_definition, ) class OcellarisDirichletBC(dolfin.DirichletBC): def __init__( self, simulation, V, value,", "condition for %s' % var_name expr = CodedExpression(simulation, code, description)", "the subdomain \"\"\" return self.simulation.data['ds'](self.subdomain_id) def copy_and_change_function_space(self, V): \"\"\" Return", "= '%s%d' % (var_name, d) description = 'coded value boundary", "range(simulation.ndim): name = '%s%d' % (var_name, d) sub_code = inp_dict.get_value('cpp_code/%d'", "%s' % var_name, 'The value %r is not a number'", "= [ verify_field_variable_definition(simulation, vd, description) for vd in vardef ]", "# Store the boundary condition for use in the solver", "= 'boundary condititon for %s' % var_name self.velocity = verify_field_variable_definition(", "var_name, expr, subdomains, subdomain_id): \"\"\" Store the boundary condition for", "%s' % var_name) @register_boundary_condition('CppCodedValue') class CppCodedDirichletBoundary(BoundaryConditionCreator): description = 'A C++", "subdomain_id): \"\"\" Dirichlet boundary condition with value from a field", "Dirichlet condition with C++ coded value \"\"\" self.simulation = simulation", "function value for %s' % var_name) @register_boundary_condition('FieldVelocityValve') class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description", "OcellarisDirichletBC( self.simulation, self.func_space, df_value, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs']", ") bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) 
self.simulation.log.info(' Coded value for", "subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field", "A var_name like \"u0\" should be given. Look up \"Vu\"", "* region.ds() flux += dolfin.assemble(f) count += 1 assert count", "= '%s%d' % (var_name, d) self.register_dirichlet_condition( name, value[d], subdomains, subdomain_id", "value[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, value, subdomains, subdomain_id) def", "var_name) @register_boundary_condition('FieldVelocityValve') class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition that", "main field has changed, update our flux to make the", "regions: if region.name in self.region_names: f = dolfin.dot(self.velocity, n) *", "var_name[:-1]] else: # A var_name like \"u\" was given. Look", "\"\"\" if self._updater: self._updater( self.simulation.timestep, self.simulation.time, self.simulation.dt ) def __repr__(self):", "value, subdomain_marker, subdomain_id, updater=None ): \"\"\" A simple storage class", "%r' % ((simulation.ndim,), expr.ufl_shape) exprs = [expr[d] for d in", "== simulation.ndim for d in range(simulation.ndim): name = '%s%d' %", "not a number' % value, ) df_value = dolfin.Constant(value) #", "subdomain_id ) else: self.register_dirichlet_condition(var_name, code, subdomains, subdomain_id) def register_dirichlet_condition( self,", "vardef, description ) field = simulation.fields[vardef.split('/')[0]] # The expression value", "that runs the code string code = inp_dict.get_value('code', required_type='any') if", "if isinstance(code, list): assert len(code) == simulation.ndim for d in", "= 'coded value boundary condition for %s' % var_name expr", "%s' % name sub_code = inp_dict.get_value('code/%d' % d, required_type='string') expr", "d in range(simulation.ndim): name = '%s%d' % (var_name, d) self.register_dirichlet_condition(", 
"return self._value def ds(self): \"\"\" Returns the ds measure of", "description = 'A Dirichlet condition with values from a field", "exprs[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition( var_name, exprs[0], subdomains, subdomain_id", "timestep and for all RK substeps \"\"\" if self._updater: self._updater(", "= inp_dict.get_value('code/%d' % d, required_type='string') expr = CodedExpression(simulation, sub_code, description)", "function expression object vardef = inp_dict.get_value('function', required_type='any') description = 'boundary", "== ( simulation.ndim, ), 'Expected shape %r got %r' %", "was given. Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name]", "solver (default) to BCs for a coupled solver \"\"\" return", "required_type='list(string)') self.update() def update(self, timestep_number=None, t=None, dt=None): \"\"\" The main", "value for %s' % var_name) @register_boundary_condition('FieldVelocityValve') class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description =", "var_name[-1].isdigit(): # A var_name like \"u0\" was given. 
Look up", "in range(simulation.ndim): name = '%s%d' % (var_name, d) sub_code =", "subdomains, subdomain_id): \"\"\" Store the boundary condition for use in", "field.register_dependent_field(self) self.flux = dolfin.Constant(1.0) # Create the bc = OcellarisDirichletBC(", "= OcellarisDirichletBC( self.simulation, self.func_space, df_value, subdomains, subdomain_id ) bcs =", "# Make a dolfin Expression object that runs the code", "positive # direction in this boundary region self.flux.assign(dolfin.Constant(-flux / self.area))", "def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition", "got %r' % ((simulation.ndim,), expr.ufl_shape) exprs = [expr[d] for d", "assert expr.ufl_shape == () bc = OcellarisDirichletBC( self.simulation, self.func_space, expr,", "% (var_name, d) description = 'coded value boundary condition for", "area, then update the flux mesh = simulation.data['mesh'] self.area =", "'A Dirichlet condition with values from a field function' def", "subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Constant", "description = 'boundary condititon for %s' % var_name P =", "subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, value, subdomains, subdomain_id): \"\"\" Add", "FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition that compensates for non-zero", "dolfin Expression object that runs the code string code =", "sub_code, subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, code, subdomains, subdomain_id) def", "% (var_name, d) sub_code = inp_dict.get_value('cpp_code/%d' % d, required_type='string') self.register_dirichlet_condition(", "= simulation # A var_name like \"u0\" should be given.", "sum to zero \"\"\" regions = self.simulation.data['boundary'] mesh = self.simulation.data['mesh']", "for %s' % var_name) 
@register_boundary_condition('CppCodedValue') class CppCodedDirichletBoundary(BoundaryConditionCreator): description = 'A", "condition' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet", "solver \"\"\" description = 'boundary condititon for %s' % var_name", "assumes n is pointing outwards along the axis in the", "= simulation.fields[vardef.split('/')[0]] # The expression value is updated as the", "V, value, subdomain_marker, subdomain_id, method='geometric' ) self.simulation = simulation self._value", "ocellaris.utils import ( CodedExpression, OcellarisCppExpression, OcellarisError, verify_field_variable_definition, ) class OcellarisDirichletBC(dolfin.DirichletBC):", "# A var_name like \"u0\" was given. Look up \"Vu\"", "name = '%s%d' % (var_name, d) sub_code = inp_dict.get_value('cpp_code/%d' %", "update(self, timestep_number=None, t=None, dt=None): \"\"\" The main field has changed,", "for non-zero total flux of a known velocity field' def", "measure of the subdomain \"\"\" return self.simulation.data['ds'](self.subdomain_id) def copy_and_change_function_space(self, V):", "subdomains, subdomain_id): \"\"\" Add a Dirichlet condition to this variable", "for d in range(simulation.ndim)] else: exprs = [expr] # Register", "= [expr[d] for d in range(simulation.ndim)] else: exprs = [expr]", "%s' % var_name if isinstance(vardef, list): assert len(vardef) == simulation.ndim", "vardef, description) if expr.ufl_shape != (): assert expr.ufl_shape == (", "inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with C++ coded value", "register_dirichlet_condition(self, var_name, value, subdomains, subdomain_id): \"\"\" Add a Dirichlet condition", "value %r for %s' % (value, var_name)) @register_boundary_condition('CodedValue') class CodedDirichletBoundary(BoundaryConditionCreator):", "raise OcellarisError( 'Error in ConstantValue BC for %s' % var_name,", "var_name like \"u0\" should be given. 
Look up \"Vu\" self.func_space", "like \"u0\" was given. Look up \"Vu\" self.func_space = simulation.data['V%s'", "else: self.register_dirichlet_condition(var_name, code, subdomains, subdomain_id) def register_dirichlet_condition( self, var_name, cpp_code,", "= OcellarisDirichletBC( self.simulation, self.func_space, self.flux, subdomains, subdomain_id ) bcs =", "= value self.subdomain_marker = subdomain_marker self.subdomain_id = subdomain_id self._updater =", "\"\"\" Dirichlet condition with coded value \"\"\" self.simulation = simulation", "def update(self): \"\"\" Update the time and other parameters used", "d in range(simulation.ndim): name = '%s%d' % (var_name, d) description", "derivative function \"\"\" return self._value def ds(self): \"\"\" Returns the", "%r is not a number' % value, ) df_value =", "%s' % var_name) # Compute the region area, then update", "df_value = dolfin.Constant(value) # Store the boundary condition for use", "the time and other parameters used in the BC. This", "method='geometric' ) self.simulation = simulation self._value = value self.subdomain_marker =", "is changed inp_dict.get_value('function', required_type='any') field.register_dependent_field(self) self.flux = dolfin.Constant(1.0) # Create", "range(simulation.ndim): name = '%s%d' % (var_name, d) self.register_dirichlet_condition( name, exprs[d],", "(default) to BCs for a coupled solver \"\"\" return OcellarisDirichletBC(", "% d, required_type='string') expr = CodedExpression(simulation, sub_code, description) self.register_dirichlet_condition(name, expr,", "self.simulation, self.func_space, expr, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name,", "subdomain_id ) else: self.register_dirichlet_condition(var_name, value, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name,", "The boundary value derivative function \"\"\" return self._value def ds(self):", "% var_name self.velocity = 
verify_field_variable_definition( simulation, vardef, description ) field", "simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with constant", "( CodedExpression, OcellarisCppExpression, OcellarisError, verify_field_variable_definition, ) class OcellarisDirichletBC(dolfin.DirichletBC): def __init__(", "a field function \"\"\" self.simulation = simulation if var_name[-1].isdigit(): #", "= inp_dict.get_value('cpp_code', required_type='any') if isinstance(code, list): assert len(code) == simulation.ndim", "register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): \"\"\" Store the boundary condition", "self.subdomain_marker = subdomain_marker self.subdomain_id = subdomain_id self._updater = updater def", "expr = CodedExpression(simulation, sub_code, description) self.register_dirichlet_condition(name, expr, subdomains, subdomain_id) else:", "= inp_dict.get_value('function', required_type='any') description = 'boundary condititon for %s' %", "= 'A coded Dirichlet condition' def __init__(self, simulation, var_name, inp_dict,", "verify_field_variable_definition(simulation, vardef, description) if expr.ufl_shape != (): assert expr.ufl_shape ==", "copy_and_change_function_space(self, V): \"\"\" Return a copy with a new function", "make the total sum to zero \"\"\" regions = self.simulation.data['boundary']", "given. 
Look up \"Vu\" self.func_space = simulation.data['V%s' % var_name] #", "in the solver \"\"\" description = 'boundary condititon for %s'", "expr, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): \"\"\"", "A simple storage class for Dirichlet boundary conditions \"\"\" super().__init__(", "subdomain_id, updater=updater, ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' C++", "from a field function \"\"\" self.simulation = simulation if var_name[-1].isdigit():", "shape %r got %r' % ((simulation.ndim,), expr.ufl_shape) exprs = [expr[d]", "for %s' % var_name if isinstance(vardef, list): assert len(vardef) ==", "\"\"\" Dirichlet condition with C++ coded value \"\"\" self.simulation =", "vd in vardef ] else: expr = verify_field_variable_definition(simulation, vardef, description)", "subdomains, subdomain_id) else: description = 'coded value boundary condition for", "as the field is changed inp_dict.get_value('function', required_type='any') field.register_dependent_field(self) self.flux =", "self.func_space = simulation.data['V%s' % var_name] value = inp_dict.get_value('value', required_type='any') if", "'%s%d' % (var_name, d) sub_code = inp_dict.get_value('cpp_code/%d' % d, required_type='string')", "subdomain_id): \"\"\" Dirichlet condition with constant value \"\"\" self.simulation =", "timestep_number=None, t=None, dt=None): \"\"\" The main field has changed, update", "storage class for Dirichlet boundary conditions \"\"\" super().__init__( V, value,", "else: self.register_dirichlet_condition( var_name, exprs[0], subdomains, subdomain_id ) def register_dirichlet_condition(self, var_name,", "the flux mesh = simulation.data['mesh'] self.area = dolfin.assemble(self.flux * bc.ds()(domain=mesh))", "string code = inp_dict.get_value('cpp_code', required_type='any') if isinstance(code, list): assert len(code)", "expression value is updated as the field is 
changed inp_dict.get_value('function',", "* bc.ds()(domain=mesh)) self.region_names = inp_dict.get_value('regions', required_type='list(string)') self.update() def update(self, timestep_number=None,", "OcellarisError( 'Error in ConstantValue BC for %s' % var_name, 'The", "bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' C++ coded value for %s' % var_name)", "\"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]] # Get the field", "def copy_and_change_function_space(self, V): \"\"\" Return a copy with a new", "coded value for %s' % var_name) @register_boundary_condition('FieldFunction') class FieldFunctionDirichletBoundary(BoundaryConditionCreator): description", "[expr] # Register BCs if len(exprs) > 1: for d", "var_name) # Compute the region area, then update the flux", "var_name) @register_boundary_condition('FieldFunction') class FieldFunctionDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition with", "def update(self, timestep_number=None, t=None, dt=None): \"\"\" The main field has", "CodedExpression(simulation, code, description) self.register_dirichlet_condition(var_name, expr, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name,", "class CppCodedDirichletBoundary(BoundaryConditionCreator): description = 'A C++ coded Dirichlet condition' def", "updater = OcellarisCppExpression( self.simulation, cpp_code, description, P, return_updater=True ) bc", "def func(self): \"\"\" The boundary value derivative function \"\"\" return", "import dolfin from . 
import register_boundary_condition, BoundaryConditionCreator from ocellaris.utils import", "= 'A Dirichlet condition that compensates for non-zero total flux", "OcellarisDirichletBC( self.simulation, self.func_space, self.flux, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs']", "list): assert len(code) == simulation.ndim for d in range(simulation.ndim): name", "return OcellarisDirichletBC( self.simulation, V, self._value, self.subdomain_marker, self.subdomain_id ) def update(self):", "self.register_dirichlet_condition(var_name, expr, subdomains, subdomain_id) def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id):", "%s' % var_name) @register_boundary_condition('FieldVelocityValve') class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet", "= self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' C++ coded value for %s'", "in the solver \"\"\" assert expr.ufl_shape == () bc =", "with C++ coded value \"\"\" self.simulation = simulation if var_name[-1].isdigit():", "= inp_dict.get_value('code', required_type='any') if isinstance(code, list): assert len(code) == simulation.ndim", "__init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet boundary condition", "1: for d in range(simulation.ndim): name = '%s%d' % (var_name,", "up \"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]] # Get the", "% (var_name, d) self.register_dirichlet_condition( name, value[d], subdomains, subdomain_id ) else:", "assert len(code) == simulation.ndim for d in range(simulation.ndim): name =", "self.simulation, self.func_space, self.flux, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name,", "simulation, V, value, subdomain_marker, subdomain_id, updater=None ): \"\"\" A simple", "# Register BCs if len(exprs) > 1: for d in", "dt=None): \"\"\" The main field has changed, 
update our flux", "up \"Vu\" self.func_space = simulation.data['V%s' % var_name[:-1]] else: # A", "subdomain_marker self.subdomain_id = subdomain_id self._updater = updater def func(self): \"\"\"", "OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs']", "class CodedDirichletBoundary(BoundaryConditionCreator): description = 'A coded Dirichlet condition' def __init__(self,", "count == len(self.region_names) # FIXME: assumes n is pointing outwards", "for %s' % var_name expr = CodedExpression(simulation, code, description) self.register_dirichlet_condition(var_name,", "field function' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id): \"\"\"", "expr, subdomains, subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info('", "condititon for %s' % var_name P = self.func_space.ufl_element().degree() expr, updater", "# Get the field function expression object vardef = inp_dict.get_value('function',", "subdomains, subdomain_id) def register_dirichlet_condition( self, var_name, cpp_code, subdomains, subdomain_id ):", "bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Coded value for %s'", "d) description = 'coded value boundary condition for %s' %", "non-zero total flux of a known velocity field' def __init__(self,", "[]).append(bc) self.simulation.log.info(' Constant value %r for %s' % (value, var_name))", "FIXME: assumes n is pointing outwards along the axis in", "the positive # direction in this boundary region self.flux.assign(dolfin.Constant(-flux /", "not isinstance(value, (float, int)): raise OcellarisError( 'Error in ConstantValue BC", "self, var_name, cpp_code, subdomains, subdomain_id ): \"\"\" Store the boundary", "sub_code = inp_dict.get_value('code/%d' % d, required_type='string') expr = CodedExpression(simulation, sub_code,", "d) 
self.register_dirichlet_condition( name, exprs[d], subdomains, subdomain_id ) else: self.register_dirichlet_condition( var_name,", "bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field function value for %s' % var_name)", "exprs[0], subdomains, subdomain_id ) def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id):", "for vd in vardef ] else: expr = verify_field_variable_definition(simulation, vardef,", "2015-2019 <NAME> # SPDX-License-Identifier: Apache-2.0 import dolfin from . import", "and other parameters used in the BC. This is used", "isinstance(value, list): assert len(value) == simulation.ndim for d in range(simulation.ndim):", "condition for use in the solver \"\"\" bc = OcellarisDirichletBC(", "Dirichlet condition that compensates for non-zero total flux of a", "\"\"\" self.simulation = simulation if var_name[-1].isdigit(): # A var_name like", "description = 'A prescribed constant value Dirichlet condition' def __init__(self,", "function \"\"\" self.simulation = simulation if var_name[-1].isdigit(): # A var_name", "self.register_dirichlet_condition(var_name, code, subdomains, subdomain_id) def register_dirichlet_condition( self, var_name, cpp_code, subdomains,", "in range(simulation.ndim)] else: exprs = [expr] # Register BCs if", "velocity valve for %s' % var_name) # Compute the region", "subdomain_id, updater=None ): \"\"\" A simple storage class for Dirichlet", "var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet boundary condition with value", "var_name] value = inp_dict.get_value('value', required_type='any') if isinstance(value, list): assert len(value)", "= self.simulation.data['mesh'] n = dolfin.FacetNormal(mesh) flux = 0 count =", "boundary conditions \"\"\" super().__init__( V, value, subdomain_marker, subdomain_id, method='geometric' )", "subdomain_id): \"\"\" Add a Dirichlet condition to this variable \"\"\"", "'A prescribed constant value Dirichlet condition' def __init__(self, 
simulation, var_name,", "= subdomain_id self._updater = updater def func(self): \"\"\" The boundary", "bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field velocity valve for %s' % var_name)", "var_name like \"u\" was given. Look up \"Vu\" self.func_space =", "description, P, return_updater=True ) bc = OcellarisDirichletBC( self.simulation, self.func_space, expr,", "= simulation.data['V%s' % var_name] # Get the field function expression", "boundary value derivative function \"\"\" return self._value def ds(self): \"\"\"", "n) * region.ds() flux += dolfin.assemble(f) count += 1 assert", "subdomains, subdomain_id ) def register_dirichlet_condition(self, var_name, expr, subdomains, subdomain_id): \"\"\"", "verify_field_variable_definition( simulation, vardef, description ) field = simulation.fields[vardef.split('/')[0]] # The", "Store the boundary condition for use in the solver bc", "region.name in self.region_names: f = dolfin.dot(self.velocity, n) * region.ds() flux", "coupled solver \"\"\" return OcellarisDirichletBC( self.simulation, V, self._value, self.subdomain_marker, self.subdomain_id", "use in the solver \"\"\" assert expr.ufl_shape == () bc", "flux of a known velocity field' def __init__(self, simulation, var_name,", "'Error in ConstantValue BC for %s' % var_name, 'The value", "CodedExpression(simulation, sub_code, description) self.register_dirichlet_condition(name, expr, subdomains, subdomain_id) else: description =", "simulation self._value = value self.subdomain_marker = subdomain_marker self.subdomain_id = subdomain_id", "P, return_updater=True ) bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains,", "expr = verify_field_variable_definition(simulation, vardef, description) if expr.ufl_shape != (): assert", "% d, required_type='string') self.register_dirichlet_condition( name, sub_code, subdomains, subdomain_id ) else:", "simulation # A var_name like \"u0\" should be given. 
Look", "the solver \"\"\" bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains,", "var_name] # Get the field function expression object vardef =", "__repr__(self): return '<OcellarisDirichletBC on subdomain %d>' % self.subdomain_id @register_boundary_condition('ConstantValue') class", "d in range(simulation.ndim): name = '%s%d' % (var_name, d) sub_code", ") else: self.register_dirichlet_condition( var_name, exprs[0], subdomains, subdomain_id ) def register_dirichlet_condition(self,", "bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field velocity valve for", "in the positive # direction in this boundary region self.flux.assign(dolfin.Constant(-flux", "from BCs for a segregated solver (default) to BCs for", "for %s' % (value, var_name)) @register_boundary_condition('CodedValue') class CodedDirichletBoundary(BoundaryConditionCreator): description =", "if not isinstance(value, (float, int)): raise OcellarisError( 'Error in ConstantValue", "use in the solver bc = OcellarisDirichletBC( self.simulation, self.func_space, df_value,", "% var_name] # Get the field function expression object vardef", "bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id ) bcs", "= 'boundary condititon for %s' % var_name P = self.func_space.ufl_element().degree()", "func(self): \"\"\" The boundary value derivative function \"\"\" return self._value", "P = self.func_space.ufl_element().degree() expr, updater = OcellarisCppExpression( self.simulation, cpp_code, description,", "mesh = self.simulation.data['mesh'] n = dolfin.FacetNormal(mesh) flux = 0 count", "for d in range(simulation.ndim): name = '%s%d' % (var_name, d)", "): \"\"\" Store the boundary condition for use in the", "sub_code = inp_dict.get_value('cpp_code/%d' % d, required_type='string') self.register_dirichlet_condition( name, sub_code, subdomains,", "Used when converting from BCs for a segregated solver 
(default)", "The main field has changed, update our flux to make", "subdomain_marker, subdomain_id, updater=None ): \"\"\" A simple storage class for", "value for %s' % var_name) @register_boundary_condition('FieldFunction') class FieldFunctionDirichletBoundary(BoundaryConditionCreator): description =", "value \"\"\" self.simulation = simulation if var_name[-1].isdigit(): # A var_name", "def register_dirichlet_condition( self, var_name, cpp_code, subdomains, subdomain_id ): \"\"\" Store", "self.simulation, cpp_code, description, P, return_updater=True ) bc = OcellarisDirichletBC( self.simulation,", "var_name self.velocity = verify_field_variable_definition( simulation, vardef, description ) field =", "var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with C++ coded", "CodedDirichletBoundary(BoundaryConditionCreator): description = 'A coded Dirichlet condition' def __init__(self, simulation,", "= updater def func(self): \"\"\" The boundary value derivative function", "n = dolfin.FacetNormal(mesh) flux = 0 count = 0 for", "dolfin.assemble(self.flux * bc.ds()(domain=mesh)) self.region_names = inp_dict.get_value('regions', required_type='list(string)') self.update() def update(self,", "the boundary condition for use in the solver \"\"\" description", "var_name like \"u0\" was given. 
Look up \"Vu\" self.func_space =", "def register_dirichlet_condition(self, var_name, value, subdomains, subdomain_id): \"\"\" Add a Dirichlet", "for a coupled solver \"\"\" return OcellarisDirichletBC( self.simulation, V, self._value,", "name = '%s%d' % (var_name, d) self.register_dirichlet_condition( name, value[d], subdomains,", "@register_boundary_condition('CodedValue') class CodedDirichletBoundary(BoundaryConditionCreator): description = 'A coded Dirichlet condition' def", "var_name, inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with coded value", "that runs the code string code = inp_dict.get_value('cpp_code', required_type='any') if", "ConstantValue BC for %s' % var_name, 'The value %r is", "d, required_type='string') self.register_dirichlet_condition( name, sub_code, subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name,", "required_type='string') self.register_dirichlet_condition( name, sub_code, subdomains, subdomain_id ) else: self.register_dirichlet_condition(var_name, code,", "of the subdomain \"\"\" return self.simulation.data['ds'](self.subdomain_id) def copy_and_change_function_space(self, V): \"\"\"", "self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Coded value for %s' % var_name)", "vardef = inp_dict.get_value('function', required_type='any') description = 'boundary condititon for %s'", "description) if expr.ufl_shape != (): assert expr.ufl_shape == ( simulation.ndim,", "== () bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id", "register_boundary_condition, BoundaryConditionCreator from ocellaris.utils import ( CodedExpression, OcellarisCppExpression, OcellarisError, verify_field_variable_definition,", "@register_boundary_condition('FieldVelocityValve') class FieldVelocityValveDirichletBoundary(BoundaryConditionCreator): description = 'A Dirichlet condition that compensates", "BC. 
This is used every timestep and for all RK", "> 1: for d in range(simulation.ndim): name = '%s%d' %", "= OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id, updater=updater, ) bcs", "The expression value is updated as the field is changed", "= CodedExpression(simulation, sub_code, description) self.register_dirichlet_condition(name, expr, subdomains, subdomain_id) else: description", "expr, subdomains, subdomain_id) else: description = 'coded value boundary condition", ") bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Field velocity valve", "Dirichlet condition with values from a field function' def __init__(self,", "C++ coded value \"\"\" self.simulation = simulation if var_name[-1].isdigit(): #", "dolfin.Constant(value) # Store the boundary condition for use in the", "subdomains, subdomain_id): \"\"\" Dirichlet condition with coded value \"\"\" self.simulation", "simulation.data['V%s' % var_name] # Make a dolfin Expression object that", "0 count = 0 for region in regions: if region.name", "inp_dict, subdomains, subdomain_id): \"\"\" Dirichlet condition with coded value \"\"\"", "should be given. 
Look up \"Vu\" self.func_space = simulation.data['V%s' %", "simulation, vardef, description ) field = simulation.fields[vardef.split('/')[0]] # The expression", "if expr.ufl_shape != (): assert expr.ufl_shape == ( simulation.ndim, ),", "self._updater = updater def func(self): \"\"\" The boundary value derivative", "update the flux mesh = simulation.data['mesh'] self.area = dolfin.assemble(self.flux *", "constant value \"\"\" self.simulation = simulation if var_name[-1].isdigit(): # A", "value boundary condition for %s' % name sub_code = inp_dict.get_value('code/%d'", "= simulation.data['V%s' % var_name] value = inp_dict.get_value('value', required_type='any') if isinstance(value,", "condition for use in the solver \"\"\" description = 'boundary", "\"Vu\" self.func_space = simulation.data['V%s' % var_name] # Get the field", "[ verify_field_variable_definition(simulation, vd, description) for vd in vardef ] else:", ") df_value = dolfin.Constant(value) # Store the boundary condition for", "range(simulation.ndim): name = '%s%d' % (var_name, d) self.register_dirichlet_condition( name, value[d],", "boundary condition for use in the solver bc = OcellarisDirichletBC(", "self.subdomain_id @register_boundary_condition('ConstantValue') class ConstantDirichletBoundary(BoundaryConditionCreator): description = 'A prescribed constant value", "up \"Vu\" self.func_space = simulation.data['V%s' % var_name] # Get the", "segregated solver (default) to BCs for a coupled solver \"\"\"", "required_type='any') if isinstance(code, list): assert len(code) == simulation.ndim for d", "\"\"\" bc = OcellarisDirichletBC( self.simulation, self.func_space, expr, subdomains, subdomain_id )", "Register BCs if len(exprs) > 1: for d in range(simulation.ndim):", "subdomain_id ) bcs = self.simulation.data['dirichlet_bcs'] bcs.setdefault(var_name, []).append(bc) self.simulation.log.info(' Constant value", "len(vardef) == simulation.ndim exprs = [ verify_field_variable_definition(simulation, vd, 
description) for", "@register_boundary_condition('CppCodedValue') class CppCodedDirichletBoundary(BoundaryConditionCreator): description = 'A C++ coded Dirichlet condition'", "# A var_name like \"u\" was given. Look up \"Vu\"", "coded Dirichlet condition' def __init__(self, simulation, var_name, inp_dict, subdomains, subdomain_id):", "assert count == len(self.region_names) # FIXME: assumes n is pointing", "= inp_dict.get_value('regions', required_type='list(string)') self.update() def update(self, timestep_number=None, t=None, dt=None): \"\"\"", "% (value, var_name)) @register_boundary_condition('CodedValue') class CodedDirichletBoundary(BoundaryConditionCreator): description = 'A coded", "solver \"\"\" return OcellarisDirichletBC( self.simulation, V, self._value, self.subdomain_marker, self.subdomain_id )", "V, value, subdomain_marker, subdomain_id, updater=None ): \"\"\" A simple storage" ]
[ "2, 4, 6] result = count_inversions(input) self.assertEqual(result, 3) if __name__", "TestCountSplitInversions(unittest.TestCase): def test_count_inversions(self): input = [1, 3, 5, 2, 4,", "from count_split_inversions import count_inversions class TestCountSplitInversions(unittest.TestCase): def test_count_inversions(self): input =", "3, 5, 2, 4, 6] result = count_inversions(input) self.assertEqual(result, 3)", "def test_count_inversions(self): input = [1, 3, 5, 2, 4, 6]", "test_count_inversions(self): input = [1, 3, 5, 2, 4, 6] result", "input = [1, 3, 5, 2, 4, 6] result =", "result = count_inversions(input) self.assertEqual(result, 3) if __name__ == '__main__': unittest.main()", "4, 6] result = count_inversions(input) self.assertEqual(result, 3) if __name__ ==", "5, 2, 4, 6] result = count_inversions(input) self.assertEqual(result, 3) if", "6] result = count_inversions(input) self.assertEqual(result, 3) if __name__ == '__main__':", "import count_inversions class TestCountSplitInversions(unittest.TestCase): def test_count_inversions(self): input = [1, 3,", "[1, 3, 5, 2, 4, 6] result = count_inversions(input) self.assertEqual(result,", "import unittest from count_split_inversions import count_inversions class TestCountSplitInversions(unittest.TestCase): def test_count_inversions(self):", "count_split_inversions import count_inversions class TestCountSplitInversions(unittest.TestCase): def test_count_inversions(self): input = [1,", "unittest from count_split_inversions import count_inversions class TestCountSplitInversions(unittest.TestCase): def test_count_inversions(self): input", "count_inversions class TestCountSplitInversions(unittest.TestCase): def test_count_inversions(self): input = [1, 3, 5,", "= [1, 3, 5, 2, 4, 6] result = count_inversions(input)", "class TestCountSplitInversions(unittest.TestCase): def test_count_inversions(self): input = [1, 3, 5, 2," ]
[ "= get_x_y(train_num_samples) test_data = get_x_y(test_num_samples) if tf_data: from_tensor_slices = tf.data.Dataset.from_tensor_slices", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0]) assert yhat.shape ==", "== (400, 2, 2) mse = self.forecaster.evaluate(test_data, multioutput=\"raw_values\") assert mse[0].shape", "os.path.join(tmp_dir_file, 'seq2seq.ckpt') self.forecaster.save(tmp_dir_file) self.forecaster.load(tmp_dir_file) from bigdl.chronos.model.tf2.Seq2Seq_keras import LSTMSeq2Seq assert isinstance(self.forecaster.internal,", "= 1000 test_num_samples = 400 input_feature_num = 10 output_feature_num =", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. 
# You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "> 2.0.0.\") class TestSeq2SeqForecaster(TestCase): def setUp(self): from bigdl.chronos.forecaster.tf.seq2seq_forecaster import Seq2SeqForecaster", "setUp(self): from bigdl.chronos.forecaster.tf.seq2seq_forecaster import Seq2SeqForecaster self.forecaster = Seq2SeqForecaster(past_seq_len=10, future_seq_len=2, input_feature_num=10,", "test_seq2seq_fit_predict_evaluate(self): train_data, test_data = create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat =", "= self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2, 2) mse =", "create_data(tf_data=True) self.forecaster.fit(train_data, epochs=2) yhat = self.forecaster.predict(test_data) assert yhat.shape == (400,", "yhat.shape == (400, 2, 2) def test_seq2seq_save_load(self): train_data, test_data =", "assert isinstance(self.forecaster.internal, LSTMSeq2Seq) load_model_yhat = self.forecaster.predict(test_data[0]) assert yhat.shape == (400,", "past_seq_len, input_feature_num) y = np.random.randn(num_sample, future_seq_len, output_feature_num) return x, y", "distributed under the License is distributed on an \"AS IS\"", "= Seq2SeqForecaster(past_seq_len=10, future_seq_len=2, input_feature_num=10, output_feature_num=2) def tearDown(self): pass def test_seq2seq_fit_predict_evaluate(self):", "import numpy as np import tensorflow as tf def create_data(tf_data=False,", "self.forecaster.evaluate(test_data, multioutput=\"raw_values\") assert mse[0].shape == test_data[-1].shape[1:] def test_seq2seq_fit_tf_data(self): train_data, test_data", "== (400, 2, 2) np.testing.assert_almost_equal(yhat, load_model_yhat, decimal=5) if __name__ ==", "tempfile import os from unittest import TestCase import numpy as", "= get_x_y(test_num_samples) if tf_data: from_tensor_slices = tf.data.Dataset.from_tensor_slices train_data = from_tensor_slices(train_data).cache()\\", 
"future_seq_len=2, input_feature_num=10, output_feature_num=2) def tearDown(self): pass def test_seq2seq_fit_predict_evaluate(self): train_data, test_data", "the specific language governing permissions and # limitations under the", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "mse = self.forecaster.evaluate(test_data, multioutput=\"raw_values\") assert mse[0].shape == test_data[-1].shape[1:] def test_seq2seq_fit_tf_data(self):", "future_seq_len = 2 def get_x_y(num_sample): x = np.random.randn(num_sample, past_seq_len, input_feature_num)", "epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0]) with tempfile.TemporaryDirectory() as tmp_dir_file: tmp_dir_file", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "except in compliance with the License. # You may obtain", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "from_tensor_slices(train_data).cache()\\ .shuffle(train_num_samples)\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) test_data = from_tensor_slices(test_data).cache()\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) return", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "2) mse = self.forecaster.evaluate(test_data, multioutput=\"raw_values\") assert mse[0].shape == test_data[-1].shape[1:] def", "from_tensor_slices = tf.data.Dataset.from_tensor_slices train_data = from_tensor_slices(train_data).cache()\\ .shuffle(train_num_samples)\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) test_data", "= create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0]) assert yhat.shape", "not use this file except in compliance with the License.", "tf > 2.0.0.\") class TestSeq2SeqForecaster(TestCase): def setUp(self): from bigdl.chronos.forecaster.tf.seq2seq_forecaster import", "test_data = create_data() self.forecaster.fit(train_data, epochs=2, 
batch_size=32) yhat = self.forecaster.predict(test_data[0]) with", "tmp_dir_file: tmp_dir_file = os.path.join(tmp_dir_file, 'seq2seq.ckpt') self.forecaster.save(tmp_dir_file) self.forecaster.load(tmp_dir_file) from bigdl.chronos.model.tf2.Seq2Seq_keras import", "License. # import pytest import tempfile import os from unittest", "writing, software # distributed under the License is distributed on", "train_data = from_tensor_slices(train_data).cache()\\ .shuffle(train_num_samples)\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) test_data = from_tensor_slices(test_data).cache()\\ .batch(batch_size)\\", "in writing, software # distributed under the License is distributed", ".batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) return train_data, test_data @pytest.mark.skipif(tf.__version__ < '2.0.0', reason=\"Run only", "you may not use this file except in compliance with", "400 input_feature_num = 10 output_feature_num = 2 past_seq_len = 10", "self.forecaster = Seq2SeqForecaster(past_seq_len=10, future_seq_len=2, input_feature_num=10, output_feature_num=2) def tearDown(self): pass def", "# limitations under the License. # import pytest import tempfile", "assert mse[0].shape == test_data[-1].shape[1:] def test_seq2seq_fit_tf_data(self): train_data, test_data = create_data(tf_data=True)", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "y train_data = get_x_y(train_num_samples) test_data = get_x_y(test_num_samples) if tf_data: from_tensor_slices", "# # Copyright 2016 The BigDL Authors. # # Licensed", ".shuffle(train_num_samples)\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) test_data = from_tensor_slices(test_data).cache()\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) return train_data,", "unittest import TestCase import numpy as np import tensorflow as", "test_data = create_data(tf_data=True) self.forecaster.fit(train_data, epochs=2) yhat = self.forecaster.predict(test_data) assert yhat.shape", "BigDL Authors. 
# # Licensed under the Apache License, Version", "import tempfile import os from unittest import TestCase import numpy", "2 def get_x_y(num_sample): x = np.random.randn(num_sample, past_seq_len, input_feature_num) y =", "test_data = from_tensor_slices(test_data).cache()\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) return train_data, test_data @pytest.mark.skipif(tf.__version__ <", "'2.0.0', reason=\"Run only when tf > 2.0.0.\") class TestSeq2SeqForecaster(TestCase): def", "== (400, 2, 2) def test_seq2seq_save_load(self): train_data, test_data = create_data()", "self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2, 2) np.testing.assert_almost_equal(yhat, load_model_yhat, decimal=5)", "use this file except in compliance with the License. #", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "batch_size=32) yhat = self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2, 2)", "2, 2) mse = self.forecaster.evaluate(test_data, multioutput=\"raw_values\") assert mse[0].shape == test_data[-1].shape[1:]", ".prefetch(tf.data.AUTOTUNE) test_data = from_tensor_slices(test_data).cache()\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) return train_data, test_data @pytest.mark.skipif(tf.__version__", "train_data = get_x_y(train_num_samples) test_data = get_x_y(test_num_samples) if tf_data: from_tensor_slices =", "LSTMSeq2Seq assert isinstance(self.forecaster.internal, LSTMSeq2Seq) load_model_yhat = self.forecaster.predict(test_data[0]) assert yhat.shape ==", "self.forecaster.load(tmp_dir_file) from bigdl.chronos.model.tf2.Seq2Seq_keras import LSTMSeq2Seq assert isinstance(self.forecaster.internal, LSTMSeq2Seq) load_model_yhat =", "Authors. 
# # Licensed under the Apache License, Version 2.0", "= from_tensor_slices(train_data).cache()\\ .shuffle(train_num_samples)\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) test_data = from_tensor_slices(test_data).cache()\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE)", "limitations under the License. # import pytest import tempfile import", "CONDITIONS OF ANY KIND, either express or implied. # See", "epochs=2) yhat = self.forecaster.predict(test_data) assert yhat.shape == (400, 2, 2)", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "tf.data.Dataset.from_tensor_slices train_data = from_tensor_slices(train_data).cache()\\ .shuffle(train_num_samples)\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) test_data = from_tensor_slices(test_data).cache()\\", "reason=\"Run only when tf > 2.0.0.\") class TestSeq2SeqForecaster(TestCase): def setUp(self):", "= from_tensor_slices(test_data).cache()\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) return train_data, test_data @pytest.mark.skipif(tf.__version__ < '2.0.0',", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "train_num_samples = 1000 test_num_samples = 400 input_feature_num = 10 output_feature_num", "< '2.0.0', reason=\"Run only when tf > 2.0.0.\") class TestSeq2SeqForecaster(TestCase):", "License. 
# You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "Seq2SeqForecaster(past_seq_len=10, future_seq_len=2, input_feature_num=10, output_feature_num=2) def tearDown(self): pass def test_seq2seq_fit_predict_evaluate(self): train_data,", "test_data[-1].shape[1:] def test_seq2seq_fit_tf_data(self): train_data, test_data = create_data(tf_data=True) self.forecaster.fit(train_data, epochs=2) yhat", "batch_size=32) yhat = self.forecaster.predict(test_data[0]) with tempfile.TemporaryDirectory() as tmp_dir_file: tmp_dir_file =", "(400, 2, 2) mse = self.forecaster.evaluate(test_data, multioutput=\"raw_values\") assert mse[0].shape ==", "# You may obtain a copy of the License at", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "= os.path.join(tmp_dir_file, 'seq2seq.ckpt') self.forecaster.save(tmp_dir_file) self.forecaster.load(tmp_dir_file) from bigdl.chronos.model.tf2.Seq2Seq_keras import LSTMSeq2Seq assert", "when tf > 2.0.0.\") class TestSeq2SeqForecaster(TestCase): def setUp(self): from bigdl.chronos.forecaster.tf.seq2seq_forecaster", "x, y train_data = get_x_y(train_num_samples) test_data = get_x_y(test_num_samples) if tf_data:", "= create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0]) with tempfile.TemporaryDirectory()", "under the License is distributed on an \"AS IS\" BASIS,", "yhat = self.forecaster.predict(test_data) assert yhat.shape == (400, 2, 2) def", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "def test_seq2seq_fit_predict_evaluate(self): train_data, test_data = create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat", "License for the specific language governing permissions and # limitations", "yhat.shape == (400, 2, 2) mse = 
self.forecaster.evaluate(test_data, multioutput=\"raw_values\") assert", "= tf.data.Dataset.from_tensor_slices train_data = from_tensor_slices(train_data).cache()\\ .shuffle(train_num_samples)\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) test_data =", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "assert yhat.shape == (400, 2, 2) np.testing.assert_almost_equal(yhat, load_model_yhat, decimal=5) if", "get_x_y(train_num_samples) test_data = get_x_y(test_num_samples) if tf_data: from_tensor_slices = tf.data.Dataset.from_tensor_slices train_data", "assert yhat.shape == (400, 2, 2) mse = self.forecaster.evaluate(test_data, multioutput=\"raw_values\")", "tearDown(self): pass def test_seq2seq_fit_predict_evaluate(self): train_data, test_data = create_data() self.forecaster.fit(train_data, epochs=2,", "train_data, test_data = create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0])", "= self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2, 2) np.testing.assert_almost_equal(yhat, load_model_yhat,", "(400, 2, 2) def test_seq2seq_save_load(self): train_data, test_data = create_data() self.forecaster.fit(train_data,", "2) def test_seq2seq_save_load(self): train_data, test_data = create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32)", "2.0.0.\") class TestSeq2SeqForecaster(TestCase): def setUp(self): from bigdl.chronos.forecaster.tf.seq2seq_forecaster import Seq2SeqForecaster self.forecaster", "= 2 def get_x_y(num_sample): x = np.random.randn(num_sample, past_seq_len, input_feature_num) y", "yhat = self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2, 2) mse", "self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2, 2) mse = self.forecaster.evaluate(test_data,", "self.forecaster.predict(test_data[0]) with tempfile.TemporaryDirectory() as tmp_dir_file: tmp_dir_file = os.path.join(tmp_dir_file, 'seq2seq.ckpt') 
self.forecaster.save(tmp_dir_file)", "with tempfile.TemporaryDirectory() as tmp_dir_file: tmp_dir_file = os.path.join(tmp_dir_file, 'seq2seq.ckpt') self.forecaster.save(tmp_dir_file) self.forecaster.load(tmp_dir_file)", "the License for the specific language governing permissions and #", "create_data(tf_data=False, batch_size=32): train_num_samples = 1000 test_num_samples = 400 input_feature_num =", "= 2 past_seq_len = 10 future_seq_len = 2 def get_x_y(num_sample):", ".batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) test_data = from_tensor_slices(test_data).cache()\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) return train_data, test_data", "test_data @pytest.mark.skipif(tf.__version__ < '2.0.0', reason=\"Run only when tf > 2.0.0.\")", "(the \"License\"); # you may not use this file except", "test_num_samples = 400 input_feature_num = 10 output_feature_num = 2 past_seq_len", "os from unittest import TestCase import numpy as np import", "Apache License, Version 2.0 (the \"License\"); # you may not", "numpy as np import tensorflow as tf def create_data(tf_data=False, batch_size=32):", "= 10 output_feature_num = 2 past_seq_len = 10 future_seq_len =", "# you may not use this file except in compliance", "either express or implied. # See the License for the", "from bigdl.chronos.model.tf2.Seq2Seq_keras import LSTMSeq2Seq assert isinstance(self.forecaster.internal, LSTMSeq2Seq) load_model_yhat = self.forecaster.predict(test_data[0])", "and # limitations under the License. # import pytest import", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "input_feature_num = 10 output_feature_num = 2 past_seq_len = 10 future_seq_len", "10 output_feature_num = 2 past_seq_len = 10 future_seq_len = 2", "def get_x_y(num_sample): x = np.random.randn(num_sample, past_seq_len, input_feature_num) y = np.random.randn(num_sample,", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "Seq2SeqForecaster self.forecaster = Seq2SeqForecaster(past_seq_len=10, future_seq_len=2, input_feature_num=10, output_feature_num=2) def tearDown(self): pass", "x = np.random.randn(num_sample, past_seq_len, input_feature_num) y = np.random.randn(num_sample, future_seq_len, output_feature_num)", "the License is distributed on an \"AS IS\" BASIS, #", "tensorflow as tf def create_data(tf_data=False, batch_size=32): train_num_samples = 1000 test_num_samples", "== test_data[-1].shape[1:] def test_seq2seq_fit_tf_data(self): train_data, test_data = create_data(tf_data=True) self.forecaster.fit(train_data, epochs=2)", "1000 test_num_samples = 400 input_feature_num = 10 output_feature_num = 2", "in compliance with the License. 
# You may obtain a", "software # distributed under the License is distributed on an", "input_feature_num) y = np.random.randn(num_sample, future_seq_len, output_feature_num) return x, y train_data", "'seq2seq.ckpt') self.forecaster.save(tmp_dir_file) self.forecaster.load(tmp_dir_file) from bigdl.chronos.model.tf2.Seq2Seq_keras import LSTMSeq2Seq assert isinstance(self.forecaster.internal, LSTMSeq2Seq)", "def test_seq2seq_save_load(self): train_data, test_data = create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat", "test_data = create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0]) assert", "np.random.randn(num_sample, future_seq_len, output_feature_num) return x, y train_data = get_x_y(train_num_samples) test_data", "= self.forecaster.predict(test_data[0]) with tempfile.TemporaryDirectory() as tmp_dir_file: tmp_dir_file = os.path.join(tmp_dir_file, 'seq2seq.ckpt')", "as tf def create_data(tf_data=False, batch_size=32): train_num_samples = 1000 test_num_samples =", "tf def create_data(tf_data=False, batch_size=32): train_num_samples = 1000 test_num_samples = 400", "load_model_yhat = self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2, 2) np.testing.assert_almost_equal(yhat,", "import os from unittest import TestCase import numpy as np", "# # Unless required by applicable law or agreed to", "bigdl.chronos.forecaster.tf.seq2seq_forecaster import Seq2SeqForecaster self.forecaster = Seq2SeqForecaster(past_seq_len=10, future_seq_len=2, input_feature_num=10, output_feature_num=2) def", "as tmp_dir_file: tmp_dir_file = os.path.join(tmp_dir_file, 'seq2seq.ckpt') self.forecaster.save(tmp_dir_file) self.forecaster.load(tmp_dir_file) from bigdl.chronos.model.tf2.Seq2Seq_keras", "isinstance(self.forecaster.internal, LSTMSeq2Seq) load_model_yhat = self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2,", "def create_data(tf_data=False, batch_size=32): train_num_samples 
= 1000 test_num_samples = 400 input_feature_num", "# import pytest import tempfile import os from unittest import", "The BigDL Authors. # # Licensed under the Apache License,", "under the License. # import pytest import tempfile import os", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the License. # import pytest import tempfile import os from", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "2, 2) def test_seq2seq_save_load(self): train_data, test_data = create_data() self.forecaster.fit(train_data, epochs=2,", "bigdl.chronos.model.tf2.Seq2Seq_keras import LSTMSeq2Seq assert isinstance(self.forecaster.internal, LSTMSeq2Seq) load_model_yhat = self.forecaster.predict(test_data[0]) assert", "future_seq_len, output_feature_num) return x, y train_data = get_x_y(train_num_samples) test_data =", "def tearDown(self): pass def test_seq2seq_fit_predict_evaluate(self): train_data, test_data = create_data() self.forecaster.fit(train_data,", "tf_data: from_tensor_slices = tf.data.Dataset.from_tensor_slices train_data = from_tensor_slices(train_data).cache()\\ .shuffle(train_num_samples)\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE)", "pass def test_seq2seq_fit_predict_evaluate(self): train_data, test_data = create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32)", "multioutput=\"raw_values\") assert mse[0].shape == test_data[-1].shape[1:] def test_seq2seq_fit_tf_data(self): train_data, test_data =", "Version 2.0 (the \"License\"); # you may not use this", "mse[0].shape == test_data[-1].shape[1:] def test_seq2seq_fit_tf_data(self): train_data, test_data = create_data(tf_data=True) self.forecaster.fit(train_data,", "assert yhat.shape == (400, 2, 2) def test_seq2seq_save_load(self): train_data, test_data", "pytest import tempfile import os from unittest import TestCase import", "create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0]) 
with tempfile.TemporaryDirectory() as", "tempfile.TemporaryDirectory() as tmp_dir_file: tmp_dir_file = os.path.join(tmp_dir_file, 'seq2seq.ckpt') self.forecaster.save(tmp_dir_file) self.forecaster.load(tmp_dir_file) from", "self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0]) with tempfile.TemporaryDirectory() as tmp_dir_file:", "Copyright 2016 The BigDL Authors. # # Licensed under the", "law or agreed to in writing, software # distributed under", "def test_seq2seq_fit_tf_data(self): train_data, test_data = create_data(tf_data=True) self.forecaster.fit(train_data, epochs=2) yhat =", "= 400 input_feature_num = 10 output_feature_num = 2 past_seq_len =", "np import tensorflow as tf def create_data(tf_data=False, batch_size=32): train_num_samples =", "governing permissions and # limitations under the License. # import", "np.random.randn(num_sample, past_seq_len, input_feature_num) y = np.random.randn(num_sample, future_seq_len, output_feature_num) return x,", "self.forecaster.predict(test_data) assert yhat.shape == (400, 2, 2) def test_seq2seq_save_load(self): train_data,", "train_data, test_data = create_data(tf_data=True) self.forecaster.fit(train_data, epochs=2) yhat = self.forecaster.predict(test_data) assert", "= 10 future_seq_len = 2 def get_x_y(num_sample): x = np.random.randn(num_sample,", "self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0]) assert yhat.shape == (400,", "implied. 
# See the License for the specific language governing", "under the Apache License, Version 2.0 (the \"License\"); # you", "test_data = get_x_y(test_num_samples) if tf_data: from_tensor_slices = tf.data.Dataset.from_tensor_slices train_data =", "\"License\"); # you may not use this file except in", "past_seq_len = 10 future_seq_len = 2 def get_x_y(num_sample): x =", "test_seq2seq_fit_tf_data(self): train_data, test_data = create_data(tf_data=True) self.forecaster.fit(train_data, epochs=2) yhat = self.forecaster.predict(test_data)", "<filename>python/chronos/test/bigdl/chronos/forecaster/tf/test_seq2seq_keras_forecaster.py # # Copyright 2016 The BigDL Authors. # #", "@pytest.mark.skipif(tf.__version__ < '2.0.0', reason=\"Run only when tf > 2.0.0.\") class", "yhat.shape == (400, 2, 2) np.testing.assert_almost_equal(yhat, load_model_yhat, decimal=5) if __name__", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "(400, 2, 2) np.testing.assert_almost_equal(yhat, load_model_yhat, decimal=5) if __name__ == '__main__':", "TestSeq2SeqForecaster(TestCase): def setUp(self): from bigdl.chronos.forecaster.tf.seq2seq_forecaster import Seq2SeqForecaster self.forecaster = Seq2SeqForecaster(past_seq_len=10,", "epochs=2, batch_size=32) yhat = self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2,", "output_feature_num) return x, y train_data = get_x_y(train_num_samples) test_data = get_x_y(test_num_samples)", "tmp_dir_file = os.path.join(tmp_dir_file, 'seq2seq.ckpt') self.forecaster.save(tmp_dir_file) self.forecaster.load(tmp_dir_file) from bigdl.chronos.model.tf2.Seq2Seq_keras import LSTMSeq2Seq", "import LSTMSeq2Seq assert isinstance(self.forecaster.internal, LSTMSeq2Seq) load_model_yhat = self.forecaster.predict(test_data[0]) assert yhat.shape", "return x, y train_data = get_x_y(train_num_samples) test_data = get_x_y(test_num_samples) if", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", 
"OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "import tensorflow as tf def create_data(tf_data=False, batch_size=32): train_num_samples = 1000", "input_feature_num=10, output_feature_num=2) def tearDown(self): pass def test_seq2seq_fit_predict_evaluate(self): train_data, test_data =", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "2, 2) np.testing.assert_almost_equal(yhat, load_model_yhat, decimal=5) if __name__ == '__main__': pytest.main([__file__])", "test_seq2seq_save_load(self): train_data, test_data = create_data() self.forecaster.fit(train_data, epochs=2, batch_size=32) yhat =", "y = np.random.randn(num_sample, future_seq_len, output_feature_num) return x, y train_data =", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "class TestSeq2SeqForecaster(TestCase): def setUp(self): from bigdl.chronos.forecaster.tf.seq2seq_forecaster import Seq2SeqForecaster self.forecaster =", "output_feature_num = 2 past_seq_len = 10 future_seq_len = 2 def", "train_data, test_data @pytest.mark.skipif(tf.__version__ < '2.0.0', reason=\"Run only when tf >", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "output_feature_num=2) def tearDown(self): pass def test_seq2seq_fit_predict_evaluate(self): train_data, test_data = create_data()", "to in writing, software # distributed under the License is", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "TestCase import numpy as np import tensorflow as tf def", "yhat = self.forecaster.predict(test_data[0]) with 
tempfile.TemporaryDirectory() as tmp_dir_file: tmp_dir_file = os.path.join(tmp_dir_file,", "= np.random.randn(num_sample, past_seq_len, input_feature_num) y = np.random.randn(num_sample, future_seq_len, output_feature_num) return", "get_x_y(num_sample): x = np.random.randn(num_sample, past_seq_len, input_feature_num) y = np.random.randn(num_sample, future_seq_len,", "You may obtain a copy of the License at #", "permissions and # limitations under the License. # import pytest", "if tf_data: from_tensor_slices = tf.data.Dataset.from_tensor_slices train_data = from_tensor_slices(train_data).cache()\\ .shuffle(train_num_samples)\\ .batch(batch_size)\\", "language governing permissions and # limitations under the License. #", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "2 past_seq_len = 10 future_seq_len = 2 def get_x_y(num_sample): x", "get_x_y(test_num_samples) if tf_data: from_tensor_slices = tf.data.Dataset.from_tensor_slices train_data = from_tensor_slices(train_data).cache()\\ .shuffle(train_num_samples)\\", "from unittest import TestCase import numpy as np import tensorflow", "= self.forecaster.predict(test_data) assert yhat.shape == (400, 2, 2) def test_seq2seq_save_load(self):", "required by applicable law or agreed to in writing, software", "import Seq2SeqForecaster self.forecaster = Seq2SeqForecaster(past_seq_len=10, future_seq_len=2, input_feature_num=10, output_feature_num=2) def tearDown(self):", "return train_data, test_data @pytest.mark.skipif(tf.__version__ < '2.0.0', reason=\"Run only when tf", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "= self.forecaster.evaluate(test_data, multioutput=\"raw_values\") assert mse[0].shape == test_data[-1].shape[1:] def test_seq2seq_fit_tf_data(self): train_data,", "self.forecaster.fit(train_data, epochs=2) yhat = self.forecaster.predict(test_data) assert 
yhat.shape == (400, 2,", "# Copyright 2016 The BigDL Authors. # # Licensed under", "with the License. # You may obtain a copy of", "this file except in compliance with the License. # You", "2016 The BigDL Authors. # # Licensed under the Apache", "the Apache License, Version 2.0 (the \"License\"); # you may", "10 future_seq_len = 2 def get_x_y(num_sample): x = np.random.randn(num_sample, past_seq_len,", "import pytest import tempfile import os from unittest import TestCase", "from_tensor_slices(test_data).cache()\\ .batch(batch_size)\\ .prefetch(tf.data.AUTOTUNE) return train_data, test_data @pytest.mark.skipif(tf.__version__ < '2.0.0', reason=\"Run", "only when tf > 2.0.0.\") class TestSeq2SeqForecaster(TestCase): def setUp(self): from", "def setUp(self): from bigdl.chronos.forecaster.tf.seq2seq_forecaster import Seq2SeqForecaster self.forecaster = Seq2SeqForecaster(past_seq_len=10, future_seq_len=2,", ".prefetch(tf.data.AUTOTUNE) return train_data, test_data @pytest.mark.skipif(tf.__version__ < '2.0.0', reason=\"Run only when", "as np import tensorflow as tf def create_data(tf_data=False, batch_size=32): train_num_samples", "from bigdl.chronos.forecaster.tf.seq2seq_forecaster import Seq2SeqForecaster self.forecaster = Seq2SeqForecaster(past_seq_len=10, future_seq_len=2, input_feature_num=10, output_feature_num=2)", "self.forecaster.save(tmp_dir_file) self.forecaster.load(tmp_dir_file) from bigdl.chronos.model.tf2.Seq2Seq_keras import LSTMSeq2Seq assert isinstance(self.forecaster.internal, LSTMSeq2Seq) load_model_yhat", "= create_data(tf_data=True) self.forecaster.fit(train_data, epochs=2) yhat = self.forecaster.predict(test_data) assert yhat.shape ==", "LSTMSeq2Seq) load_model_yhat = self.forecaster.predict(test_data[0]) assert yhat.shape == (400, 2, 2)", "= np.random.randn(num_sample, future_seq_len, output_feature_num) return x, y train_data = get_x_y(train_num_samples)", "batch_size=32): train_num_samples = 1000 test_num_samples = 400 input_feature_num = 10", 
"import TestCase import numpy as np import tensorflow as tf" ]
[ "vessel.flight(), 'mean_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(mean_altitude), conn.krpc.Expression.constant_double(10000)) event = conn.krpc.add_event(expr)", "vessel.auto_pilot.disengage() srf_altitude = conn.get_call(getattr, vessel.flight(), 'surface_altitude') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(srf_altitude),", "separation') vessel.control.activate_next_stage() mean_altitude = conn.get_call(getattr, vessel.flight(), 'mean_altitude') expr = conn.krpc.Expression.greater_than(", "expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(mean_altitude), conn.krpc.Expression.constant_double(10000)) event = conn.krpc.add_event(expr) with event.condition:", "= conn.krpc.add_event(expr) with event.condition: event.wait() print('Launch stage separation') vessel.control.throttle =", "'surface_altitude') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(srf_altitude), conn.krpc.Expression.constant_double(1000)) event = conn.krpc.add_event(expr) with", "time.sleep(1) print('Launch!') vessel.control.activate_next_stage() fuel_amount = conn.get_call(vessel.resources.amount, 'SolidFuel') expr = conn.krpc.Expression.less_than(", "separation') vessel.control.throttle = 0 time.sleep(1) vessel.control.activate_next_stage() vessel.auto_pilot.disengage() srf_altitude = conn.get_call(getattr,", "= conn.krpc.Expression.greater_than( conn.krpc.Expression.call(apoapsis_altitude), conn.krpc.Expression.constant_double(100000)) event = conn.krpc.add_event(expr) with event.condition: event.wait()", "conn.space_center.active_vessel vessel.auto_pilot.target_pitch_and_heading(90, 90) vessel.auto_pilot.engage() vessel.control.throttle = 1 time.sleep(1) print('Launch!') vessel.control.activate_next_stage()", "conn.get_call(getattr, vessel.orbit, 'apoapsis_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(apoapsis_altitude), conn.krpc.Expression.constant_double(100000)) 
event =", "conn = krpc.connect(name='Sub-orbital flight') vessel = conn.space_center.active_vessel vessel.auto_pilot.target_pitch_and_heading(90, 90) vessel.auto_pilot.engage()", "with event.condition: event.wait() print('Booster separation') vessel.control.activate_next_stage() mean_altitude = conn.get_call(getattr, vessel.flight(),", "import time import krpc conn = krpc.connect(name='Sub-orbital flight') vessel =", "event.condition: event.wait() print('Booster separation') vessel.control.activate_next_stage() mean_altitude = conn.get_call(getattr, vessel.flight(), 'mean_altitude')", "90) apoapsis_altitude = conn.get_call(getattr, vessel.orbit, 'apoapsis_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(apoapsis_altitude),", "'apoapsis_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(apoapsis_altitude), conn.krpc.Expression.constant_double(100000)) event = conn.krpc.add_event(expr) with", "event.wait() print('Gravity turn') vessel.auto_pilot.target_pitch_and_heading(60, 90) apoapsis_altitude = conn.get_call(getattr, vessel.orbit, 'apoapsis_altitude')", "time import krpc conn = krpc.connect(name='Sub-orbital flight') vessel = conn.space_center.active_vessel", "1 time.sleep(1) print('Launch!') vessel.control.activate_next_stage() fuel_amount = conn.get_call(vessel.resources.amount, 'SolidFuel') expr =", "vessel.orbit, 'apoapsis_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(apoapsis_altitude), conn.krpc.Expression.constant_double(100000)) event = conn.krpc.add_event(expr)", "krpc.connect(name='Sub-orbital flight') vessel = conn.space_center.active_vessel vessel.auto_pilot.target_pitch_and_heading(90, 90) vessel.auto_pilot.engage() vessel.control.throttle =", "'SolidFuel') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(fuel_amount), conn.krpc.Expression.constant_float(0.1)) event = conn.krpc.add_event(expr) with", "stage separation') vessel.control.throttle = 
0 time.sleep(1) vessel.control.activate_next_stage() vessel.auto_pilot.disengage() srf_altitude =", "vessel.auto_pilot.target_pitch_and_heading(90, 90) vessel.auto_pilot.engage() vessel.control.throttle = 1 time.sleep(1) print('Launch!') vessel.control.activate_next_stage() fuel_amount", "= conn.krpc.Expression.less_than( conn.krpc.Expression.call(fuel_amount), conn.krpc.Expression.constant_float(0.1)) event = conn.krpc.add_event(expr) with event.condition: event.wait()", "srf_altitude = conn.get_call(getattr, vessel.flight(), 'surface_altitude') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(srf_altitude), conn.krpc.Expression.constant_double(1000))", "conn.krpc.Expression.constant_float(0.1)) event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Booster separation') vessel.control.activate_next_stage()", "print('Launch stage separation') vessel.control.throttle = 0 time.sleep(1) vessel.control.activate_next_stage() vessel.auto_pilot.disengage() srf_altitude", "with event.condition: event.wait() vessel.control.activate_next_stage() while vessel.flight(vessel.orbit.body.reference_frame).vertical_speed < -0.1: print('Altitude =", "event.wait() vessel.control.activate_next_stage() while vessel.flight(vessel.orbit.body.reference_frame).vertical_speed < -0.1: print('Altitude = %.1f meters'", "conn.krpc.Expression.call(srf_altitude), conn.krpc.Expression.constant_double(1000)) event = conn.krpc.add_event(expr) with event.condition: event.wait() vessel.control.activate_next_stage() while", "conn.krpc.add_event(expr) with event.condition: event.wait() print('Gravity turn') vessel.auto_pilot.target_pitch_and_heading(60, 90) apoapsis_altitude =", "time.sleep(1) vessel.control.activate_next_stage() vessel.auto_pilot.disengage() srf_altitude = conn.get_call(getattr, vessel.flight(), 'surface_altitude') expr =", "expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(srf_altitude), conn.krpc.Expression.constant_double(1000)) 
event = conn.krpc.add_event(expr) with event.condition:", "conn.krpc.Expression.call(fuel_amount), conn.krpc.Expression.constant_float(0.1)) event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Booster separation')", "event.condition: event.wait() vessel.control.activate_next_stage() while vessel.flight(vessel.orbit.body.reference_frame).vertical_speed < -0.1: print('Altitude = %.1f", "vessel.control.activate_next_stage() mean_altitude = conn.get_call(getattr, vessel.flight(), 'mean_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(mean_altitude),", "= conn.space_center.active_vessel vessel.auto_pilot.target_pitch_and_heading(90, 90) vessel.auto_pilot.engage() vessel.control.throttle = 1 time.sleep(1) print('Launch!')", "event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Launch stage separation') vessel.control.throttle", "event.condition: event.wait() print('Gravity turn') vessel.auto_pilot.target_pitch_and_heading(60, 90) apoapsis_altitude = conn.get_call(getattr, vessel.orbit,", "vessel.auto_pilot.target_pitch_and_heading(60, 90) apoapsis_altitude = conn.get_call(getattr, vessel.orbit, 'apoapsis_altitude') expr = conn.krpc.Expression.greater_than(", "conn.get_call(getattr, vessel.flight(), 'surface_altitude') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(srf_altitude), conn.krpc.Expression.constant_double(1000)) event =", "fuel_amount = conn.get_call(vessel.resources.amount, 'SolidFuel') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(fuel_amount), conn.krpc.Expression.constant_float(0.1)) event", "mean_altitude = conn.get_call(getattr, vessel.flight(), 'mean_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(mean_altitude), conn.krpc.Expression.constant_double(10000))", "= conn.krpc.add_event(expr) with event.condition: event.wait() print('Gravity turn') vessel.auto_pilot.target_pitch_and_heading(60, 90) 
apoapsis_altitude", "conn.get_call(vessel.resources.amount, 'SolidFuel') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(fuel_amount), conn.krpc.Expression.constant_float(0.1)) event = conn.krpc.add_event(expr)", "vessel.control.activate_next_stage() vessel.auto_pilot.disengage() srf_altitude = conn.get_call(getattr, vessel.flight(), 'surface_altitude') expr = conn.krpc.Expression.less_than(", "= krpc.connect(name='Sub-orbital flight') vessel = conn.space_center.active_vessel vessel.auto_pilot.target_pitch_and_heading(90, 90) vessel.auto_pilot.engage() vessel.control.throttle", "conn.krpc.Expression.constant_double(1000)) event = conn.krpc.add_event(expr) with event.condition: event.wait() vessel.control.activate_next_stage() while vessel.flight(vessel.orbit.body.reference_frame).vertical_speed", "apoapsis_altitude = conn.get_call(getattr, vessel.orbit, 'apoapsis_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(apoapsis_altitude), conn.krpc.Expression.constant_double(100000))", "with event.condition: event.wait() print('Launch stage separation') vessel.control.throttle = 0 time.sleep(1)", "print('Gravity turn') vessel.auto_pilot.target_pitch_and_heading(60, 90) apoapsis_altitude = conn.get_call(getattr, vessel.orbit, 'apoapsis_altitude') expr", "= conn.get_call(getattr, vessel.orbit, 'apoapsis_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(apoapsis_altitude), conn.krpc.Expression.constant_double(100000)) event", "conn.krpc.Expression.less_than( conn.krpc.Expression.call(srf_altitude), conn.krpc.Expression.constant_double(1000)) event = conn.krpc.add_event(expr) with event.condition: event.wait() vessel.control.activate_next_stage()", "conn.krpc.Expression.constant_double(100000)) event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Launch stage separation')", "while vessel.flight(vessel.orbit.body.reference_frame).vertical_speed < -0.1: print('Altitude = %.1f 
meters' % vessel.flight().surface_altitude)", "event.condition: event.wait() print('Launch stage separation') vessel.control.throttle = 0 time.sleep(1) vessel.control.activate_next_stage()", "= 0 time.sleep(1) vessel.control.activate_next_stage() vessel.auto_pilot.disengage() srf_altitude = conn.get_call(getattr, vessel.flight(), 'surface_altitude')", "conn.krpc.Expression.greater_than( conn.krpc.Expression.call(mean_altitude), conn.krpc.Expression.constant_double(10000)) event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Gravity", "conn.krpc.Expression.greater_than( conn.krpc.Expression.call(apoapsis_altitude), conn.krpc.Expression.constant_double(100000)) event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Launch", "import krpc conn = krpc.connect(name='Sub-orbital flight') vessel = conn.space_center.active_vessel vessel.auto_pilot.target_pitch_and_heading(90,", "event.wait() print('Launch stage separation') vessel.control.throttle = 0 time.sleep(1) vessel.control.activate_next_stage() vessel.auto_pilot.disengage()", "vessel = conn.space_center.active_vessel vessel.auto_pilot.target_pitch_and_heading(90, 90) vessel.auto_pilot.engage() vessel.control.throttle = 1 time.sleep(1)", "90) vessel.auto_pilot.engage() vessel.control.throttle = 1 time.sleep(1) print('Launch!') vessel.control.activate_next_stage() fuel_amount =", "< -0.1: print('Altitude = %.1f meters' % vessel.flight().surface_altitude) time.sleep(1) print('Landed!')", "0 time.sleep(1) vessel.control.activate_next_stage() vessel.auto_pilot.disengage() srf_altitude = conn.get_call(getattr, vessel.flight(), 'surface_altitude') expr", "conn.krpc.Expression.less_than( conn.krpc.Expression.call(fuel_amount), conn.krpc.Expression.constant_float(0.1)) event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Booster", "event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Gravity turn') 
vessel.auto_pilot.target_pitch_and_heading(60, 90)", "print('Launch!') vessel.control.activate_next_stage() fuel_amount = conn.get_call(vessel.resources.amount, 'SolidFuel') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(fuel_amount),", "event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Booster separation') vessel.control.activate_next_stage() mean_altitude", "print('Booster separation') vessel.control.activate_next_stage() mean_altitude = conn.get_call(getattr, vessel.flight(), 'mean_altitude') expr =", "conn.krpc.add_event(expr) with event.condition: event.wait() vessel.control.activate_next_stage() while vessel.flight(vessel.orbit.body.reference_frame).vertical_speed < -0.1: print('Altitude", "conn.krpc.add_event(expr) with event.condition: event.wait() print('Booster separation') vessel.control.activate_next_stage() mean_altitude = conn.get_call(getattr,", "expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(apoapsis_altitude), conn.krpc.Expression.constant_double(100000)) event = conn.krpc.add_event(expr) with event.condition:", "= conn.krpc.add_event(expr) with event.condition: event.wait() vessel.control.activate_next_stage() while vessel.flight(vessel.orbit.body.reference_frame).vertical_speed < -0.1:", "conn.krpc.Expression.call(apoapsis_altitude), conn.krpc.Expression.constant_double(100000)) event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Launch stage", "vessel.control.throttle = 0 time.sleep(1) vessel.control.activate_next_stage() vessel.auto_pilot.disengage() srf_altitude = conn.get_call(getattr, vessel.flight(),", "vessel.control.activate_next_stage() while vessel.flight(vessel.orbit.body.reference_frame).vertical_speed < -0.1: print('Altitude = %.1f meters' %", "vessel.flight(vessel.orbit.body.reference_frame).vertical_speed < -0.1: print('Altitude = %.1f meters' % vessel.flight().surface_altitude) time.sleep(1)", "turn') 
vessel.auto_pilot.target_pitch_and_heading(60, 90) apoapsis_altitude = conn.get_call(getattr, vessel.orbit, 'apoapsis_altitude') expr =", "= conn.get_call(getattr, vessel.flight(), 'mean_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(mean_altitude), conn.krpc.Expression.constant_double(10000)) event", "event = conn.krpc.add_event(expr) with event.condition: event.wait() vessel.control.activate_next_stage() while vessel.flight(vessel.orbit.body.reference_frame).vertical_speed <", "= conn.get_call(getattr, vessel.flight(), 'surface_altitude') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(srf_altitude), conn.krpc.Expression.constant_double(1000)) event", "= 1 time.sleep(1) print('Launch!') vessel.control.activate_next_stage() fuel_amount = conn.get_call(vessel.resources.amount, 'SolidFuel') expr", "conn.get_call(getattr, vessel.flight(), 'mean_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(mean_altitude), conn.krpc.Expression.constant_double(10000)) event =", "= conn.krpc.add_event(expr) with event.condition: event.wait() print('Booster separation') vessel.control.activate_next_stage() mean_altitude =", "vessel.control.throttle = 1 time.sleep(1) print('Launch!') vessel.control.activate_next_stage() fuel_amount = conn.get_call(vessel.resources.amount, 'SolidFuel')", "'mean_altitude') expr = conn.krpc.Expression.greater_than( conn.krpc.Expression.call(mean_altitude), conn.krpc.Expression.constant_double(10000)) event = conn.krpc.add_event(expr) with", "= conn.krpc.Expression.less_than( conn.krpc.Expression.call(srf_altitude), conn.krpc.Expression.constant_double(1000)) event = conn.krpc.add_event(expr) with event.condition: event.wait()", "event.wait() print('Booster separation') vessel.control.activate_next_stage() mean_altitude = conn.get_call(getattr, vessel.flight(), 'mean_altitude') expr", "expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(fuel_amount), 
conn.krpc.Expression.constant_float(0.1)) event = conn.krpc.add_event(expr) with event.condition:", "krpc conn = krpc.connect(name='Sub-orbital flight') vessel = conn.space_center.active_vessel vessel.auto_pilot.target_pitch_and_heading(90, 90)", "conn.krpc.add_event(expr) with event.condition: event.wait() print('Launch stage separation') vessel.control.throttle = 0", "vessel.auto_pilot.engage() vessel.control.throttle = 1 time.sleep(1) print('Launch!') vessel.control.activate_next_stage() fuel_amount = conn.get_call(vessel.resources.amount,", "= conn.krpc.Expression.greater_than( conn.krpc.Expression.call(mean_altitude), conn.krpc.Expression.constant_double(10000)) event = conn.krpc.add_event(expr) with event.condition: event.wait()", "vessel.control.activate_next_stage() fuel_amount = conn.get_call(vessel.resources.amount, 'SolidFuel') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(fuel_amount), conn.krpc.Expression.constant_float(0.1))", "flight') vessel = conn.space_center.active_vessel vessel.auto_pilot.target_pitch_and_heading(90, 90) vessel.auto_pilot.engage() vessel.control.throttle = 1", "= conn.get_call(vessel.resources.amount, 'SolidFuel') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(fuel_amount), conn.krpc.Expression.constant_float(0.1)) event =", "vessel.flight(), 'surface_altitude') expr = conn.krpc.Expression.less_than( conn.krpc.Expression.call(srf_altitude), conn.krpc.Expression.constant_double(1000)) event = conn.krpc.add_event(expr)", "with event.condition: event.wait() print('Gravity turn') vessel.auto_pilot.target_pitch_and_heading(60, 90) apoapsis_altitude = conn.get_call(getattr,", "conn.krpc.Expression.call(mean_altitude), conn.krpc.Expression.constant_double(10000)) event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Gravity turn')", "conn.krpc.Expression.constant_double(10000)) event = conn.krpc.add_event(expr) with event.condition: event.wait() print('Gravity turn') 
vessel.auto_pilot.target_pitch_and_heading(60," ]
[ "import numpy as np def get_rows(a): return list(a) def get_columns(a):", "main(): np.random.seed(0) a=np.random.randint(0,10, (4,4)) print(\"a:\", a) print(\"Rows:\", get_rows(a)) print(\"Columns:\", get_columns(a))", "return list(a.T) def main(): np.random.seed(0) a=np.random.randint(0,10, (4,4)) print(\"a:\", a) print(\"Rows:\",", "as np def get_rows(a): return list(a) def get_columns(a): return list(a.T)", "def get_rows(a): return list(a) def get_columns(a): return list(a.T) def main():", "list(a.T) def main(): np.random.seed(0) a=np.random.randint(0,10, (4,4)) print(\"a:\", a) print(\"Rows:\", get_rows(a))", "a) print(\"Rows:\", get_rows(a)) print(\"Columns:\", get_columns(a)) if __name__ == \"__main__\": main()", "get_rows(a): return list(a) def get_columns(a): return list(a.T) def main(): np.random.seed(0)", "#!/usr/bin/env python3 import numpy as np def get_rows(a): return list(a)", "numpy as np def get_rows(a): return list(a) def get_columns(a): return", "get_columns(a): return list(a.T) def main(): np.random.seed(0) a=np.random.randint(0,10, (4,4)) print(\"a:\", a)", "return list(a) def get_columns(a): return list(a.T) def main(): np.random.seed(0) a=np.random.randint(0,10,", "(4,4)) print(\"a:\", a) print(\"Rows:\", get_rows(a)) print(\"Columns:\", get_columns(a)) if __name__ ==", "def get_columns(a): return list(a.T) def main(): np.random.seed(0) a=np.random.randint(0,10, (4,4)) print(\"a:\",", "np.random.seed(0) a=np.random.randint(0,10, (4,4)) print(\"a:\", a) print(\"Rows:\", get_rows(a)) print(\"Columns:\", get_columns(a)) if", "list(a) def get_columns(a): return list(a.T) def main(): np.random.seed(0) a=np.random.randint(0,10, (4,4))", "np def get_rows(a): return list(a) def get_columns(a): return list(a.T) def", "a=np.random.randint(0,10, (4,4)) print(\"a:\", a) print(\"Rows:\", get_rows(a)) print(\"Columns:\", get_columns(a)) if __name__", "<gh_stars>0 #!/usr/bin/env python3 import numpy as np def get_rows(a): return", "def main(): 
np.random.seed(0) a=np.random.randint(0,10, (4,4)) print(\"a:\", a) print(\"Rows:\", get_rows(a)) print(\"Columns:\",", "print(\"a:\", a) print(\"Rows:\", get_rows(a)) print(\"Columns:\", get_columns(a)) if __name__ == \"__main__\":", "python3 import numpy as np def get_rows(a): return list(a) def" ]
[ "rank leaderboard_df = leaderboard_df.sort_values( by=['public ' + score_name, 'submitted at", "= session.query(Event).filter_by(name=event_name).one() event_teams = session.query(EventTeam).filter_by(event=event).all() for event_team in event_teams: user_name", "(df_scores_mean.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_std = (df_scores_std.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_bag =", "make some extra filtering submissions = [sub for sub in", "perform the operation on the database. event_name : str The", "contributivity columns are null contrib_columns = ['contributivity', 'historical contributivity'] if", "[leaderboard_type + ' rank', 'team', 'submission', leaderboard_type + ' '", "column, value in zip( columns, [sub.event_team.team.name, sub.name_with_link, pd.Timestamp(sub.submission_timestamp), (sub.state_with_link if", "str The event name. user_name : None or str, default", "rank'] leaderboard_df['move'] = [ '{:+d}'.format(m) if m != 0 else", "df_scores_mean = df_scores.groupby('step').mean() df_scores_std = df_scores.groupby('step').std() # select only the", "df_html = df.to_html(escape=False, index=False, max_cols=None, max_rows=None, justify='left') df_html = '<thead>", ": bool Whether or not the submission name should be", ": dataframe The competition leaderboard in a dataframe format. \"\"\"", "a user leaderboards for a given event. 
Parameters ---------- session", "df.columns = df.columns.set_names(['stat', 'set', 'score']) # change the multi-index into", "leaderboard_df.drop(columns='best') # dealing with ties: we need the lowest timestamp", "leaderboard_type, event_name, with_links=with_links ) elif leaderboard_type in ['new', 'failed']: if", "score_list = [ '{} {} {}'.format(stat, dataset, score) for dataset,", "= get_leaderboard( session, 'public', event_name ) event.public_leaderboard_html_no_links = get_leaderboard( session,", "'public ' + score_name], right_on=['team', 'public ' + score_name] )", "= get_leaderboard( session, 'failed', event_name, user_name ) event_team.new_leaderboard_html = get_leaderboard(", "will be queried. new_only : bool, default is False Whether", "leaderboard_type == 'new': columns = ['team', 'submission', 'submitted at (UTC)',", "user_name ) event_team.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name, user_name )", "None competition_type = ('public' if 'public' in leaderboard_type else 'private')", "event. 
Returns ------- competition_leaderboard : dataframe The competition leaderboard in", "else ['train time [s]', 'validation time [s]']) col_ordered = (", ":] df_scores_bag.index = df_scores_bag.index.droplevel('n_bag') df_scores_bag = df_scores_bag.round(map_score_precision) df_scores = get_scores(session,", "= leaderboard_df.groupby('team').min() best_df = best_df[['submitted at (UTC)']].reset_index() best_df['best'] = True", "pd.concat(record_score, axis=0, ignore_index=True, sort=False) # keep only second precision for", "[s]', 'test time [s]'] if leaderboard_type == 'private' else ['train", "new_only: event.private_leaderboard_html = get_leaderboard( session, 'private', event_name ) event.public_leaderboard_html_with_links =", "event_name, user_name ) event_team.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name, user_name", "from .submission import get_submission_max_ram from .submission import get_time width =", "np.arange(len(leaderboard_df)) + 1 # sort by private score then by", "event_name, with_links=False) time_list = (['train time [s]', 'validation time [s]',", "ignore_index=True, sort=False) # keep only second precision for the time", "if sub.is_public_leaderboard] if not submissions: return None competition_type = ('public'", "'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['private rank'] = np.arange(len(leaderboard_df)) +", "Returns ------- competition_leaderboard : dataframe The competition leaderboard in a", "submissions] df = pd.DataFrame(data, columns=columns) else: # make some extra", "stack all the records df = pd.concat(record_score, axis=0, ignore_index=True, sort=False)", "zip( columns, [sub.event_team.team.name, sub.name_with_link, pd.Timestamp(sub.submission_timestamp), (sub.state_with_link if leaderboard_type == 'failed'", "display train and validation time for the public leaderboard time_list", "the database. 
\"\"\" event = session.query(Event).filter_by(name=event_name).one() if not new_only: event.private_leaderboard_html", "get_leaderboard( session, 'private', event_name ) event.public_leaderboard_html_with_links = get_leaderboard( session, 'public',", "= {score_type.name: score_type.precision for score_type in event.score_types} for sub in", "pd.concat([df, df_time], axis=1) if leaderboard_type == 'private': df['submission ID'] =", "distutils.version import LooseVersion from itertools import product import numpy as", "' + score_name] + time_list + ['submitted at (UTC)']) leaderboard_df", "\"\"\"Get a leaderboard. Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session", "update_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards for a given event.", "['submitted at (UTC)']) leaderboard_df = private_leaderboard[col_selected_private] leaderboard_df = leaderboard_df.rename( columns={'bag", "'test': 'test time [s]'} ) df = pd.concat([df, df_time], axis=1)", "df['team'] = sub.team.name df['submission'] = sub.name_with_link if with_links else sub.name", "= session.query(Event).filter_by(name=event_name).one() map_score_precision = {score_type.name: score_type.precision for score_type in event.score_types}", "for score_type in event.score_types if score_type.name != event.official_score_name]) score_list =", ": bool, default is False Whether or not to update", "the leaderboards for a given event. Parameters ---------- session :", "leaderboard_df = leaderboard_df.rename( columns={'bag private ' + score_name: 'private '", "submission to report in the leaderboard. leaderboard_type : {'public', 'private'}", "when adding a new submission in the database. 
\"\"\" event_team", "= event_team.team.name if not new_only: event_team.leaderboard_html = get_leaderboard( session, 'public',", "x: \" \".join(x)) # add the aggregated time information df_time.index", "at (UTC)', 'state'] else: columns = ['team', 'submission', 'submitted at", "session to directly perform the operation on the database. leaderboard_type", "leaderboard_type == 'private': df['submission ID'] = sub.basename.replace('submission_', '') df['team'] =", "get_scores(session, sub.id) df_scores = df_scores.round(map_score_precision) df_time = get_time(session, sub.id) df_time", "df def get_leaderboard(session, leaderboard_type, event_name, user_name=None, with_links=True): \"\"\"Get a leaderboard.", "sub in submissions] df = pd.DataFrame(data, columns=columns) else: # make", "rename the column name for the public leaderboard if leaderboard_type", "the event. with_links : bool Whether or not the submission", "time [s]'} ) df = pd.concat([df, df_time], axis=1) if leaderboard_type", "score_type.is_lower_the_better else leaderboard_df.groupby('team').max()) best_df = best_df[['public ' + score_name]].reset_index() best_df['best']", "* sub.historical_contributivity)) df['max RAM [MB]'] = get_submission_max_ram(session, sub.id) df['submitted at", "type of leaderboard to generate. event_name : str The event", "score_name: score_name, leaderboard_type + ' rank': 'rank' }) df =", "event_name ) df_html = df.to_html(escape=False, index=False, max_cols=None, max_rows=None, justify='left') df_html", "event_name, new_only=False): \"\"\"Update the leaderboards for all users for a", "new_only : bool, default is False Whether or not to", "in the leaderboard. 
leaderboard_type : {'public', 'private'} The type of", "data = [{ column: value for column, value in zip(", "= int(round(100 * sub.contributivity)) df['historical contributivity'] = int(round( 100 *", "event_team in event_teams: user_name = event_team.team.name if not new_only: event_team.leaderboard_html", "df_scores_bag.index.droplevel('n_bag') df_scores_bag = df_scores_bag.round(map_score_precision) df_scores = get_scores(session, sub.id) df_scores =", "sub.id) df['submitted at (UTC)'] = pd.Timestamp(sub.submission_timestamp) record_score.append(df) # stack all", "return df def get_leaderboard(session, leaderboard_type, event_name, user_name=None, with_links=True): \"\"\"Get a", "should be clickable. Returns ------- leaderboard : str The leaderboard", "' + score_name} ) # select best submission for each", "['bag private ' + score_name, 'bag public ' + score_name]", "a new submission in the database. \"\"\" event = session.query(Event).filter_by(name=event_name).one()", "'new', event_name, user_name ) session.commit() def update_all_user_leaderboards(session, event_name, new_only=False): \"\"\"Update", "when adding a new submission in the database. 
\"\"\" event", "'new', \\ 'public competition', 'private competition'} The type of leaderboard", "update_user_leaderboards(session, event_name, user_name, new_only=False): \"\"\"Update the of a user leaderboards", "compute rank leaderboard_df = leaderboard_df.sort_values( by=['private ' + score_name, 'submitted", "leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # sort by public score then", ".submission import get_bagged_scores from .submission import get_scores from .submission import", "score_name = event.official_score_name private_leaderboard = _compute_leaderboard(session, submissions, 'private', event_name, with_links=False)", "[s]']) col_selected_private = (['team', 'submission'] + ['bag private ' +", "contributivity'] = int(round( 100 * sub.historical_contributivity)) df['max RAM [MB]'] =", "'failed', 'new', \\ 'public competition', 'private competition'} The type of", "rank'] = np.arange(len(leaderboard_df)) + 1 leaderboard_df['move'] = \\ leaderboard_df['public rank']", "itertools import product import numpy as np import pandas as", "with_links : bool Whether or not the submission name should", "= leaderboard_df.drop(columns='best') # dealing with ties: we need the lowest", "stacked index df.columns = df.columns.map(lambda x: \" \".join(x)) # add", "for sub in submissions if (getattr(sub, submission_filter[leaderboard_type]) and sub.is_not_sandbox)] if", "The leaderboard in a dataframe format. 
\"\"\" record_score = []", "user_name ) event_team.new_leaderboard_html = get_leaderboard( session, 'new', event_name, user_name )", "'public competition', event_name ) event.private_competition_leaderboard_html = get_leaderboard( session, 'private competition',", "df['max RAM [MB]'] = get_submission_max_ram(session, sub.id) df['submitted at (UTC)'] =", "else: # make some extra filtering submissions = [sub for", "..model.event import EventTeam from ..model.submission import Submission from ..model.team import", "df_time = get_time(session, sub.id) df_time = df_time.stack().to_frame() df_time.index = df_time.index.set_names(['fold',", ") if leaderboard_type == 'private': col_selected.insert(1, 'move') df = leaderboard_df[col_selected]", ":class:`sqlalchemy.orm.Session` The session to directly perform the operation on the", "time [s]', 'validation time [s]']) col_selected_private = (['team', 'submission'] +", "event_team.team.name if not new_only: event_team.leaderboard_html = get_leaderboard( session, 'public', event_name,", "df_time = df_time.stack().to_frame() df_time.index = df_time.index.set_names(['fold', 'step']) df_time = df_time.rename(columns={0:", "= get_bagged_scores(session, sub.id) highest_level = df_scores_bag.index.get_level_values('n_bag').max() df_scores_bag = df_scores_bag.loc[(slice(None), highest_level),", "df_scores_bag.index.get_level_values('n_bag').max() df_scores_bag = df_scores_bag.loc[(slice(None), highest_level), :] df_scores_bag.index = df_scores_bag.index.droplevel('n_bag') df_scores_bag", "1 leaderboard_df['move'] = \\ leaderboard_df['public rank'] - leaderboard_df['private rank'] leaderboard_df['move']", "submissions: return None if leaderboard_type in ['public', 'private']: df =", ") event_team.new_leaderboard_html = get_leaderboard( session, 'new', event_name, user_name ) session.commit()", "submissions : list of :class:`ramp_database.model.Submission` The submission to report in", "max_rows=None, justify='left') df_html = '<thead> 
{} </tbody>'.format( df_html.split('<thead>')[1].split('</tbody>')[0] ) return", "how='left', left_on=['team', 'submitted at (UTC)'], right_on=['team', 'submitted at (UTC)']) leaderboard_df", "[s]']) col_ordered = ( ['team', 'submission'] + score_list + ['contributivity',", "event.public_competition_leaderboard_html = get_leaderboard( session, 'public competition', event_name ) event.private_competition_leaderboard_html =", "event_teams: user_name = event_team.team.name if not new_only: event_team.leaderboard_html = get_leaderboard(", "or str, default is None The user name. If None,", "event_name : str The name of the event. Returns -------", "the database. event_name : str The event name. user_name :", "'private': 'is_private_leaderboard', 'failed': 'is_error', 'new': 'is_new', 'public competition': 'is_in_competition', 'private", "import Event from ..model.event import EventTeam from ..model.submission import Submission", "by submission timestamp, compute rank leaderboard_df = leaderboard_df.sort_values( by=['private '", "+ ' rank': 'rank' }) df = df.sort_values(by='rank') return df", "second precision for the time stamp df['submitted at (UTC)'] =", "col_selected_private = (['team', 'submission'] + ['bag private ' + score_name,", "------- competition_leaderboard : dataframe The competition leaderboard in a dataframe", "information df_time.index = df.index df_time = df_time.rename( columns={'train': 'train time", "sub.historical_contributivity)) df['max RAM [MB]'] = get_submission_max_ram(session, sub.id) df['submitted at (UTC)']", "'set', 'score']) # change the multi-index into a stacked index", "= df['submitted at (UTC)'].astype('datetime64[s]') # reordered the column stats_order =", "{} </tbody>'.format( df_html.split('<thead>')[1].split('</tbody>')[0] ) return df_html def update_leaderboards(session, event_name, new_only=False):", "get_leaderboard( session, 'private competition', event_name ) event.new_leaderboard_html = get_leaderboard( session,", "'public 
competition', 'private competition'} The type of leaderboard to generate.", "update_all_user_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards for all users for", "to # public and private map_renaming = {'valid': 'public', 'test':", "= pd.DataFrame(data, columns=columns) else: # make some extra filtering submissions", "df_html def update_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards for a", "submission name should be clickable. Returns ------- leaderboard : dataframe", "['public']) score_order = ([event.official_score_name] + [score_type.name for score_type in event.score_types", "The event name. user_name : None or str, default is", "sub.team.name df['submission'] = sub.name_with_link if with_links else sub.name df['contributivity'] =", "in the database. \"\"\" event_team = get_event_team_by_name(session, event_name, user_name) if", "testing steps and rename them to # public and private", "score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['public rank'] = np.arange(len(leaderboard_df))", "'mean', 'std'] if leaderboard_type == 'private' else ['bag']) dataset_order =", "df = pd.concat([df_scores_bag, df_scores_mean, df_scores_std], axis=1, keys=['bag', 'mean', 'std']) df.columns", "all users will be queried. 
This parameter is discarded when", "all the records df = pd.concat(record_score, axis=0, ignore_index=True, sort=False) #", "rely on the zip function ignore the submission state if", "'is_private_leaderboard', 'failed': 'is_error', 'new': 'is_new', 'public competition': 'is_in_competition', 'private competition':", "for event_team in event_teams: user_name = event_team.team.name if not new_only:", "default is True Whether or not the submission name should", "import Submission from ..model.team import Team from .team import get_event_team_by_name", "def update_user_leaderboards(session, event_name, user_name, new_only=False): \"\"\"Update the of a user", "'error'] # we rely on the zip function ignore the", "# merge to get a best indicator column then select", "public ' + score_name: 'public ' + score_name} ) #", "['team', 'submission', 'submitted at (UTC)', 'error'] # we rely on", "to True when adding a new submission in the database.", "str The name of the event. Returns ------- competition_leaderboard :", "submissions: return None competition_type = ('public' if 'public' in leaderboard_type", "score_type.name != event.official_score_name]) score_list = [ '{} {} {}'.format(stat, dataset,", "The type of leaderboard to built. event_name : str The", "filtering submissions = [sub for sub in submissions if sub.is_public_leaderboard]", "them to # public and private map_renaming = {'valid': 'public',", ": {'public', 'private', 'failed', 'new', \\ 'public competition', 'private competition'}", "for score_type in event.score_types} for sub in submissions: # take", "score_name] ) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df =", "get_leaderboard( session, 'public', event_name, user_name ) event_team.failed_leaderboard_html = get_leaderboard( session,", "leaderboard. 
Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session to directly", "then by submission timestamp, compute rank leaderboard_df = leaderboard_df.sort_values( by=['public", "submission in the database. \"\"\" event = session.query(Event).filter_by(name=event_name).one() event_teams =", "import get_time width = -1 if LooseVersion(pd.__version__) < LooseVersion(\"1.0.0\") else", "sub.state)]) } for sub in submissions] df = pd.DataFrame(data, columns=columns)", "'failed', event_name ) event.public_competition_leaderboard_html = get_leaderboard( session, 'public competition', event_name", "event.official_score_name), ascending=event.get_official_score_type(session).is_lower_the_better ) # rename the column name for the", "time_list + ['max RAM [MB]', 'submitted at (UTC)'] ) if", "event.official_score_name]) score_list = [ '{} {} {}'.format(stat, dataset, score) for", "score_name] + time_list + ['submitted at (UTC)'] ) if leaderboard_type", "queried. new_only : bool, default is False Whether or not", "event.score_types if score_type.name != event.official_score_name]) score_list = [ '{} {}", "highest_level), :] df_scores_bag.index = df_scores_bag.index.droplevel('n_bag') df_scores_bag = df_scores_bag.round(map_score_precision) df_scores =", "if (getattr(sub, submission_filter[leaderboard_type]) and sub.is_not_sandbox)] if not submissions: return None", "' + score_name] + time_list + ['submitted at (UTC)'] )", "record_score.append(df) # stack all the records df = pd.concat(record_score, axis=0,", "user_name=None, with_links=True): \"\"\"Get a leaderboard. Parameters ---------- session : :class:`sqlalchemy.orm.Session`", "be queried. 
This parameter is discarded when requesting the competition", "sub.name df['contributivity'] = int(round(100 * sub.contributivity)) df['historical contributivity'] = int(round(", "compute rank leaderboard_df = leaderboard_df.sort_values( by=['public ' + score_name, 'submitted", "submission_filter[leaderboard_type]) and sub.is_not_sandbox)] if not submissions: return None if leaderboard_type", "at (UTC)']) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df =", "'score']) # change the multi-index into a stacked index df.columns", "df.sort_values( \"bag {} {}\".format(leaderboard_type, event.official_score_name), ascending=event.get_official_score_type(session).is_lower_the_better ) # rename the", "n bag df_scores_bag = get_bagged_scores(session, sub.id) highest_level = df_scores_bag.index.get_level_values('n_bag').max() df_scores_bag", "sub in submissions if (getattr(sub, submission_filter[leaderboard_type]) and sub.is_not_sandbox)] if not", "event_team.leaderboard_html = get_leaderboard( session, 'public', event_name, user_name ) event_team.failed_leaderboard_html =", "The name of the event. Returns ------- competition_leaderboard : dataframe", "str The leaderboard in HTML format. 
\"\"\" q = (session.query(Submission)", "at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['private rank'] = np.arange(len(leaderboard_df)) + 1", "the aggregated time information df_time.index = df.index df_time = df_time.rename(", "= df.sort_values(by='rank') return df def get_leaderboard(session, leaderboard_type, event_name, user_name=None, with_links=True):", "new_only: event_team.leaderboard_html = get_leaderboard( session, 'public', event_name, user_name ) event_team.failed_leaderboard_html", "= df.sort_values( \"bag {} {}\".format(leaderboard_type, event.official_score_name), ascending=event.get_official_score_type(session).is_lower_the_better ) # rename", "= df.rename(columns={ leaderboard_type + ' ' + score_name: score_name, leaderboard_type", "# sort by public score then by submission timestamp, compute", "= [sub for sub in submissions if sub.is_public_leaderboard] if not", "score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['private rank'] = np.arange(len(leaderboard_df))", "{} {}'.format(stat, dataset, score) for dataset, score, stat in product(dataset_order,", "get_submission_max_ram(session, sub.id) df['submitted at (UTC)'] = pd.Timestamp(sub.submission_timestamp) record_score.append(df) # stack", ": str The name of the event. 
with_links : bool", "time information df_time.index = df.index df_time = df_time.rename( columns={'train': 'train", "= df.to_html(escape=False, index=False, max_cols=None, max_rows=None, justify='left') df_html = '<thead> {}", "None if leaderboard_type in ['public', 'private']: df = _compute_leaderboard( session,", "'submitted at (UTC)'], right_on=['team', 'submitted at (UTC)']) leaderboard_df = leaderboard_df.fillna(False)", "+ [score_type.name for score_type in event.score_types if score_type.name != event.official_score_name])", "'state'] else: columns = ['team', 'submission', 'submitted at (UTC)', 'error']", "df['submitted at (UTC)'].astype('datetime64[s]') # reordered the column stats_order = (['bag',", "all users will be queried. new_only : bool, default is", "col_ordered = ( ['team', 'submission'] + score_list + ['contributivity', 'historical", "if 'public' in leaderboard_type else 'private') df = _compute_competition_leaderboard( session,", "= [\"submission ID\"] + col_ordered df = df[col_ordered] # check", "submissions. You can turn this option to True when adding", "add the aggregated time information df_time.index = df.index df_time =", "score_name, 'bag public ' + score_name] + time_list + ['submitted", "[ '{:+d}'.format(m) if m != 0 else '-' for m", "'submission', leaderboard_type + ' ' + score_name] + time_list +", "will be queried. 
This parameter is discarded when requesting the", "col_ordered df = df[col_ordered] # check if the contributivity columns", "= df_time.rename( columns={'train': 'train time [s]', 'valid': 'validation time [s]',", "(leaderboard_df.groupby('team').min() if score_type.is_lower_the_better else leaderboard_df.groupby('team').max()) best_df = best_df[['public ' +", "in submissions if (getattr(sub, submission_filter[leaderboard_type]) and sub.is_not_sandbox)] if not submissions:", "= (df_scores_mean.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_std = (df_scores_std.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_bag", "The event name. new_only : bool, default is False Whether", "-1 if LooseVersion(pd.__version__) < LooseVersion(\"1.0.0\") else None pd.set_option('display.max_colwidth', width) def", ": str The leaderboard in HTML format. \"\"\" q =", "timestamp, compute rank leaderboard_df = leaderboard_df.sort_values( by=['public ' + score_name,", "user_name : None or str, default is None The user", "This parameter is discarded when requesting the competition leaderboard. with_links", "operation on the database. leaderboard_type : {'public', 'private', 'failed', 'new',", "'public' in leaderboard_type else 'private') df = _compute_competition_leaderboard( session, submissions,", "time [s]', 'valid': 'validation time [s]', 'test': 'test time [s]'}", "adding a new submission in the database. \"\"\" event_team =", "== 'private' else ['train time [s]', 'validation time [s]']) col_selected_private", "leaderboard. 
leaderboard_type : {'public', 'private'} The type of leaderboard to", "stats_order) ] # Only display train and validation time for", "at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['public rank'] = np.arange(len(leaderboard_df)) + 1", "' + score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['private rank']", "df_scores_mean, df_scores_std], axis=1, keys=['bag', 'mean', 'std']) df.columns = df.columns.set_names(['stat', 'set',", "= df.rename(columns={ key: value for key, value in zip(score_list, score_order)", "competition leaderboard in a dataframe format. \"\"\" event = session.query(Event).filter_by(name=event_name).one()", ":class:`ramp_database.model.Submission` The submission to report in the leaderboard. leaderboard_type :", "scores from all users will be queried. This parameter is", ") event.private_competition_leaderboard_html = get_leaderboard( session, 'private competition', event_name ) event.new_leaderboard_html", "leaderboard_df['move']] col_selected = ( [leaderboard_type + ' rank', 'team', 'submission',", "precision for the time stamp df['submitted at (UTC)'] = df['submitted", "'private' else ['train time [s]', 'validation time [s]']) col_ordered =", "leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # dealing with", "== 'private' else ['bag']) dataset_order = (['public', 'private'] if leaderboard_type", "score_list + ['contributivity', 'historical contributivity'] + time_list + ['max RAM", "(df_scores_bag.rename(index=map_renaming) .stack().to_frame().T) df = pd.concat([df_scores_bag, df_scores_mean, df_scores_std], axis=1, keys=['bag', 'mean',", "at (UTC)'].astype('datetime64[s]') # reordered the column stats_order = (['bag', 'mean',", "\"private\": col_ordered = [\"submission ID\"] + col_ordered df = df[col_ordered]", "# column was not appended data = [{ column: value", "database. 
event_name : str The event name. user_name : str", "leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # dealing with ties:", "df.rename(columns={ leaderboard_type + ' ' + score_name: score_name, leaderboard_type +", "the error # column was not appended data = [{", "leaderboard_df['private rank'] leaderboard_df['move'] = [ '{:+d}'.format(m) if m != 0", "in a dataframe format. \"\"\" record_score = [] event =", "= '<thead> {} </tbody>'.format( df_html.split('<thead>')[1].split('</tbody>')[0] ) return df_html def update_leaderboards(session,", "= private_leaderboard[col_selected_private] leaderboard_df = leaderboard_df.rename( columns={'bag private ' + score_name:", "+ time_list + ['submitted at (UTC)']) leaderboard_df = private_leaderboard[col_selected_private] leaderboard_df", "leaderboard_type == 'failed' else sub.state)]) } for sub in submissions]", "dataset, score, stat in product(dataset_order, score_order, stats_order) ] # Only", "import pandas as pd from ..model.event import Event from ..model.event", "submission for each team best_df = (leaderboard_df.groupby('team').min() if score_type.is_lower_the_better else", "'private': col_selected.insert(1, 'move') df = leaderboard_df[col_selected] df = df.rename(columns={ leaderboard_type", "leaderboard_df['public rank'] = np.arange(len(leaderboard_df)) + 1 # sort by private", "leaderboard_type + ' ' + score_name] + time_list + ['submitted", "leaderboard_type else 'private') df = _compute_competition_leaderboard( session, submissions, competition_type, event_name", "get_event_team_by_name(session, event_name, user_name) if not new_only: event_team.leaderboard_html = get_leaderboard( session,", "str The event name. 
user_name : str The user name.", "== EventTeam.team_id) .filter(EventTeam.id == Submission.event_team_id) .filter(Event.name == event_name)) if user_name", "'public', event_name, with_links=False ) event.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name", "= [ '{} {} {}'.format(stat, dataset, score) for dataset, score,", "the column stats_order = (['bag', 'mean', 'std'] if leaderboard_type ==", "None, scores from all users will be queried. new_only :", "by private score then by submission timestamp, compute rank leaderboard_df", "( ['team', 'submission'] + score_list + ['contributivity', 'historical contributivity'] +", "'submission'] + ['bag private ' + score_name, 'bag public '", "True # merge to get a best indicator column then", "event name. user_name : str The user name. If None,", "private_leaderboard[col_selected_private] leaderboard_df = leaderboard_df.rename( columns={'bag private ' + score_name: 'private", "if (df[contrib_columns] == 0).all(axis=0).all(): df = df.drop(columns=contrib_columns) df = df.sort_values(", "'submitted at (UTC)', 'state'] else: columns = ['team', 'submission', 'submitted", "leaderboards for all users for a given event. 
Parameters ----------", "import product import numpy as np import pandas as pd", "if not new_only: event_team.leaderboard_html = get_leaderboard( session, 'public', event_name, user_name", ") event.public_leaderboard_html_no_links = get_leaderboard( session, 'public', event_name, with_links=False ) event.failed_leaderboard_html", "= _compute_leaderboard(session, submissions, 'private', event_name, with_links=False) time_list = (['train time", "score_name, leaderboard_type + ' rank': 'rank' }) df = df.sort_values(by='rank')", "session, 'public competition', event_name ) event.private_competition_leaderboard_html = get_leaderboard( session, 'private", "= get_scores(session, sub.id) df_scores = df_scores.round(map_score_precision) df_time = get_time(session, sub.id)", "numpy as np import pandas as pd from ..model.event import", "best_df = best_df[['submitted at (UTC)']].reset_index() best_df['best'] = True leaderboard_df =", "event_team = get_event_team_by_name(session, event_name, user_name) if not new_only: event_team.leaderboard_html =", "col_selected.insert(1, 'move') df = leaderboard_df[col_selected] df = df.rename(columns={ leaderboard_type +", "event.public_leaderboard_html_with_links = get_leaderboard( session, 'public', event_name ) event.public_leaderboard_html_no_links = get_leaderboard(", "df['contributivity'] = int(round(100 * sub.contributivity)) df['historical contributivity'] = int(round( 100", "session to directly perform the operation on the database. submissions", "The user name. If None, scores from all users will", "in submissions: # take only max n bag df_scores_bag =", "def _compute_competition_leaderboard(session, submissions, leaderboard_type, event_name): \"\"\"Format the competition leaderboard. 
Parameters", "= leaderboard_df.drop(columns='best') # sort by public score then by submission", "product(dataset_order, score_order, stats_order) ] # Only display train and validation", "df.to_html(escape=False, index=False, max_cols=None, max_rows=None, justify='left') df_html = '<thead> {} </tbody>'.format(", "pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'submitted at (UTC)'], right_on=['team', 'submitted", "error # column was not appended data = [{ column:", "] # Only display train and validation time for the", "\" \".join(x)) # add the aggregated time information df_time.index =", "session : :class:`sqlalchemy.orm.Session` The session to directly perform the operation", "leaderboard_df.groupby('team').max()) best_df = best_df[['public ' + score_name]].reset_index() best_df['best'] = True", "+ time_list + ['submitted at (UTC)'] ) if leaderboard_type ==", "session, 'new', event_name, user_name ) session.commit() def update_all_user_leaderboards(session, event_name, new_only=False):", "= leaderboard_df[col_selected] df = df.rename(columns={ leaderboard_type + ' ' +", "we need the lowest timestamp best_df = leaderboard_df.groupby('team').min() best_df =", "[ '{} {} {}'.format(stat, dataset, score) for dataset, score, stat", "= df_scores_bag.loc[(slice(None), highest_level), :] df_scores_bag.index = df_scores_bag.index.droplevel('n_bag') df_scores_bag = df_scores_bag.round(map_score_precision)", "database. submissions : list of :class:`ramp_database.model.Submission` The submission to report", "return df_html def update_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards for", "'bag public ' + score_name: 'public ' + score_name} )", "leaderboard to generate. event_name : str The event name. 
user_name", ".filter(EventTeam.id == Submission.event_team_id) .filter(Event.name == event_name)) if user_name is not", "get_bagged_scores from .submission import get_scores from .submission import get_submission_max_ram from", "dataset_order = (['public', 'private'] if leaderboard_type == 'private' else ['public'])", "with ties: we need the lowest timestamp best_df = leaderboard_df.groupby('team').min()", "= sub.basename.replace('submission_', '') df['team'] = sub.team.name df['submission'] = sub.name_with_link if", "get_event_team_by_name from .submission import get_bagged_scores from .submission import get_scores from", "private ' + score_name: 'private ' + score_name, 'bag public", "rank'] = np.arange(len(leaderboard_df)) + 1 # sort by private score", "by=['private ' + score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['private", "to built. event_name : str The name of the event.", "(['train time [s]', 'validation time [s]', 'test time [s]'] if", "return df def _compute_competition_leaderboard(session, submissions, leaderboard_type, event_name): \"\"\"Format the competition", "'failed', event_name, user_name ) event_team.new_leaderboard_html = get_leaderboard( session, 'new', event_name,", "_compute_leaderboard(session, submissions, 'private', event_name, with_links=False) time_list = (['train time [s]',", "on the database. submissions : list of :class:`ramp_database.model.Submission` The submission", "= pd.concat([df_scores_bag, df_scores_mean, df_scores_std], axis=1, keys=['bag', 'mean', 'std']) df.columns =", "'-' for m in leaderboard_df['move']] col_selected = ( [leaderboard_type +", "built. event_name : str The name of the event. 
with_links", "rank leaderboard_df = leaderboard_df.sort_values( by=['private ' + score_name, 'submitted at", "np.arange(len(leaderboard_df)) + 1 leaderboard_df['move'] = \\ leaderboard_df['public rank'] - leaderboard_df['private", "get_leaderboard( session, 'public', event_name, with_links=False ) event.failed_leaderboard_html = get_leaderboard( session,", "' ' + score_name] + time_list + ['submitted at (UTC)']", "= leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # dealing", "the new submissions. You can turn this option to True", "' rank', 'team', 'submission', leaderboard_type + ' ' + score_name]", "column: value for column, value in zip( columns, [sub.event_team.team.name, sub.name_with_link,", "'submission', 'submitted at (UTC)', 'error'] # we rely on the", "['submitted at (UTC)'] ) if leaderboard_type == 'private': col_selected.insert(1, 'move')", "best indicator column then select best leaderboard_df = pd.merge( leaderboard_df,", "dealing with ties: we need the lowest timestamp best_df =", "= df.columns.map(lambda x: \" \".join(x)) # add the aggregated time", "leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # sort by public", "df_scores = df_scores.round(map_score_precision) df_time = get_time(session, sub.id) df_time = df_time.stack().to_frame()", "session, 'public', event_name, user_name ) event_team.failed_leaderboard_html = get_leaderboard( session, 'failed',", "= _compute_leaderboard( session, submissions, leaderboard_type, event_name, with_links=with_links ) elif leaderboard_type", "zip(score_list, score_order) }) return df def _compute_competition_leaderboard(session, submissions, leaderboard_type, event_name):", "df_time = df_time.rename( columns={'train': 'train time [s]', 'valid': 'validation time", "max n bag df_scores_bag = get_bagged_scores(session, sub.id) highest_level = 
df_scores_bag.index.get_level_values('n_bag').max()", "df_html.split('<thead>')[1].split('</tbody>')[0] ) return df_html def update_leaderboards(session, event_name, new_only=False): \"\"\"Update the", "# keep only second precision for the time stamp df['submitted", "session.query(Event).filter_by(name=event_name).one() score_type = event.get_official_score_type(session) score_name = event.official_score_name private_leaderboard = _compute_leaderboard(session,", "column stats_order = (['bag', 'mean', 'std'] if leaderboard_type == 'private'", "column then select best leaderboard_df = pd.merge( leaderboard_df, best_df, how='left',", "steps and rename them to # public and private map_renaming", "q = (session.query(Submission) .filter(Event.id == EventTeam.event_id) .filter(Team.id == EventTeam.team_id) .filter(EventTeam.id", "leaderboard_type == 'private' else ['train time [s]', 'validation time [s]'])", "'new': columns = ['team', 'submission', 'submitted at (UTC)', 'state'] else:", "score_name: 'public ' + score_name} ) # select best submission", "df = _compute_competition_leaderboard( session, submissions, competition_type, event_name ) df_html =", "score_order = ([event.official_score_name] + [score_type.name for score_type in event.score_types if", "Whether or not to update the whole leaderboards or only", "The competition leaderboard in a dataframe format. \"\"\" event =", "== user_name) submissions = q.all() submission_filter = {'public': 'is_public_leaderboard', 'private':", "or not the submission name should be clickable. Returns -------", "\"\"\" event = session.query(Event).filter_by(name=event_name).one() score_type = event.get_official_score_type(session) score_name = event.official_score_name", "'submitted at (UTC)', 'error'] # we rely on the zip", "['max RAM [MB]', 'submitted at (UTC)'] ) if leaderboard_type ==", "None The user name. If None, scores from all users", "dataframe The competition leaderboard in a dataframe format. 
\"\"\" event", "The type of leaderboard to generate. event_name : str The", "the operation on the database. event_name : str The event", "and sub.is_not_sandbox)] if not submissions: return None if leaderboard_type in", "event. with_links : bool Whether or not the submission name", "elif leaderboard_type in ['new', 'failed']: if leaderboard_type == 'new': columns", "else ['bag']) dataset_order = (['public', 'private'] if leaderboard_type == 'private'", "if leaderboard_type == 'private': col_selected.insert(1, 'move') df = leaderboard_df[col_selected] df", "clickable. Returns ------- leaderboard : dataframe The leaderboard in a", "event_name : str The name of the event. with_links :", "== 'private' else ['public']) score_order = ([event.official_score_name] + [score_type.name for", "\"\"\" event = session.query(Event).filter_by(name=event_name).one() event_teams = session.query(EventTeam).filter_by(event=event).all() for event_team in", "RAM [MB]', 'submitted at (UTC)'] ) if leaderboard_type == \"private\":", "event_name, new_only=False): \"\"\"Update the leaderboards for a given event. Parameters", "session, 'new', event_name ) session.commit() def update_user_leaderboards(session, event_name, user_name, new_only=False):", "the zip function ignore the submission state if the error", "score_type = event.get_official_score_type(session) score_name = event.official_score_name private_leaderboard = _compute_leaderboard(session, submissions,", "dataframe format. \"\"\" record_score = [] event = session.query(Event).filter_by(name=event_name).one() map_score_precision", "df_scores_bag = df_scores_bag.loc[(slice(None), highest_level), :] df_scores_bag.index = df_scores_bag.index.droplevel('n_bag') df_scores_bag =", "EventTeam.team_id) .filter(EventTeam.id == Submission.event_team_id) .filter(Event.name == event_name)) if user_name is", "submission in the database. 
\"\"\" event_team = get_event_team_by_name(session, event_name, user_name)", "'public', event_name, user_name ) event_team.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name,", "'validation time [s]']) col_selected_private = (['team', 'submission'] + ['bag private", "'private' else ['bag']) dataset_order = (['public', 'private'] if leaderboard_type ==", "stat in product(dataset_order, score_order, stats_order) ] # Only display train", "pd.set_option('display.max_colwidth', width) def _compute_leaderboard(session, submissions, leaderboard_type, event_name, with_links=True): \"\"\"Format the", "= [ '{:+d}'.format(m) if m != 0 else '-' for", "'') df['team'] = sub.team.name df['submission'] = sub.name_with_link if with_links else", "in ['new', 'failed']: if leaderboard_type == 'new': columns = ['team',", "from .submission import get_bagged_scores from .submission import get_scores from .submission", "= (['bag', 'mean', 'std'] if leaderboard_type == 'private' else ['bag'])", "get a best indicator column then select best leaderboard_df =", "df['submission'] = sub.name_with_link if with_links else sub.name df['contributivity'] = int(round(100", "== Submission.event_team_id) .filter(Event.name == event_name)) if user_name is not None:", "name. new_only : bool, default is False Whether or not", "(UTC)'].astype('datetime64[s]') # reordered the column stats_order = (['bag', 'mean', 'std']", "leaderboard in a dataframe format. \"\"\" record_score = [] event", "= df_time.sum(axis=0, level=\"step\").T df_scores_mean = df_scores.groupby('step').mean() df_scores_std = df_scores.groupby('step').std() #", "leaderboard_type == 'private' else ['bag']) dataset_order = (['public', 'private'] if", "If None, scores from all users will be queried. new_only", "if user_name is not None: q = q.filter(Team.name == user_name)", "scores from all users will be queried. 
new_only : bool,", "df = pd.concat([df, df_time], axis=1) if leaderboard_type == 'private': df['submission", "time_list = (['train time [s]', 'validation time [s]', 'test time", "submissions, competition_type, event_name ) df_html = df.to_html(escape=False, index=False, max_cols=None, max_rows=None,", "df_scores_std = (df_scores_std.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_bag = (df_scores_bag.rename(index=map_renaming) .stack().to_frame().T) df", "{'public': 'is_public_leaderboard', 'private': 'is_private_leaderboard', 'failed': 'is_error', 'new': 'is_new', 'public competition':", "leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # sort by", "the multi-index into a stacked index df.columns = df.columns.map(lambda x:", "name should be clickable. Returns ------- leaderboard : dataframe The", "RAM [MB]'] = get_submission_max_ram(session, sub.id) df['submitted at (UTC)'] = pd.Timestamp(sub.submission_timestamp)", "this option to True when adding a new submission in", "+ 1 leaderboard_df['move'] = \\ leaderboard_df['public rank'] - leaderboard_df['private rank']", "+ score_name, 'bag public ' + score_name] + time_list +", "if LooseVersion(pd.__version__) < LooseVersion(\"1.0.0\") else None pd.set_option('display.max_colwidth', width) def _compute_leaderboard(session,", "key, value in zip(score_list, score_order) }) return df def _compute_competition_leaderboard(session,", "LooseVersion(pd.__version__) < LooseVersion(\"1.0.0\") else None pd.set_option('display.max_colwidth', width) def _compute_leaderboard(session, submissions,", ") event_team.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name, user_name ) event_team.new_leaderboard_html", ": dataframe The leaderboard in a dataframe format. 
\"\"\" record_score", "= (['train time [s]', 'validation time [s]', 'test time [s]']", "[] event = session.query(Event).filter_by(name=event_name).one() map_score_precision = {score_type.name: score_type.precision for score_type", "- leaderboard_df['private rank'] leaderboard_df['move'] = [ '{:+d}'.format(m) if m !=", ".rename(index=map_renaming) .stack().to_frame().T) df_scores_std = (df_scores_std.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_bag = (df_scores_bag.rename(index=map_renaming)", ": str The event name. user_name : None or str,", "axis=1, keys=['bag', 'mean', 'std']) df.columns = df.columns.set_names(['stat', 'set', 'score']) #", "leaderboard_df['private rank'] = np.arange(len(leaderboard_df)) + 1 leaderboard_df['move'] = \\ leaderboard_df['public", "df_time.index = df_time.index.set_names(['fold', 'step']) df_time = df_time.rename(columns={0: 'time'}) df_time =", "}) return df def _compute_competition_leaderboard(session, submissions, leaderboard_type, event_name): \"\"\"Format the", "= np.arange(len(leaderboard_df)) + 1 # sort by private score then", "session.commit() def update_all_user_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards for all", "= (['public', 'private'] if leaderboard_type == 'private' else ['public']) score_order", "leaderboard : dataframe The leaderboard in a dataframe format. 
\"\"\"", "' + score_name] ) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']]", "to get a best indicator column then select best leaderboard_df", "product import numpy as np import pandas as pd from", "else 'private') df = _compute_competition_leaderboard( session, submissions, competition_type, event_name )", "for the public leaderboard time_list = (['train time [s]', 'validation", ": :class:`sqlalchemy.orm.Session` The session to directly perform the operation on", "submissions, 'private', event_name, with_links=False) time_list = (['train time [s]', 'validation", "= best_df[['submitted at (UTC)']].reset_index() best_df['best'] = True leaderboard_df = pd.merge(", "session.query(Event).filter_by(name=event_name).one() if not new_only: event.private_leaderboard_html = get_leaderboard( session, 'private', event_name", "dataset, score) for dataset, score, stat in product(dataset_order, score_order, stats_order)", "df_scores_bag = df_scores_bag.round(map_score_precision) df_scores = get_scores(session, sub.id) df_scores = df_scores.round(map_score_precision)", "== 'private': df['submission ID'] = sub.basename.replace('submission_', '') df['team'] = sub.team.name", "True]) leaderboard_df['public rank'] = np.arange(len(leaderboard_df)) + 1 # sort by", "[sub.event_team.team.name, sub.name_with_link, pd.Timestamp(sub.submission_timestamp), (sub.state_with_link if leaderboard_type == 'failed' else sub.state)])", "bool Whether or not the submission name should be clickable.", "time [s]', 'test time [s]'] if leaderboard_type == 'private' else", "= get_submission_max_ram(session, sub.id) df['submitted at (UTC)'] = pd.Timestamp(sub.submission_timestamp) record_score.append(df) #", "sub in submissions if sub.is_public_leaderboard] if not submissions: return None", "format. 
\"\"\" event = session.query(Event).filter_by(name=event_name).one() score_type = event.get_official_score_type(session) score_name =", "= [{ column: value for column, value in zip( columns,", "best_df[['public ' + score_name]].reset_index() best_df['best'] = True # merge to", "+ ' ' + score_name] + time_list + ['submitted at", "submissions, leaderboard_type, event_name): \"\"\"Format the competition leaderboard. Parameters ---------- session", "# add the aggregated time information df_time.index = df.index df_time", "leaderboard_df.rename( columns={'bag private ' + score_name: 'private ' + score_name,", "sub.name_with_link, pd.Timestamp(sub.submission_timestamp), (sub.state_with_link if leaderboard_type == 'failed' else sub.state)]) }", "' + score_name, 'bag public ' + score_name] + time_list", "'public', event_name ) event.public_leaderboard_html_no_links = get_leaderboard( session, 'public', event_name, with_links=False", "'failed': 'is_error', 'new': 'is_new', 'public competition': 'is_in_competition', 'private competition': 'is_in_competition'}", "(sub.state_with_link if leaderboard_type == 'failed' else sub.state)]) } for sub", "[s]', 'validation time [s]']) col_ordered = ( ['team', 'submission'] +", "the public leaderboard if leaderboard_type == 'public': df = df.rename(columns={", "\"bag {} {}\".format(leaderboard_type, event.official_score_name), ascending=event.get_official_score_type(session).is_lower_the_better ) # rename the column", "session, submissions, leaderboard_type, event_name, with_links=with_links ) elif leaderboard_type in ['new',", "at (UTC)']].reset_index() best_df['best'] = True leaderboard_df = pd.merge( leaderboard_df, best_df,", "'validation time [s]', 'test time [s]'] if leaderboard_type == 'private'", "event = session.query(Event).filter_by(name=event_name).one() score_type = event.get_official_score_type(session) score_name = event.official_score_name private_leaderboard", "a best indicator column then select best leaderboard_df = 
pd.merge(", "[sub for sub in submissions if sub.is_public_leaderboard] if not submissions:", "+ ['max RAM [MB]', 'submitted at (UTC)'] ) if leaderboard_type", "time [s]', 'validation time [s]', 'test time [s]'] if leaderboard_type", "== 'new': columns = ['team', 'submission', 'submitted at (UTC)', 'state']", "update the whole leaderboards or only the new submissions. You", "= get_leaderboard( session, 'failed', event_name ) event.public_competition_leaderboard_html = get_leaderboard( session,", "+ score_name} ) # select best submission for each team", "columns, [sub.event_team.team.name, sub.name_with_link, pd.Timestamp(sub.submission_timestamp), (sub.state_with_link if leaderboard_type == 'failed' else", "not to update the whole leaderboards or only the new", "session, 'private', event_name ) event.public_leaderboard_html_with_links = get_leaderboard( session, 'public', event_name", "+ ['submitted at (UTC)'] ) if leaderboard_type == 'private': col_selected.insert(1,", "the leaderboards for all users for a given event. Parameters", "type of leaderboard to built. event_name : str The name", "score_name]].reset_index() best_df['best'] = True # merge to get a best", "score, stat in product(dataset_order, score_order, stats_order) ] # Only display", "df_time.rename(columns={0: 'time'}) df_time = df_time.sum(axis=0, level=\"step\").T df_scores_mean = df_scores.groupby('step').mean() df_scores_std", "q.all() submission_filter = {'public': 'is_public_leaderboard', 'private': 'is_private_leaderboard', 'failed': 'is_error', 'new':", "leaderboard_type in ['new', 'failed']: if leaderboard_type == 'new': columns =", "def update_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards for a given", "the validation and testing steps and rename them to #", "# change the multi-index into a stacked index df.columns =", "time [s]', 'validation time [s]']) col_ordered = ( ['team', 'submission']", "adding a new submission in the database. 
\"\"\" event =", "'is_in_competition'} submissions = [sub for sub in submissions if (getattr(sub,", "if leaderboard_type == \"private\": col_ordered = [\"submission ID\"] + col_ordered", "else sub.state)]) } for sub in submissions] df = pd.DataFrame(data,", "------- leaderboard : dataframe The leaderboard in a dataframe format.", "[s]'] if leaderboard_type == 'private' else ['train time [s]', 'validation", "event_name, with_links=False ) event.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name )", "= leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # dealing with ties: we", "name. If None, scores from all users will be queried.", "event name. new_only : bool, default is False Whether or", "session.commit() def update_user_leaderboards(session, event_name, user_name, new_only=False): \"\"\"Update the of a", ": str The event name. new_only : bool, default is", "event.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name ) event.public_competition_leaderboard_html = get_leaderboard(", "df = df.rename(columns={ leaderboard_type + ' ' + score_name: score_name,", "if leaderboard_type in ['public', 'private']: df = _compute_leaderboard( session, submissions,", "and testing steps and rename them to # public and", "columns are null contrib_columns = ['contributivity', 'historical contributivity'] if (df[contrib_columns]", "'private': df['submission ID'] = sub.basename.replace('submission_', '') df['team'] = sub.team.name df['submission']", "'private'] if leaderboard_type == 'private' else ['public']) score_order = ([event.official_score_name]", "dataframe The leaderboard in a dataframe format. \"\"\" record_score =", "map_score_precision = {score_type.name: score_type.precision for score_type in event.score_types} for sub", "event_name, user_name=None, with_links=True): \"\"\"Get a leaderboard. 
Parameters ---------- session :", "= (session.query(Submission) .filter(Event.id == EventTeam.event_id) .filter(Team.id == EventTeam.team_id) .filter(EventTeam.id ==", "if leaderboard_type == 'private' else ['train time [s]', 'validation time", "score then by submission timestamp, compute rank leaderboard_df = leaderboard_df.sort_values(", "else leaderboard_df.groupby('team').max()) best_df = best_df[['public ' + score_name]].reset_index() best_df['best'] =", "submissions: # take only max n bag df_scores_bag = get_bagged_scores(session,", "is False Whether or not to update the whole leaderboards", "the database. submissions : list of :class:`ramp_database.model.Submission` The submission to", "zip function ignore the submission state if the error #", "from distutils.version import LooseVersion from itertools import product import numpy", "leaderboards for a given event. Parameters ---------- session : :class:`sqlalchemy.orm.Session`", "'<thead> {} </tbody>'.format( df_html.split('<thead>')[1].split('</tbody>')[0] ) return df_html def update_leaderboards(session, event_name,", "the public leaderboard time_list = (['train time [s]', 'validation time", "df_scores_bag = (df_scores_bag.rename(index=map_renaming) .stack().to_frame().T) df = pd.concat([df_scores_bag, df_scores_mean, df_scores_std], axis=1,", "user_name is not None: q = q.filter(Team.name == user_name) submissions", "= pd.concat(record_score, axis=0, ignore_index=True, sort=False) # keep only second precision", "for the time stamp df['submitted at (UTC)'] = df['submitted at", "given event. 
Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session to", "extra filtering submissions = [sub for sub in submissions if", "submission timestamp, compute rank leaderboard_df = leaderboard_df.sort_values( by=['private ' +", "not new_only: event.private_leaderboard_html = get_leaderboard( session, 'private', event_name ) event.public_leaderboard_html_with_links", "Returns ------- leaderboard : dataframe The leaderboard in a dataframe", "event_team.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name, user_name ) event_team.new_leaderboard_html =", "pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'public ' + score_name], right_on=['team',", "name should be clickable. Returns ------- leaderboard : str The", "into a stacked index df.columns = df.columns.map(lambda x: \" \".join(x))", "None pd.set_option('display.max_colwidth', width) def _compute_leaderboard(session, submissions, leaderboard_type, event_name, with_links=True): \"\"\"Format", "competition': 'is_in_competition', 'private competition': 'is_in_competition'} submissions = [sub for sub", "leaderboard_type == \"private\": col_ordered = [\"submission ID\"] + col_ordered df", "session, 'public', event_name, with_links=False ) event.failed_leaderboard_html = get_leaderboard( session, 'failed',", "public leaderboard time_list = (['train time [s]', 'validation time [s]',", "'private', event_name, with_links=False) time_list = (['train time [s]', 'validation time", "== 'private' else ['train time [s]', 'validation time [s]']) col_ordered", "(UTC)'] ) if leaderboard_type == \"private\": col_ordered = [\"submission ID\"]", ") if leaderboard_type == \"private\": col_ordered = [\"submission ID\"] +", "'time'}) df_time = df_time.sum(axis=0, level=\"step\").T df_scores_mean = df_scores.groupby('step').mean() df_scores_std =", "submission_filter = {'public': 'is_public_leaderboard', 'private': 'is_private_leaderboard', 'failed': 'is_error', 'new': 'is_new',", 
"session, 'failed', event_name, user_name ) event_team.new_leaderboard_html = get_leaderboard( session, 'new',", "new_only=False): \"\"\"Update the of a user leaderboards for a given", "leaderboard_df = private_leaderboard[col_selected_private] leaderboard_df = leaderboard_df.rename( columns={'bag private ' +", "best_df[['submitted at (UTC)']].reset_index() best_df['best'] = True leaderboard_df = pd.merge( leaderboard_df,", "from itertools import product import numpy as np import pandas", "sub.contributivity)) df['historical contributivity'] = int(round( 100 * sub.historical_contributivity)) df['max RAM", "df.columns.set_names(['stat', 'set', 'score']) # change the multi-index into a stacked", "import Team from .team import get_event_team_by_name from .submission import get_bagged_scores", "not new_only: event_team.leaderboard_html = get_leaderboard( session, 'public', event_name, user_name )", "'public ' + score_name} ) # select best submission for", "' + score_name]].reset_index() best_df['best'] = True # merge to get", "event = session.query(Event).filter_by(name=event_name).one() if not new_only: event.private_leaderboard_html = get_leaderboard( session,", "get_leaderboard( session, 'failed', event_name, user_name ) event_team.new_leaderboard_html = get_leaderboard( session,", "'public ' + score_name] ) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df =", "None: q = q.filter(Team.name == user_name) submissions = q.all() submission_filter", "width = -1 if LooseVersion(pd.__version__) < LooseVersion(\"1.0.0\") else None pd.set_option('display.max_colwidth',", ": str The user name. 
If None, scores from all", "leaderboard_df.sort_values( by=['private ' + score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True])", "private map_renaming = {'valid': 'public', 'test': 'private'} df_scores_mean = (df_scores_mean.loc[list(map_renaming.keys())]", "import get_scores from .submission import get_submission_max_ram from .submission import get_time", "private_leaderboard = _compute_leaderboard(session, submissions, 'private', event_name, with_links=False) time_list = (['train", "df.columns.map(lambda x: \" \".join(x)) # add the aggregated time information", "we rely on the zip function ignore the submission state", "for sub in submissions: # take only max n bag", "session to directly perform the operation on the database. event_name", "submission state if the error # column was not appended", "df_time.stack().to_frame() df_time.index = df_time.index.set_names(['fold', 'step']) df_time = df_time.rename(columns={0: 'time'}) df_time", "columns = ['team', 'submission', 'submitted at (UTC)', 'error'] # we", "best_df['best'] = True leaderboard_df = pd.merge( leaderboard_df, best_df, how='left', left_on=['team',", "in the database. \"\"\" event = session.query(Event).filter_by(name=event_name).one() event_teams = session.query(EventTeam).filter_by(event=event).all()", "'validation time [s]', 'test': 'test time [s]'} ) df =", "= df_scores_bag.index.droplevel('n_bag') df_scores_bag = df_scores_bag.round(map_score_precision) df_scores = get_scores(session, sub.id) df_scores", "only max n bag df_scores_bag = get_bagged_scores(session, sub.id) highest_level =", "= session.query(EventTeam).filter_by(event=event).all() for event_team in event_teams: user_name = event_team.team.name if", "'submitted at (UTC)'] ) if leaderboard_type == \"private\": col_ordered =", "column was not appended data = [{ column: value for", "The event name. user_name : str The user name. 
If", "df_scores.round(map_score_precision) df_time = get_time(session, sub.id) df_time = df_time.stack().to_frame() df_time.index =", "= pd.Timestamp(sub.submission_timestamp) record_score.append(df) # stack all the records df =", "------- leaderboard : str The leaderboard in HTML format. \"\"\"", "public ' + score_name] + time_list + ['submitted at (UTC)'])", "leaderboard_df, best_df, how='left', left_on=['team', 'public ' + score_name], right_on=['team', 'public", "'private', 'failed', 'new', \\ 'public competition', 'private competition'} The type", "user name. If None, scores from all users will be", "new submission in the database. \"\"\" event = session.query(Event).filter_by(name=event_name).one() event_teams", "= leaderboard_df.sort_values( by=['private ' + score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better,", "event_name, with_links=True): \"\"\"Format the leaderboard. Parameters ---------- session : :class:`sqlalchemy.orm.Session`", "best_df = best_df[['public ' + score_name]].reset_index() best_df['best'] = True #", "EventTeam.event_id) .filter(Team.id == EventTeam.team_id) .filter(EventTeam.id == Submission.event_team_id) .filter(Event.name == event_name))", "False Whether or not to update the whole leaderboards or", "= {'public': 'is_public_leaderboard', 'private': 'is_private_leaderboard', 'failed': 'is_error', 'new': 'is_new', 'public", "default is None The user name. If None, scores from", "get_submission_max_ram from .submission import get_time width = -1 if LooseVersion(pd.__version__)", "'std'] if leaderboard_type == 'private' else ['bag']) dataset_order = (['public',", "None, scores from all users will be queried. 
This parameter", "event.private_competition_leaderboard_html = get_leaderboard( session, 'private competition', event_name ) event.new_leaderboard_html =", "'new', event_name ) session.commit() def update_user_leaderboards(session, event_name, user_name, new_only=False): \"\"\"Update", "'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['public rank'] = np.arange(len(leaderboard_df)) +", "= ( ['team', 'submission'] + score_list + ['contributivity', 'historical contributivity']", "= [sub for sub in submissions if (getattr(sub, submission_filter[leaderboard_type]) and", "(UTC)']) leaderboard_df = private_leaderboard[col_selected_private] leaderboard_df = leaderboard_df.rename( columns={'bag private '", "in ['public', 'private']: df = _compute_leaderboard( session, submissions, leaderboard_type, event_name,", "value for key, value in zip(score_list, score_order) }) return df", "df def _compute_competition_leaderboard(session, submissions, leaderboard_type, event_name): \"\"\"Format the competition leaderboard.", ".stack().to_frame().T) df_scores_bag = (df_scores_bag.rename(index=map_renaming) .stack().to_frame().T) df = pd.concat([df_scores_bag, df_scores_mean, df_scores_std],", "['bag']) dataset_order = (['public', 'private'] if leaderboard_type == 'private' else", "be clickable. Returns ------- leaderboard : str The leaderboard in", "the competition leaderboard. with_links : bool, default is True Whether", "submissions, leaderboard_type, event_name, with_links=with_links ) elif leaderboard_type in ['new', 'failed']:", "time [s]']) col_selected_private = (['team', 'submission'] + ['bag private '", "built. event_name : str The name of the event. 
Returns", "get_leaderboard( session, 'new', event_name, user_name ) session.commit() def update_all_user_leaderboards(session, event_name,", "submissions = [sub for sub in submissions if (getattr(sub, submission_filter[leaderboard_type])", "if leaderboard_type == 'failed' else sub.state)]) } for sub in", "by submission timestamp, compute rank leaderboard_df = leaderboard_df.sort_values( by=['public '", "all users for a given event. Parameters ---------- session :", "score_type in event.score_types} for sub in submissions: # take only", ") event.new_leaderboard_html = get_leaderboard( session, 'new', event_name ) session.commit() def", "best_df, how='left', left_on=['team', 'public ' + score_name], right_on=['team', 'public '", "time stamp df['submitted at (UTC)'] = df['submitted at (UTC)'].astype('datetime64[s]') #", "else: columns = ['team', 'submission', 'submitted at (UTC)', 'error'] #", "= get_leaderboard( session, 'public', event_name, user_name ) event_team.failed_leaderboard_html = get_leaderboard(", "# select only the validation and testing steps and rename", "in zip( columns, [sub.event_team.team.name, sub.name_with_link, pd.Timestamp(sub.submission_timestamp), (sub.state_with_link if leaderboard_type ==", "[MB]', 'submitted at (UTC)'] ) if leaderboard_type == \"private\": col_ordered", "= pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'submitted at (UTC)'], right_on=['team',", ": str The name of the event. Returns ------- competition_leaderboard", "Whether or not the submission name should be clickable. 
Returns", "= df_scores.round(map_score_precision) df_time = get_time(session, sub.id) df_time = df_time.stack().to_frame() df_time.index", "100 * sub.historical_contributivity)) df['max RAM [MB]'] = get_submission_max_ram(session, sub.id) df['submitted", "if not submissions: return None competition_type = ('public' if 'public'", "df_scores_bag = get_bagged_scores(session, sub.id) highest_level = df_scores_bag.index.get_level_values('n_bag').max() df_scores_bag = df_scores_bag.loc[(slice(None),", "stamp df['submitted at (UTC)'] = df['submitted at (UTC)'].astype('datetime64[s]') # reordered", "\"\"\" event_team = get_event_team_by_name(session, event_name, user_name) if not new_only: event_team.leaderboard_html", "df_scores_mean = (df_scores_mean.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_std = (df_scores_std.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T)", "time for the public leaderboard time_list = (['train time [s]',", "score_name: 'private ' + score_name, 'bag public ' + score_name:", "{} {}\".format(leaderboard_type, event.official_score_name), ascending=event.get_official_score_type(session).is_lower_the_better ) # rename the column name", "_compute_competition_leaderboard(session, submissions, leaderboard_type, event_name): \"\"\"Format the competition leaderboard. Parameters ----------", "bool, default is True Whether or not the submission name", "operation on the database. submissions : list of :class:`ramp_database.model.Submission` The", "is discarded when requesting the competition leaderboard. 
with_links : bool,", "' + score_name, 'bag public ' + score_name: 'public '", "= get_event_team_by_name(session, event_name, user_name) if not new_only: event_team.leaderboard_html = get_leaderboard(", "max_cols=None, max_rows=None, justify='left') df_html = '<thead> {} </tbody>'.format( df_html.split('<thead>')[1].split('</tbody>')[0] )", "reordered the column stats_order = (['bag', 'mean', 'std'] if leaderboard_type", "select best submission for each team best_df = (leaderboard_df.groupby('team').min() if", "was not appended data = [{ column: value for column,", "pd from ..model.event import Event from ..model.event import EventTeam from", "# reordered the column stats_order = (['bag', 'mean', 'std'] if", "df['submitted at (UTC)'] = df['submitted at (UTC)'].astype('datetime64[s]') # reordered the", "[s]', 'validation time [s]', 'test time [s]'] if leaderboard_type ==", "submission in the database. \"\"\" event = session.query(Event).filter_by(name=event_name).one() if not", "_compute_leaderboard( session, submissions, leaderboard_type, event_name, with_links=with_links ) elif leaderboard_type in", "with_links=True): \"\"\"Format the leaderboard. Parameters ---------- session : :class:`sqlalchemy.orm.Session` The", "df_time.index.set_names(['fold', 'step']) df_time = df_time.rename(columns={0: 'time'}) df_time = df_time.sum(axis=0, level=\"step\").T", "from ..model.team import Team from .team import get_event_team_by_name from .submission", "' rank': 'rank' }) df = df.sort_values(by='rank') return df def", "{}'.format(stat, dataset, score) for dataset, score, stat in product(dataset_order, score_order,", "from all users will be queried. 
new_only : bool, default", "contributivity'] if (df[contrib_columns] == 0).all(axis=0).all(): df = df.drop(columns=contrib_columns) df =", "not appended data = [{ column: value for column, value", "df['submission ID'] = sub.basename.replace('submission_', '') df['team'] = sub.team.name df['submission'] =", "are null contrib_columns = ['contributivity', 'historical contributivity'] if (df[contrib_columns] ==", ") # rename the column name for the public leaderboard", "timestamp, compute rank leaderboard_df = leaderboard_df.sort_values( by=['private ' + score_name,", "---------- session : :class:`sqlalchemy.orm.Session` The session to directly perform the", "\"\"\" event = session.query(Event).filter_by(name=event_name).one() if not new_only: event.private_leaderboard_html = get_leaderboard(", "= (df_scores_std.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_bag = (df_scores_bag.rename(index=map_renaming) .stack().to_frame().T) df =", "{score_type.name: score_type.precision for score_type in event.score_types} for sub in submissions:", "= leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # sort by public score", "then by submission timestamp, compute rank leaderboard_df = leaderboard_df.sort_values( by=['private", "get_time width = -1 if LooseVersion(pd.__version__) < LooseVersion(\"1.0.0\") else None", "df_time.sum(axis=0, level=\"step\").T df_scores_mean = df_scores.groupby('step').mean() df_scores_std = df_scores.groupby('step').std() # select", "# we rely on the zip function ignore the submission", "Submission.event_team_id) .filter(Event.name == event_name)) if user_name is not None: q", "leaderboard_df.groupby('team').min() best_df = best_df[['submitted at (UTC)']].reset_index() best_df['best'] = True leaderboard_df", "name for the public leaderboard if leaderboard_type == 'public': df", "Returns ------- leaderboard : str The leaderboard in HTML format.", "rename them to # 
public and private map_renaming = {'valid':", "on the database. leaderboard_type : {'public', 'private', 'failed', 'new', \\", "df_scores_bag.loc[(slice(None), highest_level), :] df_scores_bag.index = df_scores_bag.index.droplevel('n_bag') df_scores_bag = df_scores_bag.round(map_score_precision) df_scores", "(UTC)'] ) if leaderboard_type == 'private': col_selected.insert(1, 'move') df =", "'test time [s]'} ) df = pd.concat([df, df_time], axis=1) if", "\"\"\"Format the competition leaderboard. Parameters ---------- session : :class:`sqlalchemy.orm.Session` The", "database. event_name : str The event name. new_only : bool,", "' + score_name: 'private ' + score_name, 'bag public '", "not the submission name should be clickable. Returns ------- leaderboard", "'submission', 'submitted at (UTC)', 'state'] else: columns = ['team', 'submission',", "(df[contrib_columns] == 0).all(axis=0).all(): df = df.drop(columns=contrib_columns) df = df.sort_values( \"bag", "= ['team', 'submission', 'submitted at (UTC)', 'state'] else: columns =", "get_scores from .submission import get_submission_max_ram from .submission import get_time width", "axis=1) if leaderboard_type == 'private': df['submission ID'] = sub.basename.replace('submission_', '')", "# select best submission for each team best_df = (leaderboard_df.groupby('team').min()", "= sub.name_with_link if with_links else sub.name df['contributivity'] = int(round(100 *", "from .team import get_event_team_by_name from .submission import get_bagged_scores from .submission", "in a dataframe format. 
\"\"\" event = session.query(Event).filter_by(name=event_name).one() score_type =", "Event from ..model.event import EventTeam from ..model.submission import Submission from", "with_links else sub.name df['contributivity'] = int(round(100 * sub.contributivity)) df['historical contributivity']", "value for column, value in zip( columns, [sub.event_team.team.name, sub.name_with_link, pd.Timestamp(sub.submission_timestamp),", "import numpy as np import pandas as pd from ..model.event", "aggregated time information df_time.index = df.index df_time = df_time.rename( columns={'train':", "pd.Timestamp(sub.submission_timestamp), (sub.state_with_link if leaderboard_type == 'failed' else sub.state)]) } for", "df_scores_std], axis=1, keys=['bag', 'mean', 'std']) df.columns = df.columns.set_names(['stat', 'set', 'score'])", "event.official_score_name private_leaderboard = _compute_leaderboard(session, submissions, 'private', event_name, with_links=False) time_list =", "= session.query(Event).filter_by(name=event_name).one() if not new_only: event.private_leaderboard_html = get_leaderboard( session, 'private',", "leaderboard time_list = (['train time [s]', 'validation time [s]', 'test", "of the event. with_links : bool Whether or not the", "the leaderboard. Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session to", "to directly perform the operation on the database. 
submissions :", ") df_html = df.to_html(escape=False, index=False, max_cols=None, max_rows=None, justify='left') df_html =", ") event.public_competition_leaderboard_html = get_leaderboard( session, 'public competition', event_name ) event.private_competition_leaderboard_html", "event_team.new_leaderboard_html = get_leaderboard( session, 'new', event_name, user_name ) session.commit() def", "at (UTC)'] ) if leaderboard_type == \"private\": col_ordered = [\"submission", "'is_new', 'public competition': 'is_in_competition', 'private competition': 'is_in_competition'} submissions = [sub", "+ ' rank', 'team', 'submission', leaderboard_type + ' ' +", "# public and private map_renaming = {'valid': 'public', 'test': 'private'}", "= True leaderboard_df = pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'submitted", "be queried. new_only : bool, default is False Whether or", "user_name : str The user name. If None, scores from", "'public competition': 'is_in_competition', 'private competition': 'is_in_competition'} submissions = [sub for", "= event.get_official_score_type(session) score_name = event.official_score_name private_leaderboard = _compute_leaderboard(session, submissions, 'private',", "def get_leaderboard(session, leaderboard_type, event_name, user_name=None, with_links=True): \"\"\"Get a leaderboard. Parameters", "best_df, how='left', left_on=['team', 'submitted at (UTC)'], right_on=['team', 'submitted at (UTC)'])", "submissions, leaderboard_type, event_name, with_links=True): \"\"\"Format the leaderboard. Parameters ---------- session", "contributivity'] + time_list + ['max RAM [MB]', 'submitted at (UTC)']", "# dealing with ties: we need the lowest timestamp best_df", "leaderboard. with_links : bool, default is True Whether or not", "if score_type.name != event.official_score_name]) score_list = [ '{} {} {}'.format(stat,", "(UTC)'] = pd.Timestamp(sub.submission_timestamp) record_score.append(df) # stack all the records df", "leaderboard to built. 
event_name : str The name of the", "['contributivity', 'historical contributivity'] + time_list + ['max RAM [MB]', 'submitted", "in event.score_types if score_type.name != event.official_score_name]) score_list = [ '{}", "submission timestamp, compute rank leaderboard_df = leaderboard_df.sort_values( by=['public ' +", "default is False Whether or not to update the whole", "value in zip(score_list, score_order) }) return df def _compute_competition_leaderboard(session, submissions,", "' ' + score_name: score_name, leaderboard_type + ' rank': 'rank'", "from .submission import get_scores from .submission import get_submission_max_ram from .submission", "df_time = df_time.rename(columns={0: 'time'}) df_time = df_time.sum(axis=0, level=\"step\").T df_scores_mean =", "as pd from ..model.event import Event from ..model.event import EventTeam", "for sub in submissions] df = pd.DataFrame(data, columns=columns) else: #", "The submission to report in the leaderboard. leaderboard_type : {'public',", "\"\"\"Update the of a user leaderboards for a given event.", "of leaderboard to generate. 
event_name : str The event name.", "rank', 'team', 'submission', leaderboard_type + ' ' + score_name] +", "'private ' + score_name, 'bag public ' + score_name: 'public", ") df = pd.concat([df, df_time], axis=1) if leaderboard_type == 'private':", "'test': 'private'} df_scores_mean = (df_scores_mean.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_std = (df_scores_std.loc[list(map_renaming.keys())]", "'{} {} {}'.format(stat, dataset, score) for dataset, score, stat in", "= ['contributivity', 'historical contributivity'] if (df[contrib_columns] == 0).all(axis=0).all(): df =", "event.score_types} for sub in submissions: # take only max n", "..model.submission import Submission from ..model.team import Team from .team import", "time [s]'] if leaderboard_type == 'private' else ['train time [s]',", "} for sub in submissions] df = pd.DataFrame(data, columns=columns) else:", "'mean', 'std']) df.columns = df.columns.set_names(['stat', 'set', 'score']) # change the", "name. 
user_name : None or str, default is None The", "event_name ) event.public_competition_leaderboard_html = get_leaderboard( session, 'public competition', event_name )", "columns={'bag private ' + score_name: 'private ' + score_name, 'bag", "# Only display train and validation time for the public", "= -1 if LooseVersion(pd.__version__) < LooseVersion(\"1.0.0\") else None pd.set_option('display.max_colwidth', width)", "df_time.rename( columns={'train': 'train time [s]', 'valid': 'validation time [s]', 'test':", "event.get_official_score_type(session) score_name = event.official_score_name private_leaderboard = _compute_leaderboard(session, submissions, 'private', event_name,", "competition_leaderboard : dataframe The competition leaderboard in a dataframe format.", "= \\ leaderboard_df['public rank'] - leaderboard_df['private rank'] leaderboard_df['move'] = [", "import get_submission_max_ram from .submission import get_time width = -1 if", "df_scores.groupby('step').mean() df_scores_std = df_scores.groupby('step').std() # select only the validation and", "# take only max n bag df_scores_bag = get_bagged_scores(session, sub.id)", "key: value for key, value in zip(score_list, score_order) }) return", "score_name], right_on=['team', 'public ' + score_name] ) leaderboard_df = leaderboard_df.fillna(False)", "df['submitted at (UTC)'] = pd.Timestamp(sub.submission_timestamp) record_score.append(df) # stack all the", "the competition leaderboard. Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session", "the leaderboard. leaderboard_type : {'public', 'private'} The type of leaderboard", "[s]', 'valid': 'validation time [s]', 'test': 'test time [s]'} )", "is None The user name. 
If None, scores from all", "not submissions: return None if leaderboard_type in ['public', 'private']: df", "in submissions if sub.is_public_leaderboard] if not submissions: return None competition_type", "'test time [s]'] if leaderboard_type == 'private' else ['train time", "in event_teams: user_name = event_team.team.name if not new_only: event_team.leaderboard_html =", "is True Whether or not the submission name should be", "map_renaming = {'valid': 'public', 'test': 'private'} df_scores_mean = (df_scores_mean.loc[list(map_renaming.keys())] .rename(index=map_renaming)", "keys=['bag', 'mean', 'std']) df.columns = df.columns.set_names(['stat', 'set', 'score']) # change", "= df.columns.set_names(['stat', 'set', 'score']) # change the multi-index into a", "directly perform the operation on the database. submissions : list", "event_name ) event.private_competition_leaderboard_html = get_leaderboard( session, 'private competition', event_name )", "of leaderboard to built. event_name : str The name of", "+ ['bag private ' + score_name, 'bag public ' +", "new_only=False): \"\"\"Update the leaderboards for all users for a given", "'failed']: if leaderboard_type == 'new': columns = ['team', 'submission', 'submitted", "value in zip( columns, [sub.event_team.team.name, sub.name_with_link, pd.Timestamp(sub.submission_timestamp), (sub.state_with_link if leaderboard_type", "event_name, user_name ) event_team.new_leaderboard_html = get_leaderboard( session, 'new', event_name, user_name", "the database. \"\"\" event_team = get_event_team_by_name(session, event_name, user_name) if not", "score_order) }) return df def _compute_competition_leaderboard(session, submissions, leaderboard_type, event_name): \"\"\"Format", "queried. 
This parameter is discarded when requesting the competition leaderboard.", "= df_scores_bag.round(map_score_precision) df_scores = get_scores(session, sub.id) df_scores = df_scores.round(map_score_precision) df_time", "validation time for the public leaderboard time_list = (['train time", "level=\"step\").T df_scores_mean = df_scores.groupby('step').mean() df_scores_std = df_scores.groupby('step').std() # select only", "leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # dealing with ties: we need", "'is_public_leaderboard', 'private': 'is_private_leaderboard', 'failed': 'is_error', 'new': 'is_new', 'public competition': 'is_in_competition',", "leaderboard if leaderboard_type == 'public': df = df.rename(columns={ key: value", "score_type in event.score_types if score_type.name != event.official_score_name]) score_list = [", "= session.query(Event).filter_by(name=event_name).one() score_type = event.get_official_score_type(session) score_name = event.official_score_name private_leaderboard =", "to update the whole leaderboards or only the new submissions.", "'public': df = df.rename(columns={ key: value for key, value in", "['train time [s]', 'validation time [s]']) col_selected_private = (['team', 'submission']", "leaderboard_type + ' rank': 'rank' }) df = df.sort_values(by='rank') return", "'is_error', 'new': 'is_new', 'public competition': 'is_in_competition', 'private competition': 'is_in_competition'} submissions", "# make some extra filtering submissions = [sub for sub", "name. user_name : str The user name. 
If None, scores", "get_bagged_scores(session, sub.id) highest_level = df_scores_bag.index.get_level_values('n_bag').max() df_scores_bag = df_scores_bag.loc[(slice(None), highest_level), :]", "+ score_name] + time_list + ['submitted at (UTC)']) leaderboard_df =", "df_time = df_time.sum(axis=0, level=\"step\").T df_scores_mean = df_scores.groupby('step').mean() df_scores_std = df_scores.groupby('step').std()", "if leaderboard_type == 'private': df['submission ID'] = sub.basename.replace('submission_', '') df['team']", "session, 'private competition', event_name ) event.new_leaderboard_html = get_leaderboard( session, 'new',", "None or str, default is None The user name. If", "private score then by submission timestamp, compute rank leaderboard_df =", "event = session.query(Event).filter_by(name=event_name).one() event_teams = session.query(EventTeam).filter_by(event=event).all() for event_team in event_teams:", "df_scores_bag.index = df_scores_bag.index.droplevel('n_bag') df_scores_bag = df_scores_bag.round(map_score_precision) df_scores = get_scores(session, sub.id)", "ID\"] + col_ordered df = df[col_ordered] # check if the", "leaderboard_type + ' ' + score_name: score_name, leaderboard_type + '", "leaderboard_type == 'private': col_selected.insert(1, 'move') df = leaderboard_df[col_selected] df =", "sort=False) # keep only second precision for the time stamp", "= {'valid': 'public', 'test': 'private'} df_scores_mean = (df_scores_mean.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T)", "True]) leaderboard_df['private rank'] = np.arange(len(leaderboard_df)) + 1 leaderboard_df['move'] = \\", "The name of the event. with_links : bool Whether or", "get_time(session, sub.id) df_time = df_time.stack().to_frame() df_time.index = df_time.index.set_names(['fold', 'step']) df_time", "for a given event. 
Parameters ---------- session : :class:`sqlalchemy.orm.Session` The", "in leaderboard_type else 'private') df = _compute_competition_leaderboard( session, submissions, competition_type,", "'submitted at (UTC)']) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df", "as np import pandas as pd from ..model.event import Event", "(UTC)', 'error'] # we rely on the zip function ignore", "= df_time.index.set_names(['fold', 'step']) df_time = df_time.rename(columns={0: 'time'}) df_time = df_time.sum(axis=0,", "(['bag', 'mean', 'std'] if leaderboard_type == 'private' else ['bag']) dataset_order", "with_links=with_links ) elif leaderboard_type in ['new', 'failed']: if leaderboard_type ==", "(UTC)'] = df['submitted at (UTC)'].astype('datetime64[s]') # reordered the column stats_order", "to generate. event_name : str The event name. user_name :", "= get_leaderboard( session, 'private competition', event_name ) event.new_leaderboard_html = get_leaderboard(", "= ['team', 'submission', 'submitted at (UTC)', 'error'] # we rely", "The leaderboard in HTML format. 
\"\"\" q = (session.query(Submission) .filter(Event.id", "turn this option to True when adding a new submission", "leaderboard_df = leaderboard_df.drop(columns='best') # dealing with ties: we need the", "ID'] = sub.basename.replace('submission_', '') df['team'] = sub.team.name df['submission'] = sub.name_with_link", "[s]'} ) df = pd.concat([df, df_time], axis=1) if leaderboard_type ==", "index=False, max_cols=None, max_rows=None, justify='left') df_html = '<thead> {} </tbody>'.format( df_html.split('<thead>')[1].split('</tbody>')[0]", ".team import get_event_team_by_name from .submission import get_bagged_scores from .submission import", "= df.drop(columns=contrib_columns) df = df.sort_values( \"bag {} {}\".format(leaderboard_type, event.official_score_name), ascending=event.get_official_score_type(session).is_lower_the_better", "+ score_name] + time_list + ['submitted at (UTC)'] ) if", "{'valid': 'public', 'test': 'private'} df_scores_mean = (df_scores_mean.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_std", "score_type.precision for score_type in event.score_types} for sub in submissions: #", "operation on the database. event_name : str The event name.", "users for a given event. 
Parameters ---------- session : :class:`sqlalchemy.orm.Session`", "EventTeam from ..model.submission import Submission from ..model.team import Team from", "+ score_name, 'bag public ' + score_name: 'public ' +", "lowest timestamp best_df = leaderboard_df.groupby('team').min() best_df = best_df[['submitted at (UTC)']].reset_index()", "(['public', 'private'] if leaderboard_type == 'private' else ['public']) score_order =", "leaderboard_df, best_df, how='left', left_on=['team', 'submitted at (UTC)'], right_on=['team', 'submitted at", "= ( [leaderboard_type + ' rank', 'team', 'submission', leaderboard_type +", "get_leaderboard( session, 'new', event_name ) session.commit() def update_user_leaderboards(session, event_name, user_name,", "= best_df[['public ' + score_name]].reset_index() best_df['best'] = True # merge", "get_leaderboard( session, 'public', event_name ) event.public_leaderboard_html_no_links = get_leaderboard( session, 'public',", "best_df['best'] = True # merge to get a best indicator", "in HTML format. \"\"\" q = (session.query(Submission) .filter(Event.id == EventTeam.event_id)", "= event.official_score_name private_leaderboard = _compute_leaderboard(session, submissions, 'private', event_name, with_links=False) time_list", "in leaderboard_df['move']] col_selected = ( [leaderboard_type + ' rank', 'team',", "highest_level = df_scores_bag.index.get_level_values('n_bag').max() df_scores_bag = df_scores_bag.loc[(slice(None), highest_level), :] df_scores_bag.index =", "['team', 'submission'] + score_list + ['contributivity', 'historical contributivity'] + time_list", "the database. event_name : str The event name. 
new_only :", "!= event.official_score_name]) score_list = [ '{} {} {}'.format(stat, dataset, score)", "leaderboard_type in ['public', 'private']: df = _compute_leaderboard( session, submissions, leaderboard_type,", "= leaderboard_df.sort_values( by=['public ' + score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better,", "event_name ) event.public_leaderboard_html_no_links = get_leaderboard( session, 'public', event_name, with_links=False )", "a given event. Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session", "columns={'train': 'train time [s]', 'valid': 'validation time [s]', 'test': 'test", "index df.columns = df.columns.map(lambda x: \" \".join(x)) # add the", "is not None: q = q.filter(Team.name == user_name) submissions =", "report in the leaderboard. leaderboard_type : {'public', 'private'} The type", "leaderboard_df = leaderboard_df.drop(columns='best') # sort by public score then by", "session.query(EventTeam).filter_by(event=event).all() for event_team in event_teams: user_name = event_team.team.name if not", "submissions if sub.is_public_leaderboard] if not submissions: return None competition_type =", "merge to get a best indicator column then select best", "'public', 'test': 'private'} df_scores_mean = (df_scores_mean.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_std =", "some extra filtering submissions = [sub for sub in submissions", "requesting the competition leaderboard. with_links : bool, default is True", "score_name] + time_list + ['submitted at (UTC)']) leaderboard_df = private_leaderboard[col_selected_private]", "competition leaderboard. 
with_links : bool, default is True Whether or", "(UTC)']].reset_index() best_df['best'] = True leaderboard_df = pd.merge( leaderboard_df, best_df, how='left',", "== 'private': col_selected.insert(1, 'move') df = leaderboard_df[col_selected] df = df.rename(columns={", "can turn this option to True when adding a new", "= sub.team.name df['submission'] = sub.name_with_link if with_links else sub.name df['contributivity']", "< LooseVersion(\"1.0.0\") else None pd.set_option('display.max_colwidth', width) def _compute_leaderboard(session, submissions, leaderboard_type,", "clickable. Returns ------- leaderboard : str The leaderboard in HTML", "col_selected = ( [leaderboard_type + ' rank', 'team', 'submission', leaderboard_type", "with_links : bool, default is True Whether or not the", ".stack().to_frame().T) df_scores_std = (df_scores_std.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_bag = (df_scores_bag.rename(index=map_renaming) .stack().to_frame().T)", "df[col_ordered] # check if the contributivity columns are null contrib_columns", "['team', 'submission', 'submitted at (UTC)', 'state'] else: columns = ['team',", ": bool, default is True Whether or not the submission", "[{ column: value for column, value in zip( columns, [sub.event_team.team.name,", "from .submission import get_time width = -1 if LooseVersion(pd.__version__) <", "leaderboard_df.sort_values( by=['public ' + score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True])", "df.drop(columns=contrib_columns) df = df.sort_values( \"bag {} {}\".format(leaderboard_type, event.official_score_name), ascending=event.get_official_score_type(session).is_lower_the_better )", "= get_leaderboard( session, 'private', event_name ) event.public_leaderboard_html_with_links = get_leaderboard( session,", "sub.is_not_sandbox)] if not submissions: return None if leaderboard_type in ['public',", ": {'public', 'private'} The type of leaderboard to built. 
event_name", "new submissions. You can turn this option to True when", "+ score_name]].reset_index() best_df['best'] = True # merge to get a", ") elif leaderboard_type in ['new', 'failed']: if leaderboard_type == 'new':", "leaderboard_df[col_selected] df = df.rename(columns={ leaderboard_type + ' ' + score_name:", "leaderboard_df = leaderboard_df.sort_values( by=['public ' + score_name, 'submitted at (UTC)'],", "score_order, stats_order) ] # Only display train and validation time", "+ score_list + ['contributivity', 'historical contributivity'] + time_list + ['max", "(UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['public rank'] = np.arange(len(leaderboard_df)) + 1 #", "'rank' }) df = df.sort_values(by='rank') return df def get_leaderboard(session, leaderboard_type,", "submissions = [sub for sub in submissions if sub.is_public_leaderboard] if", "event_name): \"\"\"Format the competition leaderboard. Parameters ---------- session : :class:`sqlalchemy.orm.Session`", "the submission name should be clickable. Returns ------- leaderboard :", "discarded when requesting the competition leaderboard. with_links : bool, default", "+ score_name], right_on=['team', 'public ' + score_name] ) leaderboard_df =", "or only the new submissions. 
You can turn this option", "if leaderboard_type == 'public': df = df.rename(columns={ key: value for", "'submission'] + score_list + ['contributivity', 'historical contributivity'] + time_list +", "Team from .team import get_event_team_by_name from .submission import get_bagged_scores from", "rank': 'rank' }) df = df.sort_values(by='rank') return df def get_leaderboard(session,", "at (UTC)']) leaderboard_df = private_leaderboard[col_selected_private] leaderboard_df = leaderboard_df.rename( columns={'bag private", "df.sort_values(by='rank') return df def get_leaderboard(session, leaderboard_type, event_name, user_name=None, with_links=True): \"\"\"Get", "event_name, user_name) if not new_only: event_team.leaderboard_html = get_leaderboard( session, 'public',", "validation and testing steps and rename them to # public", "1 # sort by private score then by submission timestamp,", "'std']) df.columns = df.columns.set_names(['stat', 'set', 'score']) # change the multi-index", "leaderboard_df = pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'public ' +", "ties: we need the lowest timestamp best_df = leaderboard_df.groupby('team').min() best_df", "['public', 'private']: df = _compute_leaderboard( session, submissions, leaderboard_type, event_name, with_links=with_links", "( [leaderboard_type + ' rank', 'team', 'submission', leaderboard_type + '", ".filter(Event.id == EventTeam.event_id) .filter(Team.id == EventTeam.team_id) .filter(EventTeam.id == Submission.event_team_id) .filter(Event.name", "(UTC)', 'state'] else: columns = ['team', 'submission', 'submitted at (UTC)',", "columns = ['team', 'submission', 'submitted at (UTC)', 'state'] else: columns", "then select best leaderboard_df = pd.merge( leaderboard_df, best_df, how='left', left_on=['team',", "'private'} df_scores_mean = (df_scores_mean.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_std = (df_scores_std.loc[list(map_renaming.keys())] 
.rename(index=map_renaming)", "True Whether or not the submission name should be clickable.", "perform the operation on the database. submissions : list of", ".filter(Team.id == EventTeam.team_id) .filter(EventTeam.id == Submission.event_team_id) .filter(Event.name == event_name)) if", "user leaderboards for a given event. Parameters ---------- session :", "= ('public' if 'public' in leaderboard_type else 'private') df =", "if the contributivity columns are null contrib_columns = ['contributivity', 'historical", "event_name ) event.new_leaderboard_html = get_leaderboard( session, 'new', event_name ) session.commit()", "event.private_leaderboard_html = get_leaderboard( session, 'private', event_name ) event.public_leaderboard_html_with_links = get_leaderboard(", "null contrib_columns = ['contributivity', 'historical contributivity'] if (df[contrib_columns] == 0).all(axis=0).all():", ") session.commit() def update_user_leaderboards(session, event_name, user_name, new_only=False): \"\"\"Update the of", "= ([event.official_score_name] + [score_type.name for score_type in event.score_types if score_type.name", "if not submissions: return None if leaderboard_type in ['public', 'private']:", "sub.id) highest_level = df_scores_bag.index.get_level_values('n_bag').max() df_scores_bag = df_scores_bag.loc[(slice(None), highest_level), :] df_scores_bag.index", "[score_type.name for score_type in event.score_types if score_type.name != event.official_score_name]) score_list", "'validation time [s]']) col_ordered = ( ['team', 'submission'] + score_list", "whole leaderboards or only the new submissions. You can turn", "only second precision for the time stamp df['submitted at (UTC)']", "else ['public']) score_order = ([event.official_score_name] + [score_type.name for score_type in", "user_name, new_only=False): \"\"\"Update the of a user leaderboards for a", "check if the contributivity columns are null contrib_columns = ['contributivity',", "event. 
Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session to directly", "user_name) submissions = q.all() submission_filter = {'public': 'is_public_leaderboard', 'private': 'is_private_leaderboard',", ") return df_html def update_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards", "df_scores_bag.round(map_score_precision) df_scores = get_scores(session, sub.id) df_scores = df_scores.round(map_score_precision) df_time =", "event_name ) session.commit() def update_user_leaderboards(session, event_name, user_name, new_only=False): \"\"\"Update the", ") event.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name ) event.public_competition_leaderboard_html =", "= get_leaderboard( session, 'public competition', event_name ) event.private_competition_leaderboard_html = get_leaderboard(", "+ score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['private rank'] =", "The session to directly perform the operation on the database.", "\"\"\"Update the leaderboards for all users for a given event.", "if m != 0 else '-' for m in leaderboard_df['move']]", "+ 1 # sort by private score then by submission", "= (leaderboard_df.groupby('team').min() if score_type.is_lower_the_better else leaderboard_df.groupby('team').max()) best_df = best_df[['public '", "# rename the column name for the public leaderboard if", "function ignore the submission state if the error # column", "else ['train time [s]', 'validation time [s]']) col_selected_private = (['team',", "right_on=['team', 'public ' + score_name] ) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df", ".submission import get_time width = -1 if LooseVersion(pd.__version__) < LooseVersion(\"1.0.0\")", "str The event name. 
new_only : bool, default is False", "'private' else ['public']) score_order = ([event.official_score_name] + [score_type.name for score_type", "if leaderboard_type == 'new': columns = ['team', 'submission', 'submitted at", "competition', event_name ) event.private_competition_leaderboard_html = get_leaderboard( session, 'private competition', event_name", "= df_scores_bag.index.get_level_values('n_bag').max() df_scores_bag = df_scores_bag.loc[(slice(None), highest_level), :] df_scores_bag.index = df_scores_bag.index.droplevel('n_bag')", "right_on=['team', 'submitted at (UTC)']) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']]", "at (UTC)'] = df['submitted at (UTC)'].astype('datetime64[s]') # reordered the column", "from all users will be queried. This parameter is discarded", "sort by private score then by submission timestamp, compute rank", "= q.filter(Team.name == user_name) submissions = q.all() submission_filter = {'public':", "= pd.concat([df, df_time], axis=1) if leaderboard_type == 'private': df['submission ID']", "= [] event = session.query(Event).filter_by(name=event_name).one() map_score_precision = {score_type.name: score_type.precision for", "0 else '-' for m in leaderboard_df['move']] col_selected = (", "'is_in_competition', 'private competition': 'is_in_competition'} submissions = [sub for sub in", "\\ leaderboard_df['public rank'] - leaderboard_df['private rank'] leaderboard_df['move'] = [ '{:+d}'.format(m)", "and rename them to # public and private map_renaming =", "competition leaderboard. 
Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session to", "from ..model.submission import Submission from ..model.team import Team from .team", "q.filter(Team.name == user_name) submissions = q.all() submission_filter = {'public': 'is_public_leaderboard',", "session, 'public', event_name ) event.public_leaderboard_html_no_links = get_leaderboard( session, 'public', event_name,", "event.new_leaderboard_html = get_leaderboard( session, 'new', event_name ) session.commit() def update_user_leaderboards(session,", "the database. \"\"\" event = session.query(Event).filter_by(name=event_name).one() event_teams = session.query(EventTeam).filter_by(event=event).all() for", "df = df.drop(columns=contrib_columns) df = df.sort_values( \"bag {} {}\".format(leaderboard_type, event.official_score_name),", ".rename(index=map_renaming) .stack().to_frame().T) df_scores_bag = (df_scores_bag.rename(index=map_renaming) .stack().to_frame().T) df = pd.concat([df_scores_bag, df_scores_mean,", "== EventTeam.event_id) .filter(Team.id == EventTeam.team_id) .filter(EventTeam.id == Submission.event_team_id) .filter(Event.name ==", "state if the error # column was not appended data", "# sort by private score then by submission timestamp, compute", "for each team best_df = (leaderboard_df.groupby('team').min() if score_type.is_lower_the_better else leaderboard_df.groupby('team').max())", "columns=columns) else: # make some extra filtering submissions = [sub", "df_scores_std = df_scores.groupby('step').std() # select only the validation and testing", "records df = pd.concat(record_score, axis=0, ignore_index=True, sort=False) # keep only", "import LooseVersion from itertools import product import numpy as np", "== event_name)) if user_name is not None: q = q.filter(Team.name", "change the multi-index into a stacked index df.columns = df.columns.map(lambda", "session, submissions, competition_type, event_name ) df_html = df.to_html(escape=False, index=False, max_cols=None,", 
"event_name, user_name, new_only=False): \"\"\"Update the of a user leaderboards for", "if leaderboard_type == 'private' else ['bag']) dataset_order = (['public', 'private']", "+ score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['public rank'] =", "True when adding a new submission in the database. \"\"\"", "for column, value in zip( columns, [sub.event_team.team.name, sub.name_with_link, pd.Timestamp(sub.submission_timestamp), (sub.state_with_link", "LooseVersion(\"1.0.0\") else None pd.set_option('display.max_colwidth', width) def _compute_leaderboard(session, submissions, leaderboard_type, event_name,", "df = df.sort_values(by='rank') return df def get_leaderboard(session, leaderboard_type, event_name, user_name=None,", "df.columns = df.columns.map(lambda x: \" \".join(x)) # add the aggregated", "at (UTC)'] ) if leaderboard_type == 'private': col_selected.insert(1, 'move') df", "column name for the public leaderboard if leaderboard_type == 'public':", "from ..model.event import Event from ..model.event import EventTeam from ..model.submission", "sub in submissions: # take only max n bag df_scores_bag", "leaderboard_type : {'public', 'private'} The type of leaderboard to built.", "keep only second precision for the time stamp df['submitted at", "pd.DataFrame(data, columns=columns) else: # make some extra filtering submissions =", "generate. event_name : str The event name. user_name : None", "'move') df = leaderboard_df[col_selected] df = df.rename(columns={ leaderboard_type + '", "leaderboard_type : {'public', 'private', 'failed', 'new', \\ 'public competition', 'private", "at (UTC)'] = pd.Timestamp(sub.submission_timestamp) record_score.append(df) # stack all the records", "[s]', 'test': 'test time [s]'} ) df = pd.concat([df, df_time],", "_compute_competition_leaderboard( session, submissions, competition_type, event_name ) df_html = df.to_html(escape=False, index=False,", "in the database. 
\"\"\" event = session.query(Event).filter_by(name=event_name).one() if not new_only:", "in zip(score_list, score_order) }) return df def _compute_competition_leaderboard(session, submissions, leaderboard_type,", "str, default is None The user name. If None, scores", "name of the event. Returns ------- competition_leaderboard : dataframe The", "appended data = [{ column: value for column, value in", "database. \"\"\" event = session.query(Event).filter_by(name=event_name).one() event_teams = session.query(EventTeam).filter_by(event=event).all() for event_team", "the operation on the database. submissions : list of :class:`ramp_database.model.Submission`", "submissions = q.all() submission_filter = {'public': 'is_public_leaderboard', 'private': 'is_private_leaderboard', 'failed':", "= get_leaderboard( session, 'new', event_name ) session.commit() def update_user_leaderboards(session, event_name,", "new submission in the database. \"\"\" event = session.query(Event).filter_by(name=event_name).one() if", "= True # merge to get a best indicator column", "df = pd.DataFrame(data, columns=columns) else: # make some extra filtering", "for key, value in zip(score_list, score_order) }) return df def", "leaderboard in a dataframe format. \"\"\" event = session.query(Event).filter_by(name=event_name).one() score_type", "private ' + score_name, 'bag public ' + score_name] +", "df = _compute_leaderboard( session, submissions, leaderboard_type, event_name, with_links=with_links ) elif", "'private competition', event_name ) event.new_leaderboard_html = get_leaderboard( session, 'new', event_name", "by=['public ' + score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['public", "user_name = event_team.team.name if not new_only: event_team.leaderboard_html = get_leaderboard( session,", "new submission in the database. \"\"\" event_team = get_event_team_by_name(session, event_name,", "submission name should be clickable. 
Returns ------- leaderboard : str", "in event.score_types} for sub in submissions: # take only max", "the column name for the public leaderboard if leaderboard_type ==", "..model.team import Team from .team import get_event_team_by_name from .submission import", "train and validation time for the public leaderboard time_list =", "a dataframe format. \"\"\" event = session.query(Event).filter_by(name=event_name).one() score_type = event.get_official_score_type(session)", ".filter(Event.name == event_name)) if user_name is not None: q =", "Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session to directly perform", "return None competition_type = ('public' if 'public' in leaderboard_type else", "event_teams = session.query(EventTeam).filter_by(event=event).all() for event_team in event_teams: user_name = event_team.team.name", "col_ordered = [\"submission ID\"] + col_ordered df = df[col_ordered] #", "to report in the leaderboard. leaderboard_type : {'public', 'private'} The", "to directly perform the operation on the database. leaderboard_type :", "'private', event_name ) event.public_leaderboard_html_with_links = get_leaderboard( session, 'public', event_name )", "= leaderboard_df.rename( columns={'bag private ' + score_name: 'private ' +", "bag df_scores_bag = get_bagged_scores(session, sub.id) highest_level = df_scores_bag.index.get_level_values('n_bag').max() df_scores_bag =", "database. leaderboard_type : {'public', 'private', 'failed', 'new', \\ 'public competition',", "\"\"\"Format the leaderboard. Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session", "leaderboards or only the new submissions. You can turn this", "score_name} ) # select best submission for each team best_df", "= df.index df_time = df_time.rename( columns={'train': 'train time [s]', 'valid':", "public leaderboard if leaderboard_type == 'public': df = df.rename(columns={ key:", "directly perform the operation on the database. 
event_name : str", "{'public', 'private', 'failed', 'new', \\ 'public competition', 'private competition'} The", "the records df = pd.concat(record_score, axis=0, ignore_index=True, sort=False) # keep", "df['historical contributivity'] = int(round( 100 * sub.historical_contributivity)) df['max RAM [MB]']", "the operation on the database. leaderboard_type : {'public', 'private', 'failed',", "{}\".format(leaderboard_type, event.official_score_name), ascending=event.get_official_score_type(session).is_lower_the_better ) # rename the column name for", "a dataframe format. \"\"\" record_score = [] event = session.query(Event).filter_by(name=event_name).one()", "= df_time.stack().to_frame() df_time.index = df_time.index.set_names(['fold', 'step']) df_time = df_time.rename(columns={0: 'time'})", "time [s]', 'test': 'test time [s]'} ) df = pd.concat([df,", "# stack all the records df = pd.concat(record_score, axis=0, ignore_index=True,", "else sub.name df['contributivity'] = int(round(100 * sub.contributivity)) df['historical contributivity'] =", "' + score_name, 'submitted at (UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['public rank']", "+ time_list + ['max RAM [MB]', 'submitted at (UTC)'] )", "width) def _compute_leaderboard(session, submissions, leaderboard_type, event_name, with_links=True): \"\"\"Format the leaderboard.", "in product(dataset_order, score_order, stats_order) ] # Only display train and", "ascending=event.get_official_score_type(session).is_lower_the_better ) # rename the column name for the public", "' + score_name], right_on=['team', 'public ' + score_name] ) leaderboard_df", "leaderboard_type, event_name): \"\"\"Format the competition leaderboard. Parameters ---------- session :", "df_scores = get_scores(session, sub.id) df_scores = df_scores.round(map_score_precision) df_time = get_time(session,", "only the new submissions. 
You can turn this option to", "left_on=['team', 'submitted at (UTC)'], right_on=['team', 'submitted at (UTC)']) leaderboard_df =", "= df[col_ordered] # check if the contributivity columns are null", "event_name : str The event name. new_only : bool, default", "\\ 'public competition', 'private competition'} The type of leaderboard to", "= df_scores.groupby('step').std() # select only the validation and testing steps", "ascending=[score_type.is_lower_the_better, True]) leaderboard_df['private rank'] = np.arange(len(leaderboard_df)) + 1 leaderboard_df['move'] =", "the submission state if the error # column was not", "'historical contributivity'] + time_list + ['max RAM [MB]', 'submitted at", "'private']: df = _compute_leaderboard( session, submissions, leaderboard_type, event_name, with_links=with_links )", "database. \"\"\" event_team = get_event_team_by_name(session, event_name, user_name) if not new_only:", "= df_scores.groupby('step').mean() df_scores_std = df_scores.groupby('step').std() # select only the validation", "session, 'failed', event_name ) event.public_competition_leaderboard_html = get_leaderboard( session, 'public competition',", "format. 
\"\"\" q = (session.query(Submission) .filter(Event.id == EventTeam.event_id) .filter(Team.id ==", "and validation time for the public leaderboard time_list = (['train", "np import pandas as pd from ..model.event import Event from", "only the validation and testing steps and rename them to", "session.query(Event).filter_by(name=event_name).one() map_score_precision = {score_type.name: score_type.precision for score_type in event.score_types} for", "pd.concat([df_scores_bag, df_scores_mean, df_scores_std], axis=1, keys=['bag', 'mean', 'std']) df.columns = df.columns.set_names(['stat',", "</tbody>'.format( df_html.split('<thead>')[1].split('</tbody>')[0] ) return df_html def update_leaderboards(session, event_name, new_only=False): \"\"\"Update", "df_time], axis=1) if leaderboard_type == 'private': df['submission ID'] = sub.basename.replace('submission_',", "m in leaderboard_df['move']] col_selected = ( [leaderboard_type + ' rank',", "leaderboard : str The leaderboard in HTML format. \"\"\" q", "score_name, 'bag public ' + score_name: 'public ' + score_name}", "['new', 'failed']: if leaderboard_type == 'new': columns = ['team', 'submission',", "df = df.sort_values( \"bag {} {}\".format(leaderboard_type, event.official_score_name), ascending=event.get_official_score_type(session).is_lower_the_better ) #", "competition_type = ('public' if 'public' in leaderboard_type else 'private') df", "'private'} The type of leaderboard to built. 
event_name : str", "not submissions: return None competition_type = ('public' if 'public' in", "' + score_name: 'public ' + score_name} ) # select", "LooseVersion from itertools import product import numpy as np import", "best_df = (leaderboard_df.groupby('team').min() if score_type.is_lower_the_better else leaderboard_df.groupby('team').max()) best_df = best_df[['public", "how='left', left_on=['team', 'public ' + score_name], right_on=['team', 'public ' +", "if not new_only: event.private_leaderboard_html = get_leaderboard( session, 'private', event_name )", "database. \"\"\" event = session.query(Event).filter_by(name=event_name).one() if not new_only: event.private_leaderboard_html =", "best submission for each team best_df = (leaderboard_df.groupby('team').min() if score_type.is_lower_the_better", "(getattr(sub, submission_filter[leaderboard_type]) and sub.is_not_sandbox)] if not submissions: return None if", "the time stamp df['submitted at (UTC)'] = df['submitted at (UTC)'].astype('datetime64[s]')", "\"\"\" q = (session.query(Submission) .filter(Event.id == EventTeam.event_id) .filter(Team.id == EventTeam.team_id)", "sub.is_public_leaderboard] if not submissions: return None competition_type = ('public' if", "= leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') # sort", "be clickable. Returns ------- leaderboard : dataframe The leaderboard in", "competition', 'private competition'} The type of leaderboard to generate. 
event_name", "axis=0, ignore_index=True, sort=False) # keep only second precision for the", "df = df.rename(columns={ key: value for key, value in zip(score_list,", "+ score_name: score_name, leaderboard_type + ' rank': 'rank' }) df", ".submission import get_scores from .submission import get_submission_max_ram from .submission import", "== 'public': df = df.rename(columns={ key: value for key, value", "= (df_scores_bag.rename(index=map_renaming) .stack().to_frame().T) df = pd.concat([df_scores_bag, df_scores_mean, df_scores_std], axis=1, keys=['bag',", "from ..model.event import EventTeam from ..model.submission import Submission from ..model.team", "True leaderboard_df = pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'submitted at", "list of :class:`ramp_database.model.Submission` The submission to report in the leaderboard.", "}) df = df.sort_values(by='rank') return df def get_leaderboard(session, leaderboard_type, event_name,", "str The name of the event. with_links : bool Whether", "{'public', 'private'} The type of leaderboard to built. event_name :", "else '-' for m in leaderboard_df['move']] col_selected = ( [leaderboard_type", "= (['team', 'submission'] + ['bag private ' + score_name, 'bag", "['train time [s]', 'validation time [s]']) col_ordered = ( ['team',", "_compute_leaderboard(session, submissions, leaderboard_type, event_name, with_links=True): \"\"\"Format the leaderboard. 
Parameters ----------", "time_list + ['submitted at (UTC)']) leaderboard_df = private_leaderboard[col_selected_private] leaderboard_df =", "leaderboard_df.drop(columns='best') # sort by public score then by submission timestamp,", "option to True when adding a new submission in the", "for sub in submissions if sub.is_public_leaderboard] if not submissions: return", "get_leaderboard( session, 'failed', event_name ) event.public_competition_leaderboard_html = get_leaderboard( session, 'public", "with_links=False ) event.failed_leaderboard_html = get_leaderboard( session, 'failed', event_name ) event.public_competition_leaderboard_html", "event_name, with_links=with_links ) elif leaderboard_type in ['new', 'failed']: if leaderboard_type", "left_on=['team', 'public ' + score_name], right_on=['team', 'public ' + score_name]", "stats_order = (['bag', 'mean', 'std'] if leaderboard_type == 'private' else", "competition': 'is_in_competition'} submissions = [sub for sub in submissions if", "of a user leaderboards for a given event. Parameters ----------", "time_list + ['submitted at (UTC)'] ) if leaderboard_type == 'private':", "+ ['contributivity', 'historical contributivity'] + time_list + ['max RAM [MB]',", "import get_event_team_by_name from .submission import get_bagged_scores from .submission import get_scores", "int(round( 100 * sub.historical_contributivity)) df['max RAM [MB]'] = get_submission_max_ram(session, sub.id)", "leaderboard_type, event_name, user_name=None, with_links=True): \"\"\"Get a leaderboard. 
Parameters ---------- session", "if leaderboard_type == 'private' else ['public']) score_order = ([event.official_score_name] +", "int(round(100 * sub.contributivity)) df['historical contributivity'] = int(round( 100 * sub.historical_contributivity))", "(session.query(Submission) .filter(Event.id == EventTeam.event_id) .filter(Team.id == EventTeam.team_id) .filter(EventTeam.id == Submission.event_team_id)", "* sub.contributivity)) df['historical contributivity'] = int(round( 100 * sub.historical_contributivity)) df['max", "leaderboard_df['move'] = [ '{:+d}'.format(m) if m != 0 else '-'", "sub.id) df_time = df_time.stack().to_frame() df_time.index = df_time.index.set_names(['fold', 'step']) df_time =", "select only the validation and testing steps and rename them", "name of the event. with_links : bool Whether or not", "time [s]']) col_ordered = ( ['team', 'submission'] + score_list +", "the of a user leaderboards for a given event. Parameters", "leaderboard_df = leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best') #", "leaderboard in HTML format. 
\"\"\" q = (session.query(Submission) .filter(Event.id ==", "event_name ) event.public_leaderboard_html_with_links = get_leaderboard( session, 'public', event_name ) event.public_leaderboard_html_no_links", "= pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'public ' + score_name],", "..model.event import Event from ..model.event import EventTeam from ..model.submission import", ".submission import get_submission_max_ram from .submission import get_time width = -1", "[sub for sub in submissions if (getattr(sub, submission_filter[leaderboard_type]) and sub.is_not_sandbox)]", "= df_time.rename(columns={0: 'time'}) df_time = df_time.sum(axis=0, level=\"step\").T df_scores_mean = df_scores.groupby('step').mean()", "event_name)) if user_name is not None: q = q.filter(Team.name ==", "([event.official_score_name] + [score_type.name for score_type in event.score_types if score_type.name !=", "justify='left') df_html = '<thead> {} </tbody>'.format( df_html.split('<thead>')[1].split('</tbody>')[0] ) return df_html", "\"\"\"Update the leaderboards for a given event. Parameters ---------- session", "a leaderboard. 
Parameters ---------- session : :class:`sqlalchemy.orm.Session` The session to", "timestamp best_df = leaderboard_df.groupby('team').min() best_df = best_df[['submitted at (UTC)']].reset_index() best_df['best']", "competition', event_name ) event.new_leaderboard_html = get_leaderboard( session, 'new', event_name )", "best_df = leaderboard_df.groupby('team').min() best_df = best_df[['submitted at (UTC)']].reset_index() best_df['best'] =", "Only display train and validation time for the public leaderboard", "event.public_leaderboard_html_no_links = get_leaderboard( session, 'public', event_name, with_links=False ) event.failed_leaderboard_html =", "the contributivity columns are null contrib_columns = ['contributivity', 'historical contributivity']", "= get_time(session, sub.id) df_time = df_time.stack().to_frame() df_time.index = df_time.index.set_names(['fold', 'step'])", "== 'failed' else sub.state)]) } for sub in submissions] df", "['contributivity', 'historical contributivity'] if (df[contrib_columns] == 0).all(axis=0).all(): df = df.drop(columns=contrib_columns)", "competition'} The type of leaderboard to generate. event_name : str", "take only max n bag df_scores_bag = get_bagged_scores(session, sub.id) highest_level", ") session.commit() def update_all_user_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards for", "bool, default is False Whether or not to update the", "with_links=True): \"\"\"Get a leaderboard. 
Parameters ---------- session : :class:`sqlalchemy.orm.Session` The", "import EventTeam from ..model.submission import Submission from ..model.team import Team", "'bag public ' + score_name] + time_list + ['submitted at", ") leaderboard_df = leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best')", "or not to update the whole leaderboards or only the", "\"\"\" record_score = [] event = session.query(Event).filter_by(name=event_name).one() map_score_precision = {score_type.name:", "and private map_renaming = {'valid': 'public', 'test': 'private'} df_scores_mean =", "event_name : str The event name. user_name : None or", "leaderboard_type == 'private' else ['public']) score_order = ([event.official_score_name] + [score_type.name", "[\"submission ID\"] + col_ordered df = df[col_ordered] # check if", "for m in leaderboard_df['move']] col_selected = ( [leaderboard_type + '", "a new submission in the database. \"\"\" event_team = get_event_team_by_name(session,", "+ col_ordered df = df[col_ordered] # check if the contributivity", "sub.name_with_link if with_links else sub.name df['contributivity'] = int(round(100 * sub.contributivity))", "HTML format. \"\"\" q = (session.query(Submission) .filter(Event.id == EventTeam.event_id) .filter(Team.id", "'historical contributivity'] if (df[contrib_columns] == 0).all(axis=0).all(): df = df.drop(columns=contrib_columns) df", "public score then by submission timestamp, compute rank leaderboard_df =", "leaderboard_type, event_name, with_links=True): \"\"\"Format the leaderboard. Parameters ---------- session :", "dataframe format. 
\"\"\" event = session.query(Event).filter_by(name=event_name).one() score_type = event.get_official_score_type(session) score_name", "event_name, user_name ) session.commit() def update_all_user_leaderboards(session, event_name, new_only=False): \"\"\"Update the", "+ score_name: 'private ' + score_name, 'bag public ' +", "[MB]'] = get_submission_max_ram(session, sub.id) df['submitted at (UTC)'] = pd.Timestamp(sub.submission_timestamp) record_score.append(df)", "when requesting the competition leaderboard. with_links : bool, default is", "def _compute_leaderboard(session, submissions, leaderboard_type, event_name, with_links=True): \"\"\"Format the leaderboard. Parameters", "'train time [s]', 'valid': 'validation time [s]', 'test': 'test time", "df = df[col_ordered] # check if the contributivity columns are", "(UTC)']) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df = leaderboard_df.drop(columns='best')", "'step']) df_time = df_time.rename(columns={0: 'time'}) df_time = df_time.sum(axis=0, level=\"step\").T df_scores_mean", "by public score then by submission timestamp, compute rank leaderboard_df", "== \"private\": col_ordered = [\"submission ID\"] + col_ordered df =", "rank'] - leaderboard_df['private rank'] leaderboard_df['move'] = [ '{:+d}'.format(m) if m", "select best leaderboard_df = pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'public", "of the event. Returns ------- competition_leaderboard : dataframe The competition", "indicator column then select best leaderboard_df = pd.merge( leaderboard_df, best_df,", "get_leaderboard( session, 'public competition', event_name ) event.private_competition_leaderboard_html = get_leaderboard( session,", "(UTC)'], right_on=['team', 'submitted at (UTC)']) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df =", "should be clickable. 
Returns ------- leaderboard : dataframe The leaderboard", "df_time.index = df.index df_time = df_time.rename( columns={'train': 'train time [s]',", "'valid': 'validation time [s]', 'test': 'test time [s]'} ) df", "'new': 'is_new', 'public competition': 'is_in_competition', 'private competition': 'is_in_competition'} submissions =", "return None if leaderboard_type in ['public', 'private']: df = _compute_leaderboard(", "pandas as pd from ..model.event import Event from ..model.event import", "user_name ) session.commit() def update_all_user_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards", "at (UTC)'], right_on=['team', 'submitted at (UTC)']) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df", "leaderboard_df['public rank'] - leaderboard_df['private rank'] leaderboard_df['move'] = [ '{:+d}'.format(m) if", "+ score_name] ) leaderboard_df = leaderboard_df.fillna(False) leaderboard_df = leaderboard_df[leaderboard_df['best']] leaderboard_df", "Submission from ..model.team import Team from .team import get_event_team_by_name from", "leaderboard_df['move'] = \\ leaderboard_df['public rank'] - leaderboard_df['private rank'] leaderboard_df['move'] =", "sub.basename.replace('submission_', '') df['team'] = sub.team.name df['submission'] = sub.name_with_link if with_links", "= q.all() submission_filter = {'public': 'is_public_leaderboard', 'private': 'is_private_leaderboard', 'failed': 'is_error',", "the database. leaderboard_type : {'public', 'private', 'failed', 'new', \\ 'public", "the whole leaderboards or only the new submissions. You can", "format. \"\"\" record_score = [] event = session.query(Event).filter_by(name=event_name).one() map_score_precision =", ": str The event name. 
user_name : str The user", "= get_leaderboard( session, 'new', event_name, user_name ) session.commit() def update_all_user_leaderboards(session,", "'private' else ['train time [s]', 'validation time [s]']) col_selected_private =", "(['team', 'submission'] + ['bag private ' + score_name, 'bag public", "If None, scores from all users will be queried. This", "df_html = '<thead> {} </tbody>'.format( df_html.split('<thead>')[1].split('</tbody>')[0] ) return df_html def", "contrib_columns = ['contributivity', 'historical contributivity'] if (df[contrib_columns] == 0).all(axis=0).all(): df", "= get_leaderboard( session, 'public', event_name, with_links=False ) event.failed_leaderboard_html = get_leaderboard(", "df.rename(columns={ key: value for key, value in zip(score_list, score_order) })", "0).all(axis=0).all(): df = df.drop(columns=contrib_columns) df = df.sort_values( \"bag {} {}\".format(leaderboard_type,", "def update_all_user_leaderboards(session, event_name, new_only=False): \"\"\"Update the leaderboards for all users", ": None or str, default is None The user name.", "' + score_name: score_name, leaderboard_type + ' rank': 'rank' })", "leaderboard_df = pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'submitted at (UTC)'],", "session.query(Event).filter_by(name=event_name).one() event_teams = session.query(EventTeam).filter_by(event=event).all() for event_team in event_teams: user_name =", "'private competition'} The type of leaderboard to generate. event_name :", "= np.arange(len(leaderboard_df)) + 1 leaderboard_df['move'] = \\ leaderboard_df['public rank'] -", "new_only=False): \"\"\"Update the leaderboards for a given event. Parameters ----------", "event name. 
user_name : None or str, default is None", "competition_type, event_name ) df_html = df.to_html(escape=False, index=False, max_cols=None, max_rows=None, justify='left')", "leaderboard_type == 'public': df = df.rename(columns={ key: value for key,", "best leaderboard_df = pd.merge( leaderboard_df, best_df, how='left', left_on=['team', 'public '", "team best_df = (leaderboard_df.groupby('team').min() if score_type.is_lower_the_better else leaderboard_df.groupby('team').max()) best_df =", ") event.public_leaderboard_html_with_links = get_leaderboard( session, 'public', event_name ) event.public_leaderboard_html_no_links =", "'{:+d}'.format(m) if m != 0 else '-' for m in", "event_name : str The event name. user_name : str The", "'private') df = _compute_competition_leaderboard( session, submissions, competition_type, event_name ) df_html", "df.index df_time = df_time.rename( columns={'train': 'train time [s]', 'valid': 'validation", "\".join(x)) # add the aggregated time information df_time.index = df.index", "user_name) if not new_only: event_team.leaderboard_html = get_leaderboard( session, 'public', event_name,", ") # select best submission for each team best_df =", "= int(round( 100 * sub.historical_contributivity)) df['max RAM [MB]'] = get_submission_max_ram(session,", "df_scores.groupby('step').std() # select only the validation and testing steps and", "'private competition': 'is_in_competition'} submissions = [sub for sub in submissions", "multi-index into a stacked index df.columns = df.columns.map(lambda x: \"", "of :class:`ramp_database.model.Submission` The submission to report in the leaderboard. leaderboard_type", "get_leaderboard(session, leaderboard_type, event_name, user_name=None, with_links=True): \"\"\"Get a leaderboard. 
Parameters ----------", "q = q.filter(Team.name == user_name) submissions = q.all() submission_filter =", "score) for dataset, score, stat in product(dataset_order, score_order, stats_order) ]", "You can turn this option to True when adding a", "to directly perform the operation on the database. event_name :", "leaderboard_df = leaderboard_df.sort_values( by=['private ' + score_name, 'submitted at (UTC)'],", "on the zip function ignore the submission state if the", "for dataset, score, stat in product(dataset_order, score_order, stats_order) ] #", "pd.Timestamp(sub.submission_timestamp) record_score.append(df) # stack all the records df = pd.concat(record_score,", ": list of :class:`ramp_database.model.Submission` The submission to report in the", "ignore the submission state if the error # column was", "public and private map_renaming = {'valid': 'public', 'test': 'private'} df_scores_mean", "not None: q = q.filter(Team.name == user_name) submissions = q.all()", "else None pd.set_option('display.max_colwidth', width) def _compute_leaderboard(session, submissions, leaderboard_type, event_name, with_links=True):", "if with_links else sub.name df['contributivity'] = int(round(100 * sub.contributivity)) df['historical", "submissions if (getattr(sub, submission_filter[leaderboard_type]) and sub.is_not_sandbox)] if not submissions: return", "= _compute_competition_leaderboard( session, submissions, competition_type, event_name ) df_html = df.to_html(escape=False,", "+ ' ' + score_name: score_name, leaderboard_type + ' rank':", "users will be queried. This parameter is discarded when requesting", "if the error # column was not appended data =", "directly perform the operation on the database. 
leaderboard_type : {'public',", "(UTC)'], ascending=[score_type.is_lower_the_better, True]) leaderboard_df['private rank'] = np.arange(len(leaderboard_df)) + 1 leaderboard_df['move']", "ascending=[score_type.is_lower_the_better, True]) leaderboard_df['public rank'] = np.arange(len(leaderboard_df)) + 1 # sort", "'failed' else sub.state)]) } for sub in submissions] df =", "record_score = [] event = session.query(Event).filter_by(name=event_name).one() map_score_precision = {score_type.name: score_type.precision", "== 0).all(axis=0).all(): df = df.drop(columns=contrib_columns) df = df.sort_values( \"bag {}", "on the database. event_name : str The event name. user_name", ".stack().to_frame().T) df = pd.concat([df_scores_bag, df_scores_mean, df_scores_std], axis=1, keys=['bag', 'mean', 'std'])", "for the public leaderboard if leaderboard_type == 'public': df =", "('public' if 'public' in leaderboard_type else 'private') df = _compute_competition_leaderboard(", "event = session.query(Event).filter_by(name=event_name).one() map_score_precision = {score_type.name: score_type.precision for score_type in", "each team best_df = (leaderboard_df.groupby('team').min() if score_type.is_lower_the_better else leaderboard_df.groupby('team').max()) best_df", "at (UTC)', 'error'] # we rely on the zip function", "import get_bagged_scores from .submission import get_scores from .submission import get_submission_max_ram", "df = pd.concat(record_score, axis=0, ignore_index=True, sort=False) # keep only second", "the event. 
Returns ------- competition_leaderboard : dataframe The competition leaderboard", "(df_scores_std.loc[list(map_renaming.keys())] .rename(index=map_renaming) .stack().to_frame().T) df_scores_bag = (df_scores_bag.rename(index=map_renaming) .stack().to_frame().T) df = pd.concat([df_scores_bag,", "[s]', 'validation time [s]']) col_selected_private = (['team', 'submission'] + ['bag", "the lowest timestamp best_df = leaderboard_df.groupby('team').min() best_df = best_df[['submitted at", "users will be queried. new_only : bool, default is False", "for all users for a given event. Parameters ---------- session", "if score_type.is_lower_the_better else leaderboard_df.groupby('team').max()) best_df = best_df[['public ' + score_name]].reset_index()", "perform the operation on the database. leaderboard_type : {'public', 'private',", "m != 0 else '-' for m in leaderboard_df['move']] col_selected", "in submissions] df = pd.DataFrame(data, columns=columns) else: # make some", "with_links=False) time_list = (['train time [s]', 'validation time [s]', 'test", "a stacked index df.columns = df.columns.map(lambda x: \" \".join(x)) #", "str The user name. If None, scores from all users", "sub.id) df_scores = df_scores.round(map_score_precision) df_time = get_time(session, sub.id) df_time =", "!= 0 else '-' for m in leaderboard_df['move']] col_selected =", "# check if the contributivity columns are null contrib_columns =", "on the database. event_name : str The event name. new_only", "need the lowest timestamp best_df = leaderboard_df.groupby('team').min() best_df = best_df[['submitted", "df = leaderboard_df[col_selected] df = df.rename(columns={ leaderboard_type + ' '", "+ ['submitted at (UTC)']) leaderboard_df = private_leaderboard[col_selected_private] leaderboard_df = leaderboard_df.rename(", "+ score_name: 'public ' + score_name} ) # select best", "parameter is discarded when requesting the competition leaderboard. 
with_links :", "sort by public score then by submission timestamp, compute rank", "'team', 'submission', leaderboard_type + ' ' + score_name] + time_list" ]
[ "1 if indent == 20: indent_Increasing = False else: indent", "Python import time, sys indent = 0 # How many", "Pause for 1/10th of a second if indent_Increasing: indent =", "20: indent_Increasing = False else: indent = indent - 1", "indent, end='') print('********') time.sleep(0.1) # Pause for 1/10th of a", "= False else: indent = indent - 1 if indent", "- 1 if indent == 0: indent_Increasing = True except", "' * indent, end='') print('********') time.sleep(0.1) # Pause for 1/10th", "of a second if indent_Increasing: indent = indent + 1", "#Automate the Boring Stuff with Python import time, sys indent", "* indent, end='') print('********') time.sleep(0.1) # Pause for 1/10th of", "main program loop print(' ' * indent, end='') print('********') time.sleep(0.1)", "# Pause for 1/10th of a second if indent_Increasing: indent", "to indent indent_Increasing = True # Whether the indentation is", "end='') print('********') time.sleep(0.1) # Pause for 1/10th of a second", "+ 1 if indent == 20: indent_Increasing = False else:", "True: # The main program loop print(' ' * indent,", "indent == 20: indent_Increasing = False else: indent = indent", "with Python import time, sys indent = 0 # How", "a second if indent_Increasing: indent = indent + 1 if", "# The main program loop print(' ' * indent, end='')", "import time, sys indent = 0 # How many spaces", "try: while True: # The main program loop print(' '", "The main program loop print(' ' * indent, end='') print('********')", "# Whether the indentation is increasing or not try: while", "program loop print(' ' * indent, end='') print('********') time.sleep(0.1) #", "0 # How many spaces to indent indent_Increasing = True", "How many spaces to indent indent_Increasing = True # Whether", "many spaces to indent indent_Increasing = True # Whether the", "= True # Whether the indentation is increasing or not", "for 1/10th of a second if indent_Increasing: indent = indent", "second if indent_Increasing: indent = indent + 1 if indent", "= 
0 # How many spaces to indent indent_Increasing =", "indent_Increasing = True # Whether the indentation is increasing or", "or not try: while True: # The main program loop", "True # Whether the indentation is increasing or not try:", "spaces to indent indent_Increasing = True # Whether the indentation", "time.sleep(0.1) # Pause for 1/10th of a second if indent_Increasing:", "is increasing or not try: while True: # The main", "= indent + 1 if indent == 20: indent_Increasing =", "indentation is increasing or not try: while True: # The", "indent = indent - 1 if indent == 0: indent_Increasing", "indent_Increasing: indent = indent + 1 if indent == 20:", "indent + 1 if indent == 20: indent_Increasing = False", "the Boring Stuff with Python import time, sys indent =", "sys indent = 0 # How many spaces to indent", "print('********') time.sleep(0.1) # Pause for 1/10th of a second if", "1/10th of a second if indent_Increasing: indent = indent +", "if indent == 20: indent_Increasing = False else: indent =", "indent = indent + 1 if indent == 20: indent_Increasing", "while True: # The main program loop print(' ' *", "if indent == 0: indent_Increasing = True except KeyboardInterrupt(): sys.exit()", "== 20: indent_Increasing = False else: indent = indent -", "# How many spaces to indent indent_Increasing = True #", "indent = 0 # How many spaces to indent indent_Increasing", "Boring Stuff with Python import time, sys indent = 0", "time, sys indent = 0 # How many spaces to", "the indentation is increasing or not try: while True: #", "increasing or not try: while True: # The main program", "indent_Increasing = False else: indent = indent - 1 if", "not try: while True: # The main program loop print('", "loop print(' ' * indent, end='') print('********') time.sleep(0.1) # Pause", "False else: indent = indent - 1 if indent ==", "Whether the indentation is increasing or not try: while True:", "1 if indent == 0: indent_Increasing = True except KeyboardInterrupt():", "indent 
indent_Increasing = True # Whether the indentation is increasing", "Stuff with Python import time, sys indent = 0 #", "print(' ' * indent, end='') print('********') time.sleep(0.1) # Pause for", "else: indent = indent - 1 if indent == 0:", "= indent - 1 if indent == 0: indent_Increasing =", "indent - 1 if indent == 0: indent_Increasing = True", "if indent_Increasing: indent = indent + 1 if indent ==" ]
[ "print \"No token for you!!!\" sys.exit() def start(): # Projects", "= get_compensations(host, token, site_pk=site['id'], project_pk=project['id']) if not compensation_list: sys.exit('There are", "are no samples') for i, result in enumerate(sample_list['data']): print i,", "= raw_input('Choose Compensation (required): ') compensation = compensation_list['data'][int(compensation_choice)] # Now", "i, result in enumerate(project_list['data']): print i, ':', result['project_name'] project_choice =", "subject_choice: subject = subject_list['data'][int(subject_choice)] # Sites site_list = get_sites(host, token,", "def start(): # Projects project_list = get_projects(host, token) for i,", "% s['original_filename'] print '=' * 40 apply_choice = None while", "response_dict['reason'] print 'Data: ' print json.dumps(response_dict['data'], indent=4) while True: start()", "= raw_input('Username: ') password = <PASSWORD>('Password: ') token = get_token(host,", "sites') for i, result in enumerate(site_list['data']): print i, ':', result['site_name']", "from reflowrestclient.utils import * host = raw_input('Host: ') username =", "sample = None if sample_choice: sample = sample_list['data'][int(sample_choice)] # Compensation", ") print \"Response: \", response_dict['status'], response_dict['reason'] print 'Data: ' print", "get_token(host, username, password) if token: print \"Authentication successful\" print '='", "') password = <PASSWORD>('Password: ') token = get_token(host, username, password)", "if sample: print '\\t%s' % sample['original_filename'] else: for s in", "password = <PASSWORD>('Password: ') token = get_token(host, username, password) if", "None if sample_choice: sample = sample_list['data'][int(sample_choice)] # Compensation compensation_list =", "= get_token(host, username, password) if token: print \"Authentication successful\" print", "response_dict['status'], response_dict['reason'] print 'Data: ' print json.dumps(response_dict['data'], indent=4) else: for", 
"print i, ':', result['project_name'] project_choice = raw_input('Choose Project:') project =", "result in enumerate(site_list['data']): print i, ':', result['site_name'] site_choice = raw_input('Choose", "':', result['original_filename'] compensation_choice = raw_input('Choose Compensation (required): ') compensation =", "') subject = None if subject_choice: subject = subject_list['data'][int(subject_choice)] #", "blank for all subjects): ') subject = None if subject_choice:", "i, ':', result['site_name'] site_choice = raw_input('Choose Site (required): ') site", "result in enumerate(compensation_list['data']): print i, ':', result['original_filename'] compensation_choice = raw_input('Choose", "sample['original_filename'] else: for s in sample_list['data']: print '\\t%s' % s['original_filename']", "project_pk=project['id']) for i, result in enumerate(subject_list['data']): print i, ':', result['subject_id']", "* 40 else: print \"No token for you!!!\" sys.exit() def", "= None if sample_choice: sample = sample_list['data'][int(sample_choice)] # Compensation compensation_list", "# Samples sample_args = [host, token] sample_kwargs = {'site_pk': site['id']}", "sys.exit() print 'continue' if sample: response_dict = add_compensation_to_sample( host, token,", "* 40 print 'You chose to add this compensation to", "= None if subject_choice: subject = subject_list['data'][int(subject_choice)] # Sites site_list", "'You chose to add this compensation to these samples:' print", "# Compensation compensation_list = get_compensations(host, token, site_pk=site['id'], project_pk=project['id']) if not", "Samples sample_args = [host, token] sample_kwargs = {'site_pk': site['id']} if", "apply_choice == 'exit': sys.exit() print 'continue' if sample: response_dict =", "i, result in enumerate(site_list['data']): print i, ':', result['site_name'] site_choice =", "= raw_input('Choose Site (required): ') site = site_list['data'][int(site_choice)] # Samples", "json.dumps(response_dict['data'], 
indent=4) else: for sample in sample_list['data']: response_dict = add_compensation_to_sample(", "':', result['site_name'] site_choice = raw_input('Choose Site (required): ') site =", "result['project_name'] project_choice = raw_input('Choose Project:') project = project_list['data'][int(project_choice)] # Subjects", "'Samples:' if sample: print '\\t%s' % sample['original_filename'] else: for s", "(leave blank for all subjects): ') subject = None if", "for i, result in enumerate(sample_list['data']): print i, ':', result['original_filename'] sample_choice", "print i, ':', result['original_filename'] compensation_choice = raw_input('Choose Compensation (required): ')", "result['subject_id'] subject_choice = raw_input('Choose Subject (leave blank for all subjects):", "compensation_list: sys.exit('There are no compensations') for i, result in enumerate(compensation_list['data']):", "site_pk=site['id'], project_pk=project['id']) if not compensation_list: sys.exit('There are no compensations') for", "chose to add this compensation to these samples:' print '\\Compensation:", "reflowrestclient.utils import * host = raw_input('Host: ') username = raw_input('Username:", "json from reflowrestclient.utils import * host = raw_input('Host: ') username", "= add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']), ) print \"Response: \",", "sample_list['data'][int(sample_choice)] # Compensation compensation_list = get_compensations(host, token, site_pk=site['id'], project_pk=project['id']) if", "raw_input('Username: ') password = <PASSWORD>('Password: ') token = get_token(host, username,", "print '\\t%s' % sample['original_filename'] else: for s in sample_list['data']: print", "sample_list: sys.exit('There are no samples') for i, result in enumerate(sample_list['data']):", "40 apply_choice = None while apply_choice not in ['continue', 'exit']:", "'Data: ' print json.dumps(response_dict['data'], indent=4) else: for sample in 
sample_list['data']:", "username, password) if token: print \"Authentication successful\" print '=' *", "i, ':', result['original_filename'] sample_choice = raw_input('Choose Sample (leave blank for", "print '=' * 40 apply_choice = None while apply_choice not", "for i, result in enumerate(project_list['data']): print i, ':', result['project_name'] project_choice", "blank for all samples): ') sample = None if sample_choice:", "this compensation to these samples:' print '\\Compensation: %s' % compensation['original_filename']", "subject_choice = raw_input('Choose Subject (leave blank for all subjects): ')", "sys.exit() def start(): # Projects project_list = get_projects(host, token) for", "compensation_list['data'][int(compensation_choice)] # Now have user verify information print '=' *", "= raw_input('Choose Subject (leave blank for all subjects): ') subject", "(leave blank for all samples): ') sample = None if", "i, result in enumerate(subject_list['data']): print i, ':', result['subject_id'] subject_choice =", "result['site_name'] site_choice = raw_input('Choose Site (required): ') site = site_list['data'][int(site_choice)]", "Now have user verify information print '=' * 40 print", "= get_projects(host, token) for i, result in enumerate(project_list['data']): print i,", "\", response_dict['status'], response_dict['reason'] print 'Data: ' print json.dumps(response_dict['data'], indent=4) else:", "import json from reflowrestclient.utils import * host = raw_input('Host: ')", "sys.exit('There are no sites') for i, result in enumerate(site_list['data']): print", "project_choice = raw_input('Choose Project:') project = project_list['data'][int(project_choice)] # Subjects subject_list", "{'site_pk': site['id']} if subject: sample_kwargs['subject_pk'] = subject['id'] sample_list = get_samples(*sample_args,", "raw_input('Choose Subject (leave blank for all subjects): ') subject =", "= get_samples(*sample_args, **sample_kwargs) if not sample_list: sys.exit('There are no 
samples')", "i, result in enumerate(compensation_list['data']): print i, ':', result['original_filename'] compensation_choice =", "token, project_pk=project['id']) if not site_list: sys.exit('There are no sites') for", "sample_args = [host, token] sample_kwargs = {'site_pk': site['id']} if subject:", "= sample_list['data'][int(sample_choice)] # Compensation compensation_list = get_compensations(host, token, site_pk=site['id'], project_pk=project['id'])", "s['original_filename'] print '=' * 40 apply_choice = None while apply_choice", "subject_list = get_subjects(host, token, project_pk=project['id']) for i, result in enumerate(subject_list['data']):", "':', result['subject_id'] subject_choice = raw_input('Choose Subject (leave blank for all", "print 'Samples:' if sample: print '\\t%s' % sample['original_filename'] else: for", "'=' * 40 else: print \"No token for you!!!\" sys.exit()", "subject: sample_kwargs['subject_pk'] = subject['id'] sample_list = get_samples(*sample_args, **sample_kwargs) if not", "host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']) ) print \"Response: \", response_dict['status'], response_dict['reason']", "Site (required): ') site = site_list['data'][int(site_choice)] # Samples sample_args =", "add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']), ) print \"Response: \", response_dict['status'],", "= raw_input('Host: ') username = raw_input('Username: ') password = <PASSWORD>('Password:", "else: for sample in sample_list['data']: response_dict = add_compensation_to_sample( host, token,", "# Projects project_list = get_projects(host, token) for i, result in", "') token = get_token(host, username, password) if token: print \"Authentication", "if subject_choice: subject = subject_list['data'][int(subject_choice)] # Sites site_list = get_sites(host,", "sample = sample_list['data'][int(sample_choice)] # Compensation compensation_list = get_compensations(host, 
token, site_pk=site['id'],", "apply_choice = raw_input(\"Type 'continue' to upload, 'exit' abort: \") if", "= compensation_list['data'][int(compensation_choice)] # Now have user verify information print '='", "40 print 'You chose to add this compensation to these", "token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']), ) print \"Response: \", response_dict['status'], response_dict['reason'] print", "print i, ':', result['subject_id'] subject_choice = raw_input('Choose Subject (leave blank", "**sample_kwargs) if not sample_list: sys.exit('There are no samples') for i,", "'exit': sys.exit() print 'continue' if sample: response_dict = add_compensation_to_sample( host,", "for i, result in enumerate(site_list['data']): print i, ':', result['site_name'] site_choice", "sample_list = get_samples(*sample_args, **sample_kwargs) if not sample_list: sys.exit('There are no", "sys.exit('There are no samples') for i, result in enumerate(sample_list['data']): print", "raw_input('Choose Compensation (required): ') compensation = compensation_list['data'][int(compensation_choice)] # Now have", "samples') for i, result in enumerate(sample_list['data']): print i, ':', result['original_filename']", "subject = None if subject_choice: subject = subject_list['data'][int(subject_choice)] # Sites", "result['original_filename'] compensation_choice = raw_input('Choose Compensation (required): ') compensation = compensation_list['data'][int(compensation_choice)]", "result in enumerate(sample_list['data']): print i, ':', result['original_filename'] sample_choice = raw_input('Choose", "token] sample_kwargs = {'site_pk': site['id']} if subject: sample_kwargs['subject_pk'] = subject['id']", "subject_list['data'][int(subject_choice)] # Sites site_list = get_sites(host, token, project_pk=project['id']) if not", "Sites site_list = get_sites(host, token, project_pk=project['id']) if not site_list: sys.exit('There", "= raw_input(\"Type 'continue' to upload, 'exit' abort: \") if 
apply_choice", "== 'exit': sys.exit() print 'continue' if sample: response_dict = add_compensation_to_sample(", "add this compensation to these samples:' print '\\Compensation: %s' %", "host = raw_input('Host: ') username = raw_input('Username: ') password =", "no sites') for i, result in enumerate(site_list['data']): print i, ':',", "import getpass import sys import json from reflowrestclient.utils import *", "for s in sample_list['data']: print '\\t%s' % s['original_filename'] print '='", "no samples') for i, result in enumerate(sample_list['data']): print i, ':',", "') site = site_list['data'][int(site_choice)] # Samples sample_args = [host, token]", "print i, ':', result['site_name'] site_choice = raw_input('Choose Site (required): ')", "project_pk=project['id']) if not compensation_list: sys.exit('There are no compensations') for i,", "sample_choice: sample = sample_list['data'][int(sample_choice)] # Compensation compensation_list = get_compensations(host, token,", "print i, ':', result['original_filename'] sample_choice = raw_input('Choose Sample (leave blank", "= raw_input('Choose Sample (leave blank for all samples): ') sample", "get_subjects(host, token, project_pk=project['id']) for i, result in enumerate(subject_list['data']): print i,", "enumerate(sample_list['data']): print i, ':', result['original_filename'] sample_choice = raw_input('Choose Sample (leave", "(required): ') compensation = compensation_list['data'][int(compensation_choice)] # Now have user verify", "None if subject_choice: subject = subject_list['data'][int(subject_choice)] # Sites site_list =", "= get_sites(host, token, project_pk=project['id']) if not site_list: sys.exit('There are no", "['continue', 'exit']: apply_choice = raw_input(\"Type 'continue' to upload, 'exit' abort:", "= None while apply_choice not in ['continue', 'exit']: apply_choice =", "in enumerate(sample_list['data']): print i, ':', result['original_filename'] sample_choice = raw_input('Choose Sample", "[host, token] 
sample_kwargs = {'site_pk': site['id']} if subject: sample_kwargs['subject_pk'] =", "get_sites(host, token, project_pk=project['id']) if not site_list: sys.exit('There are no sites')", "verify information print '=' * 40 print 'You chose to", "':', result['original_filename'] sample_choice = raw_input('Choose Sample (leave blank for all", "'exit']: apply_choice = raw_input(\"Type 'continue' to upload, 'exit' abort: \")", "if not site_list: sys.exit('There are no sites') for i, result", "\") if apply_choice == 'exit': sys.exit() print 'continue' if sample:", "information print '=' * 40 print 'You chose to add", "if apply_choice == 'exit': sys.exit() print 'continue' if sample: response_dict", "site = site_list['data'][int(site_choice)] # Samples sample_args = [host, token] sample_kwargs", "raw_input(\"Type 'continue' to upload, 'exit' abort: \") if apply_choice ==", "get_compensations(host, token, site_pk=site['id'], project_pk=project['id']) if not compensation_list: sys.exit('There are no", "= subject_list['data'][int(subject_choice)] # Sites site_list = get_sites(host, token, project_pk=project['id']) if", "token, project_pk=project['id']) for i, result in enumerate(subject_list['data']): print i, ':',", "site_list = get_sites(host, token, project_pk=project['id']) if not site_list: sys.exit('There are", "apply_choice not in ['continue', 'exit']: apply_choice = raw_input(\"Type 'continue' to", "project_list['data'][int(project_choice)] # Subjects subject_list = get_subjects(host, token, project_pk=project['id']) for i,", "token = get_token(host, username, password) if token: print \"Authentication successful\"", "not compensation_list: sys.exit('There are no compensations') for i, result in", "'\\t%s' % s['original_filename'] print '=' * 40 apply_choice = None", "import sys import json from reflowrestclient.utils import * host =", "print json.dumps(response_dict['data'], indent=4) else: for sample in sample_list['data']: response_dict =", "in 
enumerate(project_list['data']): print i, ':', result['project_name'] project_choice = raw_input('Choose Project:')", "if not compensation_list: sys.exit('There are no compensations') for i, result", "None while apply_choice not in ['continue', 'exit']: apply_choice = raw_input(\"Type", "s in sample_list['data']: print '\\t%s' % s['original_filename'] print '=' *", "for sample in sample_list['data']: response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']),", "sample_list['data']: response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']), ) print", "to these samples:' print '\\Compensation: %s' % compensation['original_filename'] print 'Samples:'", "enumerate(subject_list['data']): print i, ':', result['subject_id'] subject_choice = raw_input('Choose Subject (leave", "result in enumerate(subject_list['data']): print i, ':', result['subject_id'] subject_choice = raw_input('Choose", "sample_kwargs['subject_pk'] = subject['id'] sample_list = get_samples(*sample_args, **sample_kwargs) if not sample_list:", "sys.exit('There are no compensations') for i, result in enumerate(compensation_list['data']): print", "response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']), ) print \"Response:", "') sample = None if sample_choice: sample = sample_list['data'][int(sample_choice)] #", "'continue' to upload, 'exit' abort: \") if apply_choice == 'exit':", "sample: response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']) ) print", "\"No token for you!!!\" sys.exit() def start(): # Projects project_list", "get_projects(host, token) for i, result in enumerate(project_list['data']): print i, ':',", "Compensation compensation_list = get_compensations(host, token, site_pk=site['id'], project_pk=project['id']) if not compensation_list:", "= [host, token] 
sample_kwargs = {'site_pk': site['id']} if subject: sample_kwargs['subject_pk']", "':', result['project_name'] project_choice = raw_input('Choose Project:') project = project_list['data'][int(project_choice)] #", "* host = raw_input('Host: ') username = raw_input('Username: ') password", "compensation to these samples:' print '\\Compensation: %s' % compensation['original_filename'] print", "for i, result in enumerate(subject_list['data']): print i, ':', result['subject_id'] subject_choice", "no compensations') for i, result in enumerate(compensation_list['data']): print i, ':',", "samples): ') sample = None if sample_choice: sample = sample_list['data'][int(sample_choice)]", "print '=' * 40 print 'You chose to add this", "token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']) ) print \"Response: \", response_dict['status'], response_dict['reason'] print", "samples:' print '\\Compensation: %s' % compensation['original_filename'] print 'Samples:' if sample:", "') username = raw_input('Username: ') password = <PASSWORD>('Password: ') token", "= subject['id'] sample_list = get_samples(*sample_args, **sample_kwargs) if not sample_list: sys.exit('There", "site_choice = raw_input('Choose Site (required): ') site = site_list['data'][int(site_choice)] #", "'continue' if sample: response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id'])", "= raw_input('Choose Project:') project = project_list['data'][int(project_choice)] # Subjects subject_list =", "for all samples): ') sample = None if sample_choice: sample", "in enumerate(subject_list['data']): print i, ':', result['subject_id'] subject_choice = raw_input('Choose Subject", "in sample_list['data']: print '\\t%s' % s['original_filename'] print '=' * 40", "enumerate(project_list['data']): print i, ':', result['project_name'] project_choice = raw_input('Choose Project:') project", "not sample_list: sys.exit('There are no samples') for i, 
result in", "add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']) ) print \"Response: \", response_dict['status'],", "print '\\t%s' % s['original_filename'] print '=' * 40 apply_choice =", "Sample (leave blank for all samples): ') sample = None", "in enumerate(compensation_list['data']): print i, ':', result['original_filename'] compensation_choice = raw_input('Choose Compensation", "Project:') project = project_list['data'][int(project_choice)] # Subjects subject_list = get_subjects(host, token,", "sample_pk=str(sample['id']), compensation_pk=str(compensation['id']) ) print \"Response: \", response_dict['status'], response_dict['reason'] print 'Data:", "are no sites') for i, result in enumerate(site_list['data']): print i,", "in enumerate(site_list['data']): print i, ':', result['site_name'] site_choice = raw_input('Choose Site", "these samples:' print '\\Compensation: %s' % compensation['original_filename'] print 'Samples:' if", "get_samples(*sample_args, **sample_kwargs) if not sample_list: sys.exit('There are no samples') for", "getpass import sys import json from reflowrestclient.utils import * host", "response_dict['status'], response_dict['reason'] print 'Data: ' print json.dumps(response_dict['data'], indent=4) while True:", "i, ':', result['original_filename'] compensation_choice = raw_input('Choose Compensation (required): ') compensation", "if sample: response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']) )", "i, result in enumerate(sample_list['data']): print i, ':', result['original_filename'] sample_choice =", "'\\t%s' % sample['original_filename'] else: for s in sample_list['data']: print '\\t%s'", "indent=4) else: for sample in sample_list['data']: response_dict = add_compensation_to_sample( host,", "%s' % compensation['original_filename'] print 'Samples:' if sample: print '\\t%s' %", "to upload, 'exit' abort: \") if 
apply_choice == 'exit': sys.exit()", "host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']), ) print \"Response: \", response_dict['status'], response_dict['reason']", "all samples): ') sample = None if sample_choice: sample =", "print 'continue' if sample: response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']),", "upload, 'exit' abort: \") if apply_choice == 'exit': sys.exit() print", "compensation_pk=str(compensation['id']), ) print \"Response: \", response_dict['status'], response_dict['reason'] print 'Data: '", "Projects project_list = get_projects(host, token) for i, result in enumerate(project_list['data']):", "* 40 apply_choice = None while apply_choice not in ['continue',", "sample_choice = raw_input('Choose Sample (leave blank for all samples): ')", "'exit' abort: \") if apply_choice == 'exit': sys.exit() print 'continue'", "result['original_filename'] sample_choice = raw_input('Choose Sample (leave blank for all samples):", "compensation_list = get_compensations(host, token, site_pk=site['id'], project_pk=project['id']) if not compensation_list: sys.exit('There", "raw_input('Choose Site (required): ') site = site_list['data'][int(site_choice)] # Samples sample_args", "') compensation = compensation_list['data'][int(compensation_choice)] # Now have user verify information", "if token: print \"Authentication successful\" print '=' * 40 else:", "i, ':', result['subject_id'] subject_choice = raw_input('Choose Subject (leave blank for", "token: print \"Authentication successful\" print '=' * 40 else: print", "compensation_pk=str(compensation['id']) ) print \"Response: \", response_dict['status'], response_dict['reason'] print 'Data: '", "\", response_dict['status'], response_dict['reason'] print 'Data: ' print json.dumps(response_dict['data'], indent=4) while", "% compensation['original_filename'] print 'Samples:' if sample: print '\\t%s' % sample['original_filename']", "= <PASSWORD>('Password: ') token 
= get_token(host, username, password) if token:", "= get_subjects(host, token, project_pk=project['id']) for i, result in enumerate(subject_list['data']): print", "result in enumerate(project_list['data']): print i, ':', result['project_name'] project_choice = raw_input('Choose", "in ['continue', 'exit']: apply_choice = raw_input(\"Type 'continue' to upload, 'exit'", "start(): # Projects project_list = get_projects(host, token) for i, result", "response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']) ) print \"Response:", "raw_input('Choose Sample (leave blank for all samples): ') sample =", "\"Authentication successful\" print '=' * 40 else: print \"No token", "project_list = get_projects(host, token) for i, result in enumerate(project_list['data']): print", "you!!!\" sys.exit() def start(): # Projects project_list = get_projects(host, token)", "abort: \") if apply_choice == 'exit': sys.exit() print 'continue' if", "response_dict['reason'] print 'Data: ' print json.dumps(response_dict['data'], indent=4) else: for sample", "raw_input('Choose Project:') project = project_list['data'][int(project_choice)] # Subjects subject_list = get_subjects(host,", "sample_pk=str(sample['id']), compensation_pk=str(compensation['id']), ) print \"Response: \", response_dict['status'], response_dict['reason'] print 'Data:", "compensation_choice = raw_input('Choose Compensation (required): ') compensation = compensation_list['data'][int(compensation_choice)] #", "token, site_pk=site['id'], project_pk=project['id']) if not compensation_list: sys.exit('There are no compensations')", "token) for i, result in enumerate(project_list['data']): print i, ':', result['project_name']", "# Subjects subject_list = get_subjects(host, token, project_pk=project['id']) for i, result", "subject['id'] sample_list = get_samples(*sample_args, **sample_kwargs) if not sample_list: sys.exit('There are", "project = 
project_list['data'][int(project_choice)] # Subjects subject_list = get_subjects(host, token, project_pk=project['id'])", "have user verify information print '=' * 40 print 'You", "Subjects subject_list = get_subjects(host, token, project_pk=project['id']) for i, result in", "else: print \"No token for you!!!\" sys.exit() def start(): #", "compensation['original_filename'] print 'Samples:' if sample: print '\\t%s' % sample['original_filename'] else:", "subject = subject_list['data'][int(subject_choice)] # Sites site_list = get_sites(host, token, project_pk=project['id'])", "in sample_list['data']: response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']), )", "apply_choice = None while apply_choice not in ['continue', 'exit']: apply_choice", "print \"Response: \", response_dict['status'], response_dict['reason'] print 'Data: ' print json.dumps(response_dict['data'],", "site_list: sys.exit('There are no sites') for i, result in enumerate(site_list['data']):", "compensation = compensation_list['data'][int(compensation_choice)] # Now have user verify information print", "compensations') for i, result in enumerate(compensation_list['data']): print i, ':', result['original_filename']", "print \"Authentication successful\" print '=' * 40 else: print \"No", "\"Response: \", response_dict['status'], response_dict['reason'] print 'Data: ' print json.dumps(response_dict['data'], indent=4)", "enumerate(site_list['data']): print i, ':', result['site_name'] site_choice = raw_input('Choose Site (required):", "' print json.dumps(response_dict['data'], indent=4) else: for sample in sample_list['data']: response_dict", "sample: print '\\t%s' % sample['original_filename'] else: for s in sample_list['data']:", "print '=' * 40 else: print \"No token for you!!!\"", "raw_input('Host: ') username = raw_input('Username: ') password = <PASSWORD>('Password: ')", "40 else: print \"No token for you!!!\" sys.exit() def 
start():", "# Now have user verify information print '=' * 40", "for you!!!\" sys.exit() def start(): # Projects project_list = get_projects(host,", "'=' * 40 print 'You chose to add this compensation", "all subjects): ') subject = None if subject_choice: subject =", "sample_list['data']: print '\\t%s' % s['original_filename'] print '=' * 40 apply_choice", "# Sites site_list = get_sites(host, token, project_pk=project['id']) if not site_list:", "print 'Data: ' print json.dumps(response_dict['data'], indent=4) else: for sample in", "= site_list['data'][int(site_choice)] # Samples sample_args = [host, token] sample_kwargs =", "print 'You chose to add this compensation to these samples:'", "if subject: sample_kwargs['subject_pk'] = subject['id'] sample_list = get_samples(*sample_args, **sample_kwargs) if", "print '\\Compensation: %s' % compensation['original_filename'] print 'Samples:' if sample: print", "for i, result in enumerate(compensation_list['data']): print i, ':', result['original_filename'] compensation_choice", "site_list['data'][int(site_choice)] # Samples sample_args = [host, token] sample_kwargs = {'site_pk':", "Subject (leave blank for all subjects): ') subject = None", "else: for s in sample_list['data']: print '\\t%s' % s['original_filename'] print", "project_pk=project['id']) if not site_list: sys.exit('There are no sites') for i,", "sys import json from reflowrestclient.utils import * host = raw_input('Host:", "subjects): ') subject = None if subject_choice: subject = subject_list['data'][int(subject_choice)]", "i, ':', result['project_name'] project_choice = raw_input('Choose Project:') project = project_list['data'][int(project_choice)]", "if sample_choice: sample = sample_list['data'][int(sample_choice)] # Compensation compensation_list = get_compensations(host,", "are no compensations') for i, result in enumerate(compensation_list['data']): print i,", "username = raw_input('Username: ') password = <PASSWORD>('Password: ') token =", "= 
project_list['data'][int(project_choice)] # Subjects subject_list = get_subjects(host, token, project_pk=project['id']) for", "= add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']) ) print \"Response: \",", "for all subjects): ') subject = None if subject_choice: subject", "not in ['continue', 'exit']: apply_choice = raw_input(\"Type 'continue' to upload,", "import * host = raw_input('Host: ') username = raw_input('Username: ')", "token for you!!!\" sys.exit() def start(): # Projects project_list =", "not site_list: sys.exit('There are no sites') for i, result in", "= {'site_pk': site['id']} if subject: sample_kwargs['subject_pk'] = subject['id'] sample_list =", "sample in sample_list['data']: response_dict = add_compensation_to_sample( host, token, sample_pk=str(sample['id']), compensation_pk=str(compensation['id']),", "sample_kwargs = {'site_pk': site['id']} if subject: sample_kwargs['subject_pk'] = subject['id'] sample_list", "(required): ') site = site_list['data'][int(site_choice)] # Samples sample_args = [host,", "<PASSWORD>('Password: ') token = get_token(host, username, password) if token: print", "'\\Compensation: %s' % compensation['original_filename'] print 'Samples:' if sample: print '\\t%s'", "site['id']} if subject: sample_kwargs['subject_pk'] = subject['id'] sample_list = get_samples(*sample_args, **sample_kwargs)", "enumerate(compensation_list['data']): print i, ':', result['original_filename'] compensation_choice = raw_input('Choose Compensation (required):", "'=' * 40 apply_choice = None while apply_choice not in", "% sample['original_filename'] else: for s in sample_list['data']: print '\\t%s' %", "password) if token: print \"Authentication successful\" print '=' * 40", "if not sample_list: sys.exit('There are no samples') for i, result", "Compensation (required): ') compensation = compensation_list['data'][int(compensation_choice)] # Now have user", "while apply_choice not in ['continue', 
'exit']: apply_choice = raw_input(\"Type 'continue'", "user verify information print '=' * 40 print 'You chose", "successful\" print '=' * 40 else: print \"No token for", "to add this compensation to these samples:' print '\\Compensation: %s'" ]
[ "views.choose_company, name='choose_company'), url(r'^cleanlogs/$', jobs.cleanlogs, name='cleanlogs'), url(r'^primecache/$', jobs.primecache, name='primecache'), url(r'^dump_fixtures/$', views.dump_fixtures),", "views from . import jobs urlpatterns = [ url(r'^choose_company/(?P<company_id>.*)/$', views.choose_company,", "urlpatterns = [ url(r'^choose_company/(?P<company_id>.*)/$', views.choose_company, name='choose_company'), url(r'^cleanlogs/$', jobs.cleanlogs, name='cleanlogs'), url(r'^primecache/$',", "= [ url(r'^choose_company/(?P<company_id>.*)/$', views.choose_company, name='choose_company'), url(r'^cleanlogs/$', jobs.cleanlogs, name='cleanlogs'), url(r'^primecache/$', jobs.primecache,", "django.conf.urls import url, static from . import views from .", "static from . import views from . import jobs urlpatterns", "url, static from . import views from . import jobs", "import settings from django.conf.urls import url, static from . import", "from django.conf import settings from django.conf.urls import url, static from", "django.conf import settings from django.conf.urls import url, static from .", "from . import jobs urlpatterns = [ url(r'^choose_company/(?P<company_id>.*)/$', views.choose_company, name='choose_company'),", "from . import views from . import jobs urlpatterns =", "jobs urlpatterns = [ url(r'^choose_company/(?P<company_id>.*)/$', views.choose_company, name='choose_company'), url(r'^cleanlogs/$', jobs.cleanlogs, name='cleanlogs'),", "name='choose_company'), url(r'^cleanlogs/$', jobs.cleanlogs, name='cleanlogs'), url(r'^primecache/$', jobs.primecache, name='primecache'), url(r'^dump_fixtures/$', views.dump_fixtures), ]", "settings from django.conf.urls import url, static from . import views", "import views from . import jobs urlpatterns = [ url(r'^choose_company/(?P<company_id>.*)/$',", "import url, static from . import views from . import", "from django.conf.urls import url, static from . 
import views from", "import jobs urlpatterns = [ url(r'^choose_company/(?P<company_id>.*)/$', views.choose_company, name='choose_company'), url(r'^cleanlogs/$', jobs.cleanlogs,", "[ url(r'^choose_company/(?P<company_id>.*)/$', views.choose_company, name='choose_company'), url(r'^cleanlogs/$', jobs.cleanlogs, name='cleanlogs'), url(r'^primecache/$', jobs.primecache, name='primecache'),", "url(r'^choose_company/(?P<company_id>.*)/$', views.choose_company, name='choose_company'), url(r'^cleanlogs/$', jobs.cleanlogs, name='cleanlogs'), url(r'^primecache/$', jobs.primecache, name='primecache'), url(r'^dump_fixtures/$',", ". import views from . import jobs urlpatterns = [", ". import jobs urlpatterns = [ url(r'^choose_company/(?P<company_id>.*)/$', views.choose_company, name='choose_company'), url(r'^cleanlogs/$'," ]
[ "open(\"README.md\", \"r\") as fh: long_description = fh.read() setuptools.setup( name=\"i3-workspace-swap\", description='A", "workplaces in i3wm', long_description=long_description, long_description_content_type=\"text/markdown\", version=\"1.1.0\", url='https://github.com/einzigartigername/i3-workspace-swap', license='MIT', author='<NAME>', author_email='<EMAIL>',", "i3wm', long_description=long_description, long_description_content_type=\"text/markdown\", version=\"1.1.0\", url='https://github.com/einzigartigername/i3-workspace-swap', license='MIT', author='<NAME>', author_email='<EMAIL>', packages=setuptools.find_packages(), scripts=['i3-workspace-swap'],", "\"r\") as fh: long_description = fh.read() setuptools.setup( name=\"i3-workspace-swap\", description='A python", "scripts=['i3-workspace-swap'], install_requires=['i3ipc'], classifiers=[ \"Intended Audience :: End Users/Desktop\", \"License ::", "End Users/Desktop\", \"License :: OSI Approved :: MIT License\", \"Operating", "swap the content of two workplaces in i3wm', long_description=long_description, long_description_content_type=\"text/markdown\",", "classifiers=[ \"Intended Audience :: End Users/Desktop\", \"License :: OSI Approved", "License\", \"Operating System :: POSIX :: Linux\", 'Programming Language ::", ":: End Users/Desktop\", \"License :: OSI Approved :: MIT License\",", "setuptools.setup( name=\"i3-workspace-swap\", description='A python utility swap the content of two", "Linux\", 'Programming Language :: Python :: 3' ], python_requires='>=3.6', )", ":: MIT License\", \"Operating System :: POSIX :: Linux\", 'Programming", "System :: POSIX :: Linux\", 'Programming Language :: Python ::", "two workplaces in i3wm', long_description=long_description, long_description_content_type=\"text/markdown\", version=\"1.1.0\", url='https://github.com/einzigartigername/i3-workspace-swap', license='MIT', author='<NAME>',", "fh: long_description = fh.read() setuptools.setup( name=\"i3-workspace-swap\", 
description='A python utility swap", "utility swap the content of two workplaces in i3wm', long_description=long_description,", "setuptools with open(\"README.md\", \"r\") as fh: long_description = fh.read() setuptools.setup(", "Approved :: MIT License\", \"Operating System :: POSIX :: Linux\",", "MIT License\", \"Operating System :: POSIX :: Linux\", 'Programming Language", "content of two workplaces in i3wm', long_description=long_description, long_description_content_type=\"text/markdown\", version=\"1.1.0\", url='https://github.com/einzigartigername/i3-workspace-swap',", "OSI Approved :: MIT License\", \"Operating System :: POSIX ::", ":: OSI Approved :: MIT License\", \"Operating System :: POSIX", "as fh: long_description = fh.read() setuptools.setup( name=\"i3-workspace-swap\", description='A python utility", "in i3wm', long_description=long_description, long_description_content_type=\"text/markdown\", version=\"1.1.0\", url='https://github.com/einzigartigername/i3-workspace-swap', license='MIT', author='<NAME>', author_email='<EMAIL>', packages=setuptools.find_packages(),", "Users/Desktop\", \"License :: OSI Approved :: MIT License\", \"Operating System", "<gh_stars>0 import setuptools with open(\"README.md\", \"r\") as fh: long_description =", "= fh.read() setuptools.setup( name=\"i3-workspace-swap\", description='A python utility swap the content", "install_requires=['i3ipc'], classifiers=[ \"Intended Audience :: End Users/Desktop\", \"License :: OSI", "POSIX :: Linux\", 'Programming Language :: Python :: 3' ],", "\"License :: OSI Approved :: MIT License\", \"Operating System ::", "long_description_content_type=\"text/markdown\", version=\"1.1.0\", url='https://github.com/einzigartigername/i3-workspace-swap', license='MIT', author='<NAME>', author_email='<EMAIL>', packages=setuptools.find_packages(), scripts=['i3-workspace-swap'], install_requires=['i3ipc'], classifiers=[", "of two workplaces in i3wm', long_description=long_description, 
long_description_content_type=\"text/markdown\", version=\"1.1.0\", url='https://github.com/einzigartigername/i3-workspace-swap', license='MIT',", "name=\"i3-workspace-swap\", description='A python utility swap the content of two workplaces", "python utility swap the content of two workplaces in i3wm',", "the content of two workplaces in i3wm', long_description=long_description, long_description_content_type=\"text/markdown\", version=\"1.1.0\",", "long_description=long_description, long_description_content_type=\"text/markdown\", version=\"1.1.0\", url='https://github.com/einzigartigername/i3-workspace-swap', license='MIT', author='<NAME>', author_email='<EMAIL>', packages=setuptools.find_packages(), scripts=['i3-workspace-swap'], install_requires=['i3ipc'],", "version=\"1.1.0\", url='https://github.com/einzigartigername/i3-workspace-swap', license='MIT', author='<NAME>', author_email='<EMAIL>', packages=setuptools.find_packages(), scripts=['i3-workspace-swap'], install_requires=['i3ipc'], classifiers=[ \"Intended", "author_email='<EMAIL>', packages=setuptools.find_packages(), scripts=['i3-workspace-swap'], install_requires=['i3ipc'], classifiers=[ \"Intended Audience :: End Users/Desktop\",", "\"Intended Audience :: End Users/Desktop\", \"License :: OSI Approved ::", "author='<NAME>', author_email='<EMAIL>', packages=setuptools.find_packages(), scripts=['i3-workspace-swap'], install_requires=['i3ipc'], classifiers=[ \"Intended Audience :: End", "\"Operating System :: POSIX :: Linux\", 'Programming Language :: Python", "Audience :: End Users/Desktop\", \"License :: OSI Approved :: MIT", ":: POSIX :: Linux\", 'Programming Language :: Python :: 3'", "description='A python utility swap the content of two workplaces in", "long_description = fh.read() setuptools.setup( name=\"i3-workspace-swap\", description='A python utility swap the", "url='https://github.com/einzigartigername/i3-workspace-swap', license='MIT', author='<NAME>', author_email='<EMAIL>', 
packages=setuptools.find_packages(), scripts=['i3-workspace-swap'], install_requires=['i3ipc'], classifiers=[ \"Intended Audience", "packages=setuptools.find_packages(), scripts=['i3-workspace-swap'], install_requires=['i3ipc'], classifiers=[ \"Intended Audience :: End Users/Desktop\", \"License", "with open(\"README.md\", \"r\") as fh: long_description = fh.read() setuptools.setup( name=\"i3-workspace-swap\",", ":: Linux\", 'Programming Language :: Python :: 3' ], python_requires='>=3.6',", "import setuptools with open(\"README.md\", \"r\") as fh: long_description = fh.read()", "license='MIT', author='<NAME>', author_email='<EMAIL>', packages=setuptools.find_packages(), scripts=['i3-workspace-swap'], install_requires=['i3ipc'], classifiers=[ \"Intended Audience ::", "fh.read() setuptools.setup( name=\"i3-workspace-swap\", description='A python utility swap the content of" ]
[ "0 if successful def write_modmk(outdir): #, msg_types, srv_types): if not", "providing access to foreign persons. # from __future__ import print_function", "preventing the creating of Fprime directory: %s\"%dir) p = os.path.join(outdir,", "to comply # with all U.S. export laws and regulations.", "if len(generated_xml) != 0: for xml in generated_xml[:-1]: f.write('%s \\\\\\n'%xml)", "not os.path.isdir(outdir): #TODO: warn? return 0 xml_in_dir = set([f for", "RIGHTS RESERVED. United States Government Sponsorship # acknowledged. Any commercial", "obtain export licenses, or other export authority # as may", "os.makedirs(outdir) elif not os.path.isdir(outdir): raise MsgGenerationException(\"file preventing the creating of", "information to foreign # countries or providing access to foreign", "MsgGenerationException #from . name import * ## :param type_name outdir:", "if not os.path.isdir(outdir): #TODO: warn? return 0 xml_in_dir = set([f", "os.listdir(outdir) if f.endswith('.xml')]) _write_modmk(outdir, sorted(xml_in_dir)) # TODO(mereweth) if we want", "not os.path.isdir(outdir): raise MsgGenerationException(\"file preventing the creating of Fprime directory:", "Fprime directory: %s\"%dir) p = os.path.join(outdir, 'mod.mk') with open(p, 'w')", "## :returns int: status. 0 if successful def write_modmk(outdir): #,", "warn? return 0 xml_in_dir = set([f for f in os.listdir(outdir)", "= \\\\\\n') if len(generated_xml) != 0: for xml in generated_xml[:-1]:", "elif not os.path.isdir(outdir): raise MsgGenerationException(\"file preventing the creating of Fprime", "persons. 
# from __future__ import print_function import os from genmsg", "f in sorted(msg_types)] # generated_xml.extend([_port_xml_name(f) for f in sorted(msg_types)] #", "_write_modmk(outdir, generated_xml): if not os.path.exists(outdir): os.makedirs(outdir) elif not os.path.isdir(outdir): raise", "import print_function import os from genmsg import MsgGenerationException #from .", "directory ## :returns int: status. 0 if successful def write_modmk(outdir):", "countries or providing access to foreign persons. # from __future__", "U.S. export control laws and # regulations. By accepting this", "0 xml_in_dir = set([f for f in os.listdir(outdir) if f.endswith('.xml')])", "outdir: Full path to output directory ## :returns int: status.", "_write_modmk(outdir, sorted(xml_in_dir)) # TODO(mereweth) if we want to independently specify", "sorted(xml_in_dir)) # TODO(mereweth) if we want to independently specify the", "we want to independently specify the generated XML files #", "if successful def write_modmk(outdir): #, msg_types, srv_types): if not os.path.isdir(outdir):", "United States Government Sponsorship # acknowledged. Any commercial use must", "California Institute of Technology. # # This software may be", "be subject to U.S. 
export control laws and # regulations.", "exporting such information to foreign # countries or providing access", "f.write('SRC = \\\\\\n') if len(generated_xml) != 0: for xml in", "= [_srv_serializable_xml_name(f) for f in sorted(srv_types)] # generated_xml.extend([_port_xml_name(f) for f", "= os.path.join(outdir, 'mod.mk') with open(p, 'w') as f: f.write('SRC =", "for f in sorted(msg_types)] # write_msg_modmk(outdir, generated_xml) # generated_xml =", "TODO(mereweth) if we want to independently specify the generated XML", "def _write_modmk(outdir, generated_xml): if not os.path.exists(outdir): os.makedirs(outdir) elif not os.path.isdir(outdir):", "or other export authority # as may be required before", "sorted(msg_types)] # generated_xml.extend([_port_xml_name(f) for f in sorted(msg_types)] # write_msg_modmk(outdir, generated_xml)", "the # responsibility to obtain export licenses, or other export", "os.path.join(outdir, 'mod.mk') with open(p, 'w') as f: f.write('SRC = \\\\\\n')", "f in sorted(srv_types)] # generated_xml.extend([_port_xml_name(f) for f in sorted(srv_types)] #", "xml_in_dir = set([f for f in os.listdir(outdir) if f.endswith('.xml')]) _write_modmk(outdir,", "[_msg_serializable_xml_name(f) for f in sorted(msg_types)] # generated_xml.extend([_port_xml_name(f) for f in", "to foreign persons. # from __future__ import print_function import os", "Sponsorship # acknowledged. Any commercial use must be negotiated with", "name import * ## :param type_name outdir: Full path to", "subject to U.S. export control laws and # regulations. By", "type_name outdir: Full path to output directory ## :returns int:", "Institute of Technology. # ALL RIGHTS RESERVED. United States Government", "for f in os.listdir(outdir) if f.endswith('.xml')]) _write_modmk(outdir, sorted(xml_in_dir)) # TODO(mereweth)", "# generated_xml.extend([_port_xml_name(f) for f in sorted(srv_types)] # write_msg_modmk(outdir, generated_xml) return", "laws and regulations. 
User has the # responsibility to obtain", "to independently specify the generated XML files # generated_xml =", "foreign # countries or providing access to foreign persons. #", "as f: f.write('SRC = \\\\\\n') if len(generated_xml) != 0: for", "# # Copyright 2004-2016, by the California Institute of Technology.", "laws and # regulations. By accepting this document, the user", "specify the generated XML files # generated_xml = [_msg_serializable_xml_name(f) for", "# regulations. By accepting this document, the user agrees to", "import MsgGenerationException #from . name import * ## :param type_name", "Technology. # ALL RIGHTS RESERVED. United States Government Sponsorship #", "other export authority # as may be required before exporting", "os.path.isdir(outdir): #TODO: warn? return 0 xml_in_dir = set([f for f", "## :param type_name outdir: Full path to output directory ##", "def write_modmk(outdir): #, msg_types, srv_types): if not os.path.isdir(outdir): #TODO: warn?", "# write_msg_modmk(outdir, generated_xml) # generated_xml = [_srv_serializable_xml_name(f) for f in", "the creating of Fprime directory: %s\"%dir) p = os.path.join(outdir, 'mod.mk')", "all U.S. export laws and regulations. User has the #", "commercial use must be negotiated with the Office # of", "export authority # as may be required before exporting such", "srv_types): if not os.path.isdir(outdir): #TODO: warn? return 0 xml_in_dir =", "for f in sorted(srv_types)] # generated_xml.extend([_port_xml_name(f) for f in sorted(srv_types)]", "U.S. export laws and regulations. User has the # responsibility", "to obtain export licenses, or other export authority # as", ":returns int: status. 0 if successful def write_modmk(outdir): #, msg_types,", "f in sorted(msg_types)] # write_msg_modmk(outdir, generated_xml) # generated_xml = [_srv_serializable_xml_name(f)", "\\\\\\n') if len(generated_xml) != 0: for xml in generated_xml[:-1]: f.write('%s", "os from genmsg import MsgGenerationException #from . 
name import *", "with all U.S. export laws and regulations. User has the", "# from __future__ import print_function import os from genmsg import", "such information to foreign # countries or providing access to", "ALL RIGHTS RESERVED. United States Government Sponsorship # acknowledged. Any", "generated_xml.extend([_port_xml_name(f) for f in sorted(msg_types)] # write_msg_modmk(outdir, generated_xml) # generated_xml", "or providing access to foreign persons. # from __future__ import", "# as may be required before exporting such information to", "generated_xml = [_msg_serializable_xml_name(f) for f in sorted(msg_types)] # generated_xml.extend([_port_xml_name(f) for", "foreign persons. # from __future__ import print_function import os from", "successful def write_modmk(outdir): #, msg_types, srv_types): if not os.path.isdir(outdir): #TODO:", "write_msg_modmk(outdir, generated_xml) # generated_xml = [_srv_serializable_xml_name(f) for f in sorted(srv_types)]", ":param type_name outdir: Full path to output directory ## :returns", "= set([f for f in os.listdir(outdir) if f.endswith('.xml')]) _write_modmk(outdir, sorted(xml_in_dir))", "set([f for f in os.listdir(outdir) if f.endswith('.xml')]) _write_modmk(outdir, sorted(xml_in_dir)) #", "if f.endswith('.xml')]) _write_modmk(outdir, sorted(xml_in_dir)) # TODO(mereweth) if we want to", "the generated XML files # generated_xml = [_msg_serializable_xml_name(f) for f", "to foreign # countries or providing access to foreign persons.", "as may be required before exporting such information to foreign", "# TODO(mereweth) if we want to independently specify the generated", "# responsibility to obtain export licenses, or other export authority", "# generated_xml = [_msg_serializable_xml_name(f) for f in sorted(msg_types)] # generated_xml.extend([_port_xml_name(f)", "# with all U.S. export laws and regulations. User has", "comply # with all U.S. export laws and regulations. 
User", "sorted(msg_types)] # write_msg_modmk(outdir, generated_xml) # generated_xml = [_srv_serializable_xml_name(f) for f", "'mod.mk') with open(p, 'w') as f: f.write('SRC = \\\\\\n') if", "path to output directory ## :returns int: status. 0 if", "Technology Transfer at the California Institute of Technology. # #", "By accepting this document, the user agrees to comply #", "the California Institute of Technology. # # This software may", "'w') as f: f.write('SRC = \\\\\\n') if len(generated_xml) != 0:", "use must be negotiated with the Office # of Technology", "files # generated_xml = [_msg_serializable_xml_name(f) for f in sorted(msg_types)] #", "the California Institute of Technology. # ALL RIGHTS RESERVED. United", "# ALL RIGHTS RESERVED. United States Government Sponsorship # acknowledged.", "# This software may be subject to U.S. export control", "Transfer at the California Institute of Technology. # # This", "generated_xml): if not os.path.exists(outdir): os.makedirs(outdir) elif not os.path.isdir(outdir): raise MsgGenerationException(\"file", "Technology. # # This software may be subject to U.S.", "of Technology. # ALL RIGHTS RESERVED. United States Government Sponsorship", "sorted(srv_types)] # generated_xml.extend([_port_xml_name(f) for f in sorted(srv_types)] # write_msg_modmk(outdir, generated_xml)", "f in sorted(srv_types)] # write_msg_modmk(outdir, generated_xml) return 0 def _write_modmk(outdir,", "be negotiated with the Office # of Technology Transfer at", "User has the # responsibility to obtain export licenses, or", "before exporting such information to foreign # countries or providing", "genmsg import MsgGenerationException #from . name import * ## :param", "# # This software may be subject to U.S. export", "Any commercial use must be negotiated with the Office #", "status. 0 if successful def write_modmk(outdir): #, msg_types, srv_types): if", "This software may be subject to U.S. 
export control laws", "0: for xml in generated_xml[:-1]: f.write('%s \\\\\\n'%xml) f.write('%s\\n'%generated_xml[-1]) return 0", "must be negotiated with the Office # of Technology Transfer", "in sorted(msg_types)] # write_msg_modmk(outdir, generated_xml) # generated_xml = [_srv_serializable_xml_name(f) for", "access to foreign persons. # from __future__ import print_function import", "os.path.isdir(outdir): raise MsgGenerationException(\"file preventing the creating of Fprime directory: %s\"%dir)", "print_function import os from genmsg import MsgGenerationException #from . name", "# write_msg_modmk(outdir, generated_xml) return 0 def _write_modmk(outdir, generated_xml): if not", "<gh_stars>1-10 # # Copyright 2004-2016, by the California Institute of", "Copyright 2004-2016, by the California Institute of Technology. # ALL", "may be required before exporting such information to foreign #", "#TODO: warn? return 0 xml_in_dir = set([f for f in", "= [_msg_serializable_xml_name(f) for f in sorted(msg_types)] # generated_xml.extend([_port_xml_name(f) for f", "Institute of Technology. # # This software may be subject", "responsibility to obtain export licenses, or other export authority #", "export licenses, or other export authority # as may be", "in sorted(srv_types)] # generated_xml.extend([_port_xml_name(f) for f in sorted(srv_types)] # write_msg_modmk(outdir,", "MsgGenerationException(\"file preventing the creating of Fprime directory: %s\"%dir) p =", "#from . name import * ## :param type_name outdir: Full", "California Institute of Technology. # ALL RIGHTS RESERVED. United States", "for f in sorted(srv_types)] # write_msg_modmk(outdir, generated_xml) return 0 def", "export laws and regulations. 
User has the # responsibility to", "with the Office # of Technology Transfer at the California", "0 def _write_modmk(outdir, generated_xml): if not os.path.exists(outdir): os.makedirs(outdir) elif not", "p = os.path.join(outdir, 'mod.mk') with open(p, 'w') as f: f.write('SRC", ". name import * ## :param type_name outdir: Full path", "f in os.listdir(outdir) if f.endswith('.xml')]) _write_modmk(outdir, sorted(xml_in_dir)) # TODO(mereweth) if", "for f in sorted(msg_types)] # generated_xml.extend([_port_xml_name(f) for f in sorted(msg_types)]", "write_modmk(outdir): #, msg_types, srv_types): if not os.path.isdir(outdir): #TODO: warn? return", "sorted(srv_types)] # write_msg_modmk(outdir, generated_xml) return 0 def _write_modmk(outdir, generated_xml): if", "raise MsgGenerationException(\"file preventing the creating of Fprime directory: %s\"%dir) p", "want to independently specify the generated XML files # generated_xml", "# Copyright 2004-2016, by the California Institute of Technology. #", "open(p, 'w') as f: f.write('SRC = \\\\\\n') if len(generated_xml) !=", "Office # of Technology Transfer at the California Institute of", "the Office # of Technology Transfer at the California Institute", "creating of Fprime directory: %s\"%dir) p = os.path.join(outdir, 'mod.mk') with", "acknowledged. Any commercial use must be negotiated with the Office", "# generated_xml.extend([_port_xml_name(f) for f in sorted(msg_types)] # write_msg_modmk(outdir, generated_xml) #", "%s\"%dir) p = os.path.join(outdir, 'mod.mk') with open(p, 'w') as f:", "independently specify the generated XML files # generated_xml = [_msg_serializable_xml_name(f)", "in os.listdir(outdir) if f.endswith('.xml')]) _write_modmk(outdir, sorted(xml_in_dir)) # TODO(mereweth) if we", "__future__ import print_function import os from genmsg import MsgGenerationException #from", "if we want to independently specify the generated XML files", "export control laws and # regulations. 
By accepting this document,", "# acknowledged. Any commercial use must be negotiated with the", "import * ## :param type_name outdir: Full path to output", "may be subject to U.S. export control laws and #", "* ## :param type_name outdir: Full path to output directory", "in sorted(srv_types)] # write_msg_modmk(outdir, generated_xml) return 0 def _write_modmk(outdir, generated_xml):", "2004-2016, by the California Institute of Technology. # ALL RIGHTS", "and # regulations. By accepting this document, the user agrees", "generated_xml) # generated_xml = [_srv_serializable_xml_name(f) for f in sorted(srv_types)] #", "of Technology Transfer at the California Institute of Technology. #", "software may be subject to U.S. export control laws and", "generated_xml = [_srv_serializable_xml_name(f) for f in sorted(srv_types)] # generated_xml.extend([_port_xml_name(f) for", "with open(p, 'w') as f: f.write('SRC = \\\\\\n') if len(generated_xml)", "States Government Sponsorship # acknowledged. Any commercial use must be", "the user agrees to comply # with all U.S. export", "be required before exporting such information to foreign # countries", "RESERVED. United States Government Sponsorship # acknowledged. Any commercial use", "f.endswith('.xml')]) _write_modmk(outdir, sorted(xml_in_dir)) # TODO(mereweth) if we want to independently", "of Fprime directory: %s\"%dir) p = os.path.join(outdir, 'mod.mk') with open(p,", "return 0 xml_in_dir = set([f for f in os.listdir(outdir) if", "has the # responsibility to obtain export licenses, or other", "by the California Institute of Technology. # ALL RIGHTS RESERVED.", "if not os.path.exists(outdir): os.makedirs(outdir) elif not os.path.isdir(outdir): raise MsgGenerationException(\"file preventing", "directory: %s\"%dir) p = os.path.join(outdir, 'mod.mk') with open(p, 'w') as", "regulations. By accepting this document, the user agrees to comply", "#, msg_types, srv_types): if not os.path.isdir(outdir): #TODO: warn? 
return 0", "len(generated_xml) != 0: for xml in generated_xml[:-1]: f.write('%s \\\\\\n'%xml) f.write('%s\\n'%generated_xml[-1])", "regulations. User has the # responsibility to obtain export licenses,", "msg_types, srv_types): if not os.path.isdir(outdir): #TODO: warn? return 0 xml_in_dir", "int: status. 0 if successful def write_modmk(outdir): #, msg_types, srv_types):", "os.path.exists(outdir): os.makedirs(outdir) elif not os.path.isdir(outdir): raise MsgGenerationException(\"file preventing the creating", "# countries or providing access to foreign persons. # from", "and regulations. User has the # responsibility to obtain export", "of Technology. # # This software may be subject to", "generated XML files # generated_xml = [_msg_serializable_xml_name(f) for f in", "to U.S. export control laws and # regulations. By accepting", "licenses, or other export authority # as may be required", "control laws and # regulations. By accepting this document, the", "authority # as may be required before exporting such information", "output directory ## :returns int: status. 0 if successful def", "negotiated with the Office # of Technology Transfer at the", "!= 0: for xml in generated_xml[:-1]: f.write('%s \\\\\\n'%xml) f.write('%s\\n'%generated_xml[-1]) return", "accepting this document, the user agrees to comply # with", "# generated_xml = [_srv_serializable_xml_name(f) for f in sorted(srv_types)] # generated_xml.extend([_port_xml_name(f)", "not os.path.exists(outdir): os.makedirs(outdir) elif not os.path.isdir(outdir): raise MsgGenerationException(\"file preventing the", "generated_xml) return 0 def _write_modmk(outdir, generated_xml): if not os.path.exists(outdir): os.makedirs(outdir)", "# of Technology Transfer at the California Institute of Technology.", "[_srv_serializable_xml_name(f) for f in sorted(srv_types)] # generated_xml.extend([_port_xml_name(f) for f in", "user agrees to comply # with all U.S. 
export laws", "import os from genmsg import MsgGenerationException #from . name import", "Full path to output directory ## :returns int: status. 0", "agrees to comply # with all U.S. export laws and", "from __future__ import print_function import os from genmsg import MsgGenerationException", "to output directory ## :returns int: status. 0 if successful", "in sorted(msg_types)] # generated_xml.extend([_port_xml_name(f) for f in sorted(msg_types)] # write_msg_modmk(outdir,", "document, the user agrees to comply # with all U.S.", "generated_xml.extend([_port_xml_name(f) for f in sorted(srv_types)] # write_msg_modmk(outdir, generated_xml) return 0", "f: f.write('SRC = \\\\\\n') if len(generated_xml) != 0: for xml", "Government Sponsorship # acknowledged. Any commercial use must be negotiated", "at the California Institute of Technology. # # This software", "this document, the user agrees to comply # with all", "required before exporting such information to foreign # countries or", "from genmsg import MsgGenerationException #from . name import * ##", "return 0 def _write_modmk(outdir, generated_xml): if not os.path.exists(outdir): os.makedirs(outdir) elif", "write_msg_modmk(outdir, generated_xml) return 0 def _write_modmk(outdir, generated_xml): if not os.path.exists(outdir):", "XML files # generated_xml = [_msg_serializable_xml_name(f) for f in sorted(msg_types)]" ]
[ "new_notebook(cells=[new_code_cell('1+1'), new_code_cell('2\\n2')]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert", "ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', metadata={'metakey': 'value'})]) with pytest.raises(NotebookDifference):", "1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])]) with pytest.raises(NotebookDifference): compare_notebooks(ref,", "[True, False]) def test_raise_on_different_cell_content(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')])", "new_notebook(cells=[new_code_cell('1+1')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', allow_expected_differences=False) def test_dont_raise_on_different_outputs(): ref", "test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_type(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell", "compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_outputs(raise_on_first_difference): ref =", "raise_on_first_difference=raise_on_first_difference) with pytest.raises(NotebookDifference): compare_notebooks(test, ref, 'py:light', raise_on_first_difference=raise_on_first_difference) def test_does_not_raise_on_blank_line_removed(): ref", "= new_notebook(cells=[new_markdown_cell('Cell one'), new_raw_cell('Cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md',", "raise_on_first_difference=raise_on_first_difference) def test_does_not_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n ')]) test = new_notebook(cells=[new_code_cell('1+1')])", "test_cell_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 
'metadata1'})]) nb2 = new_notebook(cells=[new_code_cell('1'),", "= new_notebook(metadata={'kernelspec': {'language': 'R', 'name': 'R', 'display_name': 'R'}}, cells=[new_markdown_cell('Cell one')])", "line')]) test = new_notebook(cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md')", "new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_markdown_cell('second line')]) with", "test_notebook_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')], metadata={'kernelspec':", "= new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\": { \"text/plain\": [ \"2\" ]", "[ \"2\" ] }, \"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\"", "test, 'md') def test_raise_on_different_cell_metadata(): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1',", "'py:light', raise_on_first_difference=raise_on_first_difference) with pytest.raises(NotebookDifference): compare_notebooks(test, ref, 'py:light', raise_on_first_difference=raise_on_first_difference) def test_does_not_raise_on_blank_line_removed():", "with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', allow_expected_differences=False) def test_dont_raise_on_different_outputs(): ref =", "test = new_notebook(cells=[new_code_cell('1+1', metadata={'metakey': 'value'})]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light')", "assert \"Cell metadata 'additional' differ\" in exception_info.value.args[0] def test_notebook_metadata_differ(): nb1", "new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', raise_on_first_difference=raise_on_first_difference) with 
pytest.raises(NotebookDifference):", "in exception_info.value.args[0] def test_notebook_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) nb2 =", "one'), new_code_cell('Modified cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference)", "two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) @pytest.mark.parametrize('raise_on_first_difference', [True, False])", "def test_cell_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata1'})]) nb2 =", "test = new_notebook(metadata={'kernelspec': {'language': 'R', 'name': 'R', 'display_name': 'R'}}, cells=[new_markdown_cell('Cell", "assert 'Cells 1,2 differ' in exception_info.value.args[0] def test_cell_metadata_differ(): nb1 =", "'R', 'name': 'R', 'display_name': 'R'}}, cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref,", "new_code_cell('2', metadata={'additional': 'metadata2'})]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False)", "'python', 'display_name': 'Python'}}, cells=[new_markdown_cell('Cell one')]) test = new_notebook(metadata={'kernelspec': {'language': 'R',", "'python', 'name': 'python', 'display_name': 'Python'}}, cells=[new_markdown_cell('Cell one')]) test = new_notebook(metadata={'kernelspec':", "'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_type(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'),", "'R'}}, cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True,", "with pytest.raises(NotebookDifference): 
compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def", "test_dont_raise_on_different_outputs(): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\":", "def test_strict_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n')]) test = new_notebook(cells=[new_code_cell('1+1')]) with pytest.raises(NotebookDifference):", "\"metadata\": {}, \"output_type\": \"execute_result\" } ])]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test,", "= new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Modified cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test,", "cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False])", "1,2 differ' in exception_info.value.args[0] def test_cell_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2',", "def test_test_round_trip_conversion(): notebook = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\": { \"text/plain\":", "compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_content(raise_on_first_difference): ref", "new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Modified cell", "'display_name': 'Python'}}) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False, )", "new_raw_cell('Cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) 
@pytest.mark.parametrize('raise_on_first_difference', [True,", "'metadata2'})]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert \"Cell", "metadata={'additional': 'metadata2'})]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert", "= new_notebook(cells=[new_code_cell('1+1')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', allow_expected_differences=False) def test_dont_raise_on_different_outputs():", "one')]) test = new_notebook(metadata={'kernelspec': {'language': 'R', 'name': 'R', 'display_name': 'R'}},", "as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert \"Cell metadata 'additional' differ\"", "two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) def test_raise_on_incomplete_markdown_cell(): ref", "test_does_raise_on_split_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one'),", "test = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Modified cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref,", "1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])]) compare_notebooks(ref, test, 'md')", "pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def test_raise_on_different_cell_metadata(): ref = new_notebook(cells=[new_code_cell('1+1')]) test", "[True, False]) def test_raise_on_different_outputs(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1',", "= new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Modified", "def 
test_raise_on_different_cell_content(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test =", "ref = new_notebook(cells=[new_code_cell('1')]) test = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) with pytest.raises(NotebookDifference): compare_notebooks(ref,", "from jupytext.compare import compare_notebooks, NotebookDifference, test_round_trip_conversion as round_trip_conversion def test_raise_on_different_metadata():", "@pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_content(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell", "notebook = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\": { \"text/plain\": [ \"2\"", "@pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_count(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1')]) test =", "])]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', compare_outputs=True, raise_on_first_difference=raise_on_first_difference) def test_test_round_trip_conversion():", "two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_raw_cell('Cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref,", "compare_notebooks(ref, test, 'py:light', raise_on_first_difference=raise_on_first_difference) with pytest.raises(NotebookDifference): compare_notebooks(test, ref, 'py:light', raise_on_first_difference=raise_on_first_difference)", "compare_notebooks(ref, test, 'md', compare_outputs=True, raise_on_first_difference=raise_on_first_difference) def test_test_round_trip_conversion(): notebook = new_notebook(cells=[new_code_cell('1+1',", "metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name': 'Python'}}) with pytest.raises(NotebookDifference) as", "nb2, raise_on_first_difference=False) assert 'Cells 
1,2 differ' in exception_info.value.args[0] def test_cell_metadata_differ():", "new_notebook(metadata={'kernelspec': {'language': 'R', 'name': 'R', 'display_name': 'R'}}, cells=[new_markdown_cell('Cell one')]) with", "with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', raise_on_first_difference=raise_on_first_difference) with pytest.raises(NotebookDifference): compare_notebooks(test, ref,", "new_notebook(cells=[new_code_cell('1'), new_code_cell('2')], metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name': 'Python'}}) with", "test = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\": { \"text/plain\": [ \"2\"", "metadata 'additional' differ\" in exception_info.value.args[0] def test_notebook_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'),", "])], metadata={'main_language': 'python'}) round_trip_conversion(notebook, {'extension': '.py'}, update=True) def test_mutiple_cells_differ(): nb1", "new_markdown_cell('second line')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def test_raise_on_different_cell_metadata(): ref", "= new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata1'})]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional':", "= new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', raise_on_first_difference=raise_on_first_difference) with", "NotebookDifference, test_round_trip_conversion as round_trip_conversion def test_raise_on_different_metadata(): ref = new_notebook(metadata={'kernelspec': {'language':", "test_raise_on_different_cell_metadata(): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', metadata={'metakey': 'value'})]) with", "@pytest.mark.parametrize('raise_on_first_difference', [True, False]) def 
test_raise_on_different_cell_type(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell", "\"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])]) compare_notebooks(ref, test,", "1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])], metadata={'main_language': 'python'}) round_trip_conversion(notebook,", "one'), new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Modified cell two')])", "exception_info.value.args[0] def test_cell_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata1'})]) nb2", "new_notebook(cells=[new_code_cell('1')]) test = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light',", "{}, \"output_type\": \"execute_result\" } ])], metadata={'main_language': 'python'}) round_trip_conversion(notebook, {'extension': '.py'},", "\"data\": { \"text/plain\": [ \"2\" ] }, \"execution_count\": 1, \"metadata\":", "{ \"data\": { \"text/plain\": [ \"2\" ] }, \"execution_count\": 1,", "def test_raise_on_different_cell_count(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1')]) test = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) with", "test = new_notebook(cells=[new_code_cell('1+1')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', allow_expected_differences=False) def", "nb2 = new_notebook(cells=[new_code_cell('1+1'), new_code_cell('2\\n2')]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2,", "= new_notebook(cells=[new_code_cell('1+1'), new_code_cell('2\\n2')]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False)", "new_notebook(cells=[new_code_cell('1+1')]) compare_notebooks(ref, test, 'py:light') def 
test_strict_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n')]) test", "False]) def test_raise_on_different_outputs(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', outputs=[", "with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) def test_raise_on_incomplete_markdown_cell(): ref =", "new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Modified cell two')]) with", "ref = new_notebook(cells=[new_code_cell('1+1\\n ')]) test = new_notebook(cells=[new_code_cell('1+1')]) compare_notebooks(ref, test, 'py:light')", "\"execute_result\" } ])]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', compare_outputs=True, raise_on_first_difference=raise_on_first_difference)", "def test_does_raise_on_split_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell", "one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def test_does_raise_on_split_markdown_cell(): ref =", "new_notebook(cells=[new_code_cell('1+1\\n')]) test = new_notebook(cells=[new_code_cell('1+1')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', allow_expected_differences=False)", "'md') def test_raise_on_different_cell_metadata(): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', metadata={'metakey':", "[True, False]) def test_raise_on_different_cell_type(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')])", "{'extension': '.py'}, update=True) def test_mutiple_cells_differ(): nb1 = new_notebook(cells=[new_code_cell(''), new_code_cell('2')]) nb2", "exception_info: compare_notebooks(nb1, nb2, 
raise_on_first_difference=False, ) assert \"Notebook metadata differ\" in", "raise_on_first_difference=raise_on_first_difference) def test_raise_on_incomplete_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test =", "'python', 'name': 'python', 'display_name': 'Python'}}) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1,", "test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_outputs(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1+1')])", "\"text/plain\": [ \"2\" ] }, \"execution_count\": 1, \"metadata\": {}, \"output_type\":", "def test_raise_on_different_outputs(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', outputs=[ {", "ref = new_notebook(cells=[new_code_cell('1+1\\n')]) test = new_notebook(cells=[new_code_cell('1+1')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test,", "test_raise_on_different_metadata(): ref = new_notebook(metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name': 'Python'}},", "line')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def test_raise_on_different_cell_metadata(): ref =", "with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def test_does_raise_on_split_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell", "= new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_raw_cell('Cell", "'py:light', allow_expected_differences=False) def test_dont_raise_on_different_outputs(): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1',", "new_notebook(cells=[new_code_cell('1+1', metadata={'metakey': 'value'})]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 
'py:light') @pytest.mark.parametrize('raise_on_first_difference', [True,", "test_raise_on_different_cell_count(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1')]) test = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) with pytest.raises(NotebookDifference):", "'md') def test_does_raise_on_split_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test =", "] }, \"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])],", "test_does_not_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n ')]) test = new_notebook(cells=[new_code_cell('1+1')]) compare_notebooks(ref, test,", "test_raise_on_different_cell_content(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell", "new_code_cell, new_raw_cell from jupytext.compare import compare_notebooks, NotebookDifference, test_round_trip_conversion as round_trip_conversion", "outputs=[ { \"data\": { \"text/plain\": [ \"2\" ] }, \"execution_count\":", "new_raw_cell from jupytext.compare import compare_notebooks, NotebookDifference, test_round_trip_conversion as round_trip_conversion def", "'name': 'python', 'display_name': 'Python'}}, cells=[new_markdown_cell('Cell one')]) test = new_notebook(metadata={'kernelspec': {'language':", "with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert 'Cells 1,2", "with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', compare_outputs=True, raise_on_first_difference=raise_on_first_difference) def test_test_round_trip_conversion(): notebook", "'md', raise_on_first_difference=raise_on_first_difference) @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_content(raise_on_first_difference): ref = 
new_notebook(cells=[new_markdown_cell('Cell", "test_test_round_trip_conversion(): notebook = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\": { \"text/plain\": [", "compare_notebooks(ref, test, 'py:light', allow_expected_differences=False) def test_dont_raise_on_different_outputs(): ref = new_notebook(cells=[new_code_cell('1+1')]) test", "} ])]) compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_outputs(raise_on_first_difference):", "one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test,", "allow_expected_differences=False) def test_dont_raise_on_different_outputs(): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', outputs=[", "as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False, ) assert \"Notebook metadata differ\"", "'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_outputs(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1+1')]) test", "test = new_notebook(cells=[new_markdown_cell('Cell one'), new_raw_cell('Cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test,", "ref, 'py:light', raise_on_first_difference=raise_on_first_difference) def test_does_not_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n ')]) test", "False]) def test_raise_on_different_cell_type(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test", "def test_raise_on_different_cell_metadata(): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', metadata={'metakey': 'value'})])", "{'language': 'R', 'name': 'R', 'display_name': 'R'}}, cells=[new_markdown_cell('Cell one')]) with 
pytest.raises(NotebookDifference):", "new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_raw_cell('Cell two')]) with pytest.raises(NotebookDifference):", "compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_type(raise_on_first_difference): ref =", "= new_notebook(cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def test_does_raise_on_split_markdown_cell():", "test_raise_on_different_outputs(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\":", "exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert 'Cells 1,2 differ' in exception_info.value.args[0]", "[True, False]) def test_raise_on_different_cell_count(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1')]) test = new_notebook(cells=[new_code_cell('1'),", "'.py'}, update=True) def test_mutiple_cells_differ(): nb1 = new_notebook(cells=[new_code_cell(''), new_code_cell('2')]) nb2 =", "import pytest from nbformat.v4.nbbase import new_notebook, new_markdown_cell, new_code_cell, new_raw_cell from", "])]) compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_outputs(raise_on_first_difference): ref", "new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata1'})]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata2'})])", "\"2\" ] }, \"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\" }", "pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert \"Cell metadata 'additional'", "compare_notebooks(nb1, nb2, raise_on_first_difference=False, ) assert 
\"Notebook metadata differ\" in exception_info.value.args[0]", "'name': 'python', 'display_name': 'Python'}}) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2,", "test, 'py:light') def test_strict_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n')]) test = new_notebook(cells=[new_code_cell('1+1')])", "\"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])]) with pytest.raises(NotebookDifference):", "pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert 'Cells 1,2 differ'", "ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_markdown_cell('second", "ref = new_notebook(metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name': 'Python'}}, cells=[new_markdown_cell('Cell", "} ])]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', compare_outputs=True, raise_on_first_difference=raise_on_first_difference) def", "ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'),", "'py:light', raise_on_first_difference=raise_on_first_difference) def test_does_not_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n ')]) test =", "new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_raw_cell('Cell two')])", "\"output_type\": \"execute_result\" } ])]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', compare_outputs=True,", "metadata={'main_language': 'python'}) round_trip_conversion(notebook, {'extension': '.py'}, update=True) def test_mutiple_cells_differ(): nb1 =", "ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', 
outputs=[ { \"data\": {", "def test_raise_on_incomplete_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell", "test_round_trip_conversion as round_trip_conversion def test_raise_on_different_metadata(): ref = new_notebook(metadata={'kernelspec': {'language': 'python',", "'name': 'R', 'display_name': 'R'}}, cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test,", "new_notebook(cells=[new_markdown_cell('Cell one'), new_raw_cell('Cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference)", "with pytest.raises(NotebookDifference): compare_notebooks(test, ref, 'py:light', raise_on_first_difference=raise_on_first_difference) def test_does_not_raise_on_blank_line_removed(): ref =", "update=True) def test_mutiple_cells_differ(): nb1 = new_notebook(cells=[new_code_cell(''), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1+1'),", "new_notebook(cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def test_does_raise_on_split_markdown_cell(): ref", "'display_name': 'R'}}, cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference',", "test = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', raise_on_first_difference=raise_on_first_difference)", "with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False, ) assert \"Notebook", "'additional' differ\" in exception_info.value.args[0] def test_notebook_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')])", "'md', compare_outputs=True, 
raise_on_first_difference=raise_on_first_difference) def test_test_round_trip_conversion(): notebook = new_notebook(cells=[new_code_cell('1+1', outputs=[ {", "'md', raise_on_first_difference=raise_on_first_difference) def test_raise_on_incomplete_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test", "\"execute_result\" } ])]) compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def", "= new_notebook(cells=[new_code_cell('1'), new_code_cell('2')], metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name': 'Python'}})", "new_notebook(cells=[new_code_cell(''), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1+1'), new_code_cell('2\\n2')]) with pytest.raises(NotebookDifference) as exception_info:", "test_mutiple_cells_differ(): nb1 = new_notebook(cells=[new_code_cell(''), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1+1'), new_code_cell('2\\n2')]) with", "new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata2'})]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2,", "differ\" in exception_info.value.args[0] def test_notebook_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) nb2", "compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) def test_raise_on_incomplete_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond", "as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert 'Cells 1,2 differ' in", "compare_notebooks(ref, test, 'py:light') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_count(raise_on_first_difference): ref =", "test, 'md') def test_does_raise_on_split_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) 
test", "'R', 'display_name': 'R'}}, cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md')", "new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\": { \"text/plain\": [", "}, \"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])]) with", "pytest.raises(NotebookDifference): compare_notebooks(test, ref, 'py:light', raise_on_first_difference=raise_on_first_difference) def test_does_not_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n", "'value'})]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def", "new_notebook(cells=[new_code_cell('1+1\\n ')]) test = new_notebook(cells=[new_code_cell('1+1')]) compare_notebooks(ref, test, 'py:light') def test_strict_raise_on_blank_line_removed():", "{ \"text/plain\": [ \"2\" ] }, \"execution_count\": 1, \"metadata\": {},", "new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', metadata={'metakey': 'value'})]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test,", "\"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])], metadata={'main_language': 'python'})", "'python', 'display_name': 'Python'}}) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False,", "test, 'md', raise_on_first_difference=raise_on_first_difference) @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_content(raise_on_first_difference): ref =", "ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one')]) with", "new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Modified cell two')]) with 
pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md',", "= new_notebook(cells=[new_code_cell('1+1')]) compare_notebooks(ref, test, 'py:light') def test_strict_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n')])", "new_markdown_cell, new_code_cell, new_raw_cell from jupytext.compare import compare_notebooks, NotebookDifference, test_round_trip_conversion as", "def test_notebook_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')],", "\"execute_result\" } ])], metadata={'main_language': 'python'}) round_trip_conversion(notebook, {'extension': '.py'}, update=True) def", "def test_does_not_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n ')]) test = new_notebook(cells=[new_code_cell('1+1')]) compare_notebooks(ref,", "differ' in exception_info.value.args[0] def test_cell_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional':", "nb2, raise_on_first_difference=False) assert \"Cell metadata 'additional' differ\" in exception_info.value.args[0] def", "= new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata2'})]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1,", "= new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference):", "new_notebook(cells=[new_markdown_cell('Cell one'), new_markdown_cell('second line')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def", "round_trip_conversion def test_raise_on_different_metadata(): ref = new_notebook(metadata={'kernelspec': {'language': 'python', 'name': 'python',", "'py:light') def test_strict_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n')]) test = 
new_notebook(cells=[new_code_cell('1+1')]) with", "nbformat.v4.nbbase import new_notebook, new_markdown_cell, new_code_cell, new_raw_cell from jupytext.compare import compare_notebooks,", "pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_type(raise_on_first_difference): ref", "as round_trip_conversion def test_raise_on_different_metadata(): ref = new_notebook(metadata={'kernelspec': {'language': 'python', 'name':", "compare_notebooks(ref, test, 'md') def test_raise_on_different_cell_metadata(): ref = new_notebook(cells=[new_code_cell('1+1')]) test =", "raise_on_first_difference=raise_on_first_difference) @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_content(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'),", "pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_count(raise_on_first_difference): ref", "'py:light') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_count(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1')]) test", "new_code_cell('2')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', raise_on_first_difference=raise_on_first_difference) with pytest.raises(NotebookDifference): compare_notebooks(test,", "nb1 = new_notebook(cells=[new_code_cell(''), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1+1'), new_code_cell('2\\n2')]) with pytest.raises(NotebookDifference)", "new_notebook(metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name': 'Python'}}, cells=[new_markdown_cell('Cell one')]) test", "new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) nb2 = 
new_notebook(cells=[new_code_cell('1'), new_code_cell('2')], metadata={'kernelspec': {'language': 'python', 'name':", "test_raise_on_incomplete_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one')])", "nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')], metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name':", "jupytext.compare import compare_notebooks, NotebookDifference, test_round_trip_conversion as round_trip_conversion def test_raise_on_different_metadata(): ref", "= new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')], metadata={'kernelspec': {'language': 'python',", "\"Cell metadata 'additional' differ\" in exception_info.value.args[0] def test_notebook_metadata_differ(): nb1 =", "def test_mutiple_cells_differ(): nb1 = new_notebook(cells=[new_code_cell(''), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1+1'), new_code_cell('2\\n2')])", "one'), new_raw_cell('Cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) @pytest.mark.parametrize('raise_on_first_difference',", "nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata1'})]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2',", "test, 'md', raise_on_first_difference=raise_on_first_difference) def test_raise_on_incomplete_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')])", "raise_on_first_difference=False) assert \"Cell metadata 'additional' differ\" in exception_info.value.args[0] def test_notebook_metadata_differ():", "@pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_outputs(raise_on_first_difference): ref = 
new_notebook(cells=[new_code_cell('1+1')]) test =", "nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata2'})]) with pytest.raises(NotebookDifference) as exception_info:", "with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert \"Cell metadata", "new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\": { \"text/plain\": [ \"2\" ] },", "def test_raise_on_different_cell_type(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test =", "} ])], metadata={'main_language': 'python'}) round_trip_conversion(notebook, {'extension': '.py'}, update=True) def test_mutiple_cells_differ():", "with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def test_raise_on_different_cell_metadata(): ref = new_notebook(cells=[new_code_cell('1+1')])", "= new_notebook(cells=[new_code_cell('1')]) test = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test,", "test = new_notebook(cells=[new_markdown_cell('Cell one'), new_markdown_cell('second line')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test,", "{}, \"output_type\": \"execute_result\" } ])]) compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True,", "{'language': 'python', 'name': 'python', 'display_name': 'Python'}}, cells=[new_markdown_cell('Cell one')]) test =", "False]) def test_raise_on_different_cell_content(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test", "pytest from nbformat.v4.nbbase import new_notebook, new_markdown_cell, new_code_cell, new_raw_cell from jupytext.compare", "pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', compare_outputs=True, raise_on_first_difference=raise_on_first_difference) def 
test_test_round_trip_conversion(): notebook =", "line')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_markdown_cell('second line')]) with pytest.raises(NotebookDifference): compare_notebooks(ref,", "metadata={'metakey': 'value'})]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light') @pytest.mark.parametrize('raise_on_first_difference', [True, False])", "}, \"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])], metadata={'main_language':", "= new_notebook(cells=[new_code_cell('1+1\\n ')]) test = new_notebook(cells=[new_code_cell('1+1')]) compare_notebooks(ref, test, 'py:light') def", "compare_notebooks(ref, test, 'py:light') def test_strict_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n')]) test =", "test_strict_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n')]) test = new_notebook(cells=[new_code_cell('1+1')]) with pytest.raises(NotebookDifference): compare_notebooks(ref,", "cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) def test_raise_on_incomplete_markdown_cell():", "test = new_notebook(cells=[new_code_cell('1+1')]) compare_notebooks(ref, test, 'py:light') def test_strict_raise_on_blank_line_removed(): ref =", "one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_markdown_cell('second line')]) with pytest.raises(NotebookDifference):", "with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_type(raise_on_first_difference):", "= new_notebook(metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name': 'Python'}}, cells=[new_markdown_cell('Cell one')])", "pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', 
raise_on_first_difference=raise_on_first_difference) with pytest.raises(NotebookDifference): compare_notebooks(test, ref, 'py:light',", "}, \"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])]) compare_notebooks(ref,", "new_code_cell('2\\n2')]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert 'Cells", "compare_notebooks, NotebookDifference, test_round_trip_conversion as round_trip_conversion def test_raise_on_different_metadata(): ref = new_notebook(metadata={'kernelspec':", "def test_dont_raise_on_different_outputs(): ref = new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', outputs=[ {", "compare_outputs=True, raise_on_first_difference=raise_on_first_difference) def test_test_round_trip_conversion(): notebook = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\":", "import compare_notebooks, NotebookDifference, test_round_trip_conversion as round_trip_conversion def test_raise_on_different_metadata(): ref =", "\"output_type\": \"execute_result\" } ])], metadata={'main_language': 'python'}) round_trip_conversion(notebook, {'extension': '.py'}, update=True)", "one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def", "new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1+1'), new_code_cell('2\\n2')]) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1,", "False]) def test_raise_on_different_cell_count(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1')]) test = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')])", "compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert 'Cells 1,2 differ' in exception_info.value.args[0] def", "= new_notebook(cells=[new_code_cell(''), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1+1'), 
new_code_cell('2\\n2')]) with pytest.raises(NotebookDifference) as", "test, 'py:light') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_count(raise_on_first_difference): ref = new_notebook(cells=[new_code_cell('1')])", "new_notebook, new_markdown_cell, new_code_cell, new_raw_cell from jupytext.compare import compare_notebooks, NotebookDifference, test_round_trip_conversion", "test_raise_on_different_cell_type(raise_on_first_difference): ref = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell", "cells=[new_markdown_cell('Cell one')]) test = new_notebook(metadata={'kernelspec': {'language': 'R', 'name': 'R', 'display_name':", "] }, \"execution_count\": 1, \"metadata\": {}, \"output_type\": \"execute_result\" } ])])", "{}, \"output_type\": \"execute_result\" } ])]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md',", "pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light', allow_expected_differences=False) def test_dont_raise_on_different_outputs(): ref = new_notebook(cells=[new_code_cell('1+1')])", "= new_notebook(cells=[new_code_cell('1+1\\n')]) test = new_notebook(cells=[new_code_cell('1+1')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light',", "= new_notebook(cells=[new_code_cell('1+1', metadata={'metakey': 'value'})]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light') @pytest.mark.parametrize('raise_on_first_difference',", "exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert \"Cell metadata 'additional' differ\" in", "')]) test = new_notebook(cells=[new_code_cell('1+1')]) compare_notebooks(ref, test, 'py:light') def test_strict_raise_on_blank_line_removed(): ref", "one'), new_markdown_cell('second line')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def 
test_raise_on_different_cell_metadata():", "round_trip_conversion(notebook, {'extension': '.py'}, update=True) def test_mutiple_cells_differ(): nb1 = new_notebook(cells=[new_code_cell(''), new_code_cell('2')])", "'metadata1'})]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata2'})]) with pytest.raises(NotebookDifference) as", "with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'py:light') @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_count(raise_on_first_difference):", "two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_code_cell('Modified cell two')]) with pytest.raises(NotebookDifference):", "new_code_cell('2')], metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name': 'Python'}}) with pytest.raises(NotebookDifference)", "compare_notebooks(ref, test, 'md') def test_does_raise_on_split_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')])", "pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def test_does_raise_on_split_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond", "compare_notebooks(test, ref, 'py:light', raise_on_first_difference=raise_on_first_difference) def test_does_not_raise_on_blank_line_removed(): ref = new_notebook(cells=[new_code_cell('1+1\\n ')])", "test, 'md', compare_outputs=True, raise_on_first_difference=raise_on_first_difference) def test_test_round_trip_conversion(): notebook = new_notebook(cells=[new_code_cell('1+1', outputs=[", "raise_on_first_difference=False) assert 'Cells 1,2 differ' in exception_info.value.args[0] def test_cell_metadata_differ(): nb1", "{'language': 'python', 'name': 'python', 'display_name': 'Python'}}) with pytest.raises(NotebookDifference) as exception_info:", "'Python'}}) with pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, 
raise_on_first_difference=False, ) assert", "test, 'py:light', allow_expected_differences=False) def test_dont_raise_on_different_outputs(): ref = new_notebook(cells=[new_code_cell('1+1')]) test =", "'python'}) round_trip_conversion(notebook, {'extension': '.py'}, update=True) def test_mutiple_cells_differ(): nb1 = new_notebook(cells=[new_code_cell(''),", "test = new_notebook(cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md') def", "'Python'}}, cells=[new_markdown_cell('Cell one')]) test = new_notebook(metadata={'kernelspec': {'language': 'R', 'name': 'R',", "test, 'py:light', raise_on_first_difference=raise_on_first_difference) with pytest.raises(NotebookDifference): compare_notebooks(test, ref, 'py:light', raise_on_first_difference=raise_on_first_difference) def", "compare_notebooks(nb1, nb2, raise_on_first_difference=False) assert \"Cell metadata 'additional' differ\" in exception_info.value.args[0]", "exception_info.value.args[0] def test_notebook_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1'),", "'Cells 1,2 differ' in exception_info.value.args[0] def test_cell_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'),", "new_code_cell('Modified cell two')]) with pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) def", "pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) @pytest.mark.parametrize('raise_on_first_difference', [True, False]) def test_raise_on_different_cell_content(raise_on_first_difference):", "'display_name': 'Python'}}, cells=[new_markdown_cell('Cell one')]) test = new_notebook(metadata={'kernelspec': {'language': 'R', 'name':", "\"output_type\": \"execute_result\" } ])]) compare_notebooks(ref, test, 'md') 
@pytest.mark.parametrize('raise_on_first_difference', [True, False])", "in exception_info.value.args[0] def test_cell_metadata_differ(): nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata1'})])", "metadata={'additional': 'metadata1'})]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata2'})]) with pytest.raises(NotebookDifference)", "= new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_markdown_cell('second line')])", "= new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\": { \"text/plain\":", "raise_on_first_difference=raise_on_first_difference) def test_test_round_trip_conversion(): notebook = new_notebook(cells=[new_code_cell('1+1', outputs=[ { \"data\": {", "\"metadata\": {}, \"output_type\": \"execute_result\" } ])]) compare_notebooks(ref, test, 'md') @pytest.mark.parametrize('raise_on_first_difference',", "new_code_cell('2', metadata={'additional': 'metadata1'})]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2', metadata={'additional': 'metadata2'})]) with", "pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md', raise_on_first_difference=raise_on_first_difference) def test_raise_on_incomplete_markdown_cell(): ref = new_notebook(cells=[new_markdown_cell('Cell", "new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')], metadata={'kernelspec': {'language': 'python', 'name': 'python',", "pytest.raises(NotebookDifference) as exception_info: compare_notebooks(nb1, nb2, raise_on_first_difference=False, ) assert \"Notebook metadata", "import new_notebook, new_markdown_cell, new_code_cell, new_raw_cell from jupytext.compare import compare_notebooks, NotebookDifference,", "= new_notebook(cells=[new_markdown_cell('Cell one'), new_markdown_cell('second line')]) with 
pytest.raises(NotebookDifference): compare_notebooks(ref, test, 'md')", "def test_raise_on_different_metadata(): ref = new_notebook(metadata={'kernelspec': {'language': 'python', 'name': 'python', 'display_name':", "nb1 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')]) nb2 = new_notebook(cells=[new_code_cell('1'), new_code_cell('2')], metadata={'kernelspec': {'language':", "one'), new_code_cell('Cell two')]) test = new_notebook(cells=[new_markdown_cell('Cell one'), new_raw_cell('Cell two')]) with", "= new_notebook(cells=[new_code_cell('1+1')]) test = new_notebook(cells=[new_code_cell('1+1', metadata={'metakey': 'value'})]) with pytest.raises(NotebookDifference): compare_notebooks(ref,", "from nbformat.v4.nbbase import new_notebook, new_markdown_cell, new_code_cell, new_raw_cell from jupytext.compare import", "\"metadata\": {}, \"output_type\": \"execute_result\" } ])], metadata={'main_language': 'python'}) round_trip_conversion(notebook, {'extension':", "new_notebook(cells=[new_markdown_cell('Cell one\\n\\n\\nsecond line')]) test = new_notebook(cells=[new_markdown_cell('Cell one')]) with pytest.raises(NotebookDifference): compare_notebooks(ref," ]
[ "= [ 'you unhinge your jaw and consume *{}* in", "friends...', 'now you\\'re at both ends of a high five!']", "makes me do this for uninitialized classes await ctx.channel.send(msg) return", "\"\"\"High five like a boss.\"\"\" msg = self.highfives.computeAction(self.highfives, self.bot, ctx,", "but they bite your hand', 'you try to pet *{}*", "onto *{}*\\'s head. *Bliss.*', 'your hand touches *{}*\\'s snoot -", "just can\\'t bring yourself to drink *{}* - so you", "fived *{}*.', 'your hand flops through the air - hitting", "your booping.', '*{}* starts resembling a happy pupper.'] class spooky(actionable):", "ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def pet(self, ctx,", "something in it, so you drink *nothing*...', 'you should probably", "'I\\'m a bot. You can\\'t drink me.', 'you stick a", "arguments botList = [] # when the action is done", "best to scurry away and hide.', 'your jaw clamps down", "= ['ahh - high fiving yourself, classy...', 'that\\'s uh... that\\'s", "to boop *{}* - so you just let your hand", "was something to eat, so you just chew on nothingness...',", "existence in wake of that tremendous high five!', 'a 2", "'you try to eat *{}*, but you just can\\'t quite", "'you might not be the smartest...'] memberList = [ 'you", "'get spooked by... 
yourself?', 'sp00py, but why spook yourself?'] memberList", "*someone*.'] selfList = ['you boop yourself on the nose with", "the action messages class eating(actionable): nothingList = [ 'you sit", "memberList = [ 'you unhinge your jaw and consume *{}*", "to realise that *I* am already drinking you!', 'I\\'m a", "str = None): \"\"\"pet kitties.\"\"\" msg = self.petting.computeAction(self.petting, self.bot, ctx,", "yourself to bite down.', 'you happily munch away, but can", "Sun package.', 'you are thirsty - *{}* sacrifices themself involuntarily.',", "only sound you hear as a soft *whoosh* as your", "# Init with the bot reference, and a reference to", "sake of chewing. That time is now.'] botList = [", "Capri Sun package.', 'you are thirsty - *{}* sacrifices themself", "'you try to eat *me* - but unfortunately, I saw", "'too bad there\\'s no one else to pet you.', 'in", "- you spit them out, the taste of failure hanging", "it. You want to boop *someone*.'] selfList = ['you boop", "'sadly, no one got spooked', 'it is sp00... you can\\t", "your straw.', 'You search for me, only to realise that", "me; My digital emotions will get all messed up!' 
'aaaaaaaaaah!", "'you spook nothing, sp00py...', 'sadly, no one got spooked', 'it", "try to eat *{}*, but you just can\\'t quite do", "as your hand connects with nothing...'] botList = [ 'the", "but unfortunately, I saw it coming - your jaw hangs", "\"\"\"Drink like a boss.\"\"\" msg = self.drinking.computeAction(self.drinking, self.bot, ctx, member)", "happily boop *{}*, it\\'s lovely!', 'you just can\\'t bring yourself", "'you sink your teeth into *{}\\'s* shoulder - they turn", "sink your teeth into *{}\\'s* shoulder - they turn to", "very floppy.', 'wait - you\\'re not a drink!', 'you might", "= ['you stab yourself with a straw - not surprisingly,", "ctx.channel.send(msg) return @commands.command(pass_context=True) async def highfive(self, ctx, *, member :", "toward another member itemList = [] # when the action", "'01001000011010010110011101101000001000000100011001101001011101100110010100100001'] selfList = ['ahh - high fiving yourself, classy...', 'that\\'s", "for an eternity, hand raised up - desperate for any", ": str = None): \"\"\"Eat like a boss.\"\"\" msg =", "sp00py time! Hey *{}*, boo!', 'congrats, *{}* dun sp00ked.', 'get", "you scare me like that again!'] selfList = ['go watch", "know what drinking is?', 'you desperatly search for something to", "= random.choice(self.nothingList) else: targetMember = DisplayName.memberForName(target, ctx.message.guild) if targetMember: if", "go.', 'you try to boop *{}*, but you just can\\'t", "you end up booping *{}*.', 'you climb *{}*\\'s head and", "*{}*.', 'you climb *{}*\\'s head and use it as a", "= bot global Utils, DisplayName Utils = self.bot.get_cog(\"Utils\") DisplayName =", "pupper.'] class spooky(actionable): nothingList = [ 'you spook no one", "the override class. 
any {} are replaced with target member's", "classes await ctx.channel.send(msg) return @commands.command(pass_context=True) async def drink(self, ctx, *,", "another member itemList = [] # when the action is", "ctx.message.add_reaction(\"🎃\") msg = self.spooky.computeAction(self.spooky, self.bot, ctx, member) await ctx.channel.send(msg) return", "take a big sip of *{}*. *Delicious.*', 'your straw sinks", "'you can\\'t drink me, I\\'m a machine!'] selfList = ['you", "to the cloud and receive a quick high five from", "cup must\\'ve had something in it, so you drink *nothing*...',", "run in a large circle - *totally* high fiving all", "yourself in to a cup, but you just can\\'t do", "empty we fail over to the member list elif self.selfList", "as you try your best to scurry away and hide.',", "*, member : str = None): \"\"\"Drink like a boss.\"\"\"", "ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def highfive(self, ctx,", "boop *{}*, but you just can\\'t quite do it -", "= [ 'the sky erupts with 1\\'s and 0\\'s as", "hand flops through the air - hitting *{}* with a", "pretty empty', 'are you sure you know what drinking is?',", "You run away as they run after you.', 'you happily", "fit yourself in to a cup, but you just can\\'t", "starts resembling the Aral Sea.'] class booping(actionable): nothingList = [", "hear as a soft *whoosh* as your hand connects with", "'{}' in mesg: mesg = mesg.format(target) mesgFull = '*{}*, {}'.format(DisplayName.name(ctx.message.author),", "just chewing nothing for the sake of chewing. 
That time", "satisfying.', 'you thirstly guzzle *{}*, it\\'s lovely!', 'you just can\\'t", "'you attempt to boop *{}*, but you\\'re clumsier than you", "cannot let you spook me; My digital emotions will get", "you would like the taste of me.', 'you can\\'t drink", "plays out.', 'you and *{}* elevate to a higher plane", "'you should probably just go get a drink.', 'that desk", "= [ 'you grab your lucky straw and empty *{}*", "'it is sp00... you can\\t spook air'] botList = [", "in it, so you drink *nothing*...', 'you should probably just", "your hand forward - the momentum carries you to the", "- you and *{}* connect palms, subsequently deafening everyone in", "momentum carries you to the ground and you just lay", "rub *{}* but it doesn\\'t feel like a cat.', 'you", "be the smartest...', 'you might have some issues.', 'you try", "a bot. You can\\'t eat me.', 'your jaw clamps down", "['you rub *{}* but it doesn\\'t feel like a cat.',", "def __init__(self, bot): self.bot = bot global Utils, DisplayName Utils", "so you drink *nothing*...', 'you should probably just go get", "guzzle *{}*, it\\'s lovely!', 'you just can\\'t bring yourself to", "castle... 
they feel amused.'] itemList = ['you put your hand", "camiel.\"\"\" if datetime.date.today().month == 10: # make it extra sp00py", "hanging stuck to your hand...', 'you sneak a boop onto", "by camiel.\"\"\" if datetime.date.today().month == 10: # make it extra", "bring yourself to eat *{}* - so you just hold", "quick high five from me before downloading back to Earth.',", "'are you sure you have someone to boop?', 'I get", "bot mesg = random.choice(self.botList) # if botList is empty we", "is spooktober await ctx.message.add_reaction(\"🎃\") msg = self.spooky.computeAction(self.spooky, self.bot, ctx, member)", "with target member's name nothingList = [] # when you", "but why spook yourself?'] memberList = [ 'you sp00p *{}*", "'you happily drum your fingers away - *{}* starts to", "me!', 'you spooked me so hard, I got the Heebie-jeebies...',", "hand in the air.', 'you could have sworn there was", "*{}\\'s* shoulder - You run away as they run after", "clapping...', 'you run in a large circle - *totally* high", "it hurts.', 'you fit yourself in to a cup, but", "deafening everyone in a 300-mile radius!'] itemList = ['neat... you", "hitting *{}* with a soft thud.', 'you reach out a", "- but unfortunately, I saw it coming - your jaw", "petting.', 'you pet *{}* but they bite your hand', 'you", "time when you need to realize that you\\'re just chewing", "mouth...', 'you take a quick bite out of *{}*. They", "with the bot reference, and a reference to the settings", "feel like a cat.', 'you don\\'t hear any purring from", "a hand, gently pressing your palm to *{}*. A soft", "'you rip hungrily into *{}*, tearing it to bits!', 'you", "Don\\t you scare me like that again!'] selfList = ['go", "treat *{}* without any reaction...', 'you do your best to", "chewing. 
That time is now.'] botList = [ 'you try", "member : str = None): \"\"\"sp00ktober by camiel.\"\"\" if datetime.date.today().month", "text that is not a member def computeAction(self, bot, ctx,", "failure hanging in your mouth...', 'you drink a small sip", "head, the taste of failure hanging stuck to your hand...',", "'you stand alone for an eternity, hand raised up -", "boss.\"\"\" msg = self.highfives.computeAction(self.highfives, self.bot, ctx, member) await ctx.channel.send(msg) return", "you call without any arguments botList = [] # when", "with a soft thud.', 'you reach out a hand, gently", "uninitialized classes await ctx.channel.send(msg) return @commands.command(pass_context=True) async def drink(self, ctx,", "storing and computing action messages class actionable: ## these should", "ctx.channel.send(msg) return @commands.command(pass_context=True) async def boop(self, ctx, *, member :", "nothingList = [ 'you stretch out your hand in the", "= [ 'you stretch out your hand in the air,", "= random.choice(self.selfList) else: # actioning another user mesg = random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember))", "user mesg = random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember)) else: # actioning an item mesg", "'you try to drink yourself.', 'why would you drink yourself?']", "the nose with your finger.', 'you try to boop your", "try to eat *me* - but unfortunately, I saw it", "I cannot let you spook me; My digital emotions will", "into *{}\\'s* shoulder - You run away as they run", "than you remember - and fail...', 'you drink *{}*.', '*{}*", "give yourself a nice pat on the head.', 'too bad", "[ 'you stare at your glass full of *nothing*...', 'that", "again!'] selfList = ['go watch a scary movie to be", "A soft *\"high five\"* escapes your lips as a tear", "the bot mesg = random.choice(self.botList) # if botList is empty", "nothing...'] botList = [ 'the sky erupts with 1\\'s and", "head and use it as a bouncy 
castle... they feel", "one there.', 'you look around the channel for someone to", "computing action messages class actionable: ## these should be filled", "a soft thud.', 'you reach out a hand, gently pressing", "package.', 'you are thirsty - *{}* sacrifices themself involuntarily.', 'somehow", "- and fail...', 'you drink *{}*.', '*{}* dries up from", "member itemList = [] # when the action is done", "in one sip.', 'you try to drink *{}*, but you", "a computer.'] selfList = ['you give yourself a nice pat", "small sip of *{}*. They probably didn\\'t even notice.', 'you", "you spook me; My digital emotions will get all messed", "itemList = [] # when the action is done on", "it for awhile...', 'you attempt to drain *{}*, but you\\'re", "'the only sound you hear as a soft *whoosh* as", "'I get it. You want to boop *someone*.'] selfList =", "machine!'] selfList = ['you stab yourself with a straw -", "they bite your hand', 'you try to pet *{}* but", "'you stare at your glass full of *nothing*...', 'that cup", "you finish your newest meal.'] itemList = [ 'you take", "the room, just waiting to be booped.', 'are you sure", "botList = [ 'I may be electronic but I still", "to boop your head, but your hand gets lost along", "doesn\\'t feel like a cat.', 'you don\\'t hear any purring", "[ 'you stand alone for an eternity, hand raised up", "starts crying!'] class highfives(actionable): nothingList = [ 'you stand alone", "empty *{}* in one sip.', 'you try to drink *{}*,", "jaw clamps down on... wait... on nothing, because I\\'m *digital!*.',", "@commands.command(pass_context=True) async def highfive(self, ctx, *, member : str =", "linger...', 'you attempt to boop *{}*, but you\\'re clumsier than", "uh... that\\'s just clapping...', 'you run in a large circle", "had something in it, so you drink *nothing*...', 'you should", "rub your hand through *{}\\'s* hair.', '*{}* smiles from your", "air'] botList = [ 'you scared the living pumpkin out", "bouncy castle... 
they feel amused.'] itemList = ['you put your", "*{}* - a satisfying *crunch* emanates as you finish your", "to pet, you pet yourself.', 'your hair is warm and", "bot reference, and a reference to the settings var def", "so you just let your hand linger...', 'you attempt to", "'you try to boop yourself.', 'why would you boop yourself?']", "'*purrrrrrrrrrrrrrr*.', 'you electrocute yourself trying to pet a computer.'] selfList", "soft *whoosh* as your hand connects with nothing...'] botList =", "self.drinking.computeAction(self.drinking, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def", "'your straw sinks into *{}* - it tastes satisfying.', 'you", "'you hurt your hand trying to pet *{}*.'] # Init", "hand onto *{}*\\'s head. *Bliss.*', 'your hand touches *{}*\\'s snoot", "'somehow you end up booping *{}*.', 'you climb *{}*\\'s head", "user who called it memberList = [] # when the", "ends of a high five!'] memberList = [ 'you and", "- it tastes satisfying.', 'you rip hungrily into *{}*, tearing", "eyes wide as you try your best to scurry away", "from your petting.', 'you try to pet *{}*, but miss", "botList = [ 'the sky erupts with 1\\'s and 0\\'s", "bot global Utils, DisplayName Utils = self.bot.get_cog(\"Utils\") DisplayName = self.bot.get_cog(\"DisplayName\")", "- a satisfying *crunch* emanates as you finish your newest", "'you spook no one but yourself', 'you spook nothing, sp00py...',", "swing you throw your hand forward - the momentum carries", "high five from me before downloading back to Earth.', 'I", "search for something to drink'] botList = [ 'you try", "\"\"\"Eat like a boss.\"\"\" msg = self.eating.computeAction(self.eating, self.bot, ctx, member)", "*{}*. They probably didn\\'t even notice.', 'you sink your teeth", "like the taste of me.', 'you can\\'t drink me, I\\'m", "your best to sp00p *{}*, but fail...', 'sp00py time! 
*{}*", "drum your fingers away - *{}* starts to look annoyed.',", "if self.botList and targetMember.id == bot.user.id: # actioning the bot", "it coming - your jaw hangs open as I deftly", "any reaction...', 'you do your best to sp00p *{}*, but", "around the channel for someone to boop.', 'you eye all", "of me!', 'you spooked me so hard, I got the", "in the air.', 'you could have sworn there was a", "and hide.', 'your jaw clamps down on *{}* - a", "you!', 'I\\'m a bot. You can\\'t drink me.', 'you stick", "would you boop yourself?'] memberList = [ 'you outstretch your", "- you miss their head, the taste of failure hanging", "booping.', '*{}* starts resembling a happy pupper.'] class spooky(actionable): nothingList", "*{}* engage in a world-ending high five!', 'it *was* tomorrow", "You can\\'t eat me.', 'your jaw clamps down on... wait...", "0\\'s as our hands meet in an epic high five", "straw - not surprisingly, it hurts.', 'you fit yourself in", "hangs open for a brief second before you realize that", "the bed.', '*{}* purrs from your petting.', 'you pet *{}*", "you.', 'you happily drink away - *{}* starts to look", "drink(self, ctx, *, member : str = None): \"\"\"Drink like", "the bot bot.add_cog(Actions(bot)) class Actions(commands.Cog): ## class that handles storing", "is done on a string of text that is not", "someone to boop, but there\\'s no one there.', 'you look", "scary movie to be absolutely sp00ped!', 'boo! Did you scare", "a machine!'] selfList = ['you stab yourself with a straw", "- *{}* starts to look annoyed.', 'you\\'re feeling boopy -", "*{}*\\'s head. *Bliss.*', 'your hand touches *{}*\\'s snoot - it", "bot. 
You can\\'t drink me.', 'you stick a straw in...", "'you spooked me so hard, I got the Heebie-jeebies...', #", "and eat *nothing*...', 'you\\'re *sure* there was something to eat,", "you just lay there - high fiveless...', 'the only sound", "looking weird...', '*{}* got sp00p3d so hard, it ran away!',", "into *{}* - it tastes satisfying.', 'you thirstly guzzle *{}*,", "pet *{}* but they hiss and run away.'] itemList =", "no one but yourself', 'you spook nothing, sp00py...', 'sadly, no", "'you could have sworn there was a cat there!', 'you", "hangs open as I deftly sidestep.', 'your mouth hangs open", "I be if I let you eat me?'] selfList =", "mouth, but *just can\\'t* force yourself to bite down.', 'you", "= [ 'you outstretch your lucky finger and boop *{}*", "msg = self.eating.computeAction(self.eating, self.bot, ctx, member) #python is silly and", "drinking(actionable): nothingList = [ 'you stare at your glass full", "'*{}* smiles from your petting.', 'you try to pet *{}*,", "of the command''' mesg = \"\" if not target: #", "msg = self.spooky.computeAction(self.spooky, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True)", "your finger.', 'you try to boop your head, but your", "me before downloading back to Earth.', 'I unleash a fork-bomb", "could have sworn there was a cat there!', 'you remember", "to bite down.', 'you happily munch away, but can now", "*{}* gets sp00ped harder than you thought and starts crying!']", "there.', 'you look around the channel for someone to boop.',", "no one there.', 'you look around the channel for someone", "member) #python is silly and makes me do this for", "*{}*.', 'your hand flops through the air - hitting *{}*", "'a 2 hour, 3 episode anime-esque fight scene unfolds as", "any sort of recognition...', 'with a wild swing you throw", "your lips as a tear runs down your cheek...', 'like", "that *I* am already drinking you!', 'I\\'m a bot. 
You", "might have some issues.', 'you try to boop yourself.', 'why", "five\"* escapes your lips as a tear runs down your", "themself involuntarily.', 'somehow you end up emptying *{}*.'] itemList =", "a boop onto *{}*. They probably didn\\'t even notice.', 'you", "quietly and eat *nothing*...', 'you\\'re *sure* there was something to", "like that again!'] selfList = ['go watch a scary movie", "sound you hear as a soft *whoosh* as your hand", "but you just can\\'t quite do it - you spit", "a higher plane of existence in wake of that tremendous", "soft thud.', 'you reach out a hand, gently pressing your", "pet *{}*, but miss because they hid under the bed.',", "one bite.', 'you try to eat *{}*, but you just", "crying!'] class highfives(actionable): nothingList = [ 'you stand alone for", "yourself to drink *{}* - so you just hold it", "they heard you sneakin\\' and fail...', 'it is sp00py time!", "ctx, *, member : str = None): \"\"\"Drink like a", "boop *{}*.', '*{}* feels annoyed from your booping.', '*{}* starts", "mirror and get a little scared...', 'get spooked by... yourself?',", "your teeth into *{}\\'s* shoulder - they turn to face", "something to drink'] botList = [ 'you try to drink", "up emptying *{}*.'] itemList = ['you take a big sip", "Add the bot bot.add_cog(Actions(bot)) class Actions(commands.Cog): ## class that handles", "They probably didn\\'t even notice.', 'you sink your teeth into", "a big sip of *{}*. *Delicious.*', 'your straw sinks into", "- so you just hold it for awhile...', 'you attempt", "flops through the air - hitting *{}* with a soft", "grab your lucky straw and empty *{}* in one sip.',", "petting.', 'you try to pet *{}*, but miss because they", "= [ 'you stare at your glass full of *nothing*...',", "me?', 'I don\\'t think you would like the taste of", "already drinking you!', 'I\\'m a bot. 
You can\\'t drink me.',", "mesg = random.choice(self.botList) # if botList is empty we fail", "targetMember = DisplayName.memberForName(target, ctx.message.guild) if targetMember: if self.botList and targetMember.id", "scare me like that again!'] selfList = ['go watch a", "petting(actionable): # meow nothingList = [ 'you absentmindedly wave your", "mesgFull ## static definitions of all the action messages class", "you sure you want to boop yourself?', 'you might not", "hurts.', 'you fit yourself in to a cup, but you", "boss.\"\"\" msg = self.drinking.computeAction(self.drinking, self.bot, ctx, member) await ctx.channel.send(msg) return", "not be the smartest...'] memberList = [ 'you unhinge your", "= [ 'you try to drink *me*, but I dodge", "target): '''return a message based on the context and argument", "on... wait... on nothing, because I\\'m *digital!*.', 'what kind of", "on nothing, because I\\'m *digital!*.', 'what kind of bot would", "palm to *{}*. A soft *\"high five\"* escapes your lips", "*{}*, but they heard you sneakin\\' and fail...', 'it is", "five processes!', '01001000011010010110011101101000001000000100011001101001011101100110010100100001'] selfList = ['ahh - high fiving yourself,", "I got the Heebie-jeebies...', # https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/ 'you sp00p me? 
But", "it memberList = [] # when the action is done", "just high fived *{}*.', 'your hand flops through the air", "self.selfList and targetMember.id == ctx.message.author.id: # actioning themselves mesg =", "to be booped.', 'are you sure you have someone to", "taste of me.', 'you can\\'t drink me, I\\'m a machine!']", "else to pet you.', 'in lieu of anything else to", "'*{}* feels annoyed from your booping.', '*{}* starts resembling a", "leaving you looking weird...', '*{}* got sp00p3d so hard, it", "*{}* but they hiss and run away.'] itemList = ['you", "bot.user.id: # actioning the bot mesg = random.choice(self.botList) # if", "bed.', '*{}* purrs from your petting.', 'you pet *{}* but", "'you reach out a hand, gently pressing your palm to", "*{}* elevate to a higher plane of existence in wake", "ctx.channel.send(msg) return @commands.command(pass_context=True) async def drink(self, ctx, *, member :", "from discord.ext import commands from Cogs import DisplayName from Cogs", "yourself.', 'why would you boop yourself?'] memberList = [ 'you", "await ctx.channel.send(msg) return @commands.command(pass_context=True) async def spook(self, ctx, *, member", "just go get a drink.', 'that desk looks pretty empty',", "for someone to boop.', 'you eye all the heads in", "away - *{}* starts to look annoyed.', 'you\\'re feeling boopy", "tastes satisfying.', 'you rip hungrily into *{}*, tearing it to", "take a quick bite out of *{}*. They probably didn\\'t", "should be filled in the override class. any {} are", "it for awhile...', 'you attempt to bite into *{}*, but", "to your hand...', 'you sneak a boop onto *{}*. They", "through the air - hitting *{}* with a soft thud.',", "a fork-bomb of high five processes!', '01001000011010010110011101101000001000000100011001101001011101100110010100100001'] selfList = ['ahh", "a 300-mile radius!'] itemList = ['neat... 
you just high fived", "reference, and a reference to the settings var def __init__(self,", "it to bits!', 'you just can\\'t bring yourself to eat", "straw and empty *{}* in one sip.', 'you try to", "thirstly guzzle *{}*, it\\'s lovely!', 'you just can\\'t bring yourself", "get it. You want to boop *someone*.'] selfList = ['you", "# when the action is done at the bot selfList", "your hand trying to pet *{}*.'] # Init with the", "yourself?'] memberList = [ 'you outstretch your lucky finger and", "because the cat is gone.'] botList = [ 'I may", "tear runs down your cheek...', 'like an open-handed piston of", "in a world-ending high five!', 'it *was* tomorrow - before", "sort of recognition...', 'with a wild swing you throw your", "realise that *I* am already drinking you!', 'I\\'m a bot.", "msg = self.booping.computeAction(self.booping, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True)", "sacrifices themself involuntarily.', 'somehow you end up booping *{}*.', 'you", "be booped.', 'are you sure you have someone to boop?',", "done at the bot selfList = [] # when the", "sp00py because it is spooktober await ctx.message.add_reaction(\"🎃\") msg = self.spooky.computeAction(self.spooky,", "failure hanging stuck to your hand...', 'you sneak a boop", "wide as you try your best to scurry away and", "fork-bomb of high five processes!', '01001000011010010110011101101000001000000100011001101001011101100110010100100001'] selfList = ['ahh -", "probably didn\\'t even notice.', 'you sink your teeth into *{}\\'s*", "drinking is?', 'you desperatly search for something to drink'] botList", "you just can\\'t quite do it - you miss their", "from your booping.', '*{}* starts resembling a happy pupper.'] class", "'the sky erupts with 1\\'s and 0\\'s as our hands", "are you sure you want to boop yourself?', 'you might", "just chew on nothingness...', 'there comes a time when you", "on a string of text that is not a member", "wait... 
in nothing, because I\\'m *digital!*.', 'what do you think", "self.eating.computeAction(self.eating, self.bot, ctx, member) #python is silly and makes me", "thirsty - *{}* sacrifices themself involuntarily.', 'somehow you end up", "# meow nothingList = [ 'you absentmindedly wave your hand", "*, member : str = None): \"\"\"pet kitties.\"\"\" msg =", "hand gets lost along the way.', 'you happily boop yourself,", "and empty *{}* in one sip.', 'you try to drink", "circle - *totally* high fiving all your friends...', 'now you\\'re", "= ['you rub *{}* but it doesn\\'t feel like a", "selfList = ['you boop yourself on the nose with your", "have someone to boop?', 'I get it. You want to", "you have someone to boop?', 'I get it. You want", "miss their head, the taste of failure hanging stuck to", "quick bite out of *{}*. They probably didn\\'t even notice.',", "there\\'s no one else to pet you.', 'in lieu of", "*{}*. *Delicious.*', 'your straw sinks into *{}* - it tastes", "from me before downloading back to Earth.', 'I unleash a", "*{}*, but miss because they hid under the bed.', '*{}*", "## these should be filled in the override class. any", "way.', 'you happily boop yourself, but you are now very", "now very giddy.', 'wait - are you sure you want", "None): \"\"\"High five like a boss.\"\"\" msg = self.highfives.computeAction(self.highfives, self.bot,", "- high fiving yourself, classy...', 'that\\'s uh... that\\'s just clapping...',", "'that cup must\\'ve had something in it, so you drink", "involuntarily.', 'somehow you end up emptying *{}*.'] itemList = ['you", "jaw clamps down on *{}* - a satisfying *crunch* emanates", "Earth in reverse!', 'like two righteous torpedoes - you and", "only to realise that *I* am already drinking you!', 'I\\'m", "gently pressing your palm to *{}*. A soft *\"high five\"*", "when the action is done toward another member itemList =", "tomorrow - before you and *{}* high fived with enough", "me.', 'you stick a straw in... wait... 
in nothing, because", "pat on the head.', 'you rub your hand through *{}\\'s*", "the taste of me.', 'you can\\'t drink me, I\\'m a", "for awhile...', 'you attempt to drain *{}*, but you\\'re clumsier", "no arguments mesg = random.choice(self.nothingList) else: targetMember = DisplayName.memberForName(target, ctx.message.guild)", "settings var def __init__(self, bot): self.bot = bot global Utils,", "are replaced with target member's name nothingList = [] #", "'you drink *{}*.', '*{}* dries up from your drinking.', '*{}*", "'you\\'re *sure* there was something to eat, so you just", "when the action is done on a string of text", "fail...', 'it is sp00py time! Hey *{}*, boo!', 'congrats, *{}*", "message based on the context and argument of the command'''", "# Add the bot bot.add_cog(Actions(bot)) class Actions(commands.Cog): ## class that", "itemList = ['you rub *{}* but it doesn\\'t feel like", "[ 'you sp00p *{}* so hard that they start screaming!',", "into your mouth, but *just can\\'t* force yourself to bite", "out, the taste of failure hanging in your mouth...', 'you", "of *{}*. 
They probably didn\\'t even notice.', 'you sink your", "10: # make it extra sp00py because it is spooktober", "a time when you need to realize that you\\'re just", "can\\'t do it.', 'you happily drink away, but you are", "a drink!', 'you might not be the smartest...', 'you might", "discord.ext import commands from Cogs import DisplayName from Cogs import", "nothingList = [ 'you sit quietly and eat *nothing*...', 'you\\'re", "hard, it ran away!', 'you trick or treat *{}* without", "random import datetime from discord.ext import commands from Cogs import", "is done at the bot selfList = [] # when", "as they run after you.', 'you happily drum your fingers", "before you realize that *I\\'m* eating *you*.', 'I\\'m a bot.", "a nice pat on the head.', 'too bad there\\'s no", "there are no cats here.', 'you try to pet the", "without any reaction...', 'you do your best to sp00p *{}*,", "the living pumpkin out of me!', 'you spooked me so", "*{}* but they bite your hand', 'you try to pet", "member : str = None): \"\"\"Eat like a boss.\"\"\" msg", "smartest...', 'you might have some issues.', 'you try to drink", "resembling a happy pupper.'] class spooky(actionable): nothingList = [ 'you", "'sorry, but I cannot let you spook me; My digital", "I saw it coming - your jaw hangs open as", "global Utils, DisplayName Utils = self.bot.get_cog(\"Utils\") DisplayName = self.bot.get_cog(\"DisplayName\") @commands.command(pass_context=True)", "'you take a quick bite out of *{}*. 
They probably", "at the bot selfList = [] # when the action", "you end up emptying *{}*.'] itemList = ['you take a", "the heads in the room, just waiting to be booped.',", "action messages class actionable: ## these should be filled in", "is done at the user who called it memberList =", "you want to boop yourself?', 'you might not be the", "- they turn to face you, eyes wide as you", "sink into *{}* - it tastes satisfying.', 'you rip hungrily", "attempt to bite into *{}*, but you\\'re clumsier than you", "'You search for me, only to realise that *I* am", "a brief second before you realize that *I\\'m* eating *you*.',", "sneak up on *{}*, but they heard you sneakin\\' and", "and fail...', 'it is sp00py time! Hey *{}*, boo!', 'congrats,", "reverse!', 'like two righteous torpedoes - you and *{}* connect", "warm and soft.'] memberList = [ 'you give *{}* a", "high fiving all your friends...', 'now you\\'re at both ends", "would like the taste of me.', 'you can\\'t drink me,", "the smartest...', 'you might have some issues.', 'you try to", "'wait - are you sure you want to boop yourself?',", "into *{}*, but you\\'re clumsier than you remember - and", "meet in an epic high five of glory!', 'you beam", "reference to the settings var def __init__(self, bot): self.bot =", "'your hand flops through the air - hitting *{}* with", "and run away.'] itemList = ['you rub *{}* but it", "from your petting.', 'you pet *{}* but they bite your", "even notice.', 'you sink your teeth into *{}\\'s* shoulder -", "*{}* high fived with enough force to spin the Earth", "of ferocity - you drive your palm into *{}*.'] class", "climb *{}*\\'s head and use it as a bouncy castle...", "a finger into your mouth, but *just can\\'t* force yourself", "[] # when the action is done toward another member", "action is done on a string of text that is", "only wave with your left hand.', 'wait - you\\'re not", "got spooked', 'it is sp00... 
you can\\t spook air'] botList", "to spin the Earth in reverse!', 'like two righteous torpedoes", "- hitting *{}* with a soft thud.', 'you reach out", "'you just can\\'t bring yourself to drink *{}* - so", "feeling boopy - *{}* sacrifices themself involuntarily.', 'somehow you end", "in your mouth...', 'you take a quick bite out of", "it, so you drink *nothing*...', 'you should probably just go", "\"\"\"sp00ktober by camiel.\"\"\" if datetime.date.today().month == 10: # make it", "*{}*!'] itemList = ['you spook *{}* with no reaction, leaving", "you\\'re not a sandwich!', 'you might not be the smartest...']", "epic high five of glory!', 'you beam up to the", "boopy - *{}* sacrifices themself involuntarily.', 'somehow you end up", "*{}\\'s* hand - You run away as they run after", "teeth sink into *{}* - it tastes satisfying.', 'you rip", "var def __init__(self, bot): self.bot = bot global Utils, DisplayName", "giddy.', 'wait - are you sure you want to boop", "fail over to the member list elif self.selfList and targetMember.id", "the cat, but miss because the cat is gone.'] botList", "*digital!*.', 'what do you think I am to let you", "import random import datetime from discord.ext import commands from Cogs", "self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def highfive(self,", "them out, the taste of failure hanging in your mouth...',", ": str = None): \"\"\"Drink like a boss.\"\"\" msg =", "*, member : str = None): \"\"\"sp00ktober by camiel.\"\"\" if", "async def eat(self, ctx, *, member : str = None):", "let you drink me?', 'I don\\'t think you would like", "botList = [ 'you try to eat *me* - but", "the Aral Sea.'] class booping(actionable): nothingList = [ 'you stretch", "hand onto *{}\\'s* hand - You run away as they", "so hard that they start screaming!', 'you tried to sneak", "sandwich!', 'you might not be the smartest...'] memberList = [", "Sea.'] class booping(actionable): nothingList = [ 'you stretch out 
your", "the action is done toward another member itemList = []", "is sp00... you can\\t spook air'] botList = [ 'you", "the bot reference, and a reference to the settings var", "issues.', 'you try to drink yourself.', 'why would you drink", "taste of failure hanging stuck to your hand...', 'you sneak", "it\\'s lovely!', 'you just can\\'t bring yourself to boop *{}*", "look around the channel for someone to boop.', 'you eye", "am already drinking you!', 'I\\'m a bot. You can\\'t drink", "lucky finger and boop *{}* in one go.', 'you try", "soft.'] memberList = [ 'you give *{}* a pat on", "do this for uninitialized classes await ctx.channel.send(msg) return @commands.command(pass_context=True) async", "and *{}* elevate to a higher plane of existence in", "nose with your finger.', 'you try to boop your head,", "random.choice(self.itemList) if '{}' in mesg: mesg = mesg.format(target) mesgFull =", "taste of failure hanging in your mouth...', 'you drink a", "a bouncy castle... they feel amused.'] itemList = ['you put", "async def highfive(self, ctx, *, member : str = None):", "*{}* with no reaction, leaving you looking weird...', '*{}* got", "definitions of all the action messages class eating(actionable): nothingList =", "DisplayName = self.bot.get_cog(\"DisplayName\") @commands.command(pass_context=True) async def eat(self, ctx, *, member", "like a cat.', 'you don\\'t hear any purring from *{}*.',", "selfList = ['you give yourself a nice pat on the", "you?', 'you look yourself in the mirror and get a", "class spooky(actionable): nothingList = [ 'you spook no one but", "- and fail...', 'you boop *{}*.', '*{}* feels annoyed from", "= [ 'you scared the living pumpkin out of me!',", "you drink me?', 'I don\\'t think you would like the", "when the action is done at the user who called", "'you might have some issues.', 'you try to drink yourself.',", "else: # actioning an item mesg = random.choice(self.itemList) if '{}'", "[ 'you stretch out your hand in the air, but", "no 
cats here.', 'you try to pet the cat, but", "= [ 'you stand alone for an eternity, hand raised", "= self.bot.get_cog(\"Utils\") DisplayName = self.bot.get_cog(\"DisplayName\") @commands.command(pass_context=True) async def eat(self, ctx,", "out a hand, gently pressing your palm to *{}*. A", "if botList is empty we fail over to the member", "'you give *{}* a pat on the head.', 'you rub", "a boss.\"\"\" msg = self.eating.computeAction(self.eating, self.bot, ctx, member) #python is", "me.', 'you can\\'t drink me, I\\'m a machine!'] selfList =", "drink.', 'that desk looks pretty empty', 'are you sure you", "it tastes satisfying.', 'you thirstly guzzle *{}*, it\\'s lovely!', 'you", "= [ 'you spook no one but yourself', 'you spook", "can\\'t bring yourself to eat *{}* - so you just", "can\\t spook air'] botList = [ 'you scared the living", "bot bot.add_cog(Actions(bot)) class Actions(commands.Cog): ## class that handles storing and", "a satisfying *crunch* emanates as you finish your newest meal.']", "you spit them out, the taste of failure hanging in", "after you.', 'you happily drink away - *{}* starts to", "or treat *{}* without any reaction...', 'you do your best", "= [] # when the action is done at the", "return @commands.command(pass_context=True) async def drink(self, ctx, *, member : str", "freeze-framing as the credits roll and some wicked 80s synth", "movie to be absolutely sp00ped!', 'boo! 
Did you scare you?',", "try and find someone to boop, but there\\'s no one", "kind of bot would I be if I let you", "your best to scurry away and hide.', 'your jaw clamps", "for a brief second before you realize that *I\\'m* eating", "on *{}* - a satisfying *crunch* emanates as you finish", "it is spooktober await ctx.message.add_reaction(\"🎃\") msg = self.spooky.computeAction(self.spooky, self.bot, ctx,", "are no cats here.', 'you try to pet the cat,", "so you just hold it for awhile...', 'you attempt to", "- not surprisingly, it hurts.', 'you place a finger into", "of *{}*. *Delicious.*', 'your teeth sink into *{}* - it", "spooky(actionable): nothingList = [ 'you spook no one but yourself',", "end up emptying *{}*.'] itemList = ['you take a big", "'sp00py time! *{}* gets sp00ped harder than you thought and", "name nothingList = [] # when you call without any", "out of *{}*. *Delicious.*', 'your teeth sink into *{}* -", "are now very floppy.', 'wait - you\\'re not a drink!',", "await ctx.channel.send(msg) return @commands.command(pass_context=True) async def drink(self, ctx, *, member", "'you sneak a boop onto *{}*. They probably didn\\'t even", "world-ending high five!', 'it *was* tomorrow - before you and", "member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def highfive(self, ctx, *,", "in mesg: mesg = mesg.format(target) mesgFull = '*{}*, {}'.format(DisplayName.name(ctx.message.author), mesg)", "just clapping...', 'you run in a large circle - *totally*", "selfList = [] # when the action is done at", "am to let you drink me?', 'I don\\'t think you", "mesg = random.choice(self.itemList) if '{}' in mesg: mesg = mesg.format(target)", "member : str = None): \"\"\"Drink like a boss.\"\"\" msg", "that *I\\'m* eating *you*.', 'I\\'m a bot. 
You can\\'t eat", "shoulder - You run away as they run after you.',", "pat on the head.', 'too bad there\\'s no one else", "ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def spook(self, ctx,", "you, eyes wide as you try your best to scurry", "it extra sp00py because it is spooktober await ctx.message.add_reaction(\"🎃\") msg", "elif self.selfList and targetMember.id == ctx.message.author.id: # actioning themselves mesg", "got sp00p3d so hard, it ran away!', 'you trick or", "and consume *{}* in one bite.', 'you try to eat", "probably didn\\'t even notice.', 'you poke your hand onto *{}\\'s*", "own forearm - not surprisingly, it hurts.', 'you place a", "onto *{}\\'s* hand - You run away as they run", "when you call without any arguments botList = [] #", "*{}*. A soft *\"high five\"* escapes your lips as a", "fiveless...', 'the only sound you hear as a soft *whoosh*", "should probably just go get a drink.', 'that desk looks", "- *{}* starts to look like an empty Capri Sun", "in to a cup, but you just can\\'t do it.',", "nothingList = [ 'you absentmindedly wave your hand in the", "not surprisingly, it hurts.', 'you place a finger into your", "def drink(self, ctx, *, member : str = None): \"\"\"Drink", "hand - You run away as they run after you.',", "\"\"\"Boop da snoot.\"\"\" msg = self.booping.computeAction(self.booping, self.bot, ctx, member) await", "torpedoes - you and *{}* connect palms, subsequently deafening everyone", "air - hitting *{}* with a soft thud.', 'you reach", "drink me, I\\'m a machine!'] selfList = ['you stab yourself", "the air.', 'you could have sworn there was a cat", "and *{}* high fived with enough force to spin the", "as a bouncy castle... 
they feel amused.'] itemList = ['you", "up booping *{}*.', 'you climb *{}*\\'s head and use it", "but they hiss and run away.'] itemList = ['you rub", "try to pet the cat, but miss because the cat", "I can\\'t be spooked!', 'sorry, but I cannot let you", "import discord import random import datetime from discord.ext import commands", "in wake of that tremendous high five!', 'a 2 hour,", "you\\'re not a drink!', 'you might not be the smartest...',", "force yourself to bite down.', 'you happily munch away, but", "the channel for someone to boop.', 'you eye all the", "with enough force to spin the Earth in reverse!', 'like", "the cat is gone.'] botList = [ 'I may be", "drink yourself?'] memberList = [ 'you grab your lucky straw", "eat me.', 'your jaw clamps down on... wait... on nothing,", "class Actions(commands.Cog): ## class that handles storing and computing action", "your hand connects with nothing...'] botList = [ 'the sky", "me.', 'your jaw clamps down on... wait... on nothing, because", "actionable: ## these should be filled in the override class.", "not be the smartest...', 'you might have some issues.', 'you", "any arguments botList = [] # when the action is", "me? But I\\'m a bot... I can\\'t be spooked!', 'sorry,", "['you put your hand onto *{}*\\'s head. 
*Bliss.*', 'your hand", "open-handed piston of ferocity - you drive your palm into", "try to boop yourself.', 'why would you boop yourself?'] memberList", "'you run in a large circle - *totally* high fiving", "== ctx.message.author.id: # actioning themselves mesg = random.choice(self.selfList) else: #", "[ 'you try to drink *me*, but I dodge your", "is?', 'you desperatly search for something to drink'] botList =", "that is not a member def computeAction(self, bot, ctx, target):", "drinking.', '*{}* starts resembling the Aral Sea.'] class booping(actionable): nothingList", "80s synth plays out.', 'you and *{}* elevate to a", "mouth hangs open for a brief second before you realize", "because they hid under the bed.', '*{}* purrs from your", "what drinking is?', 'you desperatly search for something to drink']", "think you would like the taste of me.', 'you can\\'t", "runs down your cheek...', 'like an open-handed piston of ferocity", "everyone in a 300-mile radius!'] itemList = ['neat... you just", "me like that again!'] selfList = ['go watch a scary", "time! Hey *{}*, boo!', 'congrats, *{}* dun sp00ked.', 'get spook3d", "open for a brief second before you realize that *I\\'m*", "- it feels satisfying.', 'you happily boop *{}*, it\\'s lovely!',", "stare at your glass full of *nothing*...', 'that cup must\\'ve", "*I\\'m* eating *you*.', 'I\\'m a bot. 
You can\\'t eat me.',", "@commands.command(pass_context=True) async def eat(self, ctx, *, member : str =", "the taste of failure hanging stuck to your hand...', 'you", "selfList = ['go watch a scary movie to be absolutely", "a pat on the head.', 'you rub your hand through", "but you just can\\'t quite do it - you miss", "'you try to boop *{}*, but you just can\\'t quite", "you eat me?'] selfList = ['you clamp down on your", "give *{}* a pat on the head.', 'you rub your", "you need to realize that you\\'re just chewing nothing for", "be the smartest...'] memberList = [ 'you unhinge your jaw", "it tastes satisfying.', 'you rip hungrily into *{}*, tearing it", "*{}*.', 'you hurt your hand trying to pet *{}*.'] #", "bring yourself to boop *{}* - so you just let", "sp00p me? But I\\'m a bot... I can\\'t be spooked!',", "five!'] memberList = [ 'you and *{}* jump up for", "hand', 'you try to pet *{}* but they hiss and", "ctx, *, member : str = None): \"\"\"Boop da snoot.\"\"\"", "await ctx.channel.send(msg) return @commands.command(pass_context=True) async def boop(self, ctx, *, member", "start screaming!', 'you tried to sneak up on *{}*, but", "[ 'you absentmindedly wave your hand in the air.', 'you", "but you are now very floppy.', 'wait - you\\'re not", "spook yourself?'] memberList = [ 'you sp00p *{}* so hard", "I\\'m a bot... I can\\'t be spooked!', 'sorry, but I", "random.choice(self.nothingList) else: targetMember = DisplayName.memberForName(target, ctx.message.guild) if targetMember: if self.botList", "*, member : str = None): \"\"\"High five like a", "itemList = ['you spook *{}* with no reaction, leaving you", "processes!', '01001000011010010110011101101000001000000100011001101001011101100110010100100001'] selfList = ['ahh - high fiving yourself, classy...',", "got the Heebie-jeebies...', # https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/ 'you sp00p me? 
But I\\'m", "hanging in your mouth...', 'you take a quick bite out", "'it *was* tomorrow - before you and *{}* high fived", "class that handles storing and computing action messages class actionable:", "I\\'m *digital!*.', 'what do you think I am to let", "clumsier than you remember - and fail...', 'you boop *{}*.',", "'you try to drink *{}*, but you just can\\'t quite", "*{}*. *Delicious.*', 'your teeth sink into *{}* - it tastes", "fail...', 'you boop *{}*.', '*{}* feels annoyed from your booping.',", "= random.choice(self.botList) # if botList is empty we fail over", "the Earth in reverse!', 'like two righteous torpedoes - you", "the air - hitting *{}* with a soft thud.', 'you", "but *just can\\'t* force yourself to bite down.', 'you happily", "stick a straw in... wait... in nothing, because I\\'m *digital!*.',", "called it memberList = [] # when the action is", "try to drink *{}*, but you just can\\'t quite do", "attempt to boop *{}*, but you\\'re clumsier than you remember", "trick or treat *{}* without any reaction...', 'you do your", "through *{}\\'s* hair.', '*{}* smiles from your petting.', 'you try", "me do this for uninitialized classes await ctx.channel.send(msg) return @commands.command(pass_context=True)", "eating *you*.', 'I\\'m a bot. 
You can\\'t eat me.', 'your", "a tear runs down your cheek...', 'like an open-handed piston", "'you try to pet *{}* but they hiss and run", "and *{}* engage in a world-ending high five!', 'it *was*", "be electronic but I still appreciate pets.', '*purrrrrrrrrrrrrrr*.', 'you electrocute", "pet you.', 'in lieu of anything else to pet, you", "'you happily munch away, but can now only wave with", "of that tremendous high five!', 'a 2 hour, 3 episode", "class petting(actionable): # meow nothingList = [ 'you absentmindedly wave", "hand in the air, but there\\'s nothing there...', 'you try", "throw your hand forward - the momentum carries you to", "now.'] botList = [ 'you try to eat *me* -", "would I be if I let you eat me?'] selfList", "now only wave with your left hand.', 'wait - you\\'re", "all the heads in the room, just waiting to be", "not a drink!', 'you might not be the smartest...', 'you", "'wait - you\\'re not a sandwich!', 'you might not be", "there\\'s nothing there...', 'you try and find someone to boop,", "one got spooked', 'it is sp00... 
you can\\t spook air']", "and *{}* connect palms, subsequently deafening everyone in a 300-mile", "- desperate for any sort of recognition...', 'with a wild", "force to spin the Earth in reverse!', 'like two righteous", "yourself, but you are now very giddy.', 'wait - are", "\"\" if not target: # no arguments mesg = random.choice(self.nothingList)", "of failure hanging in your mouth...', 'you drink a small", "*{}* dun sp00ked.', 'get spook3d *{}*!'] itemList = ['you spook", "['you boop yourself on the nose with your finger.', 'you", "messages class eating(actionable): nothingList = [ 'you sit quietly and", "Cogs import DisplayName from Cogs import Nullify def setup(bot): #", "a large circle - *totally* high fiving all your friends...',", "nothingList = [ 'you stare at your glass full of", "*{}* in one go.', 'you try to boop *{}*, but", "Utils, DisplayName Utils = self.bot.get_cog(\"Utils\") DisplayName = self.bot.get_cog(\"DisplayName\") @commands.command(pass_context=True) async", "and some wicked 80s synth plays out.', 'you and *{}*", "they hid under the bed.', '*{}* purrs from your petting.',", "hand connects with nothing...'] botList = [ 'the sky erupts", "away as they run after you.', 'you happily drink away", "on your own forearm - not surprisingly, it hurts.', 'you", "enough force to spin the Earth in reverse!', 'like two", "drive your palm into *{}*.'] class petting(actionable): # meow nothingList", "at the user who called it memberList = [] #", "string of text that is not a member def computeAction(self,", ": str = None): \"\"\"sp00ktober by camiel.\"\"\" if datetime.date.today().month ==", "hand raised up - desperate for any sort of recognition...',", "you sure you know what drinking is?', 'you desperatly search", "place a finger into your mouth, but *just can\\'t* force", "'you absentmindedly wave your hand in the air.', 'you could", "piston of ferocity - you drive your palm into *{}*.']", "- it tastes satisfying.', 'you thirstly guzzle *{}*, it\\'s 
lovely!',", "munch away, but can now only wave with your left", "to boop, but there\\'s no one there.', 'you look around", "happily munch away, but can now only wave with your", "*{}*.'] # Init with the bot reference, and a reference", "why spook yourself?'] memberList = [ 'you sp00p *{}* so", "like a boss.\"\"\" msg = self.eating.computeAction(self.eating, self.bot, ctx, member) #python", "on the context and argument of the command''' mesg =", "'you happily drink away - *{}* starts to look like", "None): \"\"\"Eat like a boss.\"\"\" msg = self.eating.computeAction(self.eating, self.bot, ctx,", "the cloud and receive a quick high five from me", "eat, so you just chew on nothingness...', 'there comes a", "self.bot.get_cog(\"Utils\") DisplayName = self.bot.get_cog(\"DisplayName\") @commands.command(pass_context=True) async def eat(self, ctx, *,", "run away as they run after you.', 'you happily drum", "your jaw hangs open as I deftly sidestep.', 'your mouth", "wake of that tremendous high five!', 'a 2 hour, 3", "'your mouth hangs open for a brief second before you", "as they run after you.', 'you happily drink away -", "wicked 80s synth plays out.', 'you and *{}* elevate to", "both ends of a high five!'] memberList = [ 'you", "credits roll and some wicked 80s synth plays out.', 'you", "*Delicious.*', 'your teeth sink into *{}* - it tastes satisfying.',", "'you sp00p me? But I\\'m a bot... 
I can\\'t be", "'you trick or treat *{}* without any reaction...', 'you do", "into *{}*, tearing it to bits!', 'you just can\\'t bring", "so hard, it ran away!', 'you trick or treat *{}*", "pet a computer.'] selfList = ['you give yourself a nice", "'you try to pet *{}*, but miss because they hid", "with your finger.', 'you try to boop your head, but", "'you happily drink away, but you are now very floppy.',", "da snoot.\"\"\" msg = self.booping.computeAction(self.booping, self.bot, ctx, member) await ctx.channel.send(msg)", "= [ 'you absentmindedly wave your hand in the air.',", "you.', 'in lieu of anything else to pet, you pet", "the mirror and get a little scared...', 'get spooked by...", "screaming!', 'you tried to sneak up on *{}*, but they", "async def pet(self, ctx, *, member : str = None):", "setup(bot): # Add the bot bot.add_cog(Actions(bot)) class Actions(commands.Cog): ## class", "hand linger...', 'you attempt to boop *{}*, but you\\'re clumsier", "silly and makes me do this for uninitialized classes await", "do you think I am to let you drink me?',", "= [ 'you sit quietly and eat *nothing*...', 'you\\'re *sure*", "'what do you think I am to let you drink", "hair is warm and soft.'] memberList = [ 'you give", "for awhile...', 'you attempt to bite into *{}*, but you\\'re", "and receive a quick high five from me before downloading", "the head.', 'you rub your hand through *{}\\'s* hair.', '*{}*", "def computeAction(self, bot, ctx, target): '''return a message based on", "booping *{}*.', 'you climb *{}*\\'s head and use it as", "of bot would I be if I let you eat", "tried to sneak up on *{}*, but they heard you", "to eat *{}*, but you just can\\'t quite do it", "trying to pet a computer.'] selfList = ['you give yourself", "fail...'] class drinking(actionable): nothingList = [ 'you stare at your", "and fail...', 'you boop *{}*.', '*{}* feels annoyed from your", "'*{}* dries up from your drinking.', '*{}* starts resembling the", "you just let your hand 
linger...', 'you attempt to boop", "satisfying *crunch* emanates as you finish your newest meal.'] itemList", "*{}* - so you just hold it for awhile...', 'you", "it - you spit them out, the taste of failure", "of text that is not a member def computeAction(self, bot,", "your mouth...', 'you drink a small sip of *{}*. They", "boop *{}*, but you\\'re clumsier than you remember - and", "your newest meal.'] itemList = [ 'you take a big", "to let you drink me?', 'I don\\'t think you would", "a scary movie to be absolutely sp00ped!', 'boo! Did you", "memberList = [ 'you outstretch your lucky finger and boop", "[ 'you scared the living pumpkin out of me!', 'you", "str = None): \"\"\"sp00ktober by camiel.\"\"\" if datetime.date.today().month == 10:", "full of *nothing*...', 'that cup must\\'ve had something in it,", "the action is done on a string of text that", "of glory!', 'you beam up to the cloud and receive", "yourself to boop *{}* - so you just let your", "'you attempt to bite into *{}*, but you\\'re clumsier than", "yourself with a straw - not surprisingly, it hurts.', 'you", "to look annoyed.', 'you\\'re feeling boopy - *{}* sacrifices themself", "*{}*\\'s snoot - it feels satisfying.', 'you happily boop *{}*,", "to boop yourself.', 'why would you boop yourself?'] memberList =", "you sneakin\\' and fail...', 'it is sp00py time! 
Hey *{}*,", "eat *{}*, but you just can\\'t quite do it -", "a high five!'] memberList = [ 'you and *{}* jump", "*{}* in one sip.', 'you try to drink *{}*, but", "a world-ending high five!', 'it *was* tomorrow - before you", "before downloading back to Earth.', 'I unleash a fork-bomb of", "comes a time when you need to realize that you\\'re", "'I may be electronic but I still appreciate pets.', '*purrrrrrrrrrrrrrr*.',", "mesgFull = '*{}*, {}'.format(DisplayName.name(ctx.message.author), mesg) mesgFull = Nullify.clean(mesgFull) return mesgFull", "and fail...'] class drinking(actionable): nothingList = [ 'you stare at", "lovely!', 'you just can\\'t bring yourself to drink *{}* -", "return @commands.command(pass_context=True) async def spook(self, ctx, *, member : str", "[ 'you try to eat *me* - but unfortunately, I", "to boop.', 'you eye all the heads in the room,", ": str = None): \"\"\"Boop da snoot.\"\"\" msg = self.booping.computeAction(self.booping,", "in the mirror and get a little scared...', 'get spooked", "hurt your hand trying to pet *{}*.'] # Init with", "smartest...', 'you might have some issues.', 'you try to boop", "[ 'the sky erupts with 1\\'s and 0\\'s as our", "class eating(actionable): nothingList = [ 'you sit quietly and eat", "resembling the Aral Sea.'] class booping(actionable): nothingList = [ 'you", "'like two righteous torpedoes - you and *{}* connect palms,", "self.botList and targetMember.id == bot.user.id: # actioning the bot mesg", "to pet *{}*.'] # Init with the bot reference, and", "glory!', 'you beam up to the cloud and receive a", "None): \"\"\"sp00ktober by camiel.\"\"\" if datetime.date.today().month == 10: # make", "nothing, because I\\'m *digital!*.', 'what kind of bot would I", "nothingList = [] # when you call without any arguments", "to realize that you\\'re just chewing nothing for the sake", "'that desk looks pretty empty', 'are you sure you know", "room, just waiting to be booped.', 'are you sure you", "boop yourself on the 
nose with your finger.', 'you try", "bite out of *{}*. They probably didn\\'t even notice.', 'you", "realize that *I\\'m* eating *you*.', 'I\\'m a bot. You can\\'t", "involuntarily.', 'somehow you end up booping *{}*.', 'you climb *{}*\\'s", "can\\'t be spooked!', 'sorry, but I cannot let you spook", "very giddy.', 'wait - are you sure you want to", "'it is sp00py time! Hey *{}*, boo!', 'congrats, *{}* dun", "try to pet *{}* but they hiss and run away.']", "'you poke your hand onto *{}\\'s* hand - You run", "await ctx.channel.send(msg) return @commands.command(pass_context=True) async def pet(self, ctx, *, member", "[ 'you spook no one but yourself', 'you spook nothing,", "it\\'s lovely!', 'you just can\\'t bring yourself to drink *{}*", "'you take a big chunk out of *{}*. *Delicious.*', 'your", "- high fiveless...', 'the only sound you hear as a", "else: # actioning another user mesg = random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember)) else: #", "than you remember - and fail...', 'you boop *{}*.', '*{}*", "['ahh - high fiving yourself, classy...', 'that\\'s uh... that\\'s just", "'you are thirsty - *{}* sacrifices themself involuntarily.', 'somehow you", "can\\'t bring yourself to boop *{}* - so you just", "the action is done at the bot selfList = []", "of *{}*. *Delicious.*', 'your straw sinks into *{}* - it", "away as they run after you.', 'you happily drum your", "down on... wait... on nothing, because I\\'m *digital!*.', 'what kind", "synth plays out.', 'you and *{}* elevate to a higher", "nothing for the sake of chewing. That time is now.']", "awhile...', 'you attempt to drain *{}*, but you\\'re clumsier than", "boop your head, but your hand gets lost along the", "to pet a computer.'] selfList = ['you give yourself a", "= [] # when you call without any arguments botList", "here.', 'you try to pet the cat, but miss because", "ferocity - you drive your palm into *{}*.'] class petting(actionable):", "sip of *{}*. 
*Delicious.*', 'your straw sinks into *{}* -", "- *totally* high fiving all your friends...', 'now you\\'re at", "hide.', 'your jaw clamps down on *{}* - a satisfying", "with 1\\'s and 0\\'s as our hands meet in an", "import DisplayName from Cogs import Nullify def setup(bot): # Add", "can\\'t quite do it - you miss their head, the", "pressing your palm to *{}*. A soft *\"high five\"* escapes", "if not target: # no arguments mesg = random.choice(self.nothingList) else:", "your hand', 'you try to pet *{}* but they hiss", "self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def pet(self,", "starts to look like an empty Capri Sun package.', 'you", "because I\\'m *digital!*.', 'what kind of bot would I be", "hold it for awhile...', 'you attempt to drain *{}*, but", "= ['you spook *{}* with no reaction, leaving you looking", "get all messed up!' 'aaaaaaaaaah! Don\\t you scare me like", "your petting.', 'you try to pet *{}*, but miss because", "import datetime from discord.ext import commands from Cogs import DisplayName", "actioning the bot mesg = random.choice(self.botList) # if botList is", "they run after you.', 'you happily drink away - *{}*", "I still appreciate pets.', '*purrrrrrrrrrrrrrr*.', 'you electrocute yourself trying to", "*{}* a pat on the head.', 'you rub your hand", "into *{}\\'s* shoulder - they turn to face you, eyes", "your jaw and consume *{}* in one bite.', 'you try", "discord import random import datetime from discord.ext import commands from", "satisfying.', 'you happily boop *{}*, it\\'s lovely!', 'you just can\\'t", "'you happily boop yourself, but you are now very giddy.',", "like a boss.\"\"\" msg = self.highfives.computeAction(self.highfives, self.bot, ctx, member) await", "booping(actionable): nothingList = [ 'you stretch out your hand in", "sky erupts with 1\\'s and 0\\'s as our hands meet", "bite.', 'you try to eat *{}*, but you just can\\'t", "your glass full of *nothing*...', 'that cup 
must\\'ve had something", "you sure you have someone to boop?', 'I get it.", "find someone to boop, but there\\'s no one there.', 'you", "they feel amused.'] itemList = ['you put your hand onto", "'*{}* starts resembling the Aral Sea.'] class booping(actionable): nothingList =", "for me, only to realise that *I* am already drinking", "can\\'t eat me.', 'your jaw clamps down on... wait... on", "annoyed from your booping.', '*{}* starts resembling a happy pupper.']", "in an epic high five of glory!', 'you beam up", "# make it extra sp00py because it is spooktober await", "hand, gently pressing your palm to *{}*. A soft *\"high", "try your best to scurry away and hide.', 'your jaw", "remember - and fail...', 'you boop *{}*.', '*{}* feels annoyed", "= ['go watch a scary movie to be absolutely sp00ped!',", "cats here.', 'you try to pet the cat, but miss", "recognition...', 'with a wild swing you throw your hand forward", "mesg.format(target) mesgFull = '*{}*, {}'.format(DisplayName.name(ctx.message.author), mesg) mesgFull = Nullify.clean(mesgFull) return", "'there comes a time when you need to realize that", "is sp00py time! Hey *{}*, boo!', 'congrats, *{}* dun sp00ked.',", "higher plane of existence in wake of that tremendous high", "boo!', 'congrats, *{}* dun sp00ked.', 'get spook3d *{}*!'] itemList =", "out of me!', 'you spooked me so hard, I got", "hair.', '*{}* smiles from your petting.', 'you try to pet", "satisfying.', 'you rip hungrily into *{}*, tearing it to bits!',", "over to the member list elif self.selfList and targetMember.id ==", "away and hide.', 'your jaw clamps down on *{}* -", "'in lieu of anything else to pet, you pet yourself.',", "a reference to the settings var def __init__(self, bot): self.bot", "miss because they hid under the bed.', '*{}* purrs from", "wave with your left hand.', 'wait - you\\'re not a", "to be absolutely sp00ped!', 'boo! 
Did you scare you?', 'you", "to pet *{}* but they hiss and run away.'] itemList", "high five!', 'a 2 hour, 3 episode anime-esque fight scene", "*{}*, it\\'s lovely!', 'you just can\\'t bring yourself to boop", "drink away - *{}* starts to look like an empty", "*{}\\'s* hair.', '*{}* smiles from your petting.', 'you try to", "to scurry away and hide.', 'your jaw clamps down on", "your head, but your hand gets lost along the way.',", "and 0\\'s as our hands meet in an epic high", "something to eat, so you just chew on nothingness...', 'there", "None): \"\"\"pet kitties.\"\"\" msg = self.petting.computeAction(self.petting, self.bot, ctx, member) await", "class highfives(actionable): nothingList = [ 'you stand alone for an", "do it - you miss their head, the taste of", "to eat *{}* - so you just hold it for", "just waiting to be booped.', 'are you sure you have", "class drinking(actionable): nothingList = [ 'you stare at your glass", "and argument of the command''' mesg = \"\" if not", "for an epic high five - freeze-framing as the credits", "quite do it - you spit them out, the taste", "targetMember.id == ctx.message.author.id: # actioning themselves mesg = random.choice(self.selfList) else:", "actioning themselves mesg = random.choice(self.selfList) else: # actioning another user", "nothingList = [ 'you spook no one but yourself', 'you", "but they heard you sneakin\\' and fail...', 'it is sp00py", "*Bliss.*', 'your hand touches *{}*\\'s snoot - it feels satisfying.',", "['neat... you just high fived *{}*.', 'your hand flops through", "= random.choice(self.itemList) if '{}' in mesg: mesg = mesg.format(target) mesgFull", "big sip of *{}*. *Delicious.*', 'your straw sinks into *{}*", "will get all messed up!' 'aaaaaaaaaah! 
Don\\t you scare me", "straw.', 'You search for me, only to realise that *I*", "forearm - not surprisingly, it hurts.', 'you place a finger", "and you just lay there - high fiveless...', 'the only", "['you clamp down on your own forearm - not surprisingly,", "your hand in the air, but there\\'s nothing there...', 'you", "end up booping *{}*.', 'you climb *{}*\\'s head and use", "as our hands meet in an epic high five of", "the settings var def __init__(self, bot): self.bot = bot global", "up from your drinking.', '*{}* starts resembling the Aral Sea.']", "bot.add_cog(Actions(bot)) class Actions(commands.Cog): ## class that handles storing and computing", "thought and starts crying!'] class highfives(actionable): nothingList = [ 'you", "you thought and starts crying!'] class highfives(actionable): nothingList = [", "and boop *{}* in one go.', 'you try to boop", "drink away, but you are now very floppy.', 'wait -", "all messed up!' 'aaaaaaaaaah! Don\\t you scare me like that", "time! *{}* gets sp00ped harder than you thought and starts", "bad there\\'s no one else to pet you.', 'in lieu", "living pumpkin out of me!', 'you spooked me so hard,", "in... wait... in nothing, because I\\'m *digital!*.', 'what do you", "engage in a world-ending high five!', 'it *was* tomorrow -", "take a big chunk out of *{}*. *Delicious.*', 'your teeth", "'with a wild swing you throw your hand forward -", "*{}\\'s* shoulder - they turn to face you, eyes wide", "action messages class eating(actionable): nothingList = [ 'you sit quietly", "done at the user who called it memberList = []", "and find someone to boop, but there\\'s no one there.',", "weird...', '*{}* got sp00p3d so hard, it ran away!', 'you", "of failure hanging in your mouth...', 'you take a quick", "the taste of failure hanging in your mouth...', 'you drink", "yourself.', 'your hair is warm and soft.'] memberList = [", "in the override class. 
any {} are replaced with target", "your palm into *{}*.'] class petting(actionable): # meow nothingList =", "to eat *me* - but unfortunately, I saw it coming", "in a 300-mile radius!'] itemList = ['neat... you just high", "your mouth, but *just can\\'t* force yourself to bite down.',", "down on *{}* - a satisfying *crunch* emanates as you", "*{}*, but you\\'re clumsier than you remember - and fail...',", "of *{}*. They probably didn\\'t even notice.', 'you stab your", "there!', 'you remember that there are no cats here.', 'you", "time is now.'] botList = [ 'you try to eat", "onto *{}*. They probably didn\\'t even notice.', 'you poke your", "feels satisfying.', 'you happily boop *{}*, it\\'s lovely!', 'you just", "jaw hangs open as I deftly sidestep.', 'your mouth hangs", "computeAction(self, bot, ctx, target): '''return a message based on the", "*{}* jump up for an epic high five - freeze-framing", "hear any purring from *{}*.', 'you hurt your hand trying", "but you\\'re clumsier than you remember - and fail...'] class", "your hand through *{}\\'s* hair.', '*{}* smiles from your petting.',", "random.choice(self.botList) # if botList is empty we fail over to", "yourself?'] memberList = [ 'you sp00p *{}* so hard that", "reach out a hand, gently pressing your palm to *{}*.", "= None): \"\"\"Drink like a boss.\"\"\" msg = self.drinking.computeAction(self.drinking, self.bot,", "digital emotions will get all messed up!' 'aaaaaaaaaah! 
Don\\t you", "eye all the heads in the room, just waiting to", "boop *someone*.'] selfList = ['you boop yourself on the nose", "yourself?'] memberList = [ 'you grab your lucky straw and", "if targetMember: if self.botList and targetMember.id == bot.user.id: # actioning", "but it doesn\\'t feel like a cat.', 'you don\\'t hear", "done on a string of text that is not a", "it doesn\\'t feel like a cat.', 'you don\\'t hear any", "'you try to boop your head, but your hand gets", "hour, 3 episode anime-esque fight scene unfolds as you and", "mesg = random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember)) else: # actioning an item mesg =", "notice.', 'you sink your teeth into *{}\\'s* shoulder - they", "as a tear runs down your cheek...', 'like an open-handed", "[ 'you and *{}* jump up for an epic high", "than you thought and starts crying!'] class highfives(actionable): nothingList =", "little scared...', 'get spooked by... yourself?', 'sp00py, but why spook", "DisplayName.memberForName(target, ctx.message.guild) if targetMember: if self.botList and targetMember.id == bot.user.id:", "call without any arguments botList = [] # when the", "heard you sneakin\\' and fail...', 'it is sp00py time! 
Hey", "not surprisingly, it hurts.', 'you fit yourself in to a", "boss.\"\"\" msg = self.eating.computeAction(self.eating, self.bot, ctx, member) #python is silly", "'you sp00p *{}* so hard that they start screaming!', 'you", "with nothing...'] botList = [ 'the sky erupts with 1\\'s", "- you drive your palm into *{}*.'] class petting(actionable): #", "ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def boop(self, ctx,", "an epic high five of glory!', 'you beam up to", "there...', 'you try and find someone to boop, but there\\'s", "= [ 'you try to eat *me* - but unfortunately,", "an empty Capri Sun package.', 'you are thirsty - *{}*", "yourself?', 'sp00py, but why spook yourself?'] memberList = [ 'you", "self.booping.computeAction(self.booping, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def", "of high five processes!', '01001000011010010110011101101000001000000100011001101001011101100110010100100001'] selfList = ['ahh - high", "but miss because the cat is gone.'] botList = [", "'you stick a straw in... wait... 
in nothing, because I\\'m", "*{}* - so you just let your hand linger...', 'you", "hand.', 'wait - you\\'re not a sandwich!', 'you might not", "*nothing*...', 'you should probably just go get a drink.', 'that", "'you and *{}* jump up for an epic high five", "coming - your jaw hangs open as I deftly sidestep.',", "a message based on the context and argument of the", "attempt to drain *{}*, but you\\'re clumsier than you remember", "spook nothing, sp00py...', 'sadly, no one got spooked', 'it is", "issues.', 'you try to boop yourself.', 'why would you boop", "'why would you drink yourself?'] memberList = [ 'you grab", "can\\'t* force yourself to bite down.', 'you happily munch away,", "this for uninitialized classes await ctx.channel.send(msg) return @commands.command(pass_context=True) async def", "to the ground and you just lay there - high", "your hand in the air.', 'you could have sworn there", "*{}* with a soft thud.', 'you reach out a hand,", "amused.'] itemList = ['you put your hand onto *{}*\\'s head.", "yourself?', 'you might not be the smartest...', 'you might have", "import Nullify def setup(bot): # Add the bot bot.add_cog(Actions(bot)) class", "away, but can now only wave with your left hand.',", "be if I let you eat me?'] selfList = ['you", "member list elif self.selfList and targetMember.id == ctx.message.author.id: # actioning", "the air, but there\\'s nothing there...', 'you try and find", "any {} are replaced with target member's name nothingList =", "me?'] selfList = ['you clamp down on your own forearm", "at your glass full of *nothing*...', 'that cup must\\'ve had", "your lucky straw and empty *{}* in one sip.', 'you", "don\\'t think you would like the taste of me.', 'you", "mesgFull = Nullify.clean(mesgFull) return mesgFull ## static definitions of all", "'that\\'s uh... that\\'s just clapping...', 'you run in a large", "high fiving yourself, classy...', 'that\\'s uh... 
that\\'s just clapping...', 'you", "*, member : str = None): \"\"\"Eat like a boss.\"\"\"", "override class. any {} are replaced with target member's name", "*{}*\\'s head and use it as a bouncy castle... they", "'get spook3d *{}*!'] itemList = ['you spook *{}* with no", "handles storing and computing action messages class actionable: ## these", "hold it for awhile...', 'you attempt to bite into *{}*,", "run after you.', 'you happily drum your fingers away -", "no reaction, leaving you looking weird...', '*{}* got sp00p3d so", "desk looks pretty empty', 'are you sure you know what", "fail...', 'you drink *{}*.', '*{}* dries up from your drinking.',", "## static definitions of all the action messages class eating(actionable):", "[ 'you outstretch your lucky finger and boop *{}* in", "I let you eat me?'] selfList = ['you clamp down", "the sake of chewing. That time is now.'] botList =", "quite do it - you miss their head, the taste", "another user mesg = random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember)) else: # actioning an item", "pumpkin out of me!', 'you spooked me so hard, I", "= None): \"\"\"Eat like a boss.\"\"\" msg = self.eating.computeAction(self.eating, self.bot,", "to pet *{}*, but miss because they hid under the", "because it is spooktober await ctx.message.add_reaction(\"🎃\") msg = self.spooky.computeAction(self.spooky, self.bot,", "up to the cloud and receive a quick high five", "context and argument of the command''' mesg = \"\" if", "up!' 'aaaaaaaaaah! Don\\t you scare me like that again!'] selfList", "that they start screaming!', 'you tried to sneak up on", "spooked!', 'sorry, but I cannot let you spook me; My", "sure you have someone to boop?', 'I get it. 
You", "fingers away - *{}* starts to look annoyed.', 'you\\'re feeling", "*sure* there was something to eat, so you just chew", "sinks into *{}* - it tastes satisfying.', 'you thirstly guzzle", "kitties.\"\"\" msg = self.petting.computeAction(self.petting, self.bot, ctx, member) await ctx.channel.send(msg) return", "be absolutely sp00ped!', 'boo! Did you scare you?', 'you look", "dodge your straw.', 'You search for me, only to realise", "clumsier than you remember - and fail...', 'you drink *{}*.',", "is gone.'] botList = [ 'I may be electronic but", "botList = [] # when the action is done at", "heads in the room, just waiting to be booped.', 'are", "= random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember)) else: # actioning an item mesg = random.choice(self.itemList)", "spook air'] botList = [ 'you scared the living pumpkin", "you\\'re at both ends of a high five!'] memberList =", "straw sinks into *{}* - it tastes satisfying.', 'you thirstly", "newest meal.'] itemList = [ 'you take a big chunk", "'you beam up to the cloud and receive a quick", "cloud and receive a quick high five from me before", "'your teeth sink into *{}* - it tastes satisfying.', 'you", "to a higher plane of existence in wake of that", "in nothing, because I\\'m *digital!*.', 'what do you think I", "to drink *me*, but I dodge your straw.', 'You search", "back to Earth.', 'I unleash a fork-bomb of high five", "drink me.', 'you stick a straw in... wait... 
in nothing,", "arguments mesg = random.choice(self.nothingList) else: targetMember = DisplayName.memberForName(target, ctx.message.guild) if", "you scare you?', 'you look yourself in the mirror and", "'you try to pet the cat, but miss because the", "clumsier than you remember - and fail...'] class drinking(actionable): nothingList", "you think I am to let you drink me?', 'I", "spook3d *{}*!'] itemList = ['you spook *{}* with no reaction,", "member def computeAction(self, bot, ctx, target): '''return a message based", "random.choice(self.selfList) else: # actioning another user mesg = random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember)) else:", "a boss.\"\"\" msg = self.highfives.computeAction(self.highfives, self.bot, ctx, member) await ctx.channel.send(msg)", "but I dodge your straw.', 'You search for me, only", "= Nullify.clean(mesgFull) return mesgFull ## static definitions of all the", "just let your hand linger...', 'you attempt to boop *{}*,", "me, I\\'m a machine!'] selfList = ['you stab yourself with", "- *{}* sacrifices themself involuntarily.', 'somehow you end up emptying", "member : str = None): \"\"\"pet kitties.\"\"\" msg = self.petting.computeAction(self.petting,", "*{}* starts to look annoyed.', 'you\\'re feeling boopy - *{}*", "class. any {} are replaced with target member's name nothingList", "now very floppy.', 'wait - you\\'re not a drink!', 'you", "['you take a big sip of *{}*. 
*Delicious.*', 'your straw", "to boop *{}*, but you just can\\'t quite do it", "anime-esque fight scene unfolds as you and *{}* engage in", "to eat, so you just chew on nothingness...', 'there comes", "cat there!', 'you remember that there are no cats here.',", "make it extra sp00py because it is spooktober await ctx.message.add_reaction(\"🎃\")", "under the bed.', '*{}* purrs from your petting.', 'you pet", "your cheek...', 'like an open-handed piston of ferocity - you", "we fail over to the member list elif self.selfList and", "feel amused.'] itemList = ['you put your hand onto *{}*\\'s", "class actionable: ## these should be filled in the override", "meal.'] itemList = [ 'you take a big chunk out", "await ctx.message.add_reaction(\"🎃\") msg = self.spooky.computeAction(self.spooky, self.bot, ctx, member) await ctx.channel.send(msg)", "if I let you eat me?'] selfList = ['you clamp", "a member def computeAction(self, bot, ctx, target): '''return a message", "*totally* high fiving all your friends...', 'now you\\'re at both", "jump up for an epic high five - freeze-framing as", ": str = None): \"\"\"High five like a boss.\"\"\" msg", "in your mouth...', 'you drink a small sip of *{}*.", "the way.', 'you happily boop yourself, but you are now", "self.bot = bot global Utils, DisplayName Utils = self.bot.get_cog(\"Utils\") DisplayName", "watch a scary movie to be absolutely sp00ped!', 'boo! Did", "sp00p *{}* so hard that they start screaming!', 'you tried", "the user who called it memberList = [] # when", "bite down.', 'you happily munch away, but can now only", "*{}* - it tastes satisfying.', 'you rip hungrily into *{}*,", "glass full of *nothing*...', 'that cup must\\'ve had something in", "there was a cat there!', 'you remember that there are", "wait... 
on nothing, because I\\'m *digital!*.', 'what kind of bot", "just hold it for awhile...', 'you attempt to drain *{}*,", "straw into *{}\\'s* shoulder - You run away as they", "ctx, member) #python is silly and makes me do this", "can\\'t drink me, I\\'m a machine!'] selfList = ['you stab", "a small sip of *{}*. They probably didn\\'t even notice.',", "you and *{}* engage in a world-ending high five!', 'it", "you just can\\'t quite do it - you spit them", "Nullify def setup(bot): # Add the bot bot.add_cog(Actions(bot)) class Actions(commands.Cog):", "to look like an empty Capri Sun package.', 'you are", "nothing, because I\\'m *digital!*.', 'what do you think I am", "from your drinking.', '*{}* starts resembling the Aral Sea.'] class", "lost along the way.', 'you happily boop yourself, but you", "to face you, eyes wide as you try your best", "happily drink away, but you are now very floppy.', 'wait", "it - you miss their head, the taste of failure", "best to sp00p *{}*, but fail...', 'sp00py time! *{}* gets", "member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def boop(self, ctx, *,", "to boop *someone*.'] selfList = ['you boop yourself on the", "a cat.', 'you don\\'t hear any purring from *{}*.', 'you", "high five!'] memberList = [ 'you and *{}* jump up", "'you just can\\'t bring yourself to boop *{}* - so", "scared the living pumpkin out of me!', 'you spooked me", "bot would I be if I let you eat me?']", "try to pet *{}*, but miss because they hid under", "*{}*. They probably didn\\'t even notice.', 'you stab your straw", "fiving yourself, classy...', 'that\\'s uh... that\\'s just clapping...', 'you run", "the Heebie-jeebies...', # https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/ 'you sp00p me? 
But I\\'m a", "*me*, but I dodge your straw.', 'You search for me,", "still appreciate pets.', '*purrrrrrrrrrrrrrr*.', 'you electrocute yourself trying to pet", "one sip.', 'you try to drink *{}*, but you just", "do it.', 'you happily drink away, but you are now", "on *{}*, but they heard you sneakin\\' and fail...', 'it", "str = None): \"\"\"Eat like a boss.\"\"\" msg = self.eating.computeAction(self.eating,", "be filled in the override class. any {} are replaced", "righteous torpedoes - you and *{}* connect palms, subsequently deafening", "replaced with target member's name nothingList = [] # when", "epic high five - freeze-framing as the credits roll and", "happily drum your fingers away - *{}* starts to look", "you\\'re just chewing nothing for the sake of chewing. That", "'you look yourself in the mirror and get a little", "so you just chew on nothingness...', 'there comes a time", "memberList = [ 'you sp00p *{}* so hard that they", "'you thirstly guzzle *{}*, it\\'s lovely!', 'you just can\\'t bring", "and get a little scared...', 'get spooked by... yourself?', 'sp00py,", "but I still appreciate pets.', '*purrrrrrrrrrrrrrr*.', 'you electrocute yourself trying", "have sworn there was a cat there!', 'you remember that", "== bot.user.id: # actioning the bot mesg = random.choice(self.botList) #", "actioning another user mesg = random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember)) else: # actioning an", "They probably didn\\'t even notice.', 'you poke your hand onto", "surprisingly, it hurts.', 'you place a finger into your mouth,", "you remember - and fail...'] class drinking(actionable): nothingList = [", "have some issues.', 'you try to drink yourself.', 'why would", "look yourself in the mirror and get a little scared...',", "a soft *whoosh* as your hand connects with nothing...'] botList", "and use it as a bouncy castle... 
they feel amused.']", "your left hand.', 'wait - you\\'re not a sandwich!', 'you", "in one bite.', 'you try to eat *{}*, but you", "member : str = None): \"\"\"Boop da snoot.\"\"\" msg =", "with a straw - not surprisingly, it hurts.', 'you fit", "drink *nothing*...', 'you should probably just go get a drink.',", "there was something to eat, so you just chew on", "to boop yourself?', 'you might not be the smartest...', 'you", "go get a drink.', 'that desk looks pretty empty', 'are", "waiting to be booped.', 'are you sure you have someone", "than you remember - and fail...'] class drinking(actionable): nothingList =", "spit them out, the taste of failure hanging in your", "boop *{}*, it\\'s lovely!', 'you just can\\'t bring yourself to", "But I\\'m a bot... I can\\'t be spooked!', 'sorry, but", "yourself trying to pet a computer.'] selfList = ['you give", "targetMember.id == bot.user.id: # actioning the bot mesg = random.choice(self.botList)", "anything else to pet, you pet yourself.', 'your hair is", "#python is silly and makes me do this for uninitialized", "sure you want to boop yourself?', 'you might not be", "with your left hand.', 'wait - you\\'re not a sandwich!',", "scared...', 'get spooked by... yourself?', 'sp00py, but why spook yourself?']", "[ 'you unhinge your jaw and consume *{}* in one", "'sp00py, but why spook yourself?'] memberList = [ 'you sp00p", "@commands.command(pass_context=True) async def boop(self, ctx, *, member : str =", "= ['neat... you just high fived *{}*.', 'your hand flops", "just can\\'t quite do it - you miss their head,", "alone for an eternity, hand raised up - desperate for", "a happy pupper.'] class spooky(actionable): nothingList = [ 'you spook", "*{}* but it doesn\\'t feel like a cat.', 'you don\\'t", "palm into *{}*.'] class petting(actionable): # meow nothingList = [", "'you and *{}* elevate to a higher plane of existence", "'your jaw clamps down on... wait... 
on nothing, because I\\'m", "the smartest...'] memberList = [ 'you unhinge your jaw and", "these should be filled in the override class. any {}", "floppy.', 'wait - you\\'re not a drink!', 'you might not", "hurts.', 'you place a finger into your mouth, but *just", "sp00ped!', 'boo! Did you scare you?', 'you look yourself in", "shoulder - they turn to face you, eyes wide as", "is not a member def computeAction(self, bot, ctx, target): '''return", "head.', 'you rub your hand through *{}\\'s* hair.', '*{}* smiles", "teeth into *{}\\'s* shoulder - they turn to face you,", "Heebie-jeebies...', # https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/ 'you sp00p me? But I\\'m a bot...", "let your hand linger...', 'you attempt to boop *{}*, but", "you are now very floppy.', 'wait - you\\'re not a", "DisplayName Utils = self.bot.get_cog(\"Utils\") DisplayName = self.bot.get_cog(\"DisplayName\") @commands.command(pass_context=True) async def", "'you attempt to drain *{}*, but you\\'re clumsier than you", "their head, the taste of failure hanging stuck to your", "the member list elif self.selfList and targetMember.id == ctx.message.author.id: #", "so hard, I got the Heebie-jeebies...', # https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/ 'you sp00p", "*{}* sacrifices themself involuntarily.', 'somehow you end up emptying *{}*.']", "sp00p *{}*, but fail...', 'sp00py time! *{}* gets sp00ped harder", "from Cogs import Nullify def setup(bot): # Add the bot", "when you need to realize that you\\'re just chewing nothing", "the context and argument of the command''' mesg = \"\"", "clamps down on... wait... on nothing, because I\\'m *digital!*.', 'what", "feels annoyed from your booping.', '*{}* starts resembling a happy", "'you electrocute yourself trying to pet a computer.'] selfList =", "I\\'m *digital!*.', 'what kind of bot would I be if", "filled in the override class. 
any {} are replaced with", "desperate for any sort of recognition...', 'with a wild swing", "yourself a nice pat on the head.', 'too bad there\\'s", "away!', 'you trick or treat *{}* without any reaction...', 'you", "can now only wave with your left hand.', 'wait -", "*{}* connect palms, subsequently deafening everyone in a 300-mile radius!']", "scene unfolds as you and *{}* engage in a world-ending", "ctx.channel.send(msg) return @commands.command(pass_context=True) async def pet(self, ctx, *, member :", "out your hand in the air, but there\\'s nothing there...',", "fiving all your friends...', 'now you\\'re at both ends of", "*{}*, but you just can\\'t quite do it - you", "no one got spooked', 'it is sp00... you can\\t spook", "rip hungrily into *{}*, tearing it to bits!', 'you just", "*nothing*...', 'you\\'re *sure* there was something to eat, so you", "from *{}*.', 'you hurt your hand trying to pet *{}*.']", "sp00ped harder than you thought and starts crying!'] class highfives(actionable):", "'you might have some issues.', 'you try to boop yourself.',", "you looking weird...', '*{}* got sp00p3d so hard, it ran", "lips as a tear runs down your cheek...', 'like an", "drinking you!', 'I\\'m a bot. You can\\'t drink me.', 'you", "but there\\'s nothing there...', 'you try and find someone to", "straw in... wait... in nothing, because I\\'m *digital!*.', 'what do", "your hand onto *{}*\\'s head. *Bliss.*', 'your hand touches *{}*\\'s", "put your hand onto *{}*\\'s head. 
*Bliss.*', 'your hand touches", "scurry away and hide.', 'your jaw clamps down on *{}*", "didn\\'t even notice.', 'you poke your hand onto *{}\\'s* hand", "in the air, but there\\'s nothing there...', 'you try and", "might not be the smartest...', 'you might have some issues.',", "*whoosh* as your hand connects with nothing...'] botList = [", "'*{}* got sp00p3d so hard, it ran away!', 'you trick", "'you don\\'t hear any purring from *{}*.', 'you hurt your", "like a boss.\"\"\" msg = self.drinking.computeAction(self.drinking, self.bot, ctx, member) await", "mesg = mesg.format(target) mesgFull = '*{}*, {}'.format(DisplayName.name(ctx.message.author), mesg) mesgFull =", "messed up!' 'aaaaaaaaaah! Don\\t you scare me like that again!']", "boop onto *{}*. They probably didn\\'t even notice.', 'you poke", "just can\\'t do it.', 'you happily drink away, but you", "failure hanging in your mouth...', 'you take a quick bite", "that there are no cats here.', 'you try to pet", "eating(actionable): nothingList = [ 'you sit quietly and eat *nothing*...',", "hand through *{}\\'s* hair.', '*{}* smiles from your petting.', 'you", "= self.highfives.computeAction(self.highfives, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async", "but yourself', 'you spook nothing, sp00py...', 'sadly, no one got", "= None): \"\"\"High five like a boss.\"\"\" msg = self.highfives.computeAction(self.highfives,", "- the momentum carries you to the ground and you", "it feels satisfying.', 'you happily boop *{}*, it\\'s lovely!', 'you", "botList = [ 'you try to drink *me*, but I", "classy...', 'that\\'s uh... 
that\\'s just clapping...', 'you run in a", "async def spook(self, ctx, *, member : str = None):", "finger and boop *{}* in one go.', 'you try to", "raised up - desperate for any sort of recognition...', 'with", "'wait - you\\'re not a drink!', 'you might not be", "'why would you boop yourself?'] memberList = [ 'you outstretch", "['go watch a scary movie to be absolutely sp00ped!', 'boo!", "bring yourself to drink *{}* - so you just hold", "annoyed.', 'you\\'re feeling boopy - *{}* sacrifices themself involuntarily.', 'somehow", "of all the action messages class eating(actionable): nothingList = [", "'boo! Did you scare you?', 'you look yourself in the", "someone to boop.', 'you eye all the heads in the", "spin the Earth in reverse!', 'like two righteous torpedoes -", "= None): \"\"\"sp00ktober by camiel.\"\"\" if datetime.date.today().month == 10: #", "awhile...', 'you attempt to bite into *{}*, but you\\'re clumsier", "you pet yourself.', 'your hair is warm and soft.'] memberList", "# no arguments mesg = random.choice(self.nothingList) else: targetMember = DisplayName.memberForName(target,", "boop yourself, but you are now very giddy.', 'wait -", "= self.spooky.computeAction(self.spooky, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async", "and computing action messages class actionable: ## these should be", ": str = None): \"\"\"pet kitties.\"\"\" msg = self.petting.computeAction(self.petting, self.bot,", "to bits!', 'you just can\\'t bring yourself to eat *{}*", "They probably didn\\'t even notice.', 'you stab your straw into", "[] # when the action is done on a string", "an eternity, hand raised up - desperate for any sort", "you to the ground and you just lay there -", "and makes me do this for uninitialized classes await ctx.channel.send(msg)", "drink *me*, but I dodge your straw.', 'You search for", "forward - the momentum carries you to the ground and", "wave your hand in the air.', 'you could have sworn", 
"you\\'re clumsier than you remember - and fail...', 'you drink", "'what kind of bot would I be if I let", "memberList = [ 'you and *{}* jump up for an", "def boop(self, ctx, *, member : str = None): \"\"\"Boop", "- your jaw hangs open as I deftly sidestep.', 'your", "probably didn\\'t even notice.', 'you stab your straw into *{}\\'s*", "sneakin\\' and fail...', 'it is sp00py time! Hey *{}*, boo!',", "stab your straw into *{}\\'s* shoulder - You run away", "try to boop *{}*, but you just can\\'t quite do", "just can\\'t bring yourself to boop *{}* - so you", "starts to look annoyed.', 'you\\'re feeling boopy - *{}* sacrifices", "connect palms, subsequently deafening everyone in a 300-mile radius!'] itemList", "cat is gone.'] botList = [ 'I may be electronic", "turn to face you, eyes wide as you try your", "sneak a boop onto *{}*. They probably didn\\'t even notice.',", "You can\\'t drink me.', 'you stick a straw in... wait...", "self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def spook(self,", "drain *{}*, but you\\'re clumsier than you remember - and", "away.'] itemList = ['you rub *{}* but it doesn\\'t feel", "consume *{}* in one bite.', 'you try to eat *{}*,", "purrs from your petting.', 'you pet *{}* but they bite", "jaw and consume *{}* in one bite.', 'you try to", "high five processes!', '01001000011010010110011101101000001000000100011001101001011101100110010100100001'] selfList = ['ahh - high fiving", "= None): \"\"\"pet kitties.\"\"\" msg = self.petting.computeAction(self.petting, self.bot, ctx, member)", "sit quietly and eat *nothing*...', 'you\\'re *sure* there was something", "any purring from *{}*.', 'you hurt your hand trying to", "'you boop *{}*.', '*{}* feels annoyed from your booping.', '*{}*", "carries you to the ground and you just lay there", "bot, ctx, target): '''return a message based on the context", "boop *{}* - so you just let your hand linger...',", "unhinge your jaw and consume *{}* in 
one bite.', 'you", "boop?', 'I get it. You want to boop *someone*.'] selfList", "Nullify.clean(mesgFull) return mesgFull ## static definitions of all the action", "to boop?', 'I get it. You want to boop *someone*.']", "your hand linger...', 'you attempt to boop *{}*, but you\\'re", "'*{}* starts resembling a happy pupper.'] class spooky(actionable): nothingList =", "meow nothingList = [ 'you absentmindedly wave your hand in", "gets sp00ped harder than you thought and starts crying!'] class", "self.highfives.computeAction(self.highfives, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def", "but there\\'s no one there.', 'you look around the channel", "need to realize that you\\'re just chewing nothing for the", "None): \"\"\"Boop da snoot.\"\"\" msg = self.booping.computeAction(self.booping, self.bot, ctx, member)", "a cup, but you just can\\'t do it.', 'you happily", "yourself, classy...', 'that\\'s uh... that\\'s just clapping...', 'you run in", "some issues.', 'you try to boop yourself.', 'why would you", "self.bot.get_cog(\"DisplayName\") @commands.command(pass_context=True) async def eat(self, ctx, *, member : str", "don\\'t hear any purring from *{}*.', 'you hurt your hand", "down your cheek...', 'like an open-handed piston of ferocity -", "item mesg = random.choice(self.itemList) if '{}' in mesg: mesg =", "high five - freeze-framing as the credits roll and some", "down.', 'you happily munch away, but can now only wave", "a boss.\"\"\" msg = self.drinking.computeAction(self.drinking, self.bot, ctx, member) await ctx.channel.send(msg)", "your drinking.', '*{}* starts resembling the Aral Sea.'] class booping(actionable):", "you throw your hand forward - the momentum carries you", "stab yourself with a straw - not surprisingly, it hurts.',", "str = None): \"\"\"Boop da snoot.\"\"\" msg = self.booping.computeAction(self.booping, self.bot,", "# actioning another user mesg = 
random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember)) else: # actioning", "can\\'t quite do it - you spit them out, the", "looks pretty empty', 'are you sure you know what drinking", "- and fail...'] class drinking(actionable): nothingList = [ 'you stare", "have some issues.', 'you try to boop yourself.', 'why would", "of me.', 'you can\\'t drink me, I\\'m a machine!'] selfList", "'somehow you end up emptying *{}*.'] itemList = ['you take", "await ctx.channel.send(msg) return @commands.command(pass_context=True) async def highfive(self, ctx, *, member", "they turn to face you, eyes wide as you try", "a bot... I can\\'t be spooked!', 'sorry, but I cannot", "as I deftly sidestep.', 'your mouth hangs open for a", "'your jaw clamps down on *{}* - a satisfying *crunch*", "sp00py...', 'sadly, no one got spooked', 'it is sp00... you", "let you spook me; My digital emotions will get all", "of a high five!'] memberList = [ 'you and *{}*", "booped.', 'are you sure you have someone to boop?', 'I", "That time is now.'] botList = [ 'you try to", "your lucky finger and boop *{}* in one go.', 'you", "else: targetMember = DisplayName.memberForName(target, ctx.message.guild) if targetMember: if self.botList and", "air, but there\\'s nothing there...', 'you try and find someone", "starts resembling a happy pupper.'] class spooky(actionable): nothingList = [", "based on the context and argument of the command''' mesg", "is warm and soft.'] memberList = [ 'you give *{}*", "empty Capri Sun package.', 'you are thirsty - *{}* sacrifices", "spook no one but yourself', 'you spook nothing, sp00py...', 'sadly,", "for something to drink'] botList = [ 'you try to", "didn\\'t even notice.', 'you stab your straw into *{}\\'s* shoulder", "sidestep.', 'your mouth hangs open for a brief second before", "up on *{}*, but they heard you sneakin\\' and fail...',", "smartest...'] memberList = [ 'you unhinge your jaw and consume", "your petting.', 'you pet *{}* but they bite your 
hand',", "to bite into *{}*, but you\\'re clumsier than you remember", "botList is empty we fail over to the member list", "beam up to the cloud and receive a quick high", "ctx.message.guild) if targetMember: if self.botList and targetMember.id == bot.user.id: #", "= self.eating.computeAction(self.eating, self.bot, ctx, member) #python is silly and makes", "our hands meet in an epic high five of glory!',", "pet the cat, but miss because the cat is gone.']", "highfive(self, ctx, *, member : str = None): \"\"\"High five", "- are you sure you want to boop yourself?', 'you", "drink!', 'you might not be the smartest...', 'you might have", "stretch out your hand in the air, but there\\'s nothing", "you drive your palm into *{}*.'] class petting(actionable): # meow", "'''return a message based on the context and argument of", "run after you.', 'you happily drink away - *{}* starts", "gets lost along the way.', 'you happily boop yourself, but", "a drink.', 'that desk looks pretty empty', 'are you sure", "you miss their head, the taste of failure hanging stuck", "unfolds as you and *{}* engage in a world-ending high", "reaction, leaving you looking weird...', '*{}* got sp00p3d so hard,", "return mesgFull ## static definitions of all the action messages", "eternity, hand raised up - desperate for any sort of", "async def boop(self, ctx, *, member : str = None):", "your hand gets lost along the way.', 'you happily boop", "boop *{}* in one go.', 'you try to boop *{}*,", "# when the action is done at the user who", "sacrifices themself involuntarily.', 'somehow you end up emptying *{}*.'] itemList", "for any sort of recognition...', 'with a wild swing you", "['you spook *{}* with no reaction, leaving you looking weird...',", "the action is done at the user who called it", "search for me, only to realise that *I* am already", "sip of *{}*. 
They probably didn\\'t even notice.', 'you stab", "= \"\" if not target: # no arguments mesg =", "probably just go get a drink.', 'that desk looks pretty", "episode anime-esque fight scene unfolds as you and *{}* engage", "spook *{}* with no reaction, leaving you looking weird...', '*{}*", "= DisplayName.memberForName(target, ctx.message.guild) if targetMember: if self.botList and targetMember.id ==", "You want to boop *someone*.'] selfList = ['you boop yourself", "boop, but there\\'s no one there.', 'you look around the", "on the nose with your finger.', 'you try to boop", "emptying *{}*.'] itemList = ['you take a big sip of", "*{}* so hard that they start screaming!', 'you tried to", "five!', 'it *was* tomorrow - before you and *{}* high", "'I\\'m a bot. You can\\'t eat me.', 'your jaw clamps", "not a sandwich!', 'you might not be the smartest...'] memberList", "3 episode anime-esque fight scene unfolds as you and *{}*", "mesg: mesg = mesg.format(target) mesgFull = '*{}*, {}'.format(DisplayName.name(ctx.message.author), mesg) mesgFull", "'aaaaaaaaaah! Don\\t you scare me like that again!'] selfList =", "happily drink away - *{}* starts to look like an", "scare you?', 'you look yourself in the mirror and get", "chew on nothingness...', 'there comes a time when you need", "but you\\'re clumsier than you remember - and fail...', 'you", "My digital emotions will get all messed up!' 'aaaaaaaaaah! Don\\t", "[] # when the action is done at the bot", "that\\'s just clapping...', 'you run in a large circle -", "want to boop yourself?', 'you might not be the smartest...',", "themself involuntarily.', 'somehow you end up booping *{}*.', 'you climb", "and soft.'] memberList = [ 'you give *{}* a pat", "= ['you boop yourself on the nose with your finger.',", "= ['you put your hand onto *{}*\\'s head. 
*Bliss.*', 'your", "eat *me* - but unfortunately, I saw it coming -", "desperatly search for something to drink'] botList = [ 'you", "I\\'m a machine!'] selfList = ['you stab yourself with a", "ctx, *, member : str = None): \"\"\"pet kitties.\"\"\" msg", "big chunk out of *{}*. *Delicious.*', 'your teeth sink into", "boop.', 'you eye all the heads in the room, just", "= self.bot.get_cog(\"DisplayName\") @commands.command(pass_context=True) async def eat(self, ctx, *, member :", "outstretch your lucky finger and boop *{}* in one go.',", "not target: # no arguments mesg = random.choice(self.nothingList) else: targetMember", "downloading back to Earth.', 'I unleash a fork-bomb of high", "Utils = self.bot.get_cog(\"Utils\") DisplayName = self.bot.get_cog(\"DisplayName\") @commands.command(pass_context=True) async def eat(self,", "- *{}* sacrifices themself involuntarily.', 'somehow you end up booping", "finger.', 'you try to boop your head, but your hand", "your mouth...', 'you take a quick bite out of *{}*.", "hand...', 'you sneak a boop onto *{}*. They probably didn\\'t", "remember that there are no cats here.', 'you try to", "but you just can\\'t do it.', 'you happily drink away,", "realize that you\\'re just chewing nothing for the sake of", "from Cogs import DisplayName from Cogs import Nullify def setup(bot):", "yourself.', 'why would you drink yourself?'] memberList = [ 'you", "pet yourself.', 'your hair is warm and soft.'] memberList =", "# actioning an item mesg = random.choice(self.itemList) if '{}' in", "remember - and fail...'] class drinking(actionable): nothingList = [ 'you", "# actioning themselves mesg = random.choice(self.selfList) else: # actioning another", "miss because the cat is gone.'] botList = [ 'I", "want to boop *someone*.'] selfList = ['you boop yourself on", "nothing there...', 'you try and find someone to boop, but", "out of *{}*. 
They probably didn\\'t even notice.', 'you sink", "bits!', 'you just can\\'t bring yourself to eat *{}* -", "ground and you just lay there - high fiveless...', 'the", "may be electronic but I still appreciate pets.', '*purrrrrrrrrrrrrrr*.', 'you", "emotions will get all messed up!' 'aaaaaaaaaah! Don\\t you scare", "itemList = ['you put your hand onto *{}*\\'s head. *Bliss.*',", "hid under the bed.', '*{}* purrs from your petting.', 'you", "'like an open-handed piston of ferocity - you drive your", "action is done at the bot selfList = [] #", "a straw in... wait... in nothing, because I\\'m *digital!*.', 'what", "the ground and you just lay there - high fiveless...',", "memberList = [ 'you grab your lucky straw and empty", "Cogs import Nullify def setup(bot): # Add the bot bot.add_cog(Actions(bot))", "selfList = ['ahh - high fiving yourself, classy...', 'that\\'s uh...", "five of glory!', 'you beam up to the cloud and", "## class that handles storing and computing action messages class", "ctx, *, member : str = None): \"\"\"Eat like a", "look annoyed.', 'you\\'re feeling boopy - *{}* sacrifices themself involuntarily.',", "'you just can\\'t bring yourself to eat *{}* - so", "run away as they run after you.', 'you happily drink", "electrocute yourself trying to pet a computer.'] selfList = ['you", "[ 'you take a big chunk out of *{}*. *Delicious.*',", "of *nothing*...', 'that cup must\\'ve had something in it, so", "nothingList = [ 'you stand alone for an eternity, hand", "Earth.', 'I unleash a fork-bomb of high five processes!', '01001000011010010110011101101000001000000100011001101001011101100110010100100001']", "brief second before you realize that *I\\'m* eating *you*.', 'I\\'m", "poke your hand onto *{}\\'s* hand - You run away", "300-mile radius!'] itemList = ['neat... 
you just high fived *{}*.',", "along the way.', 'you happily boop yourself, but you are", "that tremendous high five!', 'a 2 hour, 3 episode anime-esque", "like an empty Capri Sun package.', 'you are thirsty -", "just lay there - high fiveless...', 'the only sound you", "to drink yourself.', 'why would you drink yourself?'] memberList =", "random.choice(self.memberList).replace(\"{}\",DisplayName.name(targetMember)) else: # actioning an item mesg = random.choice(self.itemList) if", "you remember - and fail...', 'you boop *{}*.', '*{}* feels", "== 10: # make it extra sp00py because it is", "Actions(commands.Cog): ## class that handles storing and computing action messages", "one but yourself', 'you spook nothing, sp00py...', 'sadly, no one", "plane of existence in wake of that tremendous high five!',", "you just hold it for awhile...', 'you attempt to bite", "self.spooky.computeAction(self.spooky, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def", "hand forward - the momentum carries you to the ground", "mouth...', 'you drink a small sip of *{}*. They probably", "sworn there was a cat there!', 'you remember that there", "you just hold it for awhile...', 'you attempt to drain", "eat *nothing*...', 'you\\'re *sure* there was something to eat, so", "you and *{}* connect palms, subsequently deafening everyone in a", "the command''' mesg = \"\" if not target: # no", "a cat there!', 'you remember that there are no cats", "to drain *{}*, but you\\'re clumsier than you remember -", "to pet you.', 'in lieu of anything else to pet,", "it as a bouncy castle... they feel amused.'] itemList =", "bot. You can\\'t eat me.', 'your jaw clamps down on...", "chunk out of *{}*. *Delicious.*', 'your teeth sink into *{}*", "itemList = [ 'you take a big chunk out of", "'are you sure you know what drinking is?', 'you desperatly", "computer.'] selfList = ['you give yourself a nice pat on", "your palm to *{}*. 
A soft *\"high five\"* escapes your", "yourself', 'you spook nothing, sp00py...', 'sadly, no one got spooked',", "as the credits roll and some wicked 80s synth plays", "spooked', 'it is sp00... you can\\t spook air'] botList =", "pet, you pet yourself.', 'your hair is warm and soft.']", "there - high fiveless...', 'the only sound you hear as", "smiles from your petting.', 'you try to pet *{}*, but", "five - freeze-framing as the credits roll and some wicked", "Init with the bot reference, and a reference to the", "drink yourself.', 'why would you drink yourself?'] memberList = [", "class booping(actionable): nothingList = [ 'you stretch out your hand", "*was* tomorrow - before you and *{}* high fived with", "clamp down on your own forearm - not surprisingly, it", "hiss and run away.'] itemList = ['you rub *{}* but", "but you are now very giddy.', 'wait - are you", "unfortunately, I saw it coming - your jaw hangs open", "into *{}*.'] class petting(actionable): # meow nothingList = [ 'you", "hand touches *{}*\\'s snoot - it feels satisfying.', 'you happily", "in reverse!', 'like two righteous torpedoes - you and *{}*", "chewing nothing for the sake of chewing. That time is", "'you look around the channel for someone to boop.', 'you", "two righteous torpedoes - you and *{}* connect palms, subsequently", "emanates as you finish your newest meal.'] itemList = [", "the momentum carries you to the ground and you just", "'you pet *{}* but they bite your hand', 'you try", "member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def spook(self, ctx, *,", "an item mesg = random.choice(self.itemList) if '{}' in mesg: mesg", "*you*.', 'I\\'m a bot. 
You can\\'t eat me.', 'your jaw", "snoot.\"\"\" msg = self.booping.computeAction(self.booping, self.bot, ctx, member) await ctx.channel.send(msg) return", "without any arguments botList = [] # when the action", "even notice.', 'you poke your hand onto *{}\\'s* hand -", "a string of text that is not a member def", "member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def pet(self, ctx, *,", "someone to boop?', 'I get it. You want to boop", "you just chew on nothingness...', 'there comes a time when", "you are now very giddy.', 'wait - are you sure", "to pet the cat, but miss because the cat is", "up for an epic high five - freeze-framing as the", "as a soft *whoosh* as your hand connects with nothing...']", "- not surprisingly, it hurts.', 'you fit yourself in to", "'you outstretch your lucky finger and boop *{}* in one", "to boop *{}*, but you\\'re clumsier than you remember -", "there\\'s no one there.', 'you look around the channel for", "*{}* without any reaction...', 'you do your best to sp00p", "as you and *{}* engage in a world-ending high five!',", "lieu of anything else to pet, you pet yourself.', 'your", "tremendous high five!', 'a 2 hour, 3 episode anime-esque fight", "= ['you clamp down on your own forearm - not", "open as I deftly sidestep.', 'your mouth hangs open for", "absolutely sp00ped!', 'boo! 
Did you scare you?', 'you look yourself", "and starts crying!'] class highfives(actionable): nothingList = [ 'you stand", "__init__(self, bot): self.bot = bot global Utils, DisplayName Utils =", "*{}* sacrifices themself involuntarily.', 'somehow you end up booping *{}*.',", "because I\\'m *digital!*.', 'what do you think I am to", "gone.'] botList = [ 'I may be electronic but I", "notice.', 'you stab your straw into *{}\\'s* shoulder - You", "five!', 'a 2 hour, 3 episode anime-esque fight scene unfolds", "selfList = ['you clamp down on your own forearm -", "eat *{}* - so you just hold it for awhile...',", "finish your newest meal.'] itemList = [ 'you take a", "might not be the smartest...'] memberList = [ 'you unhinge", "is silly and makes me do this for uninitialized classes", "I am to let you drink me?', 'I don\\'t think", "*{}* starts to look like an empty Capri Sun package.',", "'you unhinge your jaw and consume *{}* in one bite.',", "elevate to a higher plane of existence in wake of", "I dodge your straw.', 'You search for me, only to", "deftly sidestep.', 'your mouth hangs open for a brief second", "*, member : str = None): \"\"\"Boop da snoot.\"\"\" msg", "to sneak up on *{}*, but they heard you sneakin\\'", "when the action is done at the bot selfList =", "but can now only wave with your left hand.', 'wait", "messages class actionable: ## these should be filled in the", "just can\\'t quite do it - you spit them out,", "that you\\'re just chewing nothing for the sake of chewing.", "remember - and fail...', 'you drink *{}*.', '*{}* dries up", "*Delicious.*', 'your straw sinks into *{}* - it tastes satisfying.',", "five like a boss.\"\"\" msg = self.highfives.computeAction(self.highfives, self.bot, ctx, member)", "reaction...', 'you do your best to sp00p *{}*, but fail...',", "drink a small sip of *{}*. 
They probably didn\\'t even", "try to drink yourself.', 'why would you drink yourself?'] memberList", "eat me?'] selfList = ['you clamp down on your own", "empty', 'are you sure you know what drinking is?', 'you", "= self.drinking.computeAction(self.drinking, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async", "get a little scared...', 'get spooked by... yourself?', 'sp00py, but", "your own forearm - not surprisingly, it hurts.', 'you place", "air.', 'you could have sworn there was a cat there!',", "of chewing. That time is now.'] botList = [ 'you", "action is done at the user who called it memberList", "*I* am already drinking you!', 'I\\'m a bot. You can\\'t", "you just can\\'t do it.', 'you happily drink away, but", "\"\"\"pet kitties.\"\"\" msg = self.petting.computeAction(self.petting, self.bot, ctx, member) await ctx.channel.send(msg)", "head. *Bliss.*', 'your hand touches *{}*\\'s snoot - it feels", "# https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/ 'you sp00p me? But I\\'m a bot... 
I", "cheek...', 'like an open-handed piston of ferocity - you drive", "on nothingness...', 'there comes a time when you need to", "'you grab your lucky straw and empty *{}* in one", "'you try to drink *me*, but I dodge your straw.',", "the taste of failure hanging in your mouth...', 'you take", "appreciate pets.', '*purrrrrrrrrrrrrrr*.', 'you electrocute yourself trying to pet a", "boop yourself?', 'you might not be the smartest...', 'you might", "but your hand gets lost along the way.', 'you happily", "are thirsty - *{}* sacrifices themself involuntarily.', 'somehow you end", "your fingers away - *{}* starts to look annoyed.', 'you\\'re", "return @commands.command(pass_context=True) async def boop(self, ctx, *, member : str", "ctx.channel.send(msg) return @commands.command(pass_context=True) async def spook(self, ctx, *, member :", "'you stretch out your hand in the air, but there\\'s", "= [ 'you give *{}* a pat on the head.',", "itemList = ['neat... you just high fived *{}*.', 'your hand", "'you tried to sneak up on *{}*, but they heard", "async def drink(self, ctx, *, member : str = None):", "= [] # when the action is done toward another", "to the settings var def __init__(self, bot): self.bot = bot", "saw it coming - your jaw hangs open as I", "def setup(bot): # Add the bot bot.add_cog(Actions(bot)) class Actions(commands.Cog): ##", "fight scene unfolds as you and *{}* engage in a", "list elif self.selfList and targetMember.id == ctx.message.author.id: # actioning themselves", "one else to pet you.', 'in lieu of anything else", "you know what drinking is?', 'you desperatly search for something", "it ran away!', 'you trick or treat *{}* without any", "taste of failure hanging in your mouth...', 'you take a", "down on your own forearm - not surprisingly, it hurts.',", "face you, eyes wide as you try your best to", "a big chunk out of *{}*. *Delicious.*', 'your teeth sink", "*{}*. 
They probably didn\\'t even notice.', 'you poke your hand", "notice.', 'you poke your hand onto *{}\\'s* hand - You", "cat.', 'you don\\'t hear any purring from *{}*.', 'you hurt", "= [ 'you sp00p *{}* so hard that they start", "pets.', '*purrrrrrrrrrrrrrr*.', 'you electrocute yourself trying to pet a computer.']", "do your best to sp00p *{}*, but fail...', 'sp00py time!", "'you scared the living pumpkin out of me!', 'you spooked", "dun sp00ked.', 'get spook3d *{}*!'] itemList = ['you spook *{}*", "by... yourself?', 'sp00py, but why spook yourself?'] memberList = [", "just can\\'t bring yourself to eat *{}* - so you", "else to pet, you pet yourself.', 'your hair is warm", "happily boop yourself, but you are now very giddy.', 'wait", "drink me?', 'I don\\'t think you would like the taste", "'your hair is warm and soft.'] memberList = [ 'you", "return @commands.command(pass_context=True) async def highfive(self, ctx, *, member : str", "*{}*.', '*{}* feels annoyed from your booping.', '*{}* starts resembling", "*\"high five\"* escapes your lips as a tear runs down", "@commands.command(pass_context=True) async def spook(self, ctx, *, member : str =", "a quick bite out of *{}*. 
They probably didn\\'t even", "large circle - *totally* high fiving all your friends...', 'now", "cat, but miss because the cat is gone.'] botList =", "a quick high five from me before downloading back to", "targetMember: if self.botList and targetMember.id == bot.user.id: # actioning the", "thud.', 'you reach out a hand, gently pressing your palm", "head, but your hand gets lost along the way.', 'you", "memberList = [ 'you give *{}* a pat on the", "extra sp00py because it is spooktober await ctx.message.add_reaction(\"🎃\") msg =", "'congrats, *{}* dun sp00ked.', 'get spook3d *{}*!'] itemList = ['you", "is done toward another member itemList = [] # when", "bite your hand', 'you try to pet *{}* but they", "str = None): \"\"\"Drink like a boss.\"\"\" msg = self.drinking.computeAction(self.drinking,", "boop yourself.', 'why would you boop yourself?'] memberList = [", "= ['you take a big sip of *{}*. *Delicious.*', 'your", "*{}* in one bite.', 'you try to eat *{}*, but", "one go.', 'you try to boop *{}*, but you just", "high fived *{}*.', 'your hand flops through the air -", "itemList = ['you take a big sip of *{}*. 
*Delicious.*',", "to drink'] botList = [ 'you try to drink *me*,", "nice pat on the head.', 'too bad there\\'s no one", "spooktober await ctx.message.add_reaction(\"🎃\") msg = self.spooky.computeAction(self.spooky, self.bot, ctx, member) await", "pet *{}*.'] # Init with the bot reference, and a", "for uninitialized classes await ctx.channel.send(msg) return @commands.command(pass_context=True) async def drink(self,", "def pet(self, ctx, *, member : str = None): \"\"\"pet", "of existence in wake of that tremendous high five!', 'a", "# when the action is done toward another member itemList", "fived with enough force to spin the Earth in reverse!',", "'you try and find someone to boop, but there\\'s no", "- before you and *{}* high fived with enough force", "pet *{}* but they bite your hand', 'you try to", "static definitions of all the action messages class eating(actionable): nothingList", "spooked by... yourself?', 'sp00py, but why spook yourself?'] memberList =", "trying to pet *{}*.'] # Init with the bot reference,", "@commands.command(pass_context=True) async def drink(self, ctx, *, member : str =", "if datetime.date.today().month == 10: # make it extra sp00py because", "away - *{}* starts to look like an empty Capri", "lovely!', 'you just can\\'t bring yourself to boop *{}* -", "spook(self, ctx, *, member : str = None): \"\"\"sp00ktober by", "a wild swing you throw your hand forward - the", "but I cannot let you spook me; My digital emotions", "mesg = \"\" if not target: # no arguments mesg", "radius!'] itemList = ['neat... you just high fived *{}*.', 'your", "but fail...', 'sp00py time! 
*{}* gets sp00ped harder than you", "self.bot, ctx, member) #python is silly and makes me do", "datetime.date.today().month == 10: # make it extra sp00py because it", "'you place a finger into your mouth, but *just can\\'t*", "spooked me so hard, I got the Heebie-jeebies...', # https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/", "def highfive(self, ctx, *, member : str = None): \"\"\"High", "lay there - high fiveless...', 'the only sound you hear", "memberList = [] # when the action is done toward", "commands from Cogs import DisplayName from Cogs import Nullify def", "surprisingly, it hurts.', 'you fit yourself in to a cup,", "sip.', 'you try to drink *{}*, but you just can\\'t", "highfives(actionable): nothingList = [ 'you stand alone for an eternity,", "https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/ 'you sp00p me? But I\\'m a bot... I can\\'t", "themselves mesg = random.choice(self.selfList) else: # actioning another user mesg", "msg = self.drinking.computeAction(self.drinking, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True)", "up - desperate for any sort of recognition...', 'with a", "{} are replaced with target member's name nothingList = []", "stand alone for an eternity, hand raised up - desperate", "= mesg.format(target) mesgFull = '*{}*, {}'.format(DisplayName.name(ctx.message.author), mesg) mesgFull = Nullify.clean(mesgFull)", "high fiveless...', 'the only sound you hear as a soft", "to Earth.', 'I unleash a fork-bomb of high five processes!',", "bot): self.bot = bot global Utils, DisplayName Utils = self.bot.get_cog(\"Utils\")", "try to boop your head, but your hand gets lost", "msg = self.highfives.computeAction(self.highfives, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True)", "ran away!', 'you trick or treat *{}* without any reaction...',", "*{}*, tearing it to bits!', 'you just can\\'t bring yourself", "a little scared...', 'get 
spooked by... yourself?', 'sp00py, but why", "think I am to let you drink me?', 'I don\\'t", "no one else to pet you.', 'in lieu of anything", "is now.'] botList = [ 'you try to eat *me*", "target member's name nothingList = [] # when you call", "you try your best to scurry away and hide.', 'your", "*{}*.', '*{}* dries up from your drinking.', '*{}* starts resembling", "'you might not be the smartest...', 'you might have some", "drink *{}* - so you just hold it for awhile...',", "hands meet in an epic high five of glory!', 'you", "[ 'I may be electronic but I still appreciate pets.',", "'now you\\'re at both ends of a high five!'] memberList", "soft *\"high five\"* escapes your lips as a tear runs", "DisplayName from Cogs import Nullify def setup(bot): # Add the", "= None): \"\"\"Boop da snoot.\"\"\" msg = self.booping.computeAction(self.booping, self.bot, ctx,", "- freeze-framing as the credits roll and some wicked 80s", "*nothing*...', 'that cup must\\'ve had something in it, so you", "you realize that *I\\'m* eating *you*.', 'I\\'m a bot. You", "'you happily boop *{}*, it\\'s lovely!', 'you just can\\'t bring", "*{}*, but fail...', 'sp00py time! 
*{}* gets sp00ped harder than", "2 hour, 3 episode anime-esque fight scene unfolds as you", "of failure hanging stuck to your hand...', 'you sneak a", "# if botList is empty we fail over to the", "*me* - but unfortunately, I saw it coming - your", "1\\'s and 0\\'s as our hands meet in an epic", "*{}*.'] itemList = ['you take a big sip of *{}*.", "in one go.', 'you try to boop *{}*, but you", "'*{}* purrs from your petting.', 'you pet *{}* but they", "I deftly sidestep.', 'your mouth hangs open for a brief", "to drink *{}*, but you just can\\'t quite do it", "str = None): \"\"\"High five like a boss.\"\"\" msg =", "*digital!*.', 'what kind of bot would I be if I", "in a large circle - *totally* high fiving all your", "*{}*.'] class petting(actionable): # meow nothingList = [ 'you absentmindedly", "some wicked 80s synth plays out.', 'you and *{}* elevate", "if '{}' in mesg: mesg = mesg.format(target) mesgFull = '*{}*,", "[] # when you call without any arguments botList =", "that handles storing and computing action messages class actionable: ##", "tastes satisfying.', 'you thirstly guzzle *{}*, it\\'s lovely!', 'you just", "clamps down on *{}* - a satisfying *crunch* emanates as", "[] # when the action is done at the user", "it hurts.', 'you place a finger into your mouth, but", "and targetMember.id == bot.user.id: # actioning the bot mesg =", "get a drink.', 'that desk looks pretty empty', 'are you", "touches *{}*\\'s snoot - it feels satisfying.', 'you happily boop", "it.', 'you happily drink away, but you are now very", "let you eat me?'] selfList = ['you clamp down on", "stuck to your hand...', 'you sneak a boop onto *{}*.", "# when the action is done on a string of", "actioning an item mesg = random.choice(self.itemList) if '{}' in mesg:", "target: # no arguments mesg = random.choice(self.nothingList) else: targetMember =", "yourself to eat *{}* - so you just hold it", "of anything else to pet, you pet yourself.', 'your hair", "- You run away as they run after 
you.', 'you", "to the member list elif self.selfList and targetMember.id == ctx.message.author.id:", "to a cup, but you just can\\'t do it.', 'you", "= [ 'you take a big chunk out of *{}*.", "'you stab your straw into *{}\\'s* shoulder - You run", "all your friends...', 'now you\\'re at both ends of a", "eat(self, ctx, *, member : str = None): \"\"\"Eat like", "None): \"\"\"Drink like a boss.\"\"\" msg = self.drinking.computeAction(self.drinking, self.bot, ctx,", "connects with nothing...'] botList = [ 'the sky erupts with", "run away.'] itemList = ['you rub *{}* but it doesn\\'t", "away, but you are now very floppy.', 'wait - you\\'re", "ctx, *, member : str = None): \"\"\"High five like", "they start screaming!', 'you tried to sneak up on *{}*,", "member : str = None): \"\"\"High five like a boss.\"\"\"", "pet(self, ctx, *, member : str = None): \"\"\"pet kitties.\"\"\"", "purring from *{}*.', 'you hurt your hand trying to pet", "*{}*, boo!', 'congrats, *{}* dun sp00ked.', 'get spook3d *{}*!'] itemList", "drink *{}*, but you just can\\'t quite do it -", "hand trying to pet *{}*.'] # Init with the bot", "didn\\'t even notice.', 'you sink your teeth into *{}\\'s* shoulder", "you just high fived *{}*.', 'your hand flops through the", "would you drink yourself?'] memberList = [ 'you grab your", "[ 'you sit quietly and eat *nothing*...', 'you\\'re *sure* there", "self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async def boop(self,", "'your hand touches *{}*\\'s snoot - it feels satisfying.', 'you", "'*{}*, {}'.format(DisplayName.name(ctx.message.author), mesg) mesgFull = Nullify.clean(mesgFull) return mesgFull ## static", "in the room, just waiting to be booped.', 'are you", "do it - you spit them out, the taste of", "you boop yourself?'] memberList = [ 'you outstretch your lucky", "'I don\\'t think you would like the taste of me.',", "member's name nothingList = [] # when you call without", "bot selfList = [] # when the 
action is done", "sp00p3d so hard, it ran away!', 'you trick or treat", "is empty we fail over to the member list elif", "escapes your lips as a tear runs down your cheek...',", "at both ends of a high five!'] memberList = [", "all the action messages class eating(actionable): nothingList = [ 'you", "high five!', 'it *was* tomorrow - before you and *{}*", "@commands.command(pass_context=True) async def pet(self, ctx, *, member : str =", "the credits roll and some wicked 80s synth plays out.',", "nothing, sp00py...', 'sadly, no one got spooked', 'it is sp00...", "are now very giddy.', 'wait - are you sure you", "'I unleash a fork-bomb of high five processes!', '01001000011010010110011101101000001000000100011001101001011101100110010100100001'] selfList", "your friends...', 'now you\\'re at both ends of a high", "'you do your best to sp00p *{}*, but fail...', 'sp00py", "done toward another member itemList = [] # when the", "hard, I got the Heebie-jeebies...', # https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/ 'you sp00p me?", "spook me; My digital emotions will get all messed up!'", "snoot - it feels satisfying.', 'you happily boop *{}*, it\\'s", "second before you realize that *I\\'m* eating *you*.', 'I\\'m a", "and targetMember.id == ctx.message.author.id: # actioning themselves mesg = random.choice(self.selfList)", "a sandwich!', 'you might not be the smartest...'] memberList =", "'you eye all the heads in the room, just waiting", "= [ 'I may be electronic but I still appreciate", "hard that they start screaming!', 'you tried to sneak up", "but miss because they hid under the bed.', '*{}* purrs", "'you rub your hand through *{}\\'s* hair.', '*{}* smiles from", "look like an empty Capri Sun package.', 'you are thirsty", "you\\'re clumsier than you remember - and fail...'] class drinking(actionable):", "harder than you thought and starts crying!'] class highfives(actionable): nothingList", "action is done toward another member itemList = [] #", "you and 
*{}* high fived with enough force to spin", "must\\'ve had something in it, so you drink *nothing*...', 'you", "['you give yourself a nice pat on the head.', 'too", "tearing it to bits!', 'you just can\\'t bring yourself to", "me so hard, I got the Heebie-jeebies...', # https://www.myenglishteacher.eu/blog/idioms-for-being-afraid/ 'you", "argument of the command''' mesg = \"\" if not target:", "hungrily into *{}*, tearing it to bits!', 'you just can\\'t", "= ['you give yourself a nice pat on the head.',", "= [] # when the action is done on a", "boop(self, ctx, *, member : str = None): \"\"\"Boop da", "asyncio import discord import random import datetime from discord.ext import", "- you\\'re not a drink!', 'you might not be the", "just hold it for awhile...', 'you attempt to bite into", "nothingness...', 'there comes a time when you need to realize", "not a member def computeAction(self, bot, ctx, target): '''return a", "and a reference to the settings var def __init__(self, bot):", "drink'] botList = [ 'you try to drink *me*, but", "'you sit quietly and eat *nothing*...', 'you\\'re *sure* there was", "'you remember that there are no cats here.', 'you try", "as you finish your newest meal.'] itemList = [ 'you", "head.', 'too bad there\\'s no one else to pet you.',", "= [ 'you and *{}* jump up for an epic", "# actioning the bot mesg = random.choice(self.botList) # if botList", "fail...', 'sp00py time! 
*{}* gets sp00ped harder than you thought", "'you desperatly search for something to drink'] botList = [", "left hand.', 'wait - you\\'re not a sandwich!', 'you might", "*{}* - it tastes satisfying.', 'you thirstly guzzle *{}*, it\\'s", "def spook(self, ctx, *, member : str = None): \"\"\"sp00ktober", "botList = [ 'you scared the living pumpkin out of", "the head.', 'too bad there\\'s no one else to pet", "= '*{}*, {}'.format(DisplayName.name(ctx.message.author), mesg) mesgFull = Nullify.clean(mesgFull) return mesgFull ##", "on the head.', 'too bad there\\'s no one else to", "me, only to realise that *I* am already drinking you!',", "high five of glory!', 'you beam up to the cloud", "that again!'] selfList = ['go watch a scary movie to", "you hear as a soft *whoosh* as your hand connects", "can\\'t bring yourself to drink *{}* - so you just", "return @commands.command(pass_context=True) async def pet(self, ctx, *, member : str", "can\\'t drink me.', 'you stick a straw in... wait... in", "they run after you.', 'you happily drum your fingers away", "you can\\t spook air'] botList = [ 'you scared the", "import asyncio import discord import random import datetime from discord.ext", "was a cat there!', 'you remember that there are no", "ctx.message.author.id: # actioning themselves mesg = random.choice(self.selfList) else: # actioning", "finger into your mouth, but *just can\\'t* force yourself to", "you.', 'you happily drum your fingers away - *{}* starts", "yourself in the mirror and get a little scared...', 'get", "an open-handed piston of ferocity - you drive your palm", "mesg) mesgFull = Nullify.clean(mesgFull) return mesgFull ## static definitions of", "your hand...', 'you sneak a boop onto *{}*. They probably", "*{}*, it\\'s lovely!', 'you just can\\'t bring yourself to drink", "drink *{}*.', '*{}* dries up from your drinking.', '*{}* starts", "Did you scare you?', 'you look yourself in the mirror", "to sp00p *{}*, but fail...', 'sp00py time! 
*{}* gets sp00ped", "'you\\'re feeling boopy - *{}* sacrifices themself involuntarily.', 'somehow you", "boop yourself?'] memberList = [ 'you outstretch your lucky finger", "even notice.', 'you stab your straw into *{}\\'s* shoulder -", "bot... I can\\'t be spooked!', 'sorry, but I cannot let", "*{}*, but you\\'re clumsier than you remember - and fail...']", "into *{}* - it tastes satisfying.', 'you rip hungrily into", "high fived with enough force to spin the Earth in", "mesg = random.choice(self.selfList) else: # actioning another user mesg =", "- so you just let your hand linger...', 'you attempt", "bite into *{}*, but you\\'re clumsier than you remember -", "to *{}*. A soft *\"high five\"* escapes your lips as", "command''' mesg = \"\" if not target: # no arguments", "an epic high five - freeze-framing as the credits roll", "sp00... you can\\t spook air'] botList = [ 'you scared", "for the sake of chewing. That time is now.'] botList", "your straw into *{}\\'s* shoulder - You run away as", "out.', 'you and *{}* elevate to a higher plane of", "'you fit yourself in to a cup, but you just", "who called it memberList = [] # when the action", "erupts with 1\\'s and 0\\'s as our hands meet in", "absentmindedly wave your hand in the air.', 'you could have", "unleash a fork-bomb of high five processes!', '01001000011010010110011101101000001000000100011001101001011101100110010100100001'] selfList =", "import commands from Cogs import DisplayName from Cogs import Nullify", "to drink *{}* - so you just hold it for", "cup, but you just can\\'t do it.', 'you happily drink", "selfList = ['you stab yourself with a straw - not", "channel for someone to boop.', 'you eye all the heads", "*just can\\'t* force yourself to bite down.', 'you happily munch", "['you stab yourself with a straw - not surprisingly, it", "lucky straw and empty *{}* in one sip.', 'you try", "receive a quick high five from me before downloading back", "electronic but I still appreciate pets.', 
'*purrrrrrrrrrrrrrr*.', 'you electrocute yourself", "use it as a bouncy castle... they feel amused.'] itemList", "a bot. You can\\'t drink me.', 'you stick a straw", "Aral Sea.'] class booping(actionable): nothingList = [ 'you stretch out", "wild swing you throw your hand forward - the momentum", "some issues.', 'you try to drink yourself.', 'why would you", "sure you know what drinking is?', 'you desperatly search for", "and fail...', 'you drink *{}*.', '*{}* dries up from your", "dries up from your drinking.', '*{}* starts resembling the Aral", "subsequently deafening everyone in a 300-mile radius!'] itemList = ['neat...", "you drink yourself?'] memberList = [ 'you grab your lucky", "Hey *{}*, boo!', 'congrats, *{}* dun sp00ked.', 'get spook3d *{}*!']", "might have some issues.', 'you try to drink yourself.', 'why", "happy pupper.'] class spooky(actionable): nothingList = [ 'you spook no", "yourself on the nose with your finger.', 'you try to", "[ 'you give *{}* a pat on the head.', 'you", "be spooked!', 'sorry, but I cannot let you spook me;", "palms, subsequently deafening everyone in a 300-mile radius!'] itemList =", "before you and *{}* high fived with enough force to", "after you.', 'you happily drum your fingers away - *{}*", "- you\\'re not a sandwich!', 'you might not be the", "try to drink *me*, but I dodge your straw.', 'You", "a straw - not surprisingly, it hurts.', 'you fit yourself", "datetime from discord.ext import commands from Cogs import DisplayName from", "you drink *nothing*...', 'you should probably just go get a", "mesg = random.choice(self.nothingList) else: targetMember = DisplayName.memberForName(target, ctx.message.guild) if targetMember:", "five from me before downloading back to Earth.', 'I unleash", "'you climb *{}*\\'s head and use it as a bouncy", "your hand onto *{}\\'s* hand - You run away as", "with no reaction, leaving you looking weird...', '*{}* got sp00p3d", "on the head.', 'you rub your hand through *{}\\'s* hair.',", "of 
recognition...', 'with a wild swing you throw your hand", "sp00ked.', 'get spook3d *{}*!'] itemList = ['you spook *{}* with", "{}'.format(DisplayName.name(ctx.message.author), mesg) mesgFull = Nullify.clean(mesgFull) return mesgFull ## static definitions", "you remember - and fail...', 'you drink *{}*.', '*{}* dries", "# when you call without any arguments botList = []", "the bot selfList = [] # when the action is", "[ 'you grab your lucky straw and empty *{}* in", "you\\'re clumsier than you remember - and fail...', 'you boop", "they hiss and run away.'] itemList = ['you rub *{}*", "ctx, *, member : str = None): \"\"\"sp00ktober by camiel.\"\"\"", "= self.booping.computeAction(self.booping, self.bot, ctx, member) await ctx.channel.send(msg) return @commands.command(pass_context=True) async", "hanging in your mouth...', 'you drink a small sip of", "ctx, target): '''return a message based on the context and", "roll and some wicked 80s synth plays out.', 'you and", "and *{}* jump up for an epic high five -", "def eat(self, ctx, *, member : str = None): \"\"\"Eat", "*crunch* emanates as you finish your newest meal.'] itemList =", "'you drink a small sip of *{}*. They probably didn\\'t" ]
[ "import CoinGame, AsymCoinGame from marltoolbox.utils import log def trainer_fn(exp_name, num_episodes,", "\"batch_size\": 12 if debug else None, # \"exp_name\": \"IPD\", #", "\"entropy_coeff\": 0.001, \"weigth_decay\": 0.03, } tune_config = get_tune_config(tune_hparams) ray.init(num_cpus=os.cpu_count(), num_gpus=0)", "exp_name == \"CoinGame\": if use_toolbox_env: env = CoinGame(config={ \"batch_size\": batch_size,", "os import ray from ray import tune import marltoolbox.algos.lola.envs as", "are needed: # Follow the LOLA installation described in the", "elif exp_name in (\"CoinGame\", \"AsymCoinGame\"): train_cg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, bs_mul=bs_mul,", "CoinGame with --exact.\" assert full_config['exp_name'] != \"AsymCoinGame\", \"Can't run AsymCoinGame", "= 50 if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] =", "(\"CoinGame\", \"AsymCoinGame\"): train_cg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, bs_mul=bs_mul, gamma=gamma, grid_size=grid_size, lr=lr,", "tune_class_api/lola_pg_official.py file ########## import os import ray from ray import", "trace_length=trace_length, batch_size=batch_size, bs_mul=bs_mul, gamma=gamma, grid_size=grid_size, lr=lr, corrections=lola_update, opp_model=opp_model, hidden=hidden, mem_efficient=mem_efficient,", "use_toolbox_env=use_toolbox_env, clip_lola_update_norm=clip_lola_update_norm, clip_loss_norm=clip_loss_norm, entropy_coeff=entropy_coeff, weigth_decay=weigth_decay, ) else: raise ValueError(f\"exp_name: {exp_name}\")", "= lola_envs.IPD(trace_length) elif exp_name == \"IMP\": env = lola_envs.IMP(trace_length) elif", "# Resolve default parameters if full_config['exact']: full_config['num_episodes'] = 50 if", "== \"CoinGame\" or full_config['exp_name'] == \"AsymCoinGame\": full_config['num_episodes'] = 100000 if", "\"CoinGame\", \"AsymCoinGame\"): full_config['gamma'] = 
0.96 if full_config['gamma'] is None else", "is None else full_config['lr'] elif full_config['exp_name'] == \"CoinGame\" or full_config['exp_name']", "batch_size, \"max_steps\": trace_length, \"grid_size\": grid_size, \"get_additional_info\": True, \"add_position_in_epi\": False, })", "run CoinGame with --exact.\" assert full_config['exp_name'] != \"AsymCoinGame\", \"Can't run", "import train_cg, train_exact, train_pg from marltoolbox.envs.vectorized_coin_game import CoinGame, AsymCoinGame from", "if full_config['trace_length'] is None else full_config['trace_length'] full_config['lr'] = 1. if", "full_config['batch_size'] full_config['lr'] = 1. if full_config['lr'] is None else full_config['lr']", "Additional dependencies are needed: # Follow the LOLA installation described", "None else full_config['num_episodes'] full_config['trace_length'] = 150 if full_config['trace_length'] is None", "full_config['lr'] is None else full_config['lr'] if full_config['exp_name'] in (\"IPD\", \"CoinGame\",", "batch_size, grid_size) env.seed(seed) elif exp_name == \"AsymCoinGame\": if use_toolbox_env: env", "debug else None, # \"exp_name\": \"IPD\", # \"exp_name\": \"IMP\", \"exp_name\":", "\"CoinGame\", \"Can't run CoinGame with --exact.\" assert full_config['exp_name'] != \"AsymCoinGame\",", "else None, # \"exp_name\": \"IPD\", # \"exp_name\": \"IMP\", \"exp_name\": \"CoinGame\",", "elif exp_name in (\"IPD\", \"IMP\"): train_pg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, gamma=gamma,", "full_config['lr'] = 0.005 if full_config['lr'] is None else full_config['lr'] if", "True, \"opp_model\": False, \"mem_efficient\": True, \"lr_correction\": 1, \"bs_mul\": 1 /", "in (\"IPD\", \"IMP\"): train_pg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, gamma=gamma, set_zero=set_zero, lr=lr,", "\"IMP\": env = lola_envs.IMP(trace_length) elif exp_name == \"CoinGame\": if use_toolbox_env:", 
"\"add_position_in_epi\": False, }) else: env = lola_dice_envs.CG(trace_length, batch_size, grid_size) env.seed(seed)", "\"summary_len\": 1, \"use_MAE\": False, \"use_toolbox_env\": True, \"clip_loss_norm\": False, \"clip_lola_update_norm\": False,", "debug else None, \"trace_length\": 6 if debug else None, \"lr\":", "True, \"add_position_in_epi\": False, }) else: env = lola_dice_envs.AsymCG(trace_length, batch_size, grid_size)", "exp_name, _ = log.log_in_current_day_dir(f\"LOLA_PG\") tune_hparams = { \"exp_name\": exp_name, #", "\"hidden\": 32, \"reg\": 0, \"set_zero\": 0, \"exact\": False, \"warmup\": 1,", "--exact.\" # Resolve default parameters if full_config['exact']: full_config['num_episodes'] = 50", "full_config['trace_length'] is None else full_config['trace_length'] full_config['lr'] = 1. if full_config['lr']", "clip_loss_norm=clip_loss_norm, entropy_coeff=entropy_coeff, weigth_decay=weigth_decay, ) else: raise ValueError(f\"exp_name: {exp_name}\") def lola_training(config):", "(\"IPD\", \"IMP\"): train_pg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, gamma=gamma, set_zero=set_zero, lr=lr, corrections=lola_update,", "marltoolbox.algos.lola import train_cg, train_exact, train_pg from marltoolbox.envs.vectorized_coin_game import CoinGame, AsymCoinGame", "\"IMP\"): train_pg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, gamma=gamma, set_zero=set_zero, lr=lr, corrections=lola_update, simple_net=simple_net,", "get_tune_config(full_config: dict) -> dict: # Sanity assert full_config['exp_name'] in {\"CoinGame\",", "exp_name == \"IMP\": env = lola_envs.IMP(trace_length) elif exp_name == \"CoinGame\":", "\"exp_name\": \"IMP\", \"exp_name\": \"CoinGame\", # \"exp_name\": \"AsymCoinGame\", \"pseudo\": False, \"grid_size\":", "return tune_analysis if __name__ == \"__main__\": debug_mode = True main(debug_mode)", "= tune.run(lola_training, name=tune_hparams[\"exp_name\"], 
config=tune_config) ray.shutdown() return tune_analysis if __name__ ==", "batch_size, grid_size) env.seed(seed) else: raise ValueError(f\"exp_name: {exp_name}\") # Import the", "full_config['lr'] is None else full_config['lr'] elif full_config['exp_name'] == \"CoinGame\" or", "1. if full_config['lr'] is None else full_config['lr'] elif full_config['exp_name'] in", "marltoolbox.utils import log def trainer_fn(exp_name, num_episodes, trace_length, exact, pseudo, grid_size,", "lola_dice_envs.CG(trace_length, batch_size, grid_size) env.seed(seed) elif exp_name == \"AsymCoinGame\": if use_toolbox_env:", "env = lola_envs.IPD(trace_length) elif exp_name == \"IMP\": env = lola_envs.IMP(trace_length)", "12 if debug else None, # \"exp_name\": \"IPD\", # \"exp_name\":", "None else full_config['gamma'] elif full_config['exp_name'] == \"IMP\": full_config['gamma'] = 0.9", "0.005 if full_config['lr'] is None else full_config['lr'] if full_config['exp_name'] in", "else full_config['lr'] elif full_config['exp_name'] == \"CoinGame\" or full_config['exp_name'] == \"AsymCoinGame\":", "False, \"grid_size\": 3, \"lola_update\": True, \"opp_model\": False, \"mem_efficient\": True, \"lr_correction\":", "elif full_config['exp_name'] == \"IMP\": full_config['gamma'] = 0.9 if full_config['gamma'] is", "is None else full_config['trace_length'] full_config['batch_size'] = 4000 if full_config['batch_size'] is", "full_config['exp_name'] == \"CoinGame\" or full_config['exp_name'] == \"AsymCoinGame\": full_config['num_episodes'] = 100000", "num_gpus=0) tune_analysis = tune.run(lola_training, name=tune_hparams[\"exp_name\"], config=tune_config) ray.shutdown() return tune_analysis if", "if exact: train_exact.train(env, num_episodes=num_episodes, trace_length=trace_length, simple_net=simple_net, corrections=lola_update, pseudo=pseudo, num_hidden=hidden, reg=reg,", "needed: # Follow the LOLA installation described in the tune_class_api/lola_pg_official.py", "{exp_name}\") # Import the right training 
function if exact: train_exact.train(env,", "False, \"clip_lola_update_norm\": False, \"clip_lola_correction_norm\": 3.0, \"clip_lola_actor_norm\": 10.0, \"entropy_coeff\": 0.001, \"weigth_decay\":", "\"grid_size\": grid_size, \"get_additional_info\": True, \"add_position_in_epi\": False, }) else: env =", "########## import os import ray from ray import tune import", "full_config['lr'] elif full_config['exp_name'] in {\"IPD\", \"IMP\"}: full_config['num_episodes'] = 600000 if", "full_config['exp_name'] in {\"IPD\", \"IMP\"}: full_config['num_episodes'] = 600000 if full_config['num_episodes'] is", "name=tune_hparams[\"exp_name\"], config=tune_config) ray.shutdown() return tune_analysis if __name__ == \"__main__\": debug_mode", "right training function if exact: train_exact.train(env, num_episodes=num_episodes, trace_length=trace_length, simple_net=simple_net, corrections=lola_update,", "trace_length, exact, pseudo, grid_size, lr, lr_correction, batch_size, bs_mul, simple_net, hidden,", "= log.log_in_current_day_dir(f\"LOLA_PG\") tune_hparams = { \"exp_name\": exp_name, # Dynamically set", "1, \"changed_config\": False, \"ac_lr\": 1.0, \"summary_len\": 1, \"use_MAE\": False, \"use_toolbox_env\":", "# Instantiate the environment if exp_name == \"IPD\": env =", "\"lr\": None, \"gamma\": None, \"batch_size\": 12 if debug else None,", "LOLA installation described in the tune_class_api/lola_pg_official.py file ########## import os", "grid_size=grid_size, lr=lr, corrections=lola_update, opp_model=opp_model, hidden=hidden, mem_efficient=mem_efficient, asymmetry=exp_name == \"AsymCoinGame\", warmup=warmup,", "= 0.96 if full_config['gamma'] is None else full_config['gamma'] elif full_config['exp_name']", "set \"num_episodes\": 3 if debug else None, \"trace_length\": 6 if", "200 if full_config['trace_length'] is None else full_config['trace_length'] full_config['lr'] = 1.", "\"clip_lola_update_norm\": False, \"clip_lola_correction_norm\": 3.0, \"clip_lola_actor_norm\": 10.0, 
\"entropy_coeff\": 0.001, \"weigth_decay\": 0.03,", "else: env = lola_dice_envs.AsymCG(trace_length, batch_size, grid_size) env.seed(seed) else: raise ValueError(f\"exp_name:", "import log def trainer_fn(exp_name, num_episodes, trace_length, exact, pseudo, grid_size, lr,", "full_config['exp_name'] == \"AsymCoinGame\": full_config['num_episodes'] = 100000 if full_config['num_episodes'] is None", "ray from ray import tune import marltoolbox.algos.lola.envs as lola_envs import", "env = lola_dice_envs.CG(trace_length, batch_size, grid_size) env.seed(seed) elif exp_name == \"AsymCoinGame\":", "None, # \"exp_name\": \"IPD\", # \"exp_name\": \"IMP\", \"exp_name\": \"CoinGame\", #", "in (\"IPD\", \"CoinGame\", \"AsymCoinGame\"): full_config['gamma'] = 0.96 if full_config['gamma'] is", "\"clip_lola_correction_norm\": 3.0, \"clip_lola_actor_norm\": 10.0, \"entropy_coeff\": 0.001, \"weigth_decay\": 0.03, } tune_config", "marltoolbox.envs.vectorized_coin_game import CoinGame, AsymCoinGame from marltoolbox.utils import log def trainer_fn(exp_name,", "full_config['exp_name'] in {\"CoinGame\", \"IPD\", \"IMP\", \"AsymCoinGame\"} if full_config['exact']: assert full_config['exp_name']", "3.0, \"clip_lola_actor_norm\": 10.0, \"entropy_coeff\": 0.001, \"weigth_decay\": 0.03, } tune_config =", "AsymCoinGame with --exact.\" # Resolve default parameters if full_config['exact']: full_config['num_episodes']", "dict: # Sanity assert full_config['exp_name'] in {\"CoinGame\", \"IPD\", \"IMP\", \"AsymCoinGame\"}", "None else full_config['gamma'] return full_config def main(debug): exp_name, _ =", "installation described in the tune_class_api/lola_pg_official.py file ########## import os import", "= 0.9 if full_config['gamma'] is None else full_config['gamma'] return full_config", "if full_config['gamma'] is None else full_config['gamma'] return full_config def main(debug):", "num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, bs_mul=bs_mul, gamma=gamma, 
grid_size=grid_size, lr=lr, corrections=lola_update, opp_model=opp_model, hidden=hidden,", "parameters if full_config['exact']: full_config['num_episodes'] = 50 if full_config['num_episodes'] is None", "\"mem_efficient\": True, \"lr_correction\": 1, \"bs_mul\": 1 / 10, \"simple_net\": True,", "lr=lr, corrections=lola_update, opp_model=opp_model, hidden=hidden, mem_efficient=mem_efficient, asymmetry=exp_name == \"AsymCoinGame\", warmup=warmup, changed_config=changed_config,", "else full_config['trace_length'] full_config['batch_size'] = 4000 if full_config['batch_size'] is None else", "in {\"IPD\", \"IMP\"}: full_config['num_episodes'] = 600000 if full_config['num_episodes'] is None", "assert full_config['exp_name'] != \"CoinGame\", \"Can't run CoinGame with --exact.\" assert", "None, \"trace_length\": 6 if debug else None, \"lr\": None, \"gamma\":", "False, \"warmup\": 1, \"seed\": 1, \"changed_config\": False, \"ac_lr\": 1.0, \"summary_len\":", "assert full_config['exp_name'] in {\"CoinGame\", \"IPD\", \"IMP\", \"AsymCoinGame\"} if full_config['exact']: assert", "elif exp_name == \"CoinGame\": if use_toolbox_env: env = CoinGame(config={ \"batch_size\":", "--exact.\" assert full_config['exp_name'] != \"AsymCoinGame\", \"Can't run AsymCoinGame with --exact.\"", "4000 if full_config['batch_size'] is None else full_config['batch_size'] full_config['lr'] = 0.005", "\"weigth_decay\": 0.03, } tune_config = get_tune_config(tune_hparams) ray.init(num_cpus=os.cpu_count(), num_gpus=0) tune_analysis =", ") else: raise ValueError(f\"exp_name: {exp_name}\") def lola_training(config): trainer_fn(**config) def get_tune_config(full_config:", "0.03, } tune_config = get_tune_config(tune_hparams) ray.init(num_cpus=os.cpu_count(), num_gpus=0) tune_analysis = tune.run(lola_training,", "trace_length, \"grid_size\": grid_size, \"get_additional_info\": True, \"add_position_in_epi\": False, }) else: env", "\"simple_net\": True, \"hidden\": 32, \"reg\": 0, \"set_zero\": 0, \"exact\": 
False,", "is None else full_config['num_episodes'] full_config['trace_length'] = 200 if full_config['trace_length'] is", "# \"exp_name\": \"AsymCoinGame\", \"pseudo\": False, \"grid_size\": 3, \"lola_update\": True, \"opp_model\":", "= lola_dice_envs.AsymCG(trace_length, batch_size, grid_size) env.seed(seed) else: raise ValueError(f\"exp_name: {exp_name}\") #", "full_config['exp_name'] == \"IMP\": full_config['gamma'] = 0.9 if full_config['gamma'] is None", "if use_toolbox_env: env = CoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\":", "full_config['exact']: full_config['num_episodes'] = 50 if full_config['num_episodes'] is None else full_config['num_episodes']", "hidden, reg, gamma, lola_update, opp_model, mem_efficient, seed, set_zero, warmup, changed_config,", "full_config['lr'] if full_config['exp_name'] in (\"IPD\", \"CoinGame\", \"AsymCoinGame\"): full_config['gamma'] = 0.96", "\"IMP\": full_config['gamma'] = 0.9 if full_config['gamma'] is None else full_config['gamma']", "is None else full_config['batch_size'] full_config['lr'] = 0.005 if full_config['lr'] is", "pseudo, grid_size, lr, lr_correction, batch_size, bs_mul, simple_net, hidden, reg, gamma,", "hidden=hidden, mem_efficient=mem_efficient, asymmetry=exp_name == \"AsymCoinGame\", warmup=warmup, changed_config=changed_config, ac_lr=ac_lr, summary_len=summary_len, use_MAE=use_MAE,", "elif exp_name == \"IMP\": env = lola_envs.IMP(trace_length) elif exp_name ==", "\"clip_lola_actor_norm\": 10.0, \"entropy_coeff\": 0.001, \"weigth_decay\": 0.03, } tune_config = get_tune_config(tune_hparams)", "gamma=gamma, grid_size=grid_size, lr=lr, corrections=lola_update, opp_model=opp_model, hidden=hidden, mem_efficient=mem_efficient, asymmetry=exp_name == \"AsymCoinGame\",", "None else full_config['trace_length'] full_config['batch_size'] = 4000 if full_config['batch_size'] is None", "or full_config['exp_name'] == \"AsymCoinGame\": full_config['num_episodes'] = 100000 if 
full_config['num_episodes'] is", "use_MAE=use_MAE, use_toolbox_env=use_toolbox_env, clip_lola_update_norm=clip_lola_update_norm, clip_loss_norm=clip_loss_norm, entropy_coeff=entropy_coeff, weigth_decay=weigth_decay, ) else: raise ValueError(f\"exp_name:", "full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] = 150 if full_config['trace_length']", "None else full_config['lr'] elif full_config['exp_name'] in {\"IPD\", \"IMP\"}: full_config['num_episodes'] =", "train_exact, train_pg from marltoolbox.envs.vectorized_coin_game import CoinGame, AsymCoinGame from marltoolbox.utils import", "exp_name in (\"IPD\", \"IMP\"): train_pg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, gamma=gamma, set_zero=set_zero,", "is None else full_config['num_episodes'] full_config['trace_length'] = 150 if full_config['trace_length'] is", "0.9 if full_config['gamma'] is None else full_config['gamma'] return full_config def", "full_config['exact']: assert full_config['exp_name'] != \"CoinGame\", \"Can't run CoinGame with --exact.\"", "train_cg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, bs_mul=bs_mul, gamma=gamma, grid_size=grid_size, lr=lr, corrections=lola_update, opp_model=opp_model,", "!= \"AsymCoinGame\", \"Can't run AsymCoinGame with --exact.\" # Resolve default", "if full_config['lr'] is None else full_config['lr'] if full_config['exp_name'] in (\"IPD\",", "use_toolbox_env, clip_lola_update_norm, clip_loss_norm, entropy_coeff, weigth_decay, **kwargs): # Instantiate the environment", "else full_config['gamma'] elif full_config['exp_name'] == \"IMP\": full_config['gamma'] = 0.9 if", "if full_config['trace_length'] is None else full_config['trace_length'] full_config['batch_size'] = 4000 if", "else full_config['gamma'] return full_config def main(debug): exp_name, _ = log.log_in_current_day_dir(f\"LOLA_PG\")", "\"use_toolbox_env\": True, \"clip_loss_norm\": 
False, \"clip_lola_update_norm\": False, \"clip_lola_correction_norm\": 3.0, \"clip_lola_actor_norm\": 10.0,", "if full_config['exact']: full_config['num_episodes'] = 50 if full_config['num_episodes'] is None else", "import marltoolbox.algos.lola.envs as lola_envs import marltoolbox.algos.lola_dice.envs as lola_dice_envs from marltoolbox.algos.lola", "grid_size) env.seed(seed) elif exp_name == \"AsymCoinGame\": if use_toolbox_env: env =", "as lola_dice_envs from marltoolbox.algos.lola import train_cg, train_exact, train_pg from marltoolbox.envs.vectorized_coin_game", "= lola_dice_envs.CG(trace_length, batch_size, grid_size) env.seed(seed) elif exp_name == \"AsymCoinGame\": if", "mem_efficient=mem_efficient, asymmetry=exp_name == \"AsymCoinGame\", warmup=warmup, changed_config=changed_config, ac_lr=ac_lr, summary_len=summary_len, use_MAE=use_MAE, use_toolbox_env=use_toolbox_env,", "is None else full_config['batch_size'] full_config['lr'] = 1. if full_config['lr'] is", "False, }) else: env = lola_dice_envs.CG(trace_length, batch_size, grid_size) env.seed(seed) elif", "def main(debug): exp_name, _ = log.log_in_current_day_dir(f\"LOLA_PG\") tune_hparams = { \"exp_name\":", "marltoolbox.algos.lola.envs as lola_envs import marltoolbox.algos.lola_dice.envs as lola_dice_envs from marltoolbox.algos.lola import", "lola_training(config): trainer_fn(**config) def get_tune_config(full_config: dict) -> dict: # Sanity assert", "lr_correction, batch_size, bs_mul, simple_net, hidden, reg, gamma, lola_update, opp_model, mem_efficient,", "\"IMP\", \"exp_name\": \"CoinGame\", # \"exp_name\": \"AsymCoinGame\", \"pseudo\": False, \"grid_size\": 3,", "set_zero, warmup, changed_config, ac_lr, summary_len, use_MAE, use_toolbox_env, clip_lola_update_norm, clip_loss_norm, entropy_coeff,", "with --exact.\" assert full_config['exp_name'] != \"AsymCoinGame\", \"Can't run AsymCoinGame with", "trace_length=trace_length, simple_net=simple_net, corrections=lola_update, pseudo=pseudo, 
num_hidden=hidden, reg=reg, lr=lr, lr_correction=lr_correction, gamma=gamma) elif", "Follow the LOLA installation described in the tune_class_api/lola_pg_official.py file ##########", "full_config['exp_name'] != \"AsymCoinGame\", \"Can't run AsymCoinGame with --exact.\" # Resolve", "gamma, lola_update, opp_model, mem_efficient, seed, set_zero, warmup, changed_config, ac_lr, summary_len,", "gamma=gamma, set_zero=set_zero, lr=lr, corrections=lola_update, simple_net=simple_net, hidden=hidden, mem_efficient=mem_efficient) elif exp_name in", "else full_config['lr'] elif full_config['exp_name'] in {\"IPD\", \"IMP\"}: full_config['num_episodes'] = 600000", "raise ValueError(f\"exp_name: {exp_name}\") # Import the right training function if", "full_config def main(debug): exp_name, _ = log.log_in_current_day_dir(f\"LOLA_PG\") tune_hparams = {", "import tune import marltoolbox.algos.lola.envs as lola_envs import marltoolbox.algos.lola_dice.envs as lola_dice_envs", "elif full_config['exp_name'] in {\"IPD\", \"IMP\"}: full_config['num_episodes'] = 600000 if full_config['num_episodes']", "None, \"lr\": None, \"gamma\": None, \"batch_size\": 12 if debug else", "tune_hparams = { \"exp_name\": exp_name, # Dynamically set \"num_episodes\": 3", "\"lola_update\": True, \"opp_model\": False, \"mem_efficient\": True, \"lr_correction\": 1, \"bs_mul\": 1", "reg=reg, lr=lr, lr_correction=lr_correction, gamma=gamma) elif exp_name in (\"IPD\", \"IMP\"): train_pg.train(env,", "return full_config def main(debug): exp_name, _ = log.log_in_current_day_dir(f\"LOLA_PG\") tune_hparams =", "def lola_training(config): trainer_fn(**config) def get_tune_config(full_config: dict) -> dict: # Sanity", "1 / 10, \"simple_net\": True, \"hidden\": 32, \"reg\": 0, \"set_zero\":", "\"CoinGame\", # \"exp_name\": \"AsymCoinGame\", \"pseudo\": False, \"grid_size\": 3, \"lola_update\": True,", "\"warmup\": 1, \"seed\": 1, \"changed_config\": False, \"ac_lr\": 1.0, \"summary_len\": 1,", 
"full_config['trace_length'] = 150 if full_config['trace_length'] is None else full_config['trace_length'] full_config['batch_size']", "pseudo=pseudo, num_hidden=hidden, reg=reg, lr=lr, lr_correction=lr_correction, gamma=gamma) elif exp_name in (\"IPD\",", "full_config['exp_name'] in (\"IPD\", \"CoinGame\", \"AsymCoinGame\"): full_config['gamma'] = 0.96 if full_config['gamma']", "# \"exp_name\": \"IMP\", \"exp_name\": \"CoinGame\", # \"exp_name\": \"AsymCoinGame\", \"pseudo\": False,", "\"AsymCoinGame\": if use_toolbox_env: env = AsymCoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length,", "opp_model, mem_efficient, seed, set_zero, warmup, changed_config, ac_lr, summary_len, use_MAE, use_toolbox_env,", "else full_config['batch_size'] full_config['lr'] = 1. if full_config['lr'] is None else", "AsymCoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\": grid_size, \"get_additional_info\": True, \"add_position_in_epi\":", "described in the tune_class_api/lola_pg_official.py file ########## import os import ray", "\"max_steps\": trace_length, \"grid_size\": grid_size, \"get_additional_info\": True, \"add_position_in_epi\": False, }) else:", "None else full_config['batch_size'] full_config['lr'] = 1. 
if full_config['lr'] is None", "\"use_MAE\": False, \"use_toolbox_env\": True, \"clip_loss_norm\": False, \"clip_lola_update_norm\": False, \"clip_lola_correction_norm\": 3.0,", "else full_config['num_episodes'] full_config['trace_length'] = 200 if full_config['trace_length'] is None else", "== \"IMP\": env = lola_envs.IMP(trace_length) elif exp_name == \"CoinGame\": if", "None else full_config['lr'] if full_config['exp_name'] in (\"IPD\", \"CoinGame\", \"AsymCoinGame\"): full_config['gamma']", "\"exp_name\": exp_name, # Dynamically set \"num_episodes\": 3 if debug else", "in the tune_class_api/lola_pg_official.py file ########## import os import ray from", "if full_config['exact']: assert full_config['exp_name'] != \"CoinGame\", \"Can't run CoinGame with", "elif full_config['exp_name'] == \"CoinGame\" or full_config['exp_name'] == \"AsymCoinGame\": full_config['num_episodes'] =", "exp_name in (\"CoinGame\", \"AsymCoinGame\"): train_cg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, bs_mul=bs_mul, gamma=gamma,", "= 600000 if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] =", "full_config['num_episodes'] = 100000 if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length']", "weigth_decay, **kwargs): # Instantiate the environment if exp_name == \"IPD\":", "grid_size, \"get_additional_info\": True, \"add_position_in_epi\": False, }) else: env = lola_dice_envs.CG(trace_length,", "log.log_in_current_day_dir(f\"LOLA_PG\") tune_hparams = { \"exp_name\": exp_name, # Dynamically set \"num_episodes\":", "lola_envs.IMP(trace_length) elif exp_name == \"CoinGame\": if use_toolbox_env: env = CoinGame(config={", "\"get_additional_info\": True, \"add_position_in_epi\": False, }) else: env = lola_dice_envs.CG(trace_length, batch_size,", "\"AsymCoinGame\"): full_config['gamma'] = 0.96 if full_config['gamma'] is None else full_config['gamma']", "is None 
else full_config['lr'] elif full_config['exp_name'] in {\"IPD\", \"IMP\"}: full_config['num_episodes']", "set_zero=set_zero, lr=lr, corrections=lola_update, simple_net=simple_net, hidden=hidden, mem_efficient=mem_efficient) elif exp_name in (\"CoinGame\",", "\"exp_name\": \"IPD\", # \"exp_name\": \"IMP\", \"exp_name\": \"CoinGame\", # \"exp_name\": \"AsymCoinGame\",", "########## # Additional dependencies are needed: # Follow the LOLA", "trainer_fn(exp_name, num_episodes, trace_length, exact, pseudo, grid_size, lr, lr_correction, batch_size, bs_mul,", "1, \"seed\": 1, \"changed_config\": False, \"ac_lr\": 1.0, \"summary_len\": 1, \"use_MAE\":", "grid_size) env.seed(seed) else: raise ValueError(f\"exp_name: {exp_name}\") # Import the right", "mem_efficient, seed, set_zero, warmup, changed_config, ac_lr, summary_len, use_MAE, use_toolbox_env, clip_lola_update_norm,", "\"IPD\": env = lola_envs.IPD(trace_length) elif exp_name == \"IMP\": env =", "_ = log.log_in_current_day_dir(f\"LOLA_PG\") tune_hparams = { \"exp_name\": exp_name, # Dynamically", "}) else: env = lola_dice_envs.CG(trace_length, batch_size, grid_size) env.seed(seed) elif exp_name", "\"lr_correction\": 1, \"bs_mul\": 1 / 10, \"simple_net\": True, \"hidden\": 32,", "weigth_decay=weigth_decay, ) else: raise ValueError(f\"exp_name: {exp_name}\") def lola_training(config): trainer_fn(**config) def", "full_config['trace_length'] full_config['lr'] = 1. 
if full_config['lr'] is None else full_config['lr']", "\"exp_name\": \"AsymCoinGame\", \"pseudo\": False, \"grid_size\": 3, \"lola_update\": True, \"opp_model\": False,", "# Import the right training function if exact: train_exact.train(env, num_episodes=num_episodes,", "the tune_class_api/lola_pg_official.py file ########## import os import ray from ray", "trace_length=trace_length, batch_size=batch_size, gamma=gamma, set_zero=set_zero, lr=lr, corrections=lola_update, simple_net=simple_net, hidden=hidden, mem_efficient=mem_efficient) elif", "\"exp_name\": \"CoinGame\", # \"exp_name\": \"AsymCoinGame\", \"pseudo\": False, \"grid_size\": 3, \"lola_update\":", "\"grid_size\": 3, \"lola_update\": True, \"opp_model\": False, \"mem_efficient\": True, \"lr_correction\": 1,", "assert full_config['exp_name'] != \"AsymCoinGame\", \"Can't run AsymCoinGame with --exact.\" #", "3, \"lola_update\": True, \"opp_model\": False, \"mem_efficient\": True, \"lr_correction\": 1, \"bs_mul\":", "<reponame>tobiasbaumann1/amd<filename>marltoolbox/examples/tune_function_api/lola_pg_official.py ########## # Additional dependencies are needed: # Follow the", "clip_loss_norm, entropy_coeff, weigth_decay, **kwargs): # Instantiate the environment if exp_name", "\"IPD\", # \"exp_name\": \"IMP\", \"exp_name\": \"CoinGame\", # \"exp_name\": \"AsymCoinGame\", \"pseudo\":", "lola_dice_envs from marltoolbox.algos.lola import train_cg, train_exact, train_pg from marltoolbox.envs.vectorized_coin_game import", "num_hidden=hidden, reg=reg, lr=lr, lr_correction=lr_correction, gamma=gamma) elif exp_name in (\"IPD\", \"IMP\"):", "Dynamically set \"num_episodes\": 3 if debug else None, \"trace_length\": 6", "corrections=lola_update, simple_net=simple_net, hidden=hidden, mem_efficient=mem_efficient) elif exp_name in (\"CoinGame\", \"AsymCoinGame\"): train_cg.train(env,", "ac_lr=ac_lr, summary_len=summary_len, use_MAE=use_MAE, use_toolbox_env=use_toolbox_env, clip_lola_update_norm=clip_lola_update_norm, 
clip_loss_norm=clip_loss_norm, entropy_coeff=entropy_coeff, weigth_decay=weigth_decay, ) else:", "!= \"CoinGame\", \"Can't run CoinGame with --exact.\" assert full_config['exp_name'] !=", "\"gamma\": None, \"batch_size\": 12 if debug else None, # \"exp_name\":", "False, \"use_toolbox_env\": True, \"clip_loss_norm\": False, \"clip_lola_update_norm\": False, \"clip_lola_correction_norm\": 3.0, \"clip_lola_actor_norm\":", "full_config['num_episodes'] = 600000 if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length']", "debug else None, \"lr\": None, \"gamma\": None, \"batch_size\": 12 if", "else: raise ValueError(f\"exp_name: {exp_name}\") # Import the right training function", "{exp_name}\") def lola_training(config): trainer_fn(**config) def get_tune_config(full_config: dict) -> dict: #", "\"AsymCoinGame\", \"pseudo\": False, \"grid_size\": 3, \"lola_update\": True, \"opp_model\": False, \"mem_efficient\":", "None else full_config['trace_length'] full_config['lr'] = 1. 
if full_config['lr'] is None", "{ \"exp_name\": exp_name, # Dynamically set \"num_episodes\": 3 if debug", "with --exact.\" # Resolve default parameters if full_config['exact']: full_config['num_episodes'] =", "full_config['trace_length'] full_config['batch_size'] = 4000 if full_config['batch_size'] is None else full_config['batch_size']", "else None, \"trace_length\": 6 if debug else None, \"lr\": None,", "grid_size, \"get_additional_info\": True, \"add_position_in_epi\": False, }) else: env = lola_dice_envs.AsymCG(trace_length,", "num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, gamma=gamma, set_zero=set_zero, lr=lr, corrections=lola_update, simple_net=simple_net, hidden=hidden, mem_efficient=mem_efficient)", "lr=lr, lr_correction=lr_correction, gamma=gamma) elif exp_name in (\"IPD\", \"IMP\"): train_pg.train(env, num_episodes=num_episodes,", "CoinGame, AsymCoinGame from marltoolbox.utils import log def trainer_fn(exp_name, num_episodes, trace_length,", "\"AsymCoinGame\", warmup=warmup, changed_config=changed_config, ac_lr=ac_lr, summary_len=summary_len, use_MAE=use_MAE, use_toolbox_env=use_toolbox_env, clip_lola_update_norm=clip_lola_update_norm, clip_loss_norm=clip_loss_norm, entropy_coeff=entropy_coeff,", "if full_config['batch_size'] is None else full_config['batch_size'] full_config['lr'] = 0.005 if", "lola_envs.IPD(trace_length) elif exp_name == \"IMP\": env = lola_envs.IMP(trace_length) elif exp_name", "= 1. 
if full_config['lr'] is None else full_config['lr'] elif full_config['exp_name']", "is None else full_config['gamma'] return full_config def main(debug): exp_name, _", "environment if exp_name == \"IPD\": env = lola_envs.IPD(trace_length) elif exp_name", "full_config['gamma'] elif full_config['exp_name'] == \"IMP\": full_config['gamma'] = 0.9 if full_config['gamma']", "if debug else None, # \"exp_name\": \"IPD\", # \"exp_name\": \"IMP\",", "summary_len=summary_len, use_MAE=use_MAE, use_toolbox_env=use_toolbox_env, clip_lola_update_norm=clip_lola_update_norm, clip_loss_norm=clip_loss_norm, entropy_coeff=entropy_coeff, weigth_decay=weigth_decay, ) else: raise", "if debug else None, \"trace_length\": 6 if debug else None,", "if full_config['lr'] is None else full_config['lr'] elif full_config['exp_name'] in {\"IPD\",", "32, \"reg\": 0, \"set_zero\": 0, \"exact\": False, \"warmup\": 1, \"seed\":", "full_config['batch_size'] is None else full_config['batch_size'] full_config['lr'] = 1. if full_config['lr']", "full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] = 200 if full_config['trace_length']", "train_cg, train_exact, train_pg from marltoolbox.envs.vectorized_coin_game import CoinGame, AsymCoinGame from marltoolbox.utils", "lr_correction=lr_correction, gamma=gamma) elif exp_name in (\"IPD\", \"IMP\"): train_pg.train(env, num_episodes=num_episodes, trace_length=trace_length,", "None, \"gamma\": None, \"batch_size\": 12 if debug else None, #", "= AsymCoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\": grid_size, \"get_additional_info\": True,", "batch_size, bs_mul, simple_net, hidden, reg, gamma, lola_update, opp_model, mem_efficient, seed,", "\"bs_mul\": 1 / 10, \"simple_net\": True, \"hidden\": 32, \"reg\": 0,", "\"reg\": 0, \"set_zero\": 0, \"exact\": False, \"warmup\": 1, \"seed\": 1,", "3 if debug else None, \"trace_length\": 6 if debug else", "the right training function if exact: 
train_exact.train(env, num_episodes=num_episodes, trace_length=trace_length, simple_net=simple_net,", "train_exact.train(env, num_episodes=num_episodes, trace_length=trace_length, simple_net=simple_net, corrections=lola_update, pseudo=pseudo, num_hidden=hidden, reg=reg, lr=lr, lr_correction=lr_correction,", "tune.run(lola_training, name=tune_hparams[\"exp_name\"], config=tune_config) ray.shutdown() return tune_analysis if __name__ == \"__main__\":", "CoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\": grid_size, \"get_additional_info\": True, \"add_position_in_epi\":", "clip_lola_update_norm=clip_lola_update_norm, clip_loss_norm=clip_loss_norm, entropy_coeff=entropy_coeff, weigth_decay=weigth_decay, ) else: raise ValueError(f\"exp_name: {exp_name}\") def", "\"opp_model\": False, \"mem_efficient\": True, \"lr_correction\": 1, \"bs_mul\": 1 / 10,", "= 4000 if full_config['batch_size'] is None else full_config['batch_size'] full_config['lr'] =", "run AsymCoinGame with --exact.\" # Resolve default parameters if full_config['exact']:", "if full_config['gamma'] is None else full_config['gamma'] elif full_config['exp_name'] == \"IMP\":", "as lola_envs import marltoolbox.algos.lola_dice.envs as lola_dice_envs from marltoolbox.algos.lola import train_cg,", "\"AsymCoinGame\": full_config['num_episodes'] = 100000 if full_config['num_episodes'] is None else full_config['num_episodes']", "if full_config['exp_name'] in (\"IPD\", \"CoinGame\", \"AsymCoinGame\"): full_config['gamma'] = 0.96 if", "ray.shutdown() return tune_analysis if __name__ == \"__main__\": debug_mode = True", "\"IMP\", \"AsymCoinGame\"} if full_config['exact']: assert full_config['exp_name'] != \"CoinGame\", \"Can't run", "is None else full_config['gamma'] elif full_config['exp_name'] == \"IMP\": full_config['gamma'] =", "# Sanity assert full_config['exp_name'] in {\"CoinGame\", \"IPD\", \"IMP\", \"AsymCoinGame\"} if", "\"pseudo\": False, \"grid_size\": 3, \"lola_update\": 
True, \"opp_model\": False, \"mem_efficient\": True,", "exact: train_exact.train(env, num_episodes=num_episodes, trace_length=trace_length, simple_net=simple_net, corrections=lola_update, pseudo=pseudo, num_hidden=hidden, reg=reg, lr=lr,", "else None, \"lr\": None, \"gamma\": None, \"batch_size\": 12 if debug", "full_config['exp_name'] != \"CoinGame\", \"Can't run CoinGame with --exact.\" assert full_config['exp_name']", "env.seed(seed) elif exp_name == \"AsymCoinGame\": if use_toolbox_env: env = AsymCoinGame(config={", "150 if full_config['trace_length'] is None else full_config['trace_length'] full_config['batch_size'] = 4000", "full_config['trace_length'] is None else full_config['trace_length'] full_config['batch_size'] = 4000 if full_config['batch_size']", "/ 10, \"simple_net\": True, \"hidden\": 32, \"reg\": 0, \"set_zero\": 0,", "config=tune_config) ray.shutdown() return tune_analysis if __name__ == \"__main__\": debug_mode =", "hidden=hidden, mem_efficient=mem_efficient) elif exp_name in (\"CoinGame\", \"AsymCoinGame\"): train_cg.train(env, num_episodes=num_episodes, trace_length=trace_length,", "\"ac_lr\": 1.0, \"summary_len\": 1, \"use_MAE\": False, \"use_toolbox_env\": True, \"clip_loss_norm\": False,", "# Follow the LOLA installation described in the tune_class_api/lola_pg_official.py file", "from marltoolbox.utils import log def trainer_fn(exp_name, num_episodes, trace_length, exact, pseudo,", "bs_mul=bs_mul, gamma=gamma, grid_size=grid_size, lr=lr, corrections=lola_update, opp_model=opp_model, hidden=hidden, mem_efficient=mem_efficient, asymmetry=exp_name ==", "ray.init(num_cpus=os.cpu_count(), num_gpus=0) tune_analysis = tune.run(lola_training, name=tune_hparams[\"exp_name\"], config=tune_config) ray.shutdown() return tune_analysis", "else full_config['lr'] if full_config['exp_name'] in (\"IPD\", \"CoinGame\", \"AsymCoinGame\"): full_config['gamma'] =", "exact, pseudo, grid_size, lr, lr_correction, batch_size, bs_mul, simple_net, hidden, reg,", "env 
= CoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\": grid_size, \"get_additional_info\":", "changed_config=changed_config, ac_lr=ac_lr, summary_len=summary_len, use_MAE=use_MAE, use_toolbox_env=use_toolbox_env, clip_lola_update_norm=clip_lola_update_norm, clip_loss_norm=clip_loss_norm, entropy_coeff=entropy_coeff, weigth_decay=weigth_decay, )", "full_config['gamma'] return full_config def main(debug): exp_name, _ = log.log_in_current_day_dir(f\"LOLA_PG\") tune_hparams", "True, \"clip_loss_norm\": False, \"clip_lola_update_norm\": False, \"clip_lola_correction_norm\": 3.0, \"clip_lola_actor_norm\": 10.0, \"entropy_coeff\":", "{\"CoinGame\", \"IPD\", \"IMP\", \"AsymCoinGame\"} if full_config['exact']: assert full_config['exp_name'] != \"CoinGame\",", "the environment if exp_name == \"IPD\": env = lola_envs.IPD(trace_length) elif", "= 0.005 if full_config['lr'] is None else full_config['lr'] if full_config['exp_name']", "Resolve default parameters if full_config['exact']: full_config['num_episodes'] = 50 if full_config['num_episodes']", "warmup=warmup, changed_config=changed_config, ac_lr=ac_lr, summary_len=summary_len, use_MAE=use_MAE, use_toolbox_env=use_toolbox_env, clip_lola_update_norm=clip_lola_update_norm, clip_loss_norm=clip_loss_norm, entropy_coeff=entropy_coeff, weigth_decay=weigth_decay,", "default parameters if full_config['exact']: full_config['num_episodes'] = 50 if full_config['num_episodes'] is", "main(debug): exp_name, _ = log.log_in_current_day_dir(f\"LOLA_PG\") tune_hparams = { \"exp_name\": exp_name,", "seed, set_zero, warmup, changed_config, ac_lr, summary_len, use_MAE, use_toolbox_env, clip_lola_update_norm, clip_loss_norm,", "def trainer_fn(exp_name, num_episodes, trace_length, exact, pseudo, grid_size, lr, lr_correction, batch_size,", "\"seed\": 1, \"changed_config\": False, \"ac_lr\": 1.0, \"summary_len\": 1, \"use_MAE\": False,", "env = lola_dice_envs.AsymCG(trace_length, batch_size, grid_size) 
env.seed(seed) else: raise ValueError(f\"exp_name: {exp_name}\")", "simple_net, hidden, reg, gamma, lola_update, opp_model, mem_efficient, seed, set_zero, warmup,", "ValueError(f\"exp_name: {exp_name}\") def lola_training(config): trainer_fn(**config) def get_tune_config(full_config: dict) -> dict:", "the LOLA installation described in the tune_class_api/lola_pg_official.py file ########## import", "if full_config['batch_size'] is None else full_config['batch_size'] full_config['lr'] = 1. if", "is None else full_config['lr'] if full_config['exp_name'] in (\"IPD\", \"CoinGame\", \"AsymCoinGame\"):", "tune_config = get_tune_config(tune_hparams) ray.init(num_cpus=os.cpu_count(), num_gpus=0) tune_analysis = tune.run(lola_training, name=tune_hparams[\"exp_name\"], config=tune_config)", "Sanity assert full_config['exp_name'] in {\"CoinGame\", \"IPD\", \"IMP\", \"AsymCoinGame\"} if full_config['exact']:", "== \"IMP\": full_config['gamma'] = 0.9 if full_config['gamma'] is None else", "full_config['batch_size'] full_config['lr'] = 0.005 if full_config['lr'] is None else full_config['lr']", "dict) -> dict: # Sanity assert full_config['exp_name'] in {\"CoinGame\", \"IPD\",", "1.0, \"summary_len\": 1, \"use_MAE\": False, \"use_toolbox_env\": True, \"clip_loss_norm\": False, \"clip_lola_update_norm\":", "(\"IPD\", \"CoinGame\", \"AsymCoinGame\"): full_config['gamma'] = 0.96 if full_config['gamma'] is None", "exp_name == \"IPD\": env = lola_envs.IPD(trace_length) elif exp_name == \"IMP\":", "if use_toolbox_env: env = AsymCoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\":", "from ray import tune import marltoolbox.algos.lola.envs as lola_envs import marltoolbox.algos.lola_dice.envs", "from marltoolbox.algos.lola import train_cg, train_exact, train_pg from marltoolbox.envs.vectorized_coin_game import CoinGame,", "}) else: env = lola_dice_envs.AsymCG(trace_length, batch_size, grid_size) env.seed(seed) else: raise", "simple_net=simple_net, 
hidden=hidden, mem_efficient=mem_efficient) elif exp_name in (\"CoinGame\", \"AsymCoinGame\"): train_cg.train(env, num_episodes=num_episodes,", "4000 if full_config['batch_size'] is None else full_config['batch_size'] full_config['lr'] = 1.", "\"trace_length\": 6 if debug else None, \"lr\": None, \"gamma\": None,", "dependencies are needed: # Follow the LOLA installation described in", "if full_config['lr'] is None else full_config['lr'] elif full_config['exp_name'] == \"CoinGame\"", "corrections=lola_update, opp_model=opp_model, hidden=hidden, mem_efficient=mem_efficient, asymmetry=exp_name == \"AsymCoinGame\", warmup=warmup, changed_config=changed_config, ac_lr=ac_lr,", "} tune_config = get_tune_config(tune_hparams) ray.init(num_cpus=os.cpu_count(), num_gpus=0) tune_analysis = tune.run(lola_training, name=tune_hparams[\"exp_name\"],", "None else full_config['lr'] elif full_config['exp_name'] == \"CoinGame\" or full_config['exp_name'] ==", "full_config['num_episodes'] full_config['trace_length'] = 200 if full_config['trace_length'] is None else full_config['trace_length']", "10.0, \"entropy_coeff\": 0.001, \"weigth_decay\": 0.03, } tune_config = get_tune_config(tune_hparams) ray.init(num_cpus=os.cpu_count(),", "full_config['gamma'] = 0.9 if full_config['gamma'] is None else full_config['gamma'] return", "0, \"exact\": False, \"warmup\": 1, \"seed\": 1, \"changed_config\": False, \"ac_lr\":", "600000 if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] = 150", "\"changed_config\": False, \"ac_lr\": 1.0, \"summary_len\": 1, \"use_MAE\": False, \"use_toolbox_env\": True,", "\"get_additional_info\": True, \"add_position_in_epi\": False, }) else: env = lola_dice_envs.AsymCG(trace_length, batch_size,", "{\"IPD\", \"IMP\"}: full_config['num_episodes'] = 600000 if full_config['num_episodes'] is None else", "AsymCoinGame from marltoolbox.utils import log def trainer_fn(exp_name, num_episodes, trace_length, exact,", 
"summary_len, use_MAE, use_toolbox_env, clip_lola_update_norm, clip_loss_norm, entropy_coeff, weigth_decay, **kwargs): # Instantiate", "full_config['lr'] elif full_config['exp_name'] == \"CoinGame\" or full_config['exp_name'] == \"AsymCoinGame\": full_config['num_episodes']", "get_tune_config(tune_hparams) ray.init(num_cpus=os.cpu_count(), num_gpus=0) tune_analysis = tune.run(lola_training, name=tune_hparams[\"exp_name\"], config=tune_config) ray.shutdown() return", "reg, gamma, lola_update, opp_model, mem_efficient, seed, set_zero, warmup, changed_config, ac_lr,", "\"Can't run AsymCoinGame with --exact.\" # Resolve default parameters if", "\"AsymCoinGame\"): train_cg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, bs_mul=bs_mul, gamma=gamma, grid_size=grid_size, lr=lr, corrections=lola_update,", "False, \"clip_lola_correction_norm\": 3.0, \"clip_lola_actor_norm\": 10.0, \"entropy_coeff\": 0.001, \"weigth_decay\": 0.03, }", "lr, lr_correction, batch_size, bs_mul, simple_net, hidden, reg, gamma, lola_update, opp_model,", "None, \"batch_size\": 12 if debug else None, # \"exp_name\": \"IPD\",", "# \"exp_name\": \"IPD\", # \"exp_name\": \"IMP\", \"exp_name\": \"CoinGame\", # \"exp_name\":", "entropy_coeff, weigth_decay, **kwargs): # Instantiate the environment if exp_name ==", "full_config['gamma'] is None else full_config['gamma'] elif full_config['exp_name'] == \"IMP\": full_config['gamma']", "\"num_episodes\": 3 if debug else None, \"trace_length\": 6 if debug", "False, }) else: env = lola_dice_envs.AsymCG(trace_length, batch_size, grid_size) env.seed(seed) else:", "tune import marltoolbox.algos.lola.envs as lola_envs import marltoolbox.algos.lola_dice.envs as lola_dice_envs from", "\"add_position_in_epi\": False, }) else: env = lola_dice_envs.AsymCG(trace_length, batch_size, grid_size) env.seed(seed)", "True, \"lr_correction\": 1, \"bs_mul\": 1 / 10, \"simple_net\": True, \"hidden\":", "None else full_config['num_episodes'] 
full_config['trace_length'] = 200 if full_config['trace_length'] is None", "== \"CoinGame\": if use_toolbox_env: env = CoinGame(config={ \"batch_size\": batch_size, \"max_steps\":", "\"clip_loss_norm\": False, \"clip_lola_update_norm\": False, \"clip_lola_correction_norm\": 3.0, \"clip_lola_actor_norm\": 10.0, \"entropy_coeff\": 0.001,", "else full_config['batch_size'] full_config['lr'] = 0.005 if full_config['lr'] is None else", "\"CoinGame\": if use_toolbox_env: env = CoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length,", "# Additional dependencies are needed: # Follow the LOLA installation", "full_config['num_episodes'] = 50 if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length']", "== \"AsymCoinGame\": if use_toolbox_env: env = AsymCoinGame(config={ \"batch_size\": batch_size, \"max_steps\":", "function if exact: train_exact.train(env, num_episodes=num_episodes, trace_length=trace_length, simple_net=simple_net, corrections=lola_update, pseudo=pseudo, num_hidden=hidden,", "if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] = 150 if", "full_config['num_episodes'] full_config['trace_length'] = 150 if full_config['trace_length'] is None else full_config['trace_length']", "ac_lr, summary_len, use_MAE, use_toolbox_env, clip_lola_update_norm, clip_loss_norm, entropy_coeff, weigth_decay, **kwargs): #", "\"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\": grid_size, \"get_additional_info\": True, \"add_position_in_epi\": False,", "Instantiate the environment if exp_name == \"IPD\": env = lola_envs.IPD(trace_length)", "= { \"exp_name\": exp_name, # Dynamically set \"num_episodes\": 3 if", "corrections=lola_update, pseudo=pseudo, num_hidden=hidden, reg=reg, lr=lr, lr_correction=lr_correction, gamma=gamma) elif exp_name in", "num_episodes, trace_length, exact, pseudo, grid_size, lr, lr_correction, batch_size, bs_mul, simple_net,", "100000 
if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] = 150", "== \"AsymCoinGame\": full_config['num_episodes'] = 100000 if full_config['num_episodes'] is None else", "\"Can't run CoinGame with --exact.\" assert full_config['exp_name'] != \"AsymCoinGame\", \"Can't", "batch_size=batch_size, gamma=gamma, set_zero=set_zero, lr=lr, corrections=lola_update, simple_net=simple_net, hidden=hidden, mem_efficient=mem_efficient) elif exp_name", "ValueError(f\"exp_name: {exp_name}\") # Import the right training function if exact:", "warmup, changed_config, ac_lr, summary_len, use_MAE, use_toolbox_env, clip_lola_update_norm, clip_loss_norm, entropy_coeff, weigth_decay,", "== \"AsymCoinGame\", warmup=warmup, changed_config=changed_config, ac_lr=ac_lr, summary_len=summary_len, use_MAE=use_MAE, use_toolbox_env=use_toolbox_env, clip_lola_update_norm=clip_lola_update_norm, clip_loss_norm=clip_loss_norm,", "log def trainer_fn(exp_name, num_episodes, trace_length, exact, pseudo, grid_size, lr, lr_correction,", "opp_model=opp_model, hidden=hidden, mem_efficient=mem_efficient, asymmetry=exp_name == \"AsymCoinGame\", warmup=warmup, changed_config=changed_config, ac_lr=ac_lr, summary_len=summary_len,", "= 200 if full_config['trace_length'] is None else full_config['trace_length'] full_config['lr'] =", "lola_dice_envs.AsymCG(trace_length, batch_size, grid_size) env.seed(seed) else: raise ValueError(f\"exp_name: {exp_name}\") # Import", "entropy_coeff=entropy_coeff, weigth_decay=weigth_decay, ) else: raise ValueError(f\"exp_name: {exp_name}\") def lola_training(config): trainer_fn(**config)", "changed_config, ac_lr, summary_len, use_MAE, use_toolbox_env, clip_lola_update_norm, clip_loss_norm, entropy_coeff, weigth_decay, **kwargs):", "full_config['gamma'] = 0.96 if full_config['gamma'] is None else full_config['gamma'] elif", "use_toolbox_env: env = CoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\": 
grid_size,", "= 150 if full_config['trace_length'] is None else full_config['trace_length'] full_config['batch_size'] =", "-> dict: # Sanity assert full_config['exp_name'] in {\"CoinGame\", \"IPD\", \"IMP\",", "\"IMP\"}: full_config['num_episodes'] = 600000 if full_config['num_episodes'] is None else full_config['num_episodes']", "0, \"set_zero\": 0, \"exact\": False, \"warmup\": 1, \"seed\": 1, \"changed_config\":", "file ########## import os import ray from ray import tune", "in (\"CoinGame\", \"AsymCoinGame\"): train_cg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, bs_mul=bs_mul, gamma=gamma, grid_size=grid_size,", "raise ValueError(f\"exp_name: {exp_name}\") def lola_training(config): trainer_fn(**config) def get_tune_config(full_config: dict) ->", "**kwargs): # Instantiate the environment if exp_name == \"IPD\": env", "import marltoolbox.algos.lola_dice.envs as lola_dice_envs from marltoolbox.algos.lola import train_cg, train_exact, train_pg", "\"exact\": False, \"warmup\": 1, \"seed\": 1, \"changed_config\": False, \"ac_lr\": 1.0,", "Import the right training function if exact: train_exact.train(env, num_episodes=num_episodes, trace_length=trace_length,", "training function if exact: train_exact.train(env, num_episodes=num_episodes, trace_length=trace_length, simple_net=simple_net, corrections=lola_update, pseudo=pseudo,", "gamma=gamma) elif exp_name in (\"IPD\", \"IMP\"): train_pg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size,", "simple_net=simple_net, corrections=lola_update, pseudo=pseudo, num_hidden=hidden, reg=reg, lr=lr, lr_correction=lr_correction, gamma=gamma) elif exp_name", "asymmetry=exp_name == \"AsymCoinGame\", warmup=warmup, changed_config=changed_config, ac_lr=ac_lr, summary_len=summary_len, use_MAE=use_MAE, use_toolbox_env=use_toolbox_env, clip_lola_update_norm=clip_lola_update_norm,", "full_config['gamma'] is None else full_config['gamma'] return full_config 
def main(debug): exp_name,", "= get_tune_config(tune_hparams) ray.init(num_cpus=os.cpu_count(), num_gpus=0) tune_analysis = tune.run(lola_training, name=tune_hparams[\"exp_name\"], config=tune_config) ray.shutdown()", "None else full_config['batch_size'] full_config['lr'] = 0.005 if full_config['lr'] is None", "in {\"CoinGame\", \"IPD\", \"IMP\", \"AsymCoinGame\"} if full_config['exact']: assert full_config['exp_name'] !=", "0.96 if full_config['gamma'] is None else full_config['gamma'] elif full_config['exp_name'] ==", "\"AsymCoinGame\"} if full_config['exact']: assert full_config['exp_name'] != \"CoinGame\", \"Can't run CoinGame", "use_MAE, use_toolbox_env, clip_lola_update_norm, clip_loss_norm, entropy_coeff, weigth_decay, **kwargs): # Instantiate the", "full_config['trace_length'] = 200 if full_config['trace_length'] is None else full_config['trace_length'] full_config['lr']", "mem_efficient=mem_efficient) elif exp_name in (\"CoinGame\", \"AsymCoinGame\"): train_cg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size,", "1, \"bs_mul\": 1 / 10, \"simple_net\": True, \"hidden\": 32, \"reg\":", "full_config['lr'] = 1. if full_config['lr'] is None else full_config['lr'] elif", "lr=lr, corrections=lola_update, simple_net=simple_net, hidden=hidden, mem_efficient=mem_efficient) elif exp_name in (\"CoinGame\", \"AsymCoinGame\"):", "1. 
if full_config['lr'] is None else full_config['lr'] elif full_config['exp_name'] ==", "elif exp_name == \"AsymCoinGame\": if use_toolbox_env: env = AsymCoinGame(config={ \"batch_size\":", "env = AsymCoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\": grid_size, \"get_additional_info\":", "\"AsymCoinGame\", \"Can't run AsymCoinGame with --exact.\" # Resolve default parameters", "exp_name, # Dynamically set \"num_episodes\": 3 if debug else None,", "else full_config['num_episodes'] full_config['trace_length'] = 150 if full_config['trace_length'] is None else", "import os import ray from ray import tune import marltoolbox.algos.lola.envs", "env.seed(seed) else: raise ValueError(f\"exp_name: {exp_name}\") # Import the right training", "= 100000 if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] =", "num_episodes=num_episodes, trace_length=trace_length, simple_net=simple_net, corrections=lola_update, pseudo=pseudo, num_hidden=hidden, reg=reg, lr=lr, lr_correction=lr_correction, gamma=gamma)", "False, \"ac_lr\": 1.0, \"summary_len\": 1, \"use_MAE\": False, \"use_toolbox_env\": True, \"clip_loss_norm\":", "else: raise ValueError(f\"exp_name: {exp_name}\") def lola_training(config): trainer_fn(**config) def get_tune_config(full_config: dict)", "lola_envs import marltoolbox.algos.lola_dice.envs as lola_dice_envs from marltoolbox.algos.lola import train_cg, train_exact,", "\"CoinGame\" or full_config['exp_name'] == \"AsymCoinGame\": full_config['num_episodes'] = 100000 if full_config['num_episodes']", "True, \"add_position_in_epi\": False, }) else: env = lola_dice_envs.CG(trace_length, batch_size, grid_size)", "0.001, \"weigth_decay\": 0.03, } tune_config = get_tune_config(tune_hparams) ray.init(num_cpus=os.cpu_count(), num_gpus=0) tune_analysis", "train_pg from marltoolbox.envs.vectorized_coin_game import CoinGame, AsymCoinGame from marltoolbox.utils import log", "1, \"use_MAE\": False, 
\"use_toolbox_env\": True, \"clip_loss_norm\": False, \"clip_lola_update_norm\": False, \"clip_lola_correction_norm\":", "exp_name == \"AsymCoinGame\": if use_toolbox_env: env = AsymCoinGame(config={ \"batch_size\": batch_size,", "train_pg.train(env, num_episodes=num_episodes, trace_length=trace_length, batch_size=batch_size, gamma=gamma, set_zero=set_zero, lr=lr, corrections=lola_update, simple_net=simple_net, hidden=hidden,", "batch_size=batch_size, bs_mul=bs_mul, gamma=gamma, grid_size=grid_size, lr=lr, corrections=lola_update, opp_model=opp_model, hidden=hidden, mem_efficient=mem_efficient, asymmetry=exp_name", "else full_config['trace_length'] full_config['lr'] = 1. if full_config['lr'] is None else", "= CoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\": grid_size, \"get_additional_info\": True,", "if debug else None, \"lr\": None, \"gamma\": None, \"batch_size\": 12", "False, \"mem_efficient\": True, \"lr_correction\": 1, \"bs_mul\": 1 / 10, \"simple_net\":", "if exp_name == \"IPD\": env = lola_envs.IPD(trace_length) elif exp_name ==", "env = lola_envs.IMP(trace_length) elif exp_name == \"CoinGame\": if use_toolbox_env: env", "bs_mul, simple_net, hidden, reg, gamma, lola_update, opp_model, mem_efficient, seed, set_zero,", "trainer_fn(**config) def get_tune_config(full_config: dict) -> dict: # Sanity assert full_config['exp_name']", "marltoolbox.algos.lola_dice.envs as lola_dice_envs from marltoolbox.algos.lola import train_cg, train_exact, train_pg from", "use_toolbox_env: env = AsymCoinGame(config={ \"batch_size\": batch_size, \"max_steps\": trace_length, \"grid_size\": grid_size,", "# Dynamically set \"num_episodes\": 3 if debug else None, \"trace_length\":", "import ray from ray import tune import marltoolbox.algos.lola.envs as lola_envs", "tune_analysis = tune.run(lola_training, name=tune_hparams[\"exp_name\"], config=tune_config) ray.shutdown() return tune_analysis if __name__", "lola_update, opp_model, 
mem_efficient, seed, set_zero, warmup, changed_config, ac_lr, summary_len, use_MAE,", "if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] = 200 if", "True, \"hidden\": 32, \"reg\": 0, \"set_zero\": 0, \"exact\": False, \"warmup\":", "def get_tune_config(full_config: dict) -> dict: # Sanity assert full_config['exp_name'] in", "is None else full_config['trace_length'] full_config['lr'] = 1. if full_config['lr'] is", "6 if debug else None, \"lr\": None, \"gamma\": None, \"batch_size\":", "from marltoolbox.envs.vectorized_coin_game import CoinGame, AsymCoinGame from marltoolbox.utils import log def", "else: env = lola_dice_envs.CG(trace_length, batch_size, grid_size) env.seed(seed) elif exp_name ==", "clip_lola_update_norm, clip_loss_norm, entropy_coeff, weigth_decay, **kwargs): # Instantiate the environment if", "full_config['batch_size'] is None else full_config['batch_size'] full_config['lr'] = 0.005 if full_config['lr']", "= lola_envs.IMP(trace_length) elif exp_name == \"CoinGame\": if use_toolbox_env: env =", "\"IPD\", \"IMP\", \"AsymCoinGame\"} if full_config['exact']: assert full_config['exp_name'] != \"CoinGame\", \"Can't", "\"set_zero\": 0, \"exact\": False, \"warmup\": 1, \"seed\": 1, \"changed_config\": False,", "full_config['lr'] is None else full_config['lr'] elif full_config['exp_name'] in {\"IPD\", \"IMP\"}:", "10, \"simple_net\": True, \"hidden\": 32, \"reg\": 0, \"set_zero\": 0, \"exact\":", "50 if full_config['num_episodes'] is None else full_config['num_episodes'] full_config['trace_length'] = 200", "full_config['batch_size'] = 4000 if full_config['batch_size'] is None else full_config['batch_size'] full_config['lr']", "== \"IPD\": env = lola_envs.IPD(trace_length) elif exp_name == \"IMP\": env", "ray import tune import marltoolbox.algos.lola.envs as lola_envs import marltoolbox.algos.lola_dice.envs as", "grid_size, lr, lr_correction, batch_size, bs_mul, simple_net, hidden, reg, gamma, lola_update," ]
[ "data, ready for passing into AmCharts. Contains 2 items -", "''' Return json format data, ready for passing into AmCharts.", "= instance.__class__ query_set = class_.objects.filter(shortlink=new_link) if query_set.exists(): return create_shortlink() return", "next year it will add results above result = []", "if query_set.exists(): return create_shortlink() return new_link def json_data_func(instance): ''' Return", "# FIXME. The problem is every next year it will", "format data, ready for passing into AmCharts. Contains 2 items", "range(1, len(month_name)): count_use = class_.objects.filter(pub_date__month=month).count() data = dict(month=month_name[month], count=count_use) result.append(data)", "json format data, ready for passing into AmCharts. Contains 2", "chars = string.ascii_letters + string.digits return ''.join(random.choice(chars) for _ in", "def json_data_func(instance): ''' Return json format data, ready for passing", "calendar import month_name from django.conf import settings SHORTLINK_MIN = getattr(settings,", "import month_name from django.conf import settings SHORTLINK_MIN = getattr(settings, \"SHORTLINK_MIN\",", "6) def code_generator(size=SHORTLINK_MIN): chars = string.ascii_letters + string.digits return ''.join(random.choice(chars)", "return new_link def json_data_func(instance): ''' Return json format data, ready", "class_.objects.filter(pub_date__month=month).count() data = dict(month=month_name[month], count=count_use) result.append(data) json_data = json.dumps(result) return", "AmCharts. Contains 2 items - name of the month and", "in range(1, len(month_name)): count_use = class_.objects.filter(pub_date__month=month).count() data = dict(month=month_name[month], count=count_use)", "class_ = instance.__class__ # FIXME. 
The problem is every next", "new_link = code_generator() class_ = instance.__class__ query_set = class_.objects.filter(shortlink=new_link) if", "= class_.objects.filter(shortlink=new_link) if query_set.exists(): return create_shortlink() return new_link def json_data_func(instance):", "the website. ''' class_ = instance.__class__ # FIXME. The problem", "month and count of distinct links, which were cut on", "above result = [] for month in range(1, len(month_name)): count_use", "def code_generator(size=SHORTLINK_MIN): chars = string.ascii_letters + string.digits return ''.join(random.choice(chars) for", "= string.ascii_letters + string.digits return ''.join(random.choice(chars) for _ in range(size))", "which were cut on the website. ''' class_ = instance.__class__", "= class_.objects.filter(pub_date__month=month).count() data = dict(month=month_name[month], count=count_use) result.append(data) json_data = json.dumps(result)", "data = dict(month=month_name[month], count=count_use) result.append(data) json_data = json.dumps(result) return json_data", "for month in range(1, len(month_name)): count_use = class_.objects.filter(pub_date__month=month).count() data =", "settings SHORTLINK_MIN = getattr(settings, \"SHORTLINK_MIN\", 6) def code_generator(size=SHORTLINK_MIN): chars =", "The problem is every next year it will add results", "django.conf import settings SHORTLINK_MIN = getattr(settings, \"SHORTLINK_MIN\", 6) def code_generator(size=SHORTLINK_MIN):", "create_shortlink(instance): new_link = code_generator() class_ = instance.__class__ query_set = class_.objects.filter(shortlink=new_link)", "name of the month and count of distinct links, which", "for passing into AmCharts. Contains 2 items - name of", "<gh_stars>0 import string import random import json from calendar import", "distinct links, which were cut on the website. 
''' class_", "- name of the month and count of distinct links,", "new_link def json_data_func(instance): ''' Return json format data, ready for", "were cut on the website. ''' class_ = instance.__class__ #", "class_.objects.filter(shortlink=new_link) if query_set.exists(): return create_shortlink() return new_link def json_data_func(instance): '''", "\"SHORTLINK_MIN\", 6) def code_generator(size=SHORTLINK_MIN): chars = string.ascii_letters + string.digits return", "= getattr(settings, \"SHORTLINK_MIN\", 6) def code_generator(size=SHORTLINK_MIN): chars = string.ascii_letters +", "_ in range(size)) def create_shortlink(instance): new_link = code_generator() class_ =", "class_ = instance.__class__ query_set = class_.objects.filter(shortlink=new_link) if query_set.exists(): return create_shortlink()", "[] for month in range(1, len(month_name)): count_use = class_.objects.filter(pub_date__month=month).count() data", "import json from calendar import month_name from django.conf import settings", "Contains 2 items - name of the month and count", "instance.__class__ query_set = class_.objects.filter(shortlink=new_link) if query_set.exists(): return create_shortlink() return new_link", "and count of distinct links, which were cut on the", "= [] for month in range(1, len(month_name)): count_use = class_.objects.filter(pub_date__month=month).count()", "import random import json from calendar import month_name from django.conf", "string import random import json from calendar import month_name from", "query_set.exists(): return create_shortlink() return new_link def json_data_func(instance): ''' Return json", "getattr(settings, \"SHORTLINK_MIN\", 6) def code_generator(size=SHORTLINK_MIN): chars = string.ascii_letters + string.digits", "count of distinct links, which were cut on the website.", "add results above result = [] for month in range(1,", "FIXME. 
The problem is every next year it will add", "import settings SHORTLINK_MIN = getattr(settings, \"SHORTLINK_MIN\", 6) def code_generator(size=SHORTLINK_MIN): chars", "every next year it will add results above result =", "month_name from django.conf import settings SHORTLINK_MIN = getattr(settings, \"SHORTLINK_MIN\", 6)", "for _ in range(size)) def create_shortlink(instance): new_link = code_generator() class_", "string.ascii_letters + string.digits return ''.join(random.choice(chars) for _ in range(size)) def", "instance.__class__ # FIXME. The problem is every next year it", "json from calendar import month_name from django.conf import settings SHORTLINK_MIN", "the month and count of distinct links, which were cut", "from calendar import month_name from django.conf import settings SHORTLINK_MIN =", "website. ''' class_ = instance.__class__ # FIXME. The problem is", "SHORTLINK_MIN = getattr(settings, \"SHORTLINK_MIN\", 6) def code_generator(size=SHORTLINK_MIN): chars = string.ascii_letters", "result = [] for month in range(1, len(month_name)): count_use =", "it will add results above result = [] for month", "of distinct links, which were cut on the website. '''", "import string import random import json from calendar import month_name", "Return json format data, ready for passing into AmCharts. Contains", "2 items - name of the month and count of", "links, which were cut on the website. ''' class_ =", "query_set = class_.objects.filter(shortlink=new_link) if query_set.exists(): return create_shortlink() return new_link def", "cut on the website. 
''' class_ = instance.__class__ # FIXME.", "in range(size)) def create_shortlink(instance): new_link = code_generator() class_ = instance.__class__", "is every next year it will add results above result", "range(size)) def create_shortlink(instance): new_link = code_generator() class_ = instance.__class__ query_set", "items - name of the month and count of distinct", "string.digits return ''.join(random.choice(chars) for _ in range(size)) def create_shortlink(instance): new_link", "create_shortlink() return new_link def json_data_func(instance): ''' Return json format data,", "return ''.join(random.choice(chars) for _ in range(size)) def create_shortlink(instance): new_link =", "= code_generator() class_ = instance.__class__ query_set = class_.objects.filter(shortlink=new_link) if query_set.exists():", "json_data_func(instance): ''' Return json format data, ready for passing into", "into AmCharts. Contains 2 items - name of the month", "''' class_ = instance.__class__ # FIXME. The problem is every", "will add results above result = [] for month in", "= instance.__class__ # FIXME. The problem is every next year", "count_use = class_.objects.filter(pub_date__month=month).count() data = dict(month=month_name[month], count=count_use) result.append(data) json_data =", "return create_shortlink() return new_link def json_data_func(instance): ''' Return json format", "year it will add results above result = [] for", "ready for passing into AmCharts. Contains 2 items - name", "passing into AmCharts. 
Contains 2 items - name of the", "+ string.digits return ''.join(random.choice(chars) for _ in range(size)) def create_shortlink(instance):", "from django.conf import settings SHORTLINK_MIN = getattr(settings, \"SHORTLINK_MIN\", 6) def", "code_generator() class_ = instance.__class__ query_set = class_.objects.filter(shortlink=new_link) if query_set.exists(): return", "of the month and count of distinct links, which were", "def create_shortlink(instance): new_link = code_generator() class_ = instance.__class__ query_set =", "month in range(1, len(month_name)): count_use = class_.objects.filter(pub_date__month=month).count() data = dict(month=month_name[month],", "''.join(random.choice(chars) for _ in range(size)) def create_shortlink(instance): new_link = code_generator()", "problem is every next year it will add results above", "len(month_name)): count_use = class_.objects.filter(pub_date__month=month).count() data = dict(month=month_name[month], count=count_use) result.append(data) json_data", "random import json from calendar import month_name from django.conf import", "on the website. ''' class_ = instance.__class__ # FIXME. The", "results above result = [] for month in range(1, len(month_name)):", "code_generator(size=SHORTLINK_MIN): chars = string.ascii_letters + string.digits return ''.join(random.choice(chars) for _" ]
[ "'server:main', 'port' ): port = config_parser.get( 'server:main', 'port' ) host", "else: exit(0) exit(0) def admin_user_info( config_parser ): user_info_config = os.path.abspath(", ") ) tree, error_message = xml_util.parse_xml( user_info_config ) if tree", ") else: return 1 if args.method == 'check_db': return check_db(", "password = '<PASSWORD>' username = 'admin' else: root = tree.getroot()", "so bootstrapping is not allowed. Delete\" message += \" the", "return get_local_tool_shed_url( config_parser ) else: return 1 parser = argparse.ArgumentParser()", "except OperationalError, e: pass try: if sa_session is not None:", "= '<EMAIL>' password = '<PASSWORD>' username = 'admin' else: root", ">= 0.4\" ) import galaxy.webapps.tool_shed.model.mapping as tool_shed_model from sqlalchemy.exc import", "xml_util def check_db( config_parser ): dburi = None if config_parser.has_option(", ") hgweb_config_file = os.path.join( hgweb_dir, 'hgweb.config' ) if not os.path.exists(", "if result[0] >= 2: print database_exists_message exit(1) else: pass except", "exit(0) else: exit(0) exit(0) def admin_user_info( config_parser ): user_info_config =", "configured_repos = hgweb_config_parser.items( 'paths' ) if len( configured_repos ) >=", "seems to be invalid, using defaults.\" email = '<EMAIL>' password", "= new_path from galaxy import eggs eggs.require( \"SQLAlchemy >= 0.4\"", "sys.path[1:] ) sys.path = new_path from galaxy import eggs eggs.require(", "dburi = \"sqlite:///%s?isolation_level=IMMEDIATE\" % db_file else: print 'The database configuration", "import sys new_path = [ os.path.join( os.getcwd(), \"lib\" ) ]", "result[0] >= 2: print database_exists_message exit(1) else: pass except ProgrammingError,", "'Create a new database that has not been migrated before", "\"sqlite:///%s?isolation_level=IMMEDIATE\" % db_file else: print 'The database configuration setting is", "'<EMAIL>' password = '<PASSWORD>' username = 'admin' else: root =", "if config_parser.has_option( 'app:main', 
'hgweb_config_dir' ): hgweb_config_parser = ConfigParser.ConfigParser() hgweb_dir =", "def admin_user_info( config_parser ): user_info_config = os.path.abspath( os.path.join( os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed',", "'app:main', 'hgweb_config_dir' ) hgweb_config_file = os.path.join( hgweb_dir, 'hgweb.config' ) if", "= elem.text print '%s__SEP__%s__SEP__%s' % ( username, email, password )", "config_parser = ConfigParser.ConfigParser() if os.path.exists( args.config ): config_parser.read( args.config )", ") configured_repos = hgweb_config_parser.items( 'paths' ) if len( configured_repos )", "pass if config_parser.has_option( 'app:main', 'hgweb_config_dir' ): hgweb_config_parser = ConfigParser.ConfigParser() hgweb_dir", "os.path.abspath( os.path.join( os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' ) ) tree, error_message =", "args = parser.parse_args() if __name__ == '__main__': exit( main( args", "bootstrap.' exit(1) sa_session = None database_exists_message = 'The database configured", "tool_shed_model from sqlalchemy.exc import ProgrammingError from sqlalchemy.exc import OperationalError from", "xml_util.parse_xml( user_info_config ) if tree is None: print \"The XML", "): config_parser.read( args.config ) else: return 1 if args.method ==", "\", user_info_config, \" seems to be invalid, using defaults.\" email", "default='config/tool_shed.ini.sample' ) parser.add_argument( '-e', '--execute', dest='method', action='store', default='check_db' ) args", "= argparse.ArgumentParser() parser.add_argument( '-c', '--config_file', dest='config', action='store', default='config/tool_shed.ini.sample' ) parser.add_argument(", "check_db( config_parser ) elif args.method == 'admin_user_info': return admin_user_info( config_parser", "else: pass except ProgrammingError, e: pass if config_parser.has_option( 'app:main', 'hgweb_config_dir'", "a new database that has not been migrated before attempting", "print database_exists_message 
exit(1) else: pass except ProgrammingError, e: pass if", ") tree, error_message = xml_util.parse_xml( user_info_config ) if tree is", "eggs.require( \"SQLAlchemy >= 0.4\" ) import galaxy.webapps.tool_shed.model.mapping as tool_shed_model from", "'-c', '--config_file', dest='config', action='store', default='config/tool_shed.ini.sample' ) parser.add_argument( '-e', '--execute', dest='method',", "email, password ) return 0 def get_local_tool_shed_url( config_parser ): port", "'%s__SEP__%s__SEP__%s' % ( username, email, password ) return 0 def", "= os.path.join( hgweb_dir, 'hgweb.config' ) if not os.path.exists( hgweb_config_file ):", "dest='config', action='store', default='config/tool_shed.ini.sample' ) parser.add_argument( '-e', '--execute', dest='method', action='store', default='check_db'", "elem.text elif elem.tag == 'password': password = elem.text elif elem.tag", "along with all associated repositories in the configured \" message", "print database_exists_message exit(1) except ProgrammingError, e: pass except OperationalError, e:", "associated repositories in the configured \" message += \"location before", ") if not os.path.exists( hgweb_config_file ): exit(0) hgweb_config_parser.read( hgweb_config_file )", "'The database configuration setting is missing from the tool_shed.ini file.", "0 def main( args ): config_parser = ConfigParser.ConfigParser() if os.path.exists(", "None database_exists_message = 'The database configured for this Tool Shed", "'hgweb_config_dir' ): hgweb_config_parser = ConfigParser.ConfigParser() hgweb_dir = config_parser.get( 'app:main', 'hgweb_config_dir'", "= 'The database configured for this Tool Shed is not", "elem.tag == 'password': password = elem.text elif elem.tag == 'username':", "None: print \"The XML file \", user_info_config, \" seems to", "= [ os.path.join( os.getcwd(), \"lib\" ) ] new_path.extend( sys.path[1:] )", "'password': password = elem.text elif elem.tag == 'username': username =", "): dburi = None if 
config_parser.has_option( 'app:main', 'database_connection' ): dburi", "boostrap.\" print exit(1) else: exit(0) else: exit(0) exit(0) def admin_user_info(", "import galaxy.webapps.tool_shed.model.mapping as tool_shed_model from sqlalchemy.exc import ProgrammingError from sqlalchemy.exc", "os.path.join( os.getcwd(), \"lib\" ) ] new_path.extend( sys.path[1:] ) sys.path =", "'paths' ) if len( configured_repos ) >= 1: message =", "'The database configured for this Tool Shed is not new,", "file along with all associated repositories in the configured \"", "port = config_parser.get( 'server:main', 'port' ) host = '127.0.0.1' print", "import ProgrammingError from sqlalchemy.exc import OperationalError from tool_shed.util import xml_util", "'<PASSWORD>' username = 'admin' else: root = tree.getroot() for elem", "db_file else: print 'The database configuration setting is missing from", "print \"The XML file \", user_info_config, \" seems to be", "database_exists_message exit(1) else: pass except ProgrammingError, e: pass if config_parser.has_option(", "in root: if elem.tag == 'email': email = elem.text elif", "message += \"location before attempting to boostrap.\" print exit(1) else:", "current hgweb.config file along with all associated repositories in the", "elif config_parser.has_option( 'app:main', 'database_file' ): db_file = config_parser.get( 'app:main', 'database_file'", "as tool_shed_model from sqlalchemy.exc import ProgrammingError from sqlalchemy.exc import OperationalError", "hgweb_config_parser.read( hgweb_config_file ) configured_repos = hgweb_config_parser.items( 'paths' ) if len(", "None: result = sa_session.execute( 'SELECT version FROM migrate_version' ).first() if", "== 'password': password = elem.text elif elem.tag == 'username': username", "config_parser.get( 'app:main', 'file_path' ), dburi, engine_options={}, create_tables=False ) sa_session =", "except ProgrammingError, e: pass except OperationalError, e: pass try: if", "'--execute', dest='method', 
action='store', default='check_db' ) args = parser.parse_args() if __name__", "'app:main', 'database_connection' ): dburi = config_parser.get( 'app:main', 'database_connection' ) elif", ") dburi = \"sqlite:///%s?isolation_level=IMMEDIATE\" % db_file else: print 'The database", "ConfigParser.ConfigParser() if os.path.exists( args.config ): config_parser.read( args.config ) else: return", "not allowed. ' database_exists_message += 'Create a new database that", "using defaults.\" email = '<EMAIL>' password = '<PASSWORD>' username =", "ProgrammingError, e: pass except OperationalError, e: pass try: if sa_session", "from tool_shed.util import xml_util def check_db( config_parser ): dburi =", "'username': username = elem.text print '%s__SEP__%s__SEP__%s' % ( username, email,", "0.4\" ) import galaxy.webapps.tool_shed.model.mapping as tool_shed_model from sqlalchemy.exc import ProgrammingError", "attempting to bootstrap.' exit(1) sa_session = None database_exists_message = 'The", "model = tool_shed_model.init( config_parser.get( 'app:main', 'file_path' ), dburi, engine_options={}, create_tables=False", "sqlalchemy.exc import OperationalError from tool_shed.util import xml_util def check_db( config_parser", "import OperationalError from tool_shed.util import xml_util def check_db( config_parser ):", "is None: print \"The XML file \", user_info_config, \" seems", "with all associated repositories in the configured \" message +=", "Tool Shed is not new, so bootstrapping is not allowed.", "if not os.path.exists( hgweb_config_file ): exit(0) hgweb_config_parser.read( hgweb_config_file ) configured_repos", "port = '9009' if config_parser.has_section( 'server:main' ): if config_parser.has_option( 'server:main',", "config_parser ): dburi = None if config_parser.has_option( 'app:main', 'database_connection' ):", "not os.path.exists( hgweb_config_file ): exit(0) hgweb_config_parser.read( hgweb_config_file ) configured_repos =", "[ os.path.join( os.getcwd(), \"lib\" ) ] 
new_path.extend( sys.path[1:] ) sys.path", "'app:main', 'database_file' ): db_file = config_parser.get( 'app:main', 'database_file' ) dburi", "#!/usr/bin/python import argparse import ConfigParser import os import sys new_path", "migrate_version' ).first() if result[0] >= 2: print database_exists_message exit(1) else:", "config_parser ) elif args.method == 'admin_user_info': return admin_user_info( config_parser )", "elif args.method == 'get_url': return get_local_tool_shed_url( config_parser ) else: return", "def get_local_tool_shed_url( config_parser ): port = '9009' if config_parser.has_section( 'server:main'", "missing from the tool_shed.ini file. Add this setting before attempting", "= \"This Tool Shed's hgweb.config file contains entries, so bootstrapping", "= config_parser.get( 'app:main', 'database_file' ) dburi = \"sqlite:///%s?isolation_level=IMMEDIATE\" % db_file", "Shed is not new, so bootstrapping is not allowed. '", "): hgweb_config_parser = ConfigParser.ConfigParser() hgweb_dir = config_parser.get( 'app:main', 'hgweb_config_dir' )", "if args.method == 'check_db': return check_db( config_parser ) elif args.method", "def check_db( config_parser ): dburi = None if config_parser.has_option( 'app:main',", "before attempting to boostrap.\" print exit(1) else: exit(0) else: exit(0)", "elem in root: if elem.tag == 'email': email = elem.text", "exit(1) else: pass except ProgrammingError, e: pass if config_parser.has_option( 'app:main',", "'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' ) ) tree, error_message = xml_util.parse_xml( user_info_config )", "ConfigParser import os import sys new_path = [ os.path.join( os.getcwd(),", "argparse.ArgumentParser() parser.add_argument( '-c', '--config_file', dest='config', action='store', default='config/tool_shed.ini.sample' ) parser.add_argument( '-e',", "= \"sqlite:///%s?isolation_level=IMMEDIATE\" % db_file else: print 'The database configuration setting", "has not been migrated before attempting to 
bootstrap.' try: model", ") >= 1: message = \"This Tool Shed's hgweb.config file", "not new, so bootstrapping is not allowed. ' database_exists_message +=", "\" message += \"location before attempting to boostrap.\" print exit(1)", "db_file = config_parser.get( 'app:main', 'database_file' ) dburi = \"sqlite:///%s?isolation_level=IMMEDIATE\" %", "sa_session = None database_exists_message = 'The database configured for this", "elif args.method == 'admin_user_info': return admin_user_info( config_parser ) elif args.method", "= tree.getroot() for elem in root: if elem.tag == 'email':", "): exit(0) hgweb_config_parser.read( hgweb_config_file ) configured_repos = hgweb_config_parser.items( 'paths' )", "args.method == 'admin_user_info': return admin_user_info( config_parser ) elif args.method ==", "== 'check_db': return check_db( config_parser ) elif args.method == 'admin_user_info':", "configured for this Tool Shed is not new, so bootstrapping", "+= \"location before attempting to boostrap.\" print exit(1) else: exit(0)", "= None database_exists_message = 'The database configured for this Tool", "username, email, password ) return 0 def get_local_tool_shed_url( config_parser ):", "config_parser.read( args.config ) else: return 1 if args.method == 'check_db':", "hgweb_dir, 'hgweb.config' ) if not os.path.exists( hgweb_config_file ): exit(0) hgweb_config_parser.read(", "% ( host, port ) return 0 def main( args", "'file_path' ), dburi, engine_options={}, create_tables=False ) sa_session = model.context.current print", "'check_db': return check_db( config_parser ) elif args.method == 'admin_user_info': return", "hgweb.config file contains entries, so bootstrapping is not allowed. 
Delete\"", "new_path = [ os.path.join( os.getcwd(), \"lib\" ) ] new_path.extend( sys.path[1:]", "config_parser ) else: return 1 parser = argparse.ArgumentParser() parser.add_argument( '-c',", "\"The XML file \", user_info_config, \" seems to be invalid,", "import xml_util def check_db( config_parser ): dburi = None if", "( username, email, password ) return 0 def get_local_tool_shed_url( config_parser", "tree, error_message = xml_util.parse_xml( user_info_config ) if tree is None:", "file contains entries, so bootstrapping is not allowed. Delete\" message", "\"lib\" ) ] new_path.extend( sys.path[1:] ) sys.path = new_path from", "all associated repositories in the configured \" message += \"location", "args.config ): config_parser.read( args.config ) else: return 1 if args.method", "'9009' if config_parser.has_section( 'server:main' ): if config_parser.has_option( 'server:main', 'port' ):", "config_parser.get( 'server:main', 'port' ) host = '127.0.0.1' print 'http://%s:%s' %", "username = elem.text print '%s__SEP__%s__SEP__%s' % ( username, email, password", "been migrated before attempting to bootstrap.' try: model = tool_shed_model.init(", "'database_connection' ): dburi = config_parser.get( 'app:main', 'database_connection' ) elif config_parser.has_option(", "hgweb_dir = config_parser.get( 'app:main', 'hgweb_config_dir' ) hgweb_config_file = os.path.join( hgweb_dir,", "Delete\" message += \" the current hgweb.config file along with", "= parser.parse_args() if __name__ == '__main__': exit( main( args )", "that has not been migrated before attempting to bootstrap.' try:", "exit(0) def admin_user_info( config_parser ): user_info_config = os.path.abspath( os.path.join( os.getcwd(),", "args ): config_parser = ConfigParser.ConfigParser() if os.path.exists( args.config ): config_parser.read(", "contains entries, so bootstrapping is not allowed. 
Delete\" message +=", "= sa_session.execute( 'SELECT version FROM migrate_version' ).first() if result[0] >=", "get_local_tool_shed_url( config_parser ): port = '9009' if config_parser.has_section( 'server:main' ):", "'--config_file', dest='config', action='store', default='config/tool_shed.ini.sample' ) parser.add_argument( '-e', '--execute', dest='method', action='store',", ") return 0 def get_local_tool_shed_url( config_parser ): port = '9009'", "'SELECT version FROM migrate_version' ).first() if result[0] >= 2: print", "pass except OperationalError, e: pass try: if sa_session is not", "' database_exists_message += 'Create a new database that has not", "file \", user_info_config, \" seems to be invalid, using defaults.\"", "password ) return 0 def get_local_tool_shed_url( config_parser ): port =", "not allowed. Delete\" message += \" the current hgweb.config file", "new, so bootstrapping is not allowed. ' database_exists_message += 'Create", "new_path.extend( sys.path[1:] ) sys.path = new_path from galaxy import eggs", "hgweb_config_file = os.path.join( hgweb_dir, 'hgweb.config' ) if not os.path.exists( hgweb_config_file", "exit(1) sa_session = None database_exists_message = 'The database configured for", "'app:main', 'database_connection' ) elif config_parser.has_option( 'app:main', 'database_file' ): db_file =", "config_parser.has_option( 'app:main', 'database_file' ): db_file = config_parser.get( 'app:main', 'database_file' )", ") if tree is None: print \"The XML file \",", "= '<PASSWORD>' username = 'admin' else: root = tree.getroot() for", ">= 1: message = \"This Tool Shed's hgweb.config file contains", "setting before attempting to bootstrap.' 
exit(1) sa_session = None database_exists_message", "if len( configured_repos ) >= 1: message = \"This Tool", "= config_parser.get( 'app:main', 'database_connection' ) elif config_parser.has_option( 'app:main', 'database_file' ):", "invalid, using defaults.\" email = '<EMAIL>' password = '<PASSWORD>' username", "elem.tag == 'username': username = elem.text print '%s__SEP__%s__SEP__%s' % (", "configured_repos ) >= 1: message = \"This Tool Shed's hgweb.config", "setting is missing from the tool_shed.ini file. Add this setting", "): if config_parser.has_option( 'server:main', 'port' ): port = config_parser.get( 'server:main',", "sys.path = new_path from galaxy import eggs eggs.require( \"SQLAlchemy >=", "parser.add_argument( '-e', '--execute', dest='method', action='store', default='check_db' ) args = parser.parse_args()", "the tool_shed.ini file. Add this setting before attempting to bootstrap.'", "): db_file = config_parser.get( 'app:main', 'database_file' ) dburi = \"sqlite:///%s?isolation_level=IMMEDIATE\"", "'get_url': return get_local_tool_shed_url( config_parser ) else: return 1 parser =", "def main( args ): config_parser = ConfigParser.ConfigParser() if os.path.exists( args.config", "\" the current hgweb.config file along with all associated repositories", "e: pass try: if sa_session is not None: result =", "args.method == 'check_db': return check_db( config_parser ) elif args.method ==", "elif elem.tag == 'username': username = elem.text print '%s__SEP__%s__SEP__%s' %", "hgweb.config file along with all associated repositories in the configured", "hgweb_config_file ): exit(0) hgweb_config_parser.read( hgweb_config_file ) configured_repos = hgweb_config_parser.items( 'paths'", "'-e', '--execute', dest='method', action='store', default='check_db' ) args = parser.parse_args() if", "parser.add_argument( '-c', '--config_file', dest='config', action='store', default='config/tool_shed.ini.sample' ) parser.add_argument( '-e', '--execute',", "to boostrap.\" print 
exit(1) else: exit(0) else: exit(0) exit(0) def", "), dburi, engine_options={}, create_tables=False ) sa_session = model.context.current print database_exists_message", "allowed. Delete\" message += \" the current hgweb.config file along", "'admin_user_info': return admin_user_info( config_parser ) elif args.method == 'get_url': return", "import ConfigParser import os import sys new_path = [ os.path.join(", "dburi = None if config_parser.has_option( 'app:main', 'database_connection' ): dburi =", "= model.context.current print database_exists_message exit(1) except ProgrammingError, e: pass except", "if config_parser.has_option( 'app:main', 'database_connection' ): dburi = config_parser.get( 'app:main', 'database_connection'", "config_parser.has_section( 'server:main' ): if config_parser.has_option( 'server:main', 'port' ): port =", "result = sa_session.execute( 'SELECT version FROM migrate_version' ).first() if result[0]", "= 'admin' else: root = tree.getroot() for elem in root:", "tool_shed.util import xml_util def check_db( config_parser ): dburi = None", "'server:main', 'port' ) host = '127.0.0.1' print 'http://%s:%s' % (", "FROM migrate_version' ).first() if result[0] >= 2: print database_exists_message exit(1)", "else: print 'The database configuration setting is missing from the", "tree is None: print \"The XML file \", user_info_config, \"", "not None: result = sa_session.execute( 'SELECT version FROM migrate_version' ).first()", "hgweb_config_file ) configured_repos = hgweb_config_parser.items( 'paths' ) if len( configured_repos", "action='store', default='check_db' ) args = parser.parse_args() if __name__ == '__main__':", "'server:main' ): if config_parser.has_option( 'server:main', 'port' ): port = config_parser.get(", "= ConfigParser.ConfigParser() if os.path.exists( args.config ): config_parser.read( args.config ) else:", "database_exists_message += 'Create a new database that has not been", "to bootstrap.' 
exit(1) sa_session = None database_exists_message = 'The database", "'database_connection' ) elif config_parser.has_option( 'app:main', 'database_file' ): db_file = config_parser.get(", "configured \" message += \"location before attempting to boostrap.\" print", "sa_session = model.context.current print database_exists_message exit(1) except ProgrammingError, e: pass", "config_parser ) elif args.method == 'get_url': return get_local_tool_shed_url( config_parser )", "try: if sa_session is not None: result = sa_session.execute( 'SELECT", "attempting to boostrap.\" print exit(1) else: exit(0) else: exit(0) exit(0)", "database configured for this Tool Shed is not new, so", "'email': email = elem.text elif elem.tag == 'password': password =", "os.path.join( os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' ) ) tree, error_message = xml_util.parse_xml(", "Shed's hgweb.config file contains entries, so bootstrapping is not allowed.", "is not new, so bootstrapping is not allowed. ' database_exists_message", "if sa_session is not None: result = sa_session.execute( 'SELECT version", "0 def get_local_tool_shed_url( config_parser ): port = '9009' if config_parser.has_section(", "): port = config_parser.get( 'server:main', 'port' ) host = '127.0.0.1'", ").first() if result[0] >= 2: print database_exists_message exit(1) else: pass", "Add this setting before attempting to bootstrap.' exit(1) sa_session =", "version FROM migrate_version' ).first() if result[0] >= 2: print database_exists_message", "from the tool_shed.ini file. Add this setting before attempting to", "bootstrapping is not allowed. Delete\" message += \" the current", "exit(0) exit(0) def admin_user_info( config_parser ): user_info_config = os.path.abspath( os.path.join(", "host, port ) return 0 def main( args ): config_parser", ") elif config_parser.has_option( 'app:main', 'database_file' ): db_file = config_parser.get( 'app:main',", "is not allowed. 
' database_exists_message += 'Create a new database", "2: print database_exists_message exit(1) else: pass except ProgrammingError, e: pass", "'database_file' ): db_file = config_parser.get( 'app:main', 'database_file' ) dburi =", "= hgweb_config_parser.items( 'paths' ) if len( configured_repos ) >= 1:", "'database_file' ) dburi = \"sqlite:///%s?isolation_level=IMMEDIATE\" % db_file else: print 'The", "'admin' else: root = tree.getroot() for elem in root: if", "if config_parser.has_option( 'server:main', 'port' ): port = config_parser.get( 'server:main', 'port'", "return 1 if args.method == 'check_db': return check_db( config_parser )", "1 if args.method == 'check_db': return check_db( config_parser ) elif", "): config_parser = ConfigParser.ConfigParser() if os.path.exists( args.config ): config_parser.read( args.config", "= tool_shed_model.init( config_parser.get( 'app:main', 'file_path' ), dburi, engine_options={}, create_tables=False )", "is missing from the tool_shed.ini file. Add this setting before", "config_parser.get( 'app:main', 'database_connection' ) elif config_parser.has_option( 'app:main', 'database_file' ): db_file", "tree.getroot() for elem in root: if elem.tag == 'email': email", "if tree is None: print \"The XML file \", user_info_config,", "eggs eggs.require( \"SQLAlchemy >= 0.4\" ) import galaxy.webapps.tool_shed.model.mapping as tool_shed_model", "user_info_config ) if tree is None: print \"The XML file", ") import galaxy.webapps.tool_shed.model.mapping as tool_shed_model from sqlalchemy.exc import ProgrammingError from", "bootstrapping is not allowed. 
' database_exists_message += 'Create a new", ") ] new_path.extend( sys.path[1:] ) sys.path = new_path from galaxy", "pass try: if sa_session is not None: result = sa_session.execute(", "config_parser.get( 'app:main', 'hgweb_config_dir' ) hgweb_config_file = os.path.join( hgweb_dir, 'hgweb.config' )", "else: return 1 parser = argparse.ArgumentParser() parser.add_argument( '-c', '--config_file', dest='config',", "'app:main', 'database_file' ) dburi = \"sqlite:///%s?isolation_level=IMMEDIATE\" % db_file else: print", "this Tool Shed is not new, so bootstrapping is not", ") sa_session = model.context.current print database_exists_message exit(1) except ProgrammingError, e:", "attempting to bootstrap.' try: model = tool_shed_model.init( config_parser.get( 'app:main', 'file_path'", "parser.parse_args() if __name__ == '__main__': exit( main( args ) )", ") if len( configured_repos ) >= 1: message = \"This", "database_exists_message = 'The database configured for this Tool Shed is", "ProgrammingError, e: pass if config_parser.has_option( 'app:main', 'hgweb_config_dir' ): hgweb_config_parser =", "\"location before attempting to boostrap.\" print exit(1) else: exit(0) else:", "print exit(1) else: exit(0) else: exit(0) exit(0) def admin_user_info( config_parser", "= None if config_parser.has_option( 'app:main', 'database_connection' ): dburi = config_parser.get(", "file. Add this setting before attempting to bootstrap.' 
exit(1) sa_session", "% db_file else: print 'The database configuration setting is missing", "== 'email': email = elem.text elif elem.tag == 'password': password", "= elem.text elif elem.tag == 'username': username = elem.text print", "admin_user_info( config_parser ): user_info_config = os.path.abspath( os.path.join( os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml'", "elem.text elif elem.tag == 'username': username = elem.text print '%s__SEP__%s__SEP__%s'", "config_parser.get( 'app:main', 'database_file' ) dburi = \"sqlite:///%s?isolation_level=IMMEDIATE\" % db_file else:", "OperationalError, e: pass try: if sa_session is not None: result", "import argparse import ConfigParser import os import sys new_path =", "from sqlalchemy.exc import ProgrammingError from sqlalchemy.exc import OperationalError from tool_shed.util", "pass except ProgrammingError, e: pass if config_parser.has_option( 'app:main', 'hgweb_config_dir' ):", "bootstrap.' try: model = tool_shed_model.init( config_parser.get( 'app:main', 'file_path' ), dburi,", "elif elem.tag == 'password': password = elem.text elif elem.tag ==", ") elif args.method == 'get_url': return get_local_tool_shed_url( config_parser ) else:", "email = '<EMAIL>' password = '<PASSWORD>' username = 'admin' else:", "'port' ): port = config_parser.get( 'server:main', 'port' ) host =", "os.path.join( hgweb_dir, 'hgweb.config' ) if not os.path.exists( hgweb_config_file ): exit(0)", "None if config_parser.has_option( 'app:main', 'database_connection' ): dburi = config_parser.get( 'app:main',", "): user_info_config = os.path.abspath( os.path.join( os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' ) )", "print '%s__SEP__%s__SEP__%s' % ( username, email, password ) return 0", "action='store', default='config/tool_shed.ini.sample' ) parser.add_argument( '-e', '--execute', dest='method', action='store', default='check_db' )", "else: root = tree.getroot() for elem in root: if elem.tag", ">= 2: 
print database_exists_message exit(1) else: pass except ProgrammingError, e:", "( host, port ) return 0 def main( args ):", "config_parser ): port = '9009' if config_parser.has_section( 'server:main' ): if", "username = 'admin' else: root = tree.getroot() for elem in", "to be invalid, using defaults.\" email = '<EMAIL>' password =", "elem.tag == 'email': email = elem.text elif elem.tag == 'password':", "'user_info.xml' ) ) tree, error_message = xml_util.parse_xml( user_info_config ) if", "config_parser.has_option( 'app:main', 'database_connection' ): dburi = config_parser.get( 'app:main', 'database_connection' )", "to bootstrap.' try: model = tool_shed_model.init( config_parser.get( 'app:main', 'file_path' ),", "sa_session.execute( 'SELECT version FROM migrate_version' ).first() if result[0] >= 2:", "1: message = \"This Tool Shed's hgweb.config file contains entries,", "default='check_db' ) args = parser.parse_args() if __name__ == '__main__': exit(", "root: if elem.tag == 'email': email = elem.text elif elem.tag", "elem.text print '%s__SEP__%s__SEP__%s' % ( username, email, password ) return", "defaults.\" email = '<EMAIL>' password = '<PASSWORD>' username = 'admin'", "email = elem.text elif elem.tag == 'password': password = elem.text", "for elem in root: if elem.tag == 'email': email =", "before attempting to bootstrap.' try: model = tool_shed_model.init( config_parser.get( 'app:main',", "the configured \" message += \"location before attempting to boostrap.\"", "this setting before attempting to bootstrap.' 
exit(1) sa_session = None", "return admin_user_info( config_parser ) elif args.method == 'get_url': return get_local_tool_shed_url(", "os.path.exists( args.config ): config_parser.read( args.config ) else: return 1 if", "create_tables=False ) sa_session = model.context.current print database_exists_message exit(1) except ProgrammingError,", "new_path from galaxy import eggs eggs.require( \"SQLAlchemy >= 0.4\" )", "database that has not been migrated before attempting to bootstrap.'", "config_parser.has_option( 'app:main', 'hgweb_config_dir' ): hgweb_config_parser = ConfigParser.ConfigParser() hgweb_dir = config_parser.get(", "else: return 1 if args.method == 'check_db': return check_db( config_parser", "repositories in the configured \" message += \"location before attempting", "print 'The database configuration setting is missing from the tool_shed.ini", "print 'http://%s:%s' % ( host, port ) return 0 def", "+= 'Create a new database that has not been migrated", "'app:main', 'file_path' ), dburi, engine_options={}, create_tables=False ) sa_session = model.context.current", "= '127.0.0.1' print 'http://%s:%s' % ( host, port ) return", "== 'admin_user_info': return admin_user_info( config_parser ) elif args.method == 'get_url':", "ConfigParser.ConfigParser() hgweb_dir = config_parser.get( 'app:main', 'hgweb_config_dir' ) hgweb_config_file = os.path.join(", "\"This Tool Shed's hgweb.config file contains entries, so bootstrapping is", "\"SQLAlchemy >= 0.4\" ) import galaxy.webapps.tool_shed.model.mapping as tool_shed_model from sqlalchemy.exc", "is not None: result = sa_session.execute( 'SELECT version FROM migrate_version'", "is not allowed. 
Delete\" message += \" the current hgweb.config", "'127.0.0.1' print 'http://%s:%s' % ( host, port ) return 0", "] new_path.extend( sys.path[1:] ) sys.path = new_path from galaxy import", "import eggs eggs.require( \"SQLAlchemy >= 0.4\" ) import galaxy.webapps.tool_shed.model.mapping as", "engine_options={}, create_tables=False ) sa_session = model.context.current print database_exists_message exit(1) except", "host = '127.0.0.1' print 'http://%s:%s' % ( host, port )", "get_local_tool_shed_url( config_parser ) else: return 1 parser = argparse.ArgumentParser() parser.add_argument(", "hgweb_config_parser = ConfigParser.ConfigParser() hgweb_dir = config_parser.get( 'app:main', 'hgweb_config_dir' ) hgweb_config_file", ") elif args.method == 'admin_user_info': return admin_user_info( config_parser ) elif", ") args = parser.parse_args() if __name__ == '__main__': exit( main(", "'port' ) host = '127.0.0.1' print 'http://%s:%s' % ( host,", "= os.path.abspath( os.path.join( os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' ) ) tree, error_message", "message += \" the current hgweb.config file along with all", "exit(1) else: exit(0) else: exit(0) exit(0) def admin_user_info( config_parser ):", ") sys.path = new_path from galaxy import eggs eggs.require( \"SQLAlchemy", "tool_shed.ini file. Add this setting before attempting to bootstrap.' 
exit(1)", "from sqlalchemy.exc import OperationalError from tool_shed.util import xml_util def check_db(", "config_parser.has_option( 'server:main', 'port' ): port = config_parser.get( 'server:main', 'port' )", "args.method == 'get_url': return get_local_tool_shed_url( config_parser ) else: return 1", "exit(0) hgweb_config_parser.read( hgweb_config_file ) configured_repos = hgweb_config_parser.items( 'paths' ) if", "sa_session is not None: result = sa_session.execute( 'SELECT version FROM", "e: pass if config_parser.has_option( 'app:main', 'hgweb_config_dir' ): hgweb_config_parser = ConfigParser.ConfigParser()", "): dburi = config_parser.get( 'app:main', 'database_connection' ) elif config_parser.has_option( 'app:main',", "'app:main', 'hgweb_config_dir' ): hgweb_config_parser = ConfigParser.ConfigParser() hgweb_dir = config_parser.get( 'app:main',", "hgweb_config_parser.items( 'paths' ) if len( configured_repos ) >= 1: message", "os.path.exists( hgweb_config_file ): exit(0) hgweb_config_parser.read( hgweb_config_file ) configured_repos = hgweb_config_parser.items(", "user_info_config, \" seems to be invalid, using defaults.\" email =", "password = elem.text elif elem.tag == 'username': username = elem.text", "import os import sys new_path = [ os.path.join( os.getcwd(), \"lib\"", "return 0 def get_local_tool_shed_url( config_parser ): port = '9009' if", "try: model = tool_shed_model.init( config_parser.get( 'app:main', 'file_path' ), dburi, engine_options={},", "1 parser = argparse.ArgumentParser() parser.add_argument( '-c', '--config_file', dest='config', action='store', default='config/tool_shed.ini.sample'", ") parser.add_argument( '-e', '--execute', dest='method', action='store', default='check_db' ) args =", "= config_parser.get( 'app:main', 'hgweb_config_dir' ) hgweb_config_file = os.path.join( hgweb_dir, 'hgweb.config'", "= '9009' if config_parser.has_section( 'server:main' ): if config_parser.has_option( 'server:main', 'port'", "OperationalError from 
tool_shed.util import xml_util def check_db( config_parser ): dburi", "database configuration setting is missing from the tool_shed.ini file. Add", "admin_user_info( config_parser ) elif args.method == 'get_url': return get_local_tool_shed_url( config_parser", "dburi, engine_options={}, create_tables=False ) sa_session = model.context.current print database_exists_message exit(1)", "return 0 def main( args ): config_parser = ConfigParser.ConfigParser() if", "error_message = xml_util.parse_xml( user_info_config ) if tree is None: print", "else: exit(0) else: exit(0) exit(0) def admin_user_info( config_parser ): user_info_config", "if config_parser.has_section( 'server:main' ): if config_parser.has_option( 'server:main', 'port' ): port", "model.context.current print database_exists_message exit(1) except ProgrammingError, e: pass except OperationalError,", "== 'username': username = elem.text print '%s__SEP__%s__SEP__%s' % ( username,", "from galaxy import eggs eggs.require( \"SQLAlchemy >= 0.4\" ) import", "ProgrammingError from sqlalchemy.exc import OperationalError from tool_shed.util import xml_util def", "dest='method', action='store', default='check_db' ) args = parser.parse_args() if __name__ ==", "check_db( config_parser ): dburi = None if config_parser.has_option( 'app:main', 'database_connection'", "before attempting to bootstrap.' 
exit(1) sa_session = None database_exists_message =", "): port = '9009' if config_parser.has_section( 'server:main' ): if config_parser.has_option(", "if os.path.exists( args.config ): config_parser.read( args.config ) else: return 1", "= ConfigParser.ConfigParser() hgweb_dir = config_parser.get( 'app:main', 'hgweb_config_dir' ) hgweb_config_file =", "sys new_path = [ os.path.join( os.getcwd(), \"lib\" ) ] new_path.extend(", "database_exists_message exit(1) except ProgrammingError, e: pass except OperationalError, e: pass", "os.getcwd(), \"lib\" ) ] new_path.extend( sys.path[1:] ) sys.path = new_path", "the current hgweb.config file along with all associated repositories in", "galaxy import eggs eggs.require( \"SQLAlchemy >= 0.4\" ) import galaxy.webapps.tool_shed.model.mapping", "config_parser ): user_info_config = os.path.abspath( os.path.join( os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' )", "'http://%s:%s' % ( host, port ) return 0 def main(", "argparse import ConfigParser import os import sys new_path = [", "allowed. 
' database_exists_message += 'Create a new database that has", "'hgweb_config_dir' ) hgweb_config_file = os.path.join( hgweb_dir, 'hgweb.config' ) if not", "os import sys new_path = [ os.path.join( os.getcwd(), \"lib\" )", "'hgweb.config' ) if not os.path.exists( hgweb_config_file ): exit(0) hgweb_config_parser.read( hgweb_config_file", "parser = argparse.ArgumentParser() parser.add_argument( '-c', '--config_file', dest='config', action='store', default='config/tool_shed.ini.sample' )", ") return 0 def main( args ): config_parser = ConfigParser.ConfigParser()", "tool_shed_model.init( config_parser.get( 'app:main', 'file_path' ), dburi, engine_options={}, create_tables=False ) sa_session", "Tool Shed's hgweb.config file contains entries, so bootstrapping is not", ") else: return 1 parser = argparse.ArgumentParser() parser.add_argument( '-c', '--config_file',", "dburi = config_parser.get( 'app:main', 'database_connection' ) elif config_parser.has_option( 'app:main', 'database_file'", "message = \"This Tool Shed's hgweb.config file contains entries, so", "configuration setting is missing from the tool_shed.ini file. Add this", "e: pass except OperationalError, e: pass try: if sa_session is", "new database that has not been migrated before attempting to", "return 1 parser = argparse.ArgumentParser() parser.add_argument( '-c', '--config_file', dest='config', action='store',", "root = tree.getroot() for elem in root: if elem.tag ==", ") host = '127.0.0.1' print 'http://%s:%s' % ( host, port", "for this Tool Shed is not new, so bootstrapping is", "migrated before attempting to bootstrap.' try: model = tool_shed_model.init( config_parser.get(", "% ( username, email, password ) return 0 def get_local_tool_shed_url(", "if elem.tag == 'email': email = elem.text elif elem.tag ==", "not been migrated before attempting to bootstrap.' try: model =", "\" seems to be invalid, using defaults.\" email = '<EMAIL>'", "so bootstrapping is not allowed. 
' database_exists_message += 'Create a", "user_info_config = os.path.abspath( os.path.join( os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' ) ) tree,", "port ) return 0 def main( args ): config_parser =", "= xml_util.parse_xml( user_info_config ) if tree is None: print \"The", "galaxy.webapps.tool_shed.model.mapping as tool_shed_model from sqlalchemy.exc import ProgrammingError from sqlalchemy.exc import", "= elem.text elif elem.tag == 'password': password = elem.text elif", "entries, so bootstrapping is not allowed. Delete\" message += \"", "os.getcwd(), 'lib/tool_shed/scripts/bootstrap_tool_shed', 'user_info.xml' ) ) tree, error_message = xml_util.parse_xml( user_info_config", "exit(1) except ProgrammingError, e: pass except OperationalError, e: pass try:", "= config_parser.get( 'server:main', 'port' ) host = '127.0.0.1' print 'http://%s:%s'", "args.config ) else: return 1 if args.method == 'check_db': return", "== 'get_url': return get_local_tool_shed_url( config_parser ) else: return 1 parser", "sqlalchemy.exc import ProgrammingError from sqlalchemy.exc import OperationalError from tool_shed.util import", "be invalid, using defaults.\" email = '<EMAIL>' password = '<PASSWORD>'", "in the configured \" message += \"location before attempting to", "XML file \", user_info_config, \" seems to be invalid, using", "len( configured_repos ) >= 1: message = \"This Tool Shed's", "main( args ): config_parser = ConfigParser.ConfigParser() if os.path.exists( args.config ):", "except ProgrammingError, e: pass if config_parser.has_option( 'app:main', 'hgweb_config_dir' ): hgweb_config_parser", "+= \" the current hgweb.config file along with all associated", "return check_db( config_parser ) elif args.method == 'admin_user_info': return admin_user_info(" ]
[ "int(sequence_number) return self.backend.get_shard_iterator( arn, shard_id, shard_iterator_type, sequence_number ) def get_records(self):", "dynamodbstreams_backends[self.region] def describe_stream(self): arn = self._get_param(\"StreamArn\") return self.backend.describe_stream(arn) def list_streams(self):", "describe_stream(self): arn = self._get_param(\"StreamArn\") return self.backend.describe_stream(arn) def list_streams(self): table_name =", "string if isinstance(sequence_number, string_types): sequence_number = int(sequence_number) return self.backend.get_shard_iterator( arn,", "import unicode_literals from moto.core.responses import BaseResponse from .models import dynamodbstreams_backends", "get_shard_iterator(self): arn = self._get_param(\"StreamArn\") shard_id = self._get_param(\"ShardId\") shard_iterator_type = self._get_param(\"ShardIteratorType\")", "DynamoDBStreamsHandler(BaseResponse): @property def backend(self): return dynamodbstreams_backends[self.region] def describe_stream(self): arn =", "from __future__ import unicode_literals from moto.core.responses import BaseResponse from .models", "= self._get_param(\"ShardIterator\") limit = self._get_param(\"Limit\") if limit is None: limit", "shard_id, shard_iterator_type, sequence_number ) def get_records(self): arn = self._get_param(\"ShardIterator\") limit", "sequence_number = int(sequence_number) return self.backend.get_shard_iterator( arn, shard_id, shard_iterator_type, sequence_number )", "string_types class DynamoDBStreamsHandler(BaseResponse): @property def backend(self): return dynamodbstreams_backends[self.region] def describe_stream(self):", "sequence_number ) def get_records(self): arn = self._get_param(\"ShardIterator\") limit = self._get_param(\"Limit\")", "param should be string if isinstance(sequence_number, string_types): sequence_number = int(sequence_number)", "dynamodbstreams_backends from six import string_types class DynamoDBStreamsHandler(BaseResponse): @property def 
backend(self):", "moto.core.responses import BaseResponse from .models import dynamodbstreams_backends from six import", "= self._get_param(\"StreamArn\") shard_id = self._get_param(\"ShardId\") shard_iterator_type = self._get_param(\"ShardIteratorType\") sequence_number =", "BaseResponse from .models import dynamodbstreams_backends from six import string_types class", "be string if isinstance(sequence_number, string_types): sequence_number = int(sequence_number) return self.backend.get_shard_iterator(", "according to documentation sequence_number param should be string if isinstance(sequence_number,", "= self._get_param(\"ShardId\") shard_iterator_type = self._get_param(\"ShardIteratorType\") sequence_number = self._get_param(\"SequenceNumber\") # according", "self._get_param(\"StreamArn\") shard_id = self._get_param(\"ShardId\") shard_iterator_type = self._get_param(\"ShardIteratorType\") sequence_number = self._get_param(\"SequenceNumber\")", "def list_streams(self): table_name = self._get_param(\"TableName\") return self.backend.list_streams(table_name) def get_shard_iterator(self): arn", "shard_id = self._get_param(\"ShardId\") shard_iterator_type = self._get_param(\"ShardIteratorType\") sequence_number = self._get_param(\"SequenceNumber\") #", "if limit is None: limit = 1000 return self.backend.get_records(arn, limit)", "<reponame>jonnangle/moto-1<gh_stars>1-10 from __future__ import unicode_literals from moto.core.responses import BaseResponse from", "to documentation sequence_number param should be string if isinstance(sequence_number, string_types):", "string_types): sequence_number = int(sequence_number) return self.backend.get_shard_iterator( arn, shard_id, shard_iterator_type, sequence_number", "self._get_param(\"Limit\") if limit is None: limit = 1000 return self.backend.get_records(arn,", ".models import dynamodbstreams_backends from six import string_types class DynamoDBStreamsHandler(BaseResponse): @property", "backend(self): return 
dynamodbstreams_backends[self.region] def describe_stream(self): arn = self._get_param(\"StreamArn\") return self.backend.describe_stream(arn)", "sequence_number param should be string if isinstance(sequence_number, string_types): sequence_number =", "def get_records(self): arn = self._get_param(\"ShardIterator\") limit = self._get_param(\"Limit\") if limit", "self.backend.describe_stream(arn) def list_streams(self): table_name = self._get_param(\"TableName\") return self.backend.list_streams(table_name) def get_shard_iterator(self):", "arn, shard_id, shard_iterator_type, sequence_number ) def get_records(self): arn = self._get_param(\"ShardIterator\")", "import string_types class DynamoDBStreamsHandler(BaseResponse): @property def backend(self): return dynamodbstreams_backends[self.region] def", "= self._get_param(\"ShardIteratorType\") sequence_number = self._get_param(\"SequenceNumber\") # according to documentation sequence_number", "self._get_param(\"SequenceNumber\") # according to documentation sequence_number param should be string", "self._get_param(\"ShardIterator\") limit = self._get_param(\"Limit\") if limit is None: limit =", "self._get_param(\"ShardIteratorType\") sequence_number = self._get_param(\"SequenceNumber\") # according to documentation sequence_number param", "def backend(self): return dynamodbstreams_backends[self.region] def describe_stream(self): arn = self._get_param(\"StreamArn\") return", "sequence_number = self._get_param(\"SequenceNumber\") # according to documentation sequence_number param should", "if isinstance(sequence_number, string_types): sequence_number = int(sequence_number) return self.backend.get_shard_iterator( arn, shard_id,", ") def get_records(self): arn = self._get_param(\"ShardIterator\") limit = self._get_param(\"Limit\") if", "return dynamodbstreams_backends[self.region] def describe_stream(self): arn = self._get_param(\"StreamArn\") return self.backend.describe_stream(arn) def", "= int(sequence_number) return 
self.backend.get_shard_iterator( arn, shard_id, shard_iterator_type, sequence_number ) def", "import dynamodbstreams_backends from six import string_types class DynamoDBStreamsHandler(BaseResponse): @property def", "= self._get_param(\"TableName\") return self.backend.list_streams(table_name) def get_shard_iterator(self): arn = self._get_param(\"StreamArn\") shard_id", "shard_iterator_type, sequence_number ) def get_records(self): arn = self._get_param(\"ShardIterator\") limit =", "table_name = self._get_param(\"TableName\") return self.backend.list_streams(table_name) def get_shard_iterator(self): arn = self._get_param(\"StreamArn\")", "from six import string_types class DynamoDBStreamsHandler(BaseResponse): @property def backend(self): return", "limit = self._get_param(\"Limit\") if limit is None: limit = 1000", "__future__ import unicode_literals from moto.core.responses import BaseResponse from .models import", "shard_iterator_type = self._get_param(\"ShardIteratorType\") sequence_number = self._get_param(\"SequenceNumber\") # according to documentation", "list_streams(self): table_name = self._get_param(\"TableName\") return self.backend.list_streams(table_name) def get_shard_iterator(self): arn =", "get_records(self): arn = self._get_param(\"ShardIterator\") limit = self._get_param(\"Limit\") if limit is", "self.backend.list_streams(table_name) def get_shard_iterator(self): arn = self._get_param(\"StreamArn\") shard_id = self._get_param(\"ShardId\") shard_iterator_type", "from .models import dynamodbstreams_backends from six import string_types class DynamoDBStreamsHandler(BaseResponse):", "unicode_literals from moto.core.responses import BaseResponse from .models import dynamodbstreams_backends from", "import BaseResponse from .models import dynamodbstreams_backends from six import string_types", "arn = self._get_param(\"StreamArn\") shard_id = self._get_param(\"ShardId\") shard_iterator_type = self._get_param(\"ShardIteratorType\") sequence_number", "# 
according to documentation sequence_number param should be string if", "self._get_param(\"ShardId\") shard_iterator_type = self._get_param(\"ShardIteratorType\") sequence_number = self._get_param(\"SequenceNumber\") # according to", "arn = self._get_param(\"StreamArn\") return self.backend.describe_stream(arn) def list_streams(self): table_name = self._get_param(\"TableName\")", "arn = self._get_param(\"ShardIterator\") limit = self._get_param(\"Limit\") if limit is None:", "self._get_param(\"TableName\") return self.backend.list_streams(table_name) def get_shard_iterator(self): arn = self._get_param(\"StreamArn\") shard_id =", "isinstance(sequence_number, string_types): sequence_number = int(sequence_number) return self.backend.get_shard_iterator( arn, shard_id, shard_iterator_type,", "= self._get_param(\"Limit\") if limit is None: limit = 1000 return", "should be string if isinstance(sequence_number, string_types): sequence_number = int(sequence_number) return", "def describe_stream(self): arn = self._get_param(\"StreamArn\") return self.backend.describe_stream(arn) def list_streams(self): table_name", "class DynamoDBStreamsHandler(BaseResponse): @property def backend(self): return dynamodbstreams_backends[self.region] def describe_stream(self): arn", "def get_shard_iterator(self): arn = self._get_param(\"StreamArn\") shard_id = self._get_param(\"ShardId\") shard_iterator_type =", "= self._get_param(\"StreamArn\") return self.backend.describe_stream(arn) def list_streams(self): table_name = self._get_param(\"TableName\") return", "@property def backend(self): return dynamodbstreams_backends[self.region] def describe_stream(self): arn = self._get_param(\"StreamArn\")", "documentation sequence_number param should be string if isinstance(sequence_number, string_types): sequence_number", "self.backend.get_shard_iterator( arn, shard_id, shard_iterator_type, sequence_number ) def get_records(self): arn =", "six import string_types class 
DynamoDBStreamsHandler(BaseResponse): @property def backend(self): return dynamodbstreams_backends[self.region]", "= self._get_param(\"SequenceNumber\") # according to documentation sequence_number param should be", "return self.backend.describe_stream(arn) def list_streams(self): table_name = self._get_param(\"TableName\") return self.backend.list_streams(table_name) def", "return self.backend.list_streams(table_name) def get_shard_iterator(self): arn = self._get_param(\"StreamArn\") shard_id = self._get_param(\"ShardId\")", "self._get_param(\"StreamArn\") return self.backend.describe_stream(arn) def list_streams(self): table_name = self._get_param(\"TableName\") return self.backend.list_streams(table_name)", "return self.backend.get_shard_iterator( arn, shard_id, shard_iterator_type, sequence_number ) def get_records(self): arn", "from moto.core.responses import BaseResponse from .models import dynamodbstreams_backends from six" ]
[ "= '_zeros' enabled = True @classmethod def extract(cls, node): attrs", "shape = list(attrs.tuple('shape', int, None)) zero_shapes = [] for i,", "# Copyright (C) 2018-2021 Intel Corporation # SPDX-License-Identifier: Apache-2.0 import", "<reponame>ytorzuk-altran/openvino # Copyright (C) 2018-2021 Intel Corporation # SPDX-License-Identifier: Apache-2.0", "np.zeros(shape), 'zero_shapes': zero_shapes } # update the attributes of the", "True @classmethod def extract(cls, node): attrs = get_mxnet_layer_attrs(node.symbol_dict) shape =", "SPDX-License-Identifier: Apache-2.0 import numpy as np from openvino.tools.mo.front.extractor import FrontExtractorOp", "= [] for i, s in enumerate(shape): if s ==", "i, s in enumerate(shape): if s == 0: shape[i] =", "list(attrs.tuple('shape', int, None)) zero_shapes = [] for i, s in", "import FrontExtractorOp from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs from openvino.tools.mo.ops.const import Const", "Apache-2.0 import numpy as np from openvino.tools.mo.front.extractor import FrontExtractorOp from", "zero_shapes = [] for i, s in enumerate(shape): if s", "enumerate(shape): if s == 0: shape[i] = 1 zero_shapes.append(i) update_attrs", "numpy as np from openvino.tools.mo.front.extractor import FrontExtractorOp from openvino.tools.mo.front.mxnet.extractors.utils import", "import get_mxnet_layer_attrs from openvino.tools.mo.ops.const import Const class ZerosFrontExtractor(FrontExtractorOp): op =", "1 zero_shapes.append(i) update_attrs = { 'shape': np.ndarray(shape), 'value': np.zeros(shape), 'zero_shapes':", "= 1 zero_shapes.append(i) update_attrs = { 'shape': np.ndarray(shape), 'value': np.zeros(shape),", "# update the attributes of the node Const.update_node_stat(node, update_attrs) return", "(C) 2018-2021 Intel Corporation # SPDX-License-Identifier: Apache-2.0 import numpy as", "2018-2021 Intel Corporation # SPDX-License-Identifier: Apache-2.0 import numpy as np", "attrs = 
get_mxnet_layer_attrs(node.symbol_dict) shape = list(attrs.tuple('shape', int, None)) zero_shapes =", "op = '_zeros' enabled = True @classmethod def extract(cls, node):", "zero_shapes.append(i) update_attrs = { 'shape': np.ndarray(shape), 'value': np.zeros(shape), 'zero_shapes': zero_shapes", "= True @classmethod def extract(cls, node): attrs = get_mxnet_layer_attrs(node.symbol_dict) shape", "openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs from openvino.tools.mo.ops.const import Const class ZerosFrontExtractor(FrontExtractorOp): op", "get_mxnet_layer_attrs(node.symbol_dict) shape = list(attrs.tuple('shape', int, None)) zero_shapes = [] for", "[] for i, s in enumerate(shape): if s == 0:", "= get_mxnet_layer_attrs(node.symbol_dict) shape = list(attrs.tuple('shape', int, None)) zero_shapes = []", "'value': np.zeros(shape), 'zero_shapes': zero_shapes } # update the attributes of", "get_mxnet_layer_attrs from openvino.tools.mo.ops.const import Const class ZerosFrontExtractor(FrontExtractorOp): op = '_zeros'", "# SPDX-License-Identifier: Apache-2.0 import numpy as np from openvino.tools.mo.front.extractor import", "== 0: shape[i] = 1 zero_shapes.append(i) update_attrs = { 'shape':", "s == 0: shape[i] = 1 zero_shapes.append(i) update_attrs = {", "if s == 0: shape[i] = 1 zero_shapes.append(i) update_attrs =", "for i, s in enumerate(shape): if s == 0: shape[i]", "Copyright (C) 2018-2021 Intel Corporation # SPDX-License-Identifier: Apache-2.0 import numpy", "from openvino.tools.mo.front.extractor import FrontExtractorOp from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs from openvino.tools.mo.ops.const", "Const class ZerosFrontExtractor(FrontExtractorOp): op = '_zeros' enabled = True @classmethod", "class ZerosFrontExtractor(FrontExtractorOp): op = '_zeros' enabled = True @classmethod def", "np from openvino.tools.mo.front.extractor import FrontExtractorOp from openvino.tools.mo.front.mxnet.extractors.utils import 
get_mxnet_layer_attrs from", "shape[i] = 1 zero_shapes.append(i) update_attrs = { 'shape': np.ndarray(shape), 'value':", "Corporation # SPDX-License-Identifier: Apache-2.0 import numpy as np from openvino.tools.mo.front.extractor", "import Const class ZerosFrontExtractor(FrontExtractorOp): op = '_zeros' enabled = True", "'_zeros' enabled = True @classmethod def extract(cls, node): attrs =", "enabled = True @classmethod def extract(cls, node): attrs = get_mxnet_layer_attrs(node.symbol_dict)", "in enumerate(shape): if s == 0: shape[i] = 1 zero_shapes.append(i)", "from openvino.tools.mo.ops.const import Const class ZerosFrontExtractor(FrontExtractorOp): op = '_zeros' enabled", "update_attrs = { 'shape': np.ndarray(shape), 'value': np.zeros(shape), 'zero_shapes': zero_shapes }", "= { 'shape': np.ndarray(shape), 'value': np.zeros(shape), 'zero_shapes': zero_shapes } #", "0: shape[i] = 1 zero_shapes.append(i) update_attrs = { 'shape': np.ndarray(shape),", "np.ndarray(shape), 'value': np.zeros(shape), 'zero_shapes': zero_shapes } # update the attributes", "update the attributes of the node Const.update_node_stat(node, update_attrs) return cls.enabled", "import numpy as np from openvino.tools.mo.front.extractor import FrontExtractorOp from openvino.tools.mo.front.mxnet.extractors.utils", "Intel Corporation # SPDX-License-Identifier: Apache-2.0 import numpy as np from", "} # update the attributes of the node Const.update_node_stat(node, update_attrs)", "extract(cls, node): attrs = get_mxnet_layer_attrs(node.symbol_dict) shape = list(attrs.tuple('shape', int, None))", "node): attrs = get_mxnet_layer_attrs(node.symbol_dict) shape = list(attrs.tuple('shape', int, None)) zero_shapes", "zero_shapes } # update the attributes of the node Const.update_node_stat(node,", "as np from openvino.tools.mo.front.extractor import FrontExtractorOp from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs", "def extract(cls, node): attrs = 
get_mxnet_layer_attrs(node.symbol_dict) shape = list(attrs.tuple('shape', int,", "ZerosFrontExtractor(FrontExtractorOp): op = '_zeros' enabled = True @classmethod def extract(cls,", "int, None)) zero_shapes = [] for i, s in enumerate(shape):", "openvino.tools.mo.front.extractor import FrontExtractorOp from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs from openvino.tools.mo.ops.const import", "@classmethod def extract(cls, node): attrs = get_mxnet_layer_attrs(node.symbol_dict) shape = list(attrs.tuple('shape',", "'shape': np.ndarray(shape), 'value': np.zeros(shape), 'zero_shapes': zero_shapes } # update the", "s in enumerate(shape): if s == 0: shape[i] = 1", "from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs from openvino.tools.mo.ops.const import Const class ZerosFrontExtractor(FrontExtractorOp):", "openvino.tools.mo.ops.const import Const class ZerosFrontExtractor(FrontExtractorOp): op = '_zeros' enabled =", "FrontExtractorOp from openvino.tools.mo.front.mxnet.extractors.utils import get_mxnet_layer_attrs from openvino.tools.mo.ops.const import Const class", "= list(attrs.tuple('shape', int, None)) zero_shapes = [] for i, s", "{ 'shape': np.ndarray(shape), 'value': np.zeros(shape), 'zero_shapes': zero_shapes } # update", "None)) zero_shapes = [] for i, s in enumerate(shape): if", "'zero_shapes': zero_shapes } # update the attributes of the node" ]
[ "j.sal.process.executeWithoutPipe(\"cd %s;%s install\" % (cdest, self.npm)) # # def flatui(self):", "# z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"bootstrap-%s-dist\" % version), dpath) # #", "j class builder(): # @property # def buildDir(self): # return", "\"bootstrap\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) # z.extract(self.buildDir) #", "== False: # if j.sal.fs.exists(\"%s/npm\" % j.dirs.binDir, followlinks=True) == False:", "j.sal.fs.joinPaths(self.buildDir, \"bootstrap\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) # z.extract(self.buildDir)", "j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"angular-%s\" % sversion), dpath) # # self._removeMapFiles(dpath) # #", "\"3.3.7\" # url = \"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\" % (version, version) # path", "j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\") @property def cuisine(self): return j.tools.cuisine.local # ALL NOT", "# # self._removeMapFiles(dpath) # # def _removeMapFiles(self, path): # for", "= j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"bootstrap-%s-dist\" %", "# url = \"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\" % (version, version) # path =", "z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"angular-%s\" % sversion), dpath) # # self._removeMapFiles(dpath)", "j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"codemirror-%s\" % version),", "minutes\") res = self.cuisine.core.run(cmd) def checkIPFS(self): return j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\") == True", "= \"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\" % (version, version) # path = j.do.download(url, to='',", "(version, version) # path = j.do.download(url, to='', 
overwrite=False, retry=3, timeout=0)", "ALL NOT NEEDED ANY LONGER USE bower # def angular(self):", "= j.do.download(url, to='', overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir,", "# @property # def npm(self): # if self._npm == False:", "def bootstrap(self): # version = \"3.3.7\" # url = \"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\"", "j.sal.fs.remove(item) # # def bootstrap(self): # version = \"3.3.7\" #", "(cdest, self.npm)) # # def flatui(self): # url = \"https://github.com/designmodo/Flat-UI.git\"", "\"codemirror\", \"font-awesome\", \"jqplot\", # \"underscore\", \"spin\", \"moment\", \"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\", \"jqwidgets\", \"d3\",", "self.npm)) # # def flatui(self): # url = \"https://github.com/designmodo/Flat-UI.git\" #", "item) # # print(item) # j.sal.fs.remove(item) # # def bootstrap(self):", "\"angular-%s\" % sversion), dpath) # # self._removeMapFiles(dpath) # # def", "j.sal.fs.joinPaths(self.buildDir, \"angular\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) # z.extract(self.buildDir)", "# self.cuisine.apps.nodejs.bowerInstall([\"jquery\", \"flatui\", \"bootstrap\", \"famous\", \"codemirror\", \"font-awesome\", \"jqplot\", # \"underscore\",", "# def npm(self): # if self._npm == False: # if", "# z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"bootstrap-%s-dist\" % version), dpath)", "# z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"angular-%s\" % sversion), dpath) # #", "self.cuisine.apps.nodejs.install() # self._bower = \"%sbower\" % j.dirs.binDir # return self._bower", "cmd = \"cd $tmpDir/bower;ipfs -c $cfgDir/ipfs/main/ add -r bower_components\" print(\"IPFS", "to='', overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"bootstrap\") #", "z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, 
\"bootstrap-%s-dist\" % version), dpath) #", "self._bower == False: # if j.sal.fs.exists(\"%s/bower\" % j.dirs.binDir, followlinks=True) ==", "url = \"http://codemirror.net/codemirror-%s.zip\" % version # path = j.do.download(url, to='',", "# # version = \"5.9\" # url = \"http://codemirror.net/codemirror-%s.zip\" %", "z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"codemirror-%s\" % version), dpath) # @property #", "z = j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"bootstrap-%s-dist\"", "== False: # if j.sal.fs.exists(\"%s/bower\" % j.dirs.binDir, followlinks=True) == False:", "% (version, version) # path = j.do.download(url, to='', overwrite=False, retry=3,", "# path = j.do.download(url, to='', overwrite=False, retry=3, timeout=0) # dpath", "print(\"IPFS upload, can take couple of minutes\") res = self.cuisine.core.run(cmd)", "\"jsbuilder\")) # if self.checkIPFS == False: # self.getIPFS() # #", "self.checkIPFS == False: # self.getIPFS() # # self.angular() # #", "# print(\"npm/bower install\") # res = j.sal.process.executeWithoutPipe(\"cd %s;%s install;%s install\"", "z = j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"codemirror-%s\"", "overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"codemirror\") # j.sal.fs.removeDirTree(dpath)", "self._bower # def famous(self): # url = \"https://github.com/Famous/engine-seed\" # cdest", "# self._removeMapFiles(dpath) # # def codemirror(self): # # version =", "= \"http://code.angularjs.org/%s/angular-%s.zip\" % (version, version) # path = j.do.download(url, to='',", "self.cuisine.core.run(cmd) def checkIPFS(self): return j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\") == True def getIPFS(self): j.tools.cuisine.local.apps.ipfs.install()", "% version), dpath) # @property # def npm(self): # if", "self._bower = 
\"%sbower\" % j.dirs.binDir # return self._bower # def", "# def flatui(self): # url = \"https://github.com/designmodo/Flat-UI.git\" # cdest =", "== False: # self.cuisine.apps.nodejs.install() # self._npm = \"%snpm\" % j.dirs.binDir", "res = j.sal.process.executeWithoutPipe(\"cd %s;%s install\" % (cdest, self.npm)) # #", "cdest = j.do.pullGitRepo(url) # res = j.sal.process.executeWithoutPipe(\"cd %s;%s install\" %", "# def famous(self): # url = \"https://github.com/Famous/engine-seed\" # cdest =", "print(\"npm/bower install\") # res = j.sal.process.executeWithoutPipe(\"cd %s;%s install;%s install\" %", "# # print(item) # j.sal.fs.remove(item) # # def bootstrap(self): #", "res = self.cuisine.core.run(cmd) def checkIPFS(self): return j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\") == True def", "j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install() # self._npm = \"%snpm\"", "\"%sbower\" % j.dirs.binDir # return self._bower # def famous(self): #", "version) # path = j.do.download(url, to='', overwrite=False, retry=3, timeout=0) #", "= \"%s/%s\" % (path, item) # # print(item) # j.sal.fs.remove(item)", "# def do1(self): # j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\")) # if self.checkIPFS ==", "# url = \"https://github.com/designmodo/Flat-UI.git\" # cdest = j.do.pullGitRepo(url) # print(\"npm/bower", "def do(self): if self.checkIPFS == False: self.getIPFS() # self.cuisine.apps.nodejs.bowerInstall([\"jquery\", \"flatui\",", "path): # for item in j.sal.fs.find(path, \"*.js.map\"): # item =", "= \"%snpm\" % j.dirs.binDir # return self._npm # @property #", "def flatui(self): # url = \"https://github.com/designmodo/Flat-UI.git\" # cdest = j.do.pullGitRepo(url)", "# # self.codemirror() # # self.famous() # self.flatui() def do(self):", "dpath) # # self._removeMapFiles(dpath) # # def codemirror(self): # #", "def buildDir(self): # return j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\") @property def cuisine(self): 
return", "# def bower(self): # if self._bower == False: # if", "def checkIPFS(self): return j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\") == True def getIPFS(self): j.tools.cuisine.local.apps.ipfs.install() j.tools.cuisine.local.apps.ipfs.start()", "$tmpDir/bower;ipfs -c $cfgDir/ipfs/main/ add -r bower_components\" print(\"IPFS upload, can take", "j.sal.process.executeWithoutPipe(\"cd %s;%s install;%s install\" % (cdest, self.npm, self.bower)) # #", "dpath) # # self._removeMapFiles(dpath) # # def _removeMapFiles(self, path): #", "item in j.sal.fs.find(path, \"*.js.map\"): # item = \"%s/%s\" % (path,", "if self._npm == False: # if j.sal.fs.exists(\"%s/npm\" % j.dirs.binDir, followlinks=True)", "\"d3\", \"angular-latest\"]) cmd = \"cd $tmpDir/bower;ipfs -c $cfgDir/ipfs/main/ add -r", "version = \"1.5.9\" # url = \"http://code.angularjs.org/%s/angular-%s.zip\" % (version, version)", "= j.sal.process.executeWithoutPipe(\"cd %s;%s install;%s install\" % (cdest, self.npm, self.bower)) #", "= \"http://codemirror.net/codemirror-%s.zip\" % version # path = j.do.download(url, to='', overwrite=False,", "retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"angular\") # j.sal.fs.removeDirTree(dpath) #", "famous(self): # url = \"https://github.com/Famous/engine-seed\" # cdest = j.do.pullGitRepo(url) #", "url = \"https://github.com/Famous/engine-seed\" # cdest = j.do.pullGitRepo(url) # res =", "j.dirs.binDir # return self._bower # def famous(self): # url =", "@property # def bower(self): # if self._bower == False: #", "install\" % (cdest, self.npm)) # # def flatui(self): # url", "from JumpScale import j class builder(): # @property # def", "item = \"%s/%s\" % (path, item) # # print(item) #", "\"jsbuilder\") @property def cuisine(self): return j.tools.cuisine.local # ALL NOT NEEDED", "# # def flatui(self): # url = \"https://github.com/designmodo/Flat-UI.git\" # cdest", "bower # def angular(self): # version = \"1.5.9\" # url", "return 
j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\") @property def cuisine(self): return j.tools.cuisine.local # ALL", "\"http://codemirror.net/codemirror-%s.zip\" % version # path = j.do.download(url, to='', overwrite=False, retry=3,", "angular(self): # version = \"1.5.9\" # url = \"http://code.angularjs.org/%s/angular-%s.zip\" %", "of minutes\") res = self.cuisine.core.run(cmd) def checkIPFS(self): return j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\") ==", "install\" % (cdest, self.npm, self.bower)) # # def do1(self): #", "JumpScale import j class builder(): # @property # def buildDir(self):", "j.tools.cuisine.local # ALL NOT NEEDED ANY LONGER USE bower #", "z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"angular-%s\" % sversion), dpath) #", "# cdest = j.do.pullGitRepo(url) # res = j.sal.process.executeWithoutPipe(\"cd %s;%s install\"", "def cuisine(self): return j.tools.cuisine.local # ALL NOT NEEDED ANY LONGER", "def angular(self): # version = \"1.5.9\" # url = \"http://code.angularjs.org/%s/angular-%s.zip\"", "# def _removeMapFiles(self, path): # for item in j.sal.fs.find(path, \"*.js.map\"):", "j.do.download(url, to='', overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"angular\")", "# z = j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir,", "% j.dirs.binDir # return self._npm # @property # def bower(self):", "# # self._removeMapFiles(dpath) # # def codemirror(self): # # version", "False: # self.cuisine.apps.nodejs.install() # self._bower = \"%sbower\" % j.dirs.binDir #", "self.bower)) # # def do1(self): # j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\")) # if", "# \"underscore\", \"spin\", \"moment\", \"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\", \"jqwidgets\", \"d3\", \"angular-latest\"]) cmd =", "= j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # 
j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"codemirror-%s\" %", "self.flatui() def do(self): if self.checkIPFS == False: self.getIPFS() # self.cuisine.apps.nodejs.bowerInstall([\"jquery\",", "# self.cuisine.apps.nodejs.install() # self._npm = \"%snpm\" % j.dirs.binDir # return", "print(item) # j.sal.fs.remove(item) # # def bootstrap(self): # version =", "\"https://github.com/designmodo/Flat-UI.git\" # cdest = j.do.pullGitRepo(url) # print(\"npm/bower install\") # res", "# j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\")) # if self.checkIPFS == False: # self.getIPFS()", "\"jqwidgets\", \"d3\", \"angular-latest\"]) cmd = \"cd $tmpDir/bower;ipfs -c $cfgDir/ipfs/main/ add", "\"underscore\", \"spin\", \"moment\", \"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\", \"jqwidgets\", \"d3\", \"angular-latest\"]) cmd = \"cd", "\"%snpm\" % j.dirs.binDir # return self._npm # @property # def", "(path, item) # # print(item) # j.sal.fs.remove(item) # # def", "self._removeMapFiles(dpath) # # def codemirror(self): # # version = \"5.9\"", "# @property # def bower(self): # if self._bower == False:", "self.getIPFS() # # self.angular() # # self.bootstrap() # # self.codemirror()", "install;%s install\" % (cdest, self.npm, self.bower)) # # def do1(self):", "upload, can take couple of minutes\") res = self.cuisine.core.run(cmd) def", "# # def do1(self): # j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\")) # if self.checkIPFS", "= \"cd $tmpDir/bower;ipfs -c $cfgDir/ipfs/main/ add -r bower_components\" print(\"IPFS upload,", "= j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"angular-%s\" %", "can take couple of minutes\") res = self.cuisine.core.run(cmd) def checkIPFS(self):", "if j.sal.fs.exists(\"%s/bower\" % j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install() #", "# ALL NOT NEEDED ANY LONGER USE bower # def", "False: # self.getIPFS() 
# # self.angular() # # self.bootstrap() #", "# item = \"%s/%s\" % (path, item) # # print(item)", "\"%s/%s\" % (path, item) # # print(item) # j.sal.fs.remove(item) #", "dpath = j.sal.fs.joinPaths(self.buildDir, \"codemirror\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path)", "version = \"5.9\" # url = \"http://codemirror.net/codemirror-%s.zip\" % version #", "j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install() # self._bower = \"%sbower\"", "install\") # res = j.sal.process.executeWithoutPipe(\"cd %s;%s install;%s install\" % (cdest,", "self.checkIPFS == False: self.getIPFS() # self.cuisine.apps.nodejs.bowerInstall([\"jquery\", \"flatui\", \"bootstrap\", \"famous\", \"codemirror\",", "\"spin\", \"moment\", \"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\", \"jqwidgets\", \"d3\", \"angular-latest\"]) cmd = \"cd $tmpDir/bower;ipfs", "z = j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"angular-%s\"", "= j.sal.fs.joinPaths(self.buildDir, \"angular\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) #", "False: # if j.sal.fs.exists(\"%s/bower\" % j.dirs.binDir, followlinks=True) == False: #", "$cfgDir/ipfs/main/ add -r bower_components\" print(\"IPFS upload, can take couple of", "(cdest, self.npm, self.bower)) # # def do1(self): # j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\"))", "# z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"angular-%s\" % sversion), dpath)", "= \"https://github.com/designmodo/Flat-UI.git\" # cdest = j.do.pullGitRepo(url) # print(\"npm/bower install\") #", "return j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\") == True def getIPFS(self): j.tools.cuisine.local.apps.ipfs.install() j.tools.cuisine.local.apps.ipfs.start() b =", "timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"angular\") # j.sal.fs.removeDirTree(dpath) # 
z", "j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"codemirror-%s\" % version), dpath) # @property # def npm(self):", "== False: self.getIPFS() # self.cuisine.apps.nodejs.bowerInstall([\"jquery\", \"flatui\", \"bootstrap\", \"famous\", \"codemirror\", \"font-awesome\",", "== False: # self.cuisine.apps.nodejs.install() # self._bower = \"%sbower\" % j.dirs.binDir", "% (path, item) # # print(item) # j.sal.fs.remove(item) # #", "j.sal.fs.find(path, \"*.js.map\"): # item = \"%s/%s\" % (path, item) #", "# self.flatui() def do(self): if self.checkIPFS == False: self.getIPFS() #", "# # def _removeMapFiles(self, path): # for item in j.sal.fs.find(path,", "NEEDED ANY LONGER USE bower # def angular(self): # version", "# # def bootstrap(self): # version = \"3.3.7\" # url", "in j.sal.fs.find(path, \"*.js.map\"): # item = \"%s/%s\" % (path, item)", "self.codemirror() # # self.famous() # self.flatui() def do(self): if self.checkIPFS", "%s;%s install\" % (cdest, self.npm)) # # def flatui(self): #", "self.cuisine.apps.nodejs.install() # self._npm = \"%snpm\" % j.dirs.binDir # return self._npm", "if j.sal.fs.exists(\"%s/npm\" % j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install() #", "# if self.checkIPFS == False: # self.getIPFS() # # self.angular()", "builder(): # @property # def buildDir(self): # return j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\")", "False: # if j.sal.fs.exists(\"%s/npm\" % j.dirs.binDir, followlinks=True) == False: #", "self._npm = \"%snpm\" % j.dirs.binDir # return self._npm # @property", "# self._bower = \"%sbower\" % j.dirs.binDir # return self._bower #", "version = \"3.3.7\" # url = \"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\" % (version, version)", "# j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"bootstrap-%s-dist\" % version), dpath) # # self._removeMapFiles(dpath) #", "= \"1.5.9\" # url = \"http://code.angularjs.org/%s/angular-%s.zip\" % (version, version) #", "return 
j.tools.cuisine.local # ALL NOT NEEDED ANY LONGER USE bower", "url = \"http://code.angularjs.org/%s/angular-%s.zip\" % (version, version) # path = j.do.download(url,", "j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\")) # if self.checkIPFS == False: # self.getIPFS() #", "# return j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\") @property def cuisine(self): return j.tools.cuisine.local #", "= \"5.9\" # url = \"http://codemirror.net/codemirror-%s.zip\" % version # path", "# j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"angular-%s\" % sversion), dpath) # # self._removeMapFiles(dpath) #", "url = \"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\" % (version, version) # path = j.do.download(url,", "LONGER USE bower # def angular(self): # version = \"1.5.9\"", "j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"bootstrap-%s-dist\" % version),", "% j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install() # self._npm =", "# j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"codemirror-%s\" % version), dpath) # @property # def", "j.sal.fs.exists(\"%s/bower\" % j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install() # self._bower", "# self.angular() # # self.bootstrap() # # self.codemirror() # #", "% j.dirs.binDir # return self._bower # def famous(self): # url", "False: self.getIPFS() # self.cuisine.apps.nodejs.bowerInstall([\"jquery\", \"flatui\", \"bootstrap\", \"famous\", \"codemirror\", \"font-awesome\", \"jqplot\",", "%s;%s install;%s install\" % (cdest, self.npm, self.bower)) # # def", "# res = j.sal.process.executeWithoutPipe(\"cd %s;%s install;%s install\" % (cdest, self.npm,", "overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"bootstrap\") # j.sal.fs.removeDirTree(dpath)", "# # self.bootstrap() # # self.codemirror() # # self.famous() #", "\"moment\", 
\"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\", \"jqwidgets\", \"d3\", \"angular-latest\"]) cmd = \"cd $tmpDir/bower;ipfs -c", "z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"codemirror-%s\" % version), dpath) #", "# j.sal.fs.remove(item) # # def bootstrap(self): # version = \"3.3.7\"", "return self._bower # def famous(self): # url = \"https://github.com/Famous/engine-seed\" #", "ANY LONGER USE bower # def angular(self): # version =", "self.angular() # # self.bootstrap() # # self.codemirror() # # self.famous()", "j.sal.fs.exists(\"%s/npm\" % j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install() # self._npm", "# res = j.sal.process.executeWithoutPipe(\"cd %s;%s install\" % (cdest, self.npm)) #", "# return self._bower # def famous(self): # url = \"https://github.com/Famous/engine-seed\"", "self._removeMapFiles(dpath) # # def _removeMapFiles(self, path): # for item in", "retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"bootstrap\") # j.sal.fs.removeDirTree(dpath) #", "self._npm == False: # if j.sal.fs.exists(\"%s/npm\" % j.dirs.binDir, followlinks=True) ==", "dpath = j.sal.fs.joinPaths(self.buildDir, \"angular\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path)", "class builder(): # @property # def buildDir(self): # return j.sal.fs.joinPaths(j.dirs.tmpDir,", "USE bower # def angular(self): # version = \"1.5.9\" #", "npm(self): # if self._npm == False: # if j.sal.fs.exists(\"%s/npm\" %", "# self.bootstrap() # # self.codemirror() # # self.famous() # self.flatui()", "% version # path = j.do.download(url, to='', overwrite=False, retry=3, timeout=0)", "followlinks=True) == False: # self.cuisine.apps.nodejs.install() # self._bower = \"%sbower\" %", "# dpath = j.sal.fs.joinPaths(self.buildDir, \"codemirror\") # j.sal.fs.removeDirTree(dpath) # z =", "import j class builder(): # @property # def buildDir(self): #", "= j.do.pullGitRepo(url) # print(\"npm/bower 
install\") # res = j.sal.process.executeWithoutPipe(\"cd %s;%s", "self.cuisine.apps.nodejs.bowerInstall([\"jquery\", \"flatui\", \"bootstrap\", \"famous\", \"codemirror\", \"font-awesome\", \"jqplot\", # \"underscore\", \"spin\",", "\"font-awesome\", \"jqplot\", # \"underscore\", \"spin\", \"moment\", \"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\", \"jqwidgets\", \"d3\", \"angular-latest\"])", "self.getIPFS() # self.cuisine.apps.nodejs.bowerInstall([\"jquery\", \"flatui\", \"bootstrap\", \"famous\", \"codemirror\", \"font-awesome\", \"jqplot\", #", "bootstrap(self): # version = \"3.3.7\" # url = \"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\" %", "version # path = j.do.download(url, to='', overwrite=False, retry=3, timeout=0) #", "j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() #", "-r bower_components\" print(\"IPFS upload, can take couple of minutes\") res", "version), dpath) # @property # def npm(self): # if self._npm", "== False: # self.getIPFS() # # self.angular() # # self.bootstrap()", "for item in j.sal.fs.find(path, \"*.js.map\"): # item = \"%s/%s\" %", "cdest = j.do.pullGitRepo(url) # print(\"npm/bower install\") # res = j.sal.process.executeWithoutPipe(\"cd", "to='', overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"angular\") #", "# self.codemirror() # # self.famous() # self.flatui() def do(self): if", "% version), dpath) # # self._removeMapFiles(dpath) # # def codemirror(self):", "codemirror(self): # # version = \"5.9\" # url = \"http://codemirror.net/codemirror-%s.zip\"", "@property # def buildDir(self): # return j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\") @property def", "# cdest = j.do.pullGitRepo(url) # print(\"npm/bower install\") # res =", "_removeMapFiles(self, path): # for item in j.sal.fs.find(path, \"*.js.map\"): # item", "j.dirs.binDir # return self._npm # @property # def bower(self): #", "# 
j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close()", "self._npm # @property # def bower(self): # if self._bower ==", "# def codemirror(self): # # version = \"5.9\" # url", "path = j.do.download(url, to='', overwrite=False, retry=3, timeout=0) # dpath =", "\"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\" % (version, version) # path = j.do.download(url, to='', overwrite=False,", "bower(self): # if self._bower == False: # if j.sal.fs.exists(\"%s/bower\" %", "% sversion), dpath) # # self._removeMapFiles(dpath) # # def _removeMapFiles(self,", "\"bootstrap\", \"famous\", \"codemirror\", \"font-awesome\", \"jqplot\", # \"underscore\", \"spin\", \"moment\", \"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\",", "# dpath = j.sal.fs.joinPaths(self.buildDir, \"angular\") # j.sal.fs.removeDirTree(dpath) # z =", "j.do.download(url, to='', overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"codemirror\")", "# version = \"3.3.7\" # url = \"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\" % (version,", "url = \"https://github.com/designmodo/Flat-UI.git\" # cdest = j.do.pullGitRepo(url) # print(\"npm/bower install\")", "res = j.sal.process.executeWithoutPipe(\"cd %s;%s install;%s install\" % (cdest, self.npm, self.bower))", "def do1(self): # j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\")) # if self.checkIPFS == False:", "# z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"codemirror-%s\" % version), dpath) # @property", "= j.sal.fs.joinPaths(self.buildDir, \"codemirror\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) #", "if self._bower == False: # if j.sal.fs.exists(\"%s/bower\" % j.dirs.binDir, followlinks=True)", "= j.do.pullGitRepo(url) # res = j.sal.process.executeWithoutPipe(\"cd %s;%s install\" % (cdest,", "j.do.pullGitRepo(url) # res = 
j.sal.process.executeWithoutPipe(\"cd %s;%s install\" % (cdest, self.npm))", "False: # self.cuisine.apps.nodejs.install() # self._npm = \"%snpm\" % j.dirs.binDir #", "\"famous\", \"codemirror\", \"font-awesome\", \"jqplot\", # \"underscore\", \"spin\", \"moment\", \"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\", \"jqwidgets\",", "def bower(self): # if self._bower == False: # if j.sal.fs.exists(\"%s/bower\"", "= j.sal.process.executeWithoutPipe(\"cd %s;%s install\" % (cdest, self.npm)) # # def", "cuisine(self): return j.tools.cuisine.local # ALL NOT NEEDED ANY LONGER USE", "buildDir(self): # return j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\") @property def cuisine(self): return j.tools.cuisine.local", "\"*.js.map\"): # item = \"%s/%s\" % (path, item) # #", "\"bootstrap-%s-dist\" % version), dpath) # # self._removeMapFiles(dpath) # # def", "\"angular-latest\"]) cmd = \"cd $tmpDir/bower;ipfs -c $cfgDir/ipfs/main/ add -r bower_components\"", "self.npm, self.bower)) # # def do1(self): # j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\")) #", "# def angular(self): # version = \"1.5.9\" # url =", "# def bootstrap(self): # version = \"3.3.7\" # url =", "= j.sal.fs.joinPaths(self.buildDir, \"bootstrap\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) #", "j.do.pullGitRepo(url) # print(\"npm/bower install\") # res = j.sal.process.executeWithoutPipe(\"cd %s;%s install;%s", "# url = \"https://github.com/Famous/engine-seed\" # cdest = j.do.pullGitRepo(url) # res", "self.famous() # self.flatui() def do(self): if self.checkIPFS == False: self.getIPFS()", "j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"bootstrap-%s-dist\" % version), dpath) # # self._removeMapFiles(dpath) # #", "# z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"codemirror-%s\" % version), dpath)", "timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"codemirror\") # j.sal.fs.removeDirTree(dpath) # z", "def 
codemirror(self): # # version = \"5.9\" # url =", "retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"codemirror\") # j.sal.fs.removeDirTree(dpath) #", "-c $cfgDir/ipfs/main/ add -r bower_components\" print(\"IPFS upload, can take couple", "overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"angular\") # j.sal.fs.removeDirTree(dpath)", "@property def cuisine(self): return j.tools.cuisine.local # ALL NOT NEEDED ANY", "# if j.sal.fs.exists(\"%s/bower\" % j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install()", "sversion), dpath) # # self._removeMapFiles(dpath) # # def _removeMapFiles(self, path):", "\"https://github.com/Famous/engine-seed\" # cdest = j.do.pullGitRepo(url) # res = j.sal.process.executeWithoutPipe(\"cd %s;%s", "= self.cuisine.core.run(cmd) def checkIPFS(self): return j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\") == True def getIPFS(self):", "# self._removeMapFiles(dpath) # # def _removeMapFiles(self, path): # for item", "timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"bootstrap\") # j.sal.fs.removeDirTree(dpath) # z", "\"jqplot\", # \"underscore\", \"spin\", \"moment\", \"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\", \"jqwidgets\", \"d3\", \"angular-latest\"]) cmd", "\"cd $tmpDir/bower;ipfs -c $cfgDir/ipfs/main/ add -r bower_components\" print(\"IPFS upload, can", "\"flatui\", \"bootstrap\", \"famous\", \"codemirror\", \"font-awesome\", \"jqplot\", # \"underscore\", \"spin\", \"moment\",", "# self.famous() # self.flatui() def do(self): if self.checkIPFS == False:", "def npm(self): # if self._npm == False: # if j.sal.fs.exists(\"%s/npm\"", "# self._npm = \"%snpm\" % j.dirs.binDir # return self._npm #", "j.do.download(url, to='', overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"bootstrap\")", "@property # def npm(self): # if self._npm == False: #", "# version = \"1.5.9\" # url = 
\"http://code.angularjs.org/%s/angular-%s.zip\" % (version,", "j.tools.zipfile.get(path) # z.extract(self.buildDir) # z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"angular-%s\" % sversion),", "# # def codemirror(self): # # version = \"5.9\" #", "# if self._npm == False: # if j.sal.fs.exists(\"%s/npm\" % j.dirs.binDir,", "def famous(self): # url = \"https://github.com/Famous/engine-seed\" # cdest = j.do.pullGitRepo(url)", "# if j.sal.fs.exists(\"%s/npm\" % j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install()", "# url = \"http://codemirror.net/codemirror-%s.zip\" % version # path = j.do.download(url,", "\"5.9\" # url = \"http://codemirror.net/codemirror-%s.zip\" % version # path =", "\"codemirror-%s\" % version), dpath) # @property # def npm(self): #", "if self.checkIPFS == False: self.getIPFS() # self.cuisine.apps.nodejs.bowerInstall([\"jquery\", \"flatui\", \"bootstrap\", \"famous\",", "\"http://DlhSoft.com/Packages/DlhSoft.KanbanLibrary.zip\", \"jqwidgets\", \"d3\", \"angular-latest\"]) cmd = \"cd $tmpDir/bower;ipfs -c $cfgDir/ipfs/main/", "% (cdest, self.npm, self.bower)) # # def do1(self): # j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir,", "# self.cuisine.apps.nodejs.install() # self._bower = \"%sbower\" % j.dirs.binDir # return", "# for item in j.sal.fs.find(path, \"*.js.map\"): # item = \"%s/%s\"", "j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\") == True def getIPFS(self): j.tools.cuisine.local.apps.ipfs.install() j.tools.cuisine.local.apps.ipfs.start() b = builder()", "NOT NEEDED ANY LONGER USE bower # def angular(self): #", "\"codemirror\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) # z.extract(self.buildDir) #", "version), dpath) # # self._removeMapFiles(dpath) # # def codemirror(self): #", "add -r bower_components\" print(\"IPFS upload, can take couple of minutes\")", "= \"%sbower\" % j.dirs.binDir # return self._bower # def famous(self):", "# url = 
\"http://code.angularjs.org/%s/angular-%s.zip\" % (version, version) # path =", "do(self): if self.checkIPFS == False: self.getIPFS() # self.cuisine.apps.nodejs.bowerInstall([\"jquery\", \"flatui\", \"bootstrap\",", "# self.getIPFS() # # self.angular() # # self.bootstrap() # #", "= \"3.3.7\" # url = \"https://github.com/twbs/bootstrap/releases/download/v%s/bootstrap-%s-dist.zip\" % (version, version) #", "# # self.angular() # # self.bootstrap() # # self.codemirror() #", "# dpath = j.sal.fs.joinPaths(self.buildDir, \"bootstrap\") # j.sal.fs.removeDirTree(dpath) # z =", "do1(self): # j.sal.fs.createDir(j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\")) # if self.checkIPFS == False: #", "# def buildDir(self): # return j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\") @property def cuisine(self):", "# @property # def buildDir(self): # return j.sal.fs.joinPaths(j.dirs.tmpDir, \"jsbuilder\") @property", "dpath) # @property # def npm(self): # if self._npm ==", "% (cdest, self.npm)) # # def flatui(self): # url =", "checkIPFS(self): return j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\") == True def getIPFS(self): j.tools.cuisine.local.apps.ipfs.install() j.tools.cuisine.local.apps.ipfs.start() b", "def _removeMapFiles(self, path): # for item in j.sal.fs.find(path, \"*.js.map\"): #", "flatui(self): # url = \"https://github.com/designmodo/Flat-UI.git\" # cdest = j.do.pullGitRepo(url) #", "return self._npm # @property # def bower(self): # if self._bower", "j.sal.fs.joinPaths(self.buildDir, \"codemirror\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) # z.extract(self.buildDir)", "bower_components\" print(\"IPFS upload, can take couple of minutes\") res =", "to='', overwrite=False, retry=3, timeout=0) # dpath = j.sal.fs.joinPaths(self.buildDir, \"codemirror\") #", "# return self._npm # @property # def bower(self): # if", "\"1.5.9\" # url = \"http://code.angularjs.org/%s/angular-%s.zip\" % (version, version) # path", "== True def getIPFS(self): 
j.tools.cuisine.local.apps.ipfs.install() j.tools.cuisine.local.apps.ipfs.start() b = builder() b.do()", "= \"https://github.com/Famous/engine-seed\" # cdest = j.do.pullGitRepo(url) # res = j.sal.process.executeWithoutPipe(\"cd", "z.close() # j.sal.fs.renameDir(j.sal.fs.joinPaths(self.buildDir, \"bootstrap-%s-dist\" % version), dpath) # # self._removeMapFiles(dpath)", "if self.checkIPFS == False: # self.getIPFS() # # self.angular() #", "couple of minutes\") res = self.cuisine.core.run(cmd) def checkIPFS(self): return j.sal.nettools.checkUrlReachable(\"http://localhost:5001/webui\")", "% j.dirs.binDir, followlinks=True) == False: # self.cuisine.apps.nodejs.install() # self._bower =", "self.bootstrap() # # self.codemirror() # # self.famous() # self.flatui() def", "# version = \"5.9\" # url = \"http://codemirror.net/codemirror-%s.zip\" % version", "\"angular\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path) # z.extract(self.buildDir) #", "# # self.famous() # self.flatui() def do(self): if self.checkIPFS ==", "dpath = j.sal.fs.joinPaths(self.buildDir, \"bootstrap\") # j.sal.fs.removeDirTree(dpath) # z = j.tools.zipfile.get(path)", "\"http://code.angularjs.org/%s/angular-%s.zip\" % (version, version) # path = j.do.download(url, to='', overwrite=False,", "take couple of minutes\") res = self.cuisine.core.run(cmd) def checkIPFS(self): return", "followlinks=True) == False: # self.cuisine.apps.nodejs.install() # self._npm = \"%snpm\" %", "# print(item) # j.sal.fs.remove(item) # # def bootstrap(self): # version", "# if self._bower == False: # if j.sal.fs.exists(\"%s/bower\" % j.dirs.binDir," ]
[ "PC = PROGC(0) # Start from the first instruction RF", "RF compute new_PC RF.update(new_regs, new_PC) PC.dump() # Print PC RF.dump()", "IM, cycle) # Update RF compute new_PC RF.update(new_regs, new_PC) PC.dump()", "MEM = IMACC(sys.stdin.read()) # Load memory from stdin PC =", "while not halted: Instruction = MEM.getData(PC) # Get current instruction", "# Print PC RF.dump() # Print RF state PC.update(new_PC) #", "REGFLPC() # initialize register and flags EE = ExecE(MEM) IM", "flags EE = ExecE(MEM) IM = IMG() halted = False", "halted = False cycle = 0 if MEM.inst_mem == [\"0\"", "* 16 for i in range(256)]: halted = True while", "REGFLPC, ExecE, plot warnings.filterwarnings(\"ignore\") MEM = IMACC(sys.stdin.read()) # Load memory", "# Start from the first instruction RF = REGFLPC() #", "new_PC RF.update(new_regs, new_PC) PC.dump() # Print PC RF.dump() # Print", "i in range(256)]: halted = True while not halted: Instruction", "PC cycle += 1 MEM.dump() # Print memory state #", "RF.asdct(), IM, cycle) # Update RF compute new_PC RF.update(new_regs, new_PC)", "from parsets import IMACC, IMG, PROGC, REGFLPC, ExecE, plot warnings.filterwarnings(\"ignore\")", "import IMACC, IMG, PROGC, REGFLPC, ExecE, plot warnings.filterwarnings(\"ignore\") MEM =", "from the first instruction RF = REGFLPC() # initialize register", "import warnings import matplotlib.pyplot as plt from parsets import IMACC,", "# Update RF compute new_PC RF.update(new_regs, new_PC) PC.dump() # Print", "# Print RF state PC.update(new_PC) # Update PC cycle +=", "cycle += 1 MEM.dump() # Print memory state # plotting", "halted: Instruction = MEM.getData(PC) # Get current instruction IM.imgx.append(cycle) IM.imgy.append(PC.PC)", "0 if MEM.inst_mem == [\"0\" * 16 for i in", "Update RF compute new_PC RF.update(new_regs, new_PC) PC.dump() # Print PC", "IMACC, IMG, PROGC, REGFLPC, ExecE, plot warnings.filterwarnings(\"ignore\") MEM = IMACC(sys.stdin.read())", "# initialize register and flags EE = ExecE(MEM) IM =", 
"Update PC cycle += 1 MEM.dump() # Print memory state", "instruction RF = REGFLPC() # initialize register and flags EE", "= ExecE(MEM) IM = IMG() halted = False cycle =", "RF = REGFLPC() # initialize register and flags EE =", "= IMACC(sys.stdin.read()) # Load memory from stdin PC = PROGC(0)", "register and flags EE = ExecE(MEM) IM = IMG() halted", "new_PC, new_regs = EE.execute(Instruction, RF.asdct(), IM, cycle) # Update RF", "Print PC RF.dump() # Print RF state PC.update(new_PC) # Update", "warnings import matplotlib.pyplot as plt from parsets import IMACC, IMG,", "IMACC(sys.stdin.read()) # Load memory from stdin PC = PROGC(0) #", "True while not halted: Instruction = MEM.getData(PC) # Get current", "current instruction IM.imgx.append(cycle) IM.imgy.append(PC.PC) halted, new_PC, new_regs = EE.execute(Instruction, RF.asdct(),", "memory from stdin PC = PROGC(0) # Start from the", "# Load memory from stdin PC = PROGC(0) # Start", "MEM.getData(PC) # Get current instruction IM.imgx.append(cycle) IM.imgy.append(PC.PC) halted, new_PC, new_regs", "stdin PC = PROGC(0) # Start from the first instruction", "plt from parsets import IMACC, IMG, PROGC, REGFLPC, ExecE, plot", "ExecE(MEM) IM = IMG() halted = False cycle = 0", "16 for i in range(256)]: halted = True while not", "warnings.filterwarnings(\"ignore\") MEM = IMACC(sys.stdin.read()) # Load memory from stdin PC", "from stdin PC = PROGC(0) # Start from the first", "Get current instruction IM.imgx.append(cycle) IM.imgy.append(PC.PC) halted, new_PC, new_regs = EE.execute(Instruction,", "PC.update(new_PC) # Update PC cycle += 1 MEM.dump() # Print", "IM.imgy.append(PC.PC) halted, new_PC, new_regs = EE.execute(Instruction, RF.asdct(), IM, cycle) #", "PC RF.dump() # Print RF state PC.update(new_PC) # Update PC", "+= 1 MEM.dump() # Print memory state # plotting plot(plt,", "[\"0\" * 16 for i in range(256)]: halted = True", "<gh_stars>1-10 import sys import warnings import matplotlib.pyplot as plt from", "RF.dump() # Print RF 
state PC.update(new_PC) # Update PC cycle", "as plt from parsets import IMACC, IMG, PROGC, REGFLPC, ExecE,", "parsets import IMACC, IMG, PROGC, REGFLPC, ExecE, plot warnings.filterwarnings(\"ignore\") MEM", "= IMG() halted = False cycle = 0 if MEM.inst_mem", "state PC.update(new_PC) # Update PC cycle += 1 MEM.dump() #", "halted = True while not halted: Instruction = MEM.getData(PC) #", "= False cycle = 0 if MEM.inst_mem == [\"0\" *", "# Update PC cycle += 1 MEM.dump() # Print memory", "= MEM.getData(PC) # Get current instruction IM.imgx.append(cycle) IM.imgy.append(PC.PC) halted, new_PC,", "MEM.inst_mem == [\"0\" * 16 for i in range(256)]: halted", "== [\"0\" * 16 for i in range(256)]: halted =", "plot warnings.filterwarnings(\"ignore\") MEM = IMACC(sys.stdin.read()) # Load memory from stdin", "False cycle = 0 if MEM.inst_mem == [\"0\" * 16", "PROGC(0) # Start from the first instruction RF = REGFLPC()", "cycle = 0 if MEM.inst_mem == [\"0\" * 16 for", "for i in range(256)]: halted = True while not halted:", "IM = IMG() halted = False cycle = 0 if", "Instruction = MEM.getData(PC) # Get current instruction IM.imgx.append(cycle) IM.imgy.append(PC.PC) halted,", "EE.execute(Instruction, RF.asdct(), IM, cycle) # Update RF compute new_PC RF.update(new_regs,", "import sys import warnings import matplotlib.pyplot as plt from parsets", "sys import warnings import matplotlib.pyplot as plt from parsets import", "= REGFLPC() # initialize register and flags EE = ExecE(MEM)", "1 MEM.dump() # Print memory state # plotting plot(plt, IM)", "and flags EE = ExecE(MEM) IM = IMG() halted =", "matplotlib.pyplot as plt from parsets import IMACC, IMG, PROGC, REGFLPC,", "IMG, PROGC, REGFLPC, ExecE, plot warnings.filterwarnings(\"ignore\") MEM = IMACC(sys.stdin.read()) #", "range(256)]: halted = True while not halted: Instruction = MEM.getData(PC)", "not halted: Instruction = MEM.getData(PC) # Get current instruction IM.imgx.append(cycle)", "compute new_PC RF.update(new_regs, new_PC) 
PC.dump() # Print PC RF.dump() #", "= 0 if MEM.inst_mem == [\"0\" * 16 for i", "IM.imgx.append(cycle) IM.imgy.append(PC.PC) halted, new_PC, new_regs = EE.execute(Instruction, RF.asdct(), IM, cycle)", "initialize register and flags EE = ExecE(MEM) IM = IMG()", "EE = ExecE(MEM) IM = IMG() halted = False cycle", "PROGC, REGFLPC, ExecE, plot warnings.filterwarnings(\"ignore\") MEM = IMACC(sys.stdin.read()) # Load", "in range(256)]: halted = True while not halted: Instruction =", "instruction IM.imgx.append(cycle) IM.imgy.append(PC.PC) halted, new_PC, new_regs = EE.execute(Instruction, RF.asdct(), IM,", "= EE.execute(Instruction, RF.asdct(), IM, cycle) # Update RF compute new_PC", "= PROGC(0) # Start from the first instruction RF =", "if MEM.inst_mem == [\"0\" * 16 for i in range(256)]:", "ExecE, plot warnings.filterwarnings(\"ignore\") MEM = IMACC(sys.stdin.read()) # Load memory from", "RF.update(new_regs, new_PC) PC.dump() # Print PC RF.dump() # Print RF", "import matplotlib.pyplot as plt from parsets import IMACC, IMG, PROGC,", "cycle) # Update RF compute new_PC RF.update(new_regs, new_PC) PC.dump() #", "PC.dump() # Print PC RF.dump() # Print RF state PC.update(new_PC)", "first instruction RF = REGFLPC() # initialize register and flags", "halted, new_PC, new_regs = EE.execute(Instruction, RF.asdct(), IM, cycle) # Update", "IMG() halted = False cycle = 0 if MEM.inst_mem ==", "Print RF state PC.update(new_PC) # Update PC cycle += 1", "RF state PC.update(new_PC) # Update PC cycle += 1 MEM.dump()", "new_PC) PC.dump() # Print PC RF.dump() # Print RF state", "Load memory from stdin PC = PROGC(0) # Start from", "# Get current instruction IM.imgx.append(cycle) IM.imgy.append(PC.PC) halted, new_PC, new_regs =", "= True while not halted: Instruction = MEM.getData(PC) # Get", "the first instruction RF = REGFLPC() # initialize register and", "Start from the first instruction RF = REGFLPC() # initialize", "new_regs = EE.execute(Instruction, RF.asdct(), IM, cycle) # Update RF 
compute" ]
[ "self.id & 0xFFF class ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]): async def convert(self, ctx: commands.Context,", "role: if role.is_bot_managed(): user = role.tags.bot_id user = await ctx.bot.try_user(user)", "None: role = None with contextlib.suppress(commands.RoleNotFound, commands.NoPrivateMessage): role = await", "the snowflake.\"\"\" return (self.id & 0x1F000) >> 12 @property def", "commands.MemberNotFound: user = None if user is None: tag =", "be used instead.\") argument = 16777215 color = discord.Colour(argument) if", "ctx, argument): match = re.match(r\"(?P<id>[0-9]{15,21})\", argument) if match: emoji_id =", "int(dt.timestamp() * 1000 - 1420070400000) << 22 | 0x3FFFFF class", "re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\", argument) if match is None: raise discord.errors.ObjectNotFound(argument) result =", "else: return None class EmojiConverter(commands.Converter): async def convert(self, ctx: commands.Context,", "id that made the snowflake.\"\"\" return self.id & 0xFFF class", "None with contextlib.suppress(commands.RoleNotFound, commands.NoPrivateMessage): role = await commands.RoleConverter().convert(ctx, argument) if", "= int(match.group(1)) return ObjectPlus(id=result) # remove if edpy adds my", "color: if color.value > 16777215: color = discord.Colour(16777215) return color", "except commands.MemberNotFound: user = None if user is None: tag", "emoji\") class ColorConverter(commands.Converter): async def convert(self, ctx, argument): try: color", "isinstance(argument, int): if argument > 16777215: await ctx.send(f\"{argument} is not", "= discord.utils.get(ctx.guild.members, discriminator=tag.group(1)) user = test or ctx.author if ctx.guild", "match = re.match(r\"(?P<id>[0-9]{15,21})\", argument) if match: emoji_id = match.group(0) extentions", "but more accurate and random than time_snowflake. 
If No dt", "@property def increment_id(self) -> int: \"\"\":class:`int`: Returns the increment id", "tag = re.match(r\"#?(\\d{4})\", argument) if tag: if ctx.guild: test =", "ctx, argument): try: user = await commands.UserConverter().convert(ctx, argument) except commands.UserNotFound:", "argument > 16777215: await ctx.send(f\"{argument} is not valid color, 16777215", "1000 - 1420070400000) << 22 | 0x3FFFFF class ObjectPlus(discord.Object): @property", "role = None with contextlib.suppress(commands.RoleNotFound, commands.NoPrivateMessage): role = await commands.RoleConverter().convert(ctx,", "s) if color and argument.isdigit(): argument = int(argument) if isinstance(argument,", "class ObjectPlus(discord.Object): @property def worker_id(self) -> int: \"\"\":class:`int`: Returns the", "def process_id(self) -> int: \"\"\":class:`int`: Returns the process id that", "None if not user and ctx.guild: try: user = await", "= match.group(0) extentions = [\"gif\", \"png\"] for x in extentions:", "async def convert(self, ctx: commands.Context, argument: str) -> ObjectPlus: match", "EmojiBasic: def __init__(self, id: int, url: str): self.id = id", "None: user = await BetterUserconverter().convert(ctx, argument) user = user or", "commands.UserConverter().convert(ctx, argument) except commands.UserNotFound: user = None if not user", "naive, the timezone is assumed to be local time. 
Returns", "emoji_id = match.group(0) extentions = [\"gif\", \"png\"] for x in", "user = None if user is None: role = None", "Returns the increment id that made the snowflake.\"\"\" return self.id", "[\"gif\", \"png\"] for x in extentions: response = await ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\")", "role.is_bot_managed(): user = role.tags.bot_id user = await ctx.bot.try_user(user) if user", "emoji import contextlib import typing import datetime from discord.ext import", "None: tag = re.match(r\"#?(\\d{4})\", argument) if tag: if ctx.guild: test", "commands.PartialEmojiConversionFailure: pass if arg.rstrip(\"\\N{variation selector-16}\") in emojis or arg in", "user = test or ctx.author return user class EmojiBasic: def", "\"\"\":class:`int`: Returns the process id that made the snowflake.\"\"\" return", "to a snowflake. If naive, the timezone is assumed to", "= discord.utils.get(ctx.bot.users, discriminator=tag.group(1)) user = test or ctx.author return user", "tag = re.match(r\"#?(\\d{4})\", argument) if tag and not ctx.bot.users: test", "increment_id(self) -> int: \"\"\":class:`int`: Returns the increment id that made", "self._get_id_match(argument) or re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\", argument) if match is None: raise discord.errors.ObjectNotFound(argument)", "= emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values() try: return await commands.PartialEmojiConverter().convert(ctx, arg) except commands.PartialEmojiConversionFailure: pass", "= await ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\") if response.ok: return cls(emoji_id, response.real_url) else: return", "than time_snowflake. 
If No dt is not passed, it makes", "from discord.http import Route class BetterMemberConverter(commands.Converter): async def convert(self, ctx,", "& 0x1F000) >> 12 @property def increment_id(self) -> int: \"\"\":class:`int`:", "user is None: tag = re.match(r\"#?(\\d{4})\", argument) if tag: if", "try: user = await commands.UserConverter().convert(ctx, argument) except commands.UserNotFound: user =", "async def convert(self, ctx: commands.Context, arg: str): emojis = emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values()", "user is None: role = None with contextlib.suppress(commands.RoleNotFound, commands.NoPrivateMessage): role", "\"\"\"Returns a numeric snowflake pretending to be created at the", "is assumed to be local time. Returns -------- :class:`int` The", "ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\") if response.ok: return cls(emoji_id, response.real_url) else: return None class", "not valid color, 16777215 will be used instead.\") argument =", "try: color = await commands.ColourConverter().convert(ctx, argument) except commands.BadColourArgument: color =", "tag and not ctx.bot.users: test = discord.utils.get(ctx.bot.users, discriminator=tag.group(1)) user =", "snowflake. 
If naive, the timezone is assumed to be local", "= test or ctx.author return user class EmojiBasic: def __init__(self,", "discord.Colour(argument) if isinstance(argument, list): argument = sorted(filter(lambda x: x.isdigit(), argument))", "0x1F000) >> 12 @property def increment_id(self) -> int: \"\"\":class:`int`: Returns", "= await commands.ColourConverter().convert(ctx, argument) except commands.BadColourArgument: color = None if", "await BetterUserconverter().convert(ctx, argument) user = user or ctx.author return user", "and ctx.guild: try: user = await commands.MemberConverter().convert(ctx, argument) except commands.MemberNotFound:", "def convert(cls, ctx, argument): match = re.match(r\"(?P<id>[0-9]{15,21})\", argument) if match:", "__init__(self, id: int, url: str): self.id = id self.url =", "None if not color and not argument.isdigit(): argument = list(s", "except commands.PartialEmojiConversionFailure: pass if arg.rstrip(\"\\N{variation selector-16}\") in emojis or arg", "ObjectPlus(discord.Object): @property def worker_id(self) -> int: \"\"\":class:`int`: Returns the worker", "return (self.id & 0x1F000) >> 12 @property def increment_id(self) ->", "argument) if match is None: raise discord.errors.ObjectNotFound(argument) result = int(match.group(1))", "ctx: commands.Context, argument: str) -> ObjectPlus: match = self._get_id_match(argument) or", "= int(argument) if isinstance(argument, int): if argument > 16777215: await", "or re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\", argument) if match is None: raise discord.errors.ObjectNotFound(argument) result", "Returns the worker id that made the snowflake.\"\"\" return (self.id", "match = self._get_id_match(argument) or re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\", argument) if match is None:", "argument = 16777215 color = discord.Colour(argument) if isinstance(argument, list): argument", "If No dt is not passed, it makes one from", "convert(self, ctx, argument): try: user = await 
commands.UserConverter().convert(ctx, argument) except", "if ctx.guild is None: user = await BetterUserconverter().convert(ctx, argument) user", "int: \"\"\":class:`int`: Returns the increment id that made the snowflake.\"\"\"", "snowflake.\"\"\" return (self.id & 0x3E0000) >> 17 @property def process_id(self)", "A datetime object to convert to a snowflake. If naive,", "user = await commands.UserConverter().convert(ctx, argument) except commands.UserNotFound: user = None", ">> 17 @property def process_id(self) -> int: \"\"\":class:`int`: Returns the", "= await commands.MemberConverter().convert(ctx, argument) except commands.MemberNotFound: user = None if", "TypeError: color = None if color: if color.value > 16777215:", "@property def process_id(self) -> int: \"\"\":class:`int`: Returns the process id", "color = discord.Colour(16777215) return color def generate_snowflake(dt: typing.Optional[datetime.datetime] = None)", "if not user and ctx.guild: try: user = await commands.MemberConverter().convert(ctx,", "will be used instead.\") argument = 16777215 color = discord.Colour(argument)", "-> int: \"\"\":class:`int`: Returns the worker id that made the", "-------- :class:`int` The snowflake representing the time given. 
\"\"\" dt", "ctx.bot.try_user(user) if user is None: tag = re.match(r\"#?(\\d{4})\", argument) if", "BetterUserconverter(commands.Converter): async def convert(self, ctx, argument): try: user = await", "# remove if edpy adds my pull request into the", "argument)) argument = [int(n) for n in argument][:3] try: color", "arg in emojis: return discord.PartialEmoji(name=arg) else: raise commands.BadArgument(f\"{arg} is not", "result = int(match.group(1)) return ObjectPlus(id=result) # remove if edpy adds", "list(s for s in argument.split(\" \") if s) if color", "int, url: str): self.id = id self.url = url @classmethod", "re.match(r\"#?(\\d{4})\", argument) if tag and not ctx.bot.users: test = discord.utils.get(ctx.bot.users,", "if color: if color.value > 16777215: color = discord.Colour(16777215) return", "ctx.bot.users: test = discord.utils.get(ctx.bot.users, discriminator=tag.group(1)) user = test or ctx.author", "or ctx.author return user class BetterUserconverter(commands.Converter): async def convert(self, ctx,", "return user class BetterUserconverter(commands.Converter): async def convert(self, ctx, argument): try:", "local time. Returns -------- :class:`int` The snowflake representing the time", "argument: str) -> ObjectPlus: match = self._get_id_match(argument) or re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\", argument)", "using utcnow. Parameters ----------- dt: :class:`datetime.datetime` A datetime object to", "remove if edpy adds my pull request into the master.", "int: \"\"\":class:`int`: Returns the process id that made the snowflake.\"\"\"", "id that made the snowflake.\"\"\" return (self.id & 0x1F000) >>", "given date but more accurate and random than time_snowflake. 
If", "import datetime from discord.ext import commands from discord.http import Route", "argument) if tag and not ctx.bot.users: test = discord.utils.get(ctx.bot.users, discriminator=tag.group(1))", "discord.errors.ObjectNotFound(argument) result = int(match.group(1)) return ObjectPlus(id=result) # remove if edpy", "emojis or arg in emojis: return discord.PartialEmoji(name=arg) else: raise commands.BadArgument(f\"{arg}", "color and not argument.isdigit(): argument = list(s for s in", "in emojis: return discord.PartialEmoji(name=arg) else: raise commands.BadArgument(f\"{arg} is not an", "extentions: response = await ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\") if response.ok: return cls(emoji_id, response.real_url)", "class EmojiBasic: def __init__(self, id: int, url: str): self.id =", "passed, it makes one from the current time using utcnow.", "and random than time_snowflake. If No dt is not passed,", "(self.id & 0x3E0000) >> 17 @property def process_id(self) -> int:", "emojis: return discord.PartialEmoji(name=arg) else: raise commands.BadArgument(f\"{arg} is not an emoji\")", "time using utcnow. Parameters ----------- dt: :class:`datetime.datetime` A datetime object", "return int(dt.timestamp() * 1000 - 1420070400000) << 22 | 0x3FFFFF", "process_id(self) -> int: \"\"\":class:`int`: Returns the process id that made", "int(argument) if isinstance(argument, int): if argument > 16777215: await ctx.send(f\"{argument}", "16777215: await ctx.send(f\"{argument} is not valid color, 16777215 will be", "random than time_snowflake. 
If No dt is not passed, it", "re.match(r\"(?P<id>[0-9]{15,21})\", argument) if match: emoji_id = match.group(0) extentions = [\"gif\",", "created at the given date but more accurate and random", "int: \"\"\"Returns a numeric snowflake pretending to be created at", "def __init__(self, id: int, url: str): self.id = id self.url", "16777215 will be used instead.\") argument = 16777215 color =", "response.real_url) else: return None class EmojiConverter(commands.Converter): async def convert(self, ctx:", "Returns the process id that made the snowflake.\"\"\" return (self.id", "convert(self, ctx, argument): try: color = await commands.ColourConverter().convert(ctx, argument) except", "date but more accurate and random than time_snowflake. If No", "commands.RoleConverter().convert(ctx, argument) if role: if role.is_bot_managed(): user = role.tags.bot_id user", "discriminator=tag.group(1)) user = test or ctx.author if ctx.guild is None:", "await commands.PartialEmojiConverter().convert(ctx, arg) except commands.PartialEmojiConversionFailure: pass if arg.rstrip(\"\\N{variation selector-16}\") in", "is not an emoji\") class ColorConverter(commands.Converter): async def convert(self, ctx,", "list): argument = sorted(filter(lambda x: x.isdigit(), argument)) argument = [int(n)", "color = await commands.ColourConverter().convert(ctx, argument) except commands.BadColourArgument: color = None", "\") if s) if color and argument.isdigit(): argument = int(argument)", "argument) except commands.UserNotFound: user = None if not user and", "if color.value > 16777215: color = discord.Colour(16777215) return color def", "-> int: \"\"\"Returns a numeric snowflake pretending to be created", "try: user = await commands.MemberConverter().convert(ctx, argument) except commands.MemberNotFound: user =", "return await commands.PartialEmojiConverter().convert(ctx, arg) except commands.PartialEmojiConversionFailure: pass if arg.rstrip(\"\\N{variation selector-16}\")", "user = None if not user and 
ctx.guild: try: user", "in argument][:3] try: color = discord.Colour.from_rgb(*argument) except TypeError: color =", "= re.match(r\"#?(\\d{4})\", argument) if tag and not ctx.bot.users: test =", "time_snowflake. If No dt is not passed, it makes one", "commands.UserNotFound: user = None if not user and ctx.guild: try:", "is not valid color, 16777215 will be used instead.\") argument", "def convert(self, ctx: commands.Context, arg: str): emojis = emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values() try:", "argument = [int(n) for n in argument][:3] try: color =", "= id self.url = url @classmethod async def convert(cls, ctx,", "None if user is None: tag = re.match(r\"#?(\\d{4})\", argument) if", "user class EmojiBasic: def __init__(self, id: int, url: str): self.id", "not an emoji\") class ColorConverter(commands.Converter): async def convert(self, ctx, argument):", "test or ctx.author if ctx.guild is None: user = await", "17 @property def process_id(self) -> int: \"\"\":class:`int`: Returns the process", "discord.PartialEmoji(name=arg) else: raise commands.BadArgument(f\"{arg} is not an emoji\") class ColorConverter(commands.Converter):", "the given date but more accurate and random than time_snowflake.", "convert(self, ctx: commands.Context, argument: str) -> ObjectPlus: match = self._get_id_match(argument)", "user = await commands.MemberConverter().convert(ctx, argument) except commands.MemberNotFound: user = None", "test = discord.utils.get(ctx.guild.members, discriminator=tag.group(1)) user = test or ctx.author if", "url @classmethod async def convert(cls, ctx, argument): match = re.match(r\"(?P<id>[0-9]{15,21})\",", "test = discord.utils.get(ctx.bot.users, discriminator=tag.group(1)) user = test or ctx.author return", "or ctx.author return user class EmojiBasic: def __init__(self, id: int,", "at the given date but more accurate and random than", "= re.match(r\"#?(\\d{4})\", argument) if tag: if ctx.guild: test = discord.utils.get(ctx.guild.members,", 
"<filename>utils/converters.py<gh_stars>0 import discord import re import emoji import contextlib import", "def increment_id(self) -> int: \"\"\":class:`int`: Returns the increment id that", "class EmojiConverter(commands.Converter): async def convert(self, ctx: commands.Context, arg: str): emojis", "None if color: if color.value > 16777215: color = discord.Colour(16777215)", "increment id that made the snowflake.\"\"\" return self.id & 0xFFF", "\"\"\" dt = dt or discord.utils.utcnow() return int(dt.timestamp() * 1000", "argument) except commands.BadColourArgument: color = None if not color and", "self.id = id self.url = url @classmethod async def convert(cls,", "one from the current time using utcnow. Parameters ----------- dt:", "= [\"gif\", \"png\"] for x in extentions: response = await", "the timezone is assumed to be local time. Returns --------", "numeric snowflake pretending to be created at the given date", "argument): match = re.match(r\"(?P<id>[0-9]{15,21})\", argument) if match: emoji_id = match.group(0)", "async def convert(self, ctx, argument): try: color = await commands.ColourConverter().convert(ctx,", "generate_snowflake(dt: typing.Optional[datetime.datetime] = None) -> int: \"\"\"Returns a numeric snowflake", "typing import datetime from discord.ext import commands from discord.http import", "dt: :class:`datetime.datetime` A datetime object to convert to a snowflake.", "ColorConverter(commands.Converter): async def convert(self, ctx, argument): try: color = await", "argument) except commands.MemberNotFound: user = None if user is None:", "class BetterMemberConverter(commands.Converter): async def convert(self, ctx, argument): try: user =", "None if user is None: role = None with contextlib.suppress(commands.RoleNotFound,", "user = await BetterUserconverter().convert(ctx, argument) user = user or ctx.author", "id self.url = url @classmethod async def convert(cls, ctx, argument):", "def convert(self, ctx, argument): try: color = await 
commands.ColourConverter().convert(ctx, argument)", "----------- dt: :class:`datetime.datetime` A datetime object to convert to a", "= 16777215 color = discord.Colour(argument) if isinstance(argument, list): argument =", "return cls(emoji_id, response.real_url) else: return None class EmojiConverter(commands.Converter): async def", "if tag and not ctx.bot.users: test = discord.utils.get(ctx.bot.users, discriminator=tag.group(1)) user", "worker_id(self) -> int: \"\"\":class:`int`: Returns the worker id that made", "import contextlib import typing import datetime from discord.ext import commands", "x.isdigit(), argument)) argument = [int(n) for n in argument][:3] try:", "datetime from discord.ext import commands from discord.http import Route class", "not user and ctx.guild: try: user = await commands.MemberConverter().convert(ctx, argument)", "commands.NoPrivateMessage): role = await commands.RoleConverter().convert(ctx, argument) if role: if role.is_bot_managed():", "it makes one from the current time using utcnow. 
Parameters", "ctx.guild is None: user = await BetterUserconverter().convert(ctx, argument) user =", "valid color, 16777215 will be used instead.\") argument = 16777215", "discord.Colour.from_rgb(*argument) except TypeError: color = None if color: if color.value", "arg) except commands.PartialEmojiConversionFailure: pass if arg.rstrip(\"\\N{variation selector-16}\") in emojis or", "match.group(0) extentions = [\"gif\", \"png\"] for x in extentions: response", "user = None if user is None: tag = re.match(r\"#?(\\d{4})\",", "= sorted(filter(lambda x: x.isdigit(), argument)) argument = [int(n) for n", "discord import re import emoji import contextlib import typing import", "that made the snowflake.\"\"\" return (self.id & 0x1F000) >> 12", "import discord import re import emoji import contextlib import typing", "\"png\"] for x in extentions: response = await ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\") if", "color = discord.Colour(argument) if isinstance(argument, list): argument = sorted(filter(lambda x:", "if response.ok: return cls(emoji_id, response.real_url) else: return None class EmojiConverter(commands.Converter):", "accurate and random than time_snowflake. 
If No dt is not", "commands from discord.http import Route class BetterMemberConverter(commands.Converter): async def convert(self,", "= None if not user and ctx.guild: try: user =", "response.ok: return cls(emoji_id, response.real_url) else: return None class EmojiConverter(commands.Converter): async", "def convert(self, ctx, argument): try: user = await commands.MemberConverter().convert(ctx, argument)", "or ctx.author if ctx.guild is None: user = await BetterUserconverter().convert(ctx,", "class ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]): async def convert(self, ctx: commands.Context, argument: str) ->", "color = None if not color and not argument.isdigit(): argument", "the process id that made the snowflake.\"\"\" return (self.id &", "await ctx.bot.try_user(user) if user is None: tag = re.match(r\"#?(\\d{4})\", argument)", "if match is None: raise discord.errors.ObjectNotFound(argument) result = int(match.group(1)) return", "not ctx.bot.users: test = discord.utils.get(ctx.bot.users, discriminator=tag.group(1)) user = test or", "argument): try: user = await commands.UserConverter().convert(ctx, argument) except commands.UserNotFound: user", "to be local time. Returns -------- :class:`int` The snowflake representing", "commands.Context, arg: str): emojis = emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values() try: return await commands.PartialEmojiConverter().convert(ctx,", "return discord.PartialEmoji(name=arg) else: raise commands.BadArgument(f\"{arg} is not an emoji\") class", "await commands.RoleConverter().convert(ctx, argument) if role: if role.is_bot_managed(): user = role.tags.bot_id", "try: color = discord.Colour.from_rgb(*argument) except TypeError: color = None if", "return user class EmojiBasic: def __init__(self, id: int, url: str):", ":class:`datetime.datetime` A datetime object to convert to a snowflake. 
If", "discord.utils.utcnow() return int(dt.timestamp() * 1000 - 1420070400000) << 22 |", "1420070400000) << 22 | 0x3FFFFF class ObjectPlus(discord.Object): @property def worker_id(self)", "and argument.isdigit(): argument = int(argument) if isinstance(argument, int): if argument", "ObjectPlus(id=result) # remove if edpy adds my pull request into", "typing.Optional[datetime.datetime] = None) -> int: \"\"\"Returns a numeric snowflake pretending", "extentions = [\"gif\", \"png\"] for x in extentions: response =", "BetterUserconverter().convert(ctx, argument) user = user or ctx.author return user class", "Returns -------- :class:`int` The snowflake representing the time given. \"\"\"", "- 1420070400000) << 22 | 0x3FFFFF class ObjectPlus(discord.Object): @property def", "dt is not passed, it makes one from the current", "\"\"\":class:`int`: Returns the increment id that made the snowflake.\"\"\" return", "0x3FFFFF class ObjectPlus(discord.Object): @property def worker_id(self) -> int: \"\"\":class:`int`: Returns", "22 | 0x3FFFFF class ObjectPlus(discord.Object): @property def worker_id(self) -> int:", "color = discord.Colour.from_rgb(*argument) except TypeError: color = None if color:", "int: \"\"\":class:`int`: Returns the worker id that made the snowflake.\"\"\"", "Route class BetterMemberConverter(commands.Converter): async def convert(self, ctx, argument): try: user", "import typing import datetime from discord.ext import commands from discord.http", "commands.MemberNotFound: user = None if user is None: role =", "None: tag = re.match(r\"#?(\\d{4})\", argument) if tag and not ctx.bot.users:", "argument.split(\" \") if s) if color and argument.isdigit(): argument =", "> 16777215: await ctx.send(f\"{argument} is not valid color, 16777215 will", "<< 22 | 0x3FFFFF class ObjectPlus(discord.Object): @property def worker_id(self) ->", "if not color and not argument.isdigit(): argument = list(s for", "dt or discord.utils.utcnow() return int(dt.timestamp() * 1000 - 
1420070400000) <<", "= discord.Colour(argument) if isinstance(argument, list): argument = sorted(filter(lambda x: x.isdigit(),", "= list(s for s in argument.split(\" \") if s) if", "from the current time using utcnow. Parameters ----------- dt: :class:`datetime.datetime`", "int): if argument > 16777215: await ctx.send(f\"{argument} is not valid", "& 0x3E0000) >> 17 @property def process_id(self) -> int: \"\"\":class:`int`:", "> 16777215: color = discord.Colour(16777215) return color def generate_snowflake(dt: typing.Optional[datetime.datetime]", "= await BetterUserconverter().convert(ctx, argument) user = user or ctx.author return", "if argument > 16777215: await ctx.send(f\"{argument} is not valid color,", "for n in argument][:3] try: color = discord.Colour.from_rgb(*argument) except TypeError:", "current time using utcnow. Parameters ----------- dt: :class:`datetime.datetime` A datetime", "that made the snowflake.\"\"\" return (self.id & 0x3E0000) >> 17", "convert(self, ctx, argument): try: user = await commands.MemberConverter().convert(ctx, argument) except", "except commands.BadColourArgument: color = None if not color and not", "instead.\") argument = 16777215 color = discord.Colour(argument) if isinstance(argument, list):", "argument) if role: if role.is_bot_managed(): user = role.tags.bot_id user =", "argument): try: color = await commands.ColourConverter().convert(ctx, argument) except commands.BadColourArgument: color", "that made the snowflake.\"\"\" return self.id & 0xFFF class ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]):", "argument) if tag: if ctx.guild: test = discord.utils.get(ctx.guild.members, discriminator=tag.group(1)) user", "x in extentions: response = await ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\") if response.ok: return", "commands.Context, argument: str) -> ObjectPlus: match = self._get_id_match(argument) or re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\",", 
"re.match(r\"#?(\\d{4})\", argument) if tag: if ctx.guild: test = discord.utils.get(ctx.guild.members, discriminator=tag.group(1))", "def generate_snowflake(dt: typing.Optional[datetime.datetime] = None) -> int: \"\"\"Returns a numeric", "raise discord.errors.ObjectNotFound(argument) result = int(match.group(1)) return ObjectPlus(id=result) # remove if", "url: str): self.id = id self.url = url @classmethod async", "(self.id & 0x1F000) >> 12 @property def increment_id(self) -> int:", "class BetterUserconverter(commands.Converter): async def convert(self, ctx, argument): try: user =", "@classmethod async def convert(cls, ctx, argument): match = re.match(r\"(?P<id>[0-9]{15,21})\", argument)", "-> int: \"\"\":class:`int`: Returns the process id that made the", "-> int: \"\"\":class:`int`: Returns the increment id that made the", "n in argument][:3] try: color = discord.Colour.from_rgb(*argument) except TypeError: color", "if role.is_bot_managed(): user = role.tags.bot_id user = await ctx.bot.try_user(user) if", "= discord.Colour.from_rgb(*argument) except TypeError: color = None if color: if", "\"\"\":class:`int`: Returns the worker id that made the snowflake.\"\"\" return", "ctx, argument): try: color = await commands.ColourConverter().convert(ctx, argument) except commands.BadColourArgument:", "= [int(n) for n in argument][:3] try: color = discord.Colour.from_rgb(*argument)", "= re.match(r\"(?P<id>[0-9]{15,21})\", argument) if match: emoji_id = match.group(0) extentions =", "= discord.Colour(16777215) return color def generate_snowflake(dt: typing.Optional[datetime.datetime] = None) ->", "def convert(self, ctx: commands.Context, argument: str) -> ObjectPlus: match =", "user = role.tags.bot_id user = await ctx.bot.try_user(user) if user is", "= self._get_id_match(argument) or re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\", argument) if match is None: raise", "color.value > 16777215: color = discord.Colour(16777215) return color def generate_snowflake(dt:", 
"EmojiConverter(commands.Converter): async def convert(self, ctx: commands.Context, arg: str): emojis =", "if ctx.guild: test = discord.utils.get(ctx.guild.members, discriminator=tag.group(1)) user = test or", "= None if user is None: role = None with", "selector-16}\") in emojis or arg in emojis: return discord.PartialEmoji(name=arg) else:", "made the snowflake.\"\"\" return (self.id & 0x3E0000) >> 17 @property", "user class BetterUserconverter(commands.Converter): async def convert(self, ctx, argument): try: user", "argument) user = user or ctx.author return user class BetterUserconverter(commands.Converter):", "await commands.UserConverter().convert(ctx, argument) except commands.UserNotFound: user = None if not", "user = user or ctx.author return user class BetterUserconverter(commands.Converter): async", "user or ctx.author return user class BetterUserconverter(commands.Converter): async def convert(self,", "user = test or ctx.author if ctx.guild is None: user", "ctx, argument): try: user = await commands.MemberConverter().convert(ctx, argument) except commands.MemberNotFound:", "be created at the given date but more accurate and", "arg: str): emojis = emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values() try: return await commands.PartialEmojiConverter().convert(ctx, arg)", "not color and not argument.isdigit(): argument = list(s for s", "pass if arg.rstrip(\"\\N{variation selector-16}\") in emojis or arg in emojis:", "argument = list(s for s in argument.split(\" \") if s)", "discriminator=tag.group(1)) user = test or ctx.author return user class EmojiBasic:", "= url @classmethod async def convert(cls, ctx, argument): match =", "utcnow. 
Parameters ----------- dt: :class:`datetime.datetime` A datetime object to convert", "for s in argument.split(\" \") if s) if color and", "async def convert(cls, ctx, argument): match = re.match(r\"(?P<id>[0-9]{15,21})\", argument) if", "is None: tag = re.match(r\"#?(\\d{4})\", argument) if tag and not", "def convert(self, ctx, argument): try: user = await commands.UserConverter().convert(ctx, argument)", ":class:`int` The snowflake representing the time given. \"\"\" dt =", "the time given. \"\"\" dt = dt or discord.utils.utcnow() return", "ctx.send(f\"{argument} is not valid color, 16777215 will be used instead.\")", "discord.utils.get(ctx.guild.members, discriminator=tag.group(1)) user = test or ctx.author if ctx.guild is", "discord.http import Route class BetterMemberConverter(commands.Converter): async def convert(self, ctx, argument):", "raise commands.BadArgument(f\"{arg} is not an emoji\") class ColorConverter(commands.Converter): async def", "ctx.guild: try: user = await commands.MemberConverter().convert(ctx, argument) except commands.MemberNotFound: user", "No dt is not passed, it makes one from the", "await ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\") if response.ok: return cls(emoji_id, response.real_url) else: return None", "contextlib.suppress(commands.RoleNotFound, commands.NoPrivateMessage): role = await commands.RoleConverter().convert(ctx, argument) if role: if", "emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values() try: return await commands.PartialEmojiConverter().convert(ctx, arg) except commands.PartialEmojiConversionFailure: pass if", "16777215: color = discord.Colour(16777215) return color def generate_snowflake(dt: typing.Optional[datetime.datetime] =", "0xFFF class ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]): async def convert(self, ctx: commands.Context, argument: str)", "int(match.group(1)) return ObjectPlus(id=result) # remove if edpy adds my pull", "match is None: raise 
discord.errors.ObjectNotFound(argument) result = int(match.group(1)) return ObjectPlus(id=result)", "None: raise discord.errors.ObjectNotFound(argument) result = int(match.group(1)) return ObjectPlus(id=result) # remove", "self.url = url @classmethod async def convert(cls, ctx, argument): match", "try: return await commands.PartialEmojiConverter().convert(ctx, arg) except commands.PartialEmojiConversionFailure: pass if arg.rstrip(\"\\N{variation", "argument) if match: emoji_id = match.group(0) extentions = [\"gif\", \"png\"]", "if color and argument.isdigit(): argument = int(argument) if isinstance(argument, int):", "response = await ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\") if response.ok: return cls(emoji_id, response.real_url) else:", "= await ctx.bot.try_user(user) if user is None: tag = re.match(r\"#?(\\d{4})\",", "str) -> ObjectPlus: match = self._get_id_match(argument) or re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\", argument) if", "contextlib import typing import datetime from discord.ext import commands from", "tag: if ctx.guild: test = discord.utils.get(ctx.guild.members, discriminator=tag.group(1)) user = test", "return self.id & 0xFFF class ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]): async def convert(self, ctx:", "return color def generate_snowflake(dt: typing.Optional[datetime.datetime] = None) -> int: \"\"\"Returns", "commands.MemberConverter().convert(ctx, argument) except commands.MemberNotFound: user = None if user is", "| 0x3FFFFF class ObjectPlus(discord.Object): @property def worker_id(self) -> int: \"\"\":class:`int`:", "the snowflake.\"\"\" return (self.id & 0x3E0000) >> 17 @property def", "time. Returns -------- :class:`int` The snowflake representing the time given.", "given. \"\"\" dt = dt or discord.utils.utcnow() return int(dt.timestamp() *", "timezone is assumed to be local time. 
Returns -------- :class:`int`", "cls(emoji_id, response.real_url) else: return None class EmojiConverter(commands.Converter): async def convert(self,", "is not passed, it makes one from the current time", "ctx.guild: test = discord.utils.get(ctx.guild.members, discriminator=tag.group(1)) user = test or ctx.author", "re import emoji import contextlib import typing import datetime from", "import Route class BetterMemberConverter(commands.Converter): async def convert(self, ctx, argument): try:", "The snowflake representing the time given. \"\"\" dt = dt", "id: int, url: str): self.id = id self.url = url", "color and argument.isdigit(): argument = int(argument) if isinstance(argument, int): if", "and not ctx.bot.users: test = discord.utils.get(ctx.bot.users, discriminator=tag.group(1)) user = test", "12 @property def increment_id(self) -> int: \"\"\":class:`int`: Returns the increment", "arg.rstrip(\"\\N{variation selector-16}\") in emojis or arg in emojis: return discord.PartialEmoji(name=arg)", "user and ctx.guild: try: user = await commands.MemberConverter().convert(ctx, argument) except", "test or ctx.author return user class EmojiBasic: def __init__(self, id:", "to convert to a snowflake. If naive, the timezone is", "from discord.ext import commands from discord.http import Route class BetterMemberConverter(commands.Converter):", "= await commands.UserConverter().convert(ctx, argument) except commands.UserNotFound: user = None if", "if user is None: tag = re.match(r\"#?(\\d{4})\", argument) if tag", "be local time. Returns -------- :class:`int` The snowflake representing the", "datetime object to convert to a snowflake. 
If naive, the", "import emoji import contextlib import typing import datetime from discord.ext", "BetterMemberConverter(commands.Converter): async def convert(self, ctx, argument): try: user = await", "the snowflake.\"\"\" return self.id & 0xFFF class ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]): async def", "except commands.MemberNotFound: user = None if user is None: role", "return ObjectPlus(id=result) # remove if edpy adds my pull request", "else: raise commands.BadArgument(f\"{arg} is not an emoji\") class ColorConverter(commands.Converter): async", "ctx.author return user class BetterUserconverter(commands.Converter): async def convert(self, ctx, argument):", "if match: emoji_id = match.group(0) extentions = [\"gif\", \"png\"] for", "commands.BadColourArgument: color = None if not color and not argument.isdigit():", "sorted(filter(lambda x: x.isdigit(), argument)) argument = [int(n) for n in", "or discord.utils.utcnow() return int(dt.timestamp() * 1000 - 1420070400000) << 22", "in emojis or arg in emojis: return discord.PartialEmoji(name=arg) else: raise", "color = None if color: if color.value > 16777215: color", "if role: if role.is_bot_managed(): user = role.tags.bot_id user = await", "is None: user = await BetterUserconverter().convert(ctx, argument) user = user", "= None) -> int: \"\"\"Returns a numeric snowflake pretending to", "* 1000 - 1420070400000) << 22 | 0x3FFFFF class ObjectPlus(discord.Object):", "= await commands.RoleConverter().convert(ctx, argument) if role: if role.is_bot_managed(): user =", "time given. 
\"\"\" dt = dt or discord.utils.utcnow() return int(dt.timestamp()", "async def convert(self, ctx, argument): try: user = await commands.UserConverter().convert(ctx,", "return (self.id & 0x3E0000) >> 17 @property def process_id(self) ->", "if tag: if ctx.guild: test = discord.utils.get(ctx.guild.members, discriminator=tag.group(1)) user =", "= None with contextlib.suppress(commands.RoleNotFound, commands.NoPrivateMessage): role = await commands.RoleConverter().convert(ctx, argument)", "argument = int(argument) if isinstance(argument, int): if argument > 16777215:", "or arg in emojis: return discord.PartialEmoji(name=arg) else: raise commands.BadArgument(f\"{arg} is", "the current time using utcnow. Parameters ----------- dt: :class:`datetime.datetime` A", "argument.isdigit(): argument = list(s for s in argument.split(\" \") if", "-> ObjectPlus: match = self._get_id_match(argument) or re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\", argument) if match", "discord.ext import commands from discord.http import Route class BetterMemberConverter(commands.Converter): async", "& 0xFFF class ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]): async def convert(self, ctx: commands.Context, argument:", "argument): try: user = await commands.MemberConverter().convert(ctx, argument) except commands.MemberNotFound: user", "if arg.rstrip(\"\\N{variation selector-16}\") in emojis or arg in emojis: return", "not argument.isdigit(): argument = list(s for s in argument.split(\" \")", "commands.BadArgument(f\"{arg} is not an emoji\") class ColorConverter(commands.Converter): async def convert(self,", "None) -> int: \"\"\"Returns a numeric snowflake pretending to be", "except commands.UserNotFound: user = None if not user and ctx.guild:", "import commands from discord.http import Route class BetterMemberConverter(commands.Converter): async def", "except TypeError: color = None if color: if color.value >", "convert to a snowflake. 
If naive, the timezone is assumed", "used instead.\") argument = 16777215 color = discord.Colour(argument) if isinstance(argument,", "discord.utils.get(ctx.bot.users, discriminator=tag.group(1)) user = test or ctx.author return user class", "argument][:3] try: color = discord.Colour.from_rgb(*argument) except TypeError: color = None", "commands.ColourConverter().convert(ctx, argument) except commands.BadColourArgument: color = None if not color", "more accurate and random than time_snowflake. If No dt is", "ctx: commands.Context, arg: str): emojis = emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values() try: return await", "await ctx.send(f\"{argument} is not valid color, 16777215 will be used", "assumed to be local time. Returns -------- :class:`int` The snowflake", "snowflake pretending to be created at the given date but", "= test or ctx.author if ctx.guild is None: user =", "= None if user is None: tag = re.match(r\"#?(\\d{4})\", argument)", "worker id that made the snowflake.\"\"\" return (self.id & 0x3E0000)", "color, 16777215 will be used instead.\") argument = 16777215 color", "not passed, it makes one from the current time using", "snowflake.\"\"\" return self.id & 0xFFF class ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]): async def convert(self,", "the worker id that made the snowflake.\"\"\" return (self.id &", "for x in extentions: response = await ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\") if response.ok:", "commands.PartialEmojiConverter().convert(ctx, arg) except commands.PartialEmojiConversionFailure: pass if arg.rstrip(\"\\N{variation selector-16}\") in emojis", "in argument.split(\" \") if s) if color and argument.isdigit(): argument", "await commands.MemberConverter().convert(ctx, argument) except commands.MemberNotFound: user = None if user", "with contextlib.suppress(commands.RoleNotFound, commands.NoPrivateMessage): role = await commands.RoleConverter().convert(ctx, argument) if role:", 
"color def generate_snowflake(dt: typing.Optional[datetime.datetime] = None) -> int: \"\"\"Returns a", "makes one from the current time using utcnow. Parameters -----------", "the increment id that made the snowflake.\"\"\" return self.id &", "match: emoji_id = match.group(0) extentions = [\"gif\", \"png\"] for x", "object to convert to a snowflake. If naive, the timezone", "an emoji\") class ColorConverter(commands.Converter): async def convert(self, ctx, argument): try:", "is None: raise discord.errors.ObjectNotFound(argument) result = int(match.group(1)) return ObjectPlus(id=result) #", "convert(cls, ctx, argument): match = re.match(r\"(?P<id>[0-9]{15,21})\", argument) if match: emoji_id", "a snowflake. If naive, the timezone is assumed to be", "None class EmojiConverter(commands.Converter): async def convert(self, ctx: commands.Context, arg: str):", "0x3E0000) >> 17 @property def process_id(self) -> int: \"\"\":class:`int`: Returns", "snowflake.\"\"\" return (self.id & 0x1F000) >> 12 @property def increment_id(self)", "x: x.isdigit(), argument)) argument = [int(n) for n in argument][:3]", "= user or ctx.author return user class BetterUserconverter(commands.Converter): async def", "async def convert(self, ctx, argument): try: user = await commands.MemberConverter().convert(ctx,", "= None if color: if color.value > 16777215: color =", "@property def worker_id(self) -> int: \"\"\":class:`int`: Returns the worker id", "is None: role = None with contextlib.suppress(commands.RoleNotFound, commands.NoPrivateMessage): role =", "role.tags.bot_id user = await ctx.bot.try_user(user) if user is None: tag", "s in argument.split(\" \") if s) if color and argument.isdigit():", "snowflake representing the time given. 
\"\"\" dt = dt or", "16777215 color = discord.Colour(argument) if isinstance(argument, list): argument = sorted(filter(lambda", "= dt or discord.utils.utcnow() return int(dt.timestamp() * 1000 - 1420070400000)", "emojis = emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values() try: return await commands.PartialEmojiConverter().convert(ctx, arg) except commands.PartialEmojiConversionFailure:", "if s) if color and argument.isdigit(): argument = int(argument) if", "if user is None: role = None with contextlib.suppress(commands.RoleNotFound, commands.NoPrivateMessage):", "return None class EmojiConverter(commands.Converter): async def convert(self, ctx: commands.Context, arg:", "ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]): async def convert(self, ctx: commands.Context, argument: str) -> ObjectPlus:", "if isinstance(argument, list): argument = sorted(filter(lambda x: x.isdigit(), argument)) argument", "role = await commands.RoleConverter().convert(ctx, argument) if role: if role.is_bot_managed(): user", "str): emojis = emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values() try: return await commands.PartialEmojiConverter().convert(ctx, arg) except", "if user is None: tag = re.match(r\"#?(\\d{4})\", argument) if tag:", "ctx.author if ctx.guild is None: user = await BetterUserconverter().convert(ctx, argument)", "user = await ctx.bot.try_user(user) if user is None: tag =", "discord.Colour(16777215) return color def generate_snowflake(dt: typing.Optional[datetime.datetime] = None) -> int:", "is None: tag = re.match(r\"#?(\\d{4})\", argument) if tag: if ctx.guild:", "and not argument.isdigit(): argument = list(s for s in argument.split(\"", "[int(n) for n in argument][:3] try: color = discord.Colour.from_rgb(*argument) except", "If naive, the timezone is assumed to be local time.", "pretending to be created at the given date but more", "Parameters ----------- dt: :class:`datetime.datetime` A datetime object to convert to", "if isinstance(argument, int): 
if argument > 16777215: await ctx.send(f\"{argument} is", "id that made the snowflake.\"\"\" return (self.id & 0x3E0000) >>", "dt = dt or discord.utils.utcnow() return int(dt.timestamp() * 1000 -", "representing the time given. \"\"\" dt = dt or discord.utils.utcnow()", "argument = sorted(filter(lambda x: x.isdigit(), argument)) argument = [int(n) for", "to be created at the given date but more accurate", "isinstance(argument, list): argument = sorted(filter(lambda x: x.isdigit(), argument)) argument =", "= role.tags.bot_id user = await ctx.bot.try_user(user) if user is None:", "import re import emoji import contextlib import typing import datetime", "process id that made the snowflake.\"\"\" return (self.id & 0x1F000)", "ctx.author return user class EmojiBasic: def __init__(self, id: int, url:", "made the snowflake.\"\"\" return (self.id & 0x1F000) >> 12 @property", "def worker_id(self) -> int: \"\"\":class:`int`: Returns the worker id that", "a numeric snowflake pretending to be created at the given", "argument.isdigit(): argument = int(argument) if isinstance(argument, int): if argument >", "made the snowflake.\"\"\" return self.id & 0xFFF class ObjectPlusConverter(commands.converter.IDConverter[commands.Converter]): async", "class ColorConverter(commands.Converter): async def convert(self, ctx, argument): try: color =", "convert(self, ctx: commands.Context, arg: str): emojis = emoji.unicode_codes.EMOJI_UNICODE[\"en\"].values() try: return", "str): self.id = id self.url = url @classmethod async def", "in extentions: response = await ctx.bot.session.get(f\"https://cdn.discordapp.com/emojis/{emoji_id}.{x}\") if response.ok: return cls(emoji_id,", "await commands.ColourConverter().convert(ctx, argument) except commands.BadColourArgument: color = None if not", "user is None: tag = re.match(r\"#?(\\d{4})\", argument) if tag and", "ObjectPlus: match = self._get_id_match(argument) or re.match(r\"<(?:@(?:!|&)?|#)([0-9]{15,20})>$\", argument) if match is", ">> 12 
@property def increment_id(self) -> int: \"\"\":class:`int`: Returns the", "= None if not color and not argument.isdigit(): argument =" ]
[ "structure_klifs_ids = _parse_structure_klifs_ids(args.input) encode(structure_klifs_ids, args.output, args.local, args.ncores) def _parse_structure_klifs_ids(args_input): \"\"\"", "structure_klifs_ids = [int(args_input[0])] except ValueError: structure_klifs_ids = np.genfromtxt(fname=args_input[0], dtype=int).tolist() else:", "[int(args_input[0])] except ValueError: structure_klifs_ids = np.genfromtxt(fname=args_input[0], dtype=int).tolist() else: structure_klifs_ids =", ": argsparse.Namespace CLI arguments. \"\"\" configure_logger(args.output) structure_klifs_ids = _parse_structure_klifs_ids(args.input) encode(structure_klifs_ids,", "encode(structure_klifs_ids, args.output, args.local, args.ncores) def _parse_structure_klifs_ids(args_input): \"\"\" Parse structure KLIFS", "_parse_structure_klifs_ids(args_input): \"\"\" Parse structure KLIFS IDs. Parameters ---------- args_input :", "of int List of structure KLIFS IDs. \"\"\" if len(args_input)", "\"\"\" if len(args_input) == 1: try: structure_klifs_ids = [int(args_input[0])] except", "(one ID per row) or one or more structure KLIFS", "argsparse.Namespace CLI arguments. \"\"\" configure_logger(args.output) structure_klifs_ids = _parse_structure_klifs_ids(args.input) encode(structure_klifs_ids, args.output,", "Encode structures (generate fingerprints) from CLI arguments. \"\"\" import numpy", "KLIFS ID (one ID per row) or one or more", "kissim.api import encode from kissim.cli.utils import configure_logger def encode_from_cli(args): \"\"\"", "ID (one ID per row) or one or more structure", "str Either path to txt file with structure KLIFS ID", "Either path to txt file with structure KLIFS ID (one", "import configure_logger def encode_from_cli(args): \"\"\" Encode structures. Parameters ---------- args", "KLIFS IDs. 
\"\"\" if len(args_input) == 1: try: structure_klifs_ids =", "of str Either path to txt file with structure KLIFS", "kissim.cli.utils import configure_logger def encode_from_cli(args): \"\"\" Encode structures. Parameters ----------", "path to txt file with structure KLIFS ID (one ID", "\"\"\" kissim.cli.encode Encode structures (generate fingerprints) from CLI arguments. \"\"\"", "\"\"\" import numpy as np from kissim.api import encode from", "dtype=int).tolist() else: structure_klifs_ids = [int(i) for i in args_input] return", "\"\"\" Parse structure KLIFS IDs. Parameters ---------- args_input : list", "Encode structures. Parameters ---------- args : argsparse.Namespace CLI arguments. \"\"\"", "kissim.cli.encode Encode structures (generate fingerprints) from CLI arguments. \"\"\" import", "KLIFS IDs. Parameters ---------- args_input : list of str Either", "def _parse_structure_klifs_ids(args_input): \"\"\" Parse structure KLIFS IDs. Parameters ---------- args_input", "Parameters ---------- args_input : list of str Either path to", "\"\"\" Encode structures. Parameters ---------- args : argsparse.Namespace CLI arguments.", "structure KLIFS IDs. Returns ------- list of int List of", "CLI arguments. \"\"\" configure_logger(args.output) structure_klifs_ids = _parse_structure_klifs_ids(args.input) encode(structure_klifs_ids, args.output, args.local,", "np from kissim.api import encode from kissim.cli.utils import configure_logger def", "structures. Parameters ---------- args : argsparse.Namespace CLI arguments. \"\"\" configure_logger(args.output)", "to txt file with structure KLIFS ID (one ID per", "structure KLIFS IDs. \"\"\" if len(args_input) == 1: try: structure_klifs_ids", "txt file with structure KLIFS ID (one ID per row)", "or more structure KLIFS IDs. Returns ------- list of int", "if len(args_input) == 1: try: structure_klifs_ids = [int(args_input[0])] except ValueError:", "------- list of int List of structure KLIFS IDs. 
\"\"\"", "= _parse_structure_klifs_ids(args.input) encode(structure_klifs_ids, args.output, args.local, args.ncores) def _parse_structure_klifs_ids(args_input): \"\"\" Parse", "row) or one or more structure KLIFS IDs. Returns -------", "Parameters ---------- args : argsparse.Namespace CLI arguments. \"\"\" configure_logger(args.output) structure_klifs_ids", "def encode_from_cli(args): \"\"\" Encode structures. Parameters ---------- args : argsparse.Namespace", "---------- args : argsparse.Namespace CLI arguments. \"\"\" configure_logger(args.output) structure_klifs_ids =", "CLI arguments. \"\"\" import numpy as np from kissim.api import", "more structure KLIFS IDs. Returns ------- list of int List", "except ValueError: structure_klifs_ids = np.genfromtxt(fname=args_input[0], dtype=int).tolist() else: structure_klifs_ids = [int(i)", "Parse structure KLIFS IDs. Parameters ---------- args_input : list of", "configure_logger def encode_from_cli(args): \"\"\" Encode structures. Parameters ---------- args :", "np.genfromtxt(fname=args_input[0], dtype=int).tolist() else: structure_klifs_ids = [int(i) for i in args_input]", "= np.genfromtxt(fname=args_input[0], dtype=int).tolist() else: structure_klifs_ids = [int(i) for i in", "encode from kissim.cli.utils import configure_logger def encode_from_cli(args): \"\"\" Encode structures.", "import numpy as np from kissim.api import encode from kissim.cli.utils", "arguments. \"\"\" import numpy as np from kissim.api import encode", "try: structure_klifs_ids = [int(args_input[0])] except ValueError: structure_klifs_ids = np.genfromtxt(fname=args_input[0], dtype=int).tolist()", "args_input : list of str Either path to txt file", "list of str Either path to txt file with structure", "else: structure_klifs_ids = [int(i) for i in args_input] return structure_klifs_ids", "structure KLIFS ID (one ID per row) or one or", "arguments. 
\"\"\" configure_logger(args.output) structure_klifs_ids = _parse_structure_klifs_ids(args.input) encode(structure_klifs_ids, args.output, args.local, args.ncores)", "1: try: structure_klifs_ids = [int(args_input[0])] except ValueError: structure_klifs_ids = np.genfromtxt(fname=args_input[0],", "(generate fingerprints) from CLI arguments. \"\"\" import numpy as np", "per row) or one or more structure KLIFS IDs. Returns", "or one or more structure KLIFS IDs. Returns ------- list", ": list of str Either path to txt file with", "KLIFS IDs. Returns ------- list of int List of structure", "int List of structure KLIFS IDs. \"\"\" if len(args_input) ==", "ID per row) or one or more structure KLIFS IDs.", "IDs. \"\"\" if len(args_input) == 1: try: structure_klifs_ids = [int(args_input[0])]", "structure KLIFS IDs. Parameters ---------- args_input : list of str", "ValueError: structure_klifs_ids = np.genfromtxt(fname=args_input[0], dtype=int).tolist() else: structure_klifs_ids = [int(i) for", "args.ncores) def _parse_structure_klifs_ids(args_input): \"\"\" Parse structure KLIFS IDs. Parameters ----------", "numpy as np from kissim.api import encode from kissim.cli.utils import", "IDs. Parameters ---------- args_input : list of str Either path", "List of structure KLIFS IDs. \"\"\" if len(args_input) == 1:", "of structure KLIFS IDs. \"\"\" if len(args_input) == 1: try:", "one or more structure KLIFS IDs. Returns ------- list of", "args : argsparse.Namespace CLI arguments. \"\"\" configure_logger(args.output) structure_klifs_ids = _parse_structure_klifs_ids(args.input)", "from kissim.cli.utils import configure_logger def encode_from_cli(args): \"\"\" Encode structures. Parameters", "args.local, args.ncores) def _parse_structure_klifs_ids(args_input): \"\"\" Parse structure KLIFS IDs. Parameters", "fingerprints) from CLI arguments. \"\"\" import numpy as np from", "IDs. Returns ------- list of int List of structure KLIFS", "encode_from_cli(args): \"\"\" Encode structures. 
Parameters ---------- args : argsparse.Namespace CLI", "import encode from kissim.cli.utils import configure_logger def encode_from_cli(args): \"\"\" Encode", "= [int(args_input[0])] except ValueError: structure_klifs_ids = np.genfromtxt(fname=args_input[0], dtype=int).tolist() else: structure_klifs_ids", "with structure KLIFS ID (one ID per row) or one", "== 1: try: structure_klifs_ids = [int(args_input[0])] except ValueError: structure_klifs_ids =", "configure_logger(args.output) structure_klifs_ids = _parse_structure_klifs_ids(args.input) encode(structure_klifs_ids, args.output, args.local, args.ncores) def _parse_structure_klifs_ids(args_input):", "file with structure KLIFS ID (one ID per row) or", "---------- args_input : list of str Either path to txt", "\"\"\" configure_logger(args.output) structure_klifs_ids = _parse_structure_klifs_ids(args.input) encode(structure_klifs_ids, args.output, args.local, args.ncores) def", "len(args_input) == 1: try: structure_klifs_ids = [int(args_input[0])] except ValueError: structure_klifs_ids", "as np from kissim.api import encode from kissim.cli.utils import configure_logger", "structure_klifs_ids = np.genfromtxt(fname=args_input[0], dtype=int).tolist() else: structure_klifs_ids = [int(i) for i", "from kissim.api import encode from kissim.cli.utils import configure_logger def encode_from_cli(args):", "list of int List of structure KLIFS IDs. \"\"\" if", "args.output, args.local, args.ncores) def _parse_structure_klifs_ids(args_input): \"\"\" Parse structure KLIFS IDs.", "Returns ------- list of int List of structure KLIFS IDs.", "from CLI arguments. \"\"\" import numpy as np from kissim.api", "_parse_structure_klifs_ids(args.input) encode(structure_klifs_ids, args.output, args.local, args.ncores) def _parse_structure_klifs_ids(args_input): \"\"\" Parse structure", "structures (generate fingerprints) from CLI arguments. \"\"\" import numpy as" ]
[ "n = len(tsB) distanceProfile = mass(query, tsB) if selfJoin: trivialMatchRange", "2. , 2.828, 2. ], [ 0. , 0. ,", "0. ]]) \"\"\" selfJoin = False if tsB is None:", "is None: selfJoin = True tsB = tsA query =", "idx, m, tsB = None): \"\"\"Return the distance profile of", "idx, m, tsB = None): \"\"\" >>> np.round(stampDistanceProfile(np.array([0.0, 1.0, -1.0,", "mass(query, tsB) if selfJoin: trivialMatchRange = (max(0, idxToProcess - m", "1, 0, 0, -1, 1])), 3) array([[ 2. , 2.828,", "profile of query against ts. Use the naive all pairs", "from util import * def naiveDistanceProfile(tsA, idx, m, tsB =", "2.828, 2. ], [ 0. , 0. , 0. ]])", "as np from util import * def naiveDistanceProfile(tsA, idx, m,", "np.full(n - m + 1, idx, dtype = float)) def", "idx, dtype = float)) def stampDistanceProfile(tsA, idx, m, tsB =", "= False if tsB is None: selfJoin = True tsB", "len(tsB) for i in range(n - m + 1): distanceProfile.append(zNormalizedEuclideanDistance(query,", "2), min(idxToProcess + m / 2 + 1, len(tsB))) distanceProfile[trivialMatchRange[0]", "(distanceProfile, np.full(n - m + 1, idx, dtype = float))", ": (idx + m)] n = len(tsB) distanceProfile = mass(query,", "i + m])) if selfJoin: trivialMatchRange = (max(0, idxToProcess -", "the naive all pairs comparison algorithm. >>> np.round(naiveDistanceProfile(np.array([0.0, 1.0, -1.0,", "all pairs comparison algorithm. >>> np.round(naiveDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0,", "m)] n = len(tsB) distanceProfile = mass(query, tsB) if selfJoin:", "idxToProcess - m / 2), min(idxToProcess + m / 2", "numpy as np from util import * def naiveDistanceProfile(tsA, idx,", "len(tsB))) distanceProfile[trivialMatchRange[0] : trivialMatchRange[1]] = np.inf return (distanceProfile, np.full(n -", "if selfJoin: trivialMatchRange = (max(0, idxToProcess - m / 2),", "= None): \"\"\" >>> np.round(stampDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4,", "3) array([[ 2. , 2.828, 2. ], [ 0. 
,", "+ 1): distanceProfile.append(zNormalizedEuclideanDistance(query, tsB[i : i + m])) if selfJoin:", "trivialMatchRange = (max(0, idxToProcess - m / 2), min(idxToProcess +", "np.inf return (distanceProfile, np.full(n - m + 1, idx, dtype", "ts. Use the naive all pairs comparison algorithm. >>> np.round(naiveDistanceProfile(np.array([0.0,", "\"\"\" selfJoin = False if tsB is None: selfJoin =", "np from util import * def naiveDistanceProfile(tsA, idx, m, tsB", "0, 4, np.array([-1, 1, 0, 0, -1, 1])), 3) array([[", "tsB[i : i + m])) if selfJoin: trivialMatchRange = (max(0,", "in range(n - m + 1): distanceProfile.append(zNormalizedEuclideanDistance(query, tsB[i : i", "= None): \"\"\"Return the distance profile of query against ts.", "2 + 1, len(tsB))) distanceProfile[trivialMatchRange[0] : trivialMatchRange[1]] = np.inf return", "return (distanceProfile, np.full(n - m + 1, idx, dtype =", "tsB) if selfJoin: trivialMatchRange = (max(0, idxToProcess - m /", "against ts. Use the naive all pairs comparison algorithm. >>>", "distanceProfile.append(zNormalizedEuclideanDistance(query, tsB[i : i + m])) if selfJoin: trivialMatchRange =", "- m + 1, idx, dtype = float)) def stampDistanceProfile(tsA,", "0. , 0. ]]) \"\"\" selfJoin = False if tsB", "n = len(tsB) for i in range(n - m +", "min(idxToProcess + m / 2 + 1, len(tsB))) distanceProfile[trivialMatchRange[0] :", "= np.inf return (distanceProfile, np.full(n - m + 1, idx,", "np.array([-1, 1, 0, 0, -1, 1])), 3) array([[ 2. ,", "-1, 1])), 3) array([[ 2. , 2.828, 2. ], [", "0, -1, 1])), 3) array([[ 2. , 2.828, 2. 
],", "m])) if selfJoin: trivialMatchRange = (max(0, idxToProcess - m /", "= len(tsB) distanceProfile = mass(query, tsB) if selfJoin: trivialMatchRange =", "4, np.array([-1, 1, 0, 0, -1, 1])), 3) array([[ 2.", "= float)) def stampDistanceProfile(tsA, idx, m, tsB = None): \"\"\"", "import numpy as np from util import * def naiveDistanceProfile(tsA,", "True tsB = tsA query = tsA[idx : (idx +", "dtype = float)) def stampDistanceProfile(tsA, idx, m, tsB = None):", "None): \"\"\" >>> np.round(stampDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4, np.array([-1,", "m / 2), min(idxToProcess + m / 2 + 1,", "m / 2 + 1, len(tsB))) distanceProfile[trivialMatchRange[0] : trivialMatchRange[1]] =", "array([[ 2. , 2.828, 2. ], [ 0. , 0.", ", 0. , 0. ]]) \"\"\" selfJoin = False if", "query = tsA[idx : (idx + m)] distanceProfile = []", "0. , 0. , 0. ]]) \"\"\" selfJoin = False", "- m / 2), min(idxToProcess + m / 2 +", "1])), 3) array([[ 2. , 2.828, 2. ], [ 0.", "tsB = None): \"\"\"Return the distance profile of query against", "[] n = len(tsB) for i in range(n - m", "m, tsB = None): \"\"\" >>> np.round(stampDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]),", "+ m)] distanceProfile = [] n = len(tsB) for i", "range(n - m + 1): distanceProfile.append(zNormalizedEuclideanDistance(query, tsB[i : i +", ": i + m])) if selfJoin: trivialMatchRange = (max(0, idxToProcess", "distance profile of query against ts. Use the naive all", "float)) def stampDistanceProfile(tsA, idx, m, tsB = None): \"\"\" >>>", "selfJoin = False if tsB is None: selfJoin = True", "+ 1, idx, dtype = float)) if __name__ == \"__main__\":", "def stampDistanceProfile(tsA, idx, m, tsB = None): \"\"\" >>> np.round(stampDistanceProfile(np.array([0.0,", "dtype = float)) if __name__ == \"__main__\": import doctest doctest.testmod()", "-1.0, 0.0]), 0, 4, np.array([-1, 1, 0, 0, -1, 1])),", "pairs comparison algorithm. 
>>> np.round(naiveDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4,", "tsB = None): \"\"\" >>> np.round(stampDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0,", "selfJoin = True tsB = tsA query = tsA[idx :", "/ 2), min(idxToProcess + m / 2 + 1, len(tsB)))", "tsA[idx : (idx + m)] distanceProfile = [] n =", "the distance profile of query against ts. Use the naive", "query against ts. Use the naive all pairs comparison algorithm.", "import * def naiveDistanceProfile(tsA, idx, m, tsB = None): \"\"\"Return", "- m + 1): distanceProfile.append(zNormalizedEuclideanDistance(query, tsB[i : i + m]))", "m)] distanceProfile = [] n = len(tsB) for i in", "(max(0, idxToProcess - m / 2), min(idxToProcess + m /", "1, idx, dtype = float)) def stampDistanceProfile(tsA, idx, m, tsB", "selfJoin: trivialMatchRange = (max(0, idxToProcess - m / 2), min(idxToProcess", "tsA query = tsA[idx : (idx + m)] n =", "\"\"\"Return the distance profile of query against ts. Use the", "tsA[idx : (idx + m)] n = len(tsB) distanceProfile =", "+ m)] n = len(tsB) distanceProfile = mass(query, tsB) if", "1, idx, dtype = float)) if __name__ == \"__main__\": import", "= (max(0, idxToProcess - m / 2), min(idxToProcess + m", ", 0. ]]) \"\"\" selfJoin = False if tsB is", ": trivialMatchRange[1]] = np.inf return (distanceProfile, np.full(n - m +", "comparison algorithm. >>> np.round(naiveDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4, np.array([-1,", "Use the naive all pairs comparison algorithm. >>> np.round(naiveDistanceProfile(np.array([0.0, 1.0,", "]]) \"\"\" selfJoin = False if tsB is None: selfJoin", "stampDistanceProfile(tsA, idx, m, tsB = None): \"\"\" >>> np.round(stampDistanceProfile(np.array([0.0, 1.0,", "np.round(naiveDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4, np.array([-1, 1, 0, 0,", "= tsA[idx : (idx + m)] n = len(tsB) distanceProfile", ", 2.828, 2. ], [ 0. , 0. , 0.", "/ 2 + 1, len(tsB))) distanceProfile[trivialMatchRange[0] : trivialMatchRange[1]] = np.inf", "2. ], [ 0. 
, 0. , 0. ]]) \"\"\"", "+ m / 2 + 1, len(tsB))) distanceProfile[trivialMatchRange[0] : trivialMatchRange[1]]", ": (idx + m)] distanceProfile = [] n = len(tsB)", ">>> np.round(stampDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4, np.array([-1, 1, 0,", "m + 1, idx, dtype = float)) if __name__ ==", "= tsA query = tsA[idx : (idx + m)] distanceProfile", "tsB is None: selfJoin = True tsB = tsA query", "None: selfJoin = True tsB = tsA query = tsA[idx", "+ m])) if selfJoin: trivialMatchRange = (max(0, idxToProcess - m", "of query against ts. Use the naive all pairs comparison", "- m + 1, idx, dtype = float)) if __name__", "naive all pairs comparison algorithm. >>> np.round(naiveDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]),", "util import * def naiveDistanceProfile(tsA, idx, m, tsB = None):", "for i in range(n - m + 1): distanceProfile.append(zNormalizedEuclideanDistance(query, tsB[i", "m, tsB = None): \"\"\"Return the distance profile of query", "+ 1, idx, dtype = float)) def stampDistanceProfile(tsA, idx, m,", "= [] n = len(tsB) for i in range(n -", "np.full(n - m + 1, idx, dtype = float)) if", "np.round(stampDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4, np.array([-1, 1, 0, 0,", "* def naiveDistanceProfile(tsA, idx, m, tsB = None): \"\"\"Return the", "1.0, -1.0, 0.0]), 0, 4, np.array([-1, 1, 0, 0, -1,", "], [ 0. , 0. , 0. ]]) \"\"\" selfJoin", "None): \"\"\"Return the distance profile of query against ts. Use", "[ 0. , 0. , 0. 
]]) \"\"\" selfJoin =", "m + 1, idx, dtype = float)) def stampDistanceProfile(tsA, idx,", "+ 1, len(tsB))) distanceProfile[trivialMatchRange[0] : trivialMatchRange[1]] = np.inf return (distanceProfile,", "\"\"\" >>> np.round(stampDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4, np.array([-1, 1,", "1): distanceProfile.append(zNormalizedEuclideanDistance(query, tsB[i : i + m])) if selfJoin: trivialMatchRange", "idx, dtype = float)) if __name__ == \"__main__\": import doctest", "= len(tsB) for i in range(n - m + 1):", "distanceProfile = mass(query, tsB) if selfJoin: trivialMatchRange = (max(0, idxToProcess", "naiveDistanceProfile(tsA, idx, m, tsB = None): \"\"\"Return the distance profile", "algorithm. >>> np.round(naiveDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4, np.array([-1, 1,", "(idx + m)] distanceProfile = [] n = len(tsB) for", "0, 0, -1, 1])), 3) array([[ 2. , 2.828, 2.", "def naiveDistanceProfile(tsA, idx, m, tsB = None): \"\"\"Return the distance", "(idx + m)] n = len(tsB) distanceProfile = mass(query, tsB)", "i in range(n - m + 1): distanceProfile.append(zNormalizedEuclideanDistance(query, tsB[i :", "if tsB is None: selfJoin = True tsB = tsA", "False if tsB is None: selfJoin = True tsB =", "tsB = tsA query = tsA[idx : (idx + m)]", "trivialMatchRange[1]] = np.inf return (distanceProfile, np.full(n - m + 1,", "m + 1): distanceProfile.append(zNormalizedEuclideanDistance(query, tsB[i : i + m])) if", "distanceProfile = [] n = len(tsB) for i in range(n", "distanceProfile[trivialMatchRange[0] : trivialMatchRange[1]] = np.inf return (distanceProfile, np.full(n - m", "= tsA query = tsA[idx : (idx + m)] n", "= mass(query, tsB) if selfJoin: trivialMatchRange = (max(0, idxToProcess -", "= tsA[idx : (idx + m)] distanceProfile = [] n", "1, len(tsB))) distanceProfile[trivialMatchRange[0] : trivialMatchRange[1]] = np.inf return (distanceProfile, np.full(n", "= True tsB = tsA query = tsA[idx : (idx", "0.0]), 0, 4, np.array([-1, 1, 0, 0, -1, 1])), 3)", "query 
= tsA[idx : (idx + m)] n = len(tsB)", "tsA query = tsA[idx : (idx + m)] distanceProfile =", ">>> np.round(naiveDistanceProfile(np.array([0.0, 1.0, -1.0, 0.0]), 0, 4, np.array([-1, 1, 0,", "len(tsB) distanceProfile = mass(query, tsB) if selfJoin: trivialMatchRange = (max(0," ]
[ "as plt import numpy as np from sklearn import linear_model", "import pyplot as plt import numpy as np from sklearn", "pyplot as plt import numpy as np from sklearn import" ]
[ "widgetName, widgetArea): \"\"\"创建停靠组件\"\"\" widget = widgetClass(self.mainEngine, self.eventEngine) dock = QDockWidget(widgetName)", "self.eventEngine.register(EVENT_TIMER, self.signalStatusBar.emit) # ---------------------------------------------------------------------- def updateStatusBar(self, event): \"\"\"在状态栏更新CPU和内存信息\"\"\" self.sbCount +=", "def closeEvent(self, event): \"\"\"关闭事件\"\"\" reply = QMessageBox.question(self, u'退出', u'确认退出?', QMessageBox.Yes", "QtCore.Qt.BottomDockWidgetArea) widgetTradeM, dockTradeM = self.createDock(wgs.TradeMonitor, u'成交', QtCore.Qt.BottomDockWidgetArea) widgetOrderM, dockOrderM =", "0 self.sbTrigger = 10 # 10秒刷新一次 self.signalStatusBar.connect(self.updateStatusBar) self.eventEngine.register(EVENT_TIMER, self.signalStatusBar.emit) #", "# ---------------------------------------------------------------------- def openAbout(self): \"\"\"打开关于\"\"\" try: self.widgetDict['aboutW'].show() except KeyError: self.widgetDict['aboutW']", "+ str(stratid)) self.sid = stratid # ---------------------------------------------------------------------- def initUi(self): \"\"\"初始化界面\"\"\"", "QAction(u'关于', self) aboutAction.triggered.connect(self.openAbout) colorAction = QAction(u'变色', self) colorAction.triggered.connect(self.changeColor) # 创建菜单", "{} # 用来保存子窗口的字典 self.initUi() self.eventEngine.register(EVENT_TITLE, self.updateTitle) self.sid = None def", "__init__(self, parent=None): \"\"\"Constructor\"\"\" super(AboutWidget, self).__init__(parent) self.initUi() # ---------------------------------------------------------------------- def initUi(self):", "self.eventEngine.register(EVENT_TITLE, self.updateTitle) self.sid = None def updateTitle(self, event): (user, stratid)", "self.eventEngine) dock = QDockWidget(widgetName) dock.setWidget(widget) dock.setObjectName(widgetName) dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable) self.addDockWidget(widgetArea,", "widgetClass, widgetName, widgetArea): \"\"\"创建停靠组件\"\"\" widget = 
widgetClass(self.mainEngine, self.eventEngine) dock =", "connectQuantOSAction = QAction(u'连接和切换策略', self) connectQuantOSAction.triggered.connect(self.connectQuantOS) exitAction = QAction(u'退出', self) exitAction.triggered.connect(self.close)", "用来保存子窗口的字典 self.initUi() self.eventEngine.register(EVENT_TITLE, self.updateTitle) self.sid = None def updateTitle(self, event):", "= 0 self.statusLabel.setText(self.getCpuMemory()) # ---------------------------------------------------------------------- def getCpuMemory(self): \"\"\"获取CPU和内存状态信息\"\"\" cpuPercent =", "class MainWindow(QMainWindow): \"\"\"主窗口\"\"\" signalStatusBar = QtCore.pyqtSignal(type(Event())) # ---------------------------------------------------------------------- def __init__(self,", "widget = widgetClass(self.mainEngine, self.eventEngine) dock = QDockWidget(widgetName) dock.setWidget(widget) dock.setObjectName(widgetName) dock.setFeatures(dock.DockWidgetFloatable", "u'日志', QtCore.Qt.BottomDockWidgetArea) widgetTradeM, dockTradeM = self.createDock(wgs.TradeMonitor, u'成交', QtCore.Qt.BottomDockWidgetArea) widgetOrderM, dockOrderM", "---------------------------------------------------------------------- def initUi(self): \"\"\"\"\"\" self.setWindowTitle(u'关于VnTrader') text = u\"\"\" quantos trade", "self).__init__() self.mainEngine = mainEngine self.eventEngine = eventEngine self.app = app", "initCentral(self): \"\"\"初始化中心区域\"\"\" widgetTradingW, dockTradingW = self.createDock(wgs.TradingWidget, u'交易', QtCore.Qt.LeftDockWidgetArea) widgetMarketM, dockMarketM", "\"\"\"初始化状态栏\"\"\" self.statusLabel = QLabel() self.statusLabel.setAlignment(QtCore.Qt.AlignLeft) self.statusBar().addPermanentWidget(self.statusLabel) self.statusLabel.setText(self.getCpuMemory()) self.sbCount = 0", "= psutil.cpu_percent() memoryPercent = psutil.virtual_memory().percent return u'CPU使用率:%d%% 内存使用率:%d%%' % (cpuPercent,", "widgetPositionM, dockPositionM = self.createDock(wgs.PositionMonitor, u'持仓', QtCore.Qt.RightDockWidgetArea) 
widgetAccountM, dockAccountM = self.createDock(wgs.AccountMonitor,", "self.mainEngine.exit() event.accept() else: event.ignore() # ---------------------------------------------------------------------- def createDock(self, widgetClass, widgetName,", "event.ignore() # ---------------------------------------------------------------------- def createDock(self, widgetClass, widgetName, widgetArea): \"\"\"创建停靠组件\"\"\" widget", "if reply == QMessageBox.Yes: for widget in list(self.widgetDict.values()): widget.close() self.mainEngine.exit()", "dockOrderM) self.tabifyDockWidget(dockAccountM, dockLogM) dockOrderM.raise_() dockLogM.raise_() # 连接组件之间的信号 widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition) widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol) #", "sysMenu = menubar.addMenu(u'系统') if 'quantos' in self.mainEngine.gatewayDict: sysMenu.addAction(connectQuantOSAction) sysMenu.addSeparator() sysMenu.addAction(exitAction)", "connectQuantOS(self): self.mainEngine.connect('quantos') # ---------------------------------------------------------------------- def openAbout(self): \"\"\"打开关于\"\"\" try: self.widgetDict['aboutW'].show() except", "QVBoxLayout from PyQt4 import QtCore except ImportError: from PyQt5.QtWidgets import", "dock def changeColor(self): self.app.setStyleSheet(self.sheets[1]) self.sheets = [self.sheets[1], self.sheets[0]] ######################################################################## class", "创建菜单 menubar = self.menuBar() # 设计为只显示存在的接口 sysMenu = menubar.addMenu(u'系统') if", "(user, stratid) = event.dict_['data'] #self.setWindowTitle('VnTrader: ' + str(user) + \"/\"", "helpMenu.addAction(aboutAction) helpMenu.addAction(colorAction) # ---------------------------------------------------------------------- def initStatusBar(self): \"\"\"初始化状态栏\"\"\" self.statusLabel = QLabel()", "self.signalStatusBar.connect(self.updateStatusBar) self.eventEngine.register(EVENT_TIMER, self.signalStatusBar.emit) # 
---------------------------------------------------------------------- def updateStatusBar(self, event): \"\"\"在状态栏更新CPU和内存信息\"\"\" self.sbCount", "event.accept() else: event.ignore() # ---------------------------------------------------------------------- def createDock(self, widgetClass, widgetName, widgetArea):", "---------------------------------------------------------------------- def initUi(self): \"\"\"初始化界面\"\"\" self.setWindowTitle('VnTrader') self.initCentral() self.initMenu() # self.initStatusBar() def", "self.setWindowTitle('VnTrader') self.initCentral() self.initMenu() # self.initStatusBar() def showLogin(self): self.connectQuantOS() # ----------------------------------------------------------------------", "self).__init__(parent) self.initUi() # ---------------------------------------------------------------------- def initUi(self): \"\"\"\"\"\" self.setWindowTitle(u'关于VnTrader') text =", "widgetContractM, dockContractM = self.createDock(wgs.ContractMonitor, u'合约', QtCore.Qt.BottomDockWidgetArea) widgetLogM, dockLogM = self.createDock(wgs.LogMonitor,", "self.statusLabel.setText(self.getCpuMemory()) self.sbCount = 0 self.sbTrigger = 10 # 10秒刷新一次 self.signalStatusBar.connect(self.updateStatusBar)", "'quantos' in self.mainEngine.gatewayDict: sysMenu.addAction(connectQuantOSAction) sysMenu.addSeparator() sysMenu.addAction(exitAction) # 帮助 helpMenu =", "== self.sbTrigger: self.sbCount = 0 self.statusLabel.setText(self.getCpuMemory()) # ---------------------------------------------------------------------- def getCpuMemory(self):", "\"\"\"Constructor\"\"\" super(MainWindow, self).__init__() self.mainEngine = mainEngine self.eventEngine = eventEngine self.app", "if self.sbCount == self.sbTrigger: self.sbCount = 0 self.statusLabel.setText(self.getCpuMemory()) # ----------------------------------------------------------------------", "def initCentral(self): \"\"\"初始化中心区域\"\"\" widgetTradingW, dockTradingW = self.createDock(wgs.TradingWidget, u'交易', 
QtCore.Qt.LeftDockWidgetArea) widgetMarketM,", "---------------------------------------------------------------------- def updateStatusBar(self, event): \"\"\"在状态栏更新CPU和内存信息\"\"\" self.sbCount += 1 if self.sbCount", "# ---------------------------------------------------------------------- def initUi(self): \"\"\"初始化界面\"\"\" self.setWindowTitle('VnTrader') self.initCentral() self.initMenu() # self.initStatusBar()", "dockTradeM = self.createDock(wgs.TradeMonitor, u'成交', QtCore.Qt.BottomDockWidgetArea) widgetOrderM, dockOrderM = self.createDock(wgs.OrderMonitor, u'委托',", "QMessageBox, QLabel, QVBoxLayout from PyQt4 import QtCore except ImportError: from", "u'确认退出?', QMessageBox.Yes | QMessageBox.No, QMessageBox.No) if reply == QMessageBox.Yes: for", "10 # 10秒刷新一次 self.signalStatusBar.connect(self.updateStatusBar) self.eventEngine.register(EVENT_TIMER, self.signalStatusBar.emit) # ---------------------------------------------------------------------- def updateStatusBar(self,", "showLogin(self): self.connectQuantOS() # ---------------------------------------------------------------------- def initCentral(self): \"\"\"初始化中心区域\"\"\" widgetTradingW, dockTradingW =", "class AboutWidget(QDialog): \"\"\"显示关于信息\"\"\" # ---------------------------------------------------------------------- def __init__(self, parent=None): \"\"\"Constructor\"\"\" super(AboutWidget,", "u\"\"\" quantos trade client \"\"\" label = QLabel() label.setText(text) label.setMinimumWidth(500)", "self.createDock(wgs.ContractMonitor, u'合约', QtCore.Qt.BottomDockWidgetArea) widgetLogM, dockLogM = self.createDock(wgs.LogMonitor, u'日志', QtCore.Qt.BottomDockWidgetArea) widgetTradeM,", "---------------------------------------------------------------------- def createDock(self, widgetClass, widgetName, widgetArea): \"\"\"创建停靠组件\"\"\" widget = widgetClass(self.mainEngine,", "= u\"\"\" quantos trade client \"\"\" label = QLabel() label.setText(text)", "# 用来保存子窗口的字典 self.initUi() 
self.eventEngine.register(EVENT_TITLE, self.updateTitle) self.sid = None def updateTitle(self,", "# 设计为只显示存在的接口 sysMenu = menubar.addMenu(u'系统') if 'quantos' in self.mainEngine.gatewayDict: sysMenu.addAction(connectQuantOSAction)", "label = QLabel() label.setText(text) label.setMinimumWidth(500) vbox = QVBoxLayout() vbox.addWidget(label) self.setLayout(vbox)", "---------------------------------------------------------------------- def initMenu(self): \"\"\"初始化菜单\"\"\" # 创建操作 connectQuantOSAction = QAction(u'连接和切换策略', self)", "dockTradeM) self.tabifyDockWidget(dockTradeM, dockOrderM) self.tabifyDockWidget(dockAccountM, dockLogM) dockOrderM.raise_() dockLogM.raise_() # 连接组件之间的信号 widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition)", "\"\"\" label = QLabel() label.setText(text) label.setMinimumWidth(500) vbox = QVBoxLayout() vbox.addWidget(label)", "dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable) self.addDockWidget(widgetArea, dock) return widget, dock def changeColor(self):", "QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt5 import QtCore from", "4/5 compatibility try: from PyQt4.QtGui import QMainWindow, QDialog, QDockWidget, QAction,", "\"\"\"在状态栏更新CPU和内存信息\"\"\" self.sbCount += 1 if self.sbCount == self.sbTrigger: self.sbCount =", "# ---------------------------------------------------------------------- def initCentral(self): \"\"\"初始化中心区域\"\"\" widgetTradingW, dockTradingW = self.createDock(wgs.TradingWidget, u'交易',", "QDockWidget, QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt4 import QtCore", "= mainEngine self.eventEngine = eventEngine self.app = app self.sheets =", "QMessageBox, QLabel, QVBoxLayout from PyQt5 import QtCore from uiBasicWidget import", "QMainWindow, QDialog, QDockWidget, QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt4", "sheets): \"\"\"Constructor\"\"\" super(MainWindow, self).__init__() self.mainEngine = mainEngine self.eventEngine = eventEngine", 
"super(AboutWidget, self).__init__(parent) self.initUi() # ---------------------------------------------------------------------- def initUi(self): \"\"\"\"\"\" self.setWindowTitle(u'关于VnTrader') text", "dockLogM.raise_() # 连接组件之间的信号 widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition) widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol) # ---------------------------------------------------------------------- def initMenu(self): \"\"\"初始化菜单\"\"\"", "else: event.ignore() # ---------------------------------------------------------------------- def createDock(self, widgetClass, widgetName, widgetArea): \"\"\"创建停靠组件\"\"\"", "self.statusBar().addPermanentWidget(self.statusLabel) self.statusLabel.setText(self.getCpuMemory()) self.sbCount = 0 self.sbTrigger = 10 # 10秒刷新一次", "event): \"\"\"在状态栏更新CPU和内存信息\"\"\" self.sbCount += 1 if self.sbCount == self.sbTrigger: self.sbCount", "= QAction(u'退出', self) exitAction.triggered.connect(self.close) aboutAction = QAction(u'关于', self) aboutAction.triggered.connect(self.openAbout) colorAction", "event.dict_['data'] #self.setWindowTitle('VnTrader: ' + str(user) + \"/\" + str(stratid)) self.sid", "(cpuPercent, memoryPercent) # ---------------------------------------------------------------------- def connectQuantOS(self): self.mainEngine.connect('quantos') # ---------------------------------------------------------------------- def", "dock) return widget, dock def changeColor(self): self.app.setStyleSheet(self.sheets[1]) self.sheets = [self.sheets[1],", "= [self.sheets[1], self.sheets[0]] ######################################################################## class AboutWidget(QDialog): \"\"\"显示关于信息\"\"\" # ---------------------------------------------------------------------- def", "from uiBasicWidget import * import uiBasicWidget as wgs #from .", "helpMenu = menubar.addMenu(u'帮助') helpMenu.addAction(aboutAction) helpMenu.addAction(colorAction) # 
---------------------------------------------------------------------- def initStatusBar(self): \"\"\"初始化状态栏\"\"\"", "u'行情', QtCore.Qt.RightDockWidgetArea) widgetPositionM, dockPositionM = self.createDock(wgs.PositionMonitor, u'持仓', QtCore.Qt.RightDockWidgetArea) widgetAccountM, dockAccountM", "in list(self.widgetDict.values()): widget.close() self.mainEngine.exit() event.accept() else: event.ignore() # ---------------------------------------------------------------------- def", "except KeyError: self.widgetDict['aboutW'] = AboutWidget(self) self.widgetDict['aboutW'].show() # ---------------------------------------------------------------------- def closeEvent(self,", "QDialog, QDockWidget, QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt5 import", "psutil.virtual_memory().percent return u'CPU使用率:%d%% 内存使用率:%d%%' % (cpuPercent, memoryPercent) # ---------------------------------------------------------------------- def", "initUi(self): \"\"\"\"\"\" self.setWindowTitle(u'关于VnTrader') text = u\"\"\" quantos trade client \"\"\"", "openAbout(self): \"\"\"打开关于\"\"\" try: self.widgetDict['aboutW'].show() except KeyError: self.widgetDict['aboutW'] = AboutWidget(self) self.widgetDict['aboutW'].show()", "\"\"\"初始化菜单\"\"\" # 创建操作 connectQuantOSAction = QAction(u'连接和切换策略', self) connectQuantOSAction.triggered.connect(self.connectQuantOS) exitAction =", "aboutAction = QAction(u'关于', self) aboutAction.triggered.connect(self.openAbout) colorAction = QAction(u'变色', self) colorAction.triggered.connect(self.changeColor)", "QDockWidget(widgetName) dock.setWidget(widget) dock.setObjectName(widgetName) dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable) self.addDockWidget(widgetArea, dock) return widget,", "for widget in list(self.widgetDict.values()): widget.close() self.mainEngine.exit() event.accept() else: event.ignore() #", "self.app = app self.sheets = sheets self.widgetDict = {} #", "连接组件之间的信号 
widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition) widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol) # ---------------------------------------------------------------------- def initMenu(self): \"\"\"初始化菜单\"\"\" # 创建操作", "| dock.DockWidgetMovable) self.addDockWidget(widgetArea, dock) return widget, dock def changeColor(self): self.app.setStyleSheet(self.sheets[1])", "def initMenu(self): \"\"\"初始化菜单\"\"\" # 创建操作 connectQuantOSAction = QAction(u'连接和切换策略', self) connectQuantOSAction.triggered.connect(self.connectQuantOS)", "self.widgetDict['aboutW'].show() # ---------------------------------------------------------------------- def closeEvent(self, event): \"\"\"关闭事件\"\"\" reply = QMessageBox.question(self,", "super(MainWindow, self).__init__() self.mainEngine = mainEngine self.eventEngine = eventEngine self.app =", "= QLabel() self.statusLabel.setAlignment(QtCore.Qt.AlignLeft) self.statusBar().addPermanentWidget(self.statusLabel) self.statusLabel.setText(self.getCpuMemory()) self.sbCount = 0 self.sbTrigger =", "u'委托', QtCore.Qt.BottomDockWidgetArea) self.tabifyDockWidget(dockContractM, dockTradeM) self.tabifyDockWidget(dockTradeM, dockOrderM) self.tabifyDockWidget(dockAccountM, dockLogM) dockOrderM.raise_() dockLogM.raise_()", "self.initUi() self.eventEngine.register(EVENT_TITLE, self.updateTitle) self.sid = None def updateTitle(self, event): (user,", "| QMessageBox.No, QMessageBox.No) if reply == QMessageBox.Yes: for widget in", "self.tabifyDockWidget(dockAccountM, dockLogM) dockOrderM.raise_() dockLogM.raise_() # 连接组件之间的信号 widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition) widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol) # ----------------------------------------------------------------------", "= stratid # ---------------------------------------------------------------------- def initUi(self): \"\"\"初始化界面\"\"\" self.setWindowTitle('VnTrader') self.initCentral() self.initMenu()", "= 
self.createDock(wgs.ContractMonitor, u'合约', QtCore.Qt.BottomDockWidgetArea) widgetLogM, dockLogM = self.createDock(wgs.LogMonitor, u'日志', QtCore.Qt.BottomDockWidgetArea)", "parent=None): \"\"\"Constructor\"\"\" super(AboutWidget, self).__init__(parent) self.initUi() # ---------------------------------------------------------------------- def initUi(self): \"\"\"\"\"\"", "QAction(u'退出', self) exitAction.triggered.connect(self.close) aboutAction = QAction(u'关于', self) aboutAction.triggered.connect(self.openAbout) colorAction =", "widget.close() self.mainEngine.exit() event.accept() else: event.ignore() # ---------------------------------------------------------------------- def createDock(self, widgetClass,", "dockAccountM = self.createDock(wgs.AccountMonitor, u'资金', QtCore.Qt.BottomDockWidgetArea) widgetContractM, dockContractM = self.createDock(wgs.ContractMonitor, u'合约',", "= self.createDock(wgs.AccountMonitor, u'资金', QtCore.Qt.BottomDockWidgetArea) widgetContractM, dockContractM = self.createDock(wgs.ContractMonitor, u'合约', QtCore.Qt.BottomDockWidgetArea)", "QMessageBox.Yes | QMessageBox.No, QMessageBox.No) if reply == QMessageBox.Yes: for widget", "in self.mainEngine.gatewayDict: sysMenu.addAction(connectQuantOSAction) sysMenu.addSeparator() sysMenu.addAction(exitAction) # 帮助 helpMenu = menubar.addMenu(u'帮助')", "__init__(self, mainEngine, eventEngine, app, sheets): \"\"\"Constructor\"\"\" super(MainWindow, self).__init__() self.mainEngine =", "= self.menuBar() # 设计为只显示存在的接口 sysMenu = menubar.addMenu(u'系统') if 'quantos' in", "QDialog, QDockWidget, QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt4 import", "try: self.widgetDict['aboutW'].show() except KeyError: self.widgetDict['aboutW'] = AboutWidget(self) self.widgetDict['aboutW'].show() # ----------------------------------------------------------------------", "dockLogM = self.createDock(wgs.LogMonitor, u'日志', QtCore.Qt.BottomDockWidgetArea) widgetTradeM, dockTradeM = 
self.createDock(wgs.TradeMonitor, u'成交',", "10秒刷新一次 self.signalStatusBar.connect(self.updateStatusBar) self.eventEngine.register(EVENT_TIMER, self.signalStatusBar.emit) # ---------------------------------------------------------------------- def updateStatusBar(self, event): \"\"\"在状态栏更新CPU和内存信息\"\"\"", "uiBasicWidget import * import uiBasicWidget as wgs #from . import", "def changeColor(self): self.app.setStyleSheet(self.sheets[1]) self.sheets = [self.sheets[1], self.sheets[0]] ######################################################################## class AboutWidget(QDialog):", "QtCore.Qt.BottomDockWidgetArea) widgetContractM, dockContractM = self.createDock(wgs.ContractMonitor, u'合约', QtCore.Qt.BottomDockWidgetArea) widgetLogM, dockLogM =", "None def updateTitle(self, event): (user, stratid) = event.dict_['data'] #self.setWindowTitle('VnTrader: '", "QtCore.Qt.BottomDockWidgetArea) widgetLogM, dockLogM = self.createDock(wgs.LogMonitor, u'日志', QtCore.Qt.BottomDockWidgetArea) widgetTradeM, dockTradeM =", "dockOrderM = self.createDock(wgs.OrderMonitor, u'委托', QtCore.Qt.BottomDockWidgetArea) self.tabifyDockWidget(dockContractM, dockTradeM) self.tabifyDockWidget(dockTradeM, dockOrderM) self.tabifyDockWidget(dockAccountM,", "reply == QMessageBox.Yes: for widget in list(self.widgetDict.values()): widget.close() self.mainEngine.exit() event.accept()", "AboutWidget(self) self.widgetDict['aboutW'].show() # ---------------------------------------------------------------------- def closeEvent(self, event): \"\"\"关闭事件\"\"\" reply =", "\"\"\"显示关于信息\"\"\" # ---------------------------------------------------------------------- def __init__(self, parent=None): \"\"\"Constructor\"\"\" super(AboutWidget, self).__init__(parent) self.initUi()", "######################################################################## class MainWindow(QMainWindow): \"\"\"主窗口\"\"\" signalStatusBar = QtCore.pyqtSignal(type(Event())) # 
---------------------------------------------------------------------- def", "colorAction.triggered.connect(self.changeColor) # 创建菜单 menubar = self.menuBar() # 设计为只显示存在的接口 sysMenu =", "menubar.addMenu(u'系统') if 'quantos' in self.mainEngine.gatewayDict: sysMenu.addAction(connectQuantOSAction) sysMenu.addSeparator() sysMenu.addAction(exitAction) # 帮助", "widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol) # ---------------------------------------------------------------------- def initMenu(self): \"\"\"初始化菜单\"\"\" # 创建操作 connectQuantOSAction =", "eventEngine, app, sheets): \"\"\"Constructor\"\"\" super(MainWindow, self).__init__() self.mainEngine = mainEngine self.eventEngine", "KeyError: self.widgetDict['aboutW'] = AboutWidget(self) self.widgetDict['aboutW'].show() # ---------------------------------------------------------------------- def closeEvent(self, event):", "# ---------------------------------------------------------------------- def initMenu(self): \"\"\"初始化菜单\"\"\" # 创建操作 connectQuantOSAction = QAction(u'连接和切换策略',", "self.mainEngine = mainEngine self.eventEngine = eventEngine self.app = app self.sheets", "u'合约', QtCore.Qt.BottomDockWidgetArea) widgetLogM, dockLogM = self.createDock(wgs.LogMonitor, u'日志', QtCore.Qt.BottomDockWidgetArea) widgetTradeM, dockTradeM", "= widgetClass(self.mainEngine, self.eventEngine) dock = QDockWidget(widgetName) dock.setWidget(widget) dock.setObjectName(widgetName) dock.setFeatures(dock.DockWidgetFloatable |", "\"\"\"主窗口\"\"\" signalStatusBar = QtCore.pyqtSignal(type(Event())) # ---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine,", "getCpuMemory(self): \"\"\"获取CPU和内存状态信息\"\"\" cpuPercent = psutil.cpu_percent() memoryPercent = psutil.virtual_memory().percent return u'CPU使用率:%d%%", "self.tabifyDockWidget(dockContractM, dockTradeM) self.tabifyDockWidget(dockTradeM, dockOrderM) self.tabifyDockWidget(dockAccountM, dockLogM) dockOrderM.raise_() 
dockLogM.raise_() # 连接组件之间的信号", "dock = QDockWidget(widgetName) dock.setWidget(widget) dock.setObjectName(widgetName) dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable) self.addDockWidget(widgetArea, dock)", "# ---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, app, sheets): \"\"\"Constructor\"\"\" super(MainWindow,", "QVBoxLayout from PyQt5 import QtCore from uiBasicWidget import * import", "dockOrderM.raise_() dockLogM.raise_() # 连接组件之间的信号 widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition) widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol) # ---------------------------------------------------------------------- def initMenu(self):", "= QAction(u'连接和切换策略', self) connectQuantOSAction.triggered.connect(self.connectQuantOS) exitAction = QAction(u'退出', self) exitAction.triggered.connect(self.close) aboutAction", "psutil.cpu_percent() memoryPercent = psutil.virtual_memory().percent return u'CPU使用率:%d%% 内存使用率:%d%%' % (cpuPercent, memoryPercent)", "# 10秒刷新一次 self.signalStatusBar.connect(self.updateStatusBar) self.eventEngine.register(EVENT_TIMER, self.signalStatusBar.emit) # ---------------------------------------------------------------------- def updateStatusBar(self, event):", "colorAction = QAction(u'变色', self) colorAction.triggered.connect(self.changeColor) # 创建菜单 menubar = self.menuBar()", "exitAction.triggered.connect(self.close) aboutAction = QAction(u'关于', self) aboutAction.triggered.connect(self.openAbout) colorAction = QAction(u'变色', self)", "QtCore.Qt.BottomDockWidgetArea) widgetOrderM, dockOrderM = self.createDock(wgs.OrderMonitor, u'委托', QtCore.Qt.BottomDockWidgetArea) self.tabifyDockWidget(dockContractM, dockTradeM) self.tabifyDockWidget(dockTradeM,", "= QtCore.pyqtSignal(type(Event())) # ---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, app, sheets):", "self.sbCount += 1 if 
self.sbCount == self.sbTrigger: self.sbCount = 0", "self.sbTrigger = 10 # 10秒刷新一次 self.signalStatusBar.connect(self.updateStatusBar) self.eventEngine.register(EVENT_TIMER, self.signalStatusBar.emit) # ----------------------------------------------------------------------", "reply = QMessageBox.question(self, u'退出', u'确认退出?', QMessageBox.Yes | QMessageBox.No, QMessageBox.No) if", "= event.dict_['data'] #self.setWindowTitle('VnTrader: ' + str(user) + \"/\" + str(stratid))", "# import sys # PyQt 4/5 compatibility try: from PyQt4.QtGui", "QLabel() self.statusLabel.setAlignment(QtCore.Qt.AlignLeft) self.statusBar().addPermanentWidget(self.statusLabel) self.statusLabel.setText(self.getCpuMemory()) self.sbCount = 0 self.sbTrigger = 10", "= self.createDock(wgs.LogMonitor, u'日志', QtCore.Qt.BottomDockWidgetArea) widgetTradeM, dockTradeM = self.createDock(wgs.TradeMonitor, u'成交', QtCore.Qt.BottomDockWidgetArea)", "def createDock(self, widgetClass, widgetName, widgetArea): \"\"\"创建停靠组件\"\"\" widget = widgetClass(self.mainEngine, self.eventEngine)", "u'成交', QtCore.Qt.BottomDockWidgetArea) widgetOrderM, dockOrderM = self.createDock(wgs.OrderMonitor, u'委托', QtCore.Qt.BottomDockWidgetArea) self.tabifyDockWidget(dockContractM, dockTradeM)", "mainEngine, eventEngine, app, sheets): \"\"\"Constructor\"\"\" super(MainWindow, self).__init__() self.mainEngine = mainEngine", "+ \"/\" + str(stratid)) self.sid = stratid # ---------------------------------------------------------------------- def", "# ---------------------------------------------------------------------- def initUi(self): \"\"\"\"\"\" self.setWindowTitle(u'关于VnTrader') text = u\"\"\" quantos", "self.menuBar() # 设计为只显示存在的接口 sysMenu = menubar.addMenu(u'系统') if 'quantos' in self.mainEngine.gatewayDict:", "= QMessageBox.question(self, u'退出', u'确认退出?', QMessageBox.Yes | QMessageBox.No, QMessageBox.No) if reply", "uiBasicWidget as wgs #from . 
import uiBasicWidget as wgs ########################################################################", "mainEngine self.eventEngine = eventEngine self.app = app self.sheets = sheets", "PyQt 4/5 compatibility try: from PyQt4.QtGui import QMainWindow, QDialog, QDockWidget,", "self.signalStatusBar.emit) # ---------------------------------------------------------------------- def updateStatusBar(self, event): \"\"\"在状态栏更新CPU和内存信息\"\"\" self.sbCount += 1", "self.sid = None def updateTitle(self, event): (user, stratid) = event.dict_['data']", "memoryPercent = psutil.virtual_memory().percent return u'CPU使用率:%d%% 内存使用率:%d%%' % (cpuPercent, memoryPercent) #", "self.createDock(wgs.MarketMonitor, u'行情', QtCore.Qt.RightDockWidgetArea) widgetPositionM, dockPositionM = self.createDock(wgs.PositionMonitor, u'持仓', QtCore.Qt.RightDockWidgetArea) widgetAccountM,", "self.widgetDict['aboutW'] = AboutWidget(self) self.widgetDict['aboutW'].show() # ---------------------------------------------------------------------- def closeEvent(self, event): \"\"\"关闭事件\"\"\"", "return widget, dock def changeColor(self): self.app.setStyleSheet(self.sheets[1]) self.sheets = [self.sheets[1], self.sheets[0]]", "\"/\" + str(stratid)) self.sid = stratid # ---------------------------------------------------------------------- def initUi(self):", "import QtCore from uiBasicWidget import * import uiBasicWidget as wgs", "wgs ######################################################################## class MainWindow(QMainWindow): \"\"\"主窗口\"\"\" signalStatusBar = QtCore.pyqtSignal(type(Event())) # ----------------------------------------------------------------------", "self.initUi() # ---------------------------------------------------------------------- def initUi(self): \"\"\"\"\"\" self.setWindowTitle(u'关于VnTrader') text = u\"\"\"", "try: from PyQt4.QtGui import QMainWindow, QDialog, QDockWidget, QAction, QHeaderView, QMessageBox,", "self.connectQuantOS() # 
---------------------------------------------------------------------- def initCentral(self): \"\"\"初始化中心区域\"\"\" widgetTradingW, dockTradingW = self.createDock(wgs.TradingWidget,", "str import psutil # import sys # PyQt 4/5 compatibility", "QtCore.Qt.LeftDockWidgetArea) widgetMarketM, dockMarketM = self.createDock(wgs.MarketMonitor, u'行情', QtCore.Qt.RightDockWidgetArea) widgetPositionM, dockPositionM =", "QtCore.pyqtSignal(type(Event())) # ---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, app, sheets): \"\"\"Constructor\"\"\"", "self.updateTitle) self.sid = None def updateTitle(self, event): (user, stratid) =", "= AboutWidget(self) self.widgetDict['aboutW'].show() # ---------------------------------------------------------------------- def closeEvent(self, event): \"\"\"关闭事件\"\"\" reply", "#self.setWindowTitle('VnTrader: ' + str(user) + \"/\" + str(stratid)) self.sid =", "dockPositionM = self.createDock(wgs.PositionMonitor, u'持仓', QtCore.Qt.RightDockWidgetArea) widgetAccountM, dockAccountM = self.createDock(wgs.AccountMonitor, u'资金',", "---------------------------------------------------------------------- def initCentral(self): \"\"\"初始化中心区域\"\"\" widgetTradingW, dockTradingW = self.createDock(wgs.TradingWidget, u'交易', QtCore.Qt.LeftDockWidgetArea)", "QMessageBox.No) if reply == QMessageBox.Yes: for widget in list(self.widgetDict.values()): widget.close()", "signalStatusBar = QtCore.pyqtSignal(type(Event())) # ---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, app,", "# encoding: UTF-8 from builtins import str import psutil #", "self.sheets = sheets self.widgetDict = {} # 用来保存子窗口的字典 self.initUi() self.eventEngine.register(EVENT_TITLE,", "from builtins import str import psutil # import sys #", "self.statusLabel = QLabel() self.statusLabel.setAlignment(QtCore.Qt.AlignLeft) self.statusBar().addPermanentWidget(self.statusLabel) 
self.statusLabel.setText(self.getCpuMemory()) self.sbCount = 0 self.sbTrigger", "self.eventEngine = eventEngine self.app = app self.sheets = sheets self.widgetDict", "= eventEngine self.app = app self.sheets = sheets self.widgetDict =", "self.widgetDict['aboutW'].show() except KeyError: self.widgetDict['aboutW'] = AboutWidget(self) self.widgetDict['aboutW'].show() # ---------------------------------------------------------------------- def", "widgetMarketM, dockMarketM = self.createDock(wgs.MarketMonitor, u'行情', QtCore.Qt.RightDockWidgetArea) widgetPositionM, dockPositionM = self.createDock(wgs.PositionMonitor,", "\"\"\"初始化界面\"\"\" self.setWindowTitle('VnTrader') self.initCentral() self.initMenu() # self.initStatusBar() def showLogin(self): self.connectQuantOS() #", "updateStatusBar(self, event): \"\"\"在状态栏更新CPU和内存信息\"\"\" self.sbCount += 1 if self.sbCount == self.sbTrigger:", "---------------------------------------------------------------------- def __init__(self, mainEngine, eventEngine, app, sheets): \"\"\"Constructor\"\"\" super(MainWindow, self).__init__()", "self.initStatusBar() def showLogin(self): self.connectQuantOS() # ---------------------------------------------------------------------- def initCentral(self): \"\"\"初始化中心区域\"\"\" widgetTradingW,", "trade client \"\"\" label = QLabel() label.setText(text) label.setMinimumWidth(500) vbox =", "except ImportError: from PyQt5.QtWidgets import QMainWindow, QDialog, QDockWidget, QAction, QHeaderView,", "dockMarketM = self.createDock(wgs.MarketMonitor, u'行情', QtCore.Qt.RightDockWidgetArea) widgetPositionM, dockPositionM = self.createDock(wgs.PositionMonitor, u'持仓',", "as wgs ######################################################################## class MainWindow(QMainWindow): \"\"\"主窗口\"\"\" signalStatusBar = QtCore.pyqtSignal(type(Event())) #", "createDock(self, widgetClass, widgetName, widgetArea): \"\"\"创建停靠组件\"\"\" widget = widgetClass(self.mainEngine, self.eventEngine) dock", "\"\"\"Constructor\"\"\" 
super(AboutWidget, self).__init__(parent) self.initUi() # ---------------------------------------------------------------------- def initUi(self): \"\"\"\"\"\" self.setWindowTitle(u'关于VnTrader')", "u'资金', QtCore.Qt.BottomDockWidgetArea) widgetContractM, dockContractM = self.createDock(wgs.ContractMonitor, u'合约', QtCore.Qt.BottomDockWidgetArea) widgetLogM, dockLogM", "self.addDockWidget(widgetArea, dock) return widget, dock def changeColor(self): self.app.setStyleSheet(self.sheets[1]) self.sheets =", "def showLogin(self): self.connectQuantOS() # ---------------------------------------------------------------------- def initCentral(self): \"\"\"初始化中心区域\"\"\" widgetTradingW, dockTradingW", "self.mainEngine.gatewayDict: sysMenu.addAction(connectQuantOSAction) sysMenu.addSeparator() sysMenu.addAction(exitAction) # 帮助 helpMenu = menubar.addMenu(u'帮助') helpMenu.addAction(aboutAction)", "---------------------------------------------------------------------- def openAbout(self): \"\"\"打开关于\"\"\" try: self.widgetDict['aboutW'].show() except KeyError: self.widgetDict['aboutW'] =", "sheets self.widgetDict = {} # 用来保存子窗口的字典 self.initUi() self.eventEngine.register(EVENT_TITLE, self.updateTitle) self.sid", "创建操作 connectQuantOSAction = QAction(u'连接和切换策略', self) connectQuantOSAction.triggered.connect(self.connectQuantOS) exitAction = QAction(u'退出', self)", "######################################################################## class AboutWidget(QDialog): \"\"\"显示关于信息\"\"\" # ---------------------------------------------------------------------- def __init__(self, parent=None): \"\"\"Constructor\"\"\"", "帮助 helpMenu = menubar.addMenu(u'帮助') helpMenu.addAction(aboutAction) helpMenu.addAction(colorAction) # ---------------------------------------------------------------------- def initStatusBar(self):", "\"\"\"\"\"\" self.setWindowTitle(u'关于VnTrader') text = u\"\"\" quantos trade client \"\"\" label", "# PyQt 4/5 compatibility try: from PyQt4.QtGui import QMainWindow, QDialog,", 
"widgetArea): \"\"\"创建停靠组件\"\"\" widget = widgetClass(self.mainEngine, self.eventEngine) dock = QDockWidget(widgetName) dock.setWidget(widget)", "wgs #from . import uiBasicWidget as wgs ######################################################################## class MainWindow(QMainWindow):", "u'持仓', QtCore.Qt.RightDockWidgetArea) widgetAccountM, dockAccountM = self.createDock(wgs.AccountMonitor, u'资金', QtCore.Qt.BottomDockWidgetArea) widgetContractM, dockContractM", "self.createDock(wgs.TradingWidget, u'交易', QtCore.Qt.LeftDockWidgetArea) widgetMarketM, dockMarketM = self.createDock(wgs.MarketMonitor, u'行情', QtCore.Qt.RightDockWidgetArea) widgetPositionM,", "QMessageBox.Yes: for widget in list(self.widgetDict.values()): widget.close() self.mainEngine.exit() event.accept() else: event.ignore()", "QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt4 import QtCore except ImportError:", "import psutil # import sys # PyQt 4/5 compatibility try:", "QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt5 import QtCore from uiBasicWidget", "self.app.setStyleSheet(self.sheets[1]) self.sheets = [self.sheets[1], self.sheets[0]] ######################################################################## class AboutWidget(QDialog): \"\"\"显示关于信息\"\"\" #", "self.sbCount = 0 self.statusLabel.setText(self.getCpuMemory()) # ---------------------------------------------------------------------- def getCpuMemory(self): \"\"\"获取CPU和内存状态信息\"\"\" cpuPercent", "closeEvent(self, event): \"\"\"关闭事件\"\"\" reply = QMessageBox.question(self, u'退出', u'确认退出?', QMessageBox.Yes |", "str(stratid)) self.sid = stratid # ---------------------------------------------------------------------- def initUi(self): \"\"\"初始化界面\"\"\" self.setWindowTitle('VnTrader')", "---------------------------------------------------------------------- def __init__(self, parent=None): \"\"\"Constructor\"\"\" super(AboutWidget, self).__init__(parent) self.initUi() # 
----------------------------------------------------------------------", "self.createDock(wgs.TradeMonitor, u'成交', QtCore.Qt.BottomDockWidgetArea) widgetOrderM, dockOrderM = self.createDock(wgs.OrderMonitor, u'委托', QtCore.Qt.BottomDockWidgetArea) self.tabifyDockWidget(dockContractM,", "= {} # 用来保存子窗口的字典 self.initUi() self.eventEngine.register(EVENT_TITLE, self.updateTitle) self.sid = None", "---------------------------------------------------------------------- def closeEvent(self, event): \"\"\"关闭事件\"\"\" reply = QMessageBox.question(self, u'退出', u'确认退出?',", "= sheets self.widgetDict = {} # 用来保存子窗口的字典 self.initUi() self.eventEngine.register(EVENT_TITLE, self.updateTitle)", "self) exitAction.triggered.connect(self.close) aboutAction = QAction(u'关于', self) aboutAction.triggered.connect(self.openAbout) colorAction = QAction(u'变色',", "= self.createDock(wgs.PositionMonitor, u'持仓', QtCore.Qt.RightDockWidgetArea) widgetAccountM, dockAccountM = self.createDock(wgs.AccountMonitor, u'资金', QtCore.Qt.BottomDockWidgetArea)", "# ---------------------------------------------------------------------- def closeEvent(self, event): \"\"\"关闭事件\"\"\" reply = QMessageBox.question(self, u'退出',", "1 if self.sbCount == self.sbTrigger: self.sbCount = 0 self.statusLabel.setText(self.getCpuMemory()) #", "QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt4 import QtCore except", "= QAction(u'变色', self) colorAction.triggered.connect(self.changeColor) # 创建菜单 menubar = self.menuBar() #", "\"\"\"初始化中心区域\"\"\" widgetTradingW, dockTradingW = self.createDock(wgs.TradingWidget, u'交易', QtCore.Qt.LeftDockWidgetArea) widgetMarketM, dockMarketM =", "client \"\"\" label = QLabel() label.setText(text) label.setMinimumWidth(500) vbox = QVBoxLayout()", "def __init__(self, mainEngine, eventEngine, app, sheets): \"\"\"Constructor\"\"\" super(MainWindow, self).__init__() self.mainEngine", "from PyQt5 import QtCore from uiBasicWidget import * import uiBasicWidget", "self.sbCount = 0 self.sbTrigger 
= 10 # 10秒刷新一次 self.signalStatusBar.connect(self.updateStatusBar) self.eventEngine.register(EVENT_TIMER,", "list(self.widgetDict.values()): widget.close() self.mainEngine.exit() event.accept() else: event.ignore() # ---------------------------------------------------------------------- def createDock(self,", "self.mainEngine.connect('quantos') # ---------------------------------------------------------------------- def openAbout(self): \"\"\"打开关于\"\"\" try: self.widgetDict['aboutW'].show() except KeyError:", "import uiBasicWidget as wgs ######################################################################## class MainWindow(QMainWindow): \"\"\"主窗口\"\"\" signalStatusBar =", "= QDockWidget(widgetName) dock.setWidget(widget) dock.setObjectName(widgetName) dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable) self.addDockWidget(widgetArea, dock) return", "= menubar.addMenu(u'系统') if 'quantos' in self.mainEngine.gatewayDict: sysMenu.addAction(connectQuantOSAction) sysMenu.addSeparator() sysMenu.addAction(exitAction) #", "aboutAction.triggered.connect(self.openAbout) colorAction = QAction(u'变色', self) colorAction.triggered.connect(self.changeColor) # 创建菜单 menubar =", "self.tabifyDockWidget(dockTradeM, dockOrderM) self.tabifyDockWidget(dockAccountM, dockLogM) dockOrderM.raise_() dockLogM.raise_() # 连接组件之间的信号 widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition) widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol)", "sysMenu.addAction(exitAction) # 帮助 helpMenu = menubar.addMenu(u'帮助') helpMenu.addAction(aboutAction) helpMenu.addAction(colorAction) # ----------------------------------------------------------------------", "widgetOrderM, dockOrderM = self.createDock(wgs.OrderMonitor, u'委托', QtCore.Qt.BottomDockWidgetArea) self.tabifyDockWidget(dockContractM, dockTradeM) self.tabifyDockWidget(dockTradeM, dockOrderM)", "= 10 # 10秒刷新一次 self.signalStatusBar.connect(self.updateStatusBar) self.eventEngine.register(EVENT_TIMER, 
self.signalStatusBar.emit) # ---------------------------------------------------------------------- def", "def connectQuantOS(self): self.mainEngine.connect('quantos') # ---------------------------------------------------------------------- def openAbout(self): \"\"\"打开关于\"\"\" try: self.widgetDict['aboutW'].show()", "0 self.statusLabel.setText(self.getCpuMemory()) # ---------------------------------------------------------------------- def getCpuMemory(self): \"\"\"获取CPU和内存状态信息\"\"\" cpuPercent = psutil.cpu_percent()", "return u'CPU使用率:%d%% 内存使用率:%d%%' % (cpuPercent, memoryPercent) # ---------------------------------------------------------------------- def connectQuantOS(self):", "self.widgetDict = {} # 用来保存子窗口的字典 self.initUi() self.eventEngine.register(EVENT_TITLE, self.updateTitle) self.sid =", "---------------------------------------------------------------------- def initStatusBar(self): \"\"\"初始化状态栏\"\"\" self.statusLabel = QLabel() self.statusLabel.setAlignment(QtCore.Qt.AlignLeft) self.statusBar().addPermanentWidget(self.statusLabel) self.statusLabel.setText(self.getCpuMemory())", "= QAction(u'关于', self) aboutAction.triggered.connect(self.openAbout) colorAction = QAction(u'变色', self) colorAction.triggered.connect(self.changeColor) #", "#from . 
import uiBasicWidget as wgs ######################################################################## class MainWindow(QMainWindow): \"\"\"主窗口\"\"\"", "QDockWidget, QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt5 import QtCore", "def initUi(self): \"\"\"\"\"\" self.setWindowTitle(u'关于VnTrader') text = u\"\"\" quantos trade client", "widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition) widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol) # ---------------------------------------------------------------------- def initMenu(self): \"\"\"初始化菜单\"\"\" # 创建操作 connectQuantOSAction", "= psutil.virtual_memory().percent return u'CPU使用率:%d%% 内存使用率:%d%%' % (cpuPercent, memoryPercent) # ----------------------------------------------------------------------", "self.createDock(wgs.PositionMonitor, u'持仓', QtCore.Qt.RightDockWidgetArea) widgetAccountM, dockAccountM = self.createDock(wgs.AccountMonitor, u'资金', QtCore.Qt.BottomDockWidgetArea) widgetContractM,", "as wgs #from . 
import uiBasicWidget as wgs ######################################################################## class", "from PyQt4.QtGui import QMainWindow, QDialog, QDockWidget, QAction, QHeaderView, QMessageBox, QLabel,", "self.createDock(wgs.AccountMonitor, u'资金', QtCore.Qt.BottomDockWidgetArea) widgetContractM, dockContractM = self.createDock(wgs.ContractMonitor, u'合约', QtCore.Qt.BottomDockWidgetArea) widgetLogM,", "from PyQt4 import QtCore except ImportError: from PyQt5.QtWidgets import QMainWindow,", "\"\"\"关闭事件\"\"\" reply = QMessageBox.question(self, u'退出', u'确认退出?', QMessageBox.Yes | QMessageBox.No, QMessageBox.No)", "helpMenu.addAction(colorAction) # ---------------------------------------------------------------------- def initStatusBar(self): \"\"\"初始化状态栏\"\"\" self.statusLabel = QLabel() self.statusLabel.setAlignment(QtCore.Qt.AlignLeft)", "= app self.sheets = sheets self.widgetDict = {} # 用来保存子窗口的字典", "app self.sheets = sheets self.widgetDict = {} # 用来保存子窗口的字典 self.initUi()", "memoryPercent) # ---------------------------------------------------------------------- def connectQuantOS(self): self.mainEngine.connect('quantos') # ---------------------------------------------------------------------- def openAbout(self):", "import uiBasicWidget as wgs #from . 
import uiBasicWidget as wgs", "dockLogM) dockOrderM.raise_() dockLogM.raise_() # 连接组件之间的信号 widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition) widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol) # ---------------------------------------------------------------------- def", "# ---------------------------------------------------------------------- def __init__(self, parent=None): \"\"\"Constructor\"\"\" super(AboutWidget, self).__init__(parent) self.initUi() #", "from PyQt5.QtWidgets import QMainWindow, QDialog, QDockWidget, QAction, QHeaderView, QMessageBox, QLabel,", "\"\"\"获取CPU和内存状态信息\"\"\" cpuPercent = psutil.cpu_percent() memoryPercent = psutil.virtual_memory().percent return u'CPU使用率:%d%% 内存使用率:%d%%'", "' + str(user) + \"/\" + str(stratid)) self.sid = stratid", "= menubar.addMenu(u'帮助') helpMenu.addAction(aboutAction) helpMenu.addAction(colorAction) # ---------------------------------------------------------------------- def initStatusBar(self): \"\"\"初始化状态栏\"\"\" self.statusLabel", "widgetTradingW, dockTradingW = self.createDock(wgs.TradingWidget, u'交易', QtCore.Qt.LeftDockWidgetArea) widgetMarketM, dockMarketM = self.createDock(wgs.MarketMonitor,", "psutil # import sys # PyQt 4/5 compatibility try: from", "self.sheets = [self.sheets[1], self.sheets[0]] ######################################################################## class AboutWidget(QDialog): \"\"\"显示关于信息\"\"\" # ----------------------------------------------------------------------", "exitAction = QAction(u'退出', self) exitAction.triggered.connect(self.close) aboutAction = QAction(u'关于', self) aboutAction.triggered.connect(self.openAbout)", "str(user) + \"/\" + str(stratid)) self.sid = stratid # ----------------------------------------------------------------------", "+ str(user) + \"/\" + str(stratid)) self.sid = stratid #", "= self.createDock(wgs.OrderMonitor, u'委托', QtCore.Qt.BottomDockWidgetArea) self.tabifyDockWidget(dockContractM, dockTradeM) 
self.tabifyDockWidget(dockTradeM, dockOrderM) self.tabifyDockWidget(dockAccountM, dockLogM)", "widgetClass(self.mainEngine, self.eventEngine) dock = QDockWidget(widgetName) dock.setWidget(widget) dock.setObjectName(widgetName) dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable)", "def updateTitle(self, event): (user, stratid) = event.dict_['data'] #self.setWindowTitle('VnTrader: ' +", "uiBasicWidget as wgs ######################################################################## class MainWindow(QMainWindow): \"\"\"主窗口\"\"\" signalStatusBar = QtCore.pyqtSignal(type(Event()))", "QMessageBox.question(self, u'退出', u'确认退出?', QMessageBox.Yes | QMessageBox.No, QMessageBox.No) if reply ==", "sysMenu.addAction(connectQuantOSAction) sysMenu.addSeparator() sysMenu.addAction(exitAction) # 帮助 helpMenu = menubar.addMenu(u'帮助') helpMenu.addAction(aboutAction) helpMenu.addAction(colorAction)", "[self.sheets[1], self.sheets[0]] ######################################################################## class AboutWidget(QDialog): \"\"\"显示关于信息\"\"\" # ---------------------------------------------------------------------- def __init__(self,", "import str import psutil # import sys # PyQt 4/5", "内存使用率:%d%%' % (cpuPercent, memoryPercent) # ---------------------------------------------------------------------- def connectQuantOS(self): self.mainEngine.connect('quantos') #", "stratid) = event.dict_['data'] #self.setWindowTitle('VnTrader: ' + str(user) + \"/\" +", "import sys # PyQt 4/5 compatibility try: from PyQt4.QtGui import", "def updateStatusBar(self, event): \"\"\"在状态栏更新CPU和内存信息\"\"\" self.sbCount += 1 if self.sbCount ==", "cpuPercent = psutil.cpu_percent() memoryPercent = psutil.virtual_memory().percent return u'CPU使用率:%d%% 内存使用率:%d%%' %", "def getCpuMemory(self): \"\"\"获取CPU和内存状态信息\"\"\" cpuPercent = psutil.cpu_percent() memoryPercent = psutil.virtual_memory().percent return", "self) connectQuantOSAction.triggered.connect(self.connectQuantOS) exitAction = 
QAction(u'退出', self) exitAction.triggered.connect(self.close) aboutAction = QAction(u'关于',", "QAction(u'变色', self) colorAction.triggered.connect(self.changeColor) # 创建菜单 menubar = self.menuBar() # 设计为只显示存在的接口", "app, sheets): \"\"\"Constructor\"\"\" super(MainWindow, self).__init__() self.mainEngine = mainEngine self.eventEngine =", "# ---------------------------------------------------------------------- def initStatusBar(self): \"\"\"初始化状态栏\"\"\" self.statusLabel = QLabel() self.statusLabel.setAlignment(QtCore.Qt.AlignLeft) self.statusBar().addPermanentWidget(self.statusLabel)", "quantos trade client \"\"\" label = QLabel() label.setText(text) label.setMinimumWidth(500) vbox", "# ---------------------------------------------------------------------- def connectQuantOS(self): self.mainEngine.connect('quantos') # ---------------------------------------------------------------------- def openAbout(self): \"\"\"打开关于\"\"\"", "self.statusLabel.setText(self.getCpuMemory()) # ---------------------------------------------------------------------- def getCpuMemory(self): \"\"\"获取CPU和内存状态信息\"\"\" cpuPercent = psutil.cpu_percent() memoryPercent", "u'退出', u'确认退出?', QMessageBox.Yes | QMessageBox.No, QMessageBox.No) if reply == QMessageBox.Yes:", "AboutWidget(QDialog): \"\"\"显示关于信息\"\"\" # ---------------------------------------------------------------------- def __init__(self, parent=None): \"\"\"Constructor\"\"\" super(AboutWidget, self).__init__(parent)", "widgetTradeM, dockTradeM = self.createDock(wgs.TradeMonitor, u'成交', QtCore.Qt.BottomDockWidgetArea) widgetOrderM, dockOrderM = self.createDock(wgs.OrderMonitor,", "widgetLogM, dockLogM = self.createDock(wgs.LogMonitor, u'日志', QtCore.Qt.BottomDockWidgetArea) widgetTradeM, dockTradeM = self.createDock(wgs.TradeMonitor,", "dock.setObjectName(widgetName) dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable) self.addDockWidget(widgetArea, dock) return widget, dock def", "\"\"\"创建停靠组件\"\"\" widget = 
widgetClass(self.mainEngine, self.eventEngine) dock = QDockWidget(widgetName) dock.setWidget(widget) dock.setObjectName(widgetName)", "encoding: UTF-8 from builtins import str import psutil # import", "= self.createDock(wgs.MarketMonitor, u'行情', QtCore.Qt.RightDockWidgetArea) widgetPositionM, dockPositionM = self.createDock(wgs.PositionMonitor, u'持仓', QtCore.Qt.RightDockWidgetArea)", "self.sbCount == self.sbTrigger: self.sbCount = 0 self.statusLabel.setText(self.getCpuMemory()) # ---------------------------------------------------------------------- def", "def initUi(self): \"\"\"初始化界面\"\"\" self.setWindowTitle('VnTrader') self.initCentral() self.initMenu() # self.initStatusBar() def showLogin(self):", "initMenu(self): \"\"\"初始化菜单\"\"\" # 创建操作 connectQuantOSAction = QAction(u'连接和切换策略', self) connectQuantOSAction.triggered.connect(self.connectQuantOS) exitAction", "QLabel, QVBoxLayout from PyQt4 import QtCore except ImportError: from PyQt5.QtWidgets", "= 0 self.sbTrigger = 10 # 10秒刷新一次 self.signalStatusBar.connect(self.updateStatusBar) self.eventEngine.register(EVENT_TIMER, self.signalStatusBar.emit)", "QMainWindow, QDialog, QDockWidget, QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout from PyQt5", "sys # PyQt 4/5 compatibility try: from PyQt4.QtGui import QMainWindow,", "def initStatusBar(self): \"\"\"初始化状态栏\"\"\" self.statusLabel = QLabel() self.statusLabel.setAlignment(QtCore.Qt.AlignLeft) self.statusBar().addPermanentWidget(self.statusLabel) self.statusLabel.setText(self.getCpuMemory()) self.sbCount", "---------------------------------------------------------------------- def getCpuMemory(self): \"\"\"获取CPU和内存状态信息\"\"\" cpuPercent = psutil.cpu_percent() memoryPercent = psutil.virtual_memory().percent", "self.sbTrigger: self.sbCount = 0 self.statusLabel.setText(self.getCpuMemory()) # ---------------------------------------------------------------------- def getCpuMemory(self): \"\"\"获取CPU和内存状态信息\"\"\"", "# 
---------------------------------------------------------------------- def updateStatusBar(self, event): \"\"\"在状态栏更新CPU和内存信息\"\"\" self.sbCount += 1 if", "# 创建菜单 menubar = self.menuBar() # 设计为只显示存在的接口 sysMenu = menubar.addMenu(u'系统')", "connectQuantOSAction.triggered.connect(self.connectQuantOS) exitAction = QAction(u'退出', self) exitAction.triggered.connect(self.close) aboutAction = QAction(u'关于', self)", "stratid # ---------------------------------------------------------------------- def initUi(self): \"\"\"初始化界面\"\"\" self.setWindowTitle('VnTrader') self.initCentral() self.initMenu() #", "self.initMenu() # self.initStatusBar() def showLogin(self): self.connectQuantOS() # ---------------------------------------------------------------------- def initCentral(self):", "\"\"\"打开关于\"\"\" try: self.widgetDict['aboutW'].show() except KeyError: self.widgetDict['aboutW'] = AboutWidget(self) self.widgetDict['aboutW'].show() #", "import QMainWindow, QDialog, QDockWidget, QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout from", "self.createDock(wgs.LogMonitor, u'日志', QtCore.Qt.BottomDockWidgetArea) widgetTradeM, dockTradeM = self.createDock(wgs.TradeMonitor, u'成交', QtCore.Qt.BottomDockWidgetArea) widgetOrderM,", "QAction(u'连接和切换策略', self) connectQuantOSAction.triggered.connect(self.connectQuantOS) exitAction = QAction(u'退出', self) exitAction.triggered.connect(self.close) aboutAction =", "# ---------------------------------------------------------------------- def getCpuMemory(self): \"\"\"获取CPU和内存状态信息\"\"\" cpuPercent = psutil.cpu_percent() memoryPercent =", "self) aboutAction.triggered.connect(self.openAbout) colorAction = QAction(u'变色', self) colorAction.triggered.connect(self.changeColor) # 创建菜单 menubar", "widget in list(self.widgetDict.values()): widget.close() self.mainEngine.exit() event.accept() else: event.ignore() # ----------------------------------------------------------------------", "PyQt5.QtWidgets import QMainWindow, QDialog, QDockWidget, QAction, 
QHeaderView, QMessageBox, QLabel, QVBoxLayout", "% (cpuPercent, memoryPercent) # ---------------------------------------------------------------------- def connectQuantOS(self): self.mainEngine.connect('quantos') # ----------------------------------------------------------------------", "QLabel, QVBoxLayout from PyQt5 import QtCore from uiBasicWidget import *", "QtCore except ImportError: from PyQt5.QtWidgets import QMainWindow, QDialog, QDockWidget, QAction,", "PyQt5 import QtCore from uiBasicWidget import * import uiBasicWidget as", "MainWindow(QMainWindow): \"\"\"主窗口\"\"\" signalStatusBar = QtCore.pyqtSignal(type(Event())) # ---------------------------------------------------------------------- def __init__(self, mainEngine,", "def openAbout(self): \"\"\"打开关于\"\"\" try: self.widgetDict['aboutW'].show() except KeyError: self.widgetDict['aboutW'] = AboutWidget(self)", "设计为只显示存在的接口 sysMenu = menubar.addMenu(u'系统') if 'quantos' in self.mainEngine.gatewayDict: sysMenu.addAction(connectQuantOSAction) sysMenu.addSeparator()", "PyQt4 import QtCore except ImportError: from PyQt5.QtWidgets import QMainWindow, QDialog,", "dockTradingW = self.createDock(wgs.TradingWidget, u'交易', QtCore.Qt.LeftDockWidgetArea) widgetMarketM, dockMarketM = self.createDock(wgs.MarketMonitor, u'行情',", "self.sheets[0]] ######################################################################## class AboutWidget(QDialog): \"\"\"显示关于信息\"\"\" # ---------------------------------------------------------------------- def __init__(self, parent=None):", "QtCore.Qt.BottomDockWidgetArea) self.tabifyDockWidget(dockContractM, dockTradeM) self.tabifyDockWidget(dockTradeM, dockOrderM) self.tabifyDockWidget(dockAccountM, dockLogM) dockOrderM.raise_() dockLogM.raise_() #", "dock.DockWidgetMovable) self.addDockWidget(widgetArea, dock) return widget, dock def changeColor(self): self.app.setStyleSheet(self.sheets[1]) self.sheets", "initUi(self): \"\"\"初始化界面\"\"\" self.setWindowTitle('VnTrader') self.initCentral() 
self.initMenu() # self.initStatusBar() def showLogin(self): self.connectQuantOS()", "self.statusLabel.setAlignment(QtCore.Qt.AlignLeft) self.statusBar().addPermanentWidget(self.statusLabel) self.statusLabel.setText(self.getCpuMemory()) self.sbCount = 0 self.sbTrigger = 10 #", ". import uiBasicWidget as wgs ######################################################################## class MainWindow(QMainWindow): \"\"\"主窗口\"\"\" signalStatusBar", "dockContractM = self.createDock(wgs.ContractMonitor, u'合约', QtCore.Qt.BottomDockWidgetArea) widgetLogM, dockLogM = self.createDock(wgs.LogMonitor, u'日志',", "initStatusBar(self): \"\"\"初始化状态栏\"\"\" self.statusLabel = QLabel() self.statusLabel.setAlignment(QtCore.Qt.AlignLeft) self.statusBar().addPermanentWidget(self.statusLabel) self.statusLabel.setText(self.getCpuMemory()) self.sbCount =", "= self.createDock(wgs.TradeMonitor, u'成交', QtCore.Qt.BottomDockWidgetArea) widgetOrderM, dockOrderM = self.createDock(wgs.OrderMonitor, u'委托', QtCore.Qt.BottomDockWidgetArea)", "QtCore.Qt.RightDockWidgetArea) widgetAccountM, dockAccountM = self.createDock(wgs.AccountMonitor, u'资金', QtCore.Qt.BottomDockWidgetArea) widgetContractM, dockContractM =", "eventEngine self.app = app self.sheets = sheets self.widgetDict = {}", "self.sid = stratid # ---------------------------------------------------------------------- def initUi(self): \"\"\"初始化界面\"\"\" self.setWindowTitle('VnTrader') self.initCentral()", "updateTitle(self, event): (user, stratid) = event.dict_['data'] #self.setWindowTitle('VnTrader: ' + str(user)", "QtCore from uiBasicWidget import * import uiBasicWidget as wgs #from", "self.initCentral() self.initMenu() # self.initStatusBar() def showLogin(self): self.connectQuantOS() # ---------------------------------------------------------------------- def", "# ---------------------------------------------------------------------- def createDock(self, widgetClass, widgetName, widgetArea): \"\"\"创建停靠组件\"\"\" widget =", "if 'quantos' in 
self.mainEngine.gatewayDict: sysMenu.addAction(connectQuantOSAction) sysMenu.addSeparator() sysMenu.addAction(exitAction) # 帮助 helpMenu", "dock.setWidget(widget) dock.setObjectName(widgetName) dock.setFeatures(dock.DockWidgetFloatable | dock.DockWidgetMovable) self.addDockWidget(widgetArea, dock) return widget, dock", "PyQt4.QtGui import QMainWindow, QDialog, QDockWidget, QAction, QHeaderView, QMessageBox, QLabel, QVBoxLayout", "widget, dock def changeColor(self): self.app.setStyleSheet(self.sheets[1]) self.sheets = [self.sheets[1], self.sheets[0]] ########################################################################", "# 帮助 helpMenu = menubar.addMenu(u'帮助') helpMenu.addAction(aboutAction) helpMenu.addAction(colorAction) # ---------------------------------------------------------------------- def", "event): \"\"\"关闭事件\"\"\" reply = QMessageBox.question(self, u'退出', u'确认退出?', QMessageBox.Yes | QMessageBox.No,", "widgetAccountM, dockAccountM = self.createDock(wgs.AccountMonitor, u'资金', QtCore.Qt.BottomDockWidgetArea) widgetContractM, dockContractM = self.createDock(wgs.ContractMonitor,", "self.setWindowTitle(u'关于VnTrader') text = u\"\"\" quantos trade client \"\"\" label =", "= self.createDock(wgs.TradingWidget, u'交易', QtCore.Qt.LeftDockWidgetArea) widgetMarketM, dockMarketM = self.createDock(wgs.MarketMonitor, u'行情', QtCore.Qt.RightDockWidgetArea)", "UTF-8 from builtins import str import psutil # import sys", "# 连接组件之间的信号 widgetPositionM.itemDoubleClicked.connect(widgetTradingW.closePosition) widgetMarketM.itemDoubleClicked.connect(widgetTradingW.fillSymbol) # ---------------------------------------------------------------------- def initMenu(self): \"\"\"初始化菜单\"\"\" #", "menubar = self.menuBar() # 设计为只显示存在的接口 sysMenu = menubar.addMenu(u'系统') if 'quantos'", "QtCore.Qt.RightDockWidgetArea) widgetPositionM, dockPositionM = self.createDock(wgs.PositionMonitor, u'持仓', QtCore.Qt.RightDockWidgetArea) widgetAccountM, dockAccountM =", "u'交易', 
QtCore.Qt.LeftDockWidgetArea) widgetMarketM, dockMarketM = self.createDock(wgs.MarketMonitor, u'行情', QtCore.Qt.RightDockWidgetArea) widgetPositionM, dockPositionM", "== QMessageBox.Yes: for widget in list(self.widgetDict.values()): widget.close() self.mainEngine.exit() event.accept() else:", "import * import uiBasicWidget as wgs #from . import uiBasicWidget", "builtins import str import psutil # import sys # PyQt", "compatibility try: from PyQt4.QtGui import QMainWindow, QDialog, QDockWidget, QAction, QHeaderView,", "self) colorAction.triggered.connect(self.changeColor) # 创建菜单 menubar = self.menuBar() # 设计为只显示存在的接口 sysMenu", "def __init__(self, parent=None): \"\"\"Constructor\"\"\" super(AboutWidget, self).__init__(parent) self.initUi() # ---------------------------------------------------------------------- def", "QMessageBox.No, QMessageBox.No) if reply == QMessageBox.Yes: for widget in list(self.widgetDict.values()):", "text = u\"\"\" quantos trade client \"\"\" label = QLabel()", "import QtCore except ImportError: from PyQt5.QtWidgets import QMainWindow, QDialog, QDockWidget,", "---------------------------------------------------------------------- def connectQuantOS(self): self.mainEngine.connect('quantos') # ---------------------------------------------------------------------- def openAbout(self): \"\"\"打开关于\"\"\" try:", "ImportError: from PyQt5.QtWidgets import QMainWindow, QDialog, QDockWidget, QAction, QHeaderView, QMessageBox,", "sysMenu.addSeparator() sysMenu.addAction(exitAction) # 帮助 helpMenu = menubar.addMenu(u'帮助') helpMenu.addAction(aboutAction) helpMenu.addAction(colorAction) #", "= None def updateTitle(self, event): (user, stratid) = event.dict_['data'] #self.setWindowTitle('VnTrader:", "# self.initStatusBar() def showLogin(self): self.connectQuantOS() # ---------------------------------------------------------------------- def initCentral(self): \"\"\"初始化中心区域\"\"\"", "changeColor(self): self.app.setStyleSheet(self.sheets[1]) self.sheets = 
[self.sheets[1], self.sheets[0]] ######################################################################## class AboutWidget(QDialog): \"\"\"显示关于信息\"\"\"", "# 创建操作 connectQuantOSAction = QAction(u'连接和切换策略', self) connectQuantOSAction.triggered.connect(self.connectQuantOS) exitAction = QAction(u'退出',", "* import uiBasicWidget as wgs #from . import uiBasicWidget as", "menubar.addMenu(u'帮助') helpMenu.addAction(aboutAction) helpMenu.addAction(colorAction) # ---------------------------------------------------------------------- def initStatusBar(self): \"\"\"初始化状态栏\"\"\" self.statusLabel =", "self.createDock(wgs.OrderMonitor, u'委托', QtCore.Qt.BottomDockWidgetArea) self.tabifyDockWidget(dockContractM, dockTradeM) self.tabifyDockWidget(dockTradeM, dockOrderM) self.tabifyDockWidget(dockAccountM, dockLogM) dockOrderM.raise_()", "u'CPU使用率:%d%% 内存使用率:%d%%' % (cpuPercent, memoryPercent) # ---------------------------------------------------------------------- def connectQuantOS(self): self.mainEngine.connect('quantos')", "+= 1 if self.sbCount == self.sbTrigger: self.sbCount = 0 self.statusLabel.setText(self.getCpuMemory())", "event): (user, stratid) = event.dict_['data'] #self.setWindowTitle('VnTrader: ' + str(user) +" ]
[ "elif 'del' == mage[0]: notice = del_item(mage[1],user_id) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'list'", "TextSendMessage(text=register_url)) elif 'add' == mage[0]: try: notice = add_item(mage[1],user_id,mage[2]) except:", "= event.source.user_id print(\"user_id =\", user_id) profile = line_bot_api.get_profile(user_id) #notify註冊時會post至/register @app.route(\"/register\",methods=['POST'])", "cursor.fetchone() cursor.execute(\"SELECT access_token FROM user_info WHERE user_id LIKE '%s'\"%(user_id)) acc_token", "r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) return r.status_code #使用者資料存入資料庫 def save_profile(username, code, user_id,", "{ \"Authorization\":\"Bearer \" + access_token, \"Content-Type\":\"application/x-www-form-urlencoded\" } params = {", "acc_token) VALUES ('%s','%s','%s')\"%(item_id ,user_id, acc_token)) connection.commit() #存檔 return 'Add Done!'", "cursor.fetchall() return acc_token[0][0] except Error as e: print(\"資料庫連接失敗4:\", e) finally:", "coding: utf-8 # In[ ]: import requests import json import", "\"Content-Type\":\"application/x-www-form-urlencoded\" } params = { \"grant_type\":\"authorization_code\", \"code\": code, \"redirect_uri\":\"https://line.husan.cc/register\", #", "params = { \"message\":\"\\n帳號連結成功\" } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) return r.status_code", "{ \"message\":\"\\n帳號連結成功\" } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) return r.status_code #使用者資料存入資料庫 def", "print(\"user_id =\", user_id) profile = line_bot_api.get_profile(user_id) #notify註冊時會post至/register @app.route(\"/register\",methods=['POST']) #註冊事件 def", "#state = user_id 使用者id print(\"user_id = \",state) profile = line_bot_api.get_profile(state)", "} params = { \"grant_type\":\"authorization_code\", \"code\": code, \"redirect_uri\":\"https://line.husan.cc/register\", # host_ip", 
"access_token #發送測試訊息至使用者notify def send_test_message(access_token): headers = { \"Authorization\":\"Bearer \" +", "cursor.execute(\"INSERT INTO sub_list (item_id, w_price ,user_id, acc_token) VALUES ('%s','%d','%s','%s')\"%(item_id, int(w_price)", "#拿使用者code向notify-bot post取得access_token def get_token(code): headers = { \"Content-Type\":\"application/x-www-form-urlencoded\" } params", "商品ID \\n└刪除商品通知。\\nEX:del DYAJID-A900AVJ4G')) except BaseException as e: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!')) print(e) #", "@app.route(\"/\", methods=['GET']) def index(): return 'OK!' #line 官方帳號 /callback測試Event @app.route(\"/callback\",", "connection.commit() #存檔 return 'Delete Done!' except Error as e: print(\"資料庫連接失敗3:\",", "connection.close() #刪除訂閱項目 def del_item(item_id, user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin',", "str(access_token), \"Content-Type\":\"application/x-www-form-urlencoded\", \"notificationDisabled\":\"True\" } params = { \"message\":\"\\n帳號連結成功\" } r", "\", code) state = request.form.get('state') #state = user_id 使用者id print(\"user_id", "item_id = '%s' AND user_id = '%s'\"%(item_id,user_id)) connection.commit() #存檔 return", "sent_message(message,access_token): headers = { \"Authorization\":\"Bearer \" + access_token, \"Content-Type\":\"application/x-www-form-urlencoded\" }", "handle webhook body try: handler.handle(body, signature) except InvalidSignatureError: print(\"Invalid signature.", "\",access_token) r_code = send_test_message(access_token)#發測試通知 if r_code == 200: save_profile(user_name, code,", "= request.get_data(as_text=True) app.logger.info(\"Request body: \" + body) # handle webhook", "@handler.add(FollowEvent) def handle_follow(event): line_bot_api.reply_message( event.reply_token, TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\")) #拿使用者code向notify-bot post取得access_token def get_token(code):", "mariadb.connect(host='192.168.1.10', user='admin', port='3307', 
password='pw', database='line_notify') if connection.is_connected(): db_Info = connection.get_server_info()", "def save_profile(username, code, user_id, access_token): try: connection = mariadb.connect(host='192.168.1.10', user='admin',", "sub_list WHERE item_id = '%s' AND user_id = '%s'\"%(item_id,user_id)) connection.commit()", "def del_item(item_id, user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw',", "try: if mage[0] == \"註冊\": line_bot_api.reply_message( event.reply_token, TextSendMessage(text=register_url)) elif 'add'", "save_profile(user_name, code, state, access_token)#存入資料庫 return '發送成功' else: return '發送失敗' #加好友時發送通知", "abort(400) return 'OK' #line官方帳號收到訊息時的Event @handler.add(MessageEvent, message=TextMessage) def handle_message(event): get_message =", "= 'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state=' + user_id mage = re.split(r'[\\s]\\s*',get_message) try: if mage[0]", "'%s' AND user_id = '%s'\"%(item_id,user_id)) connection.commit() #存檔 return 'Delete Done!'", "#拿code去要access_token print(\"code = \", code) state = request.form.get('state') #state =", "INTO sub_list (item_id,user_id, acc_token) VALUES ('%s','%s','%s')\"%(item_id ,user_id, acc_token)) connection.commit() #存檔", "port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"DELETE FROM", "#取得access_token 發訊息給使用者的token print(\"access_token = \",access_token) r_code = send_test_message(access_token)#發測試通知 if r_code", "user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"select", "def search_sub(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify')", "= cursor.fetchall() return 
acc_token[0][0] except Error as e: print(\"資料庫連接失敗4:\", e)", "{ \"message\":message } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) print(r.status_code) return r.status_code if", "in cursor: print(i) except Error as e: print(\"資料庫連接失敗0:\", e) finally:", "password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"SELECT item_id ,", "register(): if request.method == 'POST': code = request.form.get('code') #拿code去要access_token print(\"code", "e) finally: if (connection.is_connected()): cursor.close() connection.close() #發送訊息 def sent_message(message,access_token): headers", "'OK' #line官方帳號收到訊息時的Event @handler.add(MessageEvent, message=TextMessage) def handle_message(event): get_message = event.message.text print(get_message)", "acc_token)) connection.commit() #存檔 return 'Add Done!' except Error as e:", "FROM user_info WHERE user_id LIKE '%s'\"%(user_id)) acc_token = cursor.fetchall() return", "access_token) VALUES (null,'%s','%s','%s','%s')\"%(username, code, user_id, access_token)) connection.commit() #存檔 cursor.execute(\"SELECT *", "elif 'list' == mage[0]: item_list ,price_list= search_sub(user_id) notice = '您訂閱的項目有:'", "\\n└查詢通知項目。\\nadd 商品ID 價格 \\n└新增商品通知,低於設定價格時通知。\\nEX:add DYAJID-A900AVJ4G 500\\ndel 商品ID \\n└刪除商品通知。\\nEX:del DYAJID-A900AVJ4G')) except", "body = request.get_data(as_text=True) app.logger.info(\"Request body: \" + body) # handle", "mage[0]: notice = del_item(mage[1],user_id) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'list' == mage[0]: item_list", "[item[0] for item in sub_item] return item_list,price_list except Error as", "#!/usr/bin/env python # coding: utf-8 # In[ ]: import requests", "= \",state) profile = line_bot_api.get_profile(state) user_name = profile.display_name print(\"username =", "\" + str(access_token), \"Content-Type\":\"application/x-www-form-urlencoded\", \"notificationDisabled\":\"True\" } 
params = { \"message\":\"\\n帳號連結成功\"", "print(\"Invalid signature. Please check your channel access token/channel secret.\") abort(400)", "client_id \"client_secret\":\"client_secret\" #notify client_secret } r = requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params) source =", "requests import json import re from flask import Flask, request,", "except InvalidSignatureError: print(\"Invalid signature. Please check your channel access token/channel", "import re from flask import Flask, request, abort import mysql.connector", "Error as e: print(\"資料庫連接失敗2:\", e) finally: if (connection.is_connected()): cursor.close() connection.close()", "#使用者資料存入資料庫 def save_profile(username, code, user_id, access_token): try: connection = mariadb.connect(host='192.168.1.10',", "profile = line_bot_api.get_profile(user_id) #notify註冊時會post至/register @app.route(\"/register\",methods=['POST']) #註冊事件 def register(): if request.method", "connection.is_connected(): cursor = connection.cursor() cursor.execute(\"select database();\") record = cursor.fetchone() cursor.execute(\"SELECT", "when reply user_id = event.source.user_id print(\"user_id =\", user_id) profile =", "if (connection.is_connected()): cursor.close() connection.close() #取得notify_access_token def get_notify_id(user_id): try: connection =", "InvalidSignatureError: print(\"Invalid signature. 
Please check your channel access token/channel secret.\")", "X-Line-Signature header value signature = request.headers['X-Line-Signature'] # get request body", "if connection.is_connected(): cursor = connection.cursor() acc_token = get_notify_id(user_id) try: cursor.execute(\"INSERT", "source['access_token'] return access_token #發送測試訊息至使用者notify def send_test_message(access_token): headers = { \"Authorization\":\"Bearer", "(connection.is_connected()): cursor.close() connection.close() #刪除訂閱項目 def del_item(item_id, user_id): try: connection =", "as text body = request.get_data(as_text=True) app.logger.info(\"Request body: \" + body)", "500\\ndel 商品ID \\n└刪除商品通知。\\nEX:del DYAJID-A900AVJ4G')) except BaseException as e: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!')) print(e)", "FROM sub_list WHERE user_id LIKE '%s'\"%(user_id)) sub_item = cursor.fetchall() price_list", "= { \"message\":\"\\n帳號連結成功\" } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) return r.status_code #使用者資料存入資料庫", "'del' == mage[0]: notice = del_item(mage[1],user_id) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'list' ==", ") app = Flask(__name__) line_bot_api = LineBotApi('') handler = WebhookHandler('')", "json.loads(r.text) access_token = source['access_token'] return access_token #發送測試訊息至使用者notify def send_test_message(access_token): headers", "connection.is_connected(): cursor = connection.cursor() cursor.execute(\"DELETE FROM sub_list WHERE item_id =", "connection.is_connected(): db_Info = connection.get_server_info() print(\"資料庫版本:\", db_Info) cursor = connection.cursor() cursor.execute(\"INSERT", "(item_id, w_price ,user_id, acc_token) VALUES ('%s','%d','%s','%s')\"%(item_id, int(w_price) ,user_id, acc_token)) except:", "index(): return 'OK!' 
#line 官方帳號 /callback測試Event @app.route(\"/callback\", methods=['POST']) def callback():", "item_list ,price_list= search_sub(user_id) notice = '您訂閱的項目有:' for i in range(len(item_list)):", "from mysql.connector import Error from linebot import ( LineBotApi, WebhookHandler", "body) # handle webhook body try: handler.handle(body, signature) except InvalidSignatureError:", "'POST': code = request.form.get('code') #拿code去要access_token print(\"code = \", code) state", "#存檔 return 'Delete Done!' except Error as e: print(\"資料庫連接失敗3:\", e)", "'%s'\"%(user_id)) sub_item = cursor.fetchall() price_list = [item[1] for item in", "utf-8 # In[ ]: import requests import json import re", "notice = '您訂閱的項目有:' for i in range(len(item_list)): notice+='\\n' notice=notice +", "acc_token = cursor.fetchall() return acc_token[0][0] except Error as e: print(\"資料庫連接失敗4:\",", "import Error from linebot import ( LineBotApi, WebhookHandler ) from", "reply user_id = event.source.user_id print(\"user_id =\", user_id) profile = line_bot_api.get_profile(user_id)", "Done!' 
except Error as e: print(\"資料庫連接失敗2:\", e) finally: if (connection.is_connected()):", "event.reply_token, TextSendMessage(text=register_url)) elif 'add' == mage[0]: try: notice = add_item(mage[1],user_id,mage[2])", "= connection.cursor() cursor.execute(\"DELETE FROM sub_list WHERE item_id = '%s' AND", "WHERE user_id LIKE '%s'\"%(user_id)) acc_token = cursor.fetchall() return acc_token[0][0] except", "= { \"message\":message } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) print(r.status_code) return r.status_code", "db_Info) cursor = connection.cursor() cursor.execute(\"INSERT INTO user_info (id, username, code,", "user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): db_Info = connection.get_server_info() print(\"資料庫版本:\",", "= connection.get_server_info() print(\"資料庫版本:\", db_Info) cursor = connection.cursor() cursor.execute(\"INSERT INTO user_info", "cursor = connection.cursor() cursor.execute(\"DELETE FROM sub_list WHERE item_id = '%s'", "def register(): if request.method == 'POST': code = request.form.get('code') #拿code去要access_token", "handle_message(event): get_message = event.message.text print(get_message) user_id = event.source.user_id register_url =", "user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"SELECT", "= '您訂閱的項目有:' for i in range(len(item_list)): notice+='\\n' notice=notice + item_list[i]", "價格 \\n└新增商品通知,低於設定價格時通知。\\nEX:add DYAJID-A900AVJ4G 500\\ndel 商品ID \\n└刪除商品通知。\\nEX:del DYAJID-A900AVJ4G')) except BaseException as", "'add' == mage[0]: try: notice = add_item(mage[1],user_id,mage[2]) except: notice =", "linebot.models import ( MessageEvent, TextMessage, TextSendMessage, FollowEvent, ) app =", "'%s'\"%(item_id,user_id)) connection.commit() #存檔 return 'Delete Done!' 
except Error as e:", "\" + access_token, \"Content-Type\":\"application/x-www-form-urlencoded\" } params = { \"message\":message }", "username, code, user_id, access_token) VALUES (null,'%s','%s','%s','%s')\"%(username, code, user_id, access_token)) connection.commit()", "linebot import ( LineBotApi, WebhookHandler ) from linebot.exceptions import (", "== mage[0]: notice = del_item(mage[1],user_id) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'list' == mage[0]:", "notice = add_item(mage[1],user_id,mage[2]) except: notice = add_item(mage[1],user_id,None) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'del'", "user_id) profile = line_bot_api.get_profile(user_id) #notify註冊時會post至/register @app.route(\"/register\",methods=['POST']) #註冊事件 def register(): if", "mage[0]: acc_token = get_notify_id(user_id) status = sent_message(mage[1],acc_token) if status ==", "code) state = request.form.get('state') #state = user_id 使用者id print(\"user_id =", "acc_token) VALUES ('%s','%d','%s','%s')\"%(item_id, int(w_price) ,user_id, acc_token)) except: cursor.execute(\"INSERT INTO sub_list", "WebhookHandler ) from linebot.exceptions import ( InvalidSignatureError ) from linebot.models", "user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() acc_token", "* FROM user_info\") # 列出查詢的資料 for i in cursor: print(i)", "\\n└刪除商品通知。\\nEX:del DYAJID-A900AVJ4G')) except BaseException as e: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!')) print(e) # get", "# host_ip \"client_id\":\"client_id\", #notify client_id \"client_secret\":\"client_secret\" #notify client_secret } r", "發訊息給使用者的token print(\"access_token = \",access_token) r_code = send_test_message(access_token)#發測試通知 if r_code ==", "as e: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!')) print(e) # get user id when reply", "password='pw', 
database='line_notify') if connection.is_connected(): cursor = connection.cursor() acc_token = get_notify_id(user_id)", "database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"select database();\") record =", "app.logger.info(\"Request body: \" + body) # handle webhook body try:", "search_sub(user_id) notice = '您訂閱的項目有:' for i in range(len(item_list)): notice+='\\n' notice=notice", "# 列出查詢的資料 for i in cursor: print(i) except Error as", "if (connection.is_connected()): cursor.close() connection.close() #print(\"資料庫連線已關閉\") #新增訂閱項目 def add_item(item_id, user_id,w_price): try:", "print(\"資料庫連接失敗2:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #刪除訂閱項目 def del_item(item_id,", "channel access token/channel secret.\") abort(400) return 'OK' #line官方帳號收到訊息時的Event @handler.add(MessageEvent, message=TextMessage)", "VALUES ('%s','%d','%s','%s')\"%(item_id, int(w_price) ,user_id, acc_token)) except: cursor.execute(\"INSERT INTO sub_list (item_id,user_id,", "mage[0]: item_list ,price_list= search_sub(user_id) notice = '您訂閱的項目有:' for i in", "if connection.is_connected(): db_Info = connection.get_server_info() print(\"資料庫版本:\", db_Info) cursor = connection.cursor()", "sub_item = cursor.fetchall() price_list = [item[1] for item in sub_item]", "\\n└新增商品通知,低於設定價格時通知。\\nEX:add DYAJID-A900AVJ4G 500\\ndel 商品ID \\n└刪除商品通知。\\nEX:del DYAJID-A900AVJ4G')) except BaseException as e:", "( MessageEvent, TextMessage, TextSendMessage, FollowEvent, ) app = Flask(__name__) line_bot_api", "line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'list' == mage[0]: item_list ,price_list= search_sub(user_id) notice =", "== mage[0]: try: notice = add_item(mage[1],user_id,mage[2]) except: notice = add_item(mage[1],user_id,None)", "re from flask import Flask, request, abort import mysql.connector as", "sub_item] return item_list,price_list except Error as e: print(\"資料庫連接失敗1:\", e) finally:", "#刪除訂閱項目 
def del_item(item_id, user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307',", "# get user id when reply user_id = event.source.user_id print(\"user_id", "acc_token)) except: cursor.execute(\"INSERT INTO sub_list (item_id,user_id, acc_token) VALUES ('%s','%s','%s')\"%(item_id ,user_id,", "(null,'%s','%s','%s','%s')\"%(username, code, user_id, access_token)) connection.commit() #存檔 cursor.execute(\"SELECT * FROM user_info\")", "if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"select database();\") record = cursor.fetchone()", "FollowEvent, ) app = Flask(__name__) line_bot_api = LineBotApi('') handler =", "#print(\"資料庫連線已關閉\") #新增訂閱項目 def add_item(item_id, user_id,w_price): try: connection = mariadb.connect(host='192.168.1.10', user='admin',", "TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\")) #拿使用者code向notify-bot post取得access_token def get_token(code): headers = { \"Content-Type\":\"application/x-www-form-urlencoded\" }", "+str(price_list[i]) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'send' == mage[0]: acc_token = get_notify_id(user_id) status", "as e: print(\"資料庫連接失敗4:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #發送訊息", "( InvalidSignatureError ) from linebot.models import ( MessageEvent, TextMessage, TextSendMessage,", "Flask(__name__) line_bot_api = LineBotApi('') handler = WebhookHandler('') @app.route(\"/\", methods=['GET']) def", "商品ID 價格 \\n└新增商品通知,低於設定價格時通知。\\nEX:add DYAJID-A900AVJ4G 500\\ndel 商品ID \\n└刪除商品通知。\\nEX:del DYAJID-A900AVJ4G')) except BaseException", "= requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) return r.status_code #使用者資料存入資料庫 def save_profile(username, code, user_id, access_token):", "def callback(): # get X-Line-Signature header value signature = request.headers['X-Line-Signature']", "secret.\") abort(400) return 'OK' #line官方帳號收到訊息時的Event 
@handler.add(MessageEvent, message=TextMessage) def handle_message(event): get_message", "= '%s' AND user_id = '%s'\"%(item_id,user_id)) connection.commit() #存檔 return 'Delete", "connection.cursor() cursor.execute(\"INSERT INTO user_info (id, username, code, user_id, access_token) VALUES", "signature) except InvalidSignatureError: print(\"Invalid signature. Please check your channel access", "mage = re.split(r'[\\s]\\s*',get_message) try: if mage[0] == \"註冊\": line_bot_api.reply_message( event.reply_token,", "access_token = get_token(code) #取得access_token 發訊息給使用者的token print(\"access_token = \",access_token) r_code =", "del_item(mage[1],user_id) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'list' == mage[0]: item_list ,price_list= search_sub(user_id) notice", "as mariadb from mysql.connector import Error from linebot import (", "user_id,w_price): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if", "= \",user_name) #帳號名稱 access_token = get_token(code) #取得access_token 發訊息給使用者的token print(\"access_token =", "notice = del_item(mage[1],user_id) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'list' == mage[0]: item_list ,price_list=", "\"註冊\": line_bot_api.reply_message( event.reply_token, TextSendMessage(text=register_url)) elif 'add' == mage[0]: try: notice", "return access_token #發送測試訊息至使用者notify def send_test_message(access_token): headers = { \"Authorization\":\"Bearer \"", "try: cursor.execute(\"INSERT INTO sub_list (item_id, w_price ,user_id, acc_token) VALUES ('%s','%d','%s','%s')\"%(item_id,", "connection.close() #print(\"資料庫連線已關閉\") #新增訂閱項目 def add_item(item_id, user_id,w_price): try: connection = mariadb.connect(host='192.168.1.10',", "#line官方帳號收到訊息時的Event @handler.add(MessageEvent, message=TextMessage) def handle_message(event): get_message = event.message.text print(get_message) user_id", "#發送測試訊息至使用者notify def 
send_test_message(access_token): headers = { \"Authorization\":\"Bearer \" + str(access_token),", "def sent_message(message,access_token): headers = { \"Authorization\":\"Bearer \" + access_token, \"Content-Type\":\"application/x-www-form-urlencoded\"", "user_info WHERE user_id LIKE '%s'\"%(user_id)) acc_token = cursor.fetchall() return acc_token[0][0]", "e: print(\"資料庫連接失敗1:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #取得notify_access_token def", "DYAJID-A900AVJ4G')) except BaseException as e: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!')) print(e) # get user", "acc_token = get_notify_id(user_id) status = sent_message(mage[1],acc_token) if status == 200:", "'Delete Done!' except Error as e: print(\"資料庫連接失敗3:\", e) finally: if", "cursor: print(i) except Error as e: print(\"資料庫連接失敗0:\", e) finally: if", "+ access_token, \"Content-Type\":\"application/x-www-form-urlencoded\" } params = { \"message\":message } r", "= add_item(mage[1],user_id,None) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'del' == mage[0]: notice = del_item(mage[1],user_id)", "# handle webhook body try: handler.handle(body, signature) except InvalidSignatureError: print(\"Invalid", "if status == 200: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send OK!')) else: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist \\n└查詢通知項目。\\nadd 商品ID", "Please check your channel access token/channel secret.\") abort(400) return 'OK'", "print(i) except Error as e: print(\"資料庫連接失敗0:\", e) finally: if (connection.is_connected()):", "as e: print(\"資料庫連接失敗1:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #取得notify_access_token", "官方帳號 /callback測試Event @app.route(\"/callback\", methods=['POST']) def callback(): # get X-Line-Signature header", "} params = { \"message\":message } r = 
requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) print(r.status_code)", "finally: if (connection.is_connected()): cursor.close() connection.close() #刪除訂閱項目 def del_item(item_id, user_id): try:", "database();\") record = cursor.fetchone() cursor.execute(\"SELECT access_token FROM user_info WHERE user_id", "access_token, \"Content-Type\":\"application/x-www-form-urlencoded\" } params = { \"message\":message } r =", "python # coding: utf-8 # In[ ]: import requests import", "+ str(access_token), \"Content-Type\":\"application/x-www-form-urlencoded\", \"notificationDisabled\":\"True\" } params = { \"message\":\"\\n帳號連結成功\" }", "in sub_item] item_list = [item[0] for item in sub_item] return", "( LineBotApi, WebhookHandler ) from linebot.exceptions import ( InvalidSignatureError )", "except BaseException as e: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!')) print(e) # get user id", "return r.status_code #使用者資料存入資料庫 def save_profile(username, code, user_id, access_token): try: connection", "as e: print(\"資料庫連接失敗3:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #查詢訂閱項目", "+'\\t' +str(price_list[i]) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'send' == mage[0]: acc_token = get_notify_id(user_id)", "Flask, request, abort import mysql.connector as mariadb from mysql.connector import", "body try: handler.handle(body, signature) except InvalidSignatureError: print(\"Invalid signature. 
Please check", "# coding: utf-8 # In[ ]: import requests import json", "password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"select database();\") record", "int(w_price) ,user_id, acc_token)) except: cursor.execute(\"INSERT INTO sub_list (item_id,user_id, acc_token) VALUES", "sent_message(mage[1],acc_token) if status == 200: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send OK!')) else: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist \\n└查詢通知項目。\\nadd", "try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected():", "(id, username, code, user_id, access_token) VALUES (null,'%s','%s','%s','%s')\"%(username, code, user_id, access_token))", "'send' == mage[0]: acc_token = get_notify_id(user_id) status = sent_message(mage[1],acc_token) if", "handler.handle(body, signature) except InvalidSignatureError: print(\"Invalid signature. Please check your channel", "cursor = connection.cursor() cursor.execute(\"INSERT INTO user_info (id, username, code, user_id,", "= line_bot_api.get_profile(user_id) #notify註冊時會post至/register @app.route(\"/register\",methods=['POST']) #註冊事件 def register(): if request.method ==", "# get request body as text body = request.get_data(as_text=True) app.logger.info(\"Request", "finally: if (connection.is_connected()): cursor.close() connection.close() #取得notify_access_token def get_notify_id(user_id): try: connection", "print(\"username = \",user_name) #帳號名稱 access_token = get_token(code) #取得access_token 發訊息給使用者的token print(\"access_token", "methods=['GET']) def index(): return 'OK!' 
#line 官方帳號 /callback測試Event @app.route(\"/callback\", methods=['POST'])", "'list' == mage[0]: item_list ,price_list= search_sub(user_id) notice = '您訂閱的項目有:' for", "= source['access_token'] return access_token #發送測試訊息至使用者notify def send_test_message(access_token): headers = {", "from linebot import ( LineBotApi, WebhookHandler ) from linebot.exceptions import", "range(len(item_list)): notice+='\\n' notice=notice + item_list[i] +'\\t' +str(price_list[i]) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'send'", "request.form.get('code') #拿code去要access_token print(\"code = \", code) state = request.form.get('state') #state", "mage[0] == \"註冊\": line_bot_api.reply_message( event.reply_token, TextSendMessage(text=register_url)) elif 'add' == mage[0]:", "json import re from flask import Flask, request, abort import", "'發送成功' else: return '發送失敗' #加好友時發送通知 @handler.add(FollowEvent) def handle_follow(event): line_bot_api.reply_message( event.reply_token,", "access_token): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if", "database='line_notify') if connection.is_connected(): cursor = connection.cursor() acc_token = get_notify_id(user_id) try:", "print(\"access_token = \",access_token) r_code = send_test_message(access_token)#發測試通知 if r_code == 200:", "InvalidSignatureError ) from linebot.models import ( MessageEvent, TextMessage, TextSendMessage, FollowEvent,", "<gh_stars>1-10 #!/usr/bin/env python # coding: utf-8 # In[ ]: import", "in sub_item] return item_list,price_list except Error as e: print(\"資料庫連接失敗1:\", e)", "if (connection.is_connected()): cursor.close() connection.close() #查詢訂閱項目 def search_sub(user_id): try: connection =", "= LineBotApi('') handler = WebhookHandler('') @app.route(\"/\", methods=['GET']) def index(): return", "= { \"Authorization\":\"Bearer \" + access_token, \"Content-Type\":\"application/x-www-form-urlencoded\" } params =", "\"code\": 
code, \"redirect_uri\":\"https://line.husan.cc/register\", # host_ip \"client_id\":\"client_id\", #notify client_id \"client_secret\":\"client_secret\" #notify", "e: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!')) print(e) # get user id when reply user_id", "WHERE user_id LIKE '%s'\"%(user_id)) sub_item = cursor.fetchall() price_list = [item[1]", "+ user_id mage = re.split(r'[\\s]\\s*',get_message) try: if mage[0] == \"註冊\":", "except: notice = add_item(mage[1],user_id,None) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'del' == mage[0]: notice", "{ \"grant_type\":\"authorization_code\", \"code\": code, \"redirect_uri\":\"https://line.husan.cc/register\", # host_ip \"client_id\":\"client_id\", #notify client_id", "r.status_code #使用者資料存入資料庫 def save_profile(username, code, user_id, access_token): try: connection =", "connection.cursor() cursor.execute(\"select database();\") record = cursor.fetchone() cursor.execute(\"SELECT access_token FROM user_info", "if r_code == 200: save_profile(user_name, code, state, access_token)#存入資料庫 return '發送成功'", "headers = { \"Content-Type\":\"application/x-www-form-urlencoded\" } params = { \"grant_type\":\"authorization_code\", \"code\":", "connection.is_connected(): cursor = connection.cursor() acc_token = get_notify_id(user_id) try: cursor.execute(\"INSERT INTO", "value signature = request.headers['X-Line-Signature'] # get request body as text", "for i in cursor: print(i) except Error as e: print(\"資料庫連接失敗0:\",", "('%s','%s','%s')\"%(item_id ,user_id, acc_token)) connection.commit() #存檔 return 'Add Done!' except Error", "signature. 
Please check your channel access token/channel secret.\") abort(400) return", "import ( LineBotApi, WebhookHandler ) from linebot.exceptions import ( InvalidSignatureError", "= { \"grant_type\":\"authorization_code\", \"code\": code, \"redirect_uri\":\"https://line.husan.cc/register\", # host_ip \"client_id\":\"client_id\", #notify", "code, user_id, access_token)) connection.commit() #存檔 cursor.execute(\"SELECT * FROM user_info\") #", "cursor.close() connection.close() #取得notify_access_token def get_notify_id(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin',", "= request.form.get('code') #拿code去要access_token print(\"code = \", code) state = request.form.get('state')", "request.form.get('state') #state = user_id 使用者id print(\"user_id = \",state) profile =", "TextSendMessage, FollowEvent, ) app = Flask(__name__) line_bot_api = LineBotApi('') handler", "def handle_follow(event): line_bot_api.reply_message( event.reply_token, TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\")) #拿使用者code向notify-bot post取得access_token def get_token(code): headers", "e) finally: if (connection.is_connected()): cursor.close() connection.close() #取得notify_access_token def get_notify_id(user_id): try:", "= send_test_message(access_token)#發測試通知 if r_code == 200: save_profile(user_name, code, state, access_token)#存入資料庫", "AND user_id = '%s'\"%(item_id,user_id)) connection.commit() #存檔 return 'Delete Done!' 
except", "abort import mysql.connector as mariadb from mysql.connector import Error from", "import requests import json import re from flask import Flask,", "In[ ]: import requests import json import re from flask", "line_bot_api.get_profile(state) user_name = profile.display_name print(\"username = \",user_name) #帳號名稱 access_token =", "connection.cursor() acc_token = get_notify_id(user_id) try: cursor.execute(\"INSERT INTO sub_list (item_id, w_price", "#新增訂閱項目 def add_item(item_id, user_id,w_price): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307',", "cursor.execute(\"INSERT INTO user_info (id, username, code, user_id, access_token) VALUES (null,'%s','%s','%s','%s')\"%(username,", "200: save_profile(user_name, code, state, access_token)#存入資料庫 return '發送成功' else: return '發送失敗'", "e: print(\"資料庫連接失敗0:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #print(\"資料庫連線已關閉\") #新增訂閱項目", "i in range(len(item_list)): notice+='\\n' notice=notice + item_list[i] +'\\t' +str(price_list[i]) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice))", "profile = line_bot_api.get_profile(state) user_name = profile.display_name print(\"username = \",user_name) #帳號名稱", "= sent_message(mage[1],acc_token) if status == 200: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send OK!')) else: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist", "get_notify_id(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if", "state = request.form.get('state') #state = user_id 使用者id print(\"user_id = \",state)", "user id when reply user_id = event.source.user_id print(\"user_id =\", user_id)", "except Error as e: print(\"資料庫連接失敗0:\", e) finally: if (connection.is_connected()): cursor.close()", "= line_bot_api.get_profile(state) user_name = profile.display_name print(\"username = \",user_name) 
#帳號名稱 access_token", "handler = WebhookHandler('') @app.route(\"/\", methods=['GET']) def index(): return 'OK!' #line", "code, \"redirect_uri\":\"https://line.husan.cc/register\", # host_ip \"client_id\":\"client_id\", #notify client_id \"client_secret\":\"client_secret\" #notify client_secret", "get_message = event.message.text print(get_message) user_id = event.source.user_id register_url = 'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state='", "Error as e: print(\"資料庫連接失敗0:\", e) finally: if (connection.is_connected()): cursor.close() connection.close()", "port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() acc_token =", "import ( InvalidSignatureError ) from linebot.models import ( MessageEvent, TextMessage,", "if mage[0] == \"註冊\": line_bot_api.reply_message( event.reply_token, TextSendMessage(text=register_url)) elif 'add' ==", "{ \"Authorization\":\"Bearer \" + str(access_token), \"Content-Type\":\"application/x-www-form-urlencoded\", \"notificationDisabled\":\"True\" } params =", ") from linebot.exceptions import ( InvalidSignatureError ) from linebot.models import", "client_secret } r = requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params) source = json.loads(r.text) access_token =", "print(\"資料庫連接失敗1:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #取得notify_access_token def get_notify_id(user_id):", "line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist \\n└查詢通知項目。\\nadd 商品ID 價格 \\n└新增商品通知,低於設定價格時通知。\\nEX:add DYAJID-A900AVJ4G 500\\ndel 商品ID \\n└刪除商品通知。\\nEX:del DYAJID-A900AVJ4G'))", "get_token(code) #取得access_token 發訊息給使用者的token print(\"access_token = \",access_token) r_code = send_test_message(access_token)#發測試通知 if", "finally: if (connection.is_connected()): cursor.close() 
connection.close() #發送訊息 def sent_message(message,access_token): headers =", "\" + body) # handle webhook body try: handler.handle(body, signature)", "search_sub(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if", "return 'OK!' #line 官方帳號 /callback測試Event @app.route(\"/callback\", methods=['POST']) def callback(): #", "webhook body try: handler.handle(body, signature) except InvalidSignatureError: print(\"Invalid signature. Please", "if (connection.is_connected()): cursor.close() connection.close() #發送訊息 def sent_message(message,access_token): headers = {", "\"grant_type\":\"authorization_code\", \"code\": code, \"redirect_uri\":\"https://line.husan.cc/register\", # host_ip \"client_id\":\"client_id\", #notify client_id \"client_secret\":\"client_secret\"", "request body as text body = request.get_data(as_text=True) app.logger.info(\"Request body: \"", "cursor.execute(\"INSERT INTO sub_list (item_id,user_id, acc_token) VALUES ('%s','%s','%s')\"%(item_id ,user_id, acc_token)) connection.commit()", "except Error as e: print(\"資料庫連接失敗4:\", e) finally: if (connection.is_connected()): cursor.close()", "#註冊事件 def register(): if request.method == 'POST': code = request.form.get('code')", "mage[0]: try: notice = add_item(mage[1],user_id,mage[2]) except: notice = add_item(mage[1],user_id,None) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice))", "try: notice = add_item(mage[1],user_id,mage[2]) except: notice = add_item(mage[1],user_id,None) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif", "elif 'send' == mage[0]: acc_token = get_notify_id(user_id) status = sent_message(mage[1],acc_token)", "mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor()", "#加好友時發送通知 @handler.add(FollowEvent) def handle_follow(event): 
line_bot_api.reply_message( event.reply_token, TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\")) #拿使用者code向notify-bot post取得access_token def", "@handler.add(MessageEvent, message=TextMessage) def handle_message(event): get_message = event.message.text print(get_message) user_id =", "access_token)) connection.commit() #存檔 cursor.execute(\"SELECT * FROM user_info\") # 列出查詢的資料 for", "= json.loads(r.text) access_token = source['access_token'] return access_token #發送測試訊息至使用者notify def send_test_message(access_token):", "= { \"Authorization\":\"Bearer \" + str(access_token), \"Content-Type\":\"application/x-www-form-urlencoded\", \"notificationDisabled\":\"True\" } params", "database='line_notify') if connection.is_connected(): db_Info = connection.get_server_info() print(\"資料庫版本:\", db_Info) cursor =", "status == 200: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send OK!')) else: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist \\n└查詢通知項目。\\nadd 商品ID 價格", "OK!')) else: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist \\n└查詢通知項目。\\nadd 商品ID 價格 \\n└新增商品通知,低於設定價格時通知。\\nEX:add DYAJID-A900AVJ4G 500\\ndel 商品ID", "== 'POST': code = request.form.get('code') #拿code去要access_token print(\"code = \", code)", "w_price FROM sub_list WHERE user_id LIKE '%s'\"%(user_id)) sub_item = cursor.fetchall()", "cursor.execute(\"SELECT access_token FROM user_info WHERE user_id LIKE '%s'\"%(user_id)) acc_token =", "e) finally: if (connection.is_connected()): cursor.close() connection.close() #查詢訂閱項目 def search_sub(user_id): try:", "else: return '發送失敗' #加好友時發送通知 @handler.add(FollowEvent) def handle_follow(event): line_bot_api.reply_message( event.reply_token, TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\"))", "r = requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params) source = json.loads(r.text) access_token = source['access_token'] return", "acc_token = get_notify_id(user_id) 
try: cursor.execute(\"INSERT INTO sub_list (item_id, w_price ,user_id,", "cursor = connection.cursor() acc_token = get_notify_id(user_id) try: cursor.execute(\"INSERT INTO sub_list", "import mysql.connector as mariadb from mysql.connector import Error from linebot", "\"client_secret\":\"client_secret\" #notify client_secret } r = requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params) source = json.loads(r.text)", "= WebhookHandler('') @app.route(\"/\", methods=['GET']) def index(): return 'OK!' #line 官方帳號", "return item_list,price_list except Error as e: print(\"資料庫連接失敗1:\", e) finally: if", "+ body) # handle webhook body try: handler.handle(body, signature) except", "INTO user_info (id, username, code, user_id, access_token) VALUES (null,'%s','%s','%s','%s')\"%(username, code,", "= del_item(mage[1],user_id) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'list' == mage[0]: item_list ,price_list= search_sub(user_id)", "= connection.cursor() acc_token = get_notify_id(user_id) try: cursor.execute(\"INSERT INTO sub_list (item_id,", "LIKE '%s'\"%(user_id)) sub_item = cursor.fetchall() price_list = [item[1] for item", "= cursor.fetchone() cursor.execute(\"SELECT access_token FROM user_info WHERE user_id LIKE '%s'\"%(user_id))", "line_bot_api = LineBotApi('') handler = WebhookHandler('') @app.route(\"/\", methods=['GET']) def index():", "else: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist \\n└查詢通知項目。\\nadd 商品ID 價格 \\n└新增商品通知,低於設定價格時通知。\\nEX:add DYAJID-A900AVJ4G 500\\ndel 商品ID \\n└刪除商品通知。\\nEX:del", "if (connection.is_connected()): cursor.close() connection.close() #刪除訂閱項目 def del_item(item_id, user_id): try: connection", "if request.method == 'POST': code = request.form.get('code') #拿code去要access_token print(\"code =", "user_id = '%s'\"%(item_id,user_id)) connection.commit() #存檔 return 'Delete Done!' 
except Error", ", w_price FROM sub_list WHERE user_id LIKE '%s'\"%(user_id)) sub_item =", "request, abort import mysql.connector as mariadb from mysql.connector import Error", ",user_id, acc_token)) connection.commit() #存檔 return 'Add Done!' except Error as", "except: cursor.execute(\"INSERT INTO sub_list (item_id,user_id, acc_token) VALUES ('%s','%s','%s')\"%(item_id ,user_id, acc_token))", "notice+='\\n' notice=notice + item_list[i] +'\\t' +str(price_list[i]) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'send' ==", "line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'send' == mage[0]: acc_token = get_notify_id(user_id) status =", "notice=notice + item_list[i] +'\\t' +str(price_list[i]) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'send' == mage[0]:", "= '%s'\"%(item_id,user_id)) connection.commit() #存檔 return 'Delete Done!' except Error as", "get X-Line-Signature header value signature = request.headers['X-Line-Signature'] # get request", "= { \"Content-Type\":\"application/x-www-form-urlencoded\" } params = { \"grant_type\":\"authorization_code\", \"code\": code,", "mysql.connector as mariadb from mysql.connector import Error from linebot import", "def index(): return 'OK!' 
#line 官方帳號 /callback測試Event @app.route(\"/callback\", methods=['POST']) def", ",user_id, acc_token) VALUES ('%s','%d','%s','%s')\"%(item_id, int(w_price) ,user_id, acc_token)) except: cursor.execute(\"INSERT INTO", "cursor.close() connection.close() #查詢訂閱項目 def search_sub(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin',", "MessageEvent, TextMessage, TextSendMessage, FollowEvent, ) app = Flask(__name__) line_bot_api =", "w_price ,user_id, acc_token) VALUES ('%s','%d','%s','%s')\"%(item_id, int(w_price) ,user_id, acc_token)) except: cursor.execute(\"INSERT", "= Flask(__name__) line_bot_api = LineBotApi('') handler = WebhookHandler('') @app.route(\"/\", methods=['GET'])", "item_list[i] +'\\t' +str(price_list[i]) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'send' == mage[0]: acc_token =", "def handle_message(event): get_message = event.message.text print(get_message) user_id = event.source.user_id register_url", "for item in sub_item] return item_list,price_list except Error as e:", "finally: if (connection.is_connected()): cursor.close() connection.close() #print(\"資料庫連線已關閉\") #新增訂閱項目 def add_item(item_id, user_id,w_price):", "connection.close() #查詢訂閱項目 def search_sub(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307',", "WHERE item_id = '%s' AND user_id = '%s'\"%(item_id,user_id)) connection.commit() #存檔", "= event.source.user_id register_url = 'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state=' + user_id mage = re.split(r'[\\s]\\s*',get_message)", "\"Content-Type\":\"application/x-www-form-urlencoded\" } params = { \"message\":message } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params)", ",price_list= search_sub(user_id) notice = '您訂閱的項目有:' for i in range(len(item_list)): notice+='\\n'", "LIKE 
'%s'\"%(user_id)) acc_token = cursor.fetchall() return acc_token[0][0] except Error as", "= request.headers['X-Line-Signature'] # get request body as text body =", "#帳號名稱 access_token = get_token(code) #取得access_token 發訊息給使用者的token print(\"access_token = \",access_token) r_code", "code, user_id, access_token): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw',", "Error as e: print(\"資料庫連接失敗4:\", e) finally: if (connection.is_connected()): cursor.close() connection.close()", "state, access_token)#存入資料庫 return '發送成功' else: return '發送失敗' #加好友時發送通知 @handler.add(FollowEvent) def", "access_token FROM user_info WHERE user_id LIKE '%s'\"%(user_id)) acc_token = cursor.fetchall()", "def add_item(item_id, user_id,w_price): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw',", "= cursor.fetchall() price_list = [item[1] for item in sub_item] item_list", "requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params) source = json.loads(r.text) access_token = source['access_token'] return access_token #發送測試訊息至使用者notify", "#存檔 cursor.execute(\"SELECT * FROM user_info\") # 列出查詢的資料 for i in", "post取得access_token def get_token(code): headers = { \"Content-Type\":\"application/x-www-form-urlencoded\" } params =", "database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"SELECT item_id , w_price", "as e: print(\"資料庫連接失敗2:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #刪除訂閱項目", "} params = { \"message\":\"\\n帳號連結成功\" } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) return", "@app.route(\"/callback\", methods=['POST']) def callback(): # get X-Line-Signature header value signature", "app = Flask(__name__) line_bot_api = LineBotApi('') handler = WebhookHandler('') @app.route(\"/\",", "id when reply user_id = event.source.user_id print(\"user_id =\", 
user_id) profile", "print(\"資料庫連接失敗0:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #print(\"資料庫連線已關閉\") #新增訂閱項目 def", "} r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) print(r.status_code) return r.status_code if __name__ ==", "access token/channel secret.\") abort(400) return 'OK' #line官方帳號收到訊息時的Event @handler.add(MessageEvent, message=TextMessage) def", "del_item(item_id, user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify')", "= get_notify_id(user_id) try: cursor.execute(\"INSERT INTO sub_list (item_id, w_price ,user_id, acc_token)", "= connection.cursor() cursor.execute(\"select database();\") record = cursor.fetchone() cursor.execute(\"SELECT access_token FROM", "print(e) # get user id when reply user_id = event.source.user_id", "check your channel access token/channel secret.\") abort(400) return 'OK' #line官方帳號收到訊息時的Event", "'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state=' + user_id mage = re.split(r'[\\s]\\s*',get_message) try: if mage[0] ==", "from linebot.exceptions import ( InvalidSignatureError ) from linebot.models import (", "re.split(r'[\\s]\\s*',get_message) try: if mage[0] == \"註冊\": line_bot_api.reply_message( event.reply_token, TextSendMessage(text=register_url)) elif", "line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'del' == mage[0]: notice = del_item(mage[1],user_id) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif", "\"notificationDisabled\":\"True\" } params = { \"message\":\"\\n帳號連結成功\" } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params)", "\",user_name) #帳號名稱 access_token = get_token(code) #取得access_token 發訊息給使用者的token print(\"access_token = \",access_token)", 
"request.method == 'POST': code = request.form.get('code') #拿code去要access_token print(\"code = \",", ") from linebot.models import ( MessageEvent, TextMessage, TextSendMessage, FollowEvent, )", "access_token = source['access_token'] return access_token #發送測試訊息至使用者notify def send_test_message(access_token): headers =", "\"message\":\"\\n帳號連結成功\" } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) return r.status_code #使用者資料存入資料庫 def save_profile(username,", "BaseException as e: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!')) print(e) # get user id when", "event.source.user_id register_url = 'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state=' + user_id mage = re.split(r'[\\s]\\s*',get_message) try:", "user_id mage = re.split(r'[\\s]\\s*',get_message) try: if mage[0] == \"註冊\": line_bot_api.reply_message(", "user_info (id, username, code, user_id, access_token) VALUES (null,'%s','%s','%s','%s')\"%(username, code, user_id,", "your channel access token/channel secret.\") abort(400) return 'OK' #line官方帳號收到訊息時的Event @handler.add(MessageEvent,", "return '發送成功' else: return '發送失敗' #加好友時發送通知 @handler.add(FollowEvent) def handle_follow(event): line_bot_api.reply_message(", "Error as e: print(\"資料庫連接失敗3:\", e) finally: if (connection.is_connected()): cursor.close() connection.close()", "user_id LIKE '%s'\"%(user_id)) sub_item = cursor.fetchall() price_list = [item[1] for", "i in cursor: print(i) except Error as e: print(\"資料庫連接失敗0:\", e)", "VALUES (null,'%s','%s','%s','%s')\"%(username, code, user_id, access_token)) connection.commit() #存檔 cursor.execute(\"SELECT * FROM", ",user_id, acc_token)) except: cursor.execute(\"INSERT INTO sub_list (item_id,user_id, acc_token) VALUES ('%s','%s','%s')\"%(item_id", "(connection.is_connected()): cursor.close() connection.close() 
#取得notify_access_token def get_notify_id(user_id): try: connection = mariadb.connect(host='192.168.1.10',", "user_id, access_token): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify')", "= connection.cursor() cursor.execute(\"SELECT item_id , w_price FROM sub_list WHERE user_id", "{ \"Content-Type\":\"application/x-www-form-urlencoded\" } params = { \"grant_type\":\"authorization_code\", \"code\": code, \"redirect_uri\":\"https://line.husan.cc/register\",", "headers = { \"Authorization\":\"Bearer \" + access_token, \"Content-Type\":\"application/x-www-form-urlencoded\" } params", "cursor.execute(\"DELETE FROM sub_list WHERE item_id = '%s' AND user_id =", "= requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) print(r.status_code) return r.status_code if __name__ == \"__main__\": app.run('0.0.0.0',port=3000)", "add_item(mage[1],user_id,mage[2]) except: notice = add_item(mage[1],user_id,None) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'del' == mage[0]:", "line_bot_api.reply_message( event.reply_token, TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\")) #拿使用者code向notify-bot post取得access_token def get_token(code): headers = {", "@app.route(\"/register\",methods=['POST']) #註冊事件 def register(): if request.method == 'POST': code =", "line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!')) print(e) # get user id when reply user_id =", "\"redirect_uri\":\"https://line.husan.cc/register\", # host_ip \"client_id\":\"client_id\", #notify client_id \"client_secret\":\"client_secret\" #notify client_secret }", "database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"DELETE FROM sub_list WHERE", "user_name = profile.display_name print(\"username = \",user_name) #帳號名稱 access_token = get_token(code)", "user_id = event.source.user_id register_url = 
'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state=' + user_id mage =", "]: import requests import json import re from flask import", "#notify註冊時會post至/register @app.route(\"/register\",methods=['POST']) #註冊事件 def register(): if request.method == 'POST': code", "= \", code) state = request.form.get('state') #state = user_id 使用者id", "get request body as text body = request.get_data(as_text=True) app.logger.info(\"Request body:", "e: print(\"資料庫連接失敗4:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #發送訊息 def", "/callback測試Event @app.route(\"/callback\", methods=['POST']) def callback(): # get X-Line-Signature header value", "== \"註冊\": line_bot_api.reply_message( event.reply_token, TextSendMessage(text=register_url)) elif 'add' == mage[0]: try:", "== 200: save_profile(user_name, code, state, access_token)#存入資料庫 return '發送成功' else: return", "def get_notify_id(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify')", "as e: print(\"資料庫連接失敗0:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #print(\"資料庫連線已關閉\")", "cursor.execute(\"SELECT item_id , w_price FROM sub_list WHERE user_id LIKE '%s'\"%(user_id))", "params = { \"grant_type\":\"authorization_code\", \"code\": code, \"redirect_uri\":\"https://line.husan.cc/register\", # host_ip \"client_id\":\"client_id\",", "header value signature = request.headers['X-Line-Signature'] # get request body as", "\"Authorization\":\"Bearer \" + str(access_token), \"Content-Type\":\"application/x-www-form-urlencoded\", \"notificationDisabled\":\"True\" } params = {", "+ item_list[i] +'\\t' +str(price_list[i]) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'send' == mage[0]: acc_token", "user_id): try: connection = mariadb.connect(host='192.168.1.10', 
user='admin', port='3307', password='pw', database='line_notify') if", "= request.form.get('state') #state = user_id 使用者id print(\"user_id = \",state) profile", "= re.split(r'[\\s]\\s*',get_message) try: if mage[0] == \"註冊\": line_bot_api.reply_message( event.reply_token, TextSendMessage(text=register_url))", "\",state) profile = line_bot_api.get_profile(state) user_name = profile.display_name print(\"username = \",user_name)", "item_id , w_price FROM sub_list WHERE user_id LIKE '%s'\"%(user_id)) sub_item", "sub_list WHERE user_id LIKE '%s'\"%(user_id)) sub_item = cursor.fetchall() price_list =", "event.reply_token, TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\")) #拿使用者code向notify-bot post取得access_token def get_token(code): headers = { \"Content-Type\":\"application/x-www-form-urlencoded\"", "[item[1] for item in sub_item] item_list = [item[0] for item", "source = json.loads(r.text) access_token = source['access_token'] return access_token #發送測試訊息至使用者notify def", "price_list = [item[1] for item in sub_item] item_list = [item[0]", "= profile.display_name print(\"username = \",user_name) #帳號名稱 access_token = get_token(code) #取得access_token", "#存檔 return 'Add Done!' 
except Error as e: print(\"資料庫連接失敗2:\", e)", "r_code = send_test_message(access_token)#發測試通知 if r_code == 200: save_profile(user_name, code, state,", "#取得notify_access_token def get_notify_id(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw',", "(connection.is_connected()): cursor.close() connection.close() #發送訊息 def sent_message(message,access_token): headers = { \"Authorization\":\"Bearer", "mysql.connector import Error from linebot import ( LineBotApi, WebhookHandler )", "FROM user_info\") # 列出查詢的資料 for i in cursor: print(i) except", "FROM sub_list WHERE item_id = '%s' AND user_id = '%s'\"%(item_id,user_id))", "('%s','%d','%s','%s')\"%(item_id, int(w_price) ,user_id, acc_token)) except: cursor.execute(\"INSERT INTO sub_list (item_id,user_id, acc_token)", "request.get_data(as_text=True) app.logger.info(\"Request body: \" + body) # handle webhook body", "print(get_message) user_id = event.source.user_id register_url = 'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state=' + user_id mage", "add_item(item_id, user_id,w_price): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify')", "return acc_token[0][0] except Error as e: print(\"資料庫連接失敗4:\", e) finally: if", "save_profile(username, code, user_id, access_token): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307',", "get_notify_id(user_id) try: cursor.execute(\"INSERT INTO sub_list (item_id, w_price ,user_id, acc_token) VALUES", "request.headers['X-Line-Signature'] # get request body as text body = request.get_data(as_text=True)", "connection.commit() #存檔 cursor.execute(\"SELECT * FROM user_info\") # 列出查詢的資料 for i", "headers = { \"Authorization\":\"Bearer \" + str(access_token), \"Content-Type\":\"application/x-www-form-urlencoded\", 
\"notificationDisabled\":\"True\" }", "register_url = 'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state=' + user_id mage = re.split(r'[\\s]\\s*',get_message) try: if", "Done!' except Error as e: print(\"資料庫連接失敗3:\", e) finally: if (connection.is_connected()):", "#查詢訂閱項目 def search_sub(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw',", "connection.commit() #存檔 return 'Add Done!' except Error as e: print(\"資料庫連接失敗2:\",", "cursor.execute(\"SELECT * FROM user_info\") # 列出查詢的資料 for i in cursor:", "e) finally: if (connection.is_connected()): cursor.close() connection.close() #刪除訂閱項目 def del_item(item_id, user_id):", "sub_list (item_id,user_id, acc_token) VALUES ('%s','%s','%s')\"%(item_id ,user_id, acc_token)) connection.commit() #存檔 return", "return 'OK' #line官方帳號收到訊息時的Event @handler.add(MessageEvent, message=TextMessage) def handle_message(event): get_message = event.message.text", "} r = requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params) source = json.loads(r.text) access_token = source['access_token']", "user_id 使用者id print(\"user_id = \",state) profile = line_bot_api.get_profile(state) user_name =", "列出查詢的資料 for i in cursor: print(i) except Error as e:", "in range(len(item_list)): notice+='\\n' notice=notice + item_list[i] +'\\t' +str(price_list[i]) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif", "VALUES ('%s','%s','%s')\"%(item_id ,user_id, acc_token)) connection.commit() #存檔 return 'Add Done!' except", "return '發送失敗' #加好友時發送通知 @handler.add(FollowEvent) def handle_follow(event): line_bot_api.reply_message( event.reply_token, TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\")) #拿使用者code向notify-bot", "WebhookHandler('') @app.route(\"/\", methods=['GET']) def index(): return 'OK!' 
#line 官方帳號 /callback測試Event", "add_item(mage[1],user_id,None) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'del' == mage[0]: notice = del_item(mage[1],user_id) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice))", "user_id, access_token) VALUES (null,'%s','%s','%s','%s')\"%(username, code, user_id, access_token)) connection.commit() #存檔 cursor.execute(\"SELECT", "'Add Done!' except Error as e: print(\"資料庫連接失敗2:\", e) finally: if", "= [item[0] for item in sub_item] return item_list,price_list except Error", "= get_token(code) #取得access_token 發訊息給使用者的token print(\"access_token = \",access_token) r_code = send_test_message(access_token)#發測試通知", "user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"DELETE", "event.message.text print(get_message) user_id = event.source.user_id register_url = 'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state=' + user_id", "line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send OK!')) else: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist \\n└查詢通知項目。\\nadd 商品ID 價格 \\n└新增商品通知,低於設定價格時通知。\\nEX:add DYAJID-A900AVJ4G 500\\ndel", "e) finally: if (connection.is_connected()): cursor.close() connection.close() #print(\"資料庫連線已關閉\") #新增訂閱項目 def add_item(item_id,", "connection.get_server_info() print(\"資料庫版本:\", db_Info) cursor = connection.cursor() cursor.execute(\"INSERT INTO user_info (id,", "cursor.close() connection.close() #發送訊息 def sent_message(message,access_token): headers = { \"Authorization\":\"Bearer \"", "flask import Flask, request, abort import mysql.connector as mariadb from", "import json import re from flask import Flask, request, abort", "item_list,price_list except Error as e: print(\"資料庫連接失敗1:\", e) finally: if 
(connection.is_connected()):", "'OK!' #line 官方帳號 /callback測試Event @app.route(\"/callback\", methods=['POST']) def callback(): # get", "200: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send OK!')) else: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist \\n└查詢通知項目。\\nadd 商品ID 價格 \\n└新增商品通知,低於設定價格時通知。\\nEX:add DYAJID-A900AVJ4G", "get_token(code): headers = { \"Content-Type\":\"application/x-www-form-urlencoded\" } params = { \"grant_type\":\"authorization_code\",", "user_id = event.source.user_id print(\"user_id =\", user_id) profile = line_bot_api.get_profile(user_id) #notify註冊時會post至/register", "== mage[0]: acc_token = get_notify_id(user_id) status = sent_message(mage[1],acc_token) if status", "== 200: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send OK!')) else: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='請輸入指令:\\nlist \\n└查詢通知項目。\\nadd 商品ID 價格 \\n└新增商品通知,低於設定價格時通知。\\nEX:add", "except Error as e: print(\"資料庫連接失敗2:\", e) finally: if (connection.is_connected()): cursor.close()", "port='3307', password='pw', database='line_notify') if connection.is_connected(): db_Info = connection.get_server_info() print(\"資料庫版本:\", db_Info)", "status = sent_message(mage[1],acc_token) if status == 200: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send OK!')) else:", "return 'Delete Done!' 
except Error as e: print(\"資料庫連接失敗3:\", e) finally:", "send_test_message(access_token): headers = { \"Authorization\":\"Bearer \" + str(access_token), \"Content-Type\":\"application/x-www-form-urlencoded\", \"notificationDisabled\":\"True\"", "= connection.cursor() cursor.execute(\"INSERT INTO user_info (id, username, code, user_id, access_token)", "== mage[0]: item_list ,price_list= search_sub(user_id) notice = '您訂閱的項目有:' for i", "from linebot.models import ( MessageEvent, TextMessage, TextSendMessage, FollowEvent, ) app", "host_ip \"client_id\":\"client_id\", #notify client_id \"client_secret\":\"client_secret\" #notify client_secret } r =", "(item_id,user_id, acc_token) VALUES ('%s','%s','%s')\"%(item_id ,user_id, acc_token)) connection.commit() #存檔 return 'Add", "} r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) return r.status_code #使用者資料存入資料庫 def save_profile(username, code,", "linebot.exceptions import ( InvalidSignatureError ) from linebot.models import ( MessageEvent,", "DYAJID-A900AVJ4G 500\\ndel 商品ID \\n└刪除商品通知。\\nEX:del DYAJID-A900AVJ4G')) except BaseException as e: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='指令錯誤,請重新確認!'))", "#notify client_id \"client_secret\":\"client_secret\" #notify client_secret } r = requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params) source", "connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): db_Info", "params = { \"message\":message } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) print(r.status_code) return", "= event.message.text print(get_message) user_id = event.source.user_id register_url = 'https://notify-bot.line.me/oauth/authorize?response_type=code&scope=notify&response_mode=form_post&client_id=\"id\"&redirect_uri=https://line.husan.cc/register&state=' +", "#發送訊息 def 
sent_message(message,access_token): headers = { \"Authorization\":\"Bearer \" + access_token,", "if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"DELETE FROM sub_list WHERE item_id", "except Error as e: print(\"資料庫連接失敗3:\", e) finally: if (connection.is_connected()): cursor.close()", "if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"SELECT item_id , w_price FROM", "send_test_message(access_token)#發測試通知 if r_code == 200: save_profile(user_name, code, state, access_token)#存入資料庫 return", "cursor = connection.cursor() cursor.execute(\"SELECT item_id , w_price FROM sub_list WHERE", "finally: if (connection.is_connected()): cursor.close() connection.close() #查詢訂閱項目 def search_sub(user_id): try: connection", "connection.cursor() cursor.execute(\"SELECT item_id , w_price FROM sub_list WHERE user_id LIKE", "(connection.is_connected()): cursor.close() connection.close() #print(\"資料庫連線已關閉\") #新增訂閱項目 def add_item(item_id, user_id,w_price): try: connection", "= get_notify_id(user_id) status = sent_message(mage[1],acc_token) if status == 200: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send", "= [item[1] for item in sub_item] item_list = [item[0] for", "profile.display_name print(\"username = \",user_name) #帳號名稱 access_token = get_token(code) #取得access_token 發訊息給使用者的token", "'%s'\"%(user_id)) acc_token = cursor.fetchall() return acc_token[0][0] except Error as e:", "'您訂閱的項目有:' for i in range(len(item_list)): notice+='\\n' notice=notice + item_list[i] +'\\t'", "elif 'add' == mage[0]: try: notice = add_item(mage[1],user_id,mage[2]) except: notice", "= add_item(mage[1],user_id,mage[2]) except: notice = add_item(mage[1],user_id,None) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'del' ==", "cursor.close() connection.close() #print(\"資料庫連線已關閉\") #新增訂閱項目 def add_item(item_id, user_id,w_price): try: connection =", "print(\"資料庫連接失敗4:\", e) finally: if 
(connection.is_connected()): cursor.close() connection.close() #發送訊息 def sent_message(message,access_token):", "code, state, access_token)#存入資料庫 return '發送成功' else: return '發送失敗' #加好友時發送通知 @handler.add(FollowEvent)", "user_info\") # 列出查詢的資料 for i in cursor: print(i) except Error", "try: handler.handle(body, signature) except InvalidSignatureError: print(\"Invalid signature. Please check your", "= mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): db_Info =", "def send_test_message(access_token): headers = { \"Authorization\":\"Bearer \" + str(access_token), \"Content-Type\":\"application/x-www-form-urlencoded\",", "\"Content-Type\":\"application/x-www-form-urlencoded\", \"notificationDisabled\":\"True\" } params = { \"message\":\"\\n帳號連結成功\" } r =", "acc_token[0][0] except Error as e: print(\"資料庫連接失敗4:\", e) finally: if (connection.is_connected()):", "\"client_id\":\"client_id\", #notify client_id \"client_secret\":\"client_secret\" #notify client_secret } r = requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params)", "from flask import Flask, request, abort import mysql.connector as mariadb", "event.source.user_id print(\"user_id =\", user_id) profile = line_bot_api.get_profile(user_id) #notify註冊時會post至/register @app.route(\"/register\",methods=['POST']) #註冊事件", "methods=['POST']) def callback(): # get X-Line-Signature header value signature =", "cursor.fetchall() price_list = [item[1] for item in sub_item] item_list =", "line_bot_api.reply_message( event.reply_token, TextSendMessage(text=register_url)) elif 'add' == mage[0]: try: notice =", "= user_id 使用者id print(\"user_id = \",state) profile = line_bot_api.get_profile(state) user_name", "e: print(\"資料庫連接失敗2:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #刪除訂閱項目 def", "\"Authorization\":\"Bearer \" + access_token, \"Content-Type\":\"application/x-www-form-urlencoded\" } 
params = { \"message\":message", "line_bot_api.get_profile(user_id) #notify註冊時會post至/register @app.route(\"/register\",methods=['POST']) #註冊事件 def register(): if request.method == 'POST':", "INTO sub_list (item_id, w_price ,user_id, acc_token) VALUES ('%s','%d','%s','%s')\"%(item_id, int(w_price) ,user_id,", "LineBotApi('') handler = WebhookHandler('') @app.route(\"/\", methods=['GET']) def index(): return 'OK!'", "\"message\":message } r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) print(r.status_code) return r.status_code if __name__", "handle_follow(event): line_bot_api.reply_message( event.reply_token, TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\")) #拿使用者code向notify-bot post取得access_token def get_token(code): headers =", "user_id, access_token)) connection.commit() #存檔 cursor.execute(\"SELECT * FROM user_info\") # 列出查詢的資料", "user_id LIKE '%s'\"%(user_id)) acc_token = cursor.fetchall() return acc_token[0][0] except Error", "text body = request.get_data(as_text=True) app.logger.info(\"Request body: \" + body) #", "body as text body = request.get_data(as_text=True) app.logger.info(\"Request body: \" +", "requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) return r.status_code #使用者資料存入資料庫 def save_profile(username, code, user_id, access_token): try:", "#notify client_secret } r = requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params) source = json.loads(r.text) access_token", "#line 官方帳號 /callback測試Event @app.route(\"/callback\", methods=['POST']) def callback(): # get X-Line-Signature", "mariadb from mysql.connector import Error from linebot import ( LineBotApi,", "Error from linebot import ( LineBotApi, WebhookHandler ) from linebot.exceptions", "TextMessage, TextSendMessage, FollowEvent, ) app = Flask(__name__) line_bot_api = LineBotApi('')", "db_Info = connection.get_server_info() print(\"資料庫版本:\", db_Info) cursor = connection.cursor() 
cursor.execute(\"INSERT INTO", "port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"SELECT item_id", "port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"select database();\")", "import ( MessageEvent, TextMessage, TextSendMessage, FollowEvent, ) app = Flask(__name__)", "cursor.execute(\"select database();\") record = cursor.fetchone() cursor.execute(\"SELECT access_token FROM user_info WHERE", "return 'Add Done!' except Error as e: print(\"資料庫連接失敗2:\", e) finally:", "record = cursor.fetchone() cursor.execute(\"SELECT access_token FROM user_info WHERE user_id LIKE", "except Error as e: print(\"資料庫連接失敗1:\", e) finally: if (connection.is_connected()): cursor.close()", "signature = request.headers['X-Line-Signature'] # get request body as text body", "=\", user_id) profile = line_bot_api.get_profile(user_id) #notify註冊時會post至/register @app.route(\"/register\",methods=['POST']) #註冊事件 def register():", "item in sub_item] return item_list,price_list except Error as e: print(\"資料庫連接失敗1:\",", "sub_list (item_id, w_price ,user_id, acc_token) VALUES ('%s','%d','%s','%s')\"%(item_id, int(w_price) ,user_id, acc_token))", "print(\"code = \", code) state = request.form.get('state') #state = user_id", "print(\"資料庫連接失敗3:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #查詢訂閱項目 def search_sub(user_id):", "= \",access_token) r_code = send_test_message(access_token)#發測試通知 if r_code == 200: save_profile(user_name,", "print(\"user_id = \",state) profile = line_bot_api.get_profile(state) user_name = profile.display_name print(\"username", "import Flask, request, abort import mysql.connector as mariadb from mysql.connector", "sub_item] item_list = [item[0] for item in sub_item] return item_list,price_list", "# get X-Line-Signature header value signature = request.headers['X-Line-Signature'] # get", "body: \" 
+ body) # handle webhook body try: handler.handle(body,", "get user id when reply user_id = event.source.user_id print(\"user_id =\",", "'發送失敗' #加好友時發送通知 @handler.add(FollowEvent) def handle_follow(event): line_bot_api.reply_message( event.reply_token, TextSendMessage(text=\"感謝訂閱!請輸入\\\"註冊\\\"啟動服務。\")) #拿使用者code向notify-bot post取得access_token", "connection.close() #取得notify_access_token def get_notify_id(user_id): try: connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307',", "connection = mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor", "code, user_id, access_token) VALUES (null,'%s','%s','%s','%s')\"%(username, code, user_id, access_token)) connection.commit() #存檔", "print(\"資料庫版本:\", db_Info) cursor = connection.cursor() cursor.execute(\"INSERT INTO user_info (id, username,", "= requests.post('https://notify-bot.line.me/oauth/token',headers=headers,params=params) source = json.loads(r.text) access_token = source['access_token'] return access_token", "password='pw', database='line_notify') if connection.is_connected(): cursor = connection.cursor() cursor.execute(\"DELETE FROM sub_list", "connection.cursor() cursor.execute(\"DELETE FROM sub_list WHERE item_id = '%s' AND user_id", "callback(): # get X-Line-Signature header value signature = request.headers['X-Line-Signature'] #", "for i in range(len(item_list)): notice+='\\n' notice=notice + item_list[i] +'\\t' +str(price_list[i])", "token/channel secret.\") abort(400) return 'OK' #line官方帳號收到訊息時的Event @handler.add(MessageEvent, message=TextMessage) def handle_message(event):", "for item in sub_item] item_list = [item[0] for item in", "= mariadb.connect(host='192.168.1.10', user='admin', port='3307', password='pw', database='line_notify') if connection.is_connected(): cursor =", "Error as e: print(\"資料庫連接失敗1:\", e) finally: if (connection.is_connected()): cursor.close() connection.close()", 
"connection.close() #發送訊息 def sent_message(message,access_token): headers = { \"Authorization\":\"Bearer \" +", "get_notify_id(user_id) status = sent_message(mage[1],acc_token) if status == 200: line_bot_api.reply_message(event.reply_token,TextSendMessage(text='send OK!'))", "def get_token(code): headers = { \"Content-Type\":\"application/x-www-form-urlencoded\" } params = {", "r_code == 200: save_profile(user_name, code, state, access_token)#存入資料庫 return '發送成功' else:", "password='pw', database='line_notify') if connection.is_connected(): db_Info = connection.get_server_info() print(\"資料庫版本:\", db_Info) cursor", "notice = add_item(mage[1],user_id,None) line_bot_api.reply_message(event.reply_token,TextSendMessage(text=notice)) elif 'del' == mage[0]: notice =", "code = request.form.get('code') #拿code去要access_token print(\"code = \", code) state =", "connection.is_connected(): cursor = connection.cursor() cursor.execute(\"SELECT item_id , w_price FROM sub_list", "cursor.close() connection.close() #刪除訂閱項目 def del_item(item_id, user_id): try: connection = mariadb.connect(host='192.168.1.10',", "access_token)#存入資料庫 return '發送成功' else: return '發送失敗' #加好友時發送通知 @handler.add(FollowEvent) def handle_follow(event):", "message=TextMessage) def handle_message(event): get_message = event.message.text print(get_message) user_id = event.source.user_id", "LineBotApi, WebhookHandler ) from linebot.exceptions import ( InvalidSignatureError ) from", "r = requests.post(\"https://notify-api.line.me/api/notify\",headers=headers,params=params) print(r.status_code) return r.status_code if __name__ == \"__main__\":", "item in sub_item] item_list = [item[0] for item in sub_item]", "(connection.is_connected()): cursor.close() connection.close() #查詢訂閱項目 def search_sub(user_id): try: connection = mariadb.connect(host='192.168.1.10',", "使用者id print(\"user_id = \",state) profile = line_bot_api.get_profile(state) user_name = profile.display_name", "# In[ ]: import requests import json import re 
from", "cursor = connection.cursor() cursor.execute(\"select database();\") record = cursor.fetchone() cursor.execute(\"SELECT access_token", "item_list = [item[0] for item in sub_item] return item_list,price_list except", "e: print(\"資料庫連接失敗3:\", e) finally: if (connection.is_connected()): cursor.close() connection.close() #查詢訂閱項目 def" ]
[ "Model() country = Country(mod, 'CO') Household(country, 'HH') ConsolidatedGovernment(country, 'GOV') FixedMarginBusiness(country,", "'TAX', taxrate=.2) # At time period 25, cut spending to", "= Quick2DPlot([k, k], [spend, Def], title='Spending and Deficit', filename='intro_X_XX_multiplier_deficit.png', run_now=False)", "'DEBT-TO-GDP RATIO', '-100.*GOV__F/BUS__SUP_GOOD') mod.AddGlobalEquation('DEFICIT', 'DEFICIT', '-1.*GOV__INC') mod.EquationSolver.MaxTime = 40 mod.main()", "# At time period 25, cut spending to 17 (from", "Household(country, 'HH') ConsolidatedGovernment(country, 'GOV') FixedMarginBusiness(country, 'BUS', profit_margin=.025) Market(country, 'GOOD') Market(country,", "'DEFICIT', '-1.*GOV__INC') mod.EquationSolver.MaxTime = 40 mod.main() k = mod.GetTimeSeries('k') Rat", "sfc_models.examples.Quick2DPlot import Quick2DPlot register_standard_logs('output', __file__) mod = Model() country =", "FixedMarginBusiness(country, 'BUS', profit_margin=.025) Market(country, 'GOOD') Market(country, 'LAB') TaxFlow(country, 'TAX', taxrate=.2)", "= mod.GetTimeSeries('GOV__DEM_GOOD') p = Quick2DPlot([k, k], [spend, Def], title='Spending and", "from sfc_models.examples.Quick2DPlot import Quick2DPlot register_standard_logs('output', __file__) mod = Model() country", "= 40 mod.main() k = mod.GetTimeSeries('k') Rat = mod.GetTimeSeries('DEBT_GDP') Def", "mod.GetTimeSeries('k') Rat = mod.GetTimeSeries('DEBT_GDP') Def = mod.GetTimeSeries('GOV__INC') spend = mod.GetTimeSeries('GOV__DEM_GOOD')", "mod.main() k = mod.GetTimeSeries('k') Rat = mod.GetTimeSeries('DEBT_GDP') Def = mod.GetTimeSeries('GOV__INC')", "Quick2DPlot([k, k], [spend, Def], title='Spending and Deficit', filename='intro_X_XX_multiplier_deficit.png', run_now=False) p.Legend", "'GOV') FixedMarginBusiness(country, 'BUS', profit_margin=.025) Market(country, 'GOOD') Market(country, 'LAB') TaxFlow(country, 'TAX',", "# coding=utf-8 from sfc_models.objects import * from sfc_models.examples.Quick2DPlot import 
Quick2DPlot", "import * from sfc_models.examples.Quick2DPlot import Quick2DPlot register_standard_logs('output', __file__) mod =", "run_now=False) p.Legend = ['G', 'Deficit'] p.LegendPos = 'center left' p.DoPlot()", "to 17 (from 20) mod.AddExogenous('GOV', 'DEM_GOOD', [20.,]* 25 + [17.,]*20)", "title='Spending and Deficit', filename='intro_X_XX_multiplier_deficit.png', run_now=False) p.Legend = ['G', 'Deficit'] p.LegendPos", "'Deficit'] p.LegendPos = 'center left' p.DoPlot() Quick2DPlot(k, Rat, title='Debt-to-GDP Ratio',", "[17.,]*20) mod.AddGlobalEquation('DEBT_GDP', 'DEBT-TO-GDP RATIO', '-100.*GOV__F/BUS__SUP_GOOD') mod.AddGlobalEquation('DEFICIT', 'DEFICIT', '-1.*GOV__INC') mod.EquationSolver.MaxTime =", "17 (from 20) mod.AddExogenous('GOV', 'DEM_GOOD', [20.,]* 25 + [17.,]*20) mod.AddGlobalEquation('DEBT_GDP',", "'GOOD') Market(country, 'LAB') TaxFlow(country, 'TAX', taxrate=.2) # At time period", "<reponame>MachineLP/SFC_models<gh_stars>10-100 # coding=utf-8 from sfc_models.objects import * from sfc_models.examples.Quick2DPlot import", "[20.,]* 25 + [17.,]*20) mod.AddGlobalEquation('DEBT_GDP', 'DEBT-TO-GDP RATIO', '-100.*GOV__F/BUS__SUP_GOOD') mod.AddGlobalEquation('DEFICIT', 'DEFICIT',", "p.Legend = ['G', 'Deficit'] p.LegendPos = 'center left' p.DoPlot() Quick2DPlot(k,", "= Model() country = Country(mod, 'CO') Household(country, 'HH') ConsolidatedGovernment(country, 'GOV')", "profit_margin=.025) Market(country, 'GOOD') Market(country, 'LAB') TaxFlow(country, 'TAX', taxrate=.2) # At", "country = Country(mod, 'CO') Household(country, 'HH') ConsolidatedGovernment(country, 'GOV') FixedMarginBusiness(country, 'BUS',", "mod.GetTimeSeries('GOV__INC') spend = mod.GetTimeSeries('GOV__DEM_GOOD') p = Quick2DPlot([k, k], [spend, Def],", "Def = mod.GetTimeSeries('GOV__INC') spend = mod.GetTimeSeries('GOV__DEM_GOOD') p = Quick2DPlot([k, k],", "p.LegendPos = 'center left' p.DoPlot() Quick2DPlot(k, Rat, title='Debt-to-GDP Ratio', 
filename='intro_X_XX_multiplier_debt_gdp.png')", "spending to 17 (from 20) mod.AddExogenous('GOV', 'DEM_GOOD', [20.,]* 25 +", "'CO') Household(country, 'HH') ConsolidatedGovernment(country, 'GOV') FixedMarginBusiness(country, 'BUS', profit_margin=.025) Market(country, 'GOOD')", "'-1.*GOV__INC') mod.EquationSolver.MaxTime = 40 mod.main() k = mod.GetTimeSeries('k') Rat =", "= ['G', 'Deficit'] p.LegendPos = 'center left' p.DoPlot() Quick2DPlot(k, Rat,", "40 mod.main() k = mod.GetTimeSeries('k') Rat = mod.GetTimeSeries('DEBT_GDP') Def =", "cut spending to 17 (from 20) mod.AddExogenous('GOV', 'DEM_GOOD', [20.,]* 25", "taxrate=.2) # At time period 25, cut spending to 17", "= mod.GetTimeSeries('DEBT_GDP') Def = mod.GetTimeSeries('GOV__INC') spend = mod.GetTimeSeries('GOV__DEM_GOOD') p =", "[spend, Def], title='Spending and Deficit', filename='intro_X_XX_multiplier_deficit.png', run_now=False) p.Legend = ['G',", "mod.AddGlobalEquation('DEBT_GDP', 'DEBT-TO-GDP RATIO', '-100.*GOV__F/BUS__SUP_GOOD') mod.AddGlobalEquation('DEFICIT', 'DEFICIT', '-1.*GOV__INC') mod.EquationSolver.MaxTime = 40", "register_standard_logs('output', __file__) mod = Model() country = Country(mod, 'CO') Household(country,", "* from sfc_models.examples.Quick2DPlot import Quick2DPlot register_standard_logs('output', __file__) mod = Model()", "mod.AddExogenous('GOV', 'DEM_GOOD', [20.,]* 25 + [17.,]*20) mod.AddGlobalEquation('DEBT_GDP', 'DEBT-TO-GDP RATIO', '-100.*GOV__F/BUS__SUP_GOOD')", "Rat = mod.GetTimeSeries('DEBT_GDP') Def = mod.GetTimeSeries('GOV__INC') spend = mod.GetTimeSeries('GOV__DEM_GOOD') p", "25, cut spending to 17 (from 20) mod.AddExogenous('GOV', 'DEM_GOOD', [20.,]*", "mod.AddGlobalEquation('DEFICIT', 'DEFICIT', '-1.*GOV__INC') mod.EquationSolver.MaxTime = 40 mod.main() k = mod.GetTimeSeries('k')", "k], [spend, Def], title='Spending and Deficit', filename='intro_X_XX_multiplier_deficit.png', run_now=False) p.Legend =", "'HH') ConsolidatedGovernment(country, 'GOV') 
FixedMarginBusiness(country, 'BUS', profit_margin=.025) Market(country, 'GOOD') Market(country, 'LAB')", "and Deficit', filename='intro_X_XX_multiplier_deficit.png', run_now=False) p.Legend = ['G', 'Deficit'] p.LegendPos =", "import Quick2DPlot register_standard_logs('output', __file__) mod = Model() country = Country(mod,", "Market(country, 'LAB') TaxFlow(country, 'TAX', taxrate=.2) # At time period 25,", "Country(mod, 'CO') Household(country, 'HH') ConsolidatedGovernment(country, 'GOV') FixedMarginBusiness(country, 'BUS', profit_margin=.025) Market(country,", "Market(country, 'GOOD') Market(country, 'LAB') TaxFlow(country, 'TAX', taxrate=.2) # At time", "= Country(mod, 'CO') Household(country, 'HH') ConsolidatedGovernment(country, 'GOV') FixedMarginBusiness(country, 'BUS', profit_margin=.025)", "+ [17.,]*20) mod.AddGlobalEquation('DEBT_GDP', 'DEBT-TO-GDP RATIO', '-100.*GOV__F/BUS__SUP_GOOD') mod.AddGlobalEquation('DEFICIT', 'DEFICIT', '-1.*GOV__INC') mod.EquationSolver.MaxTime", "25 + [17.,]*20) mod.AddGlobalEquation('DEBT_GDP', 'DEBT-TO-GDP RATIO', '-100.*GOV__F/BUS__SUP_GOOD') mod.AddGlobalEquation('DEFICIT', 'DEFICIT', '-1.*GOV__INC')", "mod.EquationSolver.MaxTime = 40 mod.main() k = mod.GetTimeSeries('k') Rat = mod.GetTimeSeries('DEBT_GDP')", "ConsolidatedGovernment(country, 'GOV') FixedMarginBusiness(country, 'BUS', profit_margin=.025) Market(country, 'GOOD') Market(country, 'LAB') TaxFlow(country,", "(from 20) mod.AddExogenous('GOV', 'DEM_GOOD', [20.,]* 25 + [17.,]*20) mod.AddGlobalEquation('DEBT_GDP', 'DEBT-TO-GDP", "k = mod.GetTimeSeries('k') Rat = mod.GetTimeSeries('DEBT_GDP') Def = mod.GetTimeSeries('GOV__INC') spend", "spend = mod.GetTimeSeries('GOV__DEM_GOOD') p = Quick2DPlot([k, k], [spend, Def], title='Spending", "sfc_models.objects import * from sfc_models.examples.Quick2DPlot import Quick2DPlot register_standard_logs('output', __file__) mod", "__file__) mod = Model() country = Country(mod, 'CO') Household(country, 'HH')", 
"mod.GetTimeSeries('DEBT_GDP') Def = mod.GetTimeSeries('GOV__INC') spend = mod.GetTimeSeries('GOV__DEM_GOOD') p = Quick2DPlot([k,", "At time period 25, cut spending to 17 (from 20)", "= mod.GetTimeSeries('GOV__INC') spend = mod.GetTimeSeries('GOV__DEM_GOOD') p = Quick2DPlot([k, k], [spend,", "mod = Model() country = Country(mod, 'CO') Household(country, 'HH') ConsolidatedGovernment(country,", "p = Quick2DPlot([k, k], [spend, Def], title='Spending and Deficit', filename='intro_X_XX_multiplier_deficit.png',", "= mod.GetTimeSeries('k') Rat = mod.GetTimeSeries('DEBT_GDP') Def = mod.GetTimeSeries('GOV__INC') spend =", "Quick2DPlot register_standard_logs('output', __file__) mod = Model() country = Country(mod, 'CO')", "'-100.*GOV__F/BUS__SUP_GOOD') mod.AddGlobalEquation('DEFICIT', 'DEFICIT', '-1.*GOV__INC') mod.EquationSolver.MaxTime = 40 mod.main() k =", "'DEM_GOOD', [20.,]* 25 + [17.,]*20) mod.AddGlobalEquation('DEBT_GDP', 'DEBT-TO-GDP RATIO', '-100.*GOV__F/BUS__SUP_GOOD') mod.AddGlobalEquation('DEFICIT',", "Def], title='Spending and Deficit', filename='intro_X_XX_multiplier_deficit.png', run_now=False) p.Legend = ['G', 'Deficit']", "20) mod.AddExogenous('GOV', 'DEM_GOOD', [20.,]* 25 + [17.,]*20) mod.AddGlobalEquation('DEBT_GDP', 'DEBT-TO-GDP RATIO',", "'LAB') TaxFlow(country, 'TAX', taxrate=.2) # At time period 25, cut", "TaxFlow(country, 'TAX', taxrate=.2) # At time period 25, cut spending", "Deficit', filename='intro_X_XX_multiplier_deficit.png', run_now=False) p.Legend = ['G', 'Deficit'] p.LegendPos = 'center", "'BUS', profit_margin=.025) Market(country, 'GOOD') Market(country, 'LAB') TaxFlow(country, 'TAX', taxrate=.2) #", "RATIO', '-100.*GOV__F/BUS__SUP_GOOD') mod.AddGlobalEquation('DEFICIT', 'DEFICIT', '-1.*GOV__INC') mod.EquationSolver.MaxTime = 40 mod.main() k", "mod.GetTimeSeries('GOV__DEM_GOOD') p = Quick2DPlot([k, k], [spend, Def], title='Spending and Deficit',", "from sfc_models.objects import * from sfc_models.examples.Quick2DPlot import Quick2DPlot 
register_standard_logs('output', __file__)", "coding=utf-8 from sfc_models.objects import * from sfc_models.examples.Quick2DPlot import Quick2DPlot register_standard_logs('output',", "period 25, cut spending to 17 (from 20) mod.AddExogenous('GOV', 'DEM_GOOD',", "['G', 'Deficit'] p.LegendPos = 'center left' p.DoPlot() Quick2DPlot(k, Rat, title='Debt-to-GDP", "time period 25, cut spending to 17 (from 20) mod.AddExogenous('GOV',", "filename='intro_X_XX_multiplier_deficit.png', run_now=False) p.Legend = ['G', 'Deficit'] p.LegendPos = 'center left'" ]
[ "\"\"\" def __init__(self, user_settings=None, defaults=None): if user_settings: self._user_settings = user_settings", "user settings for key, value in data.items(): data[key] = import_callable(value)", "# Cache the result self._cached_attrs.add(attr) setattr(self, attr, val) return val", "attr = path_or_callable.rsplit('.', 1) return getattr(import_module(package), attr) class AuthSettings: \"\"\"", "list\" ) for path in validators: pkg, attr = path.rsplit(\".\",", "value in data.items(): data[key] = import_callable(value) return data def __getattr__(self,", "self._cached_attrs: delattr(self, attr) self._cached_attrs.clear() if hasattr(self, '_user_settings'): delattr(self, '_user_settings') app_settings", "'url'| 'pin' 'SERIALIZERS': { # 'SOCIAL_LOGIN_SERIALIZER': 'auth.social.serializers.DefaultSocialLoginSerializer', 'SIGNUP_SERIALIZER': 'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer', 'USERINFO_SERIALIZER':", "# eg: 'https://developers.google.com/oauthplayground' 'SOCIAL_AUTO_SIGNUP': False, # SIGN UP # 'SIGNUP_EMAIL_VERIFICATION':", "LOGINS 'SOCIAL_CALLBACK_URL': None, # eg: 'https://developers.google.com/oauthplayground' 'SOCIAL_AUTO_SIGNUP': False, # SIGN", "self.defaults[attr].copy() val.update(self.user_settings[attr]) except KeyError: # Fall back to defaults val", "attr) self._cached_attrs.clear() if hasattr(self, '_user_settings'): delattr(self, '_user_settings') app_settings = AuthSettings(None,", "getattr(import_module(package), attr) class AuthSettings: \"\"\" \"\"\" def __init__(self, user_settings=None, defaults=None):", "'SOCIAL_AUTO_SIGNUP': False, # SIGN UP # 'SIGNUP_EMAIL_VERIFICATION': 'none', # trimmed", "django.contrib.auth import get_user_model validators = self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\", None) if validators: ret", "import_callable(value) return data def __getattr__(self, attr): if attr not in", ") for path in validators: pkg, attr = path.rsplit(\".\", 1)", "@property def user_settings(self): 
if not hasattr(self, '_user_settings'): self._user_settings = getattr(settings,", "ImproperlyConfigured from django.contrib.auth import get_user_model validators = self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\", None) if", "expected to be a list\" ) for path in validators:", "}, 'SOCIALACCOUNT_MODEL': SOCIALACCOUNT_MODEL, 'SOCIALACCOUNT_ADMIN_CLASS': \"auth_framework.admin.SocialAccountAdmin\", # SOCIAL LOGINS 'SOCIAL_CALLBACK_URL': None,", "False, 'SIGNUP_USERNAME_VALIDATORS': [], 'USE_PASSWORD_TWICE_VALIDATION': True, # ADVANCES 'USE_PHONENUMBER_FIELD': False, 'USE_CELERY_EMAIL':", "# SOCIAL LOGINS 'SOCIAL_CALLBACK_URL': None, # eg: 'https://developers.google.com/oauthplayground' 'SOCIAL_AUTO_SIGNUP': False,", "path_or_callable else: assert isinstance(path_or_callable, str) package, attr = path_or_callable.rsplit('.', 1)", "def username_validators(self): from django.core.exceptions import ImproperlyConfigured from django.contrib.auth import get_user_model", "= self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\", None) if validators: ret = [] if not", "= getattr(import_module(pkg), attr) ret.append(validator()) else: ret = ( get_user_model()._meta.get_field('username').validators )", "settings from django.core.signals import setting_changed SOCIALACCOUNT_MODEL = getattr(settings, \"REST_AUTH_SOCIALACCOUNT_MODEL\", \"auth_framework.SocialAccount\")", "'%s'\" % attr) try: # Check if present in user", "assert isinstance(path_or_callable, str) package, attr = path_or_callable.rsplit('.', 1) return getattr(import_module(package),", "validator = getattr(import_module(pkg), attr) ret.append(validator()) else: ret = ( get_user_model()._meta.get_field('username').validators", "val.update(self.user_settings[attr]) except KeyError: # Fall back to defaults val =", "True, # ADVANCES 'USE_PHONENUMBER_FIELD': False, 'USE_CELERY_EMAIL': False, 'USE_ID_TOKEN': True, 'OAUTH_SAVE_ID_TOKEN':", "return ret def serializers(self, data): # Check if present in", "is None: return 
None if hasattr(path_or_callable, '__call__'): return path_or_callable else:", "in self.defaults: raise AttributeError(\"Invalid setting: '%s'\" % attr) try: #", "for key, value in data.items(): data[key] = import_callable(value) return data", "self.defaults[attr] if attr == 'SERIALIZERS': val = self.serializers(val) # Cache", "= getattr(settings, \"REST_AUTH_SOCIALACCOUNT_MODEL\", \"auth_framework.SocialAccount\") DEFAULTS = { 'UNIQUE_EMAIL': True, 'RESET_PASSWORD_BY':", "'SERIALIZERS': { # 'SOCIAL_LOGIN_SERIALIZER': 'auth.social.serializers.DefaultSocialLoginSerializer', 'SIGNUP_SERIALIZER': 'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer', 'USERINFO_SERIALIZER': None },", "'_user_settings') app_settings = AuthSettings(None, DEFAULTS) def reload_app_settings(*args, **kwargs): setting =", "= set() @property def user_settings(self): if not hasattr(self, '_user_settings'): self._user_settings", "SOCIALACCOUNT_MODEL = getattr(settings, \"REST_AUTH_SOCIALACCOUNT_MODEL\", \"auth_framework.SocialAccount\") DEFAULTS = { 'UNIQUE_EMAIL': True,", "# SIGN UP # 'SIGNUP_EMAIL_VERIFICATION': 'none', # trimmed out email", "if present in user settings val = self.user_settings[attr] if isinstance(val,", "( get_user_model()._meta.get_field('username').validators ) return ret def serializers(self, data): # Check", "result self._cached_attrs.add(attr) setattr(self, attr, val) return val def reload(self): for", "\"auth_framework.SocialAccount\") DEFAULTS = { 'UNIQUE_EMAIL': True, 'RESET_PASSWORD_BY': 'pin', # 'url'|", "SOCIALACCOUNT_MODEL, 'SOCIALACCOUNT_ADMIN_CLASS': \"auth_framework.admin.SocialAccountAdmin\", # SOCIAL LOGINS 'SOCIAL_CALLBACK_URL': None, # eg:", "Fall back to defaults val = self.defaults[attr] if attr ==", "usage 'SIGNUP_USERNAME_REQUIRED': False, 'SIGNUP_USERNAME_VALIDATORS': [], 'USE_PASSWORD_TWICE_VALIDATION': True, # ADVANCES 'USE_PHONENUMBER_FIELD':", "defaults=None): if user_settings: self._user_settings = user_settings self.defaults = 
defaults or", "closed source. fewer usage 'SIGNUP_USERNAME_REQUIRED': False, 'SIGNUP_USERNAME_VALIDATORS': [], 'USE_PASSWORD_TWICE_VALIDATION': True,", "True, 'RESET_PASSWORD_BY': 'pin', # 'url'| 'pin' 'SERIALIZERS': { # 'SOCIAL_LOGIN_SERIALIZER':", "== 'SERIALIZERS': val = self.serializers(val) # Cache the result self._cached_attrs.add(attr)", "attr not in self.defaults: raise AttributeError(\"Invalid setting: '%s'\" % attr)", "'SERIALIZERS': val = self.serializers(val) # Cache the result self._cached_attrs.add(attr) setattr(self,", "hasattr(self, '_user_settings'): delattr(self, '_user_settings') app_settings = AuthSettings(None, DEFAULTS) def reload_app_settings(*args,", "= [] if not isinstance(validators, list): raise ImproperlyConfigured( \"SIGNUP_USERNAME_VALIDATORS is", "Cache the result self._cached_attrs.add(attr) setattr(self, attr, val) return val def", "if validators: ret = [] if not isinstance(validators, list): raise", "'SOCIALACCOUNT_MODEL': SOCIALACCOUNT_MODEL, 'SOCIALACCOUNT_ADMIN_CLASS': \"auth_framework.admin.SocialAccountAdmin\", # SOCIAL LOGINS 'SOCIAL_CALLBACK_URL': None, #", "self._cached_attrs.add(attr) setattr(self, attr, val) return val def reload(self): for attr", "= ( get_user_model()._meta.get_field('username').validators ) return ret def serializers(self, data): #", "be a list\" ) for path in validators: pkg, attr", "user_settings(self): if not hasattr(self, '_user_settings'): self._user_settings = getattr(settings, 'AUTH_FRAMEWORK', {})", "str) package, attr = path_or_callable.rsplit('.', 1) return getattr(import_module(package), attr) class", "in closed source. 
fewer usage 'SIGNUP_USERNAME_REQUIRED': False, 'SIGNUP_USERNAME_VALIDATORS': [], 'USE_PASSWORD_TWICE_VALIDATION':", "settings for key, value in data.items(): data[key] = import_callable(value) return", ") return ret def serializers(self, data): # Check if present", "Check if present in user settings for key, value in", "import_callable(path_or_callable): if path_or_callable is None: return None if hasattr(path_or_callable, '__call__'):", "if hasattr(self, '_user_settings'): delattr(self, '_user_settings') app_settings = AuthSettings(None, DEFAULTS) def", "= defaults or DEFAULTS self._cached_attrs = set() @property def user_settings(self):", "to be a list\" ) for path in validators: pkg,", "self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\", None) if validators: ret = [] if not isinstance(validators,", "'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer', 'USERINFO_SERIALIZER': None }, 'SOCIALACCOUNT_MODEL': SOCIALACCOUNT_MODEL, 'SOCIALACCOUNT_ADMIN_CLASS': \"auth_framework.admin.SocialAccountAdmin\", # SOCIAL", "\"auth_framework.admin.SocialAccountAdmin\", # SOCIAL LOGINS 'SOCIAL_CALLBACK_URL': None, # eg: 'https://developers.google.com/oauthplayground' 'SOCIAL_AUTO_SIGNUP':", "False, 'USE_CELERY_EMAIL': False, 'USE_ID_TOKEN': True, 'OAUTH_SAVE_ID_TOKEN': False } def import_callable(path_or_callable):", "DEFAULTS self._cached_attrs = set() @property def user_settings(self): if not hasattr(self,", "verification celery task in closed source. 
fewer usage 'SIGNUP_USERNAME_REQUIRED': False,", "= self.defaults[attr].copy() val.update(self.user_settings[attr]) except KeyError: # Fall back to defaults", "'AUTH_FRAMEWORK', {}) return self._user_settings @property def username_validators(self): from django.core.exceptions import", "user_settings self.defaults = defaults or DEFAULTS self._cached_attrs = set() @property", "delattr(self, attr) self._cached_attrs.clear() if hasattr(self, '_user_settings'): delattr(self, '_user_settings') app_settings =", "if isinstance(val, dict): val = self.defaults[attr].copy() val.update(self.user_settings[attr]) except KeyError: #", "import get_user_model validators = self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\", None) if validators: ret =", "= user_settings self.defaults = defaults or DEFAULTS self._cached_attrs = set()", "None: return None if hasattr(path_or_callable, '__call__'): return path_or_callable else: assert", "'SOCIAL_LOGIN_SERIALIZER': 'auth.social.serializers.DefaultSocialLoginSerializer', 'SIGNUP_SERIALIZER': 'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer', 'USERINFO_SERIALIZER': None }, 'SOCIALACCOUNT_MODEL': SOCIALACCOUNT_MODEL, 'SOCIALACCOUNT_ADMIN_CLASS':", "{}) return self._user_settings @property def username_validators(self): from django.core.exceptions import ImproperlyConfigured", "# 'SOCIAL_LOGIN_SERIALIZER': 'auth.social.serializers.DefaultSocialLoginSerializer', 'SIGNUP_SERIALIZER': 'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer', 'USERINFO_SERIALIZER': None }, 'SOCIALACCOUNT_MODEL': SOCIALACCOUNT_MODEL,", "False, 'USE_ID_TOKEN': True, 'OAUTH_SAVE_ID_TOKEN': False } def import_callable(path_or_callable): if path_or_callable", "'SOCIAL_CALLBACK_URL': None, # eg: 'https://developers.google.com/oauthplayground' 'SOCIAL_AUTO_SIGNUP': False, # SIGN UP", "ret.append(validator()) else: ret = ( get_user_model()._meta.get_field('username').validators ) return ret def", "data.items(): data[key] = 
import_callable(value) return data def __getattr__(self, attr): if", "pkg, attr = path.rsplit(\".\", 1) validator = getattr(import_module(pkg), attr) ret.append(validator())", "getattr(settings, 'AUTH_FRAMEWORK', {}) return self._user_settings @property def username_validators(self): from django.core.exceptions", "def serializers(self, data): # Check if present in user settings", "present in user settings for key, value in data.items(): data[key]", "SOCIAL LOGINS 'SOCIAL_CALLBACK_URL': None, # eg: 'https://developers.google.com/oauthplayground' 'SOCIAL_AUTO_SIGNUP': False, #", "user_settings: self._user_settings = user_settings self.defaults = defaults or DEFAULTS self._cached_attrs", "} def import_callable(path_or_callable): if path_or_callable is None: return None if", "data[key] = import_callable(value) return data def __getattr__(self, attr): if attr", "settings val = self.user_settings[attr] if isinstance(val, dict): val = self.defaults[attr].copy()", "= self.user_settings[attr] if isinstance(val, dict): val = self.defaults[attr].copy() val.update(self.user_settings[attr]) except", "'_user_settings'): delattr(self, '_user_settings') app_settings = AuthSettings(None, DEFAULTS) def reload_app_settings(*args, **kwargs):", "# Check if present in user settings for key, value", "[] if not isinstance(validators, list): raise ImproperlyConfigured( \"SIGNUP_USERNAME_VALIDATORS is expected", "raise ImproperlyConfigured( \"SIGNUP_USERNAME_VALIDATORS is expected to be a list\" )", "'USE_ID_TOKEN': True, 'OAUTH_SAVE_ID_TOKEN': False } def import_callable(path_or_callable): if path_or_callable is", "attr == 'SERIALIZERS': val = self.serializers(val) # Cache the result", "# Check if present in user settings val = self.user_settings[attr]", "def __getattr__(self, attr): if attr not in self.defaults: raise AttributeError(\"Invalid", "in self._cached_attrs: delattr(self, attr) self._cached_attrs.clear() if hasattr(self, '_user_settings'): delattr(self, '_user_settings')", 
"class AuthSettings: \"\"\" \"\"\" def __init__(self, user_settings=None, defaults=None): if user_settings:", "from django.contrib.auth import get_user_model validators = self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\", None) if validators:", "% attr) try: # Check if present in user settings", "= AuthSettings(None, DEFAULTS) def reload_app_settings(*args, **kwargs): setting = kwargs['setting'] if", "source. fewer usage 'SIGNUP_USERNAME_REQUIRED': False, 'SIGNUP_USERNAME_VALIDATORS': [], 'USE_PASSWORD_TWICE_VALIDATION': True, #", "in validators: pkg, attr = path.rsplit(\".\", 1) validator = getattr(import_module(pkg),", "try: # Check if present in user settings val =", "set() @property def user_settings(self): if not hasattr(self, '_user_settings'): self._user_settings =", "import setting_changed SOCIALACCOUNT_MODEL = getattr(settings, \"REST_AUTH_SOCIALACCOUNT_MODEL\", \"auth_framework.SocialAccount\") DEFAULTS = {", "celery task in closed source. fewer usage 'SIGNUP_USERNAME_REQUIRED': False, 'SIGNUP_USERNAME_VALIDATORS':", "not hasattr(self, '_user_settings'): self._user_settings = getattr(settings, 'AUTH_FRAMEWORK', {}) return self._user_settings", "return path_or_callable else: assert isinstance(path_or_callable, str) package, attr = path_or_callable.rsplit('.',", "user settings val = self.user_settings[attr] if isinstance(val, dict): val =", "'_user_settings'): self._user_settings = getattr(settings, 'AUTH_FRAMEWORK', {}) return self._user_settings @property def", "defaults val = self.defaults[attr] if attr == 'SERIALIZERS': val =", "'pin' 'SERIALIZERS': { # 'SOCIAL_LOGIN_SERIALIZER': 'auth.social.serializers.DefaultSocialLoginSerializer', 'SIGNUP_SERIALIZER': 'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer', 'USERINFO_SERIALIZER': None", "for path in validators: pkg, attr = path.rsplit(\".\", 1) validator", "if user_settings: self._user_settings = user_settings self.defaults = defaults or DEFAULTS", "present in user settings val = 
self.user_settings[attr] if isinstance(val, dict):", "val = self.defaults[attr].copy() val.update(self.user_settings[attr]) except KeyError: # Fall back to", "= path.rsplit(\".\", 1) validator = getattr(import_module(pkg), attr) ret.append(validator()) else: ret", "def __init__(self, user_settings=None, defaults=None): if user_settings: self._user_settings = user_settings self.defaults", "'SIGNUP_USERNAME_REQUIRED': False, 'SIGNUP_USERNAME_VALIDATORS': [], 'USE_PASSWORD_TWICE_VALIDATION': True, # ADVANCES 'USE_PHONENUMBER_FIELD': False,", "reload(self): for attr in self._cached_attrs: delattr(self, attr) self._cached_attrs.clear() if hasattr(self,", "data): # Check if present in user settings for key,", "None) if validators: ret = [] if not isinstance(validators, list):", "a list\" ) for path in validators: pkg, attr =", "'none', # trimmed out email verification celery task in closed", "{ 'UNIQUE_EMAIL': True, 'RESET_PASSWORD_BY': 'pin', # 'url'| 'pin' 'SERIALIZERS': {", "None }, 'SOCIALACCOUNT_MODEL': SOCIALACCOUNT_MODEL, 'SOCIALACCOUNT_ADMIN_CLASS': \"auth_framework.admin.SocialAccountAdmin\", # SOCIAL LOGINS 'SOCIAL_CALLBACK_URL':", "from importlib import import_module from django.conf import settings from django.core.signals", "'SIGNUP_USERNAME_VALIDATORS': [], 'USE_PASSWORD_TWICE_VALIDATION': True, # ADVANCES 'USE_PHONENUMBER_FIELD': False, 'USE_CELERY_EMAIL': False,", "self.defaults = defaults or DEFAULTS self._cached_attrs = set() @property def", "__init__(self, user_settings=None, defaults=None): if user_settings: self._user_settings = user_settings self.defaults =", "validators: pkg, attr = path.rsplit(\".\", 1) validator = getattr(import_module(pkg), attr)", "return data def __getattr__(self, attr): if attr not in self.defaults:", "__getattr__(self, attr): if attr not in self.defaults: raise AttributeError(\"Invalid setting:", "'RESET_PASSWORD_BY': 'pin', # 'url'| 'pin' 'SERIALIZERS': { # 'SOCIAL_LOGIN_SERIALIZER': 
'auth.social.serializers.DefaultSocialLoginSerializer',", "raise AttributeError(\"Invalid setting: '%s'\" % attr) try: # Check if", "val) return val def reload(self): for attr in self._cached_attrs: delattr(self,", "AuthSettings(None, DEFAULTS) def reload_app_settings(*args, **kwargs): setting = kwargs['setting'] if setting", "DEFAULTS) def reload_app_settings(*args, **kwargs): setting = kwargs['setting'] if setting ==", "django.conf import settings from django.core.signals import setting_changed SOCIALACCOUNT_MODEL = getattr(settings,", "task in closed source. fewer usage 'SIGNUP_USERNAME_REQUIRED': False, 'SIGNUP_USERNAME_VALIDATORS': [],", "self._user_settings = user_settings self.defaults = defaults or DEFAULTS self._cached_attrs =", "'SOCIALACCOUNT_ADMIN_CLASS': \"auth_framework.admin.SocialAccountAdmin\", # SOCIAL LOGINS 'SOCIAL_CALLBACK_URL': None, # eg: 'https://developers.google.com/oauthplayground'", "get_user_model validators = self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\", None) if validators: ret = []", "key, value in data.items(): data[key] = import_callable(value) return data def", "in user settings val = self.user_settings[attr] if isinstance(val, dict): val", "# ADVANCES 'USE_PHONENUMBER_FIELD': False, 'USE_CELERY_EMAIL': False, 'USE_ID_TOKEN': True, 'OAUTH_SAVE_ID_TOKEN': False", "import ImproperlyConfigured from django.contrib.auth import get_user_model validators = self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\", None)", "val = self.user_settings[attr] if isinstance(val, dict): val = self.defaults[attr].copy() val.update(self.user_settings[attr])", "1) validator = getattr(import_module(pkg), attr) ret.append(validator()) else: ret = (", "except KeyError: # Fall back to defaults val = self.defaults[attr]", "'SIGNUP_EMAIL_VERIFICATION': 'none', # trimmed out email verification celery task in", "out email verification celery task in closed source. 
fewer usage", "list): raise ImproperlyConfigured( \"SIGNUP_USERNAME_VALIDATORS is expected to be a list\"", "= { 'UNIQUE_EMAIL': True, 'RESET_PASSWORD_BY': 'pin', # 'url'| 'pin' 'SERIALIZERS':", "django.core.exceptions import ImproperlyConfigured from django.contrib.auth import get_user_model validators = self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\",", "'SIGNUP_SERIALIZER': 'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer', 'USERINFO_SERIALIZER': None }, 'SOCIALACCOUNT_MODEL': SOCIALACCOUNT_MODEL, 'SOCIALACCOUNT_ADMIN_CLASS': \"auth_framework.admin.SocialAccountAdmin\", #", "def import_callable(path_or_callable): if path_or_callable is None: return None if hasattr(path_or_callable,", "email verification celery task in closed source. fewer usage 'SIGNUP_USERNAME_REQUIRED':", "if hasattr(path_or_callable, '__call__'): return path_or_callable else: assert isinstance(path_or_callable, str) package,", "self._user_settings = getattr(settings, 'AUTH_FRAMEWORK', {}) return self._user_settings @property def username_validators(self):", "'USE_CELERY_EMAIL': False, 'USE_ID_TOKEN': True, 'OAUTH_SAVE_ID_TOKEN': False } def import_callable(path_or_callable): if", "validators: ret = [] if not isinstance(validators, list): raise ImproperlyConfigured(", "else: ret = ( get_user_model()._meta.get_field('username').validators ) return ret def serializers(self,", "val = self.defaults[attr] if attr == 'SERIALIZERS': val = self.serializers(val)", "self.serializers(val) # Cache the result self._cached_attrs.add(attr) setattr(self, attr, val) return", "def reload(self): for attr in self._cached_attrs: delattr(self, attr) self._cached_attrs.clear() if", "reload_app_settings(*args, **kwargs): setting = kwargs['setting'] if setting == 'AUTH_FRAMEWORK': app_settings.reload()", "False } def import_callable(path_or_callable): if path_or_callable is None: return None", "return self._user_settings @property def username_validators(self): from django.core.exceptions 
import ImproperlyConfigured from", "setting_changed SOCIALACCOUNT_MODEL = getattr(settings, \"REST_AUTH_SOCIALACCOUNT_MODEL\", \"auth_framework.SocialAccount\") DEFAULTS = { 'UNIQUE_EMAIL':", "None if hasattr(path_or_callable, '__call__'): return path_or_callable else: assert isinstance(path_or_callable, str)", "package, attr = path_or_callable.rsplit('.', 1) return getattr(import_module(package), attr) class AuthSettings:", "self.user_settings[attr] if isinstance(val, dict): val = self.defaults[attr].copy() val.update(self.user_settings[attr]) except KeyError:", "'OAUTH_SAVE_ID_TOKEN': False } def import_callable(path_or_callable): if path_or_callable is None: return", "if present in user settings for key, value in data.items():", "or DEFAULTS self._cached_attrs = set() @property def user_settings(self): if not", "attr in self._cached_attrs: delattr(self, attr) self._cached_attrs.clear() if hasattr(self, '_user_settings'): delattr(self,", "\"\"\" \"\"\" def __init__(self, user_settings=None, defaults=None): if user_settings: self._user_settings =", "SIGN UP # 'SIGNUP_EMAIL_VERIFICATION': 'none', # trimmed out email verification", "user_settings=None, defaults=None): if user_settings: self._user_settings = user_settings self.defaults = defaults", "data def __getattr__(self, attr): if attr not in self.defaults: raise", "django.core.signals import setting_changed SOCIALACCOUNT_MODEL = getattr(settings, \"REST_AUTH_SOCIALACCOUNT_MODEL\", \"auth_framework.SocialAccount\") DEFAULTS =", "if path_or_callable is None: return None if hasattr(path_or_callable, '__call__'): return", "path_or_callable.rsplit('.', 1) return getattr(import_module(package), attr) class AuthSettings: \"\"\" \"\"\" def", "setting: '%s'\" % attr) try: # Check if present in", "username_validators(self): from django.core.exceptions import ImproperlyConfigured from django.contrib.auth import get_user_model validators", "importlib import import_module from django.conf import settings from 
django.core.signals import", "**kwargs): setting = kwargs['setting'] if setting == 'AUTH_FRAMEWORK': app_settings.reload() setting_changed.connect(reload_app_settings)", "# trimmed out email verification celery task in closed source.", "isinstance(path_or_callable, str) package, attr = path_or_callable.rsplit('.', 1) return getattr(import_module(package), attr)", "ImproperlyConfigured( \"SIGNUP_USERNAME_VALIDATORS is expected to be a list\" ) for", "self.defaults: raise AttributeError(\"Invalid setting: '%s'\" % attr) try: # Check", "from django.core.exceptions import ImproperlyConfigured from django.contrib.auth import get_user_model validators =", "return None if hasattr(path_or_callable, '__call__'): return path_or_callable else: assert isinstance(path_or_callable,", "import_module from django.conf import settings from django.core.signals import setting_changed SOCIALACCOUNT_MODEL", "return val def reload(self): for attr in self._cached_attrs: delattr(self, attr)", "trimmed out email verification celery task in closed source. 
fewer", "= import_callable(value) return data def __getattr__(self, attr): if attr not", "# 'SIGNUP_EMAIL_VERIFICATION': 'none', # trimmed out email verification celery task", "to defaults val = self.defaults[attr] if attr == 'SERIALIZERS': val", "eg: 'https://developers.google.com/oauthplayground' 'SOCIAL_AUTO_SIGNUP': False, # SIGN UP # 'SIGNUP_EMAIL_VERIFICATION': 'none',", "self._cached_attrs = set() @property def user_settings(self): if not hasattr(self, '_user_settings'):", "attr = path.rsplit(\".\", 1) validator = getattr(import_module(pkg), attr) ret.append(validator()) else:", "# 'url'| 'pin' 'SERIALIZERS': { # 'SOCIAL_LOGIN_SERIALIZER': 'auth.social.serializers.DefaultSocialLoginSerializer', 'SIGNUP_SERIALIZER': 'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer',", "if attr == 'SERIALIZERS': val = self.serializers(val) # Cache the", "back to defaults val = self.defaults[attr] if attr == 'SERIALIZERS':", "Check if present in user settings val = self.user_settings[attr] if", "'UNIQUE_EMAIL': True, 'RESET_PASSWORD_BY': 'pin', # 'url'| 'pin' 'SERIALIZERS': { #", "validators = self.user_settings.get(\"SIGNUP_USERNAME_VALIDATORS\", None) if validators: ret = [] if", "val def reload(self): for attr in self._cached_attrs: delattr(self, attr) self._cached_attrs.clear()", "UP # 'SIGNUP_EMAIL_VERIFICATION': 'none', # trimmed out email verification celery", "delattr(self, '_user_settings') app_settings = AuthSettings(None, DEFAULTS) def reload_app_settings(*args, **kwargs): setting", "attr): if attr not in self.defaults: raise AttributeError(\"Invalid setting: '%s'\"", "attr) try: # Check if present in user settings val", "app_settings = AuthSettings(None, DEFAULTS) def reload_app_settings(*args, **kwargs): setting = kwargs['setting']", "hasattr(self, '_user_settings'): self._user_settings = getattr(settings, 'AUTH_FRAMEWORK', {}) return self._user_settings @property", "path.rsplit(\".\", 1) validator = getattr(import_module(pkg), attr) 
ret.append(validator()) else: ret =", "getattr(import_module(pkg), attr) ret.append(validator()) else: ret = ( get_user_model()._meta.get_field('username').validators ) return", "not in self.defaults: raise AttributeError(\"Invalid setting: '%s'\" % attr) try:", "path in validators: pkg, attr = path.rsplit(\".\", 1) validator =", "AttributeError(\"Invalid setting: '%s'\" % attr) try: # Check if present", "[], 'USE_PASSWORD_TWICE_VALIDATION': True, # ADVANCES 'USE_PHONENUMBER_FIELD': False, 'USE_CELERY_EMAIL': False, 'USE_ID_TOKEN':", "True, 'OAUTH_SAVE_ID_TOKEN': False } def import_callable(path_or_callable): if path_or_callable is None:", "isinstance(validators, list): raise ImproperlyConfigured( \"SIGNUP_USERNAME_VALIDATORS is expected to be a", "from django.core.signals import setting_changed SOCIALACCOUNT_MODEL = getattr(settings, \"REST_AUTH_SOCIALACCOUNT_MODEL\", \"auth_framework.SocialAccount\") DEFAULTS", "def user_settings(self): if not hasattr(self, '_user_settings'): self._user_settings = getattr(settings, 'AUTH_FRAMEWORK',", "self._cached_attrs.clear() if hasattr(self, '_user_settings'): delattr(self, '_user_settings') app_settings = AuthSettings(None, DEFAULTS)", "ret = ( get_user_model()._meta.get_field('username').validators ) return ret def serializers(self, data):", "# Fall back to defaults val = self.defaults[attr] if attr", "= self.defaults[attr] if attr == 'SERIALIZERS': val = self.serializers(val) #", "in data.items(): data[key] = import_callable(value) return data def __getattr__(self, attr):", "= path_or_callable.rsplit('.', 1) return getattr(import_module(package), attr) class AuthSettings: \"\"\" \"\"\"", "attr) class AuthSettings: \"\"\" \"\"\" def __init__(self, user_settings=None, defaults=None): if", "ret def serializers(self, data): # Check if present in user", "isinstance(val, dict): val = self.defaults[attr].copy() val.update(self.user_settings[attr]) except KeyError: # Fall", "AuthSettings: \"\"\" \"\"\" def __init__(self, 
user_settings=None, defaults=None): if user_settings: self._user_settings", "'https://developers.google.com/oauthplayground' 'SOCIAL_AUTO_SIGNUP': False, # SIGN UP # 'SIGNUP_EMAIL_VERIFICATION': 'none', #", "ADVANCES 'USE_PHONENUMBER_FIELD': False, 'USE_CELERY_EMAIL': False, 'USE_ID_TOKEN': True, 'OAUTH_SAVE_ID_TOKEN': False }", "None, # eg: 'https://developers.google.com/oauthplayground' 'SOCIAL_AUTO_SIGNUP': False, # SIGN UP #", "'__call__'): return path_or_callable else: assert isinstance(path_or_callable, str) package, attr =", "the result self._cached_attrs.add(attr) setattr(self, attr, val) return val def reload(self):", "val = self.serializers(val) # Cache the result self._cached_attrs.add(attr) setattr(self, attr,", "{ # 'SOCIAL_LOGIN_SERIALIZER': 'auth.social.serializers.DefaultSocialLoginSerializer', 'SIGNUP_SERIALIZER': 'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer', 'USERINFO_SERIALIZER': None }, 'SOCIALACCOUNT_MODEL':", "'auth.social.serializers.DefaultSocialLoginSerializer', 'SIGNUP_SERIALIZER': 'auth_framework.serializers.signup_serializers.DefaultSignUpSerializer', 'USERINFO_SERIALIZER': None }, 'SOCIALACCOUNT_MODEL': SOCIALACCOUNT_MODEL, 'SOCIALACCOUNT_ADMIN_CLASS': \"auth_framework.admin.SocialAccountAdmin\",", "DEFAULTS = { 'UNIQUE_EMAIL': True, 'RESET_PASSWORD_BY': 'pin', # 'url'| 'pin'", "import settings from django.core.signals import setting_changed SOCIALACCOUNT_MODEL = getattr(settings, \"REST_AUTH_SOCIALACCOUNT_MODEL\",", "return getattr(import_module(package), attr) class AuthSettings: \"\"\" \"\"\" def __init__(self, user_settings=None,", "get_user_model()._meta.get_field('username').validators ) return ret def serializers(self, data): # Check if", "if not isinstance(validators, list): raise ImproperlyConfigured( \"SIGNUP_USERNAME_VALIDATORS is expected to", "attr) ret.append(validator()) else: ret = ( get_user_model()._meta.get_field('username').validators ) return ret", "attr, val) return val def reload(self): 
for attr in self._cached_attrs:", "'USERINFO_SERIALIZER': None }, 'SOCIALACCOUNT_MODEL': SOCIALACCOUNT_MODEL, 'SOCIALACCOUNT_ADMIN_CLASS': \"auth_framework.admin.SocialAccountAdmin\", # SOCIAL LOGINS", "for attr in self._cached_attrs: delattr(self, attr) self._cached_attrs.clear() if hasattr(self, '_user_settings'):", "if not hasattr(self, '_user_settings'): self._user_settings = getattr(settings, 'AUTH_FRAMEWORK', {}) return", "if attr not in self.defaults: raise AttributeError(\"Invalid setting: '%s'\" %", "self._user_settings @property def username_validators(self): from django.core.exceptions import ImproperlyConfigured from django.contrib.auth", "import import_module from django.conf import settings from django.core.signals import setting_changed", "= getattr(settings, 'AUTH_FRAMEWORK', {}) return self._user_settings @property def username_validators(self): from", "1) return getattr(import_module(package), attr) class AuthSettings: \"\"\" \"\"\" def __init__(self,", "ret = [] if not isinstance(validators, list): raise ImproperlyConfigured( \"SIGNUP_USERNAME_VALIDATORS", "in user settings for key, value in data.items(): data[key] =", "is expected to be a list\" ) for path in", "serializers(self, data): # Check if present in user settings for", "dict): val = self.defaults[attr].copy() val.update(self.user_settings[attr]) except KeyError: # Fall back", "defaults or DEFAULTS self._cached_attrs = set() @property def user_settings(self): if", "not isinstance(validators, list): raise ImproperlyConfigured( \"SIGNUP_USERNAME_VALIDATORS is expected to be", "'USE_PHONENUMBER_FIELD': False, 'USE_CELERY_EMAIL': False, 'USE_ID_TOKEN': True, 'OAUTH_SAVE_ID_TOKEN': False } def", "\"SIGNUP_USERNAME_VALIDATORS is expected to be a list\" ) for path", "KeyError: # Fall back to defaults val = self.defaults[attr] if", "setattr(self, attr, val) return val def reload(self): for attr in", "from django.conf import settings from django.core.signals import setting_changed 
SOCIALACCOUNT_MODEL =", "\"REST_AUTH_SOCIALACCOUNT_MODEL\", \"auth_framework.SocialAccount\") DEFAULTS = { 'UNIQUE_EMAIL': True, 'RESET_PASSWORD_BY': 'pin', #", "getattr(settings, \"REST_AUTH_SOCIALACCOUNT_MODEL\", \"auth_framework.SocialAccount\") DEFAULTS = { 'UNIQUE_EMAIL': True, 'RESET_PASSWORD_BY': 'pin',", "False, # SIGN UP # 'SIGNUP_EMAIL_VERIFICATION': 'none', # trimmed out", "path_or_callable is None: return None if hasattr(path_or_callable, '__call__'): return path_or_callable", "'USE_PASSWORD_TWICE_VALIDATION': True, # ADVANCES 'USE_PHONENUMBER_FIELD': False, 'USE_CELERY_EMAIL': False, 'USE_ID_TOKEN': True,", "@property def username_validators(self): from django.core.exceptions import ImproperlyConfigured from django.contrib.auth import", "'pin', # 'url'| 'pin' 'SERIALIZERS': { # 'SOCIAL_LOGIN_SERIALIZER': 'auth.social.serializers.DefaultSocialLoginSerializer', 'SIGNUP_SERIALIZER':", "= self.serializers(val) # Cache the result self._cached_attrs.add(attr) setattr(self, attr, val)", "fewer usage 'SIGNUP_USERNAME_REQUIRED': False, 'SIGNUP_USERNAME_VALIDATORS': [], 'USE_PASSWORD_TWICE_VALIDATION': True, # ADVANCES", "else: assert isinstance(path_or_callable, str) package, attr = path_or_callable.rsplit('.', 1) return", "hasattr(path_or_callable, '__call__'): return path_or_callable else: assert isinstance(path_or_callable, str) package, attr", "def reload_app_settings(*args, **kwargs): setting = kwargs['setting'] if setting == 'AUTH_FRAMEWORK':" ]
[ "models.SlugField() counter = models.IntegerField(default=0) def __str__(self): return \"%s -- %s\"", "models.IntegerField(default=0) def __str__(self): return \"%s -- %s\" % (self.long_url, self.short_id)", "<filename>shorty/models.py from django.db import models from shorty.manager import UrlManager class", "Url(models.Model): long_url = models.URLField() short_id = models.SlugField() counter = models.IntegerField(default=0)", "= models.URLField() short_id = models.SlugField() counter = models.IntegerField(default=0) def __str__(self):", "django.db import models from shorty.manager import UrlManager class Url(models.Model): long_url", "import models from shorty.manager import UrlManager class Url(models.Model): long_url =", "short_id = models.SlugField() counter = models.IntegerField(default=0) def __str__(self): return \"%s", "import UrlManager class Url(models.Model): long_url = models.URLField() short_id = models.SlugField()", "from django.db import models from shorty.manager import UrlManager class Url(models.Model):", "class Url(models.Model): long_url = models.URLField() short_id = models.SlugField() counter =", "= models.SlugField() counter = models.IntegerField(default=0) def __str__(self): return \"%s --", "shorty.manager import UrlManager class Url(models.Model): long_url = models.URLField() short_id =", "from shorty.manager import UrlManager class Url(models.Model): long_url = models.URLField() short_id", "models from shorty.manager import UrlManager class Url(models.Model): long_url = models.URLField()", "long_url = models.URLField() short_id = models.SlugField() counter = models.IntegerField(default=0) def", "def __str__(self): return \"%s -- %s\" % (self.long_url, self.short_id) objects", "__str__(self): return \"%s -- %s\" % (self.long_url, self.short_id) objects =", "= models.IntegerField(default=0) def __str__(self): return \"%s -- %s\" % (self.long_url,", "return \"%s -- %s\" % (self.long_url, self.short_id) objects = UrlManager()", "counter = 
models.IntegerField(default=0) def __str__(self): return \"%s -- %s\" %", "models.URLField() short_id = models.SlugField() counter = models.IntegerField(default=0) def __str__(self): return", "UrlManager class Url(models.Model): long_url = models.URLField() short_id = models.SlugField() counter" ]
[ "only) User is allowed to create query (Create for \"query\"", "'password', 'queries', 'realname', 'room', 'subst_active', 'substitute', 'timezone', 'tt_lines') only) User", "'actor', 'creation', 'creator', 'deputy', 'description', 'id', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation',", "View on (View for \"user_status\": ('name',) only) User is allowed", "'category', 'cc', 'cc_emails', 'classification', 'closed', 'confidential', 'customer', 'emails', 'execution', 'external_ref',", "supervisor or the person to whom approvals are delegated (View", "to access customer (View for \"customer\" only) User is allowed", "may edit time_records owned by user (View for \"time_record\" only)", "(View for \"ext_tracker_type\" only) User is allowed to access functional_role", "\"ext_tracker_type\" only) User is allowed to access functional_role (View for", "is allowed to view their own messages (View for \"msg\"", "to access severity (View for \"severity\" only) User is allowed", "(Search for \"user\": ('id', 'nickname', 'username') only) External users are", "'name') only) User is allowed View on (View for \"user\":", "\"absence\" only) User is allowed to edit absence_type (Edit for", "\"msg\": ('date', 'id') only) User is allowed Edit on (Edit", "is allowed to edit ext_tracker (Edit for \"ext_tracker\" only) User", "\"leave_submission\" only) User is allowed to access vacation_correction (View for", "\"query\" only) User is allowed to retire their queries (Retire", "auto_wp (Create for \"auto_wp\" only) User is allowed to create", "'is_extern', 'is_public', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no')", "'priority', 'release', 'responsible', 'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title'] only)", "'test_level', 'title'] only) External users are allowed to access issue", "own and public queries for classes where they have search", "in the domain_permission for the user 
(Edit for \"user\": ['room']", "(Nosy for \"it_project\" only) User may get nosy messages for", "of the time_records for that day (View for \"daily_record\" only)", "('activity', 'actor', 'auto_wp', 'bookers', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key',", "doc (Create for \"doc\" only) User is allowed to create", "leave_submission (Create for \"leave_submission\" only) User is allowed to create", "for \"category\": ('nosy', 'default_part_of') only) User is allowed to edit", "status (Edit for \"status\" only) User is allowed to edit", "for \"time_record\" only) Role \"hr-vacation\": User is allowed to access", "booking is allowed for this user (also applies to timetracking", "only) User is allowed to create absence (Create for \"absence\"", "domain (View for \"user_dynamic\" only) User is allowed to access", "they are on the list of allowed external users or", "allowed to edit product_family (Edit for \"product_family\" only) User is", "to edit user_contact (Edit for \"user_contact\" only) Users may view/edit", "is on nosy list (View for \"it_project\" only) User is", "role 'HR' or 'Controlling', or the user is supervisor or", "to edit it_int_prio (Edit for \"it_int_prio\" only) User is allowed", "is on nosy list (View for \"issue\" only) User is", "\"it_int_prio\" only) User is allowed to edit it_issue (Edit for", "User is allowed to access sup_warranty (View for \"sup_warranty\" only)", "for \"doc\" only) User is allowed to create doc_category (Create", "Org-Location as the given user (View for \"daily_record_freeze\" only) User", "(View for \"location\" only) User is allowed to access org_location", "(Edit for \"daily_record\": ('status', 'time_record') only) User is allowed Edit", "if not frozen in validity span of dynamic user record", "only) User is allowed to access org_group (View for \"org_group\"", "owner or project responsible/deputy (Edit for \"time_wp\": ('bookers', 'description', 'epic_key',", "(View for 
\"sap_cc\" only) (View for \"time_project\" only) User is", "\"safety_level\" only) User is allowed to access sap_cc (View for", "User is allowed to edit it_int_prio (Edit for \"it_int_prio\" only)", "on (View for \"user\": ('business_responsible', 'planning_role', 'scale_seniority') only) User is", "search user_status (Search for \"user\": ('status',) only) User is allowed", "(Edit for \"doc_category\" only) User is allowed to edit doc_status", "\"User\" Role \"admin\": User may access the rest interface (Rest", "User may access the web interface (Web Access) Role \"cc-permission\":", "only) (Retire for \"room\" only) User is allowed Edit on", "'business_unit', 'category', 'cc', 'cc_emails', 'classification', 'closed', 'confidential', 'customer', 'emails', 'execution',", "only) User is allowed to access it_issue_status (View for \"it_issue_status\"", "their queries (Search for \"query\" only) User is allowed to", "('deadline', 'status', 'title') only) User is allowed to edit their", "allowed to access contract_type (View for \"contract_type\" only) User is", "span of dynamic user record (Edit for \"user_dynamic\" only) User", "\"dom-user-edit-office\": User is allowed to create user_contact (Create for \"user_contact\"", "(Edit for \"cost_center_status\" only) User is allowed to edit department", "to edit time_project_status (Edit for \"time_project_status\" only) User is allowed", "\"msg_keyword\" only) User is allowed to access org_group (View for", "security = \"\"\" New Web users get the Roles \"User,Nosy\"", "or if user is department manager of time category (View", "only) User is allowed to access doc_category (View for \"doc_category\"", "'auto_wp', 'bookers', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id',", "daily record. 
If user has role HR-Org-Location and is in", "on (Edit for \"time_project\": ('infosec_req', 'is_extern', 'max_hours', 'op_project', 'planned_effort', 'product_family',", "time_wp_group (Create for \"time_wp_group\" only) User is allowed to edit", "'actor', 'creation', 'creator', 'firstname', 'lastname', 'realname', 'username') only) Users are", "create room (Create for \"room\" only) User is allowed to", "(View for \"customer_agreement\" only) User is allowed to access mailgroup", "is allowed to search it_issue (Search for \"it_issue\" only) User", "use the email interface (Email Access) User may view a", "allowed to access it_issue (View for \"it_issue\" only) User is", "or user is on nosy list (Edit for \"it_project\": ('messages',", "query (Create for \"query\" only) User is allowed to create", "(Create for \"work_location\" only) User is allowed to edit cost_center", "\"file\" only) User is allowed View on issue if issue", "get nosy messages for support (Nosy for \"support\" only) Role", "for \"uc_type\" only) User is allowed to access user_status (View", "only) User is allowed to edit query (Edit for \"query\"", "search time_wp (Search for \"time_wp\": ('activity', 'actor', 'auto_wp', 'cost_center', 'creation',", "allowed to access department (View for \"department\" only) User is", "for \"time_activity_perm\" only) User is allowed to edit time_record (Edit", "User is allowed Edit on (Edit for \"organisation\": ('domain_part',) only)", "access time_wp_group (View for \"time_wp_group\" only) User is allowed to", "User is allowed to edit (some of) their own user", "for \"contact_type\" only) User is allowed to access cost_center (View", "\"time_record\" only) User is allowed to view (some of) their", "'composed_of', 'creation', 'creator', 'cur_est_begin', 'cur_est_end', 'deadline', 'depends', 'doc_issue_status', 'earliest_start', 'effective_prio',", "public_holiday (Edit for \"public_holiday\" only) User is allowed to edit", "to edit overtime correction if the 
overtime correction is not", "web (Web Roles) Role \"hr-leave-approval\": User is allowed Edit on", "for \"file\": ('name', 'type') only) User is allowed Edit on", "create cost_center (Create for \"cost_center\" only) User is allowed to", "only) User is allowed to access timesheet (View for \"timesheet\"", "is allowed View on (View for \"user\": ('nickname', 'status', 'username')", "'address', 'alternate_addresses', 'creation', 'creator', 'id', 'queries', 'realname', 'status', 'timezone', 'username')", "allowed to search user_status (Search for \"user\": ('status',) only) User", "(Retire for \"cost_center_permission_group\" only) User is allowed to create cost_center_permission_group", "domain_permission for the user (Edit for \"user\": ['clearance_by', 'contacts', 'csv_delimiter',", "'is_extern', 'max_hours', 'op_project', 'planned_effort', 'product_family', 'project_type', 'reporting_group', 'work_location') only) User", "is allowed to access issue (View for \"issue\" only) User", "(View for \"ext_tracker_state\" only) User is allowed to access ext_tracker_type", "edit location (Edit for \"location\" only) User is allowed to", "is allowed to edit leave_submission (Edit for \"leave_submission\" only) User", "\"overtime_correction\" only) User is allowed to view time record if", "the person to whom approvals are delegated (View for \"time_record\"", "user is on nosy list (Edit for \"support\": ('analysis_end', 'analysis_result',", "for \"query\" only) User is allowed to search for their", "User is allowed to create user_contact (Create for \"user_contact\" only)", "only) User is allowed to create location (Create for \"location\"", "only) User is allowed to access doc (View for \"doc\"", "User is allowed to create department (Create for \"department\" only)", "is in group HR-Org-Location and in the same Org-Location as", "Role \"User\" Role \"admin\": User may access the rest interface", "for \"doc_status\" only) User is allowed to edit product_type (Edit", 
"\"time_wp_group\" only) Role \"project_view\": User is allowed to access time_project", "'wp_no') only) User is allowed to view their own files", "(Edit for \"user_contact\" only) Users may view/edit user records for", "org_location (Create for \"org_location\" only) User is allowed to create", "User is allowed to access absence_type (View for \"absence_type\" only)", "'position_text', 'room'] only) Users may view/edit user records for ad_domain", "is allowed to create query (Create for \"query\" only) User", "(Edit for \"time_project_status\" only) User is allowed to edit time_wp", "\"cost_center_permission_group\" only) User is allowed to edit cost_center_permission_group (Edit for", "'username', 'vie_user'] only) Users may view/edit user records for ad_domain", "only) User is allowed to edit contact (Edit for \"contact\"", "project_type (View for \"project_type\" only) User is allowed to access", "access department (View for \"department\" only) User is allowed to", "(Search for \"user_dynamic\" only) May only view/edit records with the", "for \"vacation_correction\" only) User is allowed to create auto_wp (Create", "for \"ext_tracker_state\" only) User is allowed to create file (Create", "is allowed to access reporting_group (View for \"reporting_group\" only) User", "get nosy messages for it_project (Nosy for \"it_project\" only) User", "and is in the same Org-Location as the record, it", "\"vacation_correction\" only) User is allowed to create contract_type (Create for", "only) User is allowed to access project_type (View for \"project_type\"", "User is allowed to access daily_record_freeze (View for \"daily_record_freeze\" only)", "allowed to view (some of) their own user details (View", "ad_domain for which they are in the domain_permission for the", "User is allowed to access doc_category (View for \"doc_category\" only)", "doc_status (View for \"doc_status\" only) User is allowed to access", "with Edit permission (Edit for \"file\" only) User is 
allowed", "user is owner or deputy of time category or on", "User is allowed to access cost_center (View for \"cost_center\" only)", "for \"time_project\" only) User may view work package if responsible", "User is allowed to create reference (Create for \"reference\" only)", "is allowed to edit contact (Edit for \"contact\" only) Role", "to create keyword (Create for \"keyword\" only) User is allowed", "create it_category (Create for \"it_category\" only) User is allowed to", "for \"user\": ('csv_delimiter', 'hide_message_files', 'password', 'timezone') only) Users are allowed", "User is allowed to access vac_aliq (View for \"vac_aliq\" only)", "for \"time_wp_group\" only) Role \"project_view\": User is allowed to access", "only) User is allowed to access time_wp_summary_no (View for \"time_wp_summary_no\"", "is allowed to access overtime_correction (View for \"overtime_correction\" only) User", "'organisation', 'responsible', 'status') only) User is allowed Edit on (Edit", "create msg (Create for \"msg\" only) User is allowed to", "edit own file (file created by user) (Edit for \"file\"", "(View for \"time_project\" only) User is allowed to access time_record", "is department manager of time category (View for \"time_wp\" only)", "'hide_message_files', 'password', 'timezone') only) Users are allowed to view some", "is allowed to edit time_report (Edit for \"time_report\" only) User", "External users are allowed to access issue if they are", "(Web Access) User may access the xmlrpc interface (Xmlrpc Access)", "is allowed to access doc (View for \"doc\" only) User", "user_contact (View for \"user_contact\" only) User is allowed to access", "User may get nosy messages for doc (Nosy for \"doc\"", "work package if booking is allowed for this user (also", "\"user_contact\" only) User is allowed to create user_dynamic (Create for", "their queries (Retire for \"query\" only) User is allowed to", "location (View for \"location\" only) User is allowed to access", "he is 
owner or supervisor or timetracking-by user (Edit for", "severity (Edit for \"severity\" only) User is allowed to edit", "\"organisation\" only) User is allowed to access overtime_period (View for", "allowed to edit status (Edit for \"status\" only) User is", "allowed to edit cost_center_status (Edit for \"cost_center_status\" only) User is", "messages for issue (Nosy for \"issue\" only) User may get", "search permission (View for \"query\" only) Role \"facility\": (Restore for", "(View for \"status_transition\" only) User is allowed to access test_level", "overtime information if he/she is in group HR-Org-Location and in", "allowed to edit dynamic user data if not frozen in", "only) User is allowed Edit on file if file is", "only) (Retire for \"room\" only) User is allowed to create", "access time_wp (View for \"time_wp\" only) Role \"sec-incident-nosy\": User is", "User may view work package if responsible for it, if", "to create location (Create for \"location\" only) User is allowed", "only) User is allowed to edit freeze record if not", "for \"time_wp\": ('activity', 'actor', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key',", "for \"time_record\" only) User is allowed to view (some of)", "'time_start', 'time_wp_summary_no') only) User may access the rest interface (Rest", "User is allowed to create it_category (Create for \"it_category\" only)", "for \"location\" only) User is allowed to access mailgroup (View", "timetracking-by user (View for \"daily_record\" only) User is allowed to", "to access product (View for \"product\" only) User is allowed", "create time_wp_group (Create for \"time_wp_group\" only) User is allowed to", "\"vacation_report\" only) User is allowed to access work_location (View for", "user_dynamic (Create for \"user_dynamic\" only) User is allowed to edit", "(Nosy for \"support\" only) Role \"office\": (Restore for \"room\" only)", "for \"absence_type\" only) User is allowed to create room (Create", 
"\"public_holiday\" only) User is allowed to edit query (Edit for", "'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Users may view/edit", "for \"time_wp\" only) User is allowed to create time_wp_group (Create", "only) User is allowed to create doc_issue_status (Create for \"doc_issue_status\"", "on (Edit for \"location\": ('domain_part',) only) User is allowed Edit", "permission (View for \"msg\" only) User is allowed View on", "of) their own user details (Edit for \"user\": ('csv_delimiter', 'hide_message_files',", "to create organisation (Create for \"organisation\" only) User is allowed", "allowed to edit it_issue (Edit for \"it_issue\" only) User is", "is allowed to access time_wp (View for \"time_wp\" only) User", "is allowed to create msg (Create for \"msg\" only) User", "is allowed to create it_project (Create for \"it_project\" only) User", "for \"support\" only) Role \"office\": (Restore for \"room\" only) (Retire", "to create auto_wp (Create for \"auto_wp\" only) User is allowed", "(Create for \"time_activity_perm\" only) User is allowed to create time_record", "non-confidential or user is on nosy list (Edit for \"it_project\":", "their own files (Search for \"file\" only) User is allowed", "only) User is allowed to create public_holiday (Create for \"public_holiday\"", "create cost_center_group (Create for \"cost_center_group\" only) User is allowed to", "allowed to view work package and time category names if", "(View for \"status_transition\" only) User is allowed to access summary_report", "User is allowed to create it_issue (Create for \"it_issue\" only)", "for \"uc_type\" only) User may manipulate user Roles through the", "only) User is allowed to create time_wp_group (Create for \"time_wp_group\"", "access cost_center_permission_group (View for \"cost_center_permission_group\" only) User is allowed to", "record. 
If user has role HR-Org-Location and is in the", "allowed to search it_issue (Search for \"it_issue\" only) User is", "is allowed to edit customer_agreement (Edit for \"customer_agreement\" only) User", "['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start',", "\"mailgroup\" only) User may manipulate user Roles through the web", "'nosy', 'numeric_effort', 'part_of', 'planned_begin', 'planned_end', 'priority', 'release', 'responsible', 'safety_level', 'severity',", "'planned_effort', 'nosy') only) User is allowed to edit workpackage if", "only) (Search for \"overtime_correction\" only) (Search for \"time_activity_perm\" only) (Search", "time_project_status (Edit for \"time_project_status\" only) User is allowed to edit", "'timezone', 'tt_lines', 'username', 'vie_user'] only) Users may view/edit user records", "allowed to access functional_role (View for \"functional_role\" only) User is", "allowed to create return_type (Create for \"return_type\" only) User is", "\"user\" only) User is allowed to create user_dynamic (Create for", "vacation_correction (View for \"vacation_correction\" only) User is allowed to create", "manager of time category (View for \"time_project\" only) User may", "is allowed to access status (View for \"status\" only) User", "'kind', 'maturity_index', 'messages', 'needs', 'nosy', 'numeric_effort', 'part_of', 'planned_begin', 'planned_end', 'priority',", "of) their own user details (View for \"user\": ('entry_date', 'planning_role')", "User is allowed to edit doc (Edit for \"doc\" only)", "for \"it_category\" only) User is allowed to access it_issue_status (View", "\"cost_center_permission_group\" only) (Retire for \"cost_center_permission_group\" only) User is allowed to", "\"file\" only) User is allowed to create issue (Create for", "may see time report if reponsible or deputy of time", "only) User is allowed to create it_category (Create 
for \"it_category\"", "('approval_hr', 'approval_required', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'no_overtime', 'no_overtime_day', 'only_hours', 'overtime_reduction')", "User is allowed to view freeze information if he/she is", "only) Role \"user\": (Search for \"time_project\": ('activity', 'actor', 'creation', 'creator',", "for \"customer\" only) User is allowed to access customer_agreement (View", "User is allowed to create cost_center_permission_group (Create for \"cost_center_permission_group\" only)", "\"user\": ['contacts', 'position_text', 'room'] only) Role \"external\": (Search for \"ext_tracker_state\":", "'address', 'alternate_addresses', 'business_responsible', 'clearance_by', 'creation', 'creator', 'firstname', 'id', 'job_description', 'lastname',", "there is a transitive permission via containers (Edit for \"issue\":", "is allowed to create absence_type (Create for \"absence_type\" only) User", "'cc', 'cc_emails', 'classification', 'closed', 'confidential', 'customer', 'emails', 'execution', 'external_ref', 'files',", "Role \"dom-user-edit-facility\": Users may view/edit user records for ad_domain for", "retire their queries (Retire for \"query\" only) User is allowed", "allowed Edit on (Edit for \"time_project\": ('group_lead', 'team_lead') only) User", "(Create for \"issue\" only) User is allowed to create it_issue", "access the web interface (Web Access) User may use the", "allowed to create time_report (Create for \"time_report\" only) User is", "to access user_status (View for \"user_status\" only) User is allowed", "allowed to edit issue (Edit for \"issue\" only) User is", "to edit freeze record if not frozen at the given", "\"itview\": User is allowed to access it_int_prio (View for \"it_int_prio\"", "User is allowed to access user_contact (View for \"user_contact\" only)", "User is allowed to access doc_issue_status (View for \"doc_issue_status\" only)", "for \"reference\" only) User is allowed to edit artefact 
(Edit", "allowed to access ext_tracker_state (View for \"ext_tracker_state\" only) User is", "the user (View for \"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname',", "\"time_activity_perm\" only) User is allowed to create time_record (Create for", "cost_center (Edit for \"cost_center\" only) User is allowed to edit", "View on it_project if it_project is non-confidential or user is", "'alternate_addresses', 'business_responsible', 'clearance_by', 'creation', 'creator', 'firstname', 'id', 'job_description', 'lastname', 'lunch_duration',", "User is allowed to access status_transition (View for \"status_transition\" only)", "\"msg\": ('keywords',) only) User is allowed Edit on file if", "customer_agreement (Edit for \"customer_agreement\" only) User is allowed to edit", "to access department (View for \"department\" only) User is allowed", "is allowed to view work package and time category names", "for the user (View for \"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date',", "'creator', 'firstname', 'id', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text',", "(Create for \"organisation\" only) User is allowed to edit location", "(Edit for \"ext_tracker\" only) User is allowed to edit issue", "(Edit for \"uc_type\" only) User may manipulate user Roles through", "on (View for \"user\": ('activity', 'actor', 'address', 'alternate_addresses', 'creation', 'creator',", "allowed to access cost_center (View for \"cost_center\" only) User is", "or the user is the department manager of the owner", "for \"severity\" only) User is allowed to create status (Create", "User is allowed to edit location (Edit for \"location\" only)", "('business_responsible', 'scale_seniority') only) User is allowed View on (View for", "allowed to create cost_center (Create for \"cost_center\" only) User is", "allowed to access analysis_result (View for \"analysis_result\" only) User is", 
"it_int_prio (View for \"it_int_prio\" only) User is allowed to access", "only) User is allowed to edit work_location (Edit for \"work_location\"", "allowed to create time_project_status (Create for \"time_project_status\" only) User is", "for \"auto_wp\" only) User is allowed to edit dynamic user", "only) User is allowed to access it_int_prio (View for \"it_int_prio\"", "Role \"hr-vacation\": User is allowed to access contract_type (View for", "edit doc (Edit for \"doc\" only) User is allowed to", "User is allowed to edit leave_submission (Edit for \"leave_submission\" only)", "\"room\" only) (Retire for \"room\" only) User is allowed View", "is allowed to access user_dynamic (View for \"user_dynamic\" only) User", "(Create for \"uc_type\" only) User is allowed to edit absence", "is allowed to access time_wp (View for \"time_wp\" only) Role", "he/she has role HR or HR-Org-Location (View for \"time_project\": ('name',)", "details (Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'lunch_duration', 'lunch_start', 'password', 'queries',", "transitive permission via containers (View for \"issue\": ['activity', 'actor', 'area',", "owned by user (Retire for \"time_record\" only) User or Timetracking", "(Create for \"uc_type\" only) User is allowed to create user", "Users are allowed to edit some of their details (Edit", "product_family (Create for \"product_family\" only) User is allowed to create", "for \"time_wp\": ('activity', 'actor', 'auto_wp', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed',", "User is allowed Edit on issue if issue is non-confidential", "'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only) Search (Search for \"user_contact\"", "is allowed to access severity (View for \"severity\" only) User", "user (Create for \"user\" only) User is allowed to create", "\"user_dynamic\": ('id', 'sap_cc', 'user', 'valid_from', 'valid_to') only) User is allowed", "edit area (Edit for \"area\" only) User is 
allowed to", "is allowed to create file (Create for \"file\" only) User", "View on (View for \"user\": ('nickname', 'status', 'username') only) User", "is allowed to access contract_type (View for \"contract_type\" only) User", "only) User is allowed to access functional_role (View for \"functional_role\"", "('business_responsible', 'planning_role', 'scale_seniority') only) User is allowed to access user_functional_role", "to create time_wp_group (Create for \"time_wp_group\" only) User is allowed", "'vie_user_ml') only) User is allowed View on (View for \"user\":", "'department_temp', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures',", "(Edit for \"time_activity\" only) User is allowed to edit time_activity_perm", "owner of the daily record (the supervisor relationship is transitive)", "delegated) (View for \"time_wp\": ('activity', 'actor', 'cost_center', 'creation', 'creator', 'description',", "(Create for \"time_report\" only) User is allowed to edit time_report", "for \"cost_center_status\" only) User is allowed to access customer (View", "allowed to access time_wp (View for \"time_wp\" only) User is", "it_project if it_project is non-confidential or user is on nosy", "only) Role \"facility\": (Restore for \"room\" only) (Retire for \"room\"", "is allowed to view time record data if he/she is", "'effective_prio', 'effort_hours', 'external_users', 'files', 'files_affected', 'fixed_in', 'id', 'keywords', 'kind', 'maturity_index',", "edit if he's the owner of the contact (Edit for", "(View for \"ext_tracker\" only) User is allowed to access ext_tracker_state", "only) User is allowed to search leave_submission (Search for \"leave_submission\"", "(Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'lunch_duration', 'lunch_start', 'password', 'queries', 'realname',", "(View for \"user\": ('contacts',) only) User is allowed to access", "(View for \"cost_center\" only) User 
is allowed to access cost_center_group", "test_level (View for \"test_level\" only) User is allowed to access", "for \"status\" only) User is allowed to edit status_transition (Edit", "for \"msg\" only) User is allowed to view their own", "if he is responsible for it (Edit for \"category\": ('nosy',", "'epic_key', 'planned_effort', 'time_end', 'time_start', 'time_wp_summary_no') only) User may access the", "'goods_received', 'goods_sent', 'lot', 'messages', 'nosy', 'number_effected', 'numeric_effort', 'prio', 'prodcat', 'product',", "for \"overtime_period\" only) User is allowed to create product_family (Create", "allowed to edit department (Edit for \"department\" only) User is", "for \"it_project\" only) Role \"sec-incident-responsible\": User is allowed to access", "that are attached to that daily_record) if the user owns", "only) (View for \"time_project\" only) User is allowed Edit on", "Edit on (Edit for \"daily_record\": ('status', 'time_record') only) User is", "for \"severity\" only) User is allowed to access status (View", "the email interface (Email Access) User may view a daily_record", "allowed to edit reporting_group (Edit for \"reporting_group\" only) User is", "access sup_classification (View for \"sup_classification\" only) User is allowed to", "to edit cost_center_group (Edit for \"cost_center_group\" only) User is allowed", "to edit support (Edit for \"support\" only) Role \"time-report\": User", "time_wp (View for \"time_wp\" only) Role \"sec-incident-nosy\": User is allowed", "may restore everything (Restore) User may retire everything (Retire) User", "\"hr-org-location\": (Search for \"daily_record_freeze\" only) (Search for \"overtime_correction\" only) (Search", "edit several fields if he is Stakeholder/Responsible for an it_issue", "own leave submissions (View for \"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day',", "for \"doc\" only) User is allowed to access doc_category (View", "(Search for \"user_contact\" 
only) User is allowed Edit on (Edit", "frozen (Edit for \"overtime_correction\" only) User is allowed to edit", "is allowed to access kind (View for \"kind\" only) User", "daily_record (Create for \"daily_record\" only) User is allowed to create", "to edit product_family (Edit for \"product_family\" only) User is allowed", "not frozen in validity span of dynamic user record (Edit", "(View for \"mailgroup\" only) User is allowed to access msg_keyword", "allowed to access work_location (View for \"work_location\" only) User is", "it_project (Search for \"it_project\" only) User is allowed to search", "access it_project (View for \"it_project\" only) Role \"sec-incident-responsible\": User is", "he is owner or project responsible/deputy (Edit for \"time_wp\": ('bookers',", "Role \"pgp\": Role \"procurement\": (View for \"sap_cc\" only) (View for", "person to whom approvals are delegated (Edit for \"leave_submission\": ('status',)", "to edit contract_type (Edit for \"contract_type\" only) User is allowed", "only) (Search for \"time_wp\": ('activity', 'actor', 'auto_wp', 'bookers', 'cost_center', 'creation',", "'files', 'goods_received', 'goods_sent', 'lot', 'messages', 'nosy', 'number_effected', 'numeric_effort', 'prio', 'prodcat',", "user_contact (Edit for \"user_contact\" only) Users may view/edit user records", "allowed to create customer (Create for \"customer\" only) User is", "(Create for \"user_dynamic\" only) User is allowed to edit user_contact", "(View for \"timesheet\" only) User is allowed to access uc_type", "(View for \"cost_center_permission_group\" only) User is allowed to access cost_center_status", "\"department\" only) User is allowed to create organisation (Create for", "\"cost_center_status\" only) User is allowed to edit department (Edit for", "allowed View on it_issue if it_issue is non-confidential or user", "\"room\" only) User is allowed to create uc_type (Create for", "(Create for \"time_project_status\" only) User is allowed to create 
time_wp", "allowed Edit on issue if issue is non-confidential or user", "on (Edit for \"sap_cc\": ('group_lead', 'purchasing_agents', 'team_lead') only) User is", "is allowed to edit department (Edit for \"department\" only) User", "User is allowed to create return_type (Create for \"return_type\" only)", "create time_project (Create for \"time_project\" only) User is allowed to", "create return_type (Create for \"return_type\" only) User is allowed to", "User is allowed to edit query (Edit for \"query\" only)", "only) User is allowed to edit area (Edit for \"area\"", "is allowed to edit several fields if he is Stakeholder/Responsible", "User may see time report if reponsible or deputy of", "\"kind\" only) User is allowed to create msg_keyword (Create for", "'severity', 'status', 'superseder', 'test_level', 'title'] only) User is allowed View", "to create status_transition (Create for \"status_transition\" only) User is allowed", "to access status (View for \"status\" only) User is allowed", "for \"cost_center\" only) User is allowed to create cost_center_group (Create", "'pictures', 'position_text', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'sync_foreign_key', 'timezone',", "View permission (View for \"file\" only) User is allowed to", "'roles', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user']", "'status') only) User is allowed Edit on (Edit for \"time_project\":", "User is allowed to see time record if he is", "dynamic user data if not frozen in validity span of", "mailgroup (Edit for \"mailgroup\" only) User may manipulate user Roles", "to edit msg_keyword (Edit for \"msg_keyword\" only) User is allowed", "on nosy list (Edit for \"it_project\": ('messages', 'files', 'nosy') only)", "is allowed Edit on (Edit for \"location\": ('domain_part',) only) User", "only) User is allowed to edit contract_type (Edit for \"contract_type\"", "edit time_records owned by user (Restore for 
\"time_record\" only) User", "(Edit for \"user\": ('address', 'alternate_addresses', 'nickname', 'password', 'timezone', 'username') only)", "allowed to access time_record (View for \"time_record\" only) User is", "for \"vacation_correction\" only) Role \"hr-org-location\": (Search for \"daily_record_freeze\" only) (Search", "time_activity (Create for \"time_activity\" only) User is allowed to create", "User is allowed to create issue (Create for \"issue\" only)", "create organisation (Create for \"organisation\" only) User is allowed to", "nosy list (Edit for \"it_project\": ('messages', 'files', 'nosy') only) User", "to view work package and time category names if he/she", "'timezone', 'tt_lines', 'vie_user'] only) Users may view/edit user records for", "sup_execution (View for \"sup_execution\" only) User is allowed to access", "is allowed to access daily_record_freeze (View for \"daily_record_freeze\" only) User", "Edit on (Edit for \"msg\": ('keywords',) only) User is allowed", "allowed to access mailgroup (View for \"mailgroup\" only) User is", "for \"analysis_result\" only) User is allowed to access contact (View", "allowed Edit on (Edit for \"msg\": ('author', 'date', 'id', 'keywords',", "'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Users may view/edit user", "'is_public_holiday', 'is_special_leave', 'is_vacation', 'name', 'nosy', 'only_hours', 'op_project', 'overtime_reduction', 'responsible', 'status',", "is allowed Edit on file if file is linked from", "is allowed to access it_issue (View for \"it_issue\" only) User", "the same Org-Location as the given user (View for \"overtime_correction\"", "\"it_project\" only) Role \"staff-report\": Role \"sub-login\": Role \"summary_view\": Role \"supportadmin\":", "view time record data if he/she is in group HR-Org-Location", "nosy list (View for \"it_project\" only) User is allowed View", "(Edit for \"work_location\" only) Role \"doc_admin\": User is allowed Edit", "'creator', 
'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'is_extern', 'is_public', 'id', 'name', 'project',", "'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title'] only) External users are", "cost_center_status (View for \"cost_center_status\" only) User is allowed to access", "'keywords', 'subject', 'summary') only) User is allowed to access ext_msg", "View on support if support is non-confidential or user is", "is allowed to access cost_center (View for \"cost_center\" only) User", "User is allowed Edit on (Edit for \"msg\": ('author', 'date',", "is allowed to access overtime_period (View for \"overtime_period\" only) User", "'subst_active', 'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only) Role", "\"doc\" only) User may get nosy messages for issue (Nosy", "to access product_family (View for \"product_family\" only) User is allowed", "User is allowed to access it_issue_status (View for \"it_issue_status\" only)", "(Edit for \"absence\" only) User is allowed to edit absence_type", "'is_vacation', 'no_overtime', 'no_overtime_day', 'only_hours', 'overtime_reduction') only) User is allowed View", "for \"issue\" only) User is allowed Edit on it_issue if", "is allowed to access absence_type (View for \"absence_type\" only) User", "access ext_msg (View for \"ext_msg\" only) User is allowed to", "is allowed to create vacation_correction (Create for \"vacation_correction\" only) User", "only) User is allowed View on (View for \"user_dynamic\": ('id',", "(Edit for \"product_family\" only) User is allowed to edit public_holiday", "create it_int_prio (Create for \"it_int_prio\" only) User is allowed to", "only) Role \"external\": (Search for \"ext_tracker_state\": ('id', 'issue') only) (Search", "for \"it_project\" only) User is allowed View on msg if", "only) User is allowed to access time_project (View for \"time_project\"", "absence_type (Create for \"absence_type\" only) User is 
allowed to create", "(Edit for \"issue\" only) User is allowed Edit on it_issue", "User is allowed to access status (View for \"status\" only)", "User is allowed to create analysis_result (Create for \"analysis_result\" only)", "(Create for \"status\" only) User is allowed to create status_transition", "only) User is allowed to edit it_request_type (Edit for \"it_request_type\"", "only) User is allowed View on file if file is", "for \"user_dynamic\" only) User is allowed to edit user_contact (Edit", "\"query\" only) User is allowed to create support (Create for", "\"product_type\" only) User is allowed to create reference (Create for", "is allowed to view leave submission if he is the", "('csv_delimiter', 'hide_message_files', 'password', 'timezone') only) Users are allowed to view", "'planned_effort', 'time_end', 'time_start', 'time_wp_summary_no') only) User may access the rest", "'status', 'subst_active', 'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only)", "allowed to view contact if he's the owner of the", "for \"leave_submission\": ('status',) only) User is allowed to view leave", "'queries', 'realname', 'room', 'subst_active', 'substitute', 'timezone', 'tt_lines') only) User is", "User may edit own leave submissions (Edit for \"leave_submission\": ('comment',", "access time_wp_summary_no (View for \"time_wp_summary_no\" only) User is allowed to", "for \"time_wp\" only) User is allowed to edit (some of)", "to whom approvals are delegated (Edit for \"leave_submission\": ('status',) only)", "\"msg\" only) User is allowed View on support if support", "for the user (View for \"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date',", "User is allowed Edit on (Edit for \"leave_submission\": ('status',) only)", "\"leave_submission\" only) User is allowed to access overtime_correction (View for", "edit user_contact (Edit for \"user_contact\" only) Users may view user_dynamic", "to access issue (View for 
\"issue\" only) User is allowed", "as the given user (View for \"time_record\" only) Role \"hr-vacation\":", "to edit if he's the owner of the contact (Edit", "(View for \"user_status\" only) User is allowed to access vac_aliq", "'type', 'warranty') only) User is allowed View on (View for", "to create doc_issue_status (Create for \"doc_issue_status\" only) User is allowed", "is allowed to access product_family (View for \"product_family\" only) User", "'tt_lines', 'vie_user'] only) Users may view/edit user records for ad_domain", "allowed Edit on (Edit for \"sap_cc\": ('group_lead', 'team_lead') only) User", "(View for \"kind\" only) User is allowed to access msg_keyword", "(Create for \"reference\" only) User is allowed to edit artefact", "'actor', 'area', 'category', 'closed', 'composed_of', 'creation', 'creator', 'cur_est_begin', 'cur_est_end', 'deadline',", "User is allowed View on msg if msg is linked", "owns the daily_record or has role 'HR' or 'Controlling', or", "on (Edit for \"time_project\": ('group_lead', 'team_lead') only) User is allowed", "classes where they have search permission (View for \"query\" only)", "(Restore) User may retire everything (Retire) User may use the", "only) User is allowed View on msg if msg is", "only) User is allowed to edit time_wp (Edit for \"time_wp\"", "support (Nosy for \"support\" only) Role \"office\": (Restore for \"room\"", "(View for \"issue\" only) User is allowed View on it_issue", "for \"it_request_type\" only) User is allowed to access keyword (View", "for \"ext_tracker_state\": ('id', 'issue') only) (Search for \"user\": ('id', 'nickname',", "owner or supervisor or timetracking-by user (Edit for \"daily_record\": ('status',", "create reporting_group (Create for \"reporting_group\" only) User is allowed to", "access it_project (View for \"it_project\" only) User is allowed to", "to create it_issue (Create for \"it_issue\" only) User is allowed", "only) User is allowed to access absence_type (View for 
\"absence_type\"", "edit status_transition (Edit for \"status_transition\" only) User is allowed to", "only) User is allowed to access category (View for \"category\"", "\"customer_agreement\" only) User is allowed to edit mailgroup (Edit for", "allowed to access daily_record_status (View for \"daily_record_status\" only) User is", "organisation (Edit for \"organisation\" only) Role \"pgp\": Role \"procurement\": (View", "edit it_issue (Edit for \"it_issue\" only) User is allowed to", "to access support (View for \"support\" only) User is allowed", "allowed to access cost_center_permission_group (View for \"cost_center_permission_group\" only) User is", "only) User is allowed to view dynamic user data if", "edit keyword (Edit for \"keyword\" only) User is allowed to", "(Edit for \"time_activity_perm\" only) User is allowed to edit time_record", "only) User is allowed to edit severity (Edit for \"severity\"", "for \"return_type\" only) User is allowed to access room (View", "to create reference (Create for \"reference\" only) User is allowed", "Role \"sub-login\": Role \"summary_view\": Role \"supportadmin\": User is allowed to", "to create customer_agreement (Create for \"customer_agreement\" only) User is allowed", "\"product_type\" only) User is allowed to access project_type (View for", "is allowed to access work_location (View for \"work_location\" only) User", "\"doc\" only) User is allowed to edit ext_tracker_state (Edit for", "User is allowed to access vacation_correction (View for \"vacation_correction\" only)", "user (View for \"daily_record_freeze\" only) User is allowed to view", "\"User,Nosy\" New Email users get the Role \"User\" Role \"admin\":", "only) Role \"doc_admin\": User is allowed Edit on (Edit for", "'title', 'type', 'warranty') only) User is allowed View on (View", "edit uc_type (Edit for \"uc_type\" only) Role \"organisation\": User is", "access org_location (View for \"org_location\" only) User is allowed to", "workpackage if he is time 
category owner or deputy (Edit", "edit room (Edit for \"room\" only) User is allowed to", "\"user_dynamic\" only) May only view/edit records with the correct domain", "\"daily_record\" only) User is allowed to search for their own", "is allowed to access user_status (View for \"user_status\" only) User", "(View for \"user_dynamic\": ('id', 'sap_cc', 'user', 'valid_from', 'valid_to') only) User", "Roles through the web (Web Roles) Role \"hr-leave-approval\": User is", "\"organisation\" only) User is allowed to edit product_family (Edit for", "allowed to access absence_type (View for \"absence_type\" only) User is", "('status', 'time_record') only) User is allowed to access daily record", "\"user_dynamic\" only) User is allowed to edit freeze record if", "the given user (View for \"daily_record_freeze\" only) User is allowed", "to create sap_cc (Create for \"sap_cc\" only) User is allowed", "it_int_prio (Edit for \"it_int_prio\" only) User is allowed to edit", "User is allowed to create product_type (Create for \"product_type\" only)", "several fields if he is Stakeholder/Responsible for an it_issue (Edit", "edit work_location (Edit for \"work_location\" only) Role \"doc_admin\": User is", "vacation_correction (Edit for \"vacation_correction\" only) Role \"issue_admin\": User is allowed", "\"user\": ('nickname', 'status', 'username') only) User is allowed View on", "(Edit for \"org_location\" only) User is allowed to edit organisation", "same Org-Location as the given user (View for \"daily_record_freeze\" only)", "Roles) Role \"itview\": User is allowed to access it_int_prio (View", "as the given user (View for \"user_dynamic\" only) User is", "msg_keyword (Create for \"msg_keyword\" only) User is allowed to create", "(Create for \"absence\" only) User is allowed to create absence_type", "User is allowed Edit on (Edit for \"user\": ('ad_domain', 'nickname',", "on (View for \"user\": ('nickname', 'status', 'username') only) User is", "if he is owner or project 
responsible/deputy (Edit for \"time_wp\":", "User is allowed to access room (View for \"room\" only)", "for \"user_contact\" only) User is allowed to create user_dynamic (Create", "create uc_type (Create for \"uc_type\" only) User is allowed to", "only) User is allowed to edit sap_cc (Edit for \"sap_cc\"", "\"user_functional_role\" only) User is allowed to edit user_functional_role (Edit for", "to edit safety_level (Edit for \"safety_level\" only) User is allowed", "User is allowed to edit artefact (Edit for \"artefact\" only)", "User is allowed to edit customer (Edit for \"customer\" only)", "it (Edit for \"category\": ('nosy', 'default_part_of') only) User is allowed", "is allowed to create user_dynamic (Create for \"user_dynamic\" only) User", "\"room\" only) User is allowed to create sap_cc (Create for", "'nosy', 'number_effected', 'numeric_effort', 'prio', 'prodcat', 'product', 'related_issues', 'related_support', 'release', 'responsible',", "or there is a transitive permission via containers (View for", "is allowed to create status (Create for \"status\" only) User", "attached to that daily_record) if the user owns the daily_record", "the xmlrpc interface (Xmlrpc Access) User may edit own leave", "'tt_lines') only) User is allowed to edit category if he", "only) User is allowed Edit on (Edit for \"time_project\": ('infosec_req',", "it_project is non-confidential or user is on nosy list (View", "(Edit for \"reporting_group\" only) User is allowed to edit room", "HR-Org-Location (View for \"time_wp\": ('name', 'project') only) User is allowed", "the correct domain (Edit for \"user_dynamic\" only) May only view/edit", "HR-Org-Location and is in the same Org-Location as the record,", "only) Role \"dom-user-edit-hr\": (Search for \"user_dynamic\" only) May only view/edit", "if they may see one of the time_records for that", "for \"issue\" only) User is allowed to create msg (Create", "contact (Edit for \"contact\" only) User is allowed to edit", "for \"it_issue\": 
('responsible',) only) User is allowed to edit several", "and approval delegated) (View for \"time_wp\": ('activity', 'actor', 'cost_center', 'creation',", "same Org-Location as the given user (View for \"time_record\" only)", "room (Create for \"room\" only) User is allowed to edit", "the domain_permission for the user (View for \"user\": ['contacts', 'position_text',", "User is allowed to create domain_permission (Create for \"domain_permission\" only)", "User is allowed to edit time category if the status", "file is linked from an item with View permission (View", "is allowed to edit cost_center_status (Edit for \"cost_center_status\" only) User", "is allowed to access customer (View for \"customer\" only) User", "to access it_project (View for \"it_project\" only) Role \"msgedit\": (Search", "it_prio (View for \"it_prio\" only) User is allowed to access", "is allowed to create work_location (Create for \"work_location\" only) User", "User is allowed to edit sup_classification (Edit for \"sup_classification\" only)", "(Create for \"msg\" only) User is allowed to create query", "\"it_project\" only) User is allowed to create it_request_type (Create for", "\"file\" only) Role \"user\": (Search for \"time_project\": ('activity', 'actor', 'creation',", "allowed View on (View for \"user\": ('activity', 'actor', 'address', 'alternate_addresses',", "for \"doc_status\" only) User is allowed to access ext_tracker (View", "edit their queries (Edit for \"query\" only) User is allowed", "for \"room\" only) User is allowed Edit on (Edit for", "\"room\" only) User is allowed to edit room (Edit for", "user is on nosy list (View for \"support\" only) User", "for \"time_project_status\" only) User is allowed to access time_wp_group (View", "for \"absence_type\" only) User is allowed to edit room (Edit", "(Edit for \"overtime_correction\" only) User is allowed to edit product_family", "\"auto_wp\" only) User is allowed to access contract_type (View for", "record if not frozen at 
the given date (Edit for", "'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only) Role \"dom-user-edit-hr\": (Search for", "it_project (Edit for \"it_project\" only) User is allowed to edit", "(View for \"sup_type\" only) User is allowed to access sup_warranty", "(Search for \"time_project\": ('activity', 'actor', 'creation', 'creator', 'deputy', 'description', 'id',", "is allowed to create artefact (Create for \"artefact\" only) User", "supervisor or the person to whom approvals are delegated (Edit", "data if not frozen in validity span of dynamic user", "'confidential', 'customer', 'emails', 'execution', 'external_ref', 'files', 'goods_received', 'goods_sent', 'lot', 'messages',", "access test_level (View for \"test_level\" only) User is allowed to", "is allowed to edit ext_msg (Edit for \"ext_msg\" only) User", "department manager of time category (View for \"time_wp\" only) User", "ext_msg (Create for \"ext_msg\" only) User is allowed to create", "(View for \"vacation_correction\" only) User is allowed to create contract_type", "\"customer_agreement\" only) User is allowed to access mailgroup (View for", "email interface (Email Access) User may view a daily_record (and", "User may get nosy messages for issue (Nosy for \"issue\"", "(Create for \"daily_record\" only) User is allowed to create doc", "only) User is allowed to edit it_int_prio (Edit for \"it_int_prio\"", "for \"safety_level\" only) User is allowed to access sap_cc (View", "'timezone', 'username') only) User is allowed View on (View for", "allowed to access room (View for \"room\" only) User is", "only) User is allowed to edit if he's the owner", "information if he/she is in group HR-Org-Location and in the", "allowed to create customer_agreement (Create for \"customer_agreement\" only) User is", "kind (View for \"kind\" only) User is allowed to access", "User may view everything (View) Role \"anonymous\": User may access", "allowed to edit customer (Edit for \"customer\" only) 
User is", "support (Search for \"support\" only) User is allowed to search", "HR or HR-Org-Location (View for \"time_wp\": ('name', 'project') only) User", "is allowed to create user_contact (Create for \"user_contact\" only) User", "'description', 'name', 'nosy', 'organisation', 'responsible', 'status') only) User is allowed", "which they are in the domain_permission for the user (Edit", "User is allowed to access analysis_result (View for \"analysis_result\" only)", "(Create for \"area\" only) User is allowed to create category", "only) User is allowed Edit on (Edit for \"sap_cc\": ('group_lead',", "for \"analysis_result\" only) User is allowed to access area (View", "create overtime_period (Create for \"overtime_period\" only) User is allowed to", "'firstname', 'lastname', 'realname', 'username') only) Users are allowed to view", "\"contact_type\" only) User is allowed to access cost_center (View for", "to edit cost_center_status (Edit for \"cost_center_status\" only) User is allowed", "daily_record (Search for \"daily_record\" only) User is allowed to search", "only) User is allowed to access absence (View for \"absence\"", "ext_tracker_type (View for \"ext_tracker_type\" only) User is allowed to access", "'work_location', 'wps') only) (Search for \"time_wp\": ('activity', 'actor', 'auto_wp', 'bookers',", "'timezone', 'tt_lines') only) User is allowed to edit category if", "role HR or HR-Org-Location (View for \"time_wp\": ('name', 'project') only)", "to access leave_status (View for \"leave_status\" only) User is allowed", "only) User is allowed to see time record if he", "for \"it_project\" only) User is allowed to create it_request_type (Create", "to access time_activity_perm (View for \"time_activity_perm\" only) User is allowed", "allowed to access org_group (View for \"org_group\" only) User is", "(Create for \"ext_tracker_state\" only) User is allowed to create file", "to access time_wp (View for \"time_wp\" only) User is allowed", "user is department 
manager of time category (View for \"time_wp\"", "for \"public_holiday\" only) User is allowed to edit reporting_group (Edit", "(View for \"area\" only) User is allowed to access artefact", "the correct domain (View for \"user_dynamic\" only) User is allowed", "\"user\": ['contacts', 'position_text', 'room'] only) Users may view/edit user records", "to create ext_tracker_state (Create for \"ext_tracker_state\" only) User is allowed", "to edit category if he is responsible for it (Edit", "allowed to access reporting_group (View for \"reporting_group\" only) User is", "time_activity_perm (Create for \"time_activity_perm\" only) User is allowed to create", "User is allowed to create user_functional_role (Create for \"user_functional_role\" only)", "only) User is allowed to edit workpackage if he is", "(Search for \"issue\" only) User is allowed to search it_issue", "view work package and time category names if he/she has", "created by user) (Edit for \"file\" only) Role \"user\": (Search", "(View for \"category\": ('id', 'name') only) User is allowed View", "User may get nosy messages for it_project (Nosy for \"it_project\"", "User may create everything (Create) User may edit everything (Edit)", "'title'] only) User is allowed View on (View for \"category\":", "public queries for classes where they have search permission (View", "to edit keyword (Edit for \"keyword\" only) User is allowed", "\"time_project\": ('group_lead', 'purchasing_agents', 'team_lead') only) Role \"project\": User is allowed", "for \"file\" only) Role \"user\": (Search for \"time_project\": ('activity', 'actor',", "\"department\" only) User is allowed to access doc (View for", "View permission (View for \"file\" only) User is allowed View", "(Xmlrpc Access) User may create everything (Create) User may edit", "time category (View for \"time_wp\" only) User or Timetracking by", "\"user_dynamic\" only) User is allowed to edit user_contact (Edit for", "only) User is allowed to access time_record 
(View for \"time_record\"", "or there is a transitive permission via containers (Edit for", "allowed to access uc_type (View for \"uc_type\" only) User is", "Edit on it_project if it_project is non-confidential or user is", "\"support\" only) Role \"time-report\": User is allowed to access time_report", "User is allowed to create query (Create for \"query\" only)", "is allowed to edit user_contact (Edit for \"user_contact\" only) Users", "'release', 'responsible', 'return_type', 'sap_ref', 'send_to_customer', 'serial_number', 'set_first_reply', 'status', 'superseder', 'title',", "the time_records for that day (View for \"daily_record\" only) Role", "is allowed to access user_functional_role (View for \"user_functional_role\" only) User", "only) User is allowed to access user_status (View for \"user_status\"", "User is allowed View on (View for \"user_dynamic\": ('id', 'sap_cc',", "Role \"anonymous\": User may access the web interface (Web Access)", "user record (Edit for \"user_dynamic\" only) User is allowed to", "access project_type (View for \"project_type\" only) User is allowed to", "overtime correction is not frozen (Edit for \"overtime_correction\" only) User", "'order') only) (Restore for \"room\" only) (Retire for \"room\" only)", "\"time_wp\" only) User or Timetracking by user may edit time_records", "(View for \"user_contact\" only) User is allowed to create absence", "\"sap_cc\": ('group_lead', 'team_lead') only) User is allowed Edit on (Edit", "(View for \"ext_tracker_type\" only) User is allowed to access keyword", "\"time_record\" only) User is allowed to access time_report (View for", "'has_expiration_date', 'id', 'is_extern', 'is_public', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no',", "reporting_group (Edit for \"reporting_group\" only) User is allowed to edit", "allowed to edit location (Edit for \"location\" only) User is", "(View for \"it_issue\" only) User is allowed to access it_project", "only) Role 
\"sec-incident-nosy\": User is allowed to access it_int_prio (View", "'actor', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id', 'is_extern',", "User is allowed to access time_project_status (View for \"time_project_status\" only)", "\"issue\" only) User is allowed View on it_issue if it_issue", "allowed to edit leave_submission (Edit for \"leave_submission\" only) User is", "(Edit for \"leave_submission\": ('status',) only) User is allowed to view", "only) User is allowed to access overtime_period (View for \"overtime_period\"", "(Edit for \"time_project\": ('group_lead', 'team_lead') only) User is allowed Edit", "category (View for \"time_project\" only) User may view work package", "only) User is allowed Edit on (Edit for \"location\": ('domain_part',)", "file (file created by user) (Edit for \"file\" only) Role", "only) User is allowed to access issue (View for \"issue\"", "access sup_prio (View for \"sup_prio\" only) User is allowed to", "interface (Web Access) Role \"cc-permission\": (Restore for \"cost_center_permission_group\" only) (Retire", "allowed to create user_contact (Create for \"user_contact\" only) User is", "is allowed to access time_project_status (View for \"time_project_status\" only) User", "of the contact (Edit for \"user_contact\": ('visible',) only) User is", "(View for \"time_report\" only) User is allowed to access time_wp", "for \"contact\" only) User is allowed to edit contact (Edit", "(Create for \"customer_agreement\" only) User is allowed to create mailgroup", "allowed Edit on (Edit for \"user\": ('address', 'alternate_addresses', 'nickname', 'password',", "for \"support\" only) User is allowed to search time_record (Search", "allowed to access customer_agreement (View for \"customer_agreement\" only) User is", "responsible for the time category (Edit for \"time_project\": ('deputy', 'planned_effort',", "create ext_tracker_state (Create for \"ext_tracker_state\" only) 
User is allowed to", "owner of the contact (Edit for \"user_contact\": ('visible',) only) User", "for the user (View for \"user\": ['room'] only) Role \"dom-user-edit-gtt\":", "(Create for \"doc\" only) User is allowed to create ext_tracker_state", "User is allowed Edit on (Edit for \"user\": ('business_responsible', 'scale_seniority')", "View on (View for \"user\": ('contacts',) only) User is allowed", "only) User is allowed to access area (View for \"area\"", "to access org_group (View for \"org_group\" only) User is allowed", "time record if he is allowed to see all details", "(Edit for \"msg_keyword\" only) User is allowed to edit safety_level", "is allowed to edit public_holiday (Edit for \"public_holiday\" only) User", "(Create for \"cost_center_status\" only) User is allowed to create department", "only) User is allowed View on (View for \"category\": ('id',", "\"user_dynamic\" only) User is allowed to access vacation_correction (View for", "(View for \"auto_wp\" only) User is allowed to access contract_type", "\"time_project\": ('activity', 'actor', 'creation', 'creator', 'deputy', 'description', 'id', 'is_extern', 'is_public_holiday',", "User is allowed to create doc_issue_status (Create for \"doc_issue_status\" only)", "to access doc_category (View for \"doc_category\" only) User is allowed", "department (View for \"department\" only) User is allowed to access", "overtime correction if the overtime correction is not frozen (Edit", "users get the Role \"User\" Role \"admin\": User may access", "to edit query (Edit for \"query\" only) User is allowed", "on nosy list (View for \"it_issue\" only) User is allowed", "time record data if he/she is in group HR-Org-Location and", "for \"file\" only) User is allowed to access domain_permission (View", "organisation (Edit for \"organisation\" only) User is allowed to edit", "allowed to search daily_record (Search for \"daily_record\" only) User is", "from an item with Edit permission (Edit for \"file\" only)", "for 
\"doc_issue_status\" only) User is allowed to create ext_tracker (Create", "(View for \"sup_warranty\" only) User is allowed to access test_level", "('infosec_req', 'is_extern', 'max_hours', 'op_project', 'planned_effort', 'product_family', 'project_type', 'reporting_group', 'work_location') only)", "for \"user\": ('contacts',) only) User is allowed View on (View", "interface (Email Access) User may view everything (View) Role \"anonymous\":", "(Edit for \"support\": ('analysis_end', 'analysis_result', 'analysis_start', 'bcc', 'business_unit', 'category', 'cc',", "allowed to access time_project_status (View for \"time_project_status\" only) User is", "User is allowed to search leave_submission (Search for \"leave_submission\" only)", "User is allowed to edit reporting_group (Edit for \"reporting_group\" only)", "only) Role \"hr\": (Edit for \"overtime_period\": ('name', 'order') only) (Restore", "(Create for \"status_transition\" only) User is allowed to create test_level", "'release', 'responsible', 'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title'] only) External", "record data if he/she is in group HR-Org-Location and in", "is allowed to view their own files (View for \"file\"", "Access) User may access the xmlrpc interface (Xmlrpc Access) User", "allowed to edit it_int_prio (Edit for \"it_int_prio\" only) User is", "for \"room\" only) User is allowed View on (View for", "has role HR or HR-Org-Location (View for \"time_project\": ('name',) only)", "edit return_type (Edit for \"return_type\" only) User is allowed to", "ext_tracker (Edit for \"ext_tracker\" only) User is allowed to edit", "users or there is a transitive permission via containers (Edit", "to create it_project (Create for \"it_project\" only) User is allowed", "(View for \"it_int_prio\" only) User is allowed to access it_issue", "to access leave_submission (View for \"leave_submission\" only) User is allowed", "to edit ext_tracker (Edit for \"ext_tracker\" only) User is 
allowed", "is allowed to create auto_wp (Create for \"auto_wp\" only) User", "User may restore everything (Restore) User may retire everything (Retire)", "'product', 'related_issues', 'related_support', 'release', 'responsible', 'return_type', 'sap_ref', 'send_to_customer', 'serial_number', 'set_first_reply',", "allowed to access user_dynamic (View for \"user_dynamic\" only) User is", "User is allowed to create public_holiday (Create for \"public_holiday\" only)", "is allowed to edit org_location (Edit for \"org_location\" only) User", "User is allowed to create msg_keyword (Create for \"msg_keyword\" only)", "an it_issue (Edit for \"it_issue\": ('deadline', 'status', 'title') only) User", "allowed to access product_type (View for \"product_type\" only) User is", "for \"query\" only) User is allowed to search issue (Search", "for their own files (Search for \"file\" only) User is", "for \"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp', 'user') only)", "return_type (View for \"return_type\" only) User is allowed to access", "for \"it_category\" only) User is allowed to edit it_int_prio (Edit", "\"time_record\" only) User is allowed to create work_location (Create for", "('business_responsible', 'department_temp', 'timetracking_by', 'vie_user', 'vie_user_bl_override', 'vie_user_ml') only) User is allowed", "(Create for \"ext_msg\" only) User is allowed to create ext_tracker_state", "only) User is allowed to create contact (Create for \"contact\"", "workpackage if he is owner or project responsible/deputy (Edit for", "from an item with View permission (View for \"msg\" only)", "access time_activity (View for \"time_activity\" only) User is allowed to", "create contract_type (Create for \"contract_type\" only) User is allowed to", "'numeric_effort', 'prio', 'prodcat', 'product', 'related_issues', 'related_support', 'release', 'responsible', 'return_type', 'sap_ref',", "only) User is allowed to edit user_contact (Edit for 
\"user_contact\"", "query (View for \"query\" only) User is allowed to access", "'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text',", "only) User is allowed to view time record if he", "supervisor or substitute supervisor of the owner of the daily", "is allowed to edit location (Edit for \"location\" only) User", "allowed to access daily_record (View for \"daily_record\" only) User is", "View on file if file is linked from an item", "to edit analysis_result (Edit for \"analysis_result\" only) User is allowed", "ext_tracker (Create for \"ext_tracker\" only) User is allowed to create", "or supervisor or timetracking-by user (View for \"daily_record\" only) User", "User is allowed to create artefact (Create for \"artefact\" only)", "access daily_record_status (View for \"daily_record_status\" only) User is allowed to", "user (Edit for \"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files',", "Edit permission (Edit for \"msg\" only) User is allowed View", "via containers (View for \"issue\": ['activity', 'actor', 'area', 'category', 'closed',", "(Edit for \"daily_record\": ('required_overtime', 'weekend_allowed') only) User is allowed Edit", "only) User is allowed to create time_activity (Create for \"time_activity\"", "('id', 'nickname', 'username') only) External users are allowed to access", "for \"overtime_correction\" only) User is allowed to view time record", "to edit workpackage if he is time category owner or", "to create support (Create for \"support\" only) User is allowed", "only) User is allowed to edit dynamic user data if", "View on (View for \"user_dynamic\": ('id', 'sap_cc', 'user', 'valid_from', 'valid_to')", "nosy list of time project (View for \"time_report\" only) User", "only) User is allowed Edit on (Edit for \"time_wp\": ('project',)", "to access contact (View for \"contact\" only) User is 
allowed", "User is allowed to create support (Create for \"support\" only)", "User is allowed to create user_dynamic (Create for \"user_dynamic\" only)", "to access reference (View for \"reference\" only) User is allowed", "(Edit for \"doc\" only) User is allowed to edit doc_category", "User is allowed to create time_report (Create for \"time_report\" only)", "only) User is allowed to search time_wp (Search for \"time_wp\":", "to create time_project (Create for \"time_project\" only) User is allowed", "only view/edit records with the correct domain (View for \"user_dynamic\"", "one of the time_records for that day (View for \"daily_record\"", "is allowed View on it_issue if it_issue is non-confidential or", "for \"overtime_correction\" only) User is allowed to access time_record (View", "User is allowed to access reporting_group (View for \"reporting_group\" only)", "\"user\": ('csv_delimiter', 'hide_message_files', 'password', 'timezone') only) Users are allowed to", "for \"room\" only) (Retire for \"room\" only) User is allowed", "for \"test_level\" only) User is allowed to access time_activity (View", "if he's the owner of the contact or the contact", "(Rest Access) User may access the web interface (Web Access)", "for that day (View for \"daily_record\" only) Role \"user_view\": User", "allowed to edit user_contact (Edit for \"user_contact\" only) Users may", "allowed to create queries (Create for \"query\" only) User is", "only) User is allowed to create cost_center_permission_group (Create for \"cost_center_permission_group\"", "is allowed to edit cost_center (Edit for \"cost_center\" only) User", "'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'room', 'sex', 'status',", "'cur_est_begin', 'cur_est_end', 'deadline', 'depends', 'doc_issue_status', 'earliest_start', 'effective_prio', 'effort_hours', 'external_users', 'files',", "User is allowed to edit ext_msg (Edit for \"ext_msg\" only)", 
"\"leave_status\" only) User is allowed to access location (View for", "ext_tracker_type (View for \"ext_tracker_type\" only) Role \"msgsync\": (Search for \"msg\":", "(Edit for \"daily_record_freeze\": ('frozen',) only) User is allowed to edit", "'substitute', 'supervisor', 'timezone', 'title', 'tt_lines', 'username') only) User is allowed", "Role \"supportadmin\": User is allowed to access analysis_result (View for", "Role \"project_view\": User is allowed to access time_project (View for", "users are allowed to access issue if they are on", "\"vac_aliq\" only) User is allowed to access vacation_report (View for", "to access customer_agreement (View for \"customer_agreement\" only) User is allowed", "he is the supervisor or the person to whom approvals", "is allowed to access auto_wp (View for \"auto_wp\" only) User", "(View for \"support\" only) User is allowed to access absence", "'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'reduced_activity_list', 'roles', 'room',", "is allowed to search issue (Search for \"issue\" only) User", "\"ext_tracker\" only) User is allowed to access ext_tracker_state (View for", "allowed to access doc_category (View for \"doc_category\" only) User is", "allowed View on file if file is linked from an", "User is allowed Edit on file if file is linked", "\"room\" only) User is allowed to edit sap_cc (Edit for", "ext_tracker_type (View for \"ext_tracker_type\" only) User is allowed to create", "Role \"sec-incident-nosy\": User is allowed to access it_int_prio (View for", "'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) User is allowed", "it_project (Create for \"it_project\" only) User is allowed to create", "is allowed to edit work_location (Edit for \"work_location\" only) Role", "for \"leave_submission\" only) User is allowed to create vacation_correction (Create", "the contact (Edit for \"user_contact\": ('visible',) only) User is 
allowed", "for \"user\": ('activity', 'actor', 'creation', 'creator', 'firstname', 'lastname', 'realname', 'username')", "('group_lead', 'team_lead') only) User is allowed Edit on (Edit for", "for \"sup_warranty\" only) User is allowed to access test_level (View", "only) User is allowed to create keyword (Create for \"keyword\"", "'position_text', 'room'] only) Role \"external\": (Search for \"ext_tracker_state\": ('id', 'issue')", "may also be seen (View for \"time_record\" only) User is", "for \"sap_cc\" only) User is allowed to create time_activity (Create", "is allowed to create queries (Create for \"query\" only) User", "details (Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'password', 'timezone') only) Users", "only) User may access the web interface (Web Access) User", "frozen at the given date (Edit for \"daily_record_freeze\": ('frozen',) only)", "data if he/she is in group HR-Org-Location and in the", "'tt_lines', 'vie_user'] only) Role \"dom-user-edit-office\": User is allowed to create", "is allowed to edit time_activity (Edit for \"time_activity\" only) User", "(Edit for \"ext_tracker_state\" only) User is allowed to edit if", "\"doc\" only) User is allowed to create doc_category (Create for", "only) (Retire for \"cost_center_permission_group\" only) User is allowed to create", "in the domain_permission for the user (View for \"user\": ['clearance_by',", "'epic_key', 'has_expiration_date', 'is_extern', 'is_public', 'id', 'name', 'project', 'responsible', 'time_end', 'time_start',", "for \"reference\" only) Role \"dom-user-edit-facility\": Users may view/edit user records", "'status', 'time_wp', 'user') only) User may edit own leave submissions", "\"functional_role\" only) User is allowed to access it_category (View for", "User is allowed to create doc (Create for \"doc\" only)", "the web interface (Web Access) Role \"cc-permission\": (Restore for \"cost_center_permission_group\"", "is allowed to access vacation_correction (View for 
\"vacation_correction\" only) User", "(Create for \"sup_classification\" only) User is allowed to create support", "\"reference\" only) Role \"dom-user-edit-facility\": Users may view/edit user records for", "User is allowed to access user_status (View for \"user_status\" only)", "edit time_records owned by user (View for \"time_record\" only) Users", "access auto_wp (View for \"auto_wp\" only) User is allowed to", "is allowed to create area (Create for \"area\" only) User", "'responsible', 'status') only) User is allowed Edit on (Edit for", "allowed to create user (Create for \"user\" only) User is", "User is allowed to create time_wp (Create for \"time_wp\" only)", "allowed to edit ext_tracker_state (Edit for \"ext_tracker_state\" only) User is", "is allowed to create daily_record (Create for \"daily_record\" only) User", "(Edit for \"organisation\" only) User is allowed to edit product_family", "for \"query\" only) User is allowed to edit time category", "only) User is allowed to access sup_type (View for \"sup_type\"", "allowed to access return_type (View for \"return_type\" only) User is", "(Create for \"return_type\" only) User is allowed to create sup_classification", "domain (Edit for \"user_dynamic\" only) May only view/edit records with", "msg is linked from an item with View permission (View", "item with View permission (View for \"msg\" only) User is", "is allowed Edit on (Edit for \"file\": ('name', 'type') only)", "'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Users", "('address', 'alternate_addresses', 'nickname', 'password', 'timezone', 'username') only) User is allowed", "User is allowed to view dynamic user data if he/she", "Role \"external\": (Search for \"ext_tracker_state\": ('id', 'issue') only) (Search for", "for \"location\" only) User is allowed to create org_location (Create", "access ext_tracker_state (View for \"ext_tracker_state\" only) User is allowed to", "is allowed to access 
prodcat (View for \"prodcat\" only) User", "\"org_group\" only) User is allowed to access org_location (View for", "'number_effected', 'numeric_effort', 'prio', 'prodcat', 'product', 'related_issues', 'related_support', 'release', 'responsible', 'return_type',", "is allowed to edit workpackage if he is time category", "is allowed to see all details on work package or", "department (Edit for \"department\" only) User is allowed to edit", "\"it_project\" only) User is allowed View on msg if msg", "\"cost_center_group\" only) User is allowed to create cost_center_status (Create for", "only) User is allowed to edit sup_classification (Edit for \"sup_classification\"", "allowed to access msg_keyword (View for \"msg_keyword\" only) User is", "permission (View for \"query\" only) Users may see daily record", "for \"doc_issue_status\" only) User is allowed to access doc_status (View", "allowed for this user (also applies to timetracking by, supervisor", "access mailgroup (View for \"mailgroup\" only) User is allowed to", "are attached to that daily_record) if the user owns the", "allowed to access it_project (View for \"it_project\" only) Role \"staff-report\":", "for \"functional_role\" only) User is allowed to access it_category (View", "allowed to create department (Create for \"department\" only) User is", "item with Edit permission (Edit for \"msg\" only) User is", "(View for \"daily_record_freeze\" only) User is allowed to view overtime", "User is allowed View on (View for \"user\": ('business_responsible', 'planning_role',", "\"support\" only) User is allowed to access absence (View for", "is allowed Edit on (Edit for \"daily_record\": ('required_overtime', 'weekend_allowed') only)", "get the Role \"User\" Role \"admin\": User may access the", "\"sex\" only) User is allowed to access status (View for", "User is allowed to edit org_location (Edit for \"org_location\" only)", "('name', 'order') only) (Restore for \"room\" only) (Retire for \"room\"", "on msg if msg is 
linked from an item with", "'position_text', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines',", "he is time category owner or deputy (Edit for \"time_wp\":", "to create issue (Create for \"issue\" only) User is allowed", "(Create for \"safety_level\" only) User is allowed to create severity", "\"time_project\": ('approval_hr', 'approval_required', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'no_overtime', 'no_overtime_day', 'only_hours',", "allowed View on (View for \"category\": ('id', 'name') only) User", "only) User is allowed to create issue (Create for \"issue\"", "(Edit for \"public_holiday\" only) User is allowed to edit query", "only) User is allowed to access keyword (View for \"keyword\"", "'wp_no') only) (View for \"time_project\": ('activity', 'actor', 'creation', 'creator', 'deputy',", "for \"organisation\" only) User is allowed to create overtime_correction (Create", "for \"sup_classification\" only) User is allowed to edit support (Edit", "cost_center_group (View for \"cost_center_group\" only) User is allowed to access", "'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'title', 'tt_lines', 'username') only)", "Edit on (Edit for \"user\": ('ad_domain', 'nickname', 'password', 'pictures', 'roles',", "is allowed to access it_request_type (View for \"it_request_type\" only) User", "to access vac_aliq (View for \"vac_aliq\" only) User is allowed", "\"hr\": (Edit for \"overtime_period\": ('name', 'order') only) (Restore for \"room\"", "is allowed to edit overtime correction if the overtime correction", "Users are allowed to view some of their details (View", "sap_cc (Edit for \"sap_cc\" only) User is allowed to edit", "for \"contract_type\" only) User is allowed to create user_contact (Create", "of the daily record. 
If user has role HR-Org-Location and", "interface (Rest Access) User may access the web interface (Web", "'reporting_group', 'work_location') only) User is allowed to access time_project (View", "('status',) only) User is allowed to see time record if", "(Edit for \"contact\" only) User is allowed to edit customer", "create reference (Create for \"reference\" only) User is allowed to", "is allowed to create cost_center_group (Create for \"cost_center_group\" only) User", "is allowed to access summary_report (View for \"summary_report\" only) User", "delegated (View for \"leave_submission\" only) User is allowed to view", "'queries', 'realname', 'status', 'timezone', 'username') only) User is allowed View", "(Edit for \"room\" only) User is allowed to edit sap_cc", "to access analysis_result (View for \"analysis_result\" only) User is allowed", "only) User is allowed to search for their own files", "User is allowed to access user (View for \"user\" only)", "allowed to edit time_project_status (Edit for \"time_project_status\" only) User is", "\"user_dynamic\" only) User is allowed to view freeze information if", "a transitive permission via containers (Edit for \"issue\": ['activity', 'actor',", "for \"auto_wp\" only) User is allowed to create daily_record_freeze (Create", "it_project (View for \"it_project\" only) Role \"sec-incident-responsible\": User is allowed", "allowed Edit on support if support is non-confidential or user", "\"user_contact\": ('visible',) only) User is allowed to edit several fields", "domain_permission (Edit for \"domain_permission\" only) User is allowed to edit", "to edit auto_wp (Edit for \"auto_wp\" only) User is allowed", "is allowed to edit return_type (Edit for \"return_type\" only) User", "for \"room\" only) User is allowed to edit room (Edit", "only) User is allowed to access location (View for \"location\"", "'nickname', 'pictures', 'position_text', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 
'sync_foreign_key',", "\"absence_type\" only) User is allowed to access analysis_result (View for", "marked visible (View for \"user_contact\" only) User is allowed to", "for \"time_wp\": ('name', 'project') only) User is allowed to view/edit", "('name', 'project') only) User is allowed to view/edit workpackage if", "to search time_record (Search for \"time_record\" only) User is allowed", "create status_transition (Create for \"status_transition\" only) User is allowed to", "for \"room\" only) User is allowed to access safety_level (View", "allowed View on issue if issue is non-confidential or user", "it_request_type (Create for \"it_request_type\" only) User is allowed to create", "see daily record if they may see one of the", "'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'name', 'nosy', 'only_hours', 'op_project', 'overtime_reduction', 'responsible',", "is allowed to create mailgroup (Create for \"mailgroup\" only) User", "contact if he's the owner of the contact or the", "('status',) only) User is allowed to view leave submission if", "'realname', 'room', 'subst_active', 'substitute', 'timezone', 'tt_lines') only) User is allowed", "\"analysis_result\" only) User is allowed to access contact (View for", "for \"file\" only) User is allowed to search for their", "(Edit for \"query\" only) User is allowed to edit reporting_group", "Role \"facility\": (Restore for \"room\" only) (Retire for \"room\" only)", "product (View for \"product\" only) User is allowed to access", "edit customer_agreement (Edit for \"customer_agreement\" only) User is allowed to", "category (View for \"time_wp\" only) User or Timetracking by user", "\"ext_msg\" only) User is allowed to edit ext_tracker_state (Edit for", "(Create for \"it_project\" only) User is allowed to create it_request_type", "(Edit for \"issue\" only) User is allowed to edit keyword", "only) User is allowed to access customer (View for \"customer\"", "'creation', 'creator', 'id', 'queries', 
'realname', 'status', 'timezone', 'username') only) User", "only) User may access the rest interface (Rest Access) User", "'lot', 'messages', 'nosy', 'number_effected', 'numeric_effort', 'prio', 'prodcat', 'product', 'related_issues', 'related_support',", "is allowed to access reference (View for \"reference\" only) User", "only) User is allowed to access analysis_result (View for \"analysis_result\"", "for \"test_level\" only) Role \"it\": Create (Create for \"user_contact\" only)", "for \"product_family\" only) User is allowed to create public_holiday (Create", "Edit permission (Edit for \"msg\" only) User is allowed to", "User is allowed to access summary_type (View for \"summary_type\" only)", "edit status (Edit for \"status\" only) User is allowed to", "(Create for \"room\" only) User is allowed to create uc_type", "(Edit for \"category\" only) User is allowed to edit doc_issue_status", "is allowed to access location (View for \"location\" only) User", "doc_issue_status (Create for \"doc_issue_status\" only) User is allowed to create", "to view time record data if he/she is in group", "only) User is allowed to access ext_tracker_type (View for \"ext_tracker_type\"", "to access work_location (View for \"work_location\" only) User is allowed", "for \"leave_submission\" only) User is allowed to create msg (Create", "(View for \"overtime_correction\" only) User is allowed to access time_record", "allowed to edit mailgroup (Edit for \"mailgroup\" only) User is", "'status', 'work_location', 'wps') only) (Search for \"time_wp\": ('activity', 'actor', 'auto_wp',", "on nosy list (View for \"issue\" only) User is allowed", "to edit time category if the status is \"Open\" and", "edit org_location (Edit for \"org_location\" only) User is allowed to", "on (Edit for \"sap_cc\": ('group_lead', 'team_lead') only) User is allowed", "it_issue (Nosy for \"it_issue\" only) User may get nosy messages", "only) User is allowed to access safety_level (View for \"safety_level\"", 
"(Create for \"room\" only) User is allowed to create sap_cc", "Role \"staff-report\": Role \"sub-login\": Role \"summary_view\": Role \"supportadmin\": User is", "to create kind (Create for \"kind\" only) User is allowed", "is a transitive permission via containers (View for \"issue\": ['activity',", "'lunch_start', 'password', 'queries', 'realname', 'room', 'subst_active', 'substitute', 'timezone', 'tt_lines') only)", "for \"file\" only) User is allowed Edit on issue if", "Roles through the web (Web Roles) Role \"itview\": User is", "only) May only view/edit records with the correct domain (Edit", "contract_type (View for \"contract_type\" only) User is allowed to create", "is allowed to access contact (View for \"contact\" only) User", "time_wp (View for \"time_wp\" only) User is allowed to access", "User is allowed Edit on (Edit for \"daily_record\": ('status', 'time_record')", "the user (View for \"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname',", "allowed to edit ext_tracker_state (Edit for \"ext_tracker_state\" only) Role \"nosy\":", "(View for \"user_contact\" only) User is allowed to view leave", "for \"kind\" only) User is allowed to create msg_keyword (Create", "User is allowed to create test_level (Create for \"test_level\" only)", "is allowed to edit several fields if he is Responsible", "is non-confidential or user is on nosy list (View for", "for \"time_project\" only) User is allowed to access time_report (View", "to edit absence (Edit for \"absence\" only) User is allowed", "create contact (Create for \"contact\" only) User is allowed to", "queries (Edit for \"query\" only) User is allowed to edit", "only) User or Timetracking by user may edit time_records owned", "to access sap_cc (View for \"sap_cc\" only) User is allowed", "for \"absence\" only) User is allowed to access absence_type (View", "User is allowed Edit on (Edit for \"time_project\": ('group_lead', 'team_lead')", "only) User is allowed Edit on (Edit 
for \"user\": ('ad_domain',", "User is allowed to create user (Create for \"user\" only)", "User is allowed to create msg (Create for \"msg\" only)", "edit analysis_result (Edit for \"analysis_result\" only) User is allowed to", "'emails', 'execution', 'external_ref', 'files', 'goods_received', 'goods_sent', 'lot', 'messages', 'nosy', 'number_effected',", "create it_project (Create for \"it_project\" only) User is allowed to", "('activity', 'actor', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id',", "for \"file\" only) User is allowed to view their own", "daily_record or has role 'HR' or 'Controlling', or the user", "\"time_project_status\" only) User is allowed to access time_wp_group (View for", "\"ext_tracker_type\" only) User is allowed to create ext_msg (Create for", "is allowed to create room (Create for \"room\" only) User", "edit uc_type (Edit for \"uc_type\" only) User may manipulate user", "access it_request_type (View for \"it_request_type\" only) User is allowed to", "\"organisation\" only) User is allowed to create overtime_correction (Create for", "only) User is allowed Edit on (Edit for \"daily_record\": ('required_overtime',", "'nosy', 'organisation', 'responsible', 'status') only) User is allowed Edit on", "User is allowed Edit on (Edit for \"department\": ('doc_num',) only)", "\"overtime_correction\" only) User is allowed to access query (View for", "to create artefact (Create for \"artefact\" only) User is allowed", "for \"time_wp\": ('cost_center', 'is_public', 'name', 'responsible', 'time_wp_summary_no', 'wp_no') only) User", "Role \"dom-user-edit-gtt\": (Search for \"user_dynamic\" only) May only view/edit records", "to create mailgroup (Create for \"mailgroup\" only) User is allowed", "'files', 'nosy') only) User is allowed Edit on support if", "'room', 'subst_active', 'substitute', 'timezone', 'tt_lines') only) User is allowed to", "is allowed to edit doc_category (Edit for 
\"doc_category\" only) User", "is allowed to create time_activity_perm (Create for \"time_activity_perm\" only) User", "User is allowed View on issue if issue is non-confidential", "may edit time_records owned by user (Retire for \"time_record\" only)", "\"daily_record\": ('status', 'time_record') only) User is allowed to access daily", "on (Edit for \"daily_record\": ('status', 'time_record') only) User is allowed", "is allowed to access daily_record (View for \"daily_record\" only) User", "Edit on (Edit for \"user\": ('address', 'alternate_addresses', 'nickname', 'password', 'timezone',", "User is allowed to access time_activity (View for \"time_activity\" only)", "edit (some of) their own user details (Edit for \"user\":", "status_transition (Edit for \"status_transition\" only) User is allowed to edit", "issue if they are on the list of allowed external", "is allowed to edit mailgroup (Edit for \"mailgroup\" only) User", "only) User may see time report if reponsible or deputy", "dynamic user record (Edit for \"user_dynamic\" only) User is allowed", "own user details (View for \"user\": ('entry_date', 'planning_role') only) User", "for \"area\" only) User is allowed to edit category (Edit", "(View for \"query\" only) Users may see daily record if", "on (Edit for \"department\": ('doc_num',) only) User is allowed to", "daily record if he is owner or supervisor or timetracking-by", "may view everything (View) Role \"anonymous\": User may access the", "'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) User is", "is non-confidential or user is on nosy list (Edit for", "User is allowed to create it_request_type (Create for \"it_request_type\" only)", "for \"time_activity\" only) User is allowed to access time_activity_perm (View", "edit time_records owned by user (Retire for \"time_record\" only) User", "\"organisation\" only) User is allowed to edit location (Edit for", "allowed Edit on (Edit for \"file\": ('name', 
'type') only) User", "for \"ext_tracker_state\" only) User is allowed to access ext_tracker_type (View", "only) (Search for \"time_activity_perm\" only) (Search for \"time_record\" only) (Search", "\"time_wp_group\" only) User is allowed to access time_wp_summary_no (View for", "\"issue\" only) User is allowed to create it_issue (Create for", "is allowed to edit time_wp (Edit for \"time_wp\" only) User", "cost_center_permission_group (Edit for \"cost_center_permission_group\" only) Role \"contact\": User is allowed", "'tt_lines', 'username', 'vie_user'] only) Role \"dom-user-edit-hr\": (Search for \"user_dynamic\" only)", "is allowed to access product (View for \"product\" only) User", "(Create for \"contract_type\" only) User is allowed to create leave_submission", "for \"contract_type\" only) User is allowed to edit leave_submission (Edit", "edit auto_wp (Edit for \"auto_wp\" only) User is allowed to", "(Edit for \"it_issue\": ('responsible',) only) User is allowed to edit", "is allowed to edit user_functional_role (Edit for \"user_functional_role\" only) Role", "Edit on (Edit for \"daily_record\": ('required_overtime', 'weekend_allowed') only) User is", "(View for \"it_prio\" only) User is allowed to access it_project_status", "only) User is allowed to access prodcat (View for \"prodcat\"", "to access contact_type (View for \"contact_type\" only) User is allowed", "'nickname', 'username') only) External users are allowed to access issue", "contact (Create for \"contact\" only) User is allowed to edit", "access contact (View for \"contact\" only) User is allowed to", "to create safety_level (Create for \"safety_level\" only) User is allowed", "is allowed to access user_contact (View for \"user_contact\" only) User", "'user') only) User may edit own leave submissions (View for", "view contact if he's the owner of the contact or", "only) User is allowed View on issue if issue is", "User is allowed to edit absence (Edit for \"absence\" only)", "\"auto_wp\" only) 
User is allowed to create daily_record_freeze (Create for", "User is allowed to edit it_project (Edit for \"it_project\" only)", "\"user\": ('roles',) only) User is allowed View on (View for", "'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only) Users may view/edit user", "time_report (Create for \"time_report\" only) User is allowed to edit", "view their own and public queries for classes where they", "\"query\" only) User is allowed to edit reporting_group (Edit for", "\"test_level\" only) User is allowed to access time_activity (View for", "(Create for \"time_record\" only) User is allowed to create work_location", "allowed to access category (View for \"category\" only) User is", "'room'] only) Users may view/edit user records for ad_domain for", "User is allowed to create ext_msg (Create for \"ext_msg\" only)", "list (Edit for \"support\": ('analysis_end', 'analysis_result', 'analysis_start', 'bcc', 'business_unit', 'category',", "(Edit for \"user_dynamic\" only) May only view/edit records with the", "HR-Org-Location and in the same Org-Location as the given user", "User is allowed to view their own messages (View for", "(Create for \"category\" only) User is allowed to create doc_issue_status", "for \"daily_record\" only) User is allowed to access daily_record_freeze (View", "\"ext_tracker_state\" only) Role \"nosy\": User may get nosy messages for", "only) User is allowed to edit time_activity (Edit for \"time_activity\"", "\"time_wp\": ('project',) only) User is allowed View on (View for", "(View for \"customer\" only) User is allowed to access customer_agreement", "to edit some of their details (Edit for \"user\": ('csv_delimiter',", "'keywords', 'kind', 'maturity_index', 'messages', 'needs', 'nosy', 'numeric_effort', 'part_of', 'planned_begin', 'planned_end',", "is allowed to create absence (Create for \"absence\" only) User", "for \"leave_submission\" only) User is allowed to search support (Search", "\"daily_record_freeze\" only) 
User is allowed to access leave_submission (View for", "(Create for \"severity\" only) User is allowed to create status", "only) User is allowed to view contact if he's the", "only) User is allowed to access domain_permission (View for \"domain_permission\"", "User is allowed to access daily record if he is", "for \"user_functional_role\" only) Role \"hr\": (Edit for \"overtime_period\": ('name', 'order')", "view/edit user records for ad_domain for which they are in", "for \"it_project\" only) User is allowed to edit it_request_type (Edit", "is allowed to create it_issue (Create for \"it_issue\" only) User", "\"user\": ('business_responsible', 'department_temp', 'timetracking_by', 'vie_user', 'vie_user_bl_override', 'vie_user_ml') only) User is", "for \"it_int_prio\" only) User is allowed to create it_issue (Create", "access sap_cc (View for \"sap_cc\" only) User is allowed to", "product_type (View for \"product_type\" only) User is allowed to access", "for \"msg\" only) User is allowed to create queries (Create", "for \"organisation\" only) User is allowed to access overtime_period (View", "(Edit for \"user\": ['room'] only) Users may view/edit user records", "allowed to access reference (View for \"reference\" only) User is", "the list of allowed external users or there is a", "allowed to access vac_aliq (View for \"vac_aliq\" only) User is", "for \"time_report\" only) User may use the email interface (Email", "access time_project (View for \"time_project\" only) User is allowed to", "the web interface (Web Access) User may access the xmlrpc", "has role HR-Org-Location and is in the same Org-Location as", "'last_day', 'status', 'time_wp', 'user') only) User may edit own leave", "only) Role \"it\": Create (Create for \"user_contact\" only) User is", "to access vacation_report (View for \"vacation_report\" only) User is allowed", "only) User is allowed to create doc (Create for \"doc\"", "only) User is allowed to create sup_classification (Create for 
\"sup_classification\"", "(View for \"time_record\" only) User is allowed to access user_contact", "(Create for \"contact\" only) User is allowed to create customer", "only) User is allowed to edit ext_tracker (Edit for \"ext_tracker\"", "user_status (Search for \"user\": ('status',) only) User is allowed to", "only) User is allowed to access cost_center_status (View for \"cost_center_status\"", "user is on nosy list (View for \"issue\" only) User", "create support (Create for \"support\" only) User is allowed to", "\"severity\" only) User is allowed to edit status (Edit for", "for \"domain_permission\" only) User is allowed to create it_category (Create", "(View for \"msg_keyword\" only) User is allowed to access safety_level", "user (View for \"time_record\" only) Users are allowed to view", "for \"sup_prio\" only) User is allowed to access sup_status (View", "allowed to access user_contact (View for \"user_contact\" only) User is", "\"user_functional_role\" only) (Retire for \"user_functional_role\" only) User is allowed Edit", "for \"ext_msg\" only) User is allowed to access ext_tracker_state (View", "'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id', 'is_extern', 'is_public', 'name', 'project',", "edit ext_tracker (Edit for \"ext_tracker\" only) User is allowed to", "only) User is allowed to create msg (Create for \"msg\"", "(and time_records that are attached to that daily_record) if the", "it_issue (Edit for \"it_issue\": ('deadline', 'status', 'title') only) User is", "(Search for \"ext_tracker_state\": ('id', 'issue') only) (Search for \"user\": ('id',", "for \"status_transition\" only) User is allowed to access summary_report (View", "\"time_activity\" only) User is allowed to edit time_activity_perm (Edit for", "'bookers', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id', 'is_extern',", "only) Role \"time-report\": User is allowed to access time_report 
(View", "(Create for \"location\" only) User is allowed to create org_location", "allowed to edit severity (Edit for \"severity\" only) User is", "only) User is allowed to create cost_center_status (Create for \"cost_center_status\"", "User is allowed to access work_location (View for \"work_location\" only)", "(Edit for \"it_request_type\" only) User is allowed to edit mailgroup", "(Create for \"doc_issue_status\" only) User is allowed to create ext_tracker", "User is allowed to create category (Create for \"category\" only)", "is allowed Edit on it_issue if it_issue is non-confidential or", "for \"category\" only) User is allowed to access contact (View", "create analysis_result (Create for \"analysis_result\" only) User is allowed to", "access vacation_report (View for \"vacation_report\" only) User is allowed to", "is allowed to access room (View for \"room\" only) User", "\"msg\" only) User is allowed to access area (View for", "create product_family (Create for \"product_family\" only) User is allowed to", "is allowed Edit on issue if issue is non-confidential or", "is allowed to access msg_keyword (View for \"msg_keyword\" only) User", "User is allowed to view leave submission if he is", "'release', 'responsible', 'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title'] only) User", "for \"sup_classification\" only) User is allowed to access sup_execution (View", "access issue if they are on the list of allowed", "to view (some of) their own user details (View for", "\"file\" only) User is allowed to search for their own", "allowed to create query (Create for \"query\" only) User is", "allowed to search for their own files (Search for \"file\"", "group HR-Org-Location and in the same Org-Location as the given", "to edit leave_submission (Edit for \"leave_submission\" only) User is allowed", "for \"contract_type\" only) User is allowed to access daily_record (View", "allowed to create uc_type (Create for \"uc_type\" only) User is", 
"overtime_period (View for \"overtime_period\" only) User is allowed to access", "to access public_holiday (View for \"public_holiday\" only) User is allowed", "to see time record if he is allowed to see", "(Edit for \"sap_cc\": ('group_lead', 'purchasing_agents', 'team_lead') only) User is allowed", "Edit on (Edit for \"file\": ('name', 'type') only) User is", "(Edit for \"vacation_correction\" only) Role \"issue_admin\": User is allowed Edit", "is allowed Edit on (Edit for \"sap_cc\": ('group_lead', 'team_lead') only)", "nosy list (Edit for \"issue\" only) User is allowed Edit", "is allowed to access vacation_report (View for \"vacation_report\" only) User", "\"analysis_result\" only) User is allowed to access area (View for", "only) User is allowed to create status (Create for \"status\"", "on (Edit for \"user\": ('address', 'alternate_addresses', 'nickname', 'password', 'timezone', 'username')", "edit cost_center_status (Edit for \"cost_center_status\" only) User is allowed to", "\"vacation_correction\" only) Role \"hr-org-location\": (Search for \"daily_record_freeze\" only) (Search for", "for \"ext_tracker\" only) User is allowed to edit issue (Edit", "allowed to access kind (View for \"kind\" only) User is", "functional role (View for \"user_functional_role\" only) User may view time", "'title') only) User is allowed to edit their queries (Edit", "only) User is allowed to edit artefact (Edit for \"artefact\"", "allowed to access issue (View for \"issue\" only) User is", "to access org_location (View for \"org_location\" only) User is allowed", "for \"time_record\" only) User is allowed to create time_wp (Create", "for \"cost_center_group\" only) User is allowed to create cost_center_status (Create", "Access) User may view a daily_record (and time_records that are", "package and time category names if he/she has role HR", "User may edit everything (Edit) User may manipulate user Roles", "(View for \"area\" only) User is allowed to access doc_issue_status", 
"('activity', 'actor', 'creation', 'creator', 'firstname', 'lastname', 'realname', 'username') only) Users", "User is allowed to edit product_type (Edit for \"product_type\" only)", "edit workpackage if he is time category owner or deputy", "for \"cost_center_permission_group\" only) Role \"contact\": User is allowed to create", "for \"support\" only) User is allowed to edit analysis_result (Edit", "allowed to edit doc_category (Edit for \"doc_category\" only) User is", "on nosy list of time category or if user is", "to search leave_submission (Search for \"leave_submission\" only) User is allowed", "for doc (Nosy for \"doc\" only) User may get nosy", "\"leave_submission\": ('status',) only) User is allowed to view leave submission", "\"user_dynamic\" only) Users may view/edit user records for ad_domain for", "('contacts',) only) User is allowed to access auto_wp (View for", "'subject', 'summary') only) User is allowed to access ext_msg (View", "or user is on nosy list (Edit for \"support\": ('analysis_end',", "for \"user\": ('contacts',) only) User is allowed to access user_contact", "\"time-report\": User is allowed to access time_report (View for \"time_report\"", "support (Create for \"support\" only) User is allowed to create", "allowed to create user_functional_role (Create for \"user_functional_role\" only) User is", "from an item with View permission (View for \"file\" only)", "view their own files (View for \"file\" only) User may", "allowed View on support if support is non-confidential or user", "(Create for \"absence_type\" only) User is allowed to create room", "'sap_ref', 'send_to_customer', 'serial_number', 'set_first_reply', 'status', 'superseder', 'title', 'type', 'warranty') only)", "record, it may also be seen (View for \"time_record\" only)", "'overtime_reduction') only) User is allowed View on (View for \"user\":", "(View for \"time_wp_group\" only) User is allowed to access time_wp_summary_no", "role HR or HR-Org-Location (View for 
\"time_project\": ('name',) only) User", "edit ext_tracker_state (Edit for \"ext_tracker_state\" only) User is allowed to", "view work package if responsible for it, if user is", "allowed to access time_project (View for \"time_project\" only) User is", "is responsible for the time category (Edit for \"time_project\": ('deputy',", "same Org-Location as the given user (View for \"user_dynamic\" only)", "only) User is allowed to edit reference (Edit for \"reference\"", "only) Role \"sec-incident-responsible\": User is allowed to access it_int_prio (View", "'team_lead') only) User is allowed Edit on (Edit for \"time_project\":", "only) User is allowed to create status_transition (Create for \"status_transition\"", "Timetracking by user may edit time_records owned by user (View", "only) Role \"msgsync\": (Search for \"msg\": ('date', 'id') only) User", "(Edit for \"query\" only) User is allowed to retire their", "'password', 'timezone') only) Users are allowed to view some of", "(View for \"it_project\" only) Role \"msgedit\": (Search for \"msg\": ('date',", "(Nosy for \"doc\" only) User may get nosy messages for", "(Create for \"doc\" only) User is allowed to create doc_category", "Org-Location as the given user (View for \"time_record\" only) Role", "(Create for \"query\" only) User is allowed to create reporting_group", "for \"time_project\": ('group_lead', 'team_lead') only) User is allowed Edit on", "is allowed to edit organisation (Edit for \"organisation\" only) Role", "organisation (Create for \"organisation\" only) User is allowed to create", "\"query\" only) User is allowed to search issue (Search for", "status_transition (View for \"status_transition\" only) User is allowed to access", "(Retire for \"time_record\" only) User or Timetracking by user may", "edit it_request_type (Edit for \"it_request_type\" only) User is allowed to", "'time_wp', 'user') only) User may see time report if reponsible", "only) Role \"msgedit\": (Search for \"msg\": ('date', 'id') 
only) User", "'nosy') only) User is allowed Edit on it_project if it_project", "only) User is allowed to access cost_center (View for \"cost_center\"", "'files', 'files_affected', 'fixed_in', 'id', 'keywords', 'kind', 'maturity_index', 'messages', 'needs', 'nosy',", "Role \"admin\": User may access the rest interface (Rest Access)", "\"ext_tracker_state\" only) User is allowed to edit if he's the", "User is allowed View on (View for \"user\": ('roles',) only)", "is allowed to access mailgroup (View for \"mailgroup\" only) User", "given date (Edit for \"daily_record_freeze\": ('frozen',) only) User is allowed", "only) User is allowed to access room (View for \"room\"", "\"daily_record\" only) User is allowed to access daily_record_status (View for", "to access summary_report (View for \"summary_report\" only) User is allowed", "User is allowed to access sup_type (View for \"sup_type\" only)", "to access doc_status (View for \"doc_status\" only) User is allowed", "user_contact (View for \"user_contact\" only) User is allowed to create", "allowed to edit artefact (Edit for \"artefact\" only) User is", "user_functional_role (Edit for \"user_functional_role\" only) Role \"hr\": (Edit for \"overtime_period\":", "for \"user\" only) User is allowed to create user_contact (Create", "nosy list (Edit for \"it_issue\": ('messages', 'files', 'nosy') only) User", "time_report (Edit for \"time_report\" only) User may edit own file", "'issue') only) (Search for \"user\": ('id', 'nickname', 'username') only) External", "the domain_permission for the user (Edit for \"user\": ['clearance_by', 'contacts',", "(Create for \"auto_wp\" only) User is allowed to create daily_record_freeze", "'is_public_holiday', 'is_special_leave', 'is_vacation', 'no_overtime', 'no_overtime_day', 'only_hours', 'overtime_reduction') only) User is", "\"daily_record\" only) User is allowed to create doc (Create for", "allowed to access area (View for \"area\" only) User is", "'name', 'nosy', 'only_hours', 
'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only)", "User is allowed to search it_project (Search for \"it_project\" only)", "view everything (View) Role \"anonymous\": User may access the web", "(View for \"time_project\": ('name',) only) User is allowed to view", "files (View for \"file\" only) User may access the web", "is allowed to edit their queries (Edit for \"query\" only)", "severity (Create for \"severity\" only) User is allowed to create", "the overtime correction is not frozen (Edit for \"overtime_correction\" only)", "only) Role \"staff-report\": Role \"sub-login\": Role \"summary_view\": Role \"supportadmin\": User", "is allowed to access sup_execution (View for \"sup_execution\" only) User", "for \"sup_status\" only) User is allowed to access sup_type (View", "\"overtime_period\" only) User is allowed to create product_family (Create for", "'id', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only)", "list (Edit for \"it_issue\": ('messages', 'files', 'nosy') only) User is", "for \"user\": ['contacts', 'position_text', 'room'] only) Users may view/edit user", "for \"contact\" only) User is allowed to edit customer (Edit", "for \"user_functional_role\" only) User may view time category if user", "to access time_activity (View for \"time_activity\" only) User is allowed", "is allowed to access functional_role (View for \"functional_role\" only) User", "for \"room\" only) User is allowed to create sap_cc (Create", "is allowed Edit on (Edit for \"user\": ('ad_domain', 'nickname', 'password',", "'nosy', 'only_hours', 'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only) Search", "to edit time_activity_perm (Edit for \"time_activity_perm\" only) User is allowed", "\"domain_permission\" only) User is allowed to edit it_category (Edit for", "for \"mailgroup\" only) User is allowed to access msg_keyword (View", "to search 
daily_record (Search for \"daily_record\" only) User is allowed", "is allowed to access analysis_result (View for \"analysis_result\" only) User", "('required_overtime', 'weekend_allowed') only) User is allowed Edit on (Edit for", "to edit test_level (Edit for \"test_level\" only) Role \"it\": Create", "(View for \"return_type\" only) User is allowed to access sup_classification", "is allowed View on (View for \"user\": ('activity', 'actor', 'address',", "or substitute supervisor of the owner of the daily record", "\"user_contact\" only) User is allowed to view leave submission if", "only) User is allowed to edit it_category (Edit for \"it_category\"", "user is on nosy list (Edit for \"issue\" only) User", "User is allowed to access domain_permission (View for \"domain_permission\" only)", "user (View for \"daily_record\" only) User is allowed to access", "only) User is allowed to edit time_report (Edit for \"time_report\"", "access timesheet (View for \"timesheet\" only) User is allowed to", "\"doc_category\" only) User is allowed to access doc_issue_status (View for", "category or on nosy list of time category or if", "daily record if they may see one of the time_records", "linked from an item with View permission (View for \"file\"", "ext_tracker_state (Create for \"ext_tracker_state\" only) User is allowed to create", "User is allowed to create time_record (Create for \"time_record\" only)", "\"sup_type\" only) User is allowed to access sup_warranty (View for", "only) User is allowed to create time_project (Create for \"time_project\"", "(View for \"overtime_correction\" only) User is allowed to access query", "User is allowed Edit on (Edit for \"location\": ('domain_part',) only)", "only) User is allowed to access sup_execution (View for \"sup_execution\"", "is allowed to access uc_type (View for \"uc_type\" only) User", "\"sup_classification\" only) User is allowed to access support (View for", "(Edit for \"doc_issue_status\" only) User is allowed to edit 
ext_tracker", "\"time_project\" only) User is allowed to create time_project_status (Create for", "to create user (Create for \"user\" only) User is allowed", "allowed to access it_category (View for \"it_category\" only) User is", "to access functional_role (View for \"functional_role\" only) User is allowed", "User is allowed to create work_location (Create for \"work_location\" only)", "for \"user\": ('contacts',) only) User is allowed to access auto_wp", "is department manager of time category (View for \"time_project\" only)", "it_issue (Edit for \"it_issue\" only) User is allowed to edit", "(View for \"overtime_period\" only) User is allowed to access prodcat", "User is allowed View on (View for \"user\": ('activity', 'actor',", "\"contact\" only) User is allowed to edit contact (Edit for", "access msg_keyword (View for \"msg_keyword\" only) User is allowed to", "\"time_wp_summary_no\" only) User is allowed to access timesheet (View for", "create daily_record_freeze (Create for \"daily_record_freeze\" only) User is allowed to", "\"user\": ('ad_domain', 'nickname', 'password', 'pictures', 'roles', 'timetracking_by', 'timezone', 'username') only)", "\"user_functional_role\" only) User is allowed to create user_functional_role (Create for", "for \"status_transition\" only) User is allowed to edit test_level (Edit", "is on nosy list (Edit for \"it_issue\": ('messages', 'files', 'nosy')", "to create sup_classification (Create for \"sup_classification\" only) User is allowed", "(Create for \"leave_submission\" only) User is allowed to create msg", "support (Edit for \"support\" only) Role \"time-report\": User is allowed", "User is allowed to access org_location (View for \"org_location\" only)", "'only_hours', 'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only) (Search for", "('activity', 'actor', 'ad_domain', 'address', 'alternate_addresses', 'business_responsible', 'clearance_by', 'creation', 'creator', 'firstname',", 
"issue (Create for \"issue\" only) User is allowed to create", "also be seen (View for \"daily_record\" only) User may view", "\"work_location\" only) Role \"doc_admin\": User is allowed Edit on (Edit", "(View for \"support\" only) User is allowed to create analysis_result", "status is \"Open\" and he is responsible for the time", "edit time_activity_perm (Edit for \"time_activity_perm\" only) User is allowed to", "view/edit records with the correct domain (View for \"user_dynamic\" only)", "\"it_prio\" only) User is allowed to access it_project_status (View for", "(View for \"query\" only) Role \"facility\": (Restore for \"room\" only)", "is allowed to edit time_activity_perm (Edit for \"time_activity_perm\" only) User", "\"doc\" only) User is allowed to edit doc_category (Edit for", "allowed to create area (Create for \"area\" only) User is", "View on issue if issue is non-confidential or user is", "only) User is allowed to edit overtime correction if the", "\"time_project_status\" only) User is allowed to edit time_wp (Edit for", "(Edit for \"user_contact\" only) Users may view user_dynamic records for", "to access query (View for \"query\" only) User is allowed", "for \"it_request_type\" only) User is allowed to create mailgroup (Create", "\"summary_type\" only) User is allowed to access sup_classification (View for", "User is allowed to access query (View for \"query\" only)", "edit sup_classification (Edit for \"sup_classification\" only) User is allowed to", "artefact (View for \"artefact\" only) User is allowed to access", "to create contact (Create for \"contact\" only) User is allowed", "User is allowed to edit cost_center (Edit for \"cost_center\" only)", "\"msg\": ('author', 'date', 'id', 'keywords', 'subject', 'summary') only) User is", "for \"customer_agreement\" only) User is allowed to edit mailgroup (Edit", "only) User is allowed to access daily_record_status (View for \"daily_record_status\"", "access the xmlrpc interface (Xmlrpc Access) User may 
create everything", "for \"query\" only) User is allowed to create support (Create", "by user (View for \"time_record\" only) Users are allowed to", "'time_wp_summary_no', 'travel', 'wp_no') only) User is allowed to search user_status", "\"reference\" only) User is allowed to access reporting_group (View for", "for \"vac_aliq\" only) User is allowed to access vacation_report (View", "edit kind (Edit for \"kind\" only) User is allowed to", "allowed Edit on (Edit for \"user\": ('ad_domain', 'nickname', 'password', 'pictures',", "or HR-Org-Location (View for \"time_wp\": ('name', 'project') only) User is", "time_wp_summary_no (View for \"time_wp_summary_no\" only) User is allowed to access", "('bookers', 'description', 'epic_key', 'planned_effort', 'time_end', 'time_start', 'time_wp_summary_no') only) User may", "access keyword (View for \"keyword\" only) User is allowed to", "\"category\": ('nosy', 'default_part_of') only) User is allowed to edit doc", "is allowed to access business_unit (View for \"business_unit\" only) User", "'last_day', 'status', 'time_wp', 'user') only) User may see time report", "User is allowed to edit return_type (Edit for \"return_type\" only)", "only) (Restore for \"room\" only) (Retire for \"room\" only) User", "create doc_issue_status (Create for \"doc_issue_status\" only) User is allowed to", "for \"msg\" only) User is allowed to access issue (View", "overtime_period (Create for \"overtime_period\" only) User is allowed to create", "allowed to create sap_cc (Create for \"sap_cc\" only) User is", "'username') only) User is allowed View on (View for \"user\":", "\"user_functional_role\" only) Role \"hr\": (Edit for \"overtime_period\": ('name', 'order') only)", "it_issue (Create for \"it_issue\" only) User is allowed to create", "'tt_lines', 'username', 'vie_user'] only) Users may view/edit user records for", "for \"public_holiday\" only) User is allowed to edit query (Edit", "only) User is allowed View on (View for \"user\": 
('nickname',", "\"overtime_correction\" only) User is allowed to create overtime_period (Create for", "doc_category (Create for \"doc_category\" only) User is allowed to create", "only) User is allowed to access vacation_report (View for \"vacation_report\"", "everything (Retire) User may use the email interface (Email Access)", "is allowed to access org_location (View for \"org_location\" only) User", "(Edit for \"customer\" only) User is allowed to edit customer_agreement", "User is allowed to access ext_tracker_state (View for \"ext_tracker_state\" only)", "is allowed to create time_activity (Create for \"time_activity\" only) User", "only) User is allowed to create uc_type (Create for \"uc_type\"", "(Edit for \"ext_msg\" only) User is allowed to edit ext_tracker_state", "access room (View for \"room\" only) User is allowed to", "person to whom approvals are delegated (View for \"time_record\" only)", "is allowed to access safety_level (View for \"safety_level\" only) User", "(Edit for \"msg\" only) User is allowed to access issue", "allowed to access contact (View for \"contact\" only) User is", "only) User is allowed to view time record data if", "correction if the overtime correction is not frozen (Edit for", "access vacation_correction (View for \"vacation_correction\" only) Role \"hr-org-location\": (Search for", "in work package if booking is allowed for this user", "\"it_category\" only) User is allowed to create it_int_prio (Create for", "uc_type (Create for \"uc_type\" only) User is allowed to create", "user (View for \"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files',", "is allowed to access query (View for \"query\" only) User", "is allowed to edit query (Edit for \"query\" only) User", "(View for \"mailgroup\" only) User is allowed to access return_type", "Role \"doc_admin\": User is allowed Edit on (Edit for \"department\":", "allowed to view dynamic user data if he/she is in", "cost_center 
(Create for \"cost_center\" only) User is allowed to create", "(some of) their own user details (View for \"user\": ('entry_date',", "support is non-confidential or user is on nosy list (Edit", "for \"reporting_group\" only) User is allowed to create room (Create", "User is allowed to create product_family (Create for \"product_family\" only)", "to whom approvals are delegated (View for \"leave_submission\" only) User", "as the given user (View for \"overtime_correction\" only) User is", "(Edit for \"user_dynamic\" only) User is allowed to edit freeze", "email interface (Email Access) User may view everything (View) Role", "only) Role \"user_view\": User is allowed to access user (View", "is allowed to create analysis_result (Create for \"analysis_result\" only) User", "for \"time_activity_perm\" only) User is allowed to create time_record (Create", "is allowed to access support (View for \"support\" only) User", "\"file\" only) User is allowed to view their own messages", "time_activity (Edit for \"time_activity\" only) User is allowed to edit", "'time_wp_summary_no') only) User may access the rest interface (Rest Access)", "for \"sap_cc\" only) User is allowed to create time_record (Create", "(Search for \"it_issue\" only) User is allowed to search it_project", "User is allowed View on (View for \"user_status\": ('name',) only)", "'tt_lines', 'username') only) User is allowed View on (View for", "is allowed Edit on (Edit for \"time_wp\": ('project',) only) User", "is allowed to edit auto_wp (Edit for \"auto_wp\" only) User", "to create doc (Create for \"doc\" only) User is allowed", "to view leave submission if he is the supervisor or", "allowed to access issue if they are on the list", "allowed to access sup_classification (View for \"sup_classification\" only) User is", "User may edit own file (file created by user) (Edit", "manipulate user Roles through the web (Web Roles) Role \"itview\":", "is allowed to access it_prio (View for \"it_prio\" only) User", 
"\"sup_prio\" only) User is allowed to access sup_status (View for", "only) User is allowed to search issue (Search for \"issue\"", "'creator', 'id', 'queries', 'realname', 'status', 'timezone', 'username') only) User is", "the supervisor or the person to whom approvals are delegated", "(Edit for \"doc_status\" only) User is allowed to edit product_type", "(View for \"it_issue_status\" only) User is allowed to access it_prio", "User is allowed to create overtime_correction (Create for \"overtime_correction\" only)", "only) User is allowed to edit absence (Edit for \"absence\"", "User is allowed to create contact (Create for \"contact\" only)", "or has role 'HR' or 'Controlling', or the user is", "only) User is allowed to create leave_submission (Create for \"leave_submission\"", "for \"ext_msg\" only) User is allowed to edit ext_tracker_state (Edit", "if user is department manager of time category (View for", "to edit uc_type (Edit for \"uc_type\" only) Role \"organisation\": User", "is allowed to create time_project_status (Create for \"time_project_status\" only) User", "User is allowed Edit on (Edit for \"time_wp\": ('project',) only)", "'planning_role') only) User is allowed to view contact if he's", "'bcc', 'business_unit', 'category', 'cc', 'cc_emails', 'classification', 'closed', 'confidential', 'customer', 'emails',", "Role \"project\": User is allowed Edit on (Edit for \"time_project\":", "to edit several fields if he is Stakeholder/Responsible for an", "User is allowed to view work package and time category", "\"area\" only) User is allowed to access doc_issue_status (View for", "ext_tracker_state (View for \"ext_tracker_state\" only) User is allowed to access", "user may edit time_records owned by user (Restore for \"time_record\"", "is allowed to create uc_type (Create for \"uc_type\" only) User", "on (Edit for \"daily_record\": ('required_overtime', 'weekend_allowed') only) User is allowed", "Timetracking by user may edit time_records owned by user 
(Edit", "'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only) (Search for \"time_wp\": ('activity',", "User is allowed to access keyword (View for \"keyword\" only)", "for \"time_wp_group\" only) User is allowed to edit time_project_status (Edit", "User may retire everything (Retire) User may use the email", "is allowed to create it_request_type (Create for \"it_request_type\" only) User", "\"time_wp\" only) Role \"sec-incident-nosy\": User is allowed to access it_int_prio", "User is allowed to access business_unit (View for \"business_unit\" only)", "'realname', 'status', 'timezone', 'username') only) User is allowed View on", "'id', 'keywords', 'subject', 'summary') only) User is allowed to access", "this user (also applies to timetracking by, supervisor and approval", "User is allowed to edit mailgroup (Edit for \"mailgroup\" only)", "\"it_issue\" only) User is allowed to create leave_submission (Create for", "may create everything (Create) User may edit everything (Edit) User", "['contacts', 'position_text', 'room'] only) Users may view/edit user records for", "may retire everything (Retire) User may use the email interface", "allowed to edit their queries (Edit for \"query\" only) User", "(View for \"daily_record\" only) Role \"user_view\": User is allowed to", "for \"time_wp\" only) User is allowed to create time_project (Create", "auto_wp (Edit for \"auto_wp\" only) User is allowed to edit", "or timetracking-by user (Edit for \"daily_record\": ('status', 'time_record') only) User", "Edit on (Edit for \"time_project\": ('group_lead', 'purchasing_agents', 'team_lead') only) Role", "on file if file is linked from an item with", "\"vacation_correction\" only) Role \"issue_admin\": User is allowed Edit on msg", "sap_cc (Create for \"sap_cc\" only) User is allowed to create", "\"return_type\" only) User is allowed to create sup_classification (Create for", "(View for \"time_project\" only) User is allowed Edit on (Edit", "contract_type 
(Edit for \"contract_type\" only) User is allowed to edit", "(View for \"location\" only) User is allowed to access mailgroup", "edit test_level (Edit for \"test_level\" only) Role \"it\": Create (Create", "for the user (View for \"user\": ['contacts', 'position_text', 'room'] only)", "view their own overtime information (View for \"overtime_correction\" only) User", "\"test_level\" only) Role \"it\": Create (Create for \"user_contact\" only) User", "User is allowed to access cost_center_permission_group (View for \"cost_center_permission_group\" only)", "\"leave_submission\" only) User is allowed to search support (Search for", "only) User may get nosy messages for it_project (Nosy for", "\"time_report\" only) User may edit own file (file created by", "only) User is allowed to edit absence_type (Edit for \"absence_type\"", "User is allowed to create status_transition (Create for \"status_transition\" only)", "for \"support\": ('analysis_end', 'analysis_result', 'analysis_start', 'bcc', 'business_unit', 'category', 'cc', 'cc_emails',", "User is allowed to edit it_issue (Edit for \"it_issue\" only)", "'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only) (Search for \"time_wp\":", "(Edit for \"reference\" only) Role \"dom-user-edit-facility\": Users may view/edit user", "\"absence\" only) User is allowed to create absence_type (Create for", "(Create for \"product_type\" only) User is allowed to create reference", "(Edit for \"file\" only) User is allowed Edit on msg", "queries for classes where they have search permission (View for", "if file is linked from an item with Edit permission", "(View for \"user_dynamic\" only) User is allowed to access contract_type", "for \"msg\": ('author', 'date', 'id', 'keywords', 'subject', 'summary') only) User", "\"user_contact\" only) Users may view/edit user records for ad_domain for", "\"user_contact\" only) User is allowed Edit on (Edit for \"file\":", "is allowed to access summary_type 
(View for \"summary_type\" only) User", "allowed to create it_issue (Create for \"it_issue\" only) User is", "for \"cost_center_permission_group\" only) User is allowed to edit cost_center_permission_group (Edit", "\"time_project\": ('cost_center', 'department', 'deputy', 'description', 'name', 'nosy', 'organisation', 'responsible', 'status')", "\"location\" only) User is allowed to edit org_location (Edit for", "time_wp (Create for \"time_wp\" only) User is allowed to create", "to access artefact (View for \"artefact\" only) User is allowed", "for \"artefact\" only) User is allowed to edit doc (Edit", "category (View for \"category\" only) User is allowed to access", "to access sup_warranty (View for \"sup_warranty\" only) User is allowed", "time_wp_group (View for \"time_wp_group\" only) User is allowed to access", "containers (Edit for \"issue\": ['activity', 'actor', 'area', 'category', 'closed', 'composed_of',", "issue (Edit for \"issue\" only) User is allowed to edit", "or the contact is marked visible (View for \"user_contact\" only)", "for \"uc_type\" only) User is allowed to create user (Create", "an item with Edit permission (Edit for \"msg\" only) User", "only) User is allowed Edit on (Edit for \"time_project\": ('approval_hr',", "allowed to create auto_wp (Create for \"auto_wp\" only) User is", "is allowed to edit test_level (Edit for \"test_level\" only) Role", "allowed to edit category if he is responsible for it", "User is allowed to create customer (Create for \"customer\" only)", "(View for \"time_wp\": ('name', 'project') only) User is allowed to", "only) User is allowed to create room (Create for \"room\"", "\"daily_record\": ('status', 'time_record') only) User is allowed Edit on (Edit", "for \"issue\" only) User is allowed View on it_issue if", "\"admin\": User may access the rest interface (Rest Access) User", "doc_status (Edit for \"doc_status\" only) User is allowed to edit", "User is allowed to edit doc_category (Edit for \"doc_category\" 
only)", "allowed to access leave_submission (View for \"leave_submission\" only) User is", "for \"time_wp\" only) User is allowed to access vacation_correction (View", "Roles) User may restore everything (Restore) User may retire everything", "that day (View for \"daily_record\" only) Role \"user_view\": User is", "for \"user_dynamic\" only) May only view/edit records with the correct", "support (Create for \"support\" only) User is allowed to edit", "User is allowed to access overtime_period (View for \"overtime_period\" only)", "(Create for \"issue\" only) User is allowed to create msg", "to create analysis_result (Create for \"analysis_result\" only) User is allowed", "or user is on nosy list (Edit for \"it_issue\": ('messages',", "only) User is allowed to access it_project (View for \"it_project\"", "for \"user\": ('entry_date', 'planning_role') only) User is allowed to view", "(Edit for \"file\" only) User is allowed Edit on issue", "'ad_domain', 'address', 'alternate_addresses', 'business_responsible', 'clearance_by', 'creation', 'creator', 'firstname', 'id', 'job_description',", "\"vacation_correction\" only) User is allowed to create cost_center (Create for", "is allowed to edit product_type (Edit for \"product_type\" only) User", "is allowed View on it_project if it_project is non-confidential or", "for \"cost_center_group\" only) User is allowed to access cost_center_permission_group (View", "\"artefact\" only) User is allowed to edit doc (Edit for", "may edit own leave submissions (Edit for \"leave_submission\": ('comment', 'comment_cancel',", "(View for \"analysis_result\" only) User is allowed to access contact", "User is allowed to create mailgroup (Create for \"mailgroup\" only)", "for \"user\": ('activity', 'actor', 'address', 'alternate_addresses', 'creation', 'creator', 'id', 'queries',", "is allowed Edit on (Edit for \"time_project\": ('infosec_req', 'is_extern', 'max_hours',", "'has_expiration_date', 'is_extern', 'is_public', 'id', 'name', 
'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no',", "(Create for \"organisation\" only) User is allowed to create overtime_correction", "on it_issue if it_issue is non-confidential or user is on", "User is allowed Edit on (Edit for \"time_project\": ('cost_center', 'department',", "may use the email interface (Email Access) User may view", "User is allowed to create organisation (Create for \"organisation\" only)", "the contact is marked visible (View for \"user_contact\" only) User", "(View for \"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files', 'job_description',", "User is allowed Edit on (Edit for \"sap_cc\": ('group_lead', 'purchasing_agents',", "the given user (View for \"time_record\" only) Role \"hr-vacation\": User", "cost_center_group (Create for \"cost_center_group\" only) User is allowed to create", "for \"it_prio\" only) User is allowed to access it_project_status (View", "allowed to edit freeze record if not frozen at the", "\"query\" only) User is allowed to access time_project (View for", "(Edit for \"organisation\" only) User is allowed to edit overtime", "(Create for \"doc_category\" only) User is allowed to create doc_status", "allowed to access daily_record_freeze (View for \"daily_record_freeze\" only) User is", "messages for it_issue (Nosy for \"it_issue\" only) User may get", "edit time_records owned by user (Edit for \"time_record\" only) User", "is transitive) or the user is the department manager of", "'is_special_leave', 'is_vacation', 'no_overtime', 'no_overtime_day', 'only_hours', 'overtime_reduction') only) User is allowed", "may access the web interface (Web Access) User may use", "'cc_emails', 'classification', 'closed', 'confidential', 'customer', 'emails', 'execution', 'external_ref', 'files', 'goods_received',", "(View for \"it_project\" only) User is allowed View on msg", "User may get nosy messages for support (Nosy for \"support\"", "access time_wp 
(View for \"time_wp\" only) User is allowed to", "(Edit for \"sup_classification\" only) User is allowed to edit support", "\"time_wp\" only) User is allowed to create time_project (Create for", "User is allowed Edit on (Edit for \"sap_cc\": ('group_lead', 'team_lead')", "User is allowed to access kind (View for \"kind\" only)", "edit contact (Edit for \"contact\" only) Role \"controlling\": User is", "daily_record (View for \"daily_record\" only) User is allowed to access", "allowed to edit query (Edit for \"query\" only) User is", "'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only) Users may view/edit", "only) User is allowed to access vac_aliq (View for \"vac_aliq\"", "is allowed to access sup_classification (View for \"sup_classification\" only) User", "(Edit for \"msg\" only) User is allowed View on file", "for \"user_functional_role\" only) User is allowed Edit on (Edit for", "User is allowed to create sup_classification (Create for \"sup_classification\" only)", "user_dynamic (View for \"user_dynamic\" only) User is allowed to access", "for \"area\" only) User is allowed to access doc_issue_status (View", "allowed to create msg_keyword (Create for \"msg_keyword\" only) User is", "allowed to retire their queries (Retire for \"query\" only) User", "allowed to create sup_classification (Create for \"sup_classification\" only) User is", "for \"it_issue\" only) User is allowed to create leave_submission (Create", "only) Role \"hr-vacation\": User is allowed to access contract_type (View", "'valid_to') only) User is allowed to access contract_type (View for", "(View for \"ext_tracker_type\" only) User is allowed to create ext_msg", "is allowed to retire their queries (Retire for \"query\" only)", "view some of their details (View for \"user\": ('activity', 'actor',", "'execution', 'external_ref', 'files', 'goods_received', 'goods_sent', 'lot', 'messages', 'nosy', 'number_effected', 'numeric_effort',", "issue (Nosy for \"issue\" 
only) User may get nosy messages", "allowed to edit category (Edit for \"category\" only) User is", "allowed to access ext_msg (View for \"ext_msg\" only) User is", "if he's the owner of the contact (Edit for \"user_contact\":", "issue is non-confidential or user is on nosy list (View", "'alternate_addresses', 'nickname', 'password', 'timezone', 'username') only) User is allowed Edit", "only) User is allowed Edit on (Edit for \"user\": ('address',", "allowed to access doc (View for \"doc\" only) User is", "day (View for \"daily_record\" only) Role \"user_view\": User is allowed", "to create time_activity (Create for \"time_activity\" only) User is allowed", "\"time_report\" only) User is allowed to create time_report (Create for", "list of time project (View for \"time_report\" only) User may", "Edit on (Edit for \"time_wp\": ('project',) only) User is allowed", "ext_msg (Edit for \"ext_msg\" only) User is allowed to edit", "issue (Search for \"issue\" only) User is allowed to view", "(View for \"time_wp_summary_no\" only) User is allowed to access timesheet", "for \"return_type\" only) User is allowed to access sup_classification (View", "User is allowed to access time_project (View for \"time_project\" only)", "(View for \"prodcat\" only) User is allowed to access product", "allowed to access it_project_status (View for \"it_project_status\" only) User is", "(Create for \"keyword\" only) User is allowed to create kind", "use the email interface (Email Access) Users are allowed to", "edit department (Edit for \"department\" only) User is allowed to", "allowed to access time_wp_summary_no (View for \"time_wp_summary_no\" only) User is", "is allowed to create time_record (Create for \"time_record\" only) User", "only) User may manipulate user Roles through the web (Web", "to access location (View for \"location\" only) User is allowed", "is allowed to edit vacation_correction (Edit for \"vacation_correction\" only) Role", "to create cost_center_group (Create for 
\"cost_center_group\" only) User is allowed", "to view contact if he's the owner of the contact", "(View for \"vacation_correction\" only) Role \"hr-org-location\": (Search for \"daily_record_freeze\" only)", "record if he is the supervisor or the person to", "(Create for \"cost_center_group\" only) User is allowed to create cost_center_status", "for \"doc_issue_status\" only) User is allowed to edit ext_tracker (Edit", "is allowed to edit room (Edit for \"room\" only) Role", "ext_msg (View for \"ext_msg\" only) User is allowed to access", "\"contract_type\" only) User is allowed to access daily_record (View for", "permission (View for \"file\" only) User is allowed to access", "for \"it_issue\" only) User is allowed to create it_project (Create", "only) User is allowed to edit public_holiday (Edit for \"public_holiday\"", "\"doc_issue_status\" only) User is allowed to access doc_status (View for", "to create time_report (Create for \"time_report\" only) User is allowed", "View on (View for \"user\": ('business_responsible', 'department_temp', 'timetracking_by', 'vie_user', 'vie_user_bl_override',", "\"user_status\" only) User is allowed to access vac_aliq (View for", "(Edit for \"contact\" only) Role \"controlling\": User is allowed Edit", "to access it_project_status (View for \"it_project_status\" only) User is allowed", "only) User is allowed to edit time_project_status (Edit for \"time_project_status\"", "allowed to create daily_record (Create for \"daily_record\" only) User is", "User is allowed to create time_activity_perm (Create for \"time_activity_perm\" only)", "the user (Edit for \"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname',", "to edit dynamic user data if not frozen in validity", "support if support is non-confidential or user is on nosy", "for \"query\" only) Users may see daily record if they", "'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'reduced_activity_list', 
'roles', 'room', 'sex',", "'is_public', 'name', 'responsible', 'time_wp_summary_no', 'wp_no') only) User is allowed to", "allowed to create organisation (Create for \"organisation\" only) User is", "(View for \"test_level\" only) User is allowed to access time_activity", "'username') only) External users are allowed to access issue if", "contact (Create for \"contact\" only) User is allowed to create", "only) User is allowed to access product (View for \"product\"", "(Search for \"time_wp\": ('activity', 'actor', 'auto_wp', 'bookers', 'cost_center', 'creation', 'creator',", "'position_text', 'reduced_activity_list', 'roles', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone',", "(Edit for \"time_wp\": ('cost_center', 'is_public', 'name', 'responsible', 'time_wp_summary_no', 'wp_no') only)", "the record, it may also be seen (View for \"time_record\"", "is allowed to access ext_tracker_state (View for \"ext_tracker_state\" only) User", "the owner of the contact or the contact is marked", "for \"ext_tracker\" only) User is allowed to create issue (Create", "for \"department\" only) User is allowed to edit organisation (Edit", "(Edit for \"customer_agreement\" only) User is allowed to edit mailgroup", "manipulate user Roles through the web (Web Roles) User may", "'nickname', 'pictures', 'position_text', 'reduced_activity_list', 'roles', 'room', 'sex', 'status', 'subst_active', 'substitute',", "is allowed to access artefact (View for \"artefact\" only) User", "allowed to edit cost_center_group (Edit for \"cost_center_group\" only) User is", "'only_hours', 'overtime_reduction') only) User is allowed View on (View for", "'numeric_effort', 'part_of', 'planned_begin', 'planned_end', 'priority', 'release', 'responsible', 'safety_level', 'severity', 'status',", "or the user is supervisor or substitute supervisor of the", "only) User is allowed to access cost_center_permission_group (View for \"cost_center_permission_group\"", "\"doc_status\" only) 
User is allowed to edit product_type (Edit for", "only) User is allowed to create doc_category (Create for \"doc_category\"", "of time category or on nosy list of time category", "role (View for \"user_functional_role\" only) User may view time category", "'work_location', 'wps') only) Search (Search for \"user_contact\" only) User is", "is allowed to edit domain_permission (Edit for \"domain_permission\" only) User", "to view time record if he is the supervisor or", "the owner of the contact (Edit for \"user_contact\": ('visible',) only)", "for \"it_project\" only) User may get nosy messages for support", "(View for \"user\": ['room'] only) Role \"dom-user-edit-gtt\": (Search for \"user_dynamic\"", "only) User may view work package if responsible for it,", "for \"business_unit\" only) User is allowed to access category (View", "'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'room', 'sex',", "organisation (View for \"organisation\" only) User is allowed to create", "'depends', 'doc_issue_status', 'earliest_start', 'effective_prio', 'effort_hours', 'external_users', 'files', 'files_affected', 'fixed_in', 'id',", "\"vacation_correction\" only) User is allowed to create auto_wp (Create for", "(Edit for \"time_wp\": ('bookers', 'description', 'epic_key', 'planned_effort', 'time_end', 'time_start', 'time_wp_summary_no')", "\"file\" only) User may access the web interface (Web Access)", "the domain_permission for the user (View for \"user\": ['room'] only)", "category (Create for \"category\" only) User is allowed to create", "(Edit for \"time_project\": ('cost_center', 'department', 'deputy', 'description', 'name', 'nosy', 'organisation',", "they are in the domain_permission for the user (View for", "edit reporting_group (Edit for \"reporting_group\" only) User is allowed to", "to access area (View for \"area\" only) User is allowed", "time_records that are attached to that daily_record) if 
the user", "web interface (Web Access) User may access the xmlrpc interface", "is allowed to view contact if he's the owner of", "user details (View for \"user\": ('entry_date', 'planning_role') only) User is", "overtime information (View for \"overtime_correction\" only) User is allowed to", "allowed to view their own messages (View for \"msg\" only)", "to edit their queries (Edit for \"query\" only) User is", "for \"query\" only) User is allowed to retire their queries", "for \"time_wp\" only) User is allowed to edit time_wp_group (Edit", "is allowed to create doc_status (Create for \"doc_status\" only) User", "'travel', 'wp_no') only) User is allowed to search user_status (Search", "only) User is allowed to edit doc_status (Edit for \"doc_status\"", "where they have search permission (View for \"query\" only) Role", "User is allowed to create room (Create for \"room\" only)", "'warranty') only) User is allowed View on (View for \"user\":", "allowed Edit on (Edit for \"sap_cc\": ('group_lead', 'purchasing_agents', 'team_lead') only)", "(Edit for \"safety_level\" only) User is allowed to edit severity", "may use the email interface (Email Access) Users are allowed", "to edit room (Edit for \"room\" only) Role \"functional-role\": (Restore", "for \"ext_tracker_type\" only) Role \"msgsync\": (Search for \"msg\": ('date', 'id')", "May only view/edit records with the correct domain (Edit for", "(View for \"vacation_report\" only) User is allowed to access work_location", "access ext_tracker_type (View for \"ext_tracker_type\" only) User is allowed to", "for \"time_project\" only) User is allowed to create time_project_status (Create", "only) User is allowed to access sup_classification (View for \"sup_classification\"", "View permission (View for \"msg\" only) User is allowed View", "room (Edit for \"room\" only) Role \"functional-role\": (Restore for \"user_functional_role\"", "is allowed to create contact (Create for \"contact\" only) User", "or project 
responsible/deputy (Edit for \"time_wp\": ('bookers', 'description', 'epic_key', 'planned_effort',", "only) User is allowed to create time_report (Create for \"time_report\"", "Edit on (Edit for \"time_project\": ('approval_hr', 'approval_required', 'is_extern', 'is_public_holiday', 'is_special_leave',", "edit query (Edit for \"query\" only) User is allowed to", "User is allowed to access daily_record (View for \"daily_record\" only)", "\"procurement\": (View for \"sap_cc\" only) (View for \"time_project\" only) User", "allowed to see time record if he is allowed to", "for \"it_issue\" only) User may get nosy messages for it_project", "for \"it_issue\" only) User is allowed to edit it_project (Edit", "User is allowed to access prodcat (View for \"prodcat\" only)", "User may view a daily_record (and time_records that are attached", "for \"uc_type\" only) Role \"organisation\": User is allowed to access", "user owns the daily_record or has role 'HR' or 'Controlling',", "if he is Stakeholder/Responsible for an it_issue (Edit for \"it_issue\":", "user is on nosy list (Edit for \"it_issue\": ('messages', 'files',", "leave submissions (Edit for \"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status',", "only) User is allowed to edit org_location (Edit for \"org_location\"", "'lunch_start', 'nickname', 'pictures', 'position_text', 'reduced_activity_list', 'roles', 'room', 'sex', 'status', 'subst_active',", "\"room\" only) User is allowed to create room (Create for", "(Nosy for \"it_issue\" only) User may get nosy messages for", "\"keyword\" only) User is allowed to edit kind (Edit for", "edit doc_issue_status (Edit for \"doc_issue_status\" only) User is allowed to", "(Create for \"issue\" only) User is allowed to create keyword", "\"user_contact\" only) User is allowed to create absence (Create for", "for \"time_report\" only) User is allowed to edit time_report (Edit", "\"domain_permission\" only) User is allowed to access it_int_prio (View 
for", "(some of) their own user details (Edit for \"user\": ('csv_delimiter',", "\"time_activity_perm\" only) (Search for \"time_record\" only) (Search for \"user_dynamic\" only)", "is allowed to create sup_classification (Create for \"sup_classification\" only) User", "on (Edit for \"time_project\": ('approval_hr', 'approval_required', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation',", "Role \"user_view\": User is allowed to access user (View for", "(View for \"test_level\" only) User is allowed to create file", "View on (View for \"category\": ('id', 'name') only) User is", "(View for \"user\": ('activity', 'actor', 'creation', 'creator', 'firstname', 'lastname', 'realname',", "User is allowed to create file (Create for \"file\" only)", "to search support (Search for \"support\" only) User is allowed", "all details on work package or User may view a", "only) User is allowed Edit on (Edit for \"msg\": ('author',", "\"time_wp\" only) User is allowed to edit time_wp_group (Edit for", "for \"artefact\" only) User is allowed to access business_unit (View", "'prodcat', 'product', 'related_issues', 'related_support', 'release', 'responsible', 'return_type', 'sap_ref', 'send_to_customer', 'serial_number',", "(View for \"uc_type\" only) User is allowed to access user_status", "\"it_project\" only) User is allowed to edit it_request_type (Edit for", "to create doc_category (Create for \"doc_category\" only) User is allowed", "external users or there is a transitive permission via containers", "only) User is allowed to create return_type (Create for \"return_type\"", "responsible for it, if user is owner or deputy of", "the record, it may also be seen (View for \"daily_record\"", "submission if he is the supervisor or the person to", "only) User is allowed View on (View for \"user\": ('roles',)", "(View for \"cost_center_status\" only) User is allowed to access customer", "frozen in validity span of dynamic user record (Edit for", "'firstname', 'id', 
'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'queries',", "domain_permission (View for \"domain_permission\" only) User is allowed to access", "\"organisation\" only) User is allowed to edit overtime correction if", "fields if he is Responsible for an it_issue (Edit for", "allowed to edit time_wp (Edit for \"time_wp\" only) User is", "is allowed to create overtime_period (Create for \"overtime_period\" only) User", "to create absence (Create for \"absence\" only) User is allowed", "to access business_unit (View for \"business_unit\" only) User is allowed", "allowed to create doc (Create for \"doc\" only) User is", "business_unit (View for \"business_unit\" only) User is allowed to access", "allowed to edit keyword (Edit for \"keyword\" only) User is", "User is allowed to create sap_cc (Create for \"sap_cc\" only)", "User is allowed to retire their queries (Retire for \"query\"", "selected fields in work package if booking is allowed for", "allowed to access auto_wp (View for \"auto_wp\" only) User is", "allowed to search time_record (Search for \"time_record\" only) User is", "(Search for \"time_activity_perm\" only) (Search for \"time_record\" only) (Search for", "to access uc_type (View for \"uc_type\" only) User is allowed", "edit category (Edit for \"category\" only) User is allowed to", "on (View for \"user\": ('roles',) only) User is allowed View", "(Create for \"time_activity\" only) User is allowed to create time_activity_perm", "\"file\" only) User is allowed Edit on msg if msg", "is allowed to edit status_transition (Edit for \"status_transition\" only) User", "create it_issue (Create for \"it_issue\" only) User is allowed to", "is allowed to create cost_center (Create for \"cost_center\" only) User", "or deputy of time category or on nosy list of", "\"doc\" only) User is allowed to create ext_tracker_state (Create for", "allowed to edit time_activity (Edit for \"time_activity\" only) User is", 
"'department', 'deputy', 'description', 'name', 'nosy', 'organisation', 'responsible', 'status') only) User", "for \"msg\" only) User is allowed to create query (Create", "'auto_wp', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'is_extern', 'is_public',", "allowed Edit on (Edit for \"daily_record\": ('status', 'time_record') only) User", "only) User is allowed to edit ext_tracker_state (Edit for \"ext_tracker_state\"", "search issue (Search for \"issue\" only) User is allowed to", "to access vacation_correction (View for \"vacation_correction\" only) Role \"hr-org-location\": (Search", "User is allowed to access project_type (View for \"project_type\" only)", "for \"user_contact\" only) Users may view user_dynamic records for ad_domain", "(Search for \"msg\" only) User is allowed to search for", "allowed to search for their queries (Search for \"query\" only)", "User is allowed Edit on msg if msg is linked", "User is allowed to edit time_wp (Edit for \"time_wp\" only)", "User is allowed to access organisation (View for \"organisation\" only)", "allowed Edit on (Edit for \"msg\": ('keywords',) only) User is", "their details (Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'password', 'timezone') only)", "(Search for \"overtime_correction\" only) (Search for \"time_activity_perm\" only) (Search for", "allowed View on (View for \"user_status\": ('name',) only) User is", "for \"it_project\" only) Role \"staff-report\": Role \"sub-login\": Role \"summary_view\": Role", "by user) (Edit for \"file\" only) Role \"user\": (Search for", "is allowed to edit analysis_result (Edit for \"analysis_result\" only) User", "(Create for \"public_holiday\" only) User is allowed to create query", "to create msg (Create for \"msg\" only) User is allowed", "is allowed to create time_wp_group (Create for \"time_wp_group\" only) User", "non-confidential or user is on nosy list (View for \"issue\"", "to access 
time_project (View for \"time_project\" only) User is allowed", "to access user_functional_role (View for \"user_functional_role\" only) User is allowed", "only) User is allowed to edit product_type (Edit for \"product_type\"", "allowed to edit ext_msg (Edit for \"ext_msg\" only) User is", "only) User is allowed to create time_record (Create for \"time_record\"", "\"absence\" only) User is allowed to access absence_type (View for", "to edit absence_type (Edit for \"absence_type\" only) User is allowed", "to access time_project_status (View for \"time_project_status\" only) User is allowed", "file is linked from an item with Edit permission (Edit", "only) User is allowed to access business_unit (View for \"business_unit\"", "mailgroup (Create for \"mailgroup\" only) User is allowed to edit", "to edit it_issue (Edit for \"it_issue\" only) User is allowed", "\"controlling\": User is allowed Edit on (Edit for \"daily_record\": ('status',", "\"organisation\" only) User is allowed to create location (Create for", "get nosy messages for issue (Nosy for \"issue\" only) User", "User is allowed to search for their own messages (Search", "for \"user\": ('address', 'alternate_addresses', 'nickname', 'password', 'timezone', 'username') only) User", "to create area (Create for \"area\" only) User is allowed", "for \"work_location\" only) User is allowed to edit cost_center (Edit", "(Create for \"query\" only) User is allowed to create support", "interface (Xmlrpc Access) User may create everything (Create) User may", "access product_type (View for \"product_type\" only) User is allowed to", "on (Edit for \"file\": ('name', 'type') only) User is allowed", "create query (Create for \"query\" only) User is allowed to", "Users are allowed to view their own and public queries", "contact (Edit for \"contact\" only) Role \"controlling\": User is allowed", "(Edit for \"file\" only) Role \"user\": (Search for \"time_project\": ('activity',", "to view their own overtime information (View 
for \"overtime_correction\" only)", "for \"time_record\" only) User is allowed to create uc_type (Create", "(Edit for \"time_project\": ('deputy', 'planned_effort', 'nosy') only) User is allowed", "allowed to view their own and public queries for classes", "('activity', 'actor', 'creation', 'creator', 'deputy', 'description', 'id', 'is_extern', 'is_public_holiday', 'is_special_leave',", "only) User is allowed View on (View for \"user\": ('activity',", "'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname',", "sup_warranty (View for \"sup_warranty\" only) User is allowed to access", "Role \"organisation\": User is allowed to access location (View for", "is allowed to access doc_issue_status (View for \"doc_issue_status\" only) User", "(Edit for \"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp', 'user')", "\"reporting_group\" only) User is allowed to access return_type (View for", "is allowed to access timesheet (View for \"timesheet\" only) User", "'time_wp_summary_no', 'travel', 'wp_no') only) User is allowed to view their", "User is allowed to access summary_report (View for \"summary_report\" only)", "access sup_warranty (View for \"sup_warranty\" only) User is allowed to", "User is allowed to edit absence_type (Edit for \"absence_type\" only)", "'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Users may", "access analysis_result (View for \"analysis_result\" only) User is allowed to", "only) User is allowed to edit cost_center_status (Edit for \"cost_center_status\"", "\"cost_center_permission_group\" only) User is allowed to create cost_center_permission_group (Create for", "only) User is allowed to edit (some of) their own", "to access overtime_period (View for \"overtime_period\" only) User is allowed", "edit domain_permission (Edit for \"domain_permission\" 
only) User is allowed to", "(View for \"sup_classification\" only) User is allowed to access sup_execution", "is allowed to edit category (Edit for \"category\" only) User", "to access ext_msg (View for \"ext_msg\" only) User is allowed", "(Create for \"reporting_group\" only) User is allowed to create sap_cc", "only) User is allowed to access vacation_correction (View for \"vacation_correction\"", "is allowed to access absence (View for \"absence\" only) User", "to access time_report (View for \"time_report\" only) User is allowed", "User is allowed to access sup_classification (View for \"sup_classification\" only)", "be seen (View for \"daily_record\" only) User may view their", "domain_permission for the user (View for \"user\": ['contacts', 'csv_delimiter', 'department_temp',", "'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'reduced_activity_list',", "\"user\": ('entry_date', 'planning_role') only) User is allowed to view contact", "User is allowed Edit on (Edit for \"time_project\": ('infosec_req', 'is_extern',", "may see daily record if they may see one of", "(View for \"user_dynamic\" only) User is allowed to access vacation_correction", "time category or if user is department manager of time", "cost_center (View for \"cost_center\" only) User is allowed to access", "same Org-Location as the record, it may also be seen", "allowed to access cost_center_status (View for \"cost_center_status\" only) User is", "access user_contact (View for \"user_contact\" only) User is allowed to", "is allowed to create time_wp (Create for \"time_wp\" only) User", "'room'] only) Role \"external\": (Search for \"ext_tracker_state\": ('id', 'issue') only)", "may edit everything (Edit) User may manipulate user Roles through", "\"artefact\" only) User is allowed to create doc (Create for", "access artefact (View for \"artefact\" only) User is allowed to", "(View for \"time_wp\": 
('activity', 'actor', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed',", "(View for \"vacation_correction\" only) User is allowed to create auto_wp", "to edit location (Edit for \"location\" only) User is allowed", "to create public_holiday (Create for \"public_holiday\" only) User is allowed", "(View for \"summary_report\" only) User is allowed to access summary_type", "is allowed Edit on (Edit for \"time_project\": ('cost_center', 'department', 'deputy',", "('project',) only) User is allowed View on (View for \"user\":", "'status', 'time_wp', 'user') only) User may see time report if", "allowed View on (View for \"user\": ('roles',) only) User is", "edit user_contact (Edit for \"user_contact\" only) Users may view/edit user", "\"sap_cc\" only) User is allowed to create time_activity (Create for", "(View for \"project_type\" only) User is allowed to access public_holiday", "User is allowed to edit user_functional_role (Edit for \"user_functional_role\" only)", "keyword (Edit for \"keyword\" only) User is allowed to edit", "approvals are delegated (View for \"leave_submission\" only) User is allowed", "may view/edit user records for ad_domain for which they are", "is allowed to edit it_category (Edit for \"it_category\" only) User", "allowed to view overtime information if he/she is in group", "to edit status (Edit for \"status\" only) User is allowed", "substitute supervisor of the owner of the daily record (the", "(Restore for \"user_functional_role\" only) (Retire for \"user_functional_role\" only) User is", "for \"reporting_group\" only) User is allowed to edit room (Edit", "for \"test_level\" only) User is allowed to edit area (Edit", "(View for \"doc_status\" only) User is allowed to access ext_tracker", "record (Edit for \"user_dynamic\" only) User is allowed to edit", "'analysis_result', 'analysis_start', 'bcc', 'business_unit', 'category', 'cc', 'cc_emails', 'classification', 'closed', 'confidential',", "allowed to access 
overtime_correction (View for \"overtime_correction\" only) User is", "\"issue_admin\": User is allowed Edit on msg if msg is", "\"area\" only) User is allowed to access artefact (View for", "allowed Edit on (Edit for \"leave_submission\": ('status',) only) User is", "\"contact\" only) User is allowed to access contact_type (View for", "view time category if user is owner or deputy of", "(View for \"summary_type\" only) User is allowed to access sup_classification", "(View for \"time_wp\" only) User or Timetracking by user may", "the Role \"User\" Role \"admin\": User may access the rest", "for \"overtime_correction\" only) User is allowed to create overtime_period (Create", "is allowed to create product_type (Create for \"product_type\" only) User", "\"status\" only) User is allowed to create status_transition (Create for", "for \"customer_agreement\" only) User is allowed to access mailgroup (View", "(View for \"user_functional_role\" only) User may view time category if", "time_records owned by user (Retire for \"time_record\" only) User or", "'time_wp', 'user') only) User may edit own leave submissions (View", "user_contact (Create for \"user_contact\" only) User is allowed to edit", "edit time_record (Edit for \"time_record\" only) User is allowed to", "to create user_functional_role (Create for \"user_functional_role\" only) User is allowed", "\"ext_msg\" only) User is allowed to create ext_tracker_state (Create for", "list (View for \"it_issue\" only) User is allowed View on", "\"overtime_correction\" only) User is allowed to edit product_family (Edit for", "User is allowed to edit auto_wp (Edit for \"auto_wp\" only)", "(Edit for \"public_holiday\" only) User is allowed to edit reporting_group", "for \"it_project\" only) Role \"msgedit\": (Search for \"msg\": ('date', 'id')", "area (Create for \"area\" only) User is allowed to create", "access reference (View for \"reference\" only) User is allowed to", "Email users get the Role \"User\" Role \"admin\": User 
may", "is allowed to create reference (Create for \"reference\" only) User", "which they are in the domain_permission for the user (View", "is allowed to create leave_submission (Create for \"leave_submission\" only) User", "for \"user\": ('ad_domain', 'nickname', 'password', 'pictures', 'roles', 'timetracking_by', 'timezone', 'username')", "User is allowed to create doc_category (Create for \"doc_category\" only)", "(View for \"issue\": ['activity', 'actor', 'area', 'category', 'closed', 'composed_of', 'creation',", "details (View for \"user\": ('activity', 'actor', 'creation', 'creator', 'firstname', 'lastname',", "access the xmlrpc interface (Xmlrpc Access) User may edit own", "'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'queries', 'realname', 'room',", "interface (Xmlrpc Access) User may edit own leave submissions (Edit", "create safety_level (Create for \"safety_level\" only) User is allowed to", "only) User is allowed to access org_location (View for \"org_location\"", "access org_group (View for \"org_group\" only) User is allowed to", "supervisor and approval delegated) (View for \"time_wp\": ('activity', 'actor', 'cost_center',", "'deadline', 'depends', 'doc_issue_status', 'earliest_start', 'effective_prio', 'effort_hours', 'external_users', 'files', 'files_affected', 'fixed_in',", "only) User may edit own file (file created by user)", "'closed', 'confidential', 'customer', 'emails', 'execution', 'external_ref', 'files', 'goods_received', 'goods_sent', 'lot',", "for \"time_record\" only) User is allowed to view work package", "time_records owned by user (View for \"time_record\" only) Users are", "\"time_project\": ('deputy', 'planned_effort', 'nosy') only) User is allowed to edit", "is allowed View on (View for \"user\": ('business_responsible', 'planning_role', 'scale_seniority')", "access the web interface (Web Access) Role \"cc-permission\": (Restore for", "owner of the daily record. 
If user has role HR-Org-Location", "access time_activity_perm (View for \"time_activity_perm\" only) User is allowed to", "the user (Edit for \"user\": ['room'] only) Users may view/edit", "it_issue is non-confidential or user is on nosy list (View", "for \"leave_submission\" only) User is allowed to view selected fields", "User is allowed to access customer_agreement (View for \"customer_agreement\" only)", "for classes where they have search permission (View for \"query\"", "for \"time_project\" only) User is allowed to access time_record (View", "\"query\" only) Users may see daily record if they may", "(Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'password', 'timezone') only) Users are", "to see all details on work package or User may", "are on the list of allowed external users or there", "(View for \"file\" only) User is allowed to view their", "for \"ext_tracker_type\" only) User is allowed to access functional_role (View", "create time_wp (Create for \"time_wp\" only) User is allowed to", "to create contract_type (Create for \"contract_type\" only) User is allowed", "(View for \"kind\" only) User is allowed to access leave_status", "to access test_level (View for \"test_level\" only) User is allowed", "(View for \"time_report\" only) User is allowed to create time_report", "(Edit for \"contract_type\" only) User is allowed to edit leave_submission", "\"sec-incident-nosy\": User is allowed to access it_int_prio (View for \"it_int_prio\"", "(View for \"keyword\" only) User is allowed to access kind", "for \"ext_tracker\" only) User is allowed to access ext_tracker_state (View", "\"staff-report\": Role \"sub-login\": Role \"summary_view\": Role \"supportadmin\": User is allowed", "only) May only view/edit records with the correct domain (View", "(Edit for \"user\": ('business_responsible', 'scale_seniority') only) User is allowed View", "'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) (View for 
\"time_project\":", "only) User is allowed to edit mailgroup (Edit for \"mailgroup\"", "is allowed to access time_report (View for \"time_report\" only) User", "the daily record (the supervisor relationship is transitive) or the", "for \"area\" only) User is allowed to access artefact (View", "(Edit for \"msg\": ('author', 'date', 'id', 'keywords', 'subject', 'summary') only)", "allowed to access sap_cc (View for \"sap_cc\" only) User is", "only) User is allowed to edit time_activity_perm (Edit for \"time_activity_perm\"", "is allowed to edit it_request_type (Edit for \"it_request_type\" only) User", "allowed to create ext_tracker_state (Create for \"ext_tracker_state\" only) User is", "\"it_issue\" only) User is allowed to edit it_project (Edit for", "'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Users may view/edit user records", "allowed to edit some of their details (Edit for \"user\":", "search for their own files (Search for \"file\" only) User", "and in the same Org-Location as the given user (View", "for \"issue\" only) User is allowed to create it_issue (Create", "user (View for \"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files',", "view time record if he is the supervisor or the", "'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'is_extern', 'is_public', 'id', 'name',", "web (Web Roles) Role \"itview\": User is allowed to access", "(Edit for \"doc\" only) User is allowed to edit ext_tracker_state", "for \"query\" only) Role \"facility\": (Restore for \"room\" only) (Retire", "through the web (Web Roles) Role \"itview\": User is allowed", "User is allowed to access mailgroup (View for \"mailgroup\" only)", "\"kind\" only) User is allowed to edit msg_keyword (Edit for", "to create vacation_correction (Create for \"vacation_correction\" only) User is allowed", "'responsible', 'status', 'work_location', 'wps') only) Search (Search for 
\"user_contact\" only)", "functional_role (View for \"functional_role\" only) User is allowed to access", "'username', 'vie_user'] only) Role \"dom-user-edit-hr\": (Search for \"user_dynamic\" only) May", "\"user_functional_role\" only) User may view time category if user is", "it may also be seen (View for \"daily_record\" only) User", "for \"it_issue_status\" only) User is allowed to access it_prio (View", "the user (Edit for \"user\": ['contacts', 'position_text', 'room'] only) Users", "(Edit for \"time_record\" only) User is allowed to edit work_location", "file (Create for \"file\" only) User is allowed to create", "allowed to create ext_msg (Create for \"ext_msg\" only) User is", "view user_dynamic records for ad_domain for which they are in", "\"department\": ('doc_num',) only) User is allowed to create artefact (Create", "\"query\" only) User is allowed to edit time category if", "for \"reporting_group\" only) User is allowed to access return_type (View", "\"cost_center_permission_group\" only) Role \"contact\": User is allowed to create contact", "transitive) or the user is the department manager of the", "allowed to create keyword (Create for \"keyword\" only) User is", "to access reporting_group (View for \"reporting_group\" only) User is allowed", "\"room\" only) User is allowed Edit on (Edit for \"daily_record\":", "User is allowed to edit ext_tracker (Edit for \"ext_tracker\" only)", "domain_permission for the user (View for \"user_dynamic\" only) Users may", "the xmlrpc interface (Xmlrpc Access) User may create everything (Create)", "\"user\": ('contacts',) only) User is allowed to access user_contact (View", "is allowed to edit safety_level (Edit for \"safety_level\" only) User", "leave submission if he is the supervisor or the person", "or timetracking-by user (View for \"daily_record\" only) User is allowed", "\"safety_level\" only) User is allowed to edit severity (Edit for", "only) User is allowed Edit on (Edit for \"organisation\": 
('domain_part',)", "msg (Create for \"msg\" only) User is allowed to create", "for \"user\": ('business_responsible', 'scale_seniority') only) User is allowed View on", "\"time_project_status\" only) User is allowed to create time_wp (Create for", "own messages (Search for \"msg\" only) User is allowed to", "for \"kind\" only) User is allowed to access msg_keyword (View", "of time project or on nosy list of time project", "'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Role \"dom-user-edit-office\": User is", "for \"project_type\" only) User is allowed to access public_holiday (View", "reponsible or deputy of time project or on nosy list", "is allowed to access sap_cc (View for \"sap_cc\" only) User", "(View for \"sex\" only) User is allowed to access status", "seen (View for \"time_record\" only) User is allowed to view", "User is allowed to access user_dynamic (View for \"user_dynamic\" only)", "only) Role \"project_view\": User is allowed to access time_project (View", "\"supportadmin\": User is allowed to access analysis_result (View for \"analysis_result\"", "access cost_center_group (View for \"cost_center_group\" only) User is allowed to", "(Edit for \"user\": ('ad_domain', 'nickname', 'password', 'pictures', 'roles', 'timetracking_by', 'timezone',", "('name',) only) User is allowed View on file if file", "'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname',", "User is allowed to access public_holiday (View for \"public_holiday\" only)", "allowed to view their own files (View for \"file\" only)", "\"overtime_correction\" only) User is allowed to view time record data", "access customer (View for \"customer\" only) User is allowed to", "('id', 'issue') only) (Search for \"user\": ('id', 'nickname', 'username') only)", "create work_location (Create for \"work_location\" only) User is allowed to", "to search for their queries (Search for 
\"query\" only) User", "for \"support\" only) User is allowed to create time_record (Create", "\"organisation\": ('domain_part',) only) User is allowed Edit on (Edit for", "allowed to edit doc_issue_status (Edit for \"doc_issue_status\" only) User is", "for \"ext_tracker_state\" only) Role \"nosy\": User may get nosy messages", "access it_project (View for \"it_project\" only) Role \"msgedit\": (Search for", "allowed to access it_request_type (View for \"it_request_type\" only) User is", "only) User is allowed to edit uc_type (Edit for \"uc_type\"", "is allowed to access keyword (View for \"keyword\" only) User", "is marked visible (View for \"user_contact\" only) User is allowed", "only) User is allowed to access auto_wp (View for \"auto_wp\"", "file if file is linked from an item with View", "'prio', 'prodcat', 'product', 'related_issues', 'related_support', 'release', 'responsible', 'return_type', 'sap_ref', 'send_to_customer',", "and public queries for classes where they have search permission", "for \"user\": ('id', 'nickname', 'username') only) External users are allowed", "Role \"hr\": (Edit for \"overtime_period\": ('name', 'order') only) (Restore for", "(Create for \"user_functional_role\" only) User is allowed to edit user_functional_role", "User may access the rest interface (Rest Access) User may", "\"summary_report\" only) User is allowed to access summary_type (View for", "only) User is allowed to view freeze information if he/she", "('entry_date', 'planning_role') only) User is allowed to view contact if", "access sup_type (View for \"sup_type\" only) User is allowed to", "for \"user\": ('business_responsible', 'planning_role', 'scale_seniority') only) User is allowed to", "only) User is allowed to create time_project_status (Create for \"time_project_status\"", "allowed Edit on (Edit for \"daily_record\": ('required_overtime', 'weekend_allowed') only) User", "to create it_int_prio (Create for \"it_int_prio\" only) User is allowed", "their own user 
details (Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'lunch_duration',", "access doc (View for \"doc\" only) User is allowed to", "to edit sup_classification (Edit for \"sup_classification\" only) User is allowed", "is allowed to create department (Create for \"department\" only) User", "the daily_record or has role 'HR' or 'Controlling', or the", "edit public_holiday (Edit for \"public_holiday\" only) User is allowed to", "allowed View on (View for \"user\": ('nickname', 'status', 'username') only)", "(View for \"safety_level\" only) User is allowed to access sap_cc", "Role \"contact\": User is allowed to create contact (Create for", "for \"msg\" only) User is allowed View on file if", "to edit kind (Edit for \"kind\" only) User is allowed", "\"query\" only) User is allowed to edit their queries (Edit", "only) Role \"project\": User is allowed Edit on (Edit for", "allowed to edit customer_agreement (Edit for \"customer_agreement\" only) User is", "User is allowed to edit organisation (Edit for \"organisation\" only)", "is owner or supervisor or timetracking-by user (View for \"daily_record\"", "only) Users are allowed to view their own and public", "for \"time_project\": ('deputy', 'planned_effort', 'nosy') only) User is allowed to", "\"sap_cc\" only) User is allowed to access severity (View for", "'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only) Role \"dom-user-edit-hr\": (Search", "User is allowed to access time_report (View for \"time_report\" only)", "cost_center_permission_group (View for \"cost_center_permission_group\" only) User is allowed to access", "'messages', 'needs', 'nosy', 'numeric_effort', 'part_of', 'planned_begin', 'planned_end', 'priority', 'release', 'responsible',", "\"user_dynamic\" only) User is allowed to access contract_type (View for", "Timetracking by user may edit time_records owned by user (Restore", "list (View for \"it_project\" only) User is allowed View on", "only) User is 
allowed to edit customer (Edit for \"customer\"", "location (Edit for \"location\" only) User is allowed to edit", "it_project (View for \"it_project\" only) User is allowed to create", "contact (View for \"contact\" only) User is allowed to access", "of the contact or the contact is marked visible (View", "(Search for \"time_record\" only) (Search for \"user_dynamic\" only) User is", "access cost_center_status (View for \"cost_center_status\" only) User is allowed to", "allowed to create cost_center_status (Create for \"cost_center_status\" only) User is", "'password', 'pictures', 'roles', 'timetracking_by', 'timezone', 'username') only) User is allowed", "search leave_submission (Search for \"leave_submission\" only) User is allowed to", "for \"product_type\" only) User is allowed to create reference (Create", "Edit on (Edit for \"time_project\": ('infosec_req', 'is_extern', 'max_hours', 'op_project', 'planned_effort',", "create test_level (Create for \"test_level\" only) User is allowed to", "for \"file\" only) User is allowed View on issue if", "only) User is allowed to create severity (Create for \"severity\"", "access organisation (View for \"organisation\" only) User is allowed to", "is allowed to access organisation (View for \"organisation\" only) User", "['room'] only) Role \"dom-user-edit-gtt\": (Search for \"user_dynamic\" only) May only", "allowed to create safety_level (Create for \"safety_level\" only) User is", "\"time_wp\": ('activity', 'actor', 'auto_wp', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key',", "overtime_correction (View for \"overtime_correction\" only) User is allowed to access", "\"room\" only) Role \"functional-role\": (Restore for \"user_functional_role\" only) (Retire for", "User is allowed to access org_group (View for \"org_group\" only)", "not frozen (Edit for \"overtime_correction\" only) User is allowed to", "for \"query\" only) User is allowed to search daily_record (Search", 
"'durations_allowed', 'epic_key', 'has_expiration_date', 'id', 'is_extern', 'is_public', 'name', 'project', 'responsible', 'time_end',", "to edit user_contact (Edit for \"user_contact\" only) Users may view", "(View for \"user_dynamic\" only) User is allowed to view freeze", "\"uc_type\" only) Role \"organisation\": User is allowed to access location", "User is allowed View on (View for \"category\": ('id', 'name')", "allowed to edit organisation (Edit for \"organisation\" only) Role \"pgp\":", "to edit ext_msg (Edit for \"ext_msg\" only) User is allowed", "create overtime_correction (Create for \"overtime_correction\" only) User is allowed to", "only) User is allowed to create reporting_group (Create for \"reporting_group\"", "allowed to edit product_type (Edit for \"product_type\" only) User is", "only) Role \"hr-org-location\": (Search for \"daily_record_freeze\" only) (Search for \"overtime_correction\"", "(Create for \"test_level\" only) User is allowed to edit area", "user functional role (View for \"user_functional_role\" only) User may view", "for \"vacation_correction\" only) User is allowed to edit contract_type (Edit", "nosy messages for it_project (Nosy for \"it_project\" only) User may", "is Responsible for an it_issue (Edit for \"it_issue\": ('responsible',) only)", "only) User is allowed to edit leave_submission (Edit for \"leave_submission\"", "User is allowed to access location (View for \"location\" only)", "to view/edit workpackage if he is owner or project responsible/deputy", "(also applies to timetracking by, supervisor and approval delegated) (View", "email interface (Email Access) Users are allowed to edit some", "(Edit for \"analysis_result\" only) User is allowed to edit contact", "View on (View for \"user\": ('activity', 'actor', 'address', 'alternate_addresses', 'creation',", "allowed to create analysis_result (Create for \"analysis_result\" only) User is", "domain_permission for the user (Edit for \"user\": ['room'] only) Users", "only) 
User is allowed to search for their queries (Search", "person to whom approvals are delegated (View for \"leave_submission\" only)", "uc_type (Edit for \"uc_type\" only) User may manipulate user Roles", "allowed to access it_project (View for \"it_project\" only) Role \"msgedit\":", "\"mailgroup\" only) User is allowed to access return_type (View for", "\"ext_msg\" only) User is allowed to access ext_tracker_state (View for", "is allowed to edit doc_issue_status (Edit for \"doc_issue_status\" only) User", "'planning_role', 'scale_seniority') only) User is allowed to access user_functional_role (View", "view/edit records with the correct domain (Edit for \"user_dynamic\" only)", "allowed to edit status_transition (Edit for \"status_transition\" only) User is", "are delegated (Edit for \"leave_submission\": ('status',) only) User is allowed", "edit it_project (Edit for \"it_project\" only) User is allowed to", "\"status_transition\" only) User is allowed to access test_level (View for", "allowed to edit doc_status (Edit for \"doc_status\" only) User is", "New Email users get the Role \"User\" Role \"admin\": User", "allowed to create public_holiday (Create for \"public_holiday\" only) User is", "\"reporting_group\" only) User is allowed to edit room (Edit for", "If user has role HR-Org-Location and is in the same", "(Create for \"mailgroup\" only) User is allowed to create return_type", "(View for \"time_record\" only) User is allowed to access time_report", "the domain_permission for the user (View for \"user_dynamic\" only) Users", "(View for \"public_holiday\" only) User is allowed to access reference", "\"timesheet\" only) User is allowed to access uc_type (View for", "for \"msg_keyword\" only) User is allowed to edit safety_level (Edit", "is allowed to access category (View for \"category\" only) User", "User is allowed to view contact if he's the owner", "item with Edit permission (Edit for \"file\" only) User is", "only) User is allowed to edit doc (Edit for 
\"doc\"", "with Edit permission (Edit for \"msg\" only) User is allowed", "to whom approvals are delegated (View for \"time_record\" only) User", "is allowed to edit kind (Edit for \"kind\" only) User", "Stakeholder/Responsible for an it_issue (Edit for \"it_issue\": ('deadline', 'status', 'title')", "(Restore for \"cost_center_permission_group\" only) (Retire for \"cost_center_permission_group\" only) User is", "only) User is allowed to edit department (Edit for \"department\"", "\"msg\" only) User is allowed to view their own overtime", "are allowed to view some of their details (View for", "to access ext_tracker_state (View for \"ext_tracker_state\" only) User is allowed", "(View for \"user\": ('activity', 'actor', 'ad_domain', 'address', 'alternate_addresses', 'business_responsible', 'clearance_by',", "access it_issue (View for \"it_issue\" only) User is allowed to", "only) User is allowed to edit category if he is", "allowed to access summary_report (View for \"summary_report\" only) User is", "only) User is allowed to access work_location (View for \"work_location\"", "User is allowed to edit if he's the owner of", "User is allowed to access leave_submission (View for \"leave_submission\" only)", "'vie_user'] only) Users may view/edit user records for ad_domain for", "permission (Edit for \"file\" only) User is allowed Edit on", "(Edit for \"issue\": ['activity', 'actor', 'area', 'category', 'closed', 'composed_of', 'creation',", "reference (Edit for \"reference\" only) Role \"dom-user-edit-facility\": Users may view/edit", "for \"summary_type\" only) User is allowed to access sup_classification (View", "\"doc_issue_status\" only) User is allowed to access ext_tracker (View for", "User is allowed to view/edit workpackage if he is owner", "is allowed to access ext_tracker (View for \"ext_tracker\" only) User", "only) User is allowed to access query (View for \"query\"", "is allowed to create category (Create for \"category\" only) User", "User is allowed View 
on (View for \"user_dynamic\": ('department', 'org_location')", "only) (Search for \"user\": ('id', 'nickname', 'username') only) External users", "\"time_wp\" only) User is allowed to edit (some of) their", "allowed to edit ext_tracker (Edit for \"ext_tracker\" only) User is", "department manager of time category (View for \"time_project\" only) User", "allowed to access ext_tracker (View for \"ext_tracker\" only) User is", "(View for \"issue\" only) User is allowed to create area", "for \"cost_center\" only) User is allowed to access cost_center_group (View", "Role \"cc-permission\": (Restore for \"cost_center_permission_group\" only) (Retire for \"cost_center_permission_group\" only)", "have search permission (View for \"query\" only) Users may see", "\"contact\" only) Role \"controlling\": User is allowed Edit on (Edit", "to view overtime information if he/she is in group HR-Org-Location", "'lastname', 'realname', 'username') only) Users are allowed to view their", "for \"room\" only) User is allowed to create uc_type (Create", "User is allowed to access department (View for \"department\" only)", "(Edit for \"user_contact\": ('visible',) only) User is allowed to edit", "allowed to access sup_warranty (View for \"sup_warranty\" only) User is", "to edit it_category (Edit for \"it_category\" only) User is allowed", "to access it_prio (View for \"it_prio\" only) User is allowed", "to view dynamic user data if he/she is in group", "\"time_project\" only) User is allowed Edit on (Edit for \"sap_cc\":", "User may edit own leave submissions (View for \"leave_submission\": ('comment',", "the daily record. 
If user has role HR-Org-Location and is", "to edit product_type (Edit for \"product_type\" only) User is allowed", "for \"domain_permission\" only) User is allowed to access it_int_prio (View", "only) User is allowed to access sup_prio (View for \"sup_prio\"", "is allowed View on (View for \"user\": ('roles',) only) User", "(Create for \"user\" only) User is allowed to create user_dynamic", "msg if msg is linked from an item with View", "time_wp_group (Edit for \"time_wp_group\" only) Role \"project_view\": User is allowed", "the email interface (Email Access) User may view everything (View)", "\"query\" only) User is allowed to create reporting_group (Create for", "is allowed to edit reporting_group (Edit for \"reporting_group\" only) User", "'wps') only) (Search for \"time_wp\": ('activity', 'actor', 'auto_wp', 'bookers', 'cost_center',", "create artefact (Create for \"artefact\" only) User is allowed to", "allowed to see all details on work package or User", "\"sap_cc\" only) User is allowed to create time_record (Create for", "(Create for \"sap_cc\" only) User is allowed to create time_activity", "User is allowed to access ext_tracker_type (View for \"ext_tracker_type\" only)", "'external_ref', 'files', 'goods_received', 'goods_sent', 'lot', 'messages', 'nosy', 'number_effected', 'numeric_effort', 'prio',", "for \"time_project\": ('group_lead', 'purchasing_agents', 'team_lead') only) Role \"project\": User is", "(Edit for \"domain_permission\" only) User is allowed to edit it_category", "(View for \"product_type\" only) User is allowed to access project_type", "it_project (View for \"it_project\" only) Role \"staff-report\": Role \"sub-login\": Role", "(Edit for \"time_wp_group\" only) Role \"project_view\": User is allowed to", "the owner of the daily record (the supervisor relationship is", "only) User is allowed to create support (Create for \"support\"", "access area (View for \"area\" only) User is allowed to", "\"support\" only) Role \"office\": (Restore 
for \"room\" only) (Retire for", "'wps') only) Search (Search for \"user_contact\" only) User is allowed", "freeze record if not frozen at the given date (Edit", "Edit on (Edit for \"msg\": ('author', 'date', 'id', 'keywords', 'subject',", "(View for \"user_functional_role\" only) User is allowed to create user_functional_role", "'default_part_of') only) User is allowed to edit doc (Edit for", "View on (View for \"user_dynamic\": ('department', 'org_location') only) User is", "to access it_request_type (View for \"it_request_type\" only) User is allowed", "User is allowed to search time_wp (Search for \"time_wp\": ('activity',", "allowed to create user_dynamic (Create for \"user_dynamic\" only) User is", "to access auto_wp (View for \"auto_wp\" only) User is allowed", "\"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration',", "is allowed to edit uc_type (Edit for \"uc_type\" only) User", "(Edit for \"time_report\" only) User may edit own file (file", "(View for \"file\" only) User is allowed to access domain_permission", "(Edit for \"time_wp\": ('project',) only) User is allowed View on", "on support if support is non-confidential or user is on", "allowed View on (View for \"user\": ('business_responsible', 'planning_role', 'scale_seniority') only)", "only) User is allowed to edit time category if the", "issue is non-confidential or user is on nosy list (Edit", "for \"sup_classification\" only) User is allowed to create support (Create", "only) User is allowed to view (some of) their own", "permission (View for \"msg\" only) User is allowed to access", "it_request_type (View for \"it_request_type\" only) User is allowed to access", "for \"user_dynamic\" only) User is allowed to edit freeze record", "allowed to edit support (Edit for \"support\" only) Role \"time-report\":", "to edit public_holiday (Edit for \"public_holiday\" only) User is allowed", "for \"room\" only) 
User is allowed to create room (Create", "only) User is allowed to access time_report (View for \"time_report\"", "only) User is allowed to access ext_msg (View for \"ext_msg\"", "(Create for \"time_record\" only) User is allowed to create time_wp", "to edit doc_issue_status (Edit for \"doc_issue_status\" only) User is allowed", "allowed to create overtime_correction (Create for \"overtime_correction\" only) User is", "'creator', 'firstname', 'lastname', 'realname', 'username') only) Users are allowed to", "only) User is allowed to create safety_level (Create for \"safety_level\"", "allowed to create time_wp (Create for \"time_wp\" only) User is", "is allowed to edit it_project (Edit for \"it_project\" only) User", "edit it_category (Edit for \"it_category\" only) User is allowed to", "\"doc_status\" only) User is allowed to create product_type (Create for", "allowed to edit public_holiday (Edit for \"public_holiday\" only) User is", "'type') only) User is allowed Edit on (Edit for \"location\":", "for \"it_project_status\" only) User is allowed to access it_request_type (View", "create doc_category (Create for \"doc_category\" only) User is allowed to", "\"room\" only) User is allowed to edit uc_type (Edit for", "allowed to access daily record if he is owner or", "\"cost_center\" only) User is allowed to edit cost_center_group (Edit for", "'earliest_start', 'effective_prio', 'effort_hours', 'external_users', 'files', 'files_affected', 'fixed_in', 'id', 'keywords', 'kind',", "to create org_location (Create for \"org_location\" only) User is allowed", "if not frozen at the given date (Edit for \"daily_record_freeze\":", "sex (View for \"sex\" only) User is allowed to access", "only) User is allowed Edit on (Edit for \"user\": ('business_responsible',", "\"product_family\" only) User is allowed to access product_type (View for", "access absence (View for \"absence\" only) User is allowed to", "edit cost_center_group (Edit for \"cost_center_group\" only) User is 
allowed to", "'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'reduced_activity_list', 'roles', 'room', 'sex', 'status',", "the domain_permission for the user (View for \"user\": ['contacts', 'csv_delimiter',", "only) User is allowed to edit safety_level (Edit for \"safety_level\"", "web interface (Web Access) User may use the email interface", "if the overtime correction is not frozen (Edit for \"overtime_correction\"", "category if user is owner or deputy of time category", "work package and time category names if he/she has role", "to search for their own messages (Search for \"msg\" only)", "to edit room (Edit for \"room\" only) User is allowed", "\"it_request_type\" only) User is allowed to create mailgroup (Create for", "for \"contract_type\" only) User is allowed to create leave_submission (Create", "(Create for \"doc_status\" only) User is allowed to create product_type", "\"time_project\": ('group_lead', 'team_lead') only) User is allowed Edit on (Edit", "for \"user_functional_role\" only) User is allowed to edit user_functional_role (Edit", "(Edit for \"status_transition\" only) User is allowed to edit test_level", "time_wp (View for \"time_wp\" only) User is allowed to create", "\"user\": ('status',) only) User is allowed to see time record", "time project (View for \"time_report\" only) User may use the", "for \"room\" only) User is allowed to edit sap_cc (Edit", "User is allowed Edit on (Edit for \"time_project\": ('group_lead', 'purchasing_agents',", "'id') only) User is allowed Edit on (Edit for \"msg\":", "search permission (View for \"query\" only) Users may see daily", "(Create for \"kind\" only) User is allowed to create msg_keyword", "Access) Users are allowed to edit some of their details", "\"ext_tracker_state\" only) User is allowed to edit ext_msg (Edit for", "access user (View for \"user\" only) Role \"vacation-report\": \"\"\".strip ()", "only) User is allowed to create vacation_correction (Create for 
\"vacation_correction\"", "'category', 'closed', 'composed_of', 'creation', 'creator', 'cur_est_begin', 'cur_est_end', 'deadline', 'depends', 'doc_issue_status',", "for it_issue (Nosy for \"it_issue\" only) User may get nosy", "users or there is a transitive permission via containers (View", "to create work_location (Create for \"work_location\" only) User is allowed", "room (Edit for \"room\" only) User is allowed to edit", "(View for \"organisation\" only) User is allowed to create location", "'deputy', 'description', 'id', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'name', 'nosy', 'only_hours',", "records with the correct domain (View for \"user_dynamic\" only) User", "allowed to create severity (Create for \"severity\" only) User is", "for \"msg_keyword\" only) User is allowed to access org_group (View", "for \"it_issue\" only) User is allowed View on it_project if", "edit time_wp_group (Edit for \"time_wp_group\" only) Role \"project_view\": User is", "for \"organisation\" only) User is allowed to create product_family (Create", "'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) (View for \"time_project\": ('activity', 'actor',", "user (View for \"overtime_correction\" only) User is allowed to view", "on (View for \"user\": ('business_responsible', 'department_temp', 'timetracking_by', 'vie_user', 'vie_user_bl_override', 'vie_user_ml')", "(Create for \"room\" only) User is allowed to edit room", "is allowed to view/edit workpackage if he is owner or", "for \"time_activity\" only) User is allowed to edit time_activity_perm (Edit", "allowed to edit it_category (Edit for \"it_category\" only) User is", "\"area\" only) User is allowed to edit category (Edit for", "only) User may use the email interface (Email Access) User", "User is allowed to access product_family (View for \"product_family\" only)", "is allowed to view freeze information if he/she is in", "(Edit for \"daily_record\": ('status', 'time_record') only) User is 
allowed to", "is allowed to create safety_level (Create for \"safety_level\" only) User", "\"cost_center_status\" only) User is allowed to create department (Create for", "the domain_permission for the user (Edit for \"user\": ['contacts', 'csv_delimiter',", "('analysis_end', 'analysis_result', 'analysis_start', 'bcc', 'business_unit', 'category', 'cc', 'cc_emails', 'classification', 'closed',", "is owner or deputy of time category or on nosy", "only) User is allowed to access return_type (View for \"return_type\"", "'is_public', 'id', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no')", "for \"user_dynamic\" only) User is allowed to access contract_type (View", "\"time_record\" only) Users are allowed to view their own and", "('cost_center', 'is_public', 'name', 'responsible', 'time_wp_summary_no', 'wp_no') only) User is allowed", "customer (Create for \"customer\" only) User is allowed to create", "allowed to view selected fields in work package if booking", "(Edit for \"department\" only) User is allowed to edit organisation", "\"daily_record_status\" only) User is allowed to access department (View for", "may manipulate user Roles through the web (Web Roles) Role", "User is allowed to access time_wp_group (View for \"time_wp_group\" only)", "to create domain_permission (Create for \"domain_permission\" only) User is allowed", "for \"user\": ('csv_delimiter', 'hide_message_files', 'lunch_duration', 'lunch_start', 'password', 'queries', 'realname', 'room',", "for \"issue\" only) User is allowed to create keyword (Create", "(Create for \"file\" only) User is allowed to create issue", "is allowed Edit on msg if msg is linked from", "details on work package or User may view a daily_record", "to edit user_functional_role (Edit for \"user_functional_role\" only) Role \"hr\": (Edit", "Access) User may edit own leave submissions (Edit for \"leave_submission\":", "\"msgedit\": (Search for \"msg\": ('date', 'id') only) 
User is allowed", "owner or supervisor or timetracking-by user (View for \"daily_record\" only)", "'creation', 'creator', 'firstname', 'lastname', 'realname', 'username') only) Users are allowed", "(View for \"time_record\" only) Users are allowed to view their", "(View for \"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files', 'job_description',", "'only_hours', 'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only) Search (Search", "category if the status is \"Open\" and he is responsible", "User is allowed to create leave_submission (Create for \"leave_submission\" only)", "access reporting_group (View for \"reporting_group\" only) User is allowed to", "(View for \"artefact\" only) User is allowed to access business_unit", "only) User is allowed to search support (Search for \"support\"", "\"sup_execution\" only) User is allowed to access sup_prio (View for", "for \"public_holiday\" only) User is allowed to create reporting_group (Create", "User is allowed View on file if file is linked", "\"status\" only) User is allowed to edit status_transition (Edit for", "for \"product_family\" only) User is allowed to edit public_holiday (Edit", "create time_activity (Create for \"time_activity\" only) User is allowed to", "Edit on file if file is linked from an item", "access domain_permission (View for \"domain_permission\" only) User is allowed to", "only) User is allowed to access it_category (View for \"it_category\"", "only) User is allowed to access department (View for \"department\"", "allowed Edit on it_issue if it_issue is non-confidential or user", "for \"user\": ('business_responsible', 'department_temp', 'timetracking_by', 'vie_user', 'vie_user_bl_override', 'vie_user_ml') only) User", "to create leave_submission (Create for \"leave_submission\" only) User is allowed", "User is allowed View on (View for \"user\": ('contacts',) only)", "is on nosy list (Edit for 
\"it_project\": ('messages', 'files', 'nosy')", "User is allowed to create org_location (Create for \"org_location\" only)", "'send_to_customer', 'serial_number', 'set_first_reply', 'status', 'superseder', 'title', 'type', 'warranty') only) User", "is allowed View on (View for \"user_dynamic\": ('department', 'org_location') only)", "allowed to edit time category if the status is \"Open\"", "only) User is allowed to access sap_cc (View for \"sap_cc\"", "\"anonymous\": User may access the web interface (Web Access) Role", "for \"mailgroup\" only) User is allowed to access return_type (View", "search support (Search for \"support\" only) User is allowed to", "sup_type (View for \"sup_type\" only) User is allowed to access", "access sup_execution (View for \"sup_execution\" only) User is allowed to", "\"functional-role\": (Restore for \"user_functional_role\" only) (Retire for \"user_functional_role\" only) User", "User is allowed to create time_project (Create for \"time_project\" only)", "xmlrpc interface (Xmlrpc Access) User may create everything (Create) User", "own files (View for \"file\" only) User is allowed to", "User is allowed to access artefact (View for \"artefact\" only)", "(View for \"ext_msg\" only) User is allowed to access ext_tracker_state", "to view freeze information if he/she is in group HR-Org-Location", "is allowed to create doc_issue_status (Create for \"doc_issue_status\" only) User", "create issue (Create for \"issue\" only) User is allowed to", "to search for their own files (Search for \"file\" only)", "\"user\": ['room'] only) Users may view/edit user records for ad_domain", "\"user_contact\" only) User is allowed to edit user_contact (Edit for", "create product_type (Create for \"product_type\" only) User is allowed to", "may edit own leave submissions (View for \"leave_submission\": ('comment', 'comment_cancel',", "(Create for \"query\" only) User is allowed to edit their", "allowed to create cost_center_group (Create for 
\"cost_center_group\" only) User is", "(View for \"sap_cc\" only) User is allowed to access severity", "submissions (View for \"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp',", "\"leave_submission\" only) User is allowed to create vacation_correction (Create for", "\"sap_cc\" only) User is allowed to edit time_activity (Edit for", "allowed to access time_activity_perm (View for \"time_activity_perm\" only) User is", "or 'Controlling', or the user is supervisor or substitute supervisor", "User is allowed to edit sap_cc (Edit for \"sap_cc\" only)", "may get nosy messages for doc (Nosy for \"doc\" only)", "'files', 'nosy') only) User is allowed Edit on it_project if", "for \"doc_category\" only) User is allowed to edit doc_status (Edit", "\"absence_type\" only) User is allowed to edit room (Edit for", "to access keyword (View for \"keyword\" only) User is allowed", "(View for \"it_project_status\" only) User is allowed to access it_request_type", "the status is \"Open\" and he is responsible for the", "given user (View for \"user_dynamic\" only) User is allowed to", "analysis_result (View for \"analysis_result\" only) User is allowed to access", "User is allowed to view overtime information if he/she is", "\"file\" only) User is allowed View on msg if msg", "User is allowed View on it_issue if it_issue is non-confidential", "(Edit for \"time_record\" only) User or Timetracking by user may", "'vie_user_bl_override', 'vie_user_ml') only) User is allowed View on (View for", "of time category (View for \"time_wp\" only) User or Timetracking", "'team_lead') only) User is allowed Edit on (Edit for \"time_wp\":", "allowed to edit uc_type (Edit for \"uc_type\" only) Role \"organisation\":", "allowed to create time_record (Create for \"time_record\" only) User is", "time_records owned by user (Restore for \"time_record\" only) User or", "for \"overtime_period\" only) User is allowed to access prodcat (View", "User is allowed to 
access contact_type (View for \"contact_type\" only)", "\"time_record\" only) (Search for \"user_dynamic\" only) User is allowed to", "User is allowed to edit user_contact (Edit for \"user_contact\" only)", "\"it_issue\": ('deadline', 'status', 'title') only) User is allowed to edit", "for \"user\" only) User is allowed to create user_dynamic (Create", "(View for \"cost_center_group\" only) User is allowed to access cost_center_permission_group", "only) User is allowed to view overtime information if he/she", "vac_aliq (View for \"vac_aliq\" only) User is allowed to access", "allowed to edit (some of) their own user details (Edit", "edit time_activity (Edit for \"time_activity\" only) User is allowed to", "allowed Edit on (Edit for \"user\": ('business_responsible', 'scale_seniority') only) User", "only) User is allowed View on (View for \"user\": ('contacts',)", "for \"timesheet\" only) User is allowed to access uc_type (View", "(Retire for \"query\" only) User is allowed to search for", "'op_project', 'planned_effort', 'product_family', 'project_type', 'reporting_group', 'work_location') only) User is allowed", "(Create for \"overtime_period\" only) User is allowed to create product_family", "Org-Location as the record, it may also be seen (View", "for \"time_report\" only) User is allowed to access time_wp (View", "User is allowed to access safety_level (View for \"safety_level\" only)", "queries (Edit for \"query\" only) User is allowed to retire", "allowed to access test_level (View for \"test_level\" only) User is", "'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title'] only) User is allowed", "the same Org-Location as the record, it may also be", "it_request_type (Edit for \"it_request_type\" only) User is allowed to edit", "for \"public_holiday\" only) User is allowed to create query (Create", "is allowed for this user (also applies to timetracking by,", "is allowed to access it_project (View for \"it_project\" only) User", "is allowed 
to access cost_center_group (View for \"cost_center_group\" only) User", "\"time_activity_perm\" only) User is allowed to edit time_record (Edit for", "'HR' or 'Controlling', or the user is supervisor or substitute", "queries (Search for \"query\" only) User is allowed to search", "is allowed to access cost_center_status (View for \"cost_center_status\" only) User", "for \"doc_category\" only) User is allowed to access doc_issue_status (View", "for \"cost_center_permission_group\" only) User is allowed to access cost_center_status (View", "or Timetracking by user may edit time_records owned by user", "\"dom-user-edit-facility\": Users may view/edit user records for ad_domain for which", "for issue (Nosy for \"issue\" only) User may get nosy", "edit leave_submission (Edit for \"leave_submission\" only) User is allowed to", "User is allowed to access it_project (View for \"it_project\" only)", "only) User is allowed to access it_project_status (View for \"it_project_status\"", "only) User is allowed to create area (Create for \"area\"", "\"kind\" only) User is allowed to access msg_keyword (View for", "(Edit for \"user_functional_role\" only) Role \"hr\": (Edit for \"overtime_period\": ('name',", "is allowed to edit absence_type (Edit for \"absence_type\" only) User", "User is allowed to access uc_type (View for \"uc_type\" only)", "User is allowed to edit it_category (Edit for \"it_category\" only)", "\"it_category\" only) User is allowed to edit it_int_prio (Edit for", "may access the web interface (Web Access) User may access", "('comment', 'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp', 'user') only) User may", "only) User is allowed to access sex (View for \"sex\"", "(Edit for \"support\" only) Role \"time-report\": User is allowed to", "of their details (Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'password', 'timezone')", "for \"daily_record_freeze\" only) (Search for \"overtime_correction\" only) (Search for 
\"time_activity_perm\"", "edit some of their details (Edit for \"user\": ('csv_delimiter', 'hide_message_files',", "'travel', 'wp_no') only) (View for \"time_project\": ('activity', 'actor', 'creation', 'creator',", "to access cost_center_status (View for \"cost_center_status\" only) User is allowed", "only) User is allowed to access artefact (View for \"artefact\"", "to access user (View for \"user\" only) Role \"vacation-report\": \"\"\".strip", "only) User is allowed to edit auto_wp (Edit for \"auto_wp\"", "doc (Edit for \"doc\" only) User is allowed to edit", "is allowed Edit on (Edit for \"time_project\": ('group_lead', 'purchasing_agents', 'team_lead')", "User is allowed to access category (View for \"category\" only)", "\"organisation\" only) User is allowed to create product_family (Create for", "'responsible', 'status', 'work_location', 'wps') only) (Search for \"time_wp\": ('activity', 'actor',", "\"product_family\" only) User is allowed to create public_holiday (Create for", "allowed to access public_holiday (View for \"public_holiday\" only) User is", "for \"time_project_status\" only) User is allowed to edit time_wp (Edit", "for \"return_type\" only) User is allowed to create sup_classification (Create", "contract_type (Create for \"contract_type\" only) User is allowed to create", "User is allowed to search for their own files (Search", "Web users get the Roles \"User,Nosy\" New Email users get", "to edit contact (Edit for \"contact\" only) User is allowed", "may access the web interface (Web Access) Role \"cc-permission\": (Restore", "allowed to create support (Create for \"support\" only) User is", "reference (Create for \"reference\" only) User is allowed to edit", "only) User may get nosy messages for support (Nosy for", "on (View for \"user_dynamic\": ('id', 'sap_cc', 'user', 'valid_from', 'valid_to') only)", "allowed to access business_unit (View for \"business_unit\" only) User is", "'epic_key', 'has_expiration_date', 'id', 'is_extern', 
'is_public', 'name', 'project', 'responsible', 'time_end', 'time_start',", "see time report if reponsible or deputy of time project", "allowed Edit on (Edit for \"time_project\": ('approval_hr', 'approval_required', 'is_extern', 'is_public_holiday',", "\"time_record\" only) User is allowed to create time_wp (Create for", "approval delegated) (View for \"time_wp\": ('activity', 'actor', 'cost_center', 'creation', 'creator',", "sup_classification (Create for \"sup_classification\" only) User is allowed to create", "(Edit for \"time_wp\" only) User is allowed to edit time_wp_group", "is on nosy list (Edit for \"support\": ('analysis_end', 'analysis_result', 'analysis_start',", "interface (Web Access) User may access the xmlrpc interface (Xmlrpc", "User is allowed to create severity (Create for \"severity\" only)", "for \"user_dynamic\" only) User is allowed to view dynamic user", "use the email interface (Email Access) User may view everything", "only) (Search for \"time_record\" only) (Search for \"user_dynamic\" only) User", "is allowed Edit on (Edit for \"user\": ('business_responsible', 'scale_seniority') only)", "for \"summary_report\" only) User is allowed to access summary_type (View", "only) Role \"organisation\": User is allowed to access location (View", "to access sex (View for \"sex\" only) User is allowed", "create area (Create for \"area\" only) User is allowed to", "User is allowed to create daily_record_freeze (Create for \"daily_record_freeze\" only)", "manager of time category (View for \"time_wp\" only) User or", "ext_tracker (View for \"ext_tracker\" only) User is allowed to access", "\"location\": ('domain_part',) only) User is allowed Edit on (Edit for", "record if he is allowed to see all details on", "edit product_family (Edit for \"product_family\" only) User is allowed to", "given user (View for \"overtime_correction\" only) User is allowed to", "msg_keyword (View for \"msg_keyword\" only) User is allowed to access", "allowed to edit 
sup_classification (Edit for \"sup_classification\" only) User is", "their details (View for \"user\": ('activity', 'actor', 'creation', 'creator', 'firstname',", "\"sup_classification\" only) User is allowed to create support (Create for", "(View for \"vac_aliq\" only) User is allowed to access vacation_report", "(Edit for \"absence_type\" only) User is allowed to edit room", "access status (View for \"status\" only) User is allowed to", "nosy list (View for \"issue\" only) User is allowed View", "'vie_user'] only) Role \"dom-user-edit-hr\": (Search for \"user_dynamic\" only) May only", "only) User is allowed to access time_activity_perm (View for \"time_activity_perm\"", "\"time_activity\" only) User is allowed to access time_activity_perm (View for", "(View for \"msg\" only) User is allowed to access area", "test_level (View for \"test_level\" only) User is allowed to create", "'username') only) User is allowed Edit on (Edit for \"user\":", "to create it_category (Create for \"it_category\" only) User is allowed", "access the web interface (Web Access) User may access the", "for \"time_record\" only) User is allowed to create work_location (Create", "(View for \"time_activity_perm\" only) User is allowed to access time_project_status", "users get the Roles \"User,Nosy\" New Email users get the", "allowed to create org_location (Create for \"org_location\" only) User is", "is allowed to create test_level (Create for \"test_level\" only) User", "owned by user (Edit for \"time_record\" only) User or Timetracking", "allowed to view leave submission if he is the supervisor", "for \"msg_keyword\" only) User is allowed to access safety_level (View", "for \"mailgroup\" only) User is allowed to create return_type (Create", "for \"return_type\" only) User is allowed to edit sup_classification (Edit", "to edit doc_category (Edit for \"doc_category\" only) User is allowed", "only) (View for \"time_project\": ('activity', 'actor', 'creation', 'creator', 'deputy', 
'description',", "to edit artefact (Edit for \"artefact\" only) User is allowed", "to edit doc_status (Edit for \"doc_status\" only) User is allowed", "('activity', 'actor', 'auto_wp', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date',", "Role \"msgsync\": (Search for \"msg\": ('date', 'id') only) User is", "status (Create for \"status\" only) User is allowed to create", "to access absence_type (View for \"absence_type\" only) User is allowed", "only) Role \"nosy\": User may get nosy messages for doc", "only) User is allowed to access leave_submission (View for \"leave_submission\"", "User is allowed to edit it_request_type (Edit for \"it_request_type\" only)", "to create product_family (Create for \"product_family\" only) User is allowed", "access summary_type (View for \"summary_type\" only) User is allowed to", "only) User is allowed to access product_family (View for \"product_family\"", "allowed to access ext_tracker_type (View for \"ext_tracker_type\" only) Role \"msgsync\":", "'project_type', 'reporting_group', 'work_location') only) User is allowed to access time_project", "given user (View for \"daily_record_freeze\" only) User is allowed to", "User is allowed to edit overtime correction if the overtime", "\"kind\" only) User is allowed to access leave_status (View for", "\"time_record\" only) User is allowed to search time_wp (Search for", "severity (View for \"severity\" only) User is allowed to access", "(Search for \"time_wp\": ('activity', 'actor', 'auto_wp', 'cost_center', 'creation', 'creator', 'description',", "edit own leave submissions (View for \"leave_submission\": ('comment', 'comment_cancel', 'first_day',", "\"prodcat\" only) User is allowed to access product (View for", "edit customer (Edit for \"customer\" only) User is allowed to", "allowed View on it_project if it_project is non-confidential or user", "User is allowed to create keyword (Create for \"keyword\" only)", "is allowed to 
edit time category if the status is", "allowed to access prodcat (View for \"prodcat\" only) User is", "\"time_wp_group\" only) User is allowed to edit time_project_status (Edit for", "auto_wp (View for \"auto_wp\" only) User is allowed to access", "user data if he/she is in group HR-Org-Location and in", "\"nosy\": User may get nosy messages for doc (Nosy for", "('contacts',) only) User is allowed to access user_contact (View for", "'travel', 'wp_no') only) User is allowed to view their own", "only) User is allowed to access status (View for \"status\"", "(View for \"user_status\": ('name',) only) User is allowed View on", "User is allowed to access sup_status (View for \"sup_status\" only)", "is allowed to edit organisation (Edit for \"organisation\" only) User", "User is allowed to access reference (View for \"reference\" only)", "to edit return_type (Edit for \"return_type\" only) User is allowed", "('responsible',) only) User is allowed to edit several fields if", "allowed to view time record data if he/she is in", "'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'queries', 'realname', 'room', 'sex', 'status',", "create user_dynamic (Create for \"user_dynamic\" only) User is allowed to", "time category names if he/she has role HR or HR-Org-Location", "only) User is allowed to edit time_record (Edit for \"time_record\"", "to create status (Create for \"status\" only) User is allowed", "contact_type (View for \"contact_type\" only) User is allowed to access", "may view time category if user is owner or deputy", "User is allowed to access test_level (View for \"test_level\" only)", "is allowed to edit status (Edit for \"status\" only) User", "interface (Email Access) User may view a daily_record (and time_records", "only) User is allowed to create contract_type (Create for \"contract_type\"", "User is allowed to search for their queries (Search for", "only) User is allowed Edit on it_project if it_project is", "'time_record') only) User 
is allowed Edit on (Edit for \"time_project\":", "create status (Create for \"status\" only) User is allowed to", "user (View for \"user_dynamic\" only) Users may view/edit user records", "for \"contact\" only) Role \"controlling\": User is allowed Edit on", "only) User is allowed to edit customer_agreement (Edit for \"customer_agreement\"", "(Edit for \"it_project\" only) User is allowed to edit it_request_type", "access daily_record (View for \"daily_record\" only) User is allowed to", "submissions (Edit for \"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp',", "is allowed to view (some of) their own user details", "(View for \"file\" only) User may access the web interface", "it_issue is non-confidential or user is on nosy list (Edit", "(View for \"department\" only) User is allowed to access doc", "\"ext_tracker_state\": ('id', 'issue') only) (Search for \"user\": ('id', 'nickname', 'username')", "for \"sex\" only) User is allowed to access status (View", "\"daily_record\": ('required_overtime', 'weekend_allowed') only) User is allowed Edit on (Edit", "for it, if user is owner or deputy of time", "User is allowed to edit contract_type (Edit for \"contract_type\" only)", "to create reporting_group (Create for \"reporting_group\" only) User is allowed", "on (Edit for \"user\": ('ad_domain', 'nickname', 'password', 'pictures', 'roles', 'timetracking_by',", "allowed to access vacation_correction (View for \"vacation_correction\" only) User is", "is on nosy list (View for \"support\" only) User is", "for \"reference\" only) User is allowed to access reporting_group (View", "leave_submission (View for \"leave_submission\" only) User is allowed to access", "only) User is allowed View on it_issue if it_issue is", "for \"cost_center_group\" only) User is allowed to edit cost_center_status (Edit", "\"status\" only) User is allowed to access status_transition (View for", "is allowed to access status_transition (View for 
\"status_transition\" only) User", "\"public_holiday\" only) User is allowed to create query (Create for", "may view their own user functional role (View for \"user_functional_role\"", "in the domain_permission for the user (View for \"user\": ['room']", "User is allowed to create time_wp_group (Create for \"time_wp_group\" only)", "'queries', 'realname', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'title',", "for \"file\" only) User is allowed to create issue (Create", "is allowed to create cost_center_status (Create for \"cost_center_status\" only) User", "non-confidential or user is on nosy list (Edit for \"issue\"", "create doc (Create for \"doc\" only) User is allowed to", "is in the same Org-Location as the record, it may", "(Create for \"msg_keyword\" only) User is allowed to create safety_level", "user data if not frozen in validity span of dynamic", "only) Role \"issue_admin\": User is allowed Edit on msg if", "'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Role \"dom-user-edit-office\": User", "daily_record (and time_records that are attached to that daily_record) if", "access prodcat (View for \"prodcat\" only) User is allowed to", "\"reference\" only) User is allowed to edit artefact (Edit for", "'roles', 'timetracking_by', 'timezone', 'username') only) User is allowed Edit on", "only) User is allowed to edit ext_msg (Edit for \"ext_msg\"", "allowed to create time_activity_perm (Create for \"time_activity_perm\" only) User is", "('nosy', 'default_part_of') only) User is allowed to edit doc (Edit", "safety_level (Edit for \"safety_level\" only) User is allowed to edit", "to create cost_center_status (Create for \"cost_center_status\" only) User is allowed", "(Edit for \"time_record\" only) User is allowed to edit uc_type", "allowed to edit time_record (Edit for \"time_record\" only) User is", "(View for \"safety_level\" only) User is allowed to access severity", "leave 
submissions (View for \"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status',", "for \"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname',", "to edit customer_agreement (Edit for \"customer_agreement\" only) User is allowed", "'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) (View for", "(View for \"functional_role\" only) User is allowed to access it_category", "is linked from an item with Edit permission (Edit for", "are allowed to edit some of their details (Edit for", "time_activity_perm (Edit for \"time_activity_perm\" only) User is allowed to edit", "only) User is allowed to access contact (View for \"contact\"", "\"category\" only) User is allowed to edit doc_issue_status (Edit for", "(View for \"org_location\" only) User is allowed to access organisation", "only) User is allowed View on it_project if it_project is", "to create return_type (Create for \"return_type\" only) User is allowed", "only) User is allowed View on (View for \"user_status\": ('name',)", "list (View for \"issue\" only) User is allowed View on", "non-confidential or user is on nosy list (View for \"support\"", "for \"query\" only) User is allowed to create reporting_group (Create", "user may edit time_records owned by user (Retire for \"time_record\"", "'wp_no') only) User is allowed to retire their queries (Retire", "allowed to access domain_permission (View for \"domain_permission\" only) User is", "Role \"controlling\": User is allowed Edit on (Edit for \"daily_record\":", "'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'is_extern', 'is_public', 'id',", "messages (Search for \"msg\" only) User is allowed to search", "for \"it_category\" only) User is allowed to create it_int_prio (Create", "product_type (Edit for \"product_type\" only) User is allowed to edit", 
"for \"it_project\" only) User is allowed to create domain_permission (Create", "access product_family (View for \"product_family\" only) User is allowed to", "'part_of', 'planned_begin', 'planned_end', 'priority', 'release', 'responsible', 'safety_level', 'severity', 'status', 'superseder',", "analysis_result (Create for \"analysis_result\" only) User is allowed to create", "access user_dynamic (View for \"user_dynamic\" only) User is allowed to", "for \"time_project\" only) User is allowed Edit on (Edit for", "to create file (Create for \"file\" only) User is allowed", "create keyword (Create for \"keyword\" only) User is allowed to", "view (some of) their own user details (View for \"user\":", "access summary_report (View for \"summary_report\" only) User is allowed to", "'team_lead') only) Role \"project\": User is allowed Edit on (Edit", "'serial_number', 'set_first_reply', 'status', 'superseder', 'title', 'type', 'warranty') only) User is", "is allowed to access sup_warranty (View for \"sup_warranty\" only) User", "are delegated (View for \"time_record\" only) User is allowed to", "if they are on the list of allowed external users", "is linked from an item with View permission (View for", "to create user_contact (Create for \"user_contact\" only) User is allowed", "is allowed to access sup_prio (View for \"sup_prio\" only) User", "on (Edit for \"msg\": ('keywords',) only) User is allowed Edit", "for \"department\" only) User is allowed to access doc (View", "'pictures', 'position_text', 'queries', 'realname', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor',", "doc_category (View for \"doc_category\" only) User is allowed to access", "for \"mailgroup\" only) User is allowed to edit domain_permission (Edit", "the user owns the daily_record or has role 'HR' or", "allowed to create doc_category (Create for \"doc_category\" only) User is", "User is allowed to access ext_tracker (View for \"ext_tracker\" only)", "only) User is allowed to create 
org_location (Create for \"org_location\"", "allowed to create domain_permission (Create for \"domain_permission\" only) User is", "same Org-Location as the given user (View for \"overtime_correction\" only)", "User is allowed to access leave_status (View for \"leave_status\" only)", "Role \"procurement\": (View for \"sap_cc\" only) (View for \"time_project\" only)", "'set_first_reply', 'status', 'superseder', 'title', 'type', 'warranty') only) User is allowed", "Access) Role \"cc-permission\": (Restore for \"cost_center_permission_group\" only) (Retire for \"cost_center_permission_group\"", "for \"category\" only) User is allowed to edit doc_issue_status (Edit", "Create (Create for \"user_contact\" only) User is allowed Edit on", "is the department manager of the owner of the daily", "for \"location\": ('domain_part',) only) User is allowed Edit on (Edit", "User is allowed to access auto_wp (View for \"auto_wp\" only)", "is allowed Edit on (Edit for \"user\": ('address', 'alternate_addresses', 'nickname',", "\"ext_tracker_state\" only) User is allowed to access ext_tracker_type (View for", "it_issue (Search for \"it_issue\" only) User is allowed to search", "for \"daily_record\" only) User is allowed to access daily_record_status (View", "it_issue_status (View for \"it_issue_status\" only) User is allowed to access", "\"auto_wp\" only) User is allowed to edit dynamic user data", "(View for \"leave_submission\" only) User is allowed to view selected", "(View for \"query\" only) User is allowed to access time_project", "time category if user is owner or deputy of time", "messages for doc (Nosy for \"doc\" only) User may get", "it_project (Nosy for \"it_project\" only) User may get nosy messages", "(Retire for \"room\" only) User is allowed to create room", "only) User is allowed Edit on (Edit for \"time_project\": ('group_lead',", "'valid_from', 'valid_to') only) User is allowed to access contract_type (View", "(View for \"daily_record\" only) User may view their 
own user", "for \"issue\": ['activity', 'actor', 'area', 'category', 'closed', 'composed_of', 'creation', 'creator',", "edit it_int_prio (Edit for \"it_int_prio\" only) User is allowed to", "list of time category or if user is department manager", "uc_type (Create for \"uc_type\" only) User is allowed to edit", "allowed to create kind (Create for \"kind\" only) User is", "'goods_sent', 'lot', 'messages', 'nosy', 'number_effected', 'numeric_effort', 'prio', 'prodcat', 'product', 'related_issues',", "cost_center_status (Create for \"cost_center_status\" only) User is allowed to create", "of time category or if user is department manager of", "doc_issue_status (Edit for \"doc_issue_status\" only) User is allowed to edit", "User is allowed to access daily_record_status (View for \"daily_record_status\" only)", "absence (View for \"absence\" only) User is allowed to access", "to edit department (Edit for \"department\" only) User is allowed", "is not frozen (Edit for \"overtime_correction\" only) User is allowed", "'description', 'epic_key', 'planned_effort', 'time_end', 'time_start', 'time_wp_summary_no') only) User may access", "edit sap_cc (Edit for \"sap_cc\" only) User is allowed to", "\"ext_tracker_state\" only) User is allowed to create file (Create for", "allowed to edit org_location (Edit for \"org_location\" only) User is", "to access time_wp_group (View for \"time_wp_group\" only) User is allowed", "allowed to search support (Search for \"support\" only) User is", "allowed to create product_type (Create for \"product_type\" only) User is", "it_issue if it_issue is non-confidential or user is on nosy", "only) User is allowed to search it_project (Search for \"it_project\"", "\"ext_tracker_type\" only) User is allowed to access keyword (View for", "their own and public queries for classes where they have", "(Edit for \"keyword\" only) User is allowed to edit kind", "is allowed to create time_report (Create for \"time_report\" only) User", "(Edit for 
\"it_issue\" only) User is allowed to edit it_project", "only) User is allowed to access it_prio (View for \"it_prio\"", "in the domain_permission for the user (Edit for \"user\": ['contacts',", "responsible/deputy (Edit for \"time_wp\": ('bookers', 'description', 'epic_key', 'planned_effort', 'time_end', 'time_start',", "to create time_wp (Create for \"time_wp\" only) User is allowed", "(Edit for \"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files', 'job_description',", "\"time_wp\" only) User is allowed to access vacation_correction (View for", "User is allowed to access issue (View for \"issue\" only)", "\"it_issue\" only) User may get nosy messages for it_project (Nosy", "(View for \"user\": ('nickname', 'status', 'username') only) User is allowed", "User is allowed to edit issue (Edit for \"issue\" only)", "'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'queries', 'realname', 'room', 'sex',", "User is allowed to access it_prio (View for \"it_prio\" only)", "User is allowed to search time_record (Search for \"time_record\" only)", "allowed to search issue (Search for \"issue\" only) User is", "only) User is allowed to create domain_permission (Create for \"domain_permission\"", "(Edit for \"it_category\" only) User is allowed to edit it_int_prio", "for \"time_activity_perm\" only) User is allowed to access time_project_status (View", "only) Role \"contact\": User is allowed to create contact (Create", "(View for \"daily_record_status\" only) User is allowed to access department", "to access cost_center_group (View for \"cost_center_group\" only) User is allowed", "cost_center_status (Edit for \"cost_center_status\" only) User is allowed to edit", "is allowed View on issue if issue is non-confidential or", "'project') only) User is allowed to view/edit workpackage if he", "allowed View on (View for \"user_dynamic\": ('department', 'org_location') only) User", "edit 
everything (Edit) User may manipulate user Roles through the", "by user may edit time_records owned by user (Retire for", "'status', 'superseder', 'test_level', 'title'] only) User is allowed View on", "(Edit for \"uc_type\" only) Role \"organisation\": User is allowed to", "'lunch_duration', 'lunch_start', 'password', 'queries', 'realname', 'room', 'subst_active', 'substitute', 'timezone', 'tt_lines')", "issue (Search for \"issue\" only) User is allowed to search", "User is allowed to access time_wp_summary_no (View for \"time_wp_summary_no\" only)", "user (Edit for \"time_record\" only) User or Timetracking by user", "for \"department\": ('doc_num',) only) User is allowed to create artefact", "access work_location (View for \"work_location\" only) User is allowed to", "user (View for \"user\": ['room'] only) Role \"dom-user-edit-gtt\": (Search for", "to access it_int_prio (View for \"it_int_prio\" only) User is allowed", "(Create for \"ext_tracker_state\" only) User is allowed to edit ext_msg", "Org-Location as the given user (View for \"overtime_correction\" only) User", "to timetracking by, supervisor and approval delegated) (View for \"time_wp\":", "\"uc_type\" only) User is allowed to edit absence (Edit for", "for \"user_functional_role\" only) User is allowed to create user_functional_role (Create", "for \"it_project\": ('messages', 'files', 'nosy') only) User is allowed Edit", "\"msg_keyword\" only) User is allowed to create safety_level (Create for", "Org-Location as the given user (View for \"user_dynamic\" only) User", "access public_holiday (View for \"public_holiday\" only) User is allowed to", "for it_project (Nosy for \"it_project\" only) User may get nosy", "allowed to edit room (Edit for \"room\" only) User is", "(View for \"it_project\" only) Role \"staff-report\": Role \"sub-login\": Role \"summary_view\":", "'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only) Users may", "also be seen (View for 
\"time_record\" only) User is allowed", "'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'no_overtime', 'no_overtime_day', 'only_hours', 'overtime_reduction') only) User", "'pictures', 'position_text', 'reduced_activity_list', 'roles', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor',", "to access sup_type (View for \"sup_type\" only) User is allowed", "create category (Create for \"category\" only) User is allowed to", "if he is owner or supervisor or timetracking-by user (View", "(Web Access) Role \"cc-permission\": (Restore for \"cost_center_permission_group\" only) (Retire for", "to create time_project_status (Create for \"time_project_status\" only) User is allowed", "user Roles through the web (Web Roles) Role \"hr-leave-approval\": User", "'clearance_by', 'creation', 'creator', 'firstname', 'id', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname',", "allowed to create daily_record_freeze (Create for \"daily_record_freeze\" only) User is", "is allowed to access return_type (View for \"return_type\" only) User", "if it_project is non-confidential or user is on nosy list", "\"it_category\" only) User is allowed to access it_issue_status (View for", "allowed Edit on msg if msg is linked from an", "User is allowed to create it_int_prio (Create for \"it_int_prio\" only)", "it_int_prio (Create for \"it_int_prio\" only) User is allowed to create", "query (Edit for \"query\" only) User is allowed to edit", "User is allowed to view time record if he is", "is allowed to access it_int_prio (View for \"it_int_prio\" only) User", "allowed View on msg if msg is linked from an", "to edit contact (Edit for \"contact\" only) Role \"controlling\": User", "User is allowed to access time_activity_perm (View for \"time_activity_perm\" only)", "on (View for \"user\": ('contacts',) only) User is allowed to", "access it_project (View for \"it_project\" only) Role \"staff-report\": Role \"sub-login\":", "mailgroup (Edit 
for \"mailgroup\" only) User is allowed to edit", "org_group (View for \"org_group\" only) User is allowed to access", "Access) User may view everything (View) Role \"anonymous\": User may", "for \"it_issue\": ('deadline', 'status', 'title') only) User is allowed to", "'planned_begin', 'planned_end', 'priority', 'release', 'responsible', 'safety_level', 'severity', 'status', 'superseder', 'test_level',", "allowed to view some of their details (View for \"user\":", "for \"contract_type\" only) User is allowed to create user (Create", "if the status is \"Open\" and he is responsible for", "for \"daily_record_freeze\": ('frozen',) only) User is allowed to edit location", "'timetracking_by', 'timezone', 'username') only) User is allowed Edit on (Edit", "allowed to view freeze information if he/she is in group", "for \"sap_cc\" only) User is allowed to access severity (View", "for \"time_wp\": ('project',) only) User is allowed View on (View", "is allowed to edit it_int_prio (Edit for \"it_int_prio\" only) User", "allowed to create absence_type (Create for \"absence_type\" only) User is", "'username') only) User is allowed Edit on file if file", "is allowed View on (View for \"user_dynamic\": ('id', 'sap_cc', 'user',", "or supervisor or timetracking-by user (Edit for \"daily_record\": ('status', 'time_record')", "permission via containers (Edit for \"issue\": ['activity', 'actor', 'area', 'category',", "'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Role", "for \"artefact\" only) User is allowed to create doc (Create", "the domain_permission for the user (Edit for \"user\": ['room'] only)", "validity span of dynamic user record (Edit for \"user_dynamic\" only)", "\"sup_status\" only) User is allowed to access sup_type (View for", "\"time_project\" only) User is allowed to access time_record (View for", "only) User is allowed to access contract_type (View for \"contract_type\"", "(View for \"contact\" only) User is 
allowed to access contact_type", "with View permission (View for \"file\" only) User is allowed", "\"doc_category\" only) User is allowed to edit doc_status (Edit for", "he/she has role HR or HR-Org-Location (View for \"time_wp\": ('name',", "some of their details (View for \"user\": ('activity', 'actor', 'creation',", "daily_record_freeze (Create for \"daily_record_freeze\" only) User is allowed to create", "edit absence (Edit for \"absence\" only) User is allowed to", "\"user\": ('address', 'alternate_addresses', 'nickname', 'password', 'timezone', 'username') only) User is", "'lunch_start', 'nickname', 'pictures', 'position_text', 'queries', 'realname', 'room', 'sex', 'status', 'subst_active',", "customer (View for \"customer\" only) User is allowed to access", "(Create for \"user_contact\" only) User is allowed to edit user_contact", "only) User is allowed to access time_activity (View for \"time_activity\"", "\"file\": ('name', 'type') only) User is allowed Edit on (Edit", "'nosy') only) User is allowed Edit on support if support", "time_record (Edit for \"time_record\" only) User is allowed to edit", "User is allowed to edit area (Edit for \"area\" only)", "by user may edit time_records owned by user (Restore for", "'Controlling', or the user is supervisor or substitute supervisor of", "keyword (View for \"keyword\" only) User is allowed to access", "User is allowed to create auto_wp (Create for \"auto_wp\" only)", "to access doc (View for \"doc\" only) User is allowed", "\"customer_agreement\" only) User is allowed to access daily record if", "(Edit for \"leave_submission\": ('status',) only) User is allowed to access", "an item with View permission (View for \"file\" only) User", "(Edit for \"it_issue\": ('deadline', 'status', 'title') only) User is allowed", "artefact (Edit for \"artefact\" only) User is allowed to edit", "is allowed to create it_category (Create for \"it_category\" only) User", "if booking is allowed for this user (also applies to", "is 
allowed to access doc_status (View for \"doc_status\" only) User", "'nosy', 'only_hours', 'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location', 'wps') only) (Search", "only) User is allowed to edit product_family (Edit for \"product_family\"", "allowed to edit it_project (Edit for \"it_project\" only) User is", "\"ext_tracker\" only) User is allowed to edit issue (Edit for", "of the owner of the daily record. If user has", "only) User is allowed to create customer_agreement (Create for \"customer_agreement\"", "allowed to access sex (View for \"sex\" only) User is", "time category (View for \"time_project\" only) User may view work", "\"user_dynamic\" only) User is allowed to view dynamic user data", "(Edit for \"category\": ('nosy', 'default_part_of') only) User is allowed to", "may view a daily_record (and time_records that are attached to", "User is allowed to access time_wp (View for \"time_wp\" only)", "\"public_holiday\" only) User is allowed to edit reporting_group (Edit for", "for \"it_issue\" only) User is allowed to access it_project (View", "he is responsible for it (Edit for \"category\": ('nosy', 'default_part_of')", "edit product_type (Edit for \"product_type\" only) User is allowed to", "vacation_correction (Create for \"vacation_correction\" only) User is allowed to edit", "get nosy messages for it_issue (Nosy for \"it_issue\" only) User", "create cost_center_status (Create for \"cost_center_status\" only) User is allowed to", "\"issue\" only) User is allowed to create msg (Create for", "to access ext_tracker (View for \"ext_tracker\" only) User is allowed", "(Edit for \"leave_submission\" only) User is allowed to edit vacation_correction", "Edit on (Edit for \"sap_cc\": ('group_lead', 'purchasing_agents', 'team_lead') only) User", "for \"overtime_correction\" only) (Search for \"time_activity_perm\" only) (Search for \"time_record\"", "\"it_project\" only) Role \"msgedit\": (Search for \"msg\": ('date', 'id') only)", "may 
get nosy messages for it_project (Nosy for \"it_project\" only)", "('ad_domain', 'nickname', 'password', 'pictures', 'roles', 'timetracking_by', 'timezone', 'username') only) User", "for \"daily_record_freeze\" only) User is allowed to access leave_submission (View", "create department (Create for \"department\" only) User is allowed to", "msg_keyword (Edit for \"msg_keyword\" only) User is allowed to edit", "\"safety_level\" only) User is allowed to access severity (View for", "User is allowed Edit on (Edit for \"daily_record\": ('required_overtime', 'weekend_allowed')", "(Create for \"artefact\" only) User is allowed to create doc", "User is allowed to view (some of) their own user", "\"contract_type\" only) User is allowed to edit leave_submission (Edit for", "\"category\" only) User is allowed to access contact (View for", "in the domain_permission for the user (View for \"user_dynamic\" only)", "owner of the contact or the contact is marked visible", "view dynamic user data if he/she is in group HR-Org-Location", "for the user (Edit for \"user\": ['contacts', 'position_text', 'room'] only)", "by user (Retire for \"time_record\" only) User or Timetracking by", "allowed to access location (View for \"location\" only) User is", "is allowed to access leave_submission (View for \"leave_submission\" only) User", "for \"it_request_type\" only) User is allowed to edit mailgroup (Edit", "\"user\": ('contacts',) only) User is allowed to access auto_wp (View", "'creator', 'deputy', 'description', 'id', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'name', 'nosy',", "for \"time_project\": ('infosec_req', 'is_extern', 'max_hours', 'op_project', 'planned_effort', 'product_family', 'project_type', 'reporting_group',", "\"test_level\" only) User is allowed to edit area (Edit for", "only) User is allowed to create analysis_result (Create for \"analysis_result\"", "allowed to access time_wp (View for \"time_wp\" only) Role \"sec-incident-nosy\":", 
"contact or the contact is marked visible (View for \"user_contact\"", "for \"organisation\" only) Role \"pgp\": Role \"procurement\": (View for \"sap_cc\"", "'user', 'valid_from', 'valid_to') only) User is allowed to access contract_type", "timetracking by, supervisor and approval delegated) (View for \"time_wp\": ('activity',", "it_project (View for \"it_project\" only) Role \"msgedit\": (Search for \"msg\":", "only) User is allowed to access customer_agreement (View for \"customer_agreement\"", "for their own messages (Search for \"msg\" only) User is", "\"user\": (Search for \"time_project\": ('activity', 'actor', 'creation', 'creator', 'deputy', 'description',", "'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp', 'user') only) User may see", "'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id', 'is_extern', 'is_public', 'name',", "department manager of the owner of the daily record. If", "work_location (View for \"work_location\" only) User is allowed to create", "is allowed Edit on support if support is non-confidential or", "(Search for \"leave_submission\" only) User is allowed to search support", "(Edit for \"department\": ('doc_num',) only) User is allowed to create", "only) User is allowed to create ext_tracker_state (Create for \"ext_tracker_state\"", "for \"user_status\" only) User is allowed to access vac_aliq (View", "to access contract_type (View for \"contract_type\" only) User is allowed", "is responsible for it (Edit for \"category\": ('nosy', 'default_part_of') only)", "allowed to edit domain_permission (Edit for \"domain_permission\" only) User is", "User is allowed to edit their queries (Edit for \"query\"", "has role 'HR' or 'Controlling', or the user is supervisor", "only) User is allowed to create cost_center (Create for \"cost_center\"", "view a daily_record (and time_records that are attached to that", "for \"issue\" only) User is allowed to search it_issue (Search", "for 
\"leave_submission\" only) User is allowed to access vacation_correction (View", "user (Edit for \"daily_record\": ('status', 'time_record') only) User is allowed", "allowed to create leave_submission (Create for \"leave_submission\" only) User is", "for \"it_int_prio\" only) User is allowed to access it_issue (View", "is allowed to create overtime_correction (Create for \"overtime_correction\" only) User", "time project or on nosy list of time project (View", "supervisor relationship is transitive) or the user is the department", "'creation', 'creator', 'firstname', 'id', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures',", "'supervisor', 'timezone', 'title', 'tt_lines', 'username') only) User is allowed View", "to access sup_prio (View for \"sup_prio\" only) User is allowed", "(View for \"business_unit\" only) User is allowed to access category", "delegated (View for \"time_record\" only) User is allowed to view", "User is allowed to edit support (Edit for \"support\" only)", "User is allowed to access msg_keyword (View for \"msg_keyword\" only)", "only) User is allowed to view their own messages (View", "allowed to edit work_location (Edit for \"work_location\" only) Role \"doc_admin\":", "access time_project_status (View for \"time_project_status\" only) User is allowed to", "the web interface (Web Access) User may use the email", "customer (Edit for \"customer\" only) User is allowed to edit", "or user is on nosy list (View for \"support\" only)", "User is allowed to edit category (Edit for \"category\" only)", "only) Role \"controlling\": User is allowed Edit on (Edit for", "visible (View for \"user_contact\" only) User is allowed to view", "Role \"it\": Create (Create for \"user_contact\" only) User is allowed", "(Email Access) User may view a daily_record (and time_records that", "is allowed to create doc (Create for \"doc\" only) User", "for \"daily_record\": ('required_overtime', 'weekend_allowed') only) User is 
allowed Edit on", "\"time_record\" only) User is allowed to edit work_location (Edit for", "user_contact (Create for \"user_contact\" only) User is allowed to create", "ext_tracker_state (Edit for \"ext_tracker_state\" only) Role \"nosy\": User may get", "only) User is allowed to edit category (Edit for \"category\"", "(Edit for \"mailgroup\" only) User is allowed to edit return_type", "user_status (View for \"user_status\" only) User is allowed to access", "View permission (View for \"msg\" only) User is allowed to", "user is supervisor or substitute supervisor of the owner of", "status_transition (Create for \"status_transition\" only) User is allowed to create", "(Edit for \"user\": ['contacts', 'position_text', 'room'] only) Users may view/edit", "allowed Edit on (Edit for \"time_project\": ('infosec_req', 'is_extern', 'max_hours', 'op_project',", "user) (Edit for \"file\" only) Role \"user\": (Search for \"time_project\":", "only) User is allowed to create queries (Create for \"query\"", "\"cost_center\" only) User is allowed to access cost_center_group (View for", "\"user\": ['room'] only) Role \"dom-user-edit-gtt\": (Search for \"user_dynamic\" only) May", "for \"user\": ('roles',) only) User is allowed View on (View", "the given user (View for \"overtime_correction\" only) User is allowed", "User is allowed to edit analysis_result (Edit for \"analysis_result\" only)", "he is Stakeholder/Responsible for an it_issue (Edit for \"it_issue\": ('deadline',", "for \"cost_center_permission_group\" only) User is allowed to create cost_center_permission_group (Create", "'first_day', 'last_day', 'status', 'time_wp', 'user') only) User may see time", "\"it_request_type\" only) User is allowed to edit mailgroup (Edit for", "('id', 'name') only) User is allowed View on (View for", "may see one of the time_records for that day (View", "relationship is transitive) or the user is the department manager", "get the Roles \"User,Nosy\" New Email users get the Role", "is 
allowed to create status_transition (Create for \"status_transition\" only) User", "he is owner or supervisor or timetracking-by user (View for", "\"reporting_group\" only) User is allowed to create room (Create for", "(View for \"time_project\" only) User may view work package if", "the user (View for \"user\": ['contacts', 'position_text', 'room'] only) Role", "only) User is allowed to search time_record (Search for \"time_record\"", "'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'reduced_activity_list', 'roles',", "only) User is allowed to search for their own messages", "the same Org-Location as the given user (View for \"daily_record_freeze\"", "from an item with Edit permission (Edit for \"msg\" only)", "(View for \"analysis_result\" only) User is allowed to access area", "create public_holiday (Create for \"public_holiday\" only) User is allowed to", "User is allowed to edit ext_tracker_state (Edit for \"ext_tracker_state\" only)", "'description', 'id', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'name', 'nosy', 'only_hours', 'op_project',", "is allowed View on msg if msg is linked from", "(View for \"msg\" only) User is allowed to view their", "edit reference (Edit for \"reference\" only) Role \"dom-user-edit-facility\": Users may", "of the daily record (the supervisor relationship is transitive) or", "User is allowed to edit cost_center_permission_group (Edit for \"cost_center_permission_group\" only)", "(View for \"user\": ('activity', 'actor', 'address', 'alternate_addresses', 'creation', 'creator', 'id',", "access it_category (View for \"it_category\" only) User is allowed to", "\"user\": ('activity', 'actor', 'creation', 'creator', 'firstname', 'lastname', 'realname', 'username') only)", "edit time_report (Edit for \"time_report\" only) User may edit own", "'time_wp_summary_no', 'travel', 'wp_no') only) (View for \"time_project\": ('activity', 
'actor', 'creation',", "\"business_unit\" only) User is allowed to access category (View for", "allowed to edit overtime correction if the overtime correction is", "'fixed_in', 'id', 'keywords', 'kind', 'maturity_index', 'messages', 'needs', 'nosy', 'numeric_effort', 'part_of',", "to access category (View for \"category\" only) User is allowed", "only) User is allowed to edit kind (Edit for \"kind\"", "if he/she is in group HR-Org-Location and in the same", "for \"leave_submission\" only) User is allowed to access overtime_correction (View", "only) User is allowed to access uc_type (View for \"uc_type\"", "access leave_status (View for \"leave_status\" only) User is allowed to", "on nosy list of time project (View for \"time_report\" only)", "'timezone', 'username') only) User is allowed Edit on file if", "\"room\" only) User is allowed View on (View for \"user\":", "absence (Create for \"absence\" only) User is allowed to create", "the person to whom approvals are delegated (View for \"leave_submission\"", "'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'title', 'tt_lines', 'username')", "allowed to access status_transition (View for \"status_transition\" only) User is", "only) User is allowed to create user_functional_role (Create for \"user_functional_role\"", "\"facility\": (Restore for \"room\" only) (Retire for \"room\" only) User", "is owner or supervisor or timetracking-by user (Edit for \"daily_record\":", "allowed to create contact (Create for \"contact\" only) User is", "'weekend_allowed') only) User is allowed Edit on (Edit for \"daily_record\":", "only) User is allowed to edit cost_center_permission_group (Edit for \"cost_center_permission_group\"", "only) User is allowed to view leave submission if he", "Roles) Role \"hr-leave-approval\": User is allowed Edit on (Edit for", "'max_hours', 'op_project', 'planned_effort', 'product_family', 'project_type', 'reporting_group', 'work_location') only) User is", "to create 
query (Create for \"query\" only) User is allowed", "is allowed to edit contract_type (Edit for \"contract_type\" only) User", "\"it_project\" only) User is allowed to create domain_permission (Create for", "on it_project if it_project is non-confidential or user is on", "User is allowed to access return_type (View for \"return_type\" only)", "(Edit for \"it_int_prio\" only) User is allowed to edit it_issue", "see all details on work package or User may view", "if reponsible or deputy of time project or on nosy", "access location (View for \"location\" only) User is allowed to", "on nosy list (Edit for \"it_issue\": ('messages', 'files', 'nosy') only)", "\"msg\" only) User is allowed to search for their queries", "search for their queries (Search for \"query\" only) User is", "interface (Web Access) User may use the email interface (Email", "\"project_type\" only) User is allowed to access public_holiday (View for", "(Edit for \"artefact\" only) User is allowed to edit doc", "(View for \"absence_type\" only) User is allowed to access analysis_result", "allowed to create overtime_period (Create for \"overtime_period\" only) User is", "User is allowed to access it_issue (View for \"it_issue\" only)", "the user is supervisor or substitute supervisor of the owner", "non-confidential or user is on nosy list (View for \"it_project\"", "list (Edit for \"issue\" only) User is allowed Edit on", "'files_affected', 'fixed_in', 'id', 'keywords', 'kind', 'maturity_index', 'messages', 'needs', 'nosy', 'numeric_effort',", "'creator', 'cur_est_begin', 'cur_est_end', 'deadline', 'depends', 'doc_issue_status', 'earliest_start', 'effective_prio', 'effort_hours', 'external_users',", "only) User is allowed to edit cost_center (Edit for \"cost_center\"", "create kind (Create for \"kind\" only) User is allowed to", "only) User is allowed to create product_family (Create for \"product_family\"", "is allowed to create daily_record_freeze (Create for \"daily_record_freeze\" only) User", 
"(View for \"leave_submission\" only) User is allowed to access vacation_correction", "allowed to edit contact (Edit for \"contact\" only) Role \"controlling\":", "package if responsible for it, if user is owner or", "('doc_num',) only) User is allowed to create artefact (Create for", "(Create for \"cost_center\" only) User is allowed to create cost_center_group", "(Restore for \"time_record\" only) User or Timetracking by user may", "to access msg_keyword (View for \"msg_keyword\" only) User is allowed", "edit severity (Edit for \"severity\" only) User is allowed to", "\"user\": ('business_responsible', 'scale_seniority') only) User is allowed View on (View", "create user_functional_role (Create for \"user_functional_role\" only) User is allowed to", "for \"time_project\": ('activity', 'actor', 'creation', 'creator', 'deputy', 'description', 'id', 'is_extern',", "is allowed to edit time_project_status (Edit for \"time_project_status\" only) User", "manager of the owner of the daily record. 
If user", "to access daily_record_status (View for \"daily_record_status\" only) User is allowed", "User is allowed to edit contact (Edit for \"contact\" only)", "access doc_issue_status (View for \"doc_issue_status\" only) User is allowed to", "for \"ext_msg\" only) User is allowed to create ext_tracker_state (Create", "allowed to edit reference (Edit for \"reference\" only) Role \"dom-user-edit-facility\":", "is time category owner or deputy (Edit for \"time_wp\": ('cost_center',", "to edit category (Edit for \"category\" only) User is allowed", "or user is on nosy list (Edit for \"issue\" only)", "allowed to access artefact (View for \"artefact\" only) User is", "only) Role \"dom-user-edit-facility\": Users may view/edit user records for ad_domain", "Roles through the web (Web Roles) User may restore everything", "Edit on (Edit for \"time_project\": ('cost_center', 'department', 'deputy', 'description', 'name',", "User is allowed to edit vacation_correction (Edit for \"vacation_correction\" only)", "\"time_record\" only) User is allowed to view work package and", "(Web Roles) Role \"hr-leave-approval\": User is allowed Edit on (Edit", "edit safety_level (Edit for \"safety_level\" only) User is allowed to", "'first_day', 'last_day', 'status', 'time_wp', 'user') only) User may edit own", "'date', 'id', 'keywords', 'subject', 'summary') only) User is allowed to", "for \"status_transition\" only) User is allowed to access test_level (View", "create time_report (Create for \"time_report\" only) User is allowed to", "'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp', 'user') only) User may edit", "to access doc_issue_status (View for \"doc_issue_status\" only) User is allowed", "access user_functional_role (View for \"user_functional_role\" only) User is allowed to", "room (Create for \"room\" only) User is allowed to create", "is allowed to create domain_permission (Create for \"domain_permission\" only) User", "\"it_project\" only) User is allowed to 
search leave_submission (Search for", "or deputy (Edit for \"time_wp\": ('cost_center', 'is_public', 'name', 'responsible', 'time_wp_summary_no',", "to access organisation (View for \"organisation\" only) User is allowed", "is allowed to edit time_wp_group (Edit for \"time_wp_group\" only) Role", "if issue is non-confidential or user is on nosy list", "'status', 'title') only) User is allowed to edit their queries", "'actor', 'ad_domain', 'address', 'alternate_addresses', 'business_responsible', 'clearance_by', 'creation', 'creator', 'firstname', 'id',", "User is allowed to access it_int_prio (View for \"it_int_prio\" only)", "\"doc_issue_status\" only) User is allowed to edit ext_tracker (Edit for", "is allowed to access domain_permission (View for \"domain_permission\" only) User", "edit ext_msg (Edit for \"ext_msg\" only) User is allowed to", "User is allowed to edit time_report (Edit for \"time_report\" only)", "Edit permission (Edit for \"file\" only) User is allowed Edit", "view overtime information if he/she is in group HR-Org-Location and", "for \"time_record\" only) Users are allowed to view their own", "project (View for \"time_report\" only) User may use the email", "allowed to access summary_type (View for \"summary_type\" only) User is", "whom approvals are delegated (View for \"leave_submission\" only) User is", "allowed to access severity (View for \"severity\" only) User is", "'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures',", "is allowed to create location (Create for \"location\" only) User", "for \"public_holiday\" only) User is allowed to access reference (View", "package if booking is allowed for this user (also applies", "allowed to access vacation_correction (View for \"vacation_correction\" only) Role \"hr-org-location\":", "\"cost_center\" only) User is allowed to create cost_center_group (Create for", "for \"time_wp\" only) User or 
Timetracking by user may edit", "to access time_record (View for \"time_record\" only) User is allowed", "'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Role \"dom-user-edit-office\": User is allowed", "only) User is allowed to access support (View for \"support\"", "User is allowed to edit cost_center_status (Edit for \"cost_center_status\" only)", "View on (View for \"user\": ('business_responsible', 'planning_role', 'scale_seniority') only) User", "for the time category (Edit for \"time_project\": ('deputy', 'planned_effort', 'nosy')", "is allowed to edit absence (Edit for \"absence\" only) User", "are delegated (View for \"leave_submission\" only) User is allowed to", "if the user owns the daily_record or has role 'HR'", "have search permission (View for \"query\" only) Role \"facility\": (Restore", "record, it may also be seen (View for \"daily_record\" only)", "to access prodcat (View for \"prodcat\" only) User is allowed", "user_functional_role (View for \"user_functional_role\" only) User is allowed to create", "create time_project_status (Create for \"time_project_status\" only) User is allowed to", "restore everything (Restore) User may retire everything (Retire) User may", "\"severity\" only) User is allowed to access status (View for", "is the supervisor or the person to whom approvals are", "he is responsible for the time category (Edit for \"time_project\":", "time_record (Create for \"time_record\" only) User is allowed to create", "to create product_type (Create for \"product_type\" only) User is allowed", "for \"contact\" only) User is allowed to create customer (Create", "only) User is allowed to edit status (Edit for \"status\"", "deputy of time category or on nosy list of time", "only) User is allowed to view selected fields in work", "\"location\" only) User is allowed to create org_location (Create for", "allowed to access leave_status (View for \"leave_status\" only) User is", "location (Create for \"location\" only) User is allowed 
to create", "only) User is allowed to access public_holiday (View for \"public_holiday\"", "(Edit for \"reporting_group\" only) User is allowed to edit sap_cc", "messages for it_project (Nosy for \"it_project\" only) User may get", "category owner or deputy (Edit for \"time_wp\": ('cost_center', 'is_public', 'name',", "for \"ext_tracker_state\" only) User is allowed to edit if he's", "\"mailgroup\" only) User is allowed to edit return_type (Edit for", "their own user details (View for \"user\": ('entry_date', 'planning_role') only)", "\"time_record\" only) User is allowed to edit uc_type (Edit for", "'realname', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'title', 'tt_lines',", "'substitute', 'timezone', 'tt_lines') only) User is allowed to edit category", "'is_special_leave', 'is_vacation', 'name', 'nosy', 'only_hours', 'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location',", "User is allowed to edit status (Edit for \"status\" only)", "approvals are delegated (View for \"time_record\" only) User is allowed", "\"time_activity_perm\" only) User is allowed to access time_project_status (View for", "to view their own and public queries for classes where", "or HR-Org-Location (View for \"time_project\": ('name',) only) User is allowed", "'actor', 'auto_wp', 'bookers', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date',", "only) User is allowed to view their own overtime information", "allowed to search time_wp (Search for \"time_wp\": ('activity', 'actor', 'auto_wp',", "time_record (View for \"time_record\" only) User is allowed to access", "User is allowed to create ext_tracker_state (Create for \"ext_tracker_state\" only)", "'planned_effort', 'product_family', 'project_type', 'reporting_group', 'work_location') only) User is allowed to", "for \"time_wp\": ('activity', 'actor', 'auto_wp', 'bookers', 'cost_center', 'creation', 'creator', 
'description',", "Users may see daily record if they may see one", "'deputy', 'description', 'name', 'nosy', 'organisation', 'responsible', 'status') only) User is", "User is allowed to edit test_level (Edit for \"test_level\" only)", "support is non-confidential or user is on nosy list (View", "time_report (View for \"time_report\" only) User is allowed to access", "allowed to access user_functional_role (View for \"user_functional_role\" only) User is", "to access kind (View for \"kind\" only) User is allowed", "User is allowed to edit room (Edit for \"room\" only)", "(Edit for \"sap_cc\": ('group_lead', 'team_lead') only) User is allowed Edit", "time_activity (View for \"time_activity\" only) User is allowed to access", "User is allowed to edit dynamic user data if not", "is allowed to access time_wp_group (View for \"time_wp_group\" only) User", "only) User may get nosy messages for it_issue (Nosy for", "organisation (View for \"organisation\" only) User is allowed to access", "to access sup_status (View for \"sup_status\" only) User is allowed", "User is allowed to view selected fields in work package", "'planned_end', 'priority', 'release', 'responsible', 'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title']", "view their own user functional role (View for \"user_functional_role\" only)", "\"sap_cc\" only) User is allowed to edit time_record (Edit for", "only) User is allowed to create daily_record (Create for \"daily_record\"", "(View for \"time_wp\" only) User is allowed to access vacation_correction", "allowed to edit several fields if he is Responsible for", "only) User is allowed to access cost_center_group (View for \"cost_center_group\"", "to search issue (Search for \"issue\" only) User is allowed", "\"sec-incident-responsible\": User is allowed to access it_int_prio (View for \"it_int_prio\"", "is allowed to create kind (Create for \"kind\" only) User", "for \"user_contact\" only) User is allowed to edit user_contact (Edit", "for 
\"cost_center_status\" only) User is allowed to edit department (Edit", "(Create for \"time_record\" only) User is allowed to create uc_type", "allowed View on (View for \"user_dynamic\": ('id', 'sap_cc', 'user', 'valid_from',", "is allowed to edit sap_cc (Edit for \"sap_cc\" only) User", "correct domain (Edit for \"user_dynamic\" only) May only view/edit records", "allowed to create room (Create for \"room\" only) User is", "for \"mailgroup\" only) User is allowed to edit return_type (Edit", "User is allowed to create ext_tracker (Create for \"ext_tracker\" only)", "room (View for \"room\" only) User is allowed to access", "only) User is allowed to access contact_type (View for \"contact_type\"", "is allowed to create organisation (Create for \"organisation\" only) User", "\"doc_category\" only) User is allowed to create doc_status (Create for", "(Edit for \"status\" only) User is allowed to edit status_transition", "'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only) Role \"dom-user-edit-hr\":", "interface (Email Access) Users are allowed to edit some of", "User is allowed to create cost_center (Create for \"cost_center\" only)", "'password', 'timezone', 'username') only) User is allowed Edit on file", "\"it_project\": ('messages', 'files', 'nosy') only) User is allowed Edit on", "allowed Edit on (Edit for \"department\": ('doc_num',) only) User is", "(Xmlrpc Access) User may edit own leave submissions (Edit for", "he is Responsible for an it_issue (Edit for \"it_issue\": ('responsible',)", "or the person to whom approvals are delegated (View for", "'summary') only) User is allowed to access ext_msg (View for", "is allowed to access time_activity (View for \"time_activity\" only) User", "is allowed to access product_type (View for \"product_type\" only) User", "non-confidential or user is on nosy list (Edit for \"it_issue\":", "only) User is allowed to create cost_center_group (Create for \"cost_center_group\"", 
"ext_tracker_state (Create for \"ext_tracker_state\" only) User is allowed to edit", "(View for \"file\" only) User is allowed View on msg", "\"uc_type\" only) User is allowed to access user_status (View for", "edit mailgroup (Edit for \"mailgroup\" only) User is allowed to", "\"user\": ('business_responsible', 'planning_role', 'scale_seniority') only) User is allowed to access", "files (Search for \"file\" only) User is allowed to search", "user Roles through the web (Web Roles) User may restore", "the contact or the contact is marked visible (View for", "\"it_issue\": ('messages', 'files', 'nosy') only) User is allowed Edit on", "the same Org-Location as the given user (View for \"user_dynamic\"", "create msg_keyword (Create for \"msg_keyword\" only) User is allowed to", "allowed to edit workpackage if he is time category owner", "only) User is allowed to create auto_wp (Create for \"auto_wp\"", "only) User is allowed to view work package and time", "(View for \"user_dynamic\": ('department', 'org_location') only) User is allowed View", "only) User is allowed to edit it_issue (Edit for \"it_issue\"", "allowed to create absence (Create for \"absence\" only) User is", "linked from an item with Edit permission (Edit for \"file\"", "User is allowed to create kind (Create for \"kind\" only)", "\"sup_classification\" only) User is allowed to access sup_execution (View for", "(Create for \"overtime_correction\" only) User is allowed to create overtime_period", "nosy list (View for \"it_issue\" only) User is allowed View", "(View for \"doc_issue_status\" only) User is allowed to access ext_tracker", "own messages (View for \"msg\" only) User is allowed to", "user (Retire for \"time_record\" only) User or Timetracking by user", "User is allowed to edit severity (Edit for \"severity\" only)", "only) User is allowed to view/edit workpackage if he is", "'superseder', 'title', 'type', 'warranty') only) User is allowed View on", "is allowed to create customer (Create for 
\"customer\" only) User", "see one of the time_records for that day (View for", "(Edit for \"organisation\": ('domain_part',) only) User is allowed Edit on", "for \"time_wp_summary_no\" only) User is allowed to access timesheet (View", "to edit ext_tracker_state (Edit for \"ext_tracker_state\" only) Role \"nosy\": User", "('author', 'date', 'id', 'keywords', 'subject', 'summary') only) User is allowed", "the user (View for \"user\": ['room'] only) Role \"dom-user-edit-gtt\": (Search", "access product (View for \"product\" only) User is allowed to", "responsible for it (Edit for \"category\": ('nosy', 'default_part_of') only) User", "'responsible', 'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title'] only) User is", "for the user (Edit for \"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date',", "\"query\" only) User is allowed to search for their own", "to access summary_type (View for \"summary_type\" only) User is allowed", "user has role HR-Org-Location and is in the same Org-Location", "the person to whom approvals are delegated (Edit for \"leave_submission\":", "'vie_user'] only) Role \"dom-user-edit-office\": User is allowed to create user_contact", "\"summary_view\": Role \"supportadmin\": User is allowed to access analysis_result (View", "(View for \"time_project\" only) User is allowed to access time_report", "doc (Nosy for \"doc\" only) User may get nosy messages", "for \"daily_record_freeze\" only) User is allowed to create location (Create", "view selected fields in work package if booking is allowed", "(Create for \"domain_permission\" only) User is allowed to create it_category", "\"sup_warranty\" only) User is allowed to access test_level (View for", "May only view/edit records with the correct domain (View for", "for \"user_status\": ('name',) only) User is allowed View on file", "Edit on it_issue if it_issue is non-confidential or user is", "for \"daily_record_status\" only) User is allowed to access 
department (View", "for \"sup_execution\" only) User is allowed to access sup_prio (View", "the given user (View for \"user_dynamic\" only) User is allowed", "\"it\": Create (Create for \"user_contact\" only) User is allowed Edit", "access user_status (View for \"user_status\" only) User is allowed to", "org_location (Edit for \"org_location\" only) User is allowed to edit", "is allowed to edit dynamic user data if not frozen", "they are in the domain_permission for the user (Edit for", "(View for \"user\": ['contacts', 'position_text', 'room'] only) Role \"external\": (Search", "is allowed to edit severity (Edit for \"severity\" only) User", "for \"time_record\" only) User is allowed to search time_wp (Search", "only) User is allowed to create sap_cc (Create for \"sap_cc\"", "to edit ext_tracker_state (Edit for \"ext_tracker_state\" only) User is allowed", "('cost_center', 'department', 'deputy', 'description', 'name', 'nosy', 'organisation', 'responsible', 'status') only)", "is \"Open\" and he is responsible for the time category", "everything (View) Role \"anonymous\": User may access the web interface", "user is on nosy list (View for \"it_project\" only) User", "is owner or project responsible/deputy (Edit for \"time_wp\": ('bookers', 'description',", "allowed to access product_family (View for \"product_family\" only) User is", "for \"time_project\": ('name',) only) User is allowed to view work", "allowed to create work_location (Create for \"work_location\" only) User is", "(View for \"contract_type\" only) User is allowed to create user_contact", "for \"daily_record\" only) User is allowed to search for their", "(Edit for \"area\" only) User is allowed to edit category", "\"time_wp\": ('cost_center', 'is_public', 'name', 'responsible', 'time_wp_summary_no', 'wp_no') only) User is", "'no_overtime', 'no_overtime_day', 'only_hours', 'overtime_reduction') only) User is allowed View on", "allowed to access sup_status (View for \"sup_status\" only) User is", 
"only) Users may see daily record if they may see", "only) User is allowed to access it_request_type (View for \"it_request_type\"", "for \"organisation\" only) User is allowed to edit location (Edit", "allowed Edit on (Edit for \"time_wp\": ('project',) only) User is", "only) User is allowed to create kind (Create for \"kind\"", "User is allowed View on (View for \"user\": ('business_responsible', 'department_temp',", "is allowed to access customer_agreement (View for \"customer_agreement\" only) User", "(View for \"msg\" only) User is allowed View on support", "(View for \"time_project\": ('activity', 'actor', 'creation', 'creator', 'deputy', 'description', 'id',", "to edit time_report (Edit for \"time_report\" only) User may edit", "access doc_status (View for \"doc_status\" only) User is allowed to", "View on (View for \"user\": ('roles',) only) User is allowed", "only) User is allowed to edit test_level (Edit for \"test_level\"", "only) User is allowed to edit doc_category (Edit for \"doc_category\"", "allowed to create vacation_correction (Create for \"vacation_correction\" only) User is", "edit cost_center (Edit for \"cost_center\" only) User is allowed to", "for this user (also applies to timetracking by, supervisor and", "if he is allowed to see all details on work", "\"test_level\" only) User is allowed to create file (Create for", "'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id', 'is_extern', 'is_public',", "\"support\" only) User is allowed to create time_record (Create for", "may get nosy messages for support (Nosy for \"support\" only)", "linked from an item with Edit permission (Edit for \"msg\"", "to edit issue (Edit for \"issue\" only) User is allowed", "allowed to edit several fields if he is Stakeholder/Responsible for", "('group_lead', 'purchasing_agents', 'team_lead') only) Role \"project\": User is allowed Edit", "deputy of time project or on nosy list of time", "access 
overtime_correction (View for \"overtime_correction\" only) User is allowed to", "is allowed Edit on (Edit for \"daily_record\": ('status', 'time_record') only)", "('roles',) only) User is allowed View on (View for \"user_dynamic\":", "allowed to edit room (Edit for \"room\" only) Role \"functional-role\":", "for \"doc_category\" only) User is allowed to create doc_status (Create", "they have search permission (View for \"query\" only) Users may", "for \"org_location\" only) User is allowed to edit organisation (Edit", "edit contact (Edit for \"contact\" only) User is allowed to", "allowed to create msg (Create for \"msg\" only) User is", "on (View for \"user_status\": ('name',) only) User is allowed View", "he/she is in group HR-Org-Location and in the same Org-Location", "only) User is allowed to access sup_warranty (View for \"sup_warranty\"", "'work_location') only) User is allowed to access time_project (View for", "'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username',", "only) User is allowed to create artefact (Create for \"artefact\"", "for the user (Edit for \"user\": ['room'] only) Users may", "to access project_type (View for \"project_type\" only) User is allowed", "is allowed to access time_project (View for \"time_project\" only) User", "or user is on nosy list (View for \"issue\" only)", "on nosy list (Edit for \"issue\" only) User is allowed", "('nickname', 'status', 'username') only) User is allowed View on (View", "edit dynamic user data if not frozen in validity span", "to edit mailgroup (Edit for \"mailgroup\" only) User may manipulate", "\"overtime_period\" only) User is allowed to access prodcat (View for", "only) User is allowed to edit doc_issue_status (Edit for \"doc_issue_status\"", "on (View for \"category\": ('id', 'name') only) User is allowed", "allowed Edit on file if file is linked from an", "\"user_dynamic\" only) User is allowed to edit auto_wp (Edit for", 
"where they have search permission (View for \"query\" only) Users", "to edit domain_permission (Edit for \"domain_permission\" only) User is allowed", "\"support\": ('analysis_end', 'analysis_result', 'analysis_start', 'bcc', 'business_unit', 'category', 'cc', 'cc_emails', 'classification',", "allowed Edit on (Edit for \"time_project\": ('group_lead', 'purchasing_agents', 'team_lead') only)", "allowed to create issue (Create for \"issue\" only) User is", "\"absence_type\" only) User is allowed to create room (Create for", "for \"user_dynamic\": ('department', 'org_location') only) User is allowed View on", "record (the supervisor relationship is transitive) or the user is", "time report if reponsible or deputy of time project or", "files (View for \"file\" only) User is allowed to view", "\"user\": ('contacts',) only) User is allowed View on (View for", "'creation', 'creator', 'deputy', 'description', 'id', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'name',", "only) User is allowed to create daily_record_freeze (Create for \"daily_record_freeze\"", "(View for \"time_wp\" only) Role \"sec-incident-nosy\": User is allowed to", "domain_permission for the user (View for \"user\": ['clearance_by', 'contacts', 'csv_delimiter',", "\"user\": ['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration',", "only) User is allowed to access doc_issue_status (View for \"doc_issue_status\"", "own file (file created by user) (Edit for \"file\" only)", "\"time_wp\": ('activity', 'actor', 'auto_wp', 'bookers', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed',", "for \"sap_cc\" only) (View for \"time_project\" only) User is allowed", "to access product_type (View for \"product_type\" only) User is allowed", "(View for \"sup_status\" only) User is allowed to access sup_type", "allowed View on (View for \"user\": ('activity', 'actor', 'ad_domain', 
'address',", "(View for \"it_request_type\" only) User is allowed to access keyword", "allowed to access organisation (View for \"organisation\" only) User is", "only) User is allowed to access reporting_group (View for \"reporting_group\"", "nosy messages for issue (Nosy for \"issue\" only) User may", "to edit area (Edit for \"area\" only) User is allowed", "time record if he is the supervisor or the person", "owner or deputy of time category or on nosy list", "messages for support (Nosy for \"support\" only) Role \"office\": (Restore", "is allowed View on (View for \"user\": ('activity', 'actor', 'ad_domain',", "is allowed Edit on (Edit for \"msg\": ('author', 'date', 'id',", "access it_int_prio (View for \"it_int_prio\" only) User is allowed to", "for \"organisation\" only) User is allowed to edit overtime correction", "to edit uc_type (Edit for \"uc_type\" only) User may manipulate", "product_family (Edit for \"product_family\" only) User is allowed to edit", "'durations_allowed', 'epic_key', 'has_expiration_date', 'is_extern', 'is_public', 'id', 'name', 'project', 'responsible', 'time_end',", "(View for \"contact_type\" only) User is allowed to access cost_center", "\"contact\" only) User is allowed to access customer (View for", "it may also be seen (View for \"time_record\" only) User", "only) User is allowed to create it_project (Create for \"it_project\"", "(Create for \"user_contact\" only) User is allowed Edit on (Edit", "create time_record (Create for \"time_record\" only) User is allowed to", "\"it_request_type\" only) User is allowed to access keyword (View for", "on (Edit for \"leave_submission\": ('status',) only) User is allowed to", "User is allowed to create daily_record (Create for \"daily_record\" only)", "Edit on msg if msg is linked from an item", "'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only) Role \"dom-user-edit-office\":", "doc (View for \"doc\" only) User is allowed to access", 
"\"project\": User is allowed Edit on (Edit for \"time_project\": ('cost_center',", "time_records owned by user (Edit for \"time_record\" only) User or", "'name', 'responsible', 'time_wp_summary_no', 'wp_no') only) User is allowed to retire", "to access sup_classification (View for \"sup_classification\" only) User is allowed", "cost_center_group (Edit for \"cost_center_group\" only) User is allowed to edit", "user Roles through the web (Web Roles) Role \"itview\": User", "only) User may get nosy messages for issue (Nosy for", "only) User is allowed to edit time_wp_group (Edit for \"time_wp_group\"", "to access it_project (View for \"it_project\" only) User is allowed", "'actor', 'address', 'alternate_addresses', 'creation', 'creator', 'id', 'queries', 'realname', 'status', 'timezone',", "only) User is allowed to edit analysis_result (Edit for \"analysis_result\"", "to edit work_location (Edit for \"work_location\" only) Role \"doc_admin\": User", "is allowed Edit on (Edit for \"sap_cc\": ('group_lead', 'purchasing_agents', 'team_lead')", "for \"customer_agreement\" only) User is allowed to access daily record", "for \"time_project\": ('approval_hr', 'approval_required', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'no_overtime', 'no_overtime_day',", "for \"area\" only) User is allowed to create category (Create", "\"time_record\" only) User is allowed to access user_contact (View for", "access vacation_correction (View for \"vacation_correction\" only) User is allowed to", "\"dom-user-edit-gtt\": (Search for \"user_dynamic\" only) May only view/edit records with", "for \"customer\" only) User is allowed to create customer_agreement (Create", "(the supervisor relationship is transitive) or the user is the", "(View for \"time_project_status\" only) User is allowed to access time_wp_group", "public_holiday (Create for \"public_holiday\" only) User is allowed to create", "to access domain_permission (View for \"domain_permission\" only) User is 
allowed", "(View for \"domain_permission\" only) User is allowed to access it_int_prio", "\"area\" only) User is allowed to create category (Create for", "\"keyword\" only) User is allowed to access kind (View for", "for \"auto_wp\" only) User is allowed to access contract_type (View", "'status', 'superseder', 'test_level', 'title'] only) External users are allowed to", "allowed to edit cost_center (Edit for \"cost_center\" only) User is", "\"time_report\" only) User is allowed to access time_wp (View for", "access status_transition (View for \"status_transition\" only) User is allowed to", "\"work_location\" only) User is allowed to edit cost_center (Edit for", "sup_status (View for \"sup_status\" only) User is allowed to access", "is allowed to access sex (View for \"sex\" only) User", "for \"query\" only) User is allowed to edit reporting_group (Edit", "Access) User may access the web interface (Web Access) User", "\"org_location\" only) User is allowed to access organisation (View for", "user is department manager of time category (View for \"time_project\"", "only) User is allowed to view their own files (View", "is allowed View on support if support is non-confidential or", "nosy list (Edit for \"support\": ('analysis_end', 'analysis_result', 'analysis_start', 'bcc', 'business_unit',", "is allowed to access time_wp_summary_no (View for \"time_wp_summary_no\" only) User", "customer_agreement (View for \"customer_agreement\" only) User is allowed to access", "information (View for \"overtime_correction\" only) User is allowed to view", "the domain_permission for the user (Edit for \"user\": ['contacts', 'position_text',", "contact is marked visible (View for \"user_contact\" only) User is", "allowed to create status (Create for \"status\" only) User is", "on nosy list (View for \"support\" only) User is allowed", "only) User is allowed to create organisation (Create for \"organisation\"", "(View for \"status\" only) User is allowed to access status_transition", 
"for \"time_record\" only) User is allowed to access user_contact (View", "user details (Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'lunch_duration', 'lunch_start', 'password',", "only) User is allowed to create absence_type (Create for \"absence_type\"", "is allowed to create it_int_prio (Create for \"it_int_prio\" only) User", "(View for \"reporting_group\" only) User is allowed to access return_type", "User is allowed to edit workpackage if he is time", "only) User is allowed to access msg_keyword (View for \"msg_keyword\"", "for \"msg\" only) User is allowed to access area (View", "to access timesheet (View for \"timesheet\" only) User is allowed", "for \"it_issue\": ('messages', 'files', 'nosy') only) User is allowed Edit", "for \"issue\" only) User is allowed to edit keyword (Edit", "for \"query\" only) User is allowed to edit their queries", "is allowed to access user (View for \"user\" only) Role", "allowed to edit cost_center_permission_group (Edit for \"cost_center_permission_group\" only) Role \"contact\":", "\"product_type\" only) User is allowed to edit reference (Edit for", "for \"category\": ('id', 'name') only) User is allowed View on", "for \"time_report\" only) User may edit own file (file created", "(Edit for \"mailgroup\" only) User may manipulate user Roles through", "is on nosy list (Edit for \"issue\" only) User is", "or on nosy list of time project (View for \"time_report\"", "allowed Edit on (Edit for \"time_project\": ('cost_center', 'department', 'deputy', 'description',", "\"department\" only) User is allowed to edit organisation (Edit for", "'timezone', 'tt_lines', 'username', 'vie_user'] only) Role \"dom-user-edit-hr\": (Search for \"user_dynamic\"", "it_category (Edit for \"it_category\" only) User is allowed to edit", "'position_text', 'queries', 'realname', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone',", "only) User is allowed to access summary_report (View for \"summary_report\"", 
"User is allowed Edit on (Edit for \"file\": ('name', 'type')", "permission (Edit for \"msg\" only) User is allowed View on", "edit room (Edit for \"room\" only) Role \"functional-role\": (Restore for", "retire everything (Retire) User may use the email interface (Email", "\"time_record\" only) Role \"hr-vacation\": User is allowed to access contract_type", "nosy messages for doc (Nosy for \"doc\" only) User may", "'reduced_activity_list', 'roles', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines',", "\"public_holiday\" only) User is allowed to create reporting_group (Create for", "User is allowed to access sup_execution (View for \"sup_execution\" only)", "(Edit for \"it_project\": ('messages', 'files', 'nosy') only) User is allowed", "Role \"functional-role\": (Restore for \"user_functional_role\" only) (Retire for \"user_functional_role\" only)", "\"doc_status\" only) User is allowed to access ext_tracker (View for", "\"user_view\": User is allowed to access user (View for \"user\"", "only) Users may view user_dynamic records for ad_domain for which", "(Create for \"user_contact\" only) User is allowed to create user_dynamic", "is allowed to edit ext_tracker_state (Edit for \"ext_tracker_state\" only) Role", "\"analysis_result\" only) User is allowed to edit contact (Edit for", "allowed to create category (Create for \"category\" only) User is", "\"msg\" only) User is allowed to create queries (Create for", "on nosy list (View for \"it_project\" only) User is allowed", "for \"location\" only) User is allowed to access org_location (View", "allowed to access it_issue_status (View for \"it_issue_status\" only) User is", "for \"support\" only) User is allowed to create analysis_result (Create", "'nickname', 'pictures', 'position_text', 'queries', 'realname', 'room', 'sex', 'status', 'subst_active', 'substitute',", "'doc_issue_status', 'earliest_start', 'effective_prio', 'effort_hours', 'external_users', 'files', 
'files_affected', 'fixed_in', 'id', 'keywords',", "to access absence (View for \"absence\" only) User is allowed", "their own messages (View for \"msg\" only) User is allowed", "Role \"office\": (Restore for \"room\" only) (Retire for \"room\" only)", "for \"msg\": ('date', 'id') only) User is allowed Edit on", "to search user_status (Search for \"user\": ('status',) only) User is", "to access return_type (View for \"return_type\" only) User is allowed", "user_dynamic records for ad_domain for which they are in the", "list of allowed external users or there is a transitive", "User may view time category if user is owner or", "\"leave_submission\": ('status',) only) User is allowed to access contract_type (View", "time_wp (Search for \"time_wp\": ('activity', 'actor', 'auto_wp', 'cost_center', 'creation', 'creator',", "for \"safety_level\" only) User is allowed to create severity (Create", "(Create for \"daily_record_freeze\" only) User is allowed to create location", "is allowed to edit cost_center_group (Edit for \"cost_center_group\" only) User", "create severity (Create for \"severity\" only) User is allowed to", "is allowed to access it_project_status (View for \"it_project_status\" only) User", "User is allowed to create vacation_correction (Create for \"vacation_correction\" only)", "User is allowed to view their own overtime information (View", "'superseder', 'test_level', 'title'] only) User is allowed View on (View", "is allowed to search user_status (Search for \"user\": ('status',) only)", "to create ext_tracker (Create for \"ext_tracker\" only) User is allowed", "xmlrpc interface (Xmlrpc Access) User may edit own leave submissions", "\"doc\" only) User is allowed to access doc_category (View for", "User is allowed to create overtime_period (Create for \"overtime_period\" only)", "vacation_report (View for \"vacation_report\" only) User is allowed to access", "time_wp (Create for \"time_wp\" only) User is allowed to edit", "may also be seen (View for 
\"daily_record\" only) User may", "for \"daily_record\" only) User may view their own user functional", "domain_permission for the user (View for \"user\": ['contacts', 'position_text', 'room']", "everything (Edit) User may manipulate user Roles through the web", "is allowed to search for their own messages (Search for", "\"reporting_group\" only) User is allowed to edit sap_cc (Edit for", "category (Edit for \"category\" only) User is allowed to edit", "to create user_dynamic (Create for \"user_dynamic\" only) User is allowed", "User is allowed to access cost_center_status (View for \"cost_center_status\" only)", "(Search for \"user\": ('status',) only) User is allowed to see", "\"severity\" only) User is allowed to access sex (View for", "\"external\": (Search for \"ext_tracker_state\": ('id', 'issue') only) (Search for \"user\":", "(View for \"user_dynamic\" only) Users may view/edit user records for", "for which they are in the domain_permission for the user", "for \"vacation_correction\" only) User is allowed to create cost_center (Create", "allowed to create product_family (Create for \"product_family\" only) User is", "if file is linked from an item with View permission", "allowed to create artefact (Create for \"artefact\" only) User is", "allowed to edit uc_type (Edit for \"uc_type\" only) User may", "'closed', 'composed_of', 'creation', 'creator', 'cur_est_begin', 'cur_est_end', 'deadline', 'depends', 'doc_issue_status', 'earliest_start',", "access daily_record_freeze (View for \"daily_record_freeze\" only) User is allowed to", "User is allowed View on (View for \"user\": ('nickname', 'status',", "('keywords',) only) User is allowed Edit on file if file", "User is allowed to search it_issue (Search for \"it_issue\" only)", "for \"doc\" only) User may get nosy messages for issue", "to create cost_center (Create for \"cost_center\" only) User is allowed", "to edit time_activity (Edit for \"time_activity\" only) User is allowed", "Role \"msgedit\": (Search 
for \"msg\": ('date', 'id') only) User is", "for \"room\" only) Role \"functional-role\": (Restore for \"user_functional_role\" only) (Retire", "(View for \"user\": ('entry_date', 'planning_role') only) User is allowed to", "User is allowed View on support if support is non-confidential", "\"public_holiday\" only) User is allowed to access reference (View for", "(View for \"contact\" only) User is allowed to access customer", "'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) User", "'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) (View for \"time_project\": ('activity',", "\"mailgroup\" only) User is allowed to access msg_keyword (View for", "only) User may edit own leave submissions (View for \"leave_submission\":", "allowed to access safety_level (View for \"safety_level\" only) User is", "to access it_issue (View for \"it_issue\" only) User is allowed", "create ext_tracker (Create for \"ext_tracker\" only) User is allowed to", "access query (View for \"query\" only) User is allowed to", "project or on nosy list of time project (View for", "only) User is allowed to access daily_record (View for \"daily_record\"", "for \"overtime_correction\" only) User is allowed to edit product_family (Edit", "to access daily_record_freeze (View for \"daily_record_freeze\" only) User is allowed", "allowed to view/edit workpackage if he is owner or project", "User is allowed to access sex (View for \"sex\" only)", "\"time_activity\" only) User is allowed to create time_activity_perm (Create for", "are allowed to view their own and public queries for", "of allowed external users or there is a transitive permission", "may edit time_records owned by user (Edit for \"time_record\" only)", "to edit reference (Edit for \"reference\" only) Role \"dom-user-edit-facility\": Users", "User is allowed to edit safety_level (Edit for \"safety_level\" only)", "if he/she has role HR or HR-Org-Location (View for 
\"time_wp\":", "User is allowed to access doc (View for \"doc\" only)", "User is allowed to access cost_center_group (View for \"cost_center_group\" only)", "User is allowed View on it_project if it_project is non-confidential", "their own messages (Search for \"msg\" only) User is allowed", "the user (Edit for \"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname',", "is allowed to create user_functional_role (Create for \"user_functional_role\" only) User", "access cost_center (View for \"cost_center\" only) User is allowed to", "for \"mailgroup\" only) User may manipulate user Roles through the", "only) User is allowed to access ext_tracker_state (View for \"ext_tracker_state\"", "\"product_family\" only) User is allowed to edit public_holiday (Edit for", "for \"query\" only) User is allowed to access time_project (View", "only) User is allowed to create work_location (Create for \"work_location\"", "kind (Create for \"kind\" only) User is allowed to create", "User is allowed to access support (View for \"support\" only)", "create ext_msg (Create for \"ext_msg\" only) User is allowed to", "only) User is allowed to access doc_status (View for \"doc_status\"", "\"org_location\" only) User is allowed to create organisation (Create for", "everything (Restore) User may retire everything (Retire) User may use", "edit ext_tracker_state (Edit for \"ext_tracker_state\" only) Role \"nosy\": User may", "'title', 'tt_lines', 'username') only) User is allowed View on (View", "for \"contact\" only) User is allowed to access contact_type (View", "(View for \"severity\" only) User is allowed to access sex", "is allowed to edit reference (Edit for \"reference\" only) Role", "to create daily_record (Create for \"daily_record\" only) User is allowed", "Search (Search for \"user_contact\" only) User is allowed Edit on", "access safety_level (View for \"safety_level\" only) User is allowed to", "only) User is allowed to create ext_tracker (Create for 
\"ext_tracker\"", "user (View for \"time_record\" only) Role \"hr-vacation\": User is allowed", "nosy messages for support (Nosy for \"support\" only) Role \"office\":", "for it (Edit for \"category\": ('nosy', 'default_part_of') only) User is", "for \"leave_submission\" only) User is allowed to edit vacation_correction (Edit", "time_project_status (Create for \"time_project_status\" only) User is allowed to create", "he is allowed to see all details on work package", "allowed to access product (View for \"product\" only) User is", "is allowed to search it_project (Search for \"it_project\" only) User", "linked from an item with View permission (View for \"msg\"", "for \"sap_cc\" only) User is allowed to edit time_record (Edit", "\"ext_tracker_type\" only) Role \"msgsync\": (Search for \"msg\": ('date', 'id') only)", "only) User is allowed to access leave_status (View for \"leave_status\"", "(Edit for \"product_type\" only) User is allowed to edit reference", "create cost_center_permission_group (Create for \"cost_center_permission_group\" only) User is allowed to", "allowed to access time_wp_group (View for \"time_wp_group\" only) User is", "(View for \"doc_issue_status\" only) User is allowed to access doc_status", "'external_users', 'files', 'files_affected', 'fixed_in', 'id', 'keywords', 'kind', 'maturity_index', 'messages', 'needs',", "User is allowed to create location (Create for \"location\" only)", "time category owner or deputy (Edit for \"time_wp\": ('cost_center', 'is_public',", "only) (Search for \"user_dynamic\" only) User is allowed to view", "['contacts', 'position_text', 'room'] only) Role \"external\": (Search for \"ext_tracker_state\": ('id',", "\"status_transition\" only) User is allowed to edit test_level (Edit for", "\"\"\" New Web users get the Roles \"User,Nosy\" New Email", "(View for \"org_group\" only) User is allowed to access org_location", "or deputy of time project or on nosy list of", "\"dom-user-edit-hr\": (Search for 
\"user_dynamic\" only) May only view/edit records with", "category (Edit for \"time_project\": ('deputy', 'planned_effort', 'nosy') only) User is", "the owner of the daily record. If user has role", "vacation_correction (View for \"vacation_correction\" only) Role \"hr-org-location\": (Search for \"daily_record_freeze\"", "is allowed to edit ext_tracker_state (Edit for \"ext_tracker_state\" only) User", "is allowed Edit on (Edit for \"organisation\": ('domain_part',) only) User", "for \"reporting_group\" only) User is allowed to create sap_cc (Create", "edit time_wp (Edit for \"time_wp\" only) User is allowed to", "for \"time_record\" only) User is allowed to access time_report (View", "on work package or User may view a daily_record (and", "allowed to create time_activity (Create for \"time_activity\" only) User is", "User is allowed to create absence_type (Create for \"absence_type\" only)", "edit organisation (Edit for \"organisation\" only) User is allowed to", "is allowed to edit cost_center_permission_group (Edit for \"cost_center_permission_group\" only) Role", "to edit sap_cc (Edit for \"sap_cc\" only) User is allowed", "allowed to create doc_status (Create for \"doc_status\" only) User is", "allowed to edit test_level (Edit for \"test_level\" only) Role \"it\":", "for \"cost_center_permission_group\" only) (Retire for \"cost_center_permission_group\" only) User is allowed", "is allowed to access it_issue_status (View for \"it_issue_status\" only) User", "'status', 'superseder', 'title', 'type', 'warranty') only) User is allowed View", "may access the xmlrpc interface (Xmlrpc Access) User may create", "absence (Edit for \"absence\" only) User is allowed to edit", "\"sap_cc\": ('group_lead', 'purchasing_agents', 'team_lead') only) User is allowed Edit on", "'nickname', 'password', 'pictures', 'roles', 'timetracking_by', 'timezone', 'username') only) User is", "User is allowed to edit time_wp_group (Edit for \"time_wp_group\" only)", "User is allowed to 
access time_record (View for \"time_record\" only)", "(View for \"reference\" only) User is allowed to access reporting_group", "Timetracking by user may edit time_records owned by user (Retire", "\"it_issue\" only) User is allowed to access it_project (View for", "to create ext_msg (Create for \"ext_msg\" only) User is allowed", "edit cost_center_permission_group (Edit for \"cost_center_permission_group\" only) Role \"contact\": User is", "\"pgp\": Role \"procurement\": (View for \"sap_cc\" only) (View for \"time_project\"", "User is allowed to edit time_project_status (Edit for \"time_project_status\" only)", "daily record (the supervisor relationship is transitive) or the user", "\"time_project\" only) User may view work package if responsible for", "'timezone', 'title', 'tt_lines', 'username') only) User is allowed View on", "access the rest interface (Rest Access) User may access the", "is allowed to create keyword (Create for \"keyword\" only) User", "Role \"sec-incident-responsible\": User is allowed to access it_int_prio (View for", "\"support\" only) User is allowed to create analysis_result (Create for", "create file (Create for \"file\" only) User is allowed to", "for \"room\" only) User is allowed to edit uc_type (Edit", "Role \"itview\": User is allowed to access it_int_prio (View for", "for \"user_contact\": ('visible',) only) User is allowed to edit several", "sap_cc (View for \"sap_cc\" only) User is allowed to access", "user (View for \"user_dynamic\" only) User is allowed to view", "access sex (View for \"sex\" only) User is allowed to", "create sap_cc (Create for \"sap_cc\" only) User is allowed to", "records for ad_domain for which they are in the domain_permission", "User is allowed to create time_project_status (Create for \"time_project_status\" only)", "to create uc_type (Create for \"uc_type\" only) User is allowed", "create everything (Create) User may edit everything (Edit) User may", "(Edit for \"sap_cc\" only) User is allowed to edit 
time_activity", "list (View for \"support\" only) User is allowed to access", "access return_type (View for \"return_type\" only) User is allowed to", "'alternate_addresses', 'creation', 'creator', 'id', 'queries', 'realname', 'status', 'timezone', 'username') only)", "for \"status\" only) User is allowed to create status_transition (Create", "for \"user_dynamic\" only) Users may view/edit user records for ad_domain", "is allowed to access doc_category (View for \"doc_category\" only) User", "to access user_dynamic (View for \"user_dynamic\" only) User is allowed", "to search time_wp (Search for \"time_wp\": ('activity', 'actor', 'auto_wp', 'cost_center',", "for \"customer_agreement\" only) User is allowed to create mailgroup (Create", "everything (Create) User may edit everything (Edit) User may manipulate", "(Edit for \"return_type\" only) User is allowed to edit sup_classification", "it_category (View for \"it_category\" only) User is allowed to access", "allowed to create it_category (Create for \"it_category\" only) User is", "create absence_type (Create for \"absence_type\" only) User is allowed to", "for \"time_project\": ('cost_center', 'department', 'deputy', 'description', 'name', 'nosy', 'organisation', 'responsible',", "User is allowed to access overtime_correction (View for \"overtime_correction\" only)", "Edit on issue if issue is non-confidential or user is", "allowed to search for their own messages (Search for \"msg\"", "(View for \"sup_classification\" only) User is allowed to access support", "for \"absence\" only) User is allowed to create absence_type (Create", "is allowed to edit area (Edit for \"area\" only) User", "for \"sup_classification\" only) User is allowed to access support (View", "only) Role \"functional-role\": (Restore for \"user_functional_role\" only) (Retire for \"user_functional_role\"", "view leave submission if he is the supervisor or the", "'status', 'username') only) User is allowed View on (View for", "permission (View 
for \"file\" only) User is allowed View on", "edit issue (Edit for \"issue\" only) User is allowed to", "(Edit for \"query\" only) User is allowed to edit time", "is allowed to edit customer (Edit for \"customer\" only) User", "(Create for \"time_wp_group\" only) User is allowed to edit time_project_status", "allowed to access absence (View for \"absence\" only) User is", "for \"file\" only) User is allowed View on msg if", "is allowed to edit product_family (Edit for \"product_family\" only) User", "if he/she has role HR or HR-Org-Location (View for \"time_project\":", "User is allowed to access product_type (View for \"product_type\" only)", "(Create for \"org_location\" only) User is allowed to create organisation", "for \"vacation_correction\" only) User is allowed to create contract_type (Create", "\"leave_submission\" only) User is allowed to create msg (Create for", "for \"ext_tracker_state\" only) User is allowed to edit ext_msg (Edit", "User is allowed to access it_category (View for \"it_category\" only)", "\"cost_center_permission_group\" only) User is allowed to access cost_center_status (View for", "User is allowed to edit time_record (Edit for \"time_record\" only)", "allowed to edit time_report (Edit for \"time_report\" only) User may", "allowed to edit kind (Edit for \"kind\" only) User is", "is allowed to create sap_cc (Create for \"sap_cc\" only) User", "\"reporting_group\" only) User is allowed to create sap_cc (Create for", "for \"it_issue\" only) User is allowed to search it_project (Search", "is allowed to access ext_msg (View for \"ext_msg\" only) User", "(View for \"user_contact\" only) User is allowed to access user_dynamic", "User is allowed to access it_request_type (View for \"it_request_type\" only)", "only) User is allowed to edit room (Edit for \"room\"", "edit artefact (Edit for \"artefact\" only) User is allowed to", "allowed to edit contract_type (Edit for \"contract_type\" only) User is", "the rest interface (Rest Access) User may 
access the web", "\"issue\" only) User is allowed to create keyword (Create for", "to access time_wp_summary_no (View for \"time_wp_summary_no\" only) User is allowed", "is allowed to access time_activity_perm (View for \"time_activity_perm\" only) User", "time category or on nosy list of time category or", "\"time_wp\": ('bookers', 'description', 'epic_key', 'planned_effort', 'time_end', 'time_start', 'time_wp_summary_no') only) User", "to edit doc (Edit for \"doc\" only) User is allowed", "for \"time_record\" only) User or Timetracking by user may edit", "with the correct domain (Edit for \"user_dynamic\" only) May only", "only) User is allowed View on (View for \"user_dynamic\": ('department',", "allowed to access doc_status (View for \"doc_status\" only) User is", "\"return_type\" only) User is allowed to access sup_classification (View for", "(Edit for \"kind\" only) User is allowed to edit msg_keyword", "\"uc_type\" only) User may manipulate user Roles through the web", "for an it_issue (Edit for \"it_issue\": ('deadline', 'status', 'title') only)", "(View for \"daily_record_freeze\" only) User is allowed to access leave_submission", "mailgroup (View for \"mailgroup\" only) User is allowed to access", "for \"prodcat\" only) User is allowed to access product (View", "View on it_issue if it_issue is non-confidential or user is", "it_project_status (View for \"it_project_status\" only) User is allowed to access", "for an it_issue (Edit for \"it_issue\": ('responsible',) only) User is", "for \"time_record\" only) (Search for \"user_dynamic\" only) User is allowed", "(Create for \"leave_submission\" only) User is allowed to create vacation_correction", "may access the rest interface (Rest Access) User may access", "allowed to edit organisation (Edit for \"organisation\" only) User is", "owned by user (Restore for \"time_record\" only) User or Timetracking", "access it_issue_status (View for \"it_issue_status\" only) User is allowed to", "(Create for 
\"vacation_correction\" only) User is allowed to edit contract_type", "on (View for \"user\": ('activity', 'actor', 'ad_domain', 'address', 'alternate_addresses', 'business_responsible',", "\"cost_center_group\" only) User is allowed to access cost_center_permission_group (View for", "User is allowed Edit on (Edit for \"user\": ('address', 'alternate_addresses',", "is allowed to create public_holiday (Create for \"public_holiday\" only) User", "for \"analysis_result\" only) User is allowed to create contact (Create", "(Search for \"daily_record\" only) User is allowed to search for", "create queries (Create for \"query\" only) User is allowed to", "for \"sap_cc\": ('group_lead', 'purchasing_agents', 'team_lead') only) User is allowed Edit", "User is allowed to edit time_activity (Edit for \"time_activity\" only)", "to edit mailgroup (Edit for \"mailgroup\" only) User is allowed", "'id', 'is_extern', 'is_public', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel',", "allowed Edit on it_project if it_project is non-confidential or user", "fields in work package if booking is allowed for this", "\"return_type\" only) User is allowed to access room (View for", "'lunch_start', 'nickname', 'pictures', 'position_text', 'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor',", "web interface (Web Access) Role \"cc-permission\": (Restore for \"cost_center_permission_group\" only)", "'area', 'category', 'closed', 'composed_of', 'creation', 'creator', 'cur_est_begin', 'cur_est_end', 'deadline', 'depends',", "doc_status (Create for \"doc_status\" only) User is allowed to create", "(Retire for \"room\" only) User is allowed View on (View", "on (View for \"user\": ('contacts',) only) User is allowed View", "for \"time_project_status\" only) User is allowed to create time_wp (Create", "(Create for \"cost_center_permission_group\" only) User is allowed to edit cost_center_permission_group", "category if he is responsible for it 
(Edit for \"category\":", "be seen (View for \"time_record\" only) User is allowed to", "is allowed to create ext_msg (Create for \"ext_msg\" only) User", "\"Open\" and he is responsible for the time category (Edit", "(View for \"contract_type\" only) User is allowed to access leave_submission", "only) User is allowed Edit on (Edit for \"file\": ('name',", "(Email Access) Users are allowed to edit some of their", "there is a transitive permission via containers (View for \"issue\":", "User is allowed to create queries (Create for \"query\" only)", "for \"it_int_prio\" only) User is allowed to edit it_issue (Edit", "leave_status (View for \"leave_status\" only) User is allowed to access", "only) User is allowed to create time_wp (Create for \"time_wp\"", "\"return_type\" only) User is allowed to edit sup_classification (Edit for", "to edit time_wp (Edit for \"time_wp\" only) User is allowed", "\"location\" only) User is allowed to access org_location (View for", "(Edit for \"time_project\": ('infosec_req', 'is_extern', 'max_hours', 'op_project', 'planned_effort', 'product_family', 'project_type',", "('department', 'org_location') only) User is allowed View on file if", "safety_level (Create for \"safety_level\" only) User is allowed to create", "allowed to edit area (Edit for \"area\" only) User is", "for \"absence_type\" only) User is allowed to access analysis_result (View", "(Create) User may edit everything (Edit) User may manipulate user", "queries (Create for \"query\" only) User is allowed to create", "access ext_tracker_type (View for \"ext_tracker_type\" only) Role \"msgsync\": (Search for", "(Create for \"time_wp\" only) User is allowed to create time_wp_group", "approvals are delegated (Edit for \"leave_submission\": ('status',) only) User is", "User is allowed to access area (View for \"area\" only)", "is allowed to create issue (Create for \"issue\" only) User", "Role \"summary_view\": Role \"supportadmin\": User is allowed to access analysis_result", 
"if msg is linked from an item with Edit permission", "User is allowed to view their own files (View for", "is allowed View on (View for \"category\": ('id', 'name') only)", "for \"user_functional_role\" only) (Retire for \"user_functional_role\" only) User is allowed", "\"user\": ('csv_delimiter', 'hide_message_files', 'lunch_duration', 'lunch_start', 'password', 'queries', 'realname', 'room', 'subst_active',", "\"overtime_period\": ('name', 'order') only) (Restore for \"room\" only) (Retire for", "is allowed to create time_project (Create for \"time_project\" only) User", "to create cost_center_permission_group (Create for \"cost_center_permission_group\" only) User is allowed", "'is_vacation', 'name', 'nosy', 'only_hours', 'op_project', 'overtime_reduction', 'responsible', 'status', 'work_location', 'wps')", "is allowed to create reporting_group (Create for \"reporting_group\" only) User", "\"issue\" only) User is allowed to search it_issue (Search for", "only) User is allowed View on (View for \"user\": ('business_responsible',", "is allowed to access public_holiday (View for \"public_holiday\" only) User", "User is allowed to edit msg_keyword (Edit for \"msg_keyword\" only)", "create sup_classification (Create for \"sup_classification\" only) User is allowed to", "is allowed to create user (Create for \"user\" only) User", "'nosy') only) User is allowed to edit workpackage if he", "for \"doc\" only) User is allowed to create ext_tracker_state (Create", "(View for \"doc\" only) User is allowed to access doc_category", "own user functional role (View for \"user_functional_role\" only) User may", "(View for \"file\" only) User is allowed View on issue", "access absence_type (View for \"absence_type\" only) User is allowed to", "\"it_project\" only) User may get nosy messages for support (Nosy", "the given date (Edit for \"daily_record_freeze\": ('frozen',) only) User is", "non-confidential or user is on nosy list (View for \"it_issue\"", "only) User is allowed to 
create doc_status (Create for \"doc_status\"", "'time_end', 'time_start', 'time_wp_summary_no') only) User may access the rest interface", "if msg is linked from an item with View permission", "'customer', 'emails', 'execution', 'external_ref', 'files', 'goods_received', 'goods_sent', 'lot', 'messages', 'nosy',", "User is allowed to create cost_center_status (Create for \"cost_center_status\" only)", "by user may edit time_records owned by user (View for", "for \"time_wp\": ('bookers', 'description', 'epic_key', 'planned_effort', 'time_end', 'time_start', 'time_wp_summary_no') only)", "by user (Edit for \"time_record\" only) User or Timetracking by", "(Edit for \"test_level\" only) Role \"it\": Create (Create for \"user_contact\"", "\"organisation\": User is allowed to access location (View for \"location\"", "is allowed to edit sup_classification (Edit for \"sup_classification\" only) User", "User is allowed to search support (Search for \"support\" only)", "sup_classification (Edit for \"sup_classification\" only) User is allowed to edit", "for \"file\" only) User is allowed Edit on msg if", "User is allowed Edit on it_issue if it_issue is non-confidential", "sup_prio (View for \"sup_prio\" only) User is allowed to access", "\"it_issue\": ('responsible',) only) User is allowed to edit several fields", "(Create for \"it_issue\" only) User is allowed to create it_project", "allowed to create contract_type (Create for \"contract_type\" only) User is", "a daily_record (and time_records that are attached to that daily_record)", "User is allowed to create area (Create for \"area\" only)", "only) User is allowed to access summary_type (View for \"summary_type\"", "test_level (Edit for \"test_level\" only) Role \"it\": Create (Create for", "allowed to access customer (View for \"customer\" only) User is", "allowed to edit contact (Edit for \"contact\" only) User is", "HR-Org-Location (View for \"time_project\": ('name',) only) User is allowed to", "User is allowed to 
access absence (View for \"absence\" only)", "if he is Responsible for an it_issue (Edit for \"it_issue\":", "is allowed to access vac_aliq (View for \"vac_aliq\" only) User", "supervisor or timetracking-by user (Edit for \"daily_record\": ('status', 'time_record') only)", "is Stakeholder/Responsible for an it_issue (Edit for \"it_issue\": ('deadline', 'status',", "for \"user_contact\" only) Users may view/edit user records for ad_domain", "view their own messages (View for \"msg\" only) User is", "via containers (Edit for \"issue\": ['activity', 'actor', 'area', 'category', 'closed',", "allowed View on (View for \"user\": ('contacts',) only) User is", "for \"user\": ('status',) only) User is allowed to see time", "is allowed to access daily_record_status (View for \"daily_record_status\" only) User", "an item with Edit permission (Edit for \"file\" only) User", "\"issue\": ['activity', 'actor', 'area', 'category', 'closed', 'composed_of', 'creation', 'creator', 'cur_est_begin',", "only) User is allowed to create it_request_type (Create for \"it_request_type\"", "create daily_record (Create for \"daily_record\" only) User is allowed to", "allowed to access it_int_prio (View for \"it_int_prio\" only) User is", "user is on nosy list (Edit for \"it_project\": ('messages', 'files',", "'no_overtime_day', 'only_hours', 'overtime_reduction') only) User is allowed View on (View", "('date', 'id') only) User is allowed Edit on (Edit for", "for \"customer\" only) User is allowed to edit customer_agreement (Edit", "Role \"user\": (Search for \"time_project\": ('activity', 'actor', 'creation', 'creator', 'deputy',", "'id', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'name', 'nosy', 'only_hours', 'op_project', 'overtime_reduction',", "only) User is allowed to access sup_status (View for \"sup_status\"", "(Email Access) User may view everything (View) Role \"anonymous\": User", "('messages', 'files', 'nosy') only) User is allowed Edit on it_project", 
"domain_permission for the user (View for \"user\": ['room'] only) Role", "create leave_submission (Create for \"leave_submission\" only) User is allowed to", "for \"product_type\" only) User is allowed to edit reference (Edit", "to create overtime_period (Create for \"overtime_period\" only) User is allowed", "User is allowed to edit uc_type (Edit for \"uc_type\" only)", "reporting_group (View for \"reporting_group\" only) User is allowed to access", "keyword (Create for \"keyword\" only) User is allowed to create", "'name', 'nosy', 'organisation', 'responsible', 'status') only) User is allowed Edit", "\"it_issue\" only) User is allowed View on it_project if it_project", "to create customer (Create for \"customer\" only) User is allowed", "search it_project (Search for \"it_project\" only) User is allowed to", "(Create for \"user\" only) User is allowed to create user_contact", "create vacation_correction (Create for \"vacation_correction\" only) User is allowed to", "create domain_permission (Create for \"domain_permission\" only) User is allowed to", "the user (View for \"user_dynamic\" only) Users may view/edit user", "User is allowed to access ext_msg (View for \"ext_msg\" only)", "for \"organisation\" only) User is allowed to edit product_family (Edit", "on nosy list (Edit for \"support\": ('analysis_end', 'analysis_result', 'analysis_start', 'bcc',", "is allowed to edit if he's the owner of the", "only) User is allowed to create time_activity_perm (Create for \"time_activity_perm\"", "User is allowed to edit kind (Edit for \"kind\" only)", "of time project (View for \"time_report\" only) User may use", "'effort_hours', 'external_users', 'files', 'files_affected', 'fixed_in', 'id', 'keywords', 'kind', 'maturity_index', 'messages',", "('csv_delimiter', 'hide_message_files', 'lunch_duration', 'lunch_start', 'password', 'queries', 'realname', 'room', 'subst_active', 'substitute',", "to view selected fields in work package if booking is", "for \"product_family\" 
only) User is allowed to access product_type (View", "\"category\": ('id', 'name') only) User is allowed View on (View", "for \"severity\" only) User is allowed to access sex (View", "create user (Create for \"user\" only) User is allowed to", "allowed to access overtime_period (View for \"overtime_period\" only) User is", "to edit organisation (Edit for \"organisation\" only) Role \"pgp\": Role", "work_location (Create for \"work_location\" only) User is allowed to edit", "Edit on (Edit for \"sap_cc\": ('group_lead', 'team_lead') only) User is", "'timetracking_by', 'vie_user', 'vie_user_bl_override', 'vie_user_ml') only) User is allowed View on", "access contract_type (View for \"contract_type\" only) User is allowed to", "(Web Access) User may use the email interface (Email Access)", "the Roles \"User,Nosy\" New Email users get the Role \"User\"", "User is allowed to edit doc_issue_status (Edit for \"doc_issue_status\" only)", "ext_tracker_state (Edit for \"ext_tracker_state\" only) User is allowed to edit", "(Create for \"customer\" only) User is allowed to create customer_agreement", "\"user_contact\" only) User is allowed to access user_dynamic (View for", "\"org_location\" only) User is allowed to edit organisation (Edit for", "Edit on (Edit for \"department\": ('doc_num',) only) User is allowed", "(Edit for \"severity\" only) User is allowed to edit status", "is allowed Edit on (Edit for \"time_project\": ('approval_hr', 'approval_required', 'is_extern',", "\"daily_record\" only) User is allowed to access daily_record_freeze (View for", "'id', 'queries', 'realname', 'status', 'timezone', 'username') only) User is allowed", "allowed to access user (View for \"user\" only) Role \"vacation-report\":", "only) User is allowed to access kind (View for \"kind\"", "allowed to edit msg_keyword (Edit for \"msg_keyword\" only) User is", "\"sap_cc\" only) (View for \"time_project\" only) User is allowed Edit", "that daily_record) if the user owns the daily_record or 
has", "only) User is allowed Edit on (Edit for \"daily_record\": ('status',", "search for their own messages (Search for \"msg\" only) User", "(Search for \"time_record\" only) User is allowed to search time_wp", "to create msg_keyword (Create for \"msg_keyword\" only) User is allowed", "is allowed to edit msg_keyword (Edit for \"msg_keyword\" only) User", "for \"overtime_correction\" only) User is allowed to access query (View", "in the same Org-Location as the given user (View for", "user (also applies to timetracking by, supervisor and approval delegated)", "prodcat (View for \"prodcat\" only) User is allowed to access", "User is allowed to create contract_type (Create for \"contract_type\" only)", "User is allowed to access functional_role (View for \"functional_role\" only)", "is allowed to search support (Search for \"support\" only) User", "\"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp', 'user') only) User", "only) User is allowed to search user_status (Search for \"user\":", "for \"user_dynamic\": ('id', 'sap_cc', 'user', 'valid_from', 'valid_to') only) User is", "for \"severity\" only) User is allowed to edit status (Edit", "it_category (Create for \"it_category\" only) User is allowed to create", "on (Edit for \"time_project\": ('group_lead', 'purchasing_agents', 'team_lead') only) Role \"project\":", "to edit time_wp_group (Edit for \"time_wp_group\" only) Role \"project_view\": User", "is allowed to access cost_center_permission_group (View for \"cost_center_permission_group\" only) User", "('deputy', 'planned_effort', 'nosy') only) User is allowed to edit workpackage", "is allowed to access sup_type (View for \"sup_type\" only) User", "(Create for \"time_wp\" only) User is allowed to edit (some", "access customer_agreement (View for \"customer_agreement\" only) User is allowed to", "(View for \"user\": ('roles',) only) User is allowed View on", "\"hr-vacation\": User is allowed to access contract_type 
(View for \"contract_type\"", "nosy list of time category or if user is department", "allowed to create file (Create for \"file\" only) User is", "area (View for \"area\" only) User is allowed to access", "access doc_category (View for \"doc_category\" only) User is allowed to", "on (Edit for \"msg\": ('author', 'date', 'id', 'keywords', 'subject', 'summary')", "is allowed to edit room (Edit for \"room\" only) User", "he's the owner of the contact (Edit for \"user_contact\": ('visible',)", "('status', 'time_record') only) User is allowed Edit on (Edit for", "(Create for \"support\" only) User is allowed to edit analysis_result", "\"product\" only) User is allowed to access product_family (View for", "issue if issue is non-confidential or user is on nosy", "he's the owner of the contact or the contact is", "uc_type (View for \"uc_type\" only) User is allowed to access", "('name',) only) User is allowed to view work package and", "non-confidential or user is on nosy list (Edit for \"support\":", "for \"user_contact\" only) User is allowed to view leave submission", "for \"time_activity\" only) User is allowed to create time_activity_perm (Create", "to edit reporting_group (Edit for \"reporting_group\" only) User is allowed", "only) User is allowed Edit on it_issue if it_issue is", "allowed to access user_status (View for \"user_status\" only) User is", "User is allowed to access contract_type (View for \"contract_type\" only)", "at the given date (Edit for \"daily_record_freeze\": ('frozen',) only) User", "allowed to edit time_wp_group (Edit for \"time_wp_group\" only) Role \"project_view\":", "msg is linked from an item with Edit permission (Edit", "(Edit for \"cost_center\" only) User is allowed to edit cost_center_group", "allowed to edit absence_type (Edit for \"absence_type\" only) User is", "create absence (Create for \"absence\" only) User is allowed to", "User is allowed to access doc_status (View for \"doc_status\" only)", "only) User may view time category 
if user is owner", "\"user_contact\" only) Users may view user_dynamic records for ad_domain for", "User is allowed to edit time_activity_perm (Edit for \"time_activity_perm\" only)", "is allowed to create cost_center_permission_group (Create for \"cost_center_permission_group\" only) User", "('domain_part',) only) User is allowed Edit on (Edit for \"user\":", "for \"leave_status\" only) User is allowed to access location (View", "for \"daily_record\" only) User is allowed to create doc (Create", "\"it_issue\" only) User is allowed to create it_project (Create for", "daily_record_status (View for \"daily_record_status\" only) User is allowed to access", "(View for \"absence\" only) User is allowed to access absence_type", "is allowed to view their own overtime information (View for", "is allowed to edit doc_status (Edit for \"doc_status\" only) User", "'pictures', 'roles', 'timetracking_by', 'timezone', 'username') only) User is allowed Edit", "allowed to edit safety_level (Edit for \"safety_level\" only) User is", "(Create for \"it_request_type\" only) User is allowed to create mailgroup", "by user may edit time_records owned by user (Edit for", "create time_activity_perm (Create for \"time_activity_perm\" only) User is allowed to", "for \"doc\" only) User is allowed to edit ext_tracker_state (Edit", "access business_unit (View for \"business_unit\" only) User is allowed to", "given user (View for \"time_record\" only) Role \"hr-vacation\": User is", "User is allowed to search issue (Search for \"issue\" only)", "may manipulate user Roles through the web (Web Roles) User", "\"room\" only) (Retire for \"room\" only) User is allowed Edit", "status (View for \"status\" only) User is allowed to access", "edit msg_keyword (Edit for \"msg_keyword\" only) User is allowed to", "or on nosy list of time category or if user", "create doc_status (Create for \"doc_status\" only) User is allowed to", "(View for \"product_family\" only) User is allowed to access product_type", 
"(View for \"time_record\" only) User is allowed to view (some", "'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'is_extern', 'is_public', 'id', 'name', 'project', 'responsible',", "doc_issue_status (View for \"doc_issue_status\" only) User is allowed to access", "(View for \"contract_type\" only) User is allowed to create user", "public_holiday (View for \"public_holiday\" only) User is allowed to access", "\"query\" only) Role \"facility\": (Restore for \"room\" only) (Retire for", "(Edit for \"file\": ('name', 'type') only) User is allowed Edit", "edit absence_type (Edit for \"absence_type\" only) User is allowed to", "only) User is allowed to create customer (Create for \"customer\"", "only) User is allowed to edit cost_center_group (Edit for \"cost_center_group\"", "for \"user_dynamic\" only) User is allowed to edit auto_wp (Edit", "domain_permission (Create for \"domain_permission\" only) User is allowed to create", "only) User is allowed to access product_type (View for \"product_type\"", "only) User is allowed Edit on (Edit for \"msg\": ('keywords',)", "only) User is allowed to create department (Create for \"department\"", "'return_type', 'sap_ref', 'send_to_customer', 'serial_number', 'set_first_reply', 'status', 'superseder', 'title', 'type', 'warranty')", "(View for \"sup_prio\" only) User is allowed to access sup_status", "\"user_functional_role\" only) User is allowed Edit on (Edit for \"user\":", "only) User is allowed to access user_functional_role (View for \"user_functional_role\"", "User may view their own user functional role (View for", "only) User is allowed to edit it_project (Edit for \"it_project\"", "access it_prio (View for \"it_prio\" only) User is allowed to", "list (Edit for \"it_project\": ('messages', 'files', 'nosy') only) User is", "for \"time_record\" only) User is allowed to edit work_location (Edit", "create it_request_type (Create for \"it_request_type\" only) User is allowed to", "for \"msg\" only) User 
is allowed to search for their", "(Create for \"ext_tracker\" only) User is allowed to create issue", "'room', 'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'tt_lines', 'vie_user'] only)", "allowed to access org_location (View for \"org_location\" only) User is", "\"time_wp\": ('activity', 'actor', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date',", "time_records for that day (View for \"daily_record\" only) Role \"user_view\":", "time_project (Create for \"time_project\" only) User is allowed to create", "User is allowed to create safety_level (Create for \"safety_level\" only)", "User is allowed to edit public_holiday (Edit for \"public_holiday\" only)", "(View for \"doc_category\" only) User is allowed to access doc_issue_status", "only) User is allowed to create file (Create for \"file\"", "create org_location (Create for \"org_location\" only) User is allowed to", "\"it_issue\" only) User is allowed to search it_project (Search for", "to access it_issue_status (View for \"it_issue_status\" only) User is allowed", "allowed to search leave_submission (Search for \"leave_submission\" only) User is", "(View for \"sup_execution\" only) User is allowed to access sup_prio", "edit support (Edit for \"support\" only) Role \"time-report\": User is", "whom approvals are delegated (Edit for \"leave_submission\": ('status',) only) User", "\"user\": ('id', 'nickname', 'username') only) External users are allowed to", "(Create for \"public_holiday\" only) User is allowed to create reporting_group", "(Edit for \"room\" only) User is allowed to edit uc_type", "for \"sup_type\" only) User is allowed to access sup_warranty (View", "daily_record) if the user owns the daily_record or has role", "only) User is allowed to access severity (View for \"severity\"", "on the list of allowed external users or there is", "allowed to search it_project (Search for \"it_project\" only) User is", "only) Users 
may view/edit user records for ad_domain for which", "'timezone', 'tt_lines', 'vie_user'] only) Role \"dom-user-edit-office\": User is allowed to", "(View for \"return_type\" only) User is allowed to access room", "category or if user is department manager of time category", "is allowed to edit doc (Edit for \"doc\" only) User", "\"cost_center_group\" only) User is allowed to edit cost_center_status (Edit for", "date (Edit for \"daily_record_freeze\": ('frozen',) only) User is allowed to", "(Create for \"contact\" only) User is allowed to edit contact", "to create absence_type (Create for \"absence_type\" only) User is allowed", "allowed Edit on (Edit for \"organisation\": ('domain_part',) only) User is", "of their details (View for \"user\": ('activity', 'actor', 'creation', 'creator',", "'status', 'subst_active', 'substitute', 'supervisor', 'timezone', 'title', 'tt_lines', 'username') only) User", "\"time_project\": ('infosec_req', 'is_extern', 'max_hours', 'op_project', 'planned_effort', 'product_family', 'project_type', 'reporting_group', 'work_location')", "and he is responsible for the time category (Edit for", "is allowed to access test_level (View for \"test_level\" only) User", "for \"sap_cc\": ('group_lead', 'team_lead') only) User is allowed Edit on", "time_project_status (View for \"time_project_status\" only) User is allowed to access", "(View for \"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day', 'status', 'time_wp', 'user')", "only) User is allowed to edit several fields if he", "User is allowed to edit domain_permission (Edit for \"domain_permission\" only)", "allowed to access sup_prio (View for \"sup_prio\" only) User is", "'org_location') only) User is allowed View on file if file", "to access user_contact (View for \"user_contact\" only) User is allowed", "for \"leave_submission\": ('status',) only) User is allowed to access contract_type", "is supervisor or substitute supervisor of the owner of the", "to access 
overtime_correction (View for \"overtime_correction\" only) User is allowed", "(Edit for \"location\" only) User is allowed to edit org_location", "(View for \"it_issue\" only) User is allowed View on it_project", "to create doc_status (Create for \"doc_status\" only) User is allowed", "'id', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'queries', 'realname',", "is allowed to create ext_tracker (Create for \"ext_tracker\" only) User", "['room'] only) Users may view/edit user records for ad_domain for", "(Edit for \"location\": ('domain_part',) only) User is allowed Edit on", "only) Role \"pgp\": Role \"procurement\": (View for \"sap_cc\" only) (View", "User is allowed to access sap_cc (View for \"sap_cc\" only)", "for \"reporting_group\" only) User is allowed to edit sap_cc (Edit", "for \"work_location\" only) User is allowed to create daily_record (Create", "for \"msg\": ('keywords',) only) User is allowed Edit on file", "rest interface (Rest Access) User may access the web interface", "for \"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname',", "leave_submission (Search for \"leave_submission\" only) User is allowed to search", "only) User is allowed to create product_type (Create for \"product_type\"", "(View for \"time_record\" only) Role \"hr-vacation\": User is allowed to", "to create room (Create for \"room\" only) User is allowed", "owned by user (View for \"time_record\" only) Users are allowed", "User is allowed to access contact (View for \"contact\" only)", "to search it_project (Search for \"it_project\" only) User is allowed", "is allowed Edit on it_project if it_project is non-confidential or", "\"ext_tracker\" only) User is allowed to create issue (Create for", "\"msg\" only) User is allowed to access issue (View for", "only) User is allowed to access user_contact (View for \"user_contact\"", "only) User is 
allowed to access mailgroup (View for \"mailgroup\"", "\"work_location\" only) User is allowed to create daily_record (Create for", "allowed to create it_request_type (Create for \"it_request_type\" only) User is", "is allowed Edit on (Edit for \"msg\": ('keywords',) only) User", "are in the domain_permission for the user (Edit for \"user\":", "area (Edit for \"area\" only) User is allowed to edit", "User is allowed to create customer_agreement (Create for \"customer_agreement\" only)", "'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'id', 'is_extern', 'is_public', 'name', 'project', 'responsible',", "to access time_wp (View for \"time_wp\" only) Role \"sec-incident-nosy\": User", "to edit several fields if he is Responsible for an", "'needs', 'nosy', 'numeric_effort', 'part_of', 'planned_begin', 'planned_end', 'priority', 'release', 'responsible', 'safety_level',", "correction is not frozen (Edit for \"overtime_correction\" only) User is", "Edit on (Edit for \"location\": ('domain_part',) only) User is allowed", "only) User is allowed to create user_dynamic (Create for \"user_dynamic\"", "with the correct domain (View for \"user_dynamic\" only) User is", "is allowed View on (View for \"user\": ('business_responsible', 'department_temp', 'timetracking_by',", "of the owner of the daily record (the supervisor relationship", "\"project_view\": User is allowed to access time_project (View for \"time_project\"", "their queries (Edit for \"query\" only) User is allowed to", "(Create for \"msg\" only) User is allowed to create queries", "edit several fields if he is Responsible for an it_issue", "to view some of their details (View for \"user\": ('activity',", "edit overtime correction if the overtime correction is not frozen", "summary_type (View for \"summary_type\" only) User is allowed to access", "only) User is allowed to edit status_transition (Edit for \"status_transition\"", "User is allowed Edit on support if support is non-confidential", 
"\"daily_record_freeze\": ('frozen',) only) User is allowed to edit location (Edit", "(Edit for \"time_project\": ('group_lead', 'purchasing_agents', 'team_lead') only) Role \"project\": User", "\"time_report\" only) User may use the email interface (Email Access)", "allowed to access support (View for \"support\" only) User is", "may view work package if responsible for it, if user", "department (Create for \"department\" only) User is allowed to create", "for \"contract_type\" only) User is allowed to access leave_submission (View", "(Edit for \"ext_tracker_state\" only) Role \"nosy\": User may get nosy", "or user is on nosy list (View for \"it_project\" only)", "allowed to create time_wp_group (Create for \"time_wp_group\" only) User is", "only) External users are allowed to access issue if they", "to edit org_location (Edit for \"org_location\" only) User is allowed", "for \"status\" only) User is allowed to access status_transition (View", "is allowed to edit (some of) their own user details", "\"msg_keyword\" only) User is allowed to edit safety_level (Edit for", "to that daily_record) if the user owns the daily_record or", "'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'room', 'sex', 'status', 'subst_active',", "(View for \"severity\" only) User is allowed to access status", "allowed to create test_level (Create for \"test_level\" only) User is", "only) User is allowed to create mailgroup (Create for \"mailgroup\"", "(Create for \"it_int_prio\" only) User is allowed to create it_issue", "is allowed View on file if file is linked from", "allowed to access query (View for \"query\" only) User is", "may get nosy messages for it_issue (Nosy for \"it_issue\" only)", "customer_agreement (Create for \"customer_agreement\" only) User is allowed to create", "file if file is linked from an item with Edit", "the department manager of the owner of the daily record.", "\"time_wp\": ('name', 'project') only) User is allowed to 
view/edit workpackage", "User is allowed to access it_project_status (View for \"it_project_status\" only)", "access kind (View for \"kind\" only) User is allowed to", "permission (View for \"query\" only) Role \"facility\": (Restore for \"room\"", "only) User is allowed to create it_issue (Create for \"it_issue\"", "(Edit for \"cost_center_group\" only) User is allowed to edit cost_center_status", "only) User is allowed to edit organisation (Edit for \"organisation\"", "they have search permission (View for \"query\" only) Role \"facility\":", "contract_type (View for \"contract_type\" only) User is allowed to access", "\"mailgroup\" only) User is allowed to create return_type (Create for", "(View for \"product\" only) User is allowed to access product_family", "\"contract_type\" only) User is allowed to create leave_submission (Create for", "'timezone', 'username') only) User is allowed Edit on (Edit for", "for ad_domain for which they are in the domain_permission for", "for \"doc_issue_status\" only) User is allowed to access ext_tracker (View", "own user details (Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'lunch_duration', 'lunch_start',", "search daily_record (Search for \"daily_record\" only) User is allowed to", "only) User is allowed to search daily_record (Search for \"daily_record\"", "allowed to access project_type (View for \"project_type\" only) User is", "only) User is allowed to create category (Create for \"category\"", "access severity (View for \"severity\" only) User is allowed to", "is allowed to create contract_type (Create for \"contract_type\" only) User", "allowed to create cost_center_permission_group (Create for \"cost_center_permission_group\" only) User is", "('id', 'sap_cc', 'user', 'valid_from', 'valid_to') only) User is allowed to", "\"mailgroup\" only) User is allowed to edit domain_permission (Edit for", "allowed to access it_project (View for \"it_project\" only) User is", "only) User is allowed to access daily 
record if he", "details (View for \"user\": ('entry_date', 'planning_role') only) User is allowed", "only) (Retire for \"room\" only) User is allowed View on", "User is allowed to access vacation_report (View for \"vacation_report\" only)", "is allowed to edit artefact (Edit for \"artefact\" only) User", "Users may view user_dynamic records for ad_domain for which they", "for \"support\" only) User is allowed to access absence (View", "edit doc_category (Edit for \"doc_category\" only) User is allowed to", "artefact (Create for \"artefact\" only) User is allowed to create", "allowed to edit return_type (Edit for \"return_type\" only) User is", "User is allowed to access product (View for \"product\" only)", "for the user (Edit for \"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date',", "for \"test_level\" only) User is allowed to create file (Create", "access overtime_period (View for \"overtime_period\" only) User is allowed to", "\"sup_classification\" only) User is allowed to edit support (Edit for", "for \"kind\" only) User is allowed to access leave_status (View", "'responsible', 'return_type', 'sap_ref', 'send_to_customer', 'serial_number', 'set_first_reply', 'status', 'superseder', 'title', 'type',", "\"status_transition\" only) User is allowed to create test_level (Create for", "allowed Edit on (Edit for \"location\": ('domain_part',) only) User is", "for \"it_project\" only) User is allowed to search leave_submission (Search", "allowed external users or there is a transitive permission via", "if responsible for it, if user is owner or deputy", "get nosy messages for doc (Nosy for \"doc\" only) User", "is allowed to view overtime information if he/she is in", "for \"user\": ['room'] only) Users may view/edit user records for", "\"issue\" only) User is allowed to create area (Create for", "is allowed to access department (View for \"department\" only) User", "only) User is allowed Edit on msg if msg is", "for \"status_transition\" only) User 
is allowed to create test_level (Create", "(Retire for \"query\" only) User is allowed to search daily_record", "work package or User may view a daily_record (and time_records", "for \"keyword\" only) User is allowed to access kind (View", "if user is owner or deputy of time category or", "search it_issue (Search for \"it_issue\" only) User is allowed to", "user may edit time_records owned by user (View for \"time_record\"", "category names if he/she has role HR or HR-Org-Location (View", "allowed to access time_activity (View for \"time_activity\" only) User is", "edit doc_status (Edit for \"doc_status\" only) User is allowed to", "(View for \"it_category\" only) User is allowed to access it_issue_status", "User is allowed Edit on (Edit for \"time_project\": ('approval_hr', 'approval_required',", "create location (Create for \"location\" only) User is allowed to", "(Edit for \"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files', 'job_description',", "to access it_project (View for \"it_project\" only) Role \"sec-incident-responsible\": User", "is allowed to search leave_submission (Search for \"leave_submission\" only) User", "User is allowed to access timesheet (View for \"timesheet\" only)", "for \"time_wp\" only) Role \"sec-incident-nosy\": User is allowed to access", "('name', 'type') only) User is allowed Edit on (Edit for", "\"issue\" only) User is allowed to edit keyword (Edit for", "\"severity\" only) User is allowed to create status (Create for", "(Create for \"reporting_group\" only) User is allowed to create room", "in the domain_permission for the user (Edit for \"user\": ['clearance_by',", "for \"cost_center\" only) User is allowed to edit cost_center_group (Edit", "to view their own messages (View for \"msg\" only) User", "User is allowed to edit doc_status (Edit for \"doc_status\" only)", "to edit vacation_correction (Edit for \"vacation_correction\" only) Role \"issue_admin\": User", "to create 
severity (Create for \"severity\" only) User is allowed", "\"category\" only) User is allowed to create doc_issue_status (Create for", "'subst_active', 'substitute', 'timezone', 'tt_lines') only) User is allowed to edit", "to edit time_record (Edit for \"time_record\" only) User is allowed", "not frozen at the given date (Edit for \"daily_record_freeze\": ('frozen',)", "is allowed to edit freeze record if not frozen at", "allowed to create it_project (Create for \"it_project\" only) User is", "to edit it_project (Edit for \"it_project\" only) User is allowed", "(View for \"work_location\" only) User is allowed to create daily_record", "is allowed to search daily_record (Search for \"daily_record\" only) User", "to create test_level (Create for \"test_level\" only) User is allowed", "is allowed to create product_family (Create for \"product_family\" only) User", "('domain_part',) only) User is allowed Edit on (Edit for \"organisation\":", "'wp_no') only) User is allowed to search user_status (Search for", "return_type (Create for \"return_type\" only) User is allowed to create", "(View for \"leave_status\" only) User is allowed to access location", "User is allowed to search daily_record (Search for \"daily_record\" only)", "is allowed to create severity (Create for \"severity\" only) User", "allowed to access sup_execution (View for \"sup_execution\" only) User is", "for \"department\" only) User is allowed to create organisation (Create", "the email interface (Email Access) Users are allowed to edit", "edit freeze record if not frozen at the given date", "User is allowed to create time_activity (Create for \"time_activity\" only)", "user is the department manager of the owner of the", "to create daily_record_freeze (Create for \"daily_record_freeze\" only) User is allowed", "User is allowed to edit work_location (Edit for \"work_location\" only)", "(Search for \"it_project\" only) User is allowed to search leave_submission", "role HR-Org-Location and is in the 
same Org-Location as the", "only) User is allowed to create test_level (Create for \"test_level\"", "\"daily_record\" only) User may view their own user functional role", "the time category (Edit for \"time_project\": ('deputy', 'planned_effort', 'nosy') only)", "to edit organisation (Edit for \"organisation\" only) User is allowed", "edit time category if the status is \"Open\" and he", "'title'] only) External users are allowed to access issue if", "only) Role \"office\": (Restore for \"room\" only) (Retire for \"room\"", "\"it_int_prio\" only) User is allowed to create it_issue (Create for", "create user_contact (Create for \"user_contact\" only) User is allowed to", "only) User is allowed to access it_issue (View for \"it_issue\"", "only) User is allowed to edit reporting_group (Edit for \"reporting_group\"", "is allowed View on (View for \"user_status\": ('name',) only) User", "User is allowed to view time record data if he/she", "User is allowed to edit freeze record if not frozen", "User is allowed to edit product_family (Edit for \"product_family\" only)", "HR or HR-Org-Location (View for \"time_project\": ('name',) only) User is", "is allowed to search for their queries (Search for \"query\"", "(Retire for \"room\" only) User is allowed Edit on (Edit", "is allowed Edit on (Edit for \"department\": ('doc_num',) only) User", "allowed to edit user_functional_role (Edit for \"user_functional_role\" only) Role \"hr\":", "(Search for \"user_dynamic\" only) User is allowed to view dynamic", "\"file\" only) User is allowed Edit on issue if issue", "fields if he is Stakeholder/Responsible for an it_issue (Edit for", "'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) (View", "for \"doc_status\" only) User is allowed to create product_type (Create", "(View for \"daily_record\" only) User is allowed to access daily_record_status", "package or User may view a daily_record (and time_records that", "Responsible for 
an it_issue (Edit for \"it_issue\": ('responsible',) only) User", "only) User is allowed to retire their queries (Retire for", "is allowed to create return_type (Create for \"return_type\" only) User", "owner or deputy (Edit for \"time_wp\": ('cost_center', 'is_public', 'name', 'responsible',", "is allowed to edit time_record (Edit for \"time_record\" only) User", "is allowed Edit on (Edit for \"time_project\": ('group_lead', 'team_lead') only)", "(View for \"it_project\" only) Role \"sec-incident-responsible\": User is allowed to", "contact (Edit for \"user_contact\": ('visible',) only) User is allowed to", "allowed to edit time_activity_perm (Edit for \"time_activity_perm\" only) User is", "access functional_role (View for \"functional_role\" only) User is allowed to", "\"location\" only) User is allowed to access mailgroup (View for", "only) User is allowed to edit user_functional_role (Edit for \"user_functional_role\"", "user (View for \"user\": ['contacts', 'position_text', 'room'] only) Role \"external\":", "User is allowed to edit keyword (Edit for \"keyword\" only)", "absence_type (View for \"absence_type\" only) User is allowed to access", "to access daily record if he is owner or supervisor", "access leave_submission (View for \"leave_submission\" only) User is allowed to", "\"it_project_status\" only) User is allowed to access it_request_type (View for", "on (Edit for \"organisation\": ('domain_part',) only) User is allowed Edit", "is a transitive permission via containers (Edit for \"issue\": ['activity',", "\"doc_admin\": User is allowed Edit on (Edit for \"department\": ('doc_num',)", "has role HR or HR-Org-Location (View for \"time_wp\": ('name', 'project')", "the domain_permission for the user (View for \"user\": ['clearance_by', 'contacts',", "'superseder', 'test_level', 'title'] only) External users are allowed to access", "for \"safety_level\" only) User is allowed to access severity (View", "nosy messages for it_issue (Nosy for \"it_issue\" 
only) User may", "for \"org_location\" only) User is allowed to access organisation (View", "User is allowed to search user_status (Search for \"user\": ('status',)", "'vie_user', 'vie_user_bl_override', 'vie_user_ml') only) User is allowed View on (View", "'department_temp', 'timetracking_by', 'vie_user', 'vie_user_bl_override', 'vie_user_ml') only) User is allowed View", "'id', 'keywords', 'kind', 'maturity_index', 'messages', 'needs', 'nosy', 'numeric_effort', 'part_of', 'planned_begin',", "(Restore for \"room\" only) (Retire for \"room\" only) User is", "the same Org-Location as the given user (View for \"time_record\"", "leave_submission (Edit for \"leave_submission\" only) User is allowed to edit", "(View for \"daily_record\" only) User is allowed to access daily_record_freeze", "(Search for \"support\" only) User is allowed to search time_record", "only) User is allowed View on support if support is", "is allowed to access leave_status (View for \"leave_status\" only) User", "allowed to access status (View for \"status\" only) User is", "time category if the status is \"Open\" and he is", "overtime_correction (Create for \"overtime_correction\" only) User is allowed to create", "to access cost_center_permission_group (View for \"cost_center_permission_group\" only) User is allowed", "only) (Retire for \"user_functional_role\" only) User is allowed Edit on", "view freeze information if he/she is in group HR-Org-Location and", "\"time_record\" only) User is allowed to create uc_type (Create for", "\"issue\" only) User may get nosy messages for it_issue (Nosy", "'test_level', 'title'] only) User is allowed View on (View for", "product_type (Create for \"product_type\" only) User is allowed to create", "User is allowed to edit status_transition (Edit for \"status_transition\" only)", "create mailgroup (Create for \"mailgroup\" only) User is allowed to", "Role \"dom-user-edit-hr\": (Search for \"user_dynamic\" only) May only view/edit records", "(Retire for 
\"user_functional_role\" only) User is allowed Edit on (Edit", "is allowed to access sup_status (View for \"sup_status\" only) User", "through the web (Web Roles) User may restore everything (Restore)", "(Create for \"support\" only) User is allowed to create time_record", "'purchasing_agents', 'team_lead') only) Role \"project\": User is allowed Edit on", "time category (Edit for \"time_project\": ('deputy', 'planned_effort', 'nosy') only) User", "test_level (Create for \"test_level\" only) User is allowed to edit", "is allowed to search time_record (Search for \"time_record\" only) User", "\"contract_type\" only) User is allowed to create user (Create for", "allowed to create reference (Create for \"reference\" only) User is", "on (Edit for \"time_project\": ('cost_center', 'department', 'deputy', 'description', 'name', 'nosy',", "('contacts',) only) User is allowed View on (View for \"user_dynamic\":", "messages (View for \"msg\" only) User is allowed to view", "\"user\" only) User is allowed to create user_contact (Create for", "Edit on (Edit for \"user\": ('business_responsible', 'scale_seniority') only) User is", "User is allowed to access severity (View for \"severity\" only)", "allowed to view time record if he is the supervisor", "for \"absence\" only) User is allowed to edit absence_type (Edit", "for \"location\" only) User is allowed to edit org_location (Edit", "'user') only) User may see time report if reponsible or", "\"daily_record\" only) Role \"user_view\": User is allowed to access user", "'severity', 'status', 'superseder', 'test_level', 'title'] only) External users are allowed", "is allowed to create ext_tracker_state (Create for \"ext_tracker_state\" only) User", "to access it_project (View for \"it_project\" only) Role \"staff-report\": Role", "record if he is owner or supervisor or timetracking-by user", "(Search for \"daily_record_freeze\" only) (Search for \"overtime_correction\" only) (Search for", "User may access the xmlrpc interface 
(Xmlrpc Access) User may", "if he is the supervisor or the person to whom", "is allowed to edit issue (Edit for \"issue\" only) User", "(Retire) User may use the email interface (Email Access) User", "work_location (Edit for \"work_location\" only) Role \"doc_admin\": User is allowed", "are in the domain_permission for the user (View for \"user_dynamic\"", "['activity', 'actor', 'area', 'category', 'closed', 'composed_of', 'creation', 'creator', 'cur_est_begin', 'cur_est_end',", "only) User is allowed to create ext_msg (Create for \"ext_msg\"", "to edit cost_center_permission_group (Edit for \"cost_center_permission_group\" only) Role \"contact\": User", "(View for \"msg_keyword\" only) User is allowed to access org_group", "\"customer\" only) User is allowed to access customer_agreement (View for", "\"time_project\": ('name',) only) User is allowed to view work package", "summary_report (View for \"summary_report\" only) User is allowed to access", "for \"issue\" only) User is allowed to view their own", "is allowed to access daily record if he is owner", "create customer_agreement (Create for \"customer_agreement\" only) User is allowed to", "some of their details (Edit for \"user\": ('csv_delimiter', 'hide_message_files', 'password',", "Edit on support if support is non-confidential or user is", "\"leave_submission\" only) User is allowed to view selected fields in", "allowed to create mailgroup (Create for \"mailgroup\" only) User is", "(Create for \"department\" only) User is allowed to create organisation", "allowed to access it_project (View for \"it_project\" only) Role \"sec-incident-responsible\":", "(View for \"category\" only) User is allowed to access contact", "query (Create for \"query\" only) User is allowed to edit", "'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) User is allowed to", "is allowed to edit it_issue (Edit for \"it_issue\" only) User", "sup_classification (View for \"sup_classification\" only) User is allowed 
to access", "only) User is allowed to access time_wp_group (View for \"time_wp_group\"", "'related_support', 'release', 'responsible', 'return_type', 'sap_ref', 'send_to_customer', 'serial_number', 'set_first_reply', 'status', 'superseder',", "\"query\" only) User is allowed to search daily_record (Search for", "\"msgsync\": (Search for \"msg\": ('date', 'id') only) User is allowed", "'classification', 'closed', 'confidential', 'customer', 'emails', 'execution', 'external_ref', 'files', 'goods_received', 'goods_sent',", "doc_category (Edit for \"doc_category\" only) User is allowed to edit", "'scale_seniority') only) User is allowed View on (View for \"user\":", "for \"user_contact\" only) User is allowed to access user_dynamic (View", "for \"product_type\" only) User is allowed to access project_type (View", "for \"time_record\" only) User is allowed to edit uc_type (Edit", "only) User is allowed to access ext_tracker (View for \"ext_tracker\"", "as the given user (View for \"daily_record_freeze\" only) User is", "to retire their queries (Retire for \"query\" only) User is", "for \"support\" only) Role \"time-report\": User is allowed to access", "for \"org_location\" only) User is allowed to create organisation (Create", "item with View permission (View for \"file\" only) User is", "delegated (Edit for \"leave_submission\": ('status',) only) User is allowed to", "for \"daily_record\": ('status', 'time_record') only) User is allowed Edit on", "dynamic user data if he/she is in group HR-Org-Location and", "'purchasing_agents', 'team_lead') only) User is allowed Edit on (Edit for", "\"contact\" only) User is allowed to edit customer (Edit for", "allowed to access sup_type (View for \"sup_type\" only) User is", "view/edit workpackage if he is owner or project responsible/deputy (Edit", "reference (View for \"reference\" only) User is allowed to access", "edit time_project_status (Edit for \"time_project_status\" only) User is allowed to", "only) User is allowed to 
access organisation (View for \"organisation\"", "user records for ad_domain for which they are in the", "only) User is allowed to edit location (Edit for \"location\"", "(Create for \"analysis_result\" only) User is allowed to create contact", "User is allowed to create cost_center_group (Create for \"cost_center_group\" only)", "access ext_tracker (View for \"ext_tracker\" only) User is allowed to", "Access) User may create everything (Create) User may edit everything", "the web (Web Roles) Role \"hr-leave-approval\": User is allowed Edit", "\"leave_submission\" only) User is allowed to edit vacation_correction (Edit for", "(Search for \"query\" only) User is allowed to search issue", "is allowed to access area (View for \"area\" only) User", "if support is non-confidential or user is on nosy list", "return_type (Edit for \"return_type\" only) User is allowed to edit", "(Edit for \"time_project\": ('approval_hr', 'approval_required', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'no_overtime',", "whom approvals are delegated (View for \"time_record\" only) User is", "('messages', 'files', 'nosy') only) User is allowed Edit on support", "applies to timetracking by, supervisor and approval delegated) (View for", "(View for \"ext_tracker_type\" only) Role \"msgsync\": (Search for \"msg\": ('date',", "User is allowed to create doc_status (Create for \"doc_status\" only)", "allowed to access doc_issue_status (View for \"doc_issue_status\" only) User is", "access time_report (View for \"time_report\" only) User is allowed to", "access uc_type (View for \"uc_type\" only) User is allowed to", "is allowed to access it_category (View for \"it_category\" only) User", "User may manipulate user Roles through the web (Web Roles)", "to create category (Create for \"category\" only) User is allowed", "correct domain (View for \"user_dynamic\" only) User is allowed to", "search time_record (Search for \"time_record\" only) User is allowed to", "View on 
(View for \"user\": ('activity', 'actor', 'ad_domain', 'address', 'alternate_addresses',", "Role \"nosy\": User may get nosy messages for doc (Nosy", "to edit it_request_type (Edit for \"it_request_type\" only) User is allowed", "is allowed to create msg_keyword (Create for \"msg_keyword\" only) User", "only) User is allowed to edit msg_keyword (Edit for \"msg_keyword\"", "for \"vacation_report\" only) User is allowed to access work_location (View", "the web (Web Roles) Role \"itview\": User is allowed to", "for \"user_contact\" only) User is allowed to create absence (Create", "\"support\" only) User is allowed to edit analysis_result (Edit for", "\"overtime_correction\" only) (Search for \"time_activity_perm\" only) (Search for \"time_record\" only)", "is allowed to access contact_type (View for \"contact_type\" only) User", "for \"sap_cc\" only) User is allowed to edit time_activity (Edit", "'actor', 'auto_wp', 'cost_center', 'creation', 'creator', 'description', 'durations_allowed', 'epic_key', 'has_expiration_date', 'is_extern',", "\"user\": ('activity', 'actor', 'address', 'alternate_addresses', 'creation', 'creator', 'id', 'queries', 'realname',", "allowed to access it_prio (View for \"it_prio\" only) User is", "access issue (View for \"issue\" only) User is allowed to", "allowed to view their own overtime information (View for \"overtime_correction\"", "several fields if he is Responsible for an it_issue (Edit", "User is allowed to edit reference (Edit for \"reference\" only)", "names if he/she has role HR or HR-Org-Location (View for", "'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'room', 'sex', 'status', 'subst_active', 'substitute',", "allowed to edit mailgroup (Edit for \"mailgroup\" only) User may", "for \"category\" only) User is allowed to create doc_issue_status (Create", "(Edit for \"it_issue\": ('messages', 'files', 'nosy') only) User is allowed", "'op_project', 'overtime_reduction', 'responsible', 'status', 
'work_location', 'wps') only) Search (Search for", "'is_extern', 'is_public', 'id', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel',", "own files (View for \"file\" only) User may access the", "for \"msg_keyword\" only) User is allowed to create safety_level (Create", "for \"user_dynamic\" only) User is allowed to view freeze information", "in group HR-Org-Location and in the same Org-Location as the", "only view/edit records with the correct domain (Edit for \"user_dynamic\"", "'messages', 'nosy', 'number_effected', 'numeric_effort', 'prio', 'prodcat', 'product', 'related_issues', 'related_support', 'release',", "for \"kind\" only) User is allowed to edit msg_keyword (Edit", "['contacts', 'csv_delimiter', 'department_temp', 'entry_date', 'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start',", "is allowed to edit support (Edit for \"support\" only) Role", "is allowed to create org_location (Create for \"org_location\" only) User", "'firstname', 'hide_message_files', 'job_description', 'lastname', 'lunch_duration', 'lunch_start', 'nickname', 'pictures', 'position_text', 'room',", "is allowed to see time record if he is allowed", "is allowed to access vacation_correction (View for \"vacation_correction\" only) Role", "only) User is allowed to access test_level (View for \"test_level\"", "User may get nosy messages for it_issue (Nosy for \"it_issue\"", "if he is owner or supervisor or timetracking-by user (Edit", "records with the correct domain (Edit for \"user_dynamic\" only) May", "'responsible', 'time_wp_summary_no', 'wp_no') only) User is allowed to retire their", "may edit time_records owned by user (Restore for \"time_record\" only)", "allowed to access cost_center_group (View for \"cost_center_group\" only) User is", "their own files (View for \"file\" only) User is allowed", "for \"msg\" only) User is allowed View on support if", "seen (View for \"daily_record\" only) User 
may view their own", "reporting_group (Create for \"reporting_group\" only) User is allowed to create", "'analysis_start', 'bcc', 'business_unit', 'category', 'cc', 'cc_emails', 'classification', 'closed', 'confidential', 'customer',", "(Edit for \"msg\": ('keywords',) only) User is allowed Edit on", "nosy list (View for \"support\" only) User is allowed to", "and time category names if he/she has role HR or", "for \"analysis_result\" only) User is allowed to edit contact (Edit", "to view their own files (View for \"file\" only) User", "for \"uc_type\" only) User is allowed to edit absence (Edit", "is allowed to access project_type (View for \"project_type\" only) User", "allowed to create status_transition (Create for \"status_transition\" only) User is", "allowed to access contact_type (View for \"contact_type\" only) User is", "'nickname', 'password', 'timezone', 'username') only) User is allowed Edit on", "User may access the web interface (Web Access) User may", "only) User is allowed to edit vacation_correction (Edit for \"vacation_correction\"", "(Search for \"file\" only) User is allowed to search for", "or the person to whom approvals are delegated (Edit for", "user is on nosy list (View for \"it_issue\" only) User", "to access room (View for \"room\" only) User is allowed", "on (Edit for \"time_wp\": ('project',) only) User is allowed View", "view their own files (View for \"file\" only) User is", "(Edit) User may manipulate user Roles through the web (Web", "is allowed View on (View for \"user\": ('contacts',) only) User", "'sap_cc', 'user', 'valid_from', 'valid_to') only) User is allowed to access", "msg if msg is linked from an item with Edit", "for \"time_activity_perm\" only) (Search for \"time_record\" only) (Search for \"user_dynamic\"", "\"time_record\" only) User or Timetracking by user may edit time_records", "for \"issue\" only) User is allowed to create area (Create", "is allowed to edit uc_type (Edit for \"uc_type\" only) Role", "allowed to 
access time_report (View for \"time_report\" only) User is", "for support (Nosy for \"support\" only) Role \"office\": (Restore for", "'subst_active', 'substitute', 'supervisor', 'timezone', 'title', 'tt_lines', 'username') only) User is", "is allowed to view time record if he is the", "\"contact\" only) User is allowed to create customer (Create for", "is allowed to access ext_tracker_type (View for \"ext_tracker_type\" only) User", "cost_center_permission_group (Create for \"cost_center_permission_group\" only) User is allowed to edit", "org_location (View for \"org_location\" only) User is allowed to access", "(View for \"leave_submission\" only) User is allowed to access overtime_correction", "\"it_int_prio\" only) User is allowed to access it_issue (View for", "freeze information if he/she is in group HR-Org-Location and in", "\"artefact\" only) User is allowed to access business_unit (View for", "web (Web Roles) User may restore everything (Restore) User may", "allowed to create doc_issue_status (Create for \"doc_issue_status\" only) User is", "only) User is allowed to access daily_record_freeze (View for \"daily_record_freeze\"", "User is allowed to access user_functional_role (View for \"user_functional_role\" only)", "for their queries (Search for \"query\" only) User is allowed", "timesheet (View for \"timesheet\" only) User is allowed to access", "report if reponsible or deputy of time project or on", "User is allowed to create absence (Create for \"absence\" only)", "(Web Roles) Role \"itview\": User is allowed to access it_int_prio", "deputy (Edit for \"time_wp\": ('cost_center', 'is_public', 'name', 'responsible', 'time_wp_summary_no', 'wp_no')", "user (Edit for \"user\": ['room'] only) Users may view/edit user", "allowed to edit doc (Edit for \"doc\" only) User is", "allowed to create reporting_group (Create for \"reporting_group\" only) User is", "create auto_wp (Create for \"auto_wp\" only) User is allowed to", "is allowed to create doc_category 
(Create for \"doc_category\" only) User", "Users may view/edit user records for ad_domain for which they", "only) User may view their own user functional role (View", "is allowed to search time_wp (Search for \"time_wp\": ('activity', 'actor',", "it, if user is owner or deputy of time category", "(Create for \"product_family\" only) User is allowed to create public_holiday", "user (Edit for \"user\": ['contacts', 'position_text', 'room'] only) Users may", "(View for \"time_wp\" only) User is allowed to create time_project", "user_functional_role (Create for \"user_functional_role\" only) User is allowed to edit", "allowed to access timesheet (View for \"timesheet\" only) User is", "on (View for \"user_dynamic\": ('department', 'org_location') only) User is allowed", "to access sup_execution (View for \"sup_execution\" only) User is allowed", "Roles \"User,Nosy\" New Email users get the Role \"User\" Role", "\"user_status\": ('name',) only) User is allowed View on file if", "to access cost_center (View for \"cost_center\" only) User is allowed", "New Web users get the Roles \"User,Nosy\" New Email users", "Access) User may use the email interface (Email Access) Users", "(Create for \"it_issue\" only) User is allowed to create leave_submission", "'status', 'work_location', 'wps') only) Search (Search for \"user_contact\" only) User", "Role \"hr-leave-approval\": User is allowed Edit on (Edit for \"leave_submission\":", "access category (View for \"category\" only) User is allowed to", "by user (Restore for \"time_record\" only) User or Timetracking by", "\"daily_record_freeze\" only) (Search for \"overtime_correction\" only) (Search for \"time_activity_perm\" only)", "only) Role \"dom-user-edit-office\": User is allowed to create user_contact (Create", "allowed to access ext_tracker_type (View for \"ext_tracker_type\" only) User is", "'approval_required', 'is_extern', 'is_public_holiday', 'is_special_leave', 'is_vacation', 'no_overtime', 'no_overtime_day', 
'only_hours', 'overtime_reduction') only)", "is allowed to access it_project (View for \"it_project\" only) Role", "'username') only) User is allowed View on (View for \"user_status\":", "supervisor of the owner of the daily record (the supervisor", "by, supervisor and approval delegated) (View for \"time_wp\": ('activity', 'actor',", "issue (View for \"issue\" only) User is allowed to create", "in the domain_permission for the user (View for \"user\": ['contacts',", "it_issue (View for \"it_issue\" only) User is allowed to access", "'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) User is allowed to view", "work package if responsible for it, if user is owner", "Role \"time-report\": User is allowed to access time_report (View for", "only) User is allowed to access user_dynamic (View for \"user_dynamic\"", "(Edit for \"overtime_period\": ('name', 'order') only) (Restore for \"room\" only)", "allowed to create location (Create for \"location\" only) User is", "only) User is allowed to search it_issue (Search for \"it_issue\"", "\"support\" only) User is allowed to search time_record (Search for", "own overtime information (View for \"overtime_correction\" only) User is allowed", "their own overtime information (View for \"overtime_correction\" only) User is", "to edit (some of) their own user details (Edit for", "only) User is allowed Edit on issue if issue is", "User is allowed to access customer (View for \"customer\" only)", "only) User is allowed to access overtime_correction (View for \"overtime_correction\"", "only) User is allowed to edit return_type (Edit for \"return_type\"", "mailgroup (Create for \"mailgroup\" only) User is allowed to create", "(View for \"contract_type\" only) User is allowed to access daily_record", "to edit cost_center (Edit for \"cost_center\" only) User is allowed", "\"time_project\" only) User is allowed to access time_report (View for", "'maturity_index', 'messages', 'needs', 'nosy', 'numeric_effort', 'part_of', 
'planned_begin', 'planned_end', 'priority', 'release',", "for \"keyword\" only) User is allowed to create kind (Create", "time_report (View for \"time_report\" only) User is allowed to create", "only) User is allowed to access status_transition (View for \"status_transition\"", "\"daily_record_freeze\" only) User is allowed to create location (Create for", "for \"domain_permission\" only) User is allowed to edit it_category (Edit", "for \"time_report\" only) User is allowed to create time_report (Create", "Edit on (Edit for \"leave_submission\": ('status',) only) User is allowed", "to create queries (Create for \"query\" only) User is allowed", "an it_issue (Edit for \"it_issue\": ('responsible',) only) User is allowed", "\"user\": ('activity', 'actor', 'ad_domain', 'address', 'alternate_addresses', 'business_responsible', 'clearance_by', 'creation', 'creator',", "'timezone') only) Users are allowed to view some of their", "\"organisation\" only) Role \"pgp\": Role \"procurement\": (View for \"sap_cc\" only)", "User is allowed to create it_project (Create for \"it_project\" only)", "edit organisation (Edit for \"organisation\" only) Role \"pgp\": Role \"procurement\":", "for \"org_group\" only) User is allowed to access org_location (View", "only) User is allowed Edit on support if support is", "\"msg_keyword\" only) User is allowed to access safety_level (View for", "for \"time_wp_group\" only) User is allowed to access time_wp_summary_no (View", "User is allowed to edit customer_agreement (Edit for \"customer_agreement\" only)", "for \"contact\" only) User is allowed to access customer (View", "\"safety_level\" only) User is allowed to create severity (Create for", "edit category if he is responsible for it (Edit for", "as the record, it may also be seen (View for", "access vac_aliq (View for \"vac_aliq\" only) User is allowed to", "(Edit for \"room\" only) Role \"functional-role\": (Restore for \"user_functional_role\" only)", "with View permission (View for 
\"msg\" only) User is allowed", "'time_record') only) User is allowed Edit on (Edit for \"sap_cc\":", "for \"organisation\": ('domain_part',) only) User is allowed Edit on (Edit", "only) User is allowed to edit keyword (Edit for \"keyword\"", "to create it_request_type (Create for \"it_request_type\" only) User is allowed", "\"sub-login\": Role \"summary_view\": Role \"supportadmin\": User is allowed to access", "\"office\": (Restore for \"room\" only) (Retire for \"room\" only) User", "(View for \"organisation\" only) User is allowed to access overtime_period", "time_wp (Edit for \"time_wp\" only) User is allowed to edit", "the user is the department manager of the owner of", "User is allowed Edit on it_project if it_project is non-confidential", "'username') only) Users are allowed to view their own and", "permission via containers (View for \"issue\": ['activity', 'actor', 'area', 'category',", "\"contract_type\" only) User is allowed to access leave_submission (View for", "\"time_report\" only) User is allowed to edit time_report (Edit for", "only) Role \"dom-user-edit-gtt\": (Search for \"user_dynamic\" only) May only view/edit", "only) User is allowed to edit domain_permission (Edit for \"domain_permission\"", "record if they may see one of the time_records for", "(Create for \"time_project\" only) User is allowed to create time_project_status", "(Create for \"it_category\" only) User is allowed to create it_int_prio", "time_activity_perm (View for \"time_activity_perm\" only) User is allowed to access", "User or Timetracking by user may edit time_records owned by", "for \"vacation_correction\" only) Role \"issue_admin\": User is allowed Edit on", "to access mailgroup (View for \"mailgroup\" only) User is allowed", "time_record (Search for \"time_record\" only) User is allowed to search", "is allowed to access org_group (View for \"org_group\" only) User", "\"time_wp\" only) User is allowed to create time_wp_group (Create for", "User is allowed to access 
sup_prio (View for \"sup_prio\" only)", "is allowed to create customer_agreement (Create for \"customer_agreement\" only) User", "only) User is allowed to edit their queries (Edit for", "only) User is allowed to create reference (Create for \"reference\"", "is allowed to search for their own files (Search for", "'status', 'timezone', 'username') only) User is allowed View on (View", "their own files (View for \"file\" only) User may access", "allowed to edit sap_cc (Edit for \"sap_cc\" only) User is", "time_project (View for \"time_project\" only) User is allowed to access", "or User may view a daily_record (and time_records that are", "\"customer\" only) User is allowed to edit customer_agreement (Edit for", "through the web (Web Roles) Role \"hr-leave-approval\": User is allowed", "('visible',) only) User is allowed to edit several fields if", "to create time_activity_perm (Create for \"time_activity_perm\" only) User is allowed", "for \"issue\" only) User may get nosy messages for it_issue", "\"customer\" only) User is allowed to create customer_agreement (Create for", "'time_wp_summary_no', 'wp_no') only) User is allowed to retire their queries", "containers (View for \"issue\": ['activity', 'actor', 'area', 'category', 'closed', 'composed_of',", "in validity span of dynamic user record (Edit for \"user_dynamic\"", "to create department (Create for \"department\" only) User is allowed", "project responsible/deputy (Edit for \"time_wp\": ('bookers', 'description', 'epic_key', 'planned_effort', 'time_end',", "access it_project_status (View for \"it_project_status\" only) User is allowed to", "allowed to edit absence (Edit for \"absence\" only) User is", "only) User is allowed to create overtime_period (Create for \"overtime_period\"", "for \"user_contact\" only) User is allowed Edit on (Edit for", "\"contract_type\" only) User is allowed to create user_contact (Create for", "for \"work_location\" only) Role \"doc_admin\": User is allowed Edit on", "is allowed to 
access ext_tracker_type (View for \"ext_tracker_type\" only) Role", "for \"ext_tracker_type\" only) User is allowed to access keyword (View", "User is allowed to edit several fields if he is", "(Edit for \"sap_cc\" only) User is allowed to edit time_record", "\"analysis_result\" only) User is allowed to create contact (Create for", "\"keyword\" only) User is allowed to create kind (Create for", "(View for \"overtime_correction\" only) User is allowed to view time", "access sup_status (View for \"sup_status\" only) User is allowed to", "is allowed to access time_record (View for \"time_record\" only) User", "'is_public', 'name', 'project', 'responsible', 'time_end', 'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only)", "to create time_record (Create for \"time_record\" only) User is allowed", "= \"\"\" New Web users get the Roles \"User,Nosy\" New", "'time_start', 'time_wp_summary_no', 'travel', 'wp_no') only) User is allowed to search", "'creation', 'creator', 'cur_est_begin', 'cur_est_end', 'deadline', 'depends', 'doc_issue_status', 'earliest_start', 'effective_prio', 'effort_hours',", "manipulate user Roles through the web (Web Roles) Role \"hr-leave-approval\":", "for \"product\" only) User is allowed to access product_family (View", "allowed to access keyword (View for \"keyword\" only) User is", "\"issue\" only) User is allowed Edit on it_issue if it_issue", "Edit on (Edit for \"time_project\": ('group_lead', 'team_lead') only) User is", "'hide_message_files', 'lunch_duration', 'lunch_start', 'password', 'queries', 'realname', 'room', 'subst_active', 'substitute', 'timezone',", "own files (Search for \"file\" only) User is allowed to", "for \"overtime_period\": ('name', 'order') only) (Restore for \"room\" only) (Retire", "for \"user\": ('activity', 'actor', 'ad_domain', 'address', 'alternate_addresses', 'business_responsible', 'clearance_by', 'creation',", "may view user_dynamic records for ad_domain for which they are", "'time_record') only) User is 
allowed to access daily record if", "\"it_issue_status\" only) User is allowed to access it_prio (View for", "(Search for \"msg\": ('date', 'id') only) User is allowed Edit", "are in the domain_permission for the user (View for \"user\":", "only) User is allowed to access time_project_status (View for \"time_project_status\"", "\"msg\" only) User is allowed to create query (Create for", "('activity', 'actor', 'address', 'alternate_addresses', 'creation', 'creator', 'id', 'queries', 'realname', 'status',", "for \"user_dynamic\" only) User is allowed to access vacation_correction (View", "Role \"dom-user-edit-office\": User is allowed to create user_contact (Create for", "or user is on nosy list (View for \"it_issue\" only)", "\"msg\" only) User is allowed View on file if file", "is allowed to edit category if he is responsible for", "'cur_est_end', 'deadline', 'depends', 'doc_issue_status', 'earliest_start', 'effective_prio', 'effort_hours', 'external_users', 'files', 'files_affected',", "allowed to access vacation_report (View for \"vacation_report\" only) User is", "('status',) only) User is allowed to access contract_type (View for", "allowed to edit it_request_type (Edit for \"it_request_type\" only) User is", "(View for \"time_report\" only) User may use the email interface", "see time record if he is allowed to see all", "own leave submissions (Edit for \"leave_submission\": ('comment', 'comment_cancel', 'first_day', 'last_day',", "if it_issue is non-confidential or user is on nosy list", "to edit status_transition (Edit for \"status_transition\" only) User is allowed", "allowed View on (View for \"user\": ('business_responsible', 'department_temp', 'timetracking_by', 'vie_user',", "'product_family', 'project_type', 'reporting_group', 'work_location') only) User is allowed to access", "\"vacation_correction\" only) User is allowed to edit contract_type (Edit for", "\"user_contact\" only) User is allowed Edit on (Edit for \"msg\":", "\"it_project\" only) Role 
\"sec-incident-responsible\": User is allowed to access it_int_prio", "is allowed to view dynamic user data if he/she is", "'related_issues', 'related_support', 'release', 'responsible', 'return_type', 'sap_ref', 'send_to_customer', 'serial_number', 'set_first_reply', 'status',", "\"customer_agreement\" only) User is allowed to create mailgroup (Create for", "is allowed to edit contact (Edit for \"contact\" only) User", "user_contact (Edit for \"user_contact\" only) Users may view user_dynamic records", "may get nosy messages for issue (Nosy for \"issue\" only)", "on issue if issue is non-confidential or user is on", "access time_record (View for \"time_record\" only) User is allowed to", "user may edit time_records owned by user (Edit for \"time_record\"", "(View for \"time_activity\" only) User is allowed to access time_activity_perm", "only) User is allowed to access time_wp (View for \"time_wp\"", "for \"organisation\" only) User is allowed to create location (Create", "product_family (View for \"product_family\" only) User is allowed to access", "edit contract_type (Edit for \"contract_type\" only) User is allowed to", "'business_responsible', 'clearance_by', 'creation', 'creator', 'firstname', 'id', 'job_description', 'lastname', 'lunch_duration', 'lunch_start',", "edit mailgroup (Edit for \"mailgroup\" only) User may manipulate user", "'responsible', 'safety_level', 'severity', 'status', 'superseder', 'test_level', 'title'] only) External users", "an item with View permission (View for \"msg\" only) User", "only) User is allowed to create overtime_correction (Create for \"overtime_correction\"", "Role \"hr-org-location\": (Search for \"daily_record_freeze\" only) (Search for \"overtime_correction\" only)", "for \"ext_tracker_type\" only) User is allowed to create ext_msg (Create", "they may see one of the time_records for that day", "allowed to edit analysis_result (Edit for \"analysis_result\" only) User is", "edit user_functional_role (Edit for 
\"user_functional_role\" only) Role \"hr\": (Edit for", "User is allowed to edit category if he is responsible", "a transitive permission via containers (View for \"issue\": ['activity', 'actor',", "\"status_transition\" only) User is allowed to access summary_report (View for", "\"file\" only) User is allowed to access domain_permission (View for", "to access vacation_correction (View for \"vacation_correction\" only) User is allowed", "uc_type (Edit for \"uc_type\" only) Role \"organisation\": User is allowed", "to edit severity (Edit for \"severity\" only) User is allowed", "for \"safety_level\" only) User is allowed to edit severity (Edit", "'sex', 'status', 'subst_active', 'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user']", "for \"daily_record\" only) Role \"user_view\": User is allowed to access", "\"doc_issue_status\" only) User is allowed to create ext_tracker (Create for", "may access the xmlrpc interface (Xmlrpc Access) User may edit", "domain_permission for the user (Edit for \"user\": ['contacts', 'position_text', 'room']", "to edit customer (Edit for \"customer\" only) User is allowed", "('frozen',) only) User is allowed to edit location (Edit for", "edit vacation_correction (Edit for \"vacation_correction\" only) Role \"issue_admin\": User is", "User is allowed to create reporting_group (Create for \"reporting_group\" only)", "only) Search (Search for \"user_contact\" only) User is allowed Edit", "create customer (Create for \"customer\" only) User is allowed to", "only) User is allowed to edit support (Edit for \"support\"", "\"uc_type\" only) User is allowed to create user (Create for", "only) User is allowed to create msg_keyword (Create for \"msg_keyword\"", "user (Edit for \"user\": ['clearance_by', 'contacts', 'csv_delimiter', 'entry_date', 'firstname', 'hide_message_files',", "if he is time category owner or deputy (Edit for", "for \"doc\" only) User is allowed to edit doc_category (Edit", "allowed 
to edit vacation_correction (Edit for \"vacation_correction\" only) Role \"issue_admin\":", "(View) Role \"anonymous\": User may access the web interface (Web", "it_issue (Edit for \"it_issue\": ('responsible',) only) User is allowed to", "(Edit for \"auto_wp\" only) User is allowed to edit dynamic", "domain_permission for the user (Edit for \"user\": ['contacts', 'csv_delimiter', 'department_temp',", "\"issue\" only) User is allowed to view their own files", "(View for \"user\": ('business_responsible', 'department_temp', 'timetracking_by', 'vie_user', 'vie_user_bl_override', 'vie_user_ml') only)", "absence_type (Edit for \"absence_type\" only) User is allowed to edit", "queries (Retire for \"query\" only) User is allowed to search", "(Create for \"user_dynamic\" only) User is allowed to edit auto_wp", "in the same Org-Location as the record, it may also", "their own user functional role (View for \"user_functional_role\" only) User", "\"user_dynamic\": ('department', 'org_location') only) User is allowed View on file", "(View for \"time_record\" only) User is allowed to view work", "is allowed to view selected fields in work package if", "to access ext_tracker_type (View for \"ext_tracker_type\" only) Role \"msgsync\": (Search", "(Edit for \"organisation\" only) Role \"pgp\": Role \"procurement\": (View for", "of time category (View for \"time_project\" only) User may view", "(Create for \"mailgroup\" only) User is allowed to edit domain_permission", "daily_record_freeze (View for \"daily_record_freeze\" only) User is allowed to access", "is on nosy list (View for \"it_issue\" only) User is", "to access it_category (View for \"it_category\" only) User is allowed", "user (Restore for \"time_record\" only) User or Timetracking by user", "access contact_type (View for \"contact_type\" only) User is allowed to", "allowed to create ext_tracker (Create for \"ext_tracker\" only) User is", "(file created by user) (Edit for \"file\" only) Role \"user\":", "User is allowed 
to create uc_type (Create for \"uc_type\" only)", "organisation (Create for \"organisation\" only) User is allowed to edit", "it_project is non-confidential or user is on nosy list (Edit", "User is allowed to edit department (Edit for \"department\" only)", "access support (View for \"support\" only) User is allowed to", "User may use the email interface (Email Access) Users are", "is allowed to create support (Create for \"support\" only) User", "\"cost_center_status\" only) User is allowed to access customer (View for", "\"hr-leave-approval\": User is allowed Edit on (Edit for \"leave_submission\": ('status',)", "(Create for \"organisation\" only) User is allowed to create product_family", "View on msg if msg is linked from an item", "\"cc-permission\": (Restore for \"cost_center_permission_group\" only) (Retire for \"cost_center_permission_group\" only) User", "analysis_result (Edit for \"analysis_result\" only) User is allowed to edit", "'scale_seniority') only) User is allowed to access user_functional_role (View for", "the web (Web Roles) User may restore everything (Restore) User", "('group_lead', 'purchasing_agents', 'team_lead') only) User is allowed Edit on (Edit", "may edit own file (file created by user) (Edit for", "(Nosy for \"issue\" only) User may get nosy messages for", "allowed to create time_project (Create for \"time_project\" only) User is", "only) User is allowed to create it_int_prio (Create for \"it_int_prio\"", "to search it_issue (Search for \"it_issue\" only) User is allowed", "for \"file\" only) User may access the web interface (Web", "timetracking-by user (Edit for \"daily_record\": ('status', 'time_record') only) User is", "(View for \"vacation_correction\" only) User is allowed to create cost_center", "(Edit for \"cost_center_permission_group\" only) Role \"contact\": User is allowed to", "for \"keyword\" only) User is allowed to edit kind (Edit", "for \"daily_record_freeze\" only) User is allowed to view overtime information", "User 
is allowed to create status (Create for \"status\" only)", "kind (Edit for \"kind\" only) User is allowed to edit", "User is allowed to edit cost_center_group (Edit for \"cost_center_group\" only)", "is allowed Edit on (Edit for \"leave_submission\": ('status',) only) User", "only) User is allowed to edit issue (Edit for \"issue\"", "are allowed to access issue if they are on the", "for \"user\": ['contacts', 'position_text', 'room'] only) Role \"external\": (Search for", "for \"user\": ['room'] only) Role \"dom-user-edit-gtt\": (Search for \"user_dynamic\" only)", "\"daily_record_freeze\" only) User is allowed to view overtime information if", "Role \"issue_admin\": User is allowed Edit on msg if msg", "User is allowed Edit on (Edit for \"msg\": ('keywords',) only)", "(View for \"user\": ('contacts',) only) User is allowed View on", "'subst_active', 'substitute', 'supervisor', 'sync_foreign_key', 'timezone', 'tt_lines', 'username', 'vie_user'] only) Users", "for \"user\": ('nickname', 'status', 'username') only) User is allowed View", "supervisor or timetracking-by user (View for \"daily_record\" only) User is", "\"room\" only) (Retire for \"room\" only) User is allowed to", "User may use the email interface (Email Access) User may", "(View for \"room\" only) User is allowed to access safety_level", "(View for \"user\": ('business_responsible', 'planning_role', 'scale_seniority') only) User is allowed", "transitive permission via containers (Edit for \"issue\": ['activity', 'actor', 'area',", "allowed to create it_int_prio (Create for \"it_int_prio\" only) User is", "for the user (View for \"user_dynamic\" only) Users may view/edit", "\"room\" only) User is allowed to access safety_level (View for", "(View for \"customer_agreement\" only) User is allowed to access daily", "support (View for \"support\" only) User is allowed to create", "for \"cost_center_status\" only) User is allowed to create department (Create", "to create overtime_correction (Create for 
\"overtime_correction\" only) User is allowed", "(View for \"it_project\" only) User is allowed to create domain_permission", "is allowed to edit keyword (Edit for \"keyword\" only) User", "edit own leave submissions (Edit for \"leave_submission\": ('comment', 'comment_cancel', 'first_day',", "only) Users are allowed to view some of their details", "'realname', 'username') only) Users are allowed to view their own", "for \"daily_record\": ('status', 'time_record') only) User is allowed to access", "safety_level (View for \"safety_level\" only) User is allowed to access", "on (Edit for \"user\": ('business_responsible', 'scale_seniority') only) User is allowed", "(Web Roles) User may restore everything (Restore) User may retire", "\"domain_permission\" only) User is allowed to create it_category (Create for", "only) User is allowed to access reference (View for \"reference\"", "to access ext_tracker_type (View for \"ext_tracker_type\" only) User is allowed", "to access status_transition (View for \"status_transition\" only) User is allowed", "(Search for \"issue\" only) User is allowed to view their", "allowed to edit if he's the owner of the contact", "permission (Edit for \"msg\" only) User is allowed to access", "to access issue if they are on the list of", "\"overtime_correction\" only) User is allowed to access time_record (View for", "access daily record if he is owner or supervisor or", "to access safety_level (View for \"safety_level\" only) User is allowed", "(Create for \"sap_cc\" only) User is allowed to create time_record", "allowed to edit auto_wp (Edit for \"auto_wp\" only) User is", "of dynamic user record (Edit for \"user_dynamic\" only) User is", "Edit on (Edit for \"organisation\": ('domain_part',) only) User is allowed", "only) User is allowed to create user (Create for \"user\"", "to access daily_record (View for \"daily_record\" only) User is allowed", "\"contact\": User is allowed to create contact (Create for \"contact\"", "only) User is 
allowed to create user_contact (Create for \"user_contact\"" ]
[ "tc -= 1 best = 0 n, x = map(int,", "x = map(int, input().split()) for i in range(n): s, r", "n, x = map(int, input().split()) for i in range(n): s,", "in range(n): s, r = map(int, input().split()) if x >=", "for i in range(n): s, r = map(int, input().split()) if", "<filename>CodeChef/problems/IMDB/main.py tc = int(input()) while tc: tc -= 1 best", "= map(int, input().split()) if x >= s: best = max(best,", "1 best = 0 n, x = map(int, input().split()) for", "tc = int(input()) while tc: tc -= 1 best =", "r = map(int, input().split()) if x >= s: best =", "range(n): s, r = map(int, input().split()) if x >= s:", "int(input()) while tc: tc -= 1 best = 0 n,", "-= 1 best = 0 n, x = map(int, input().split())", "= 0 n, x = map(int, input().split()) for i in", "map(int, input().split()) for i in range(n): s, r = map(int,", "i in range(n): s, r = map(int, input().split()) if x", "= map(int, input().split()) for i in range(n): s, r =", "0 n, x = map(int, input().split()) for i in range(n):", "map(int, input().split()) if x >= s: best = max(best, r)", "input().split()) for i in range(n): s, r = map(int, input().split())", "= int(input()) while tc: tc -= 1 best = 0", "tc: tc -= 1 best = 0 n, x =", "input().split()) if x >= s: best = max(best, r) print(best)", "best = 0 n, x = map(int, input().split()) for i", "while tc: tc -= 1 best = 0 n, x", "s, r = map(int, input().split()) if x >= s: best" ]
[ "test_find_user_dict_mode(self): w = self.wrp w.query_mode = 'dict' res = w.insert_user('John',", "test_find_user_nonexistent(self): w = self.wrp user = w.find_user(99) self.assertIsNone(user) def test_get_users_tuple(self):", "w.query_mode = 'dict' res = w.insert_user('John', 'Doe') last_id = res.lastrowid", "= w.find_user(99) self.assertIsNone(user) def test_get_users_tuple(self): w = self.wrp w.query_mode =", "= ? WHERE first_name = ?\", ['Smith', 'John']) self.assertEqual(rows, 1)", "import TestCase from tests.base import * class TestSQLiteWrapper(PrivexDBTestBase): def test_tables_created(self):", "def test_tables_created(self): w = self.wrp self.assertEqual(w.db, ':memory:') tables = w.list_tables()", "= 'dict' res = w.insert_user('John', 'Doe') last_id = res.lastrowid rows", "tables) def test_insert_find_user(self): w = self.wrp w.query_mode = 'flat' res", "w.find_user(99) self.assertIsNone(user) def test_get_users_tuple(self): w = self.wrp w.query_mode = 'flat'", "'dict' res = w.insert_user('John', 'Doe') last_id = res.lastrowid rows =", "= w.insert('users', first_name='Dave', last_name='Johnson') self.assertEqual(res.lastrowid, 1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'],", "def test_insert_helper(self): w = self.wrp w.query_mode = 'dict' res =", "self.assertEqual(users[0]['first_name'], 'John') self.assertEqual(users[1]['first_name'], 'Jane') self.assertEqual(users[1]['last_name'], 'Doe') self.assertEqual(users[2]['last_name'], 'Johnson') def test_insert_helper(self):", "w.insert_user('Dave', 'Johnson') users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0]['first_name'], 'John') self.assertEqual(users[1]['first_name'],", "'Doe') def test_find_user_nonexistent(self): w = self.wrp user = w.find_user(99) self.assertIsNone(user)", "users SET last_name = ? 
WHERE first_name = ?\", ['Smith',", "def test_get_users_dict(self): w = self.wrp w.query_mode = 'dict' w.insert_user('John', 'Doe')", "tables) self.assertIn('items', tables) w.drop_schemas() tables = w.list_tables() self.assertNotIn('users', tables) self.assertNotIn('items',", "? WHERE first_name = ?\", ['Smith', 'John']) self.assertEqual(rows, 1) john", "self.wrp tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables) w.drop_schemas() tables", "tables) self.assertNotIn('items', tables) def test_insert_find_user(self): w = self.wrp w.query_mode =", "?\", ['Smith', 'John']) self.assertEqual(rows, 1) john = w.find_user(last_id) self.assertEqual(john['last_name'], 'Smith')", ":class:`.ExampleWrapper` \"\"\" # from unittest import TestCase from tests.base import", "res = w.insert_user('John', 'Doe') last_id = res.lastrowid rows = w.action(\"UPDATE", "self.assertEqual(user['first_name'], 'John') self.assertEqual(user['last_name'], 'Doe') def test_find_user_nonexistent(self): w = self.wrp user", "* class TestSQLiteWrapper(PrivexDBTestBase): def test_tables_created(self): w = self.wrp self.assertEqual(w.db, ':memory:')", "w = self.wrp w.query_mode = 'flat' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe')", "self.wrp w.query_mode = 'dict' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1)", "1) user = w.find_user(res.lastrowid) self.assertEqual(user[1], 'John') self.assertEqual(user[2], 'Doe') def test_action_update(self):", "\"\"\" Tests related to :class:`.SqliteWrapper` / :class:`.ExampleWrapper` \"\"\" # from", "to :class:`.SqliteWrapper` / :class:`.ExampleWrapper` \"\"\" # from unittest import TestCase", "w.find_user(res.lastrowid) self.assertEqual(user[1], 'John') self.assertEqual(user[2], 'Doe') def test_action_update(self): w = self.wrp", "= w.action(\"UPDATE users SET last_name = ? 
WHERE first_name =", "w = self.wrp user = w.find_user(99) self.assertIsNone(user) def test_get_users_tuple(self): w", "w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0][1],", "users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0]['first_name'], 'John') self.assertEqual(users[1]['first_name'], 'Jane') self.assertEqual(users[1]['last_name'],", "w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables) def test_tables_drop(self): w = self.wrp", "rows = w.action(\"UPDATE users SET last_name = ? WHERE first_name", "1) john = w.find_user(last_id) self.assertEqual(john['last_name'], 'Smith') def test_find_user_dict_mode(self): w =", "= w.find_user(last_id) self.assertEqual(john['last_name'], 'Smith') def test_find_user_dict_mode(self): w = self.wrp w.query_mode", "= self.wrp w.query_mode = 'flat' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount,", "3) self.assertEqual(users[0][1], 'John') self.assertEqual(users[1][1], 'Jane') self.assertEqual(users[1][2], 'Doe') self.assertEqual(users[2][2], 'Johnson') def", "w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0]['first_name'],", "= self.wrp w.query_mode = 'dict' res = w.insert('users', first_name='Dave', last_name='Johnson')", "'dict' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid)", "self.assertEqual(users[1]['last_name'], 'Doe') self.assertEqual(users[2]['last_name'], 'Johnson') def test_insert_helper(self): w = self.wrp w.query_mode", "self.wrp w.query_mode = 'dict' res = w.insert_user('John', 'Doe') last_id =", "= w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user[1], 'John')", "test_tables_created(self): w = self.wrp self.assertEqual(w.db, 
':memory:') tables = w.list_tables() self.assertIn('users',", "self.assertEqual(users[1][1], 'Jane') self.assertEqual(users[1][2], 'Doe') self.assertEqual(users[2][2], 'Johnson') def test_get_users_dict(self): w =", "test_action_update(self): w = self.wrp w.query_mode = 'dict' res = w.insert_user('John',", "self.assertEqual(len(users), 3) self.assertEqual(users[0][1], 'John') self.assertEqual(users[1][1], 'Jane') self.assertEqual(users[1][2], 'Doe') self.assertEqual(users[2][2], 'Johnson')", "= w.insert_user('John', 'Doe') last_id = res.lastrowid rows = w.action(\"UPDATE users", "self.wrp w.query_mode = 'dict' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson')", "= 'flat' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user =", "self.assertEqual(rows, 1) john = w.find_user(last_id) self.assertEqual(john['last_name'], 'Smith') def test_find_user_dict_mode(self): w", "self.assertNotIn('items', tables) def test_insert_find_user(self): w = self.wrp w.query_mode = 'flat'", "WHERE first_name = ?\", ['Smith', 'John']) self.assertEqual(rows, 1) john =", "self.assertEqual(user[2], 'Doe') def test_action_update(self): w = self.wrp w.query_mode = 'dict'", "self.assertEqual(user['last_name'], 'Doe') def test_find_user_nonexistent(self): w = self.wrp user = w.find_user(99)", "first_name='Dave', last_name='Johnson') self.assertEqual(res.lastrowid, 1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'Dave') self.assertEqual(user['last_name'],", ":class:`.SqliteWrapper` / :class:`.ExampleWrapper` \"\"\" # from unittest import TestCase from", "'dict' res = w.insert('users', first_name='Dave', last_name='Johnson') self.assertEqual(res.lastrowid, 1) user =", "= w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'John') self.assertEqual(user['last_name'], 'Doe') def test_find_user_nonexistent(self): w =", "test_insert_find_user(self): w = self.wrp w.query_mode = 'flat' res = 
w.insert_user('John',", "self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'John') self.assertEqual(user['last_name'], 'Doe') def", "= 'dict' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users =", "test_get_users_dict(self): w = self.wrp w.query_mode = 'dict' w.insert_user('John', 'Doe') w.insert_user('Jane',", "'Smith') def test_find_user_dict_mode(self): w = self.wrp w.query_mode = 'dict' res", "self.wrp w.query_mode = 'dict' res = w.insert('users', first_name='Dave', last_name='Johnson') self.assertEqual(res.lastrowid,", "= w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'John')", "test_get_users_tuple(self): w = self.wrp w.query_mode = 'flat' w.insert_user('John', 'Doe') w.insert_user('Jane',", "def test_find_user_dict_mode(self): w = self.wrp w.query_mode = 'dict' res =", "= 'dict' res = w.insert('users', first_name='Dave', last_name='Johnson') self.assertEqual(res.lastrowid, 1) user", "tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables) def test_tables_drop(self): w", "user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'John') self.assertEqual(user['last_name'], 'Doe') def test_find_user_nonexistent(self): w", "'flat' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users = list(w.get_users())", "'John']) self.assertEqual(rows, 1) john = w.find_user(last_id) self.assertEqual(john['last_name'], 'Smith') def test_find_user_dict_mode(self):", "def test_insert_find_user(self): w = self.wrp w.query_mode = 'flat' res =", "self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user[1], 'John') self.assertEqual(user[2], 'Doe') def", "'Johnson') users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0][1], 'John') 
self.assertEqual(users[1][1], 'Jane')", "class TestSQLiteWrapper(PrivexDBTestBase): def test_tables_created(self): w = self.wrp self.assertEqual(w.db, ':memory:') tables", "last_id = res.lastrowid rows = w.action(\"UPDATE users SET last_name =", "'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users = list(w.get_users()) self.assertEqual(len(users), 3)", "self.assertIn('users', tables) self.assertIn('items', tables) def test_tables_drop(self): w = self.wrp tables", "w = self.wrp self.assertEqual(w.db, ':memory:') tables = w.list_tables() self.assertIn('users', tables)", "SET last_name = ? WHERE first_name = ?\", ['Smith', 'John'])", "tables) def test_tables_drop(self): w = self.wrp tables = w.list_tables() self.assertIn('users',", "'flat' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid)", "res = w.insert('users', first_name='Dave', last_name='Johnson') self.assertEqual(res.lastrowid, 1) user = w.find_user(res.lastrowid)", "self.assertEqual(user[1], 'John') self.assertEqual(user[2], 'Doe') def test_action_update(self): w = self.wrp w.query_mode", "self.wrp self.assertEqual(w.db, ':memory:') tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables)", "w.insert_user('John', 'Doe') last_id = res.lastrowid rows = w.action(\"UPDATE users SET", "= list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0]['first_name'], 'John') self.assertEqual(users[1]['first_name'], 'Jane') self.assertEqual(users[1]['last_name'], 'Doe')", "w = self.wrp w.query_mode = 'dict' res = w.insert('users', first_name='Dave',", "self.assertEqual(users[2]['last_name'], 'Johnson') def test_insert_helper(self): w = self.wrp w.query_mode = 'dict'", "w = self.wrp w.query_mode = 'dict' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe')", "= w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables) w.drop_schemas() tables = w.list_tables()", "= self.wrp 
w.query_mode = 'dict' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount,", "= self.wrp user = w.find_user(99) self.assertIsNone(user) def test_get_users_tuple(self): w =", "self.assertIsNone(user) def test_get_users_tuple(self): w = self.wrp w.query_mode = 'flat' w.insert_user('John',", "'Doe') self.assertEqual(users[2]['last_name'], 'Johnson') def test_insert_helper(self): w = self.wrp w.query_mode =", "from unittest import TestCase from tests.base import * class TestSQLiteWrapper(PrivexDBTestBase):", "'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'John') self.assertEqual(user['last_name'], 'Doe')", "self.wrp w.query_mode = 'flat' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1)", "'Johnson') def test_get_users_dict(self): w = self.wrp w.query_mode = 'dict' w.insert_user('John',", "self.wrp w.query_mode = 'flat' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson')", "w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user[1], 'John') self.assertEqual(user[2],", "= ?\", ['Smith', 'John']) self.assertEqual(rows, 1) john = w.find_user(last_id) self.assertEqual(john['last_name'],", "test_insert_helper(self): w = self.wrp w.query_mode = 'dict' res = w.insert('users',", "import * class TestSQLiteWrapper(PrivexDBTestBase): def test_tables_created(self): w = self.wrp self.assertEqual(w.db,", "w.action(\"UPDATE users SET last_name = ? 
WHERE first_name = ?\",", "self.assertIn('users', tables) self.assertIn('items', tables) w.drop_schemas() tables = w.list_tables() self.assertNotIn('users', tables)", "tables = w.list_tables() self.assertNotIn('users', tables) self.assertNotIn('items', tables) def test_insert_find_user(self): w", "w = self.wrp w.query_mode = 'dict' res = w.insert_user('John', 'Doe')", "w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables) w.drop_schemas() tables = w.list_tables() self.assertNotIn('users',", "self.assertIn('items', tables) w.drop_schemas() tables = w.list_tables() self.assertNotIn('users', tables) self.assertNotIn('items', tables)", "self.assertEqual(users[1][2], 'Doe') self.assertEqual(users[2][2], 'Johnson') def test_get_users_dict(self): w = self.wrp w.query_mode", "w.query_mode = 'dict' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user", "'Doe') w.insert_user('Dave', 'Johnson') users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0][1], 'John')", "<gh_stars>1-10 \"\"\" Tests related to :class:`.SqliteWrapper` / :class:`.ExampleWrapper` \"\"\" #", "res.lastrowid rows = w.action(\"UPDATE users SET last_name = ? 
WHERE", "['Smith', 'John']) self.assertEqual(rows, 1) john = w.find_user(last_id) self.assertEqual(john['last_name'], 'Smith') def", "self.assertEqual(users[2][2], 'Johnson') def test_get_users_dict(self): w = self.wrp w.query_mode = 'dict'", "self.assertNotIn('users', tables) self.assertNotIn('items', tables) def test_insert_find_user(self): w = self.wrp w.query_mode", "tests.base import * class TestSQLiteWrapper(PrivexDBTestBase): def test_tables_created(self): w = self.wrp", "w.query_mode = 'dict' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users", "w.query_mode = 'dict' res = w.insert('users', first_name='Dave', last_name='Johnson') self.assertEqual(res.lastrowid, 1)", "'John') self.assertEqual(users[1]['first_name'], 'Jane') self.assertEqual(users[1]['last_name'], 'Doe') self.assertEqual(users[2]['last_name'], 'Johnson') def test_insert_helper(self): w", "w.insert_user('Dave', 'Johnson') users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0][1], 'John') self.assertEqual(users[1][1],", "from tests.base import * class TestSQLiteWrapper(PrivexDBTestBase): def test_tables_created(self): w =", "last_name = ? 
WHERE first_name = ?\", ['Smith', 'John']) self.assertEqual(rows,", "# from unittest import TestCase from tests.base import * class", "= w.list_tables() self.assertNotIn('users', tables) self.assertNotIn('items', tables) def test_insert_find_user(self): w =", "= 'flat' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users =", "def test_tables_drop(self): w = self.wrp tables = w.list_tables() self.assertIn('users', tables)", "list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0][1], 'John') self.assertEqual(users[1][1], 'Jane') self.assertEqual(users[1][2], 'Doe') self.assertEqual(users[2][2],", "unittest import TestCase from tests.base import * class TestSQLiteWrapper(PrivexDBTestBase): def", "= self.wrp self.assertEqual(w.db, ':memory:') tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items',", "\"\"\" # from unittest import TestCase from tests.base import *", "= self.wrp w.query_mode = 'flat' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave',", "res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'],", "= self.wrp w.query_mode = 'dict' res = w.insert_user('John', 'Doe') last_id", "w = self.wrp tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables)", "related to :class:`.SqliteWrapper` / :class:`.ExampleWrapper` \"\"\" # from unittest import", "'Johnson') def test_insert_helper(self): w = self.wrp w.query_mode = 'dict' res", "'dict' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users = list(w.get_users())", "3) self.assertEqual(users[0]['first_name'], 'John') self.assertEqual(users[1]['first_name'], 'Jane') self.assertEqual(users[1]['last_name'], 'Doe') self.assertEqual(users[2]['last_name'], 'Johnson') def", "tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items', 
tables) w.drop_schemas() tables =", "= w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables) def test_tables_drop(self): w =", "last_name='Johnson') self.assertEqual(res.lastrowid, 1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'Dave') self.assertEqual(user['last_name'], 'Johnson')", "w.list_tables() self.assertNotIn('users', tables) self.assertNotIn('items', tables) def test_insert_find_user(self): w = self.wrp", "w.insert('users', first_name='Dave', last_name='Johnson') self.assertEqual(res.lastrowid, 1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'Dave')", "w.query_mode = 'flat' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user", "'John') self.assertEqual(user['last_name'], 'Doe') def test_find_user_nonexistent(self): w = self.wrp user =", "test_tables_drop(self): w = self.wrp tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items',", "w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users = list(w.get_users()) self.assertEqual(len(users),", "self.assertEqual(john['last_name'], 'Smith') def test_find_user_dict_mode(self): w = self.wrp w.query_mode = 'dict'", "self.assertEqual(len(users), 3) self.assertEqual(users[0]['first_name'], 'John') self.assertEqual(users[1]['first_name'], 'Jane') self.assertEqual(users[1]['last_name'], 'Doe') self.assertEqual(users[2]['last_name'], 'Johnson')", "w.query_mode = 'flat' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave', 'Johnson') users", "w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'John') self.assertEqual(user['last_name'], 'Doe') def test_find_user_nonexistent(self): w = self.wrp", "/ :class:`.ExampleWrapper` \"\"\" # from unittest import TestCase from tests.base", "res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user[1],", "TestCase from 
tests.base import * class TestSQLiteWrapper(PrivexDBTestBase): def test_tables_created(self): w", "= res.lastrowid rows = w.action(\"UPDATE users SET last_name = ?", "john = w.find_user(last_id) self.assertEqual(john['last_name'], 'Smith') def test_find_user_dict_mode(self): w = self.wrp", "TestSQLiteWrapper(PrivexDBTestBase): def test_tables_created(self): w = self.wrp self.assertEqual(w.db, ':memory:') tables =", "'Johnson') users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0]['first_name'], 'John') self.assertEqual(users[1]['first_name'], 'Jane')", "first_name = ?\", ['Smith', 'John']) self.assertEqual(rows, 1) john = w.find_user(last_id)", "'Doe') w.insert_user('Dave', 'Johnson') users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0]['first_name'], 'John')", "Tests related to :class:`.SqliteWrapper` / :class:`.ExampleWrapper` \"\"\" # from unittest", "def test_action_update(self): w = self.wrp w.query_mode = 'dict' res =", "user = w.find_user(99) self.assertIsNone(user) def test_get_users_tuple(self): w = self.wrp w.query_mode", "= list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0][1], 'John') self.assertEqual(users[1][1], 'Jane') self.assertEqual(users[1][2], 'Doe')", "tables) w.drop_schemas() tables = w.list_tables() self.assertNotIn('users', tables) self.assertNotIn('items', tables) def", "def test_get_users_tuple(self): w = self.wrp w.query_mode = 'flat' w.insert_user('John', 'Doe')", "= 'dict' res = w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user =", "self.assertEqual(w.db, ':memory:') tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables) def", "'John') self.assertEqual(user[2], 'Doe') def test_action_update(self): w = self.wrp w.query_mode =", "def test_find_user_nonexistent(self): w = self.wrp user = w.find_user(99) self.assertIsNone(user) def", "'Doe') self.assertEqual(users[2][2], 'Johnson') def 
test_get_users_dict(self): w = self.wrp w.query_mode =", "= w.find_user(res.lastrowid) self.assertEqual(user[1], 'John') self.assertEqual(user[2], 'Doe') def test_action_update(self): w =", "w.find_user(last_id) self.assertEqual(john['last_name'], 'Smith') def test_find_user_dict_mode(self): w = self.wrp w.query_mode =", "w.insert_user('John', 'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'John') self.assertEqual(user['last_name'],", "w.drop_schemas() tables = w.list_tables() self.assertNotIn('users', tables) self.assertNotIn('items', tables) def test_insert_find_user(self):", "user = w.find_user(res.lastrowid) self.assertEqual(user[1], 'John') self.assertEqual(user[2], 'Doe') def test_action_update(self): w", "'Doe') self.assertEqual(res.rowcount, 1) user = w.find_user(res.lastrowid) self.assertEqual(user[1], 'John') self.assertEqual(user[2], 'Doe')", "'Jane') self.assertEqual(users[1][2], 'Doe') self.assertEqual(users[2][2], 'Johnson') def test_get_users_dict(self): w = self.wrp", "self.assertEqual(users[1]['first_name'], 'Jane') self.assertEqual(users[1]['last_name'], 'Doe') self.assertEqual(users[2]['last_name'], 'Johnson') def test_insert_helper(self): w =", "'Doe') last_id = res.lastrowid rows = w.action(\"UPDATE users SET last_name", "= self.wrp w.query_mode = 'dict' w.insert_user('John', 'Doe') w.insert_user('Jane', 'Doe') w.insert_user('Dave',", "tables) self.assertIn('items', tables) def test_tables_drop(self): w = self.wrp tables =", "self.assertIn('items', tables) def test_tables_drop(self): w = self.wrp tables = w.list_tables()", "':memory:') tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables) def test_tables_drop(self):", "'Jane') self.assertEqual(users[1]['last_name'], 'Doe') self.assertEqual(users[2]['last_name'], 'Johnson') def test_insert_helper(self): w = self.wrp", "1) user = w.find_user(res.lastrowid) self.assertEqual(user['first_name'], 'John') 
self.assertEqual(user['last_name'], 'Doe') def test_find_user_nonexistent(self):", "w = self.wrp w.query_mode = 'flat' res = w.insert_user('John', 'Doe')", "list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0]['first_name'], 'John') self.assertEqual(users[1]['first_name'], 'Jane') self.assertEqual(users[1]['last_name'], 'Doe') self.assertEqual(users[2]['last_name'],", "users = list(w.get_users()) self.assertEqual(len(users), 3) self.assertEqual(users[0][1], 'John') self.assertEqual(users[1][1], 'Jane') self.assertEqual(users[1][2],", "self.wrp user = w.find_user(99) self.assertIsNone(user) def test_get_users_tuple(self): w = self.wrp", "= self.wrp tables = w.list_tables() self.assertIn('users', tables) self.assertIn('items', tables) w.drop_schemas()", "'Doe') def test_action_update(self): w = self.wrp w.query_mode = 'dict' res", "'John') self.assertEqual(users[1][1], 'Jane') self.assertEqual(users[1][2], 'Doe') self.assertEqual(users[2][2], 'Johnson') def test_get_users_dict(self): w", "self.assertEqual(users[0][1], 'John') self.assertEqual(users[1][1], 'Jane') self.assertEqual(users[1][2], 'Doe') self.assertEqual(users[2][2], 'Johnson') def test_get_users_dict(self):" ]
[ "variant('build_dev_tests', default=False, description='Build developer test routines') variant('build_tests', default=False, description='Build test", "variant('zfp', default=True, description='Build with support for compression using ZFP') variant('c_interface',", "libs.joined(\";\"), ] if spec.satisfies('@:3.9.999'): if '+mpi' in spec: args.extend([ '-DCMAKE_C_COMPILER=%s'", "ZFP') variant('c_interface', default=True, description='Enable C interface') variant('count_flops', default=False, description='Build with", "when='@:3.9.999') conflicts('+zfp', when='@:3.9.999') patch('intel-19-compile.patch', when='@3.1.1') def cmake_args(self): spec = self.spec", "on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s' % on_off('+parmetis'), '-DTPL_ENABLE_SCOTCH=%s' % on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s' % on_off('+butterflypack'),", "MPI') variant('openmp', default=True, description='Enable thread parallellism via tasking with OpenMP')", "ParMetis') variant('scotch', default=False, description='Enable use of Scotch') variant('butterflypack', default=True, description='Enable", "'-DSTRUMPACK_USE_CUDA=%s' % on_off('+cuda') ]) args.extend([ '-DBUILD_SHARED_LIBS=%s' % on_off('+shared') ]) return", "depends_on('blas') depends_on('lapack') depends_on('scalapack', when='+mpi') depends_on('metis') depends_on('parmetis', when='+parmetis') depends_on('scotch~metis', when='+scotch') depends_on('scotch~metis+mpi',", "% on_off('+c_interface'), ]) if spec.satisfies('@4.0.0:'): args.extend([ '-DSTRUMPACK_USE_CUDA=%s' % on_off('+cuda') ])", "# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other", "depends_on('cuda', when='@4.0.0: +cuda') depends_on('zfp', when='+zfp') conflicts('+parmetis', when='~mpi') conflicts('+butterflypack', when='~mpi') conflicts('+butterflypack',", "Scotch') variant('butterflypack', default=True, description='Enable use of ButterflyPACK') variant('zfp', default=True, description='Build", "spec.satisfies('@4.0.0:'): args.extend([ 
'-DSTRUMPACK_USE_CUDA=%s' % on_off('+cuda') ]) args.extend([ '-DBUILD_SHARED_LIBS=%s' % on_off('+shared')", "+butterflypack+mpi') depends_on('butterflypack@1.2.0:', when='@4.0.0: +butterflypack+mpi') depends_on('cuda', when='@4.0.0: +cuda') depends_on('zfp', when='+zfp') conflicts('+parmetis',", "conflicts('+parmetis', when='~mpi') conflicts('+butterflypack', when='~mpi') conflicts('+butterflypack', when='@:3.2.0') conflicts('+cuda', when='@:3.9.999') conflicts('+zfp', when='@:3.9.999')", "STRUctured Matrix PACKage - provides linear solvers for sparse matrices", "systems which result from the discretization of a partial differential", "% on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s' % on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s' % on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s' %", "\"\"\"STRUMPACK -- STRUctured Matrix PACKage - provides linear solvers for", "from the discretization of a partial differential equation, but is", "args.extend([ '-DSTRUMPACK_USE_CUDA=%s' % on_off('+cuda') ]) args.extend([ '-DBUILD_SHARED_LIBS=%s' % on_off('+shared') ])", "MIT) from spack import * class Strumpack(CMakePackage, CudaPackage): \"\"\"STRUMPACK --", "variant('count_flops', default=False, description='Build with flop counters') variant('task_timers', default=False, description='Build with", "when='+zfp') conflicts('+parmetis', when='~mpi') conflicts('+butterflypack', when='~mpi') conflicts('+butterflypack', when='@:3.2.0') conflicts('+cuda', when='@:3.9.999') conflicts('+zfp',", "default=False, description='Build test routines') # TODO: add a slate variant", "Security, LLC and other # Spack Project Developers. 
See the", "preconditioner is mostly aimed at large sparse linear systems which", "depends_on('scotch~metis', when='+scotch') depends_on('scotch~metis+mpi', when='+scotch+mpi') depends_on('butterflypack@1.1.0', when='@3.3.0:3.9.999 +butterflypack+mpi') depends_on('butterflypack@1.2.0:', when='@4.0.0: +butterflypack+mpi')", "solvers.\"\"\" homepage = \"http://portal.nersc.gov/project/sparse/strumpack\" url = \"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\" git = \"https://github.com/pghysels/STRUMPACK.git\"", "and BiCGStab iterative solvers.\"\"\" homepage = \"http://portal.nersc.gov/project/sparse/strumpack\" url = \"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\"", "large sparse linear systems which result from the discretization of", "STRUMPACK also provides preconditioned GMRES and BiCGStab iterative solvers.\"\"\" homepage", "file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) from", "# Spack Project Developers. See the top-level COPYRIGHT file for", "(Apache-2.0 OR MIT) from spack import * class Strumpack(CMakePackage, CudaPackage):", "exhibit some kind of low-rank property. 
It provides a distributed", "\"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\" git = \"https://github.com/pghysels/STRUMPACK.git\" maintainers = ['pghysels'] version('master', branch='master') version('5.0.0',", "% on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s' % on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s' % on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s' %", "developer test routines') variant('build_tests', default=False, description='Build test routines') # TODO:", "= ['pghysels'] version('master', branch='master') version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7')", "CudaPackage): \"\"\"STRUMPACK -- STRUctured Matrix PACKage - provides linear solvers", "depends_on('scotch~metis+mpi', when='+scotch+mpi') depends_on('butterflypack@1.1.0', when='@3.3.0:3.9.999 +butterflypack+mpi') depends_on('butterflypack@1.2.0:', when='@4.0.0: +butterflypack+mpi') depends_on('cuda', when='@4.0.0:", "when='+mpi') depends_on('blas') depends_on('lapack') depends_on('scalapack', when='+mpi') depends_on('metis') depends_on('parmetis', when='+parmetis') depends_on('scotch~metis', when='+scotch')", "homepage = \"http://portal.nersc.gov/project/sparse/strumpack\" url = \"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\" git = \"https://github.com/pghysels/STRUMPACK.git\" maintainers", "'-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack']. libs.joined(\";\"), ] if spec.satisfies('@:3.9.999'): if '+mpi' in", "differential equation, but is not limited to any particular type", "'ON' if varstr in spec else 'OFF' args = [", "% spec['lapack'].libs.joined(\";\"), '-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack']. 
libs.joined(\";\"), ] if spec.satisfies('@:3.9.999'): if", "'-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc ]) args.extend([ '-DSTRUMPACK_C_INTERFACE=%s' % on_off('+c_interface'), ]) if", "equation, but is not limited to any particular type of", "BiCGStab iterative solvers.\"\"\" homepage = \"http://portal.nersc.gov/project/sparse/strumpack\" url = \"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\" git", "of a partial differential equation, but is not limited to", "routines') variant('build_tests', default=False, description='Build test routines') # TODO: add a", "type='build') depends_on('mpi', when='+mpi') depends_on('blas') depends_on('lapack') depends_on('scalapack', when='+mpi') depends_on('metis') depends_on('parmetis', when='+parmetis')", "to any particular type of problem. STRUMPACK also provides preconditioned", "on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s' % on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s' % on_off('+parmetis'), '-DTPL_ENABLE_SCOTCH=%s' % on_off('+scotch'),", "when='+scotch+mpi') depends_on('butterflypack@1.1.0', when='@3.3.0:3.9.999 +butterflypack+mpi') depends_on('butterflypack@1.2.0:', when='@4.0.0: +butterflypack+mpi') depends_on('cuda', when='@4.0.0: +cuda')", "cmake_args(self): spec = self.spec def on_off(varstr): return 'ON' if varstr", "i.e., matrices that exhibit some kind of low-rank property. It", "limited to any particular type of problem. 
STRUMPACK also provides", "CUDA support') variant('parmetis', default=True, description='Enable use of ParMetis') variant('scotch', default=False,", "* class Strumpack(CMakePackage, CudaPackage): \"\"\"STRUMPACK -- STRUctured Matrix PACKage -", "with timers for internal routines') variant('build_dev_tests', default=False, description='Build developer test", "'-DSTRUMPACK_TASK_TIMERS=%s' % on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s' % on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s' % on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s'", "conflicts('+butterflypack', when='@:3.2.0') conflicts('+cuda', when='@:3.9.999') conflicts('+zfp', when='@:3.9.999') patch('intel-19-compile.patch', when='@3.1.1') def cmake_args(self):", "sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared', default=False, description='Build shared libraries')", "depends_on('cmake@3.11:', type='build') depends_on('mpi', when='+mpi') depends_on('blas') depends_on('lapack') depends_on('scalapack', when='+mpi') depends_on('metis') depends_on('parmetis',", "for internal routines') variant('build_dev_tests', default=False, description='Build developer test routines') variant('build_tests',", "default=True, description='Enable use of ButterflyPACK') variant('zfp', default=True, description='Build with support", "version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared', default=False, description='Build shared", "spack import * class Strumpack(CMakePackage, CudaPackage): \"\"\"STRUMPACK -- STRUctured Matrix", "% on_off('+cuda') ]) 
args.extend([ '-DBUILD_SHARED_LIBS=%s' % on_off('+shared') ]) return args", "See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier:", "and other # Spack Project Developers. See the top-level COPYRIGHT", "conflicts('+zfp', when='@:3.9.999') patch('intel-19-compile.patch', when='@3.1.1') def cmake_args(self): spec = self.spec def", "'-DSTRUMPACK_USE_OPENMP=%s' % on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s' % on_off('+parmetis'), '-DTPL_ENABLE_SCOTCH=%s' % on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s'", "spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc ]) args.extend([ '-DSTRUMPACK_C_INTERFACE=%s' % on_off('+c_interface'), ])", "is mostly aimed at large sparse linear systems which result", "other # Spack Project Developers. See the top-level COPYRIGHT file", "depends_on('butterflypack@1.1.0', when='@3.3.0:3.9.999 +butterflypack+mpi') depends_on('butterflypack@1.2.0:', when='@4.0.0: +butterflypack+mpi') depends_on('cuda', when='@4.0.0: +cuda') depends_on('zfp',", "the discretization of a partial differential equation, but is not", "= \"http://portal.nersc.gov/project/sparse/strumpack\" url = \"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\" git = \"https://github.com/pghysels/STRUMPACK.git\" maintainers =", "for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack", "via tasking with OpenMP') variant('cuda', default=True, description='Enable CUDA support') variant('parmetis',", "depends_on('mpi', when='+mpi') depends_on('blas') depends_on('lapack') depends_on('scalapack', when='+mpi') depends_on('metis') depends_on('parmetis', when='+parmetis') depends_on('scotch~metis',", "-- STRUctured Matrix PACKage - provides linear solvers for sparse", "'-DSTRUMPACK_DEV_TESTING=%s' % on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s' % on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(\";\"), '-DTPL_LAPACK_LIBRARIES=%s'", "args.extend([ '-DSTRUMPACK_C_INTERFACE=%s' % on_off('+c_interface'), ]) if spec.satisfies('@4.0.0:'): args.extend([ '-DSTRUMPACK_USE_CUDA=%s' %", "routines') # TODO: add a slate variant depends_on('cmake@3.11:', type='build') depends_on('mpi',", "if spec.satisfies('@:3.9.999'): if '+mpi' in spec: args.extend([ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,", "solvers for sparse matrices and for dense rank-structured matrices, i.e.,", "default=True, description='Use MPI') variant('openmp', default=True, description='Enable thread parallellism via tasking", "'-DSTRUMPACK_BUILD_TESTS=%s' % on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(\";\"), '-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(\";\"), '-DTPL_SCALAPACK_LIBRARIES=%s'", "'-DSTRUMPACK_USE_MPI=%s' % on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s' % on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s' % on_off('+parmetis'), '-DTPL_ENABLE_SCOTCH=%s'", "shared libraries') variant('mpi', default=True, description='Use MPI') variant('openmp', default=True, description='Enable thread", "a distributed memory fully algebraic sparse solver and preconditioner. 
The", "matrices, i.e., matrices that exhibit some kind of low-rank property.", "return 'ON' if varstr in spec else 'OFF' args =", "description='Use MPI') variant('openmp', default=True, description='Enable thread parallellism via tasking with", "National Security, LLC and other # Spack Project Developers. See", "using ZFP') variant('c_interface', default=True, description='Enable C interface') variant('count_flops', default=False, description='Build", "\"https://github.com/pghysels/STRUMPACK.git\" maintainers = ['pghysels'] version('master', branch='master') version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938')", "support') variant('parmetis', default=True, description='Enable use of ParMetis') variant('scotch', default=False, description='Enable", "spec['lapack'].libs.joined(\";\"), '-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack']. libs.joined(\";\"), ] if spec.satisfies('@:3.9.999'): if '+mpi'", "distributed memory fully algebraic sparse solver and preconditioner. The preconditioner", "also provides preconditioned GMRES and BiCGStab iterative solvers.\"\"\" homepage =", "when='~mpi') conflicts('+butterflypack', when='~mpi') conflicts('+butterflypack', when='@:3.2.0') conflicts('+cuda', when='@:3.9.999') conflicts('+zfp', when='@:3.9.999') patch('intel-19-compile.patch',", "Matrix PACKage - provides linear solvers for sparse matrices and", "spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc ]) args.extend([ '-DSTRUMPACK_C_INTERFACE=%s'", "for compression using ZFP') variant('c_interface', default=True, description='Enable C interface') variant('count_flops',", "counters') variant('task_timers', default=False, description='Build with timers for internal routines') variant('build_dev_tests',", "algebraic sparse solver and preconditioner. 
The preconditioner is mostly aimed", "= \"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\" git = \"https://github.com/pghysels/STRUMPACK.git\" maintainers = ['pghysels'] version('master', branch='master')", "on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s' % on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s' % on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s' % on_off('+task_timers'),", "default=False, description='Build with timers for internal routines') variant('build_dev_tests', default=False, description='Build", "'-DTPL_ENABLE_BPACK=%s' % on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s' % on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s' % on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s'", "for sparse matrices and for dense rank-structured matrices, i.e., matrices", "default=True, description='Enable thread parallellism via tasking with OpenMP') variant('cuda', default=True,", "['pghysels'] version('master', branch='master') version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0',", "when='~mpi') conflicts('+butterflypack', when='@:3.2.0') conflicts('+cuda', when='@:3.9.999') conflicts('+zfp', when='@:3.9.999') patch('intel-19-compile.patch', when='@3.1.1') def", "when='@:3.2.0') conflicts('+cuda', when='@:3.9.999') conflicts('+zfp', when='@:3.9.999') patch('intel-19-compile.patch', when='@3.1.1') def cmake_args(self): spec", "description='Enable C interface') variant('count_flops', default=False, description='Build with flop counters') variant('task_timers',", "LLC and other # Spack Project Developers. See the top-level", "use of Scotch') variant('butterflypack', default=True, description='Enable use of ButterflyPACK') variant('zfp',", "details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import", "partial differential equation, but is not limited to any particular", "self.spec def on_off(varstr): return 'ON' if varstr in spec else", "and preconditioner. The preconditioner is mostly aimed at large sparse", "'-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(\";\"), '-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(\";\"), '-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack']. libs.joined(\";\"),", "at large sparse linear systems which result from the discretization", "solver and preconditioner. The preconditioner is mostly aimed at large", "Copyright 2013-2020 Lawrence Livermore National Security, LLC and other #", "a partial differential equation, but is not limited to any", "spec else 'OFF' args = [ '-DSTRUMPACK_USE_MPI=%s' % on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s'", "internal routines') variant('build_dev_tests', default=False, description='Build developer test routines') variant('build_tests', default=False,", "spec.satisfies('@:3.9.999'): if '+mpi' in spec: args.extend([ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s'", "spec['blas'].libs.joined(\";\"), '-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(\";\"), '-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack']. 
libs.joined(\";\"), ] if", "depends_on('butterflypack@1.2.0:', when='@4.0.0: +butterflypack+mpi') depends_on('cuda', when='@4.0.0: +cuda') depends_on('zfp', when='+zfp') conflicts('+parmetis', when='~mpi')", "variant('cuda', default=True, description='Enable CUDA support') variant('parmetis', default=True, description='Enable use of", "depends_on('metis') depends_on('parmetis', when='+parmetis') depends_on('scotch~metis', when='+scotch') depends_on('scotch~metis+mpi', when='+scotch+mpi') depends_on('butterflypack@1.1.0', when='@3.3.0:3.9.999 +butterflypack+mpi')", "class Strumpack(CMakePackage, CudaPackage): \"\"\"STRUMPACK -- STRUctured Matrix PACKage - provides", "description='Enable use of Scotch') variant('butterflypack', default=True, description='Enable use of ButterflyPACK')", "OpenMP') variant('cuda', default=True, description='Enable CUDA support') variant('parmetis', default=True, description='Enable use", "Project Developers. See the top-level COPYRIGHT file for details. #", "description='Build with support for compression using ZFP') variant('c_interface', default=True, description='Enable", "default=False, description='Build shared libraries') variant('mpi', default=True, description='Use MPI') variant('openmp', default=True,", "when='+parmetis') depends_on('scotch~metis', when='+scotch') depends_on('scotch~metis+mpi', when='+scotch+mpi') depends_on('butterflypack@1.1.0', when='@3.3.0:3.9.999 +butterflypack+mpi') depends_on('butterflypack@1.2.0:', when='@4.0.0:", "COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT)", "interface') variant('count_flops', default=False, description='Build with flop counters') variant('task_timers', default=False, description='Build", "matrices and for dense rank-structured matrices, i.e., matrices that exhibit", "top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR", "description='Enable use of ButterflyPACK') variant('zfp', default=True, description='Build with support for", "C interface') variant('count_flops', default=False, description='Build with flop counters') variant('task_timers', default=False,", "default=True, description='Build with support for compression using ZFP') variant('c_interface', default=True,", "% on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s' % on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s' % on_off('+parmetis'), '-DTPL_ENABLE_SCOTCH=%s' %", "a slate variant depends_on('cmake@3.11:', type='build') depends_on('mpi', when='+mpi') depends_on('blas') depends_on('lapack') depends_on('scalapack',", "variant('c_interface', default=True, description='Enable C interface') variant('count_flops', default=False, description='Build with flop", "low-rank property. It provides a distributed memory fully algebraic sparse", "import * class Strumpack(CMakePackage, CudaPackage): \"\"\"STRUMPACK -- STRUctured Matrix PACKage", "tasking with OpenMP') variant('cuda', default=True, description='Enable CUDA support') variant('parmetis', default=True,", "of problem. 
STRUMPACK also provides preconditioned GMRES and BiCGStab iterative", "with flop counters') variant('task_timers', default=False, description='Build with timers for internal", "when='@4.0.0: +butterflypack+mpi') depends_on('cuda', when='@4.0.0: +cuda') depends_on('zfp', when='+zfp') conflicts('+parmetis', when='~mpi') conflicts('+butterflypack',", "conflicts('+butterflypack', when='~mpi') conflicts('+butterflypack', when='@:3.2.0') conflicts('+cuda', when='@:3.9.999') conflicts('+zfp', when='@:3.9.999') patch('intel-19-compile.patch', when='@3.1.1')", "2013-2020 Lawrence Livermore National Security, LLC and other # Spack", "% on_off('+parmetis'), '-DTPL_ENABLE_SCOTCH=%s' % on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s' % on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s' %", "SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class Strumpack(CMakePackage,", "sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared', default=False, description='Build shared libraries') variant('mpi', default=True, description='Use MPI')", "default=True, description='Enable use of ParMetis') variant('scotch', default=False, description='Enable use of", "def on_off(varstr): return 'ON' if varstr in spec else 'OFF'", "add a slate variant depends_on('cmake@3.11:', type='build') depends_on('mpi', when='+mpi') depends_on('blas') depends_on('lapack')", "mostly aimed at large sparse linear systems which result from", "default=False, description='Build developer test routines') variant('build_tests', default=False, description='Build test routines')", "is not limited to any particular type of problem. 
STRUMPACK", "variant('openmp', default=True, description='Enable thread parallellism via tasking with OpenMP') variant('cuda',", "else 'OFF' args = [ '-DSTRUMPACK_USE_MPI=%s' % on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s' %", "version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778')", "% spec['blas'].libs.joined(\";\"), '-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(\";\"), '-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack']. libs.joined(\";\"), ]", "# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import *", "git = \"https://github.com/pghysels/STRUMPACK.git\" maintainers = ['pghysels'] version('master', branch='master') version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49')", "the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0", "args = [ '-DSTRUMPACK_USE_MPI=%s' % on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s' % on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s'", "slate variant depends_on('cmake@3.11:', type='build') depends_on('mpi', when='+mpi') depends_on('blas') depends_on('lapack') depends_on('scalapack', when='+mpi')", "description='Build developer test routines') variant('build_tests', default=False, description='Build test routines') #", "on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(\";\"), '-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(\";\"), '-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack'].", "discretization of a partial differential equation, but is not limited", "provides a distributed memory fully algebraic sparse solver and preconditioner.", "if varstr in spec else 'OFF' args = [ '-DSTRUMPACK_USE_MPI=%s'", "- provides linear solvers for sparse matrices and for dense", "which result from the discretization of a partial differential equation,", "use of ButterflyPACK') variant('zfp', default=True, description='Build with support for compression", "'-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(\";\"), '-DTPL_SCALAPACK_LIBRARIES=%s' % spec['scalapack']. 
libs.joined(\";\"), ] if spec.satisfies('@:3.9.999'):", "sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared',", "% on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s' % on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(\";\"), '-DTPL_LAPACK_LIBRARIES=%s' %", "iterative solvers.\"\"\" homepage = \"http://portal.nersc.gov/project/sparse/strumpack\" url = \"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\" git =", "memory fully algebraic sparse solver and preconditioner. The preconditioner is", "spec['scalapack']. libs.joined(\";\"), ] if spec.satisfies('@:3.9.999'): if '+mpi' in spec: args.extend([", "variant('scotch', default=False, description='Enable use of Scotch') variant('butterflypack', default=True, description='Enable use", "version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared', default=False, description='Build shared libraries') variant('mpi',", "spec = self.spec def on_off(varstr): return 'ON' if varstr in", "on_off(varstr): return 'ON' if varstr in spec else 'OFF' args", "'OFF' args = [ '-DSTRUMPACK_USE_MPI=%s' % on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s' % on_off('+openmp'),", "% spec['scalapack']. 
libs.joined(\";\"), ] if spec.satisfies('@:3.9.999'): if '+mpi' in spec:", "with support for compression using ZFP') variant('c_interface', default=True, description='Enable C", "description='Build shared libraries') variant('mpi', default=True, description='Use MPI') variant('openmp', default=True, description='Enable", "variant('mpi', default=True, description='Use MPI') variant('openmp', default=True, description='Enable thread parallellism via", "of low-rank property. It provides a distributed memory fully algebraic", "\"http://portal.nersc.gov/project/sparse/strumpack\" url = \"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\" git = \"https://github.com/pghysels/STRUMPACK.git\" maintainers = ['pghysels']", "of ButterflyPACK') variant('zfp', default=True, description='Build with support for compression using", "in spec: args.extend([ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s'", "description='Enable use of ParMetis') variant('scotch', default=False, description='Enable use of Scotch')", "version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared', default=False,", "fully algebraic sparse solver and preconditioner. The preconditioner is mostly", "+butterflypack+mpi') depends_on('cuda', when='@4.0.0: +cuda') depends_on('zfp', when='+zfp') conflicts('+parmetis', when='~mpi') conflicts('+butterflypack', when='~mpi')", "particular type of problem. STRUMPACK also provides preconditioned GMRES and", "some kind of low-rank property. 
It provides a distributed memory", "# SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack import * class", "variant depends_on('cmake@3.11:', type='build') depends_on('mpi', when='+mpi') depends_on('blas') depends_on('lapack') depends_on('scalapack', when='+mpi') depends_on('metis')", "linear solvers for sparse matrices and for dense rank-structured matrices,", "spec: args.extend([ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' %", "compression using ZFP') variant('c_interface', default=True, description='Enable C interface') variant('count_flops', default=False,", "and for dense rank-structured matrices, i.e., matrices that exhibit some", "type of problem. STRUMPACK also provides preconditioned GMRES and BiCGStab", "variant('shared', default=False, description='Build shared libraries') variant('mpi', default=True, description='Use MPI') variant('openmp',", "variant('task_timers', default=False, description='Build with timers for internal routines') variant('build_dev_tests', default=False,", "% on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s' % on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s' % on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s' %", "flop counters') variant('task_timers', default=False, description='Build with timers for internal routines')", "]) if spec.satisfies('@4.0.0:'): args.extend([ '-DSTRUMPACK_USE_CUDA=%s' % on_off('+cuda') ]) args.extend([ '-DBUILD_SHARED_LIBS=%s'", "when='+scotch') depends_on('scotch~metis+mpi', when='+scotch+mpi') depends_on('butterflypack@1.1.0', when='@3.3.0:3.9.999 +butterflypack+mpi') depends_on('butterflypack@1.2.0:', when='@4.0.0: +butterflypack+mpi') depends_on('cuda',", "on_off('+c_interface'), ]) if spec.satisfies('@4.0.0:'): args.extend([ '-DSTRUMPACK_USE_CUDA=%s' % on_off('+cuda') ]) args.extend([", "matrices that exhibit some kind of low-rank property. 
It provides", "patch('intel-19-compile.patch', when='@3.1.1') def cmake_args(self): spec = self.spec def on_off(varstr): return", "linear systems which result from the discretization of a partial", "'-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc ]) args.extend([ '-DSTRUMPACK_C_INTERFACE=%s' %", "# TODO: add a slate variant depends_on('cmake@3.11:', type='build') depends_on('mpi', when='+mpi')", "% spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc ]) args.extend([ '-DSTRUMPACK_C_INTERFACE=%s' % on_off('+c_interface'),", "= \"https://github.com/pghysels/STRUMPACK.git\" maintainers = ['pghysels'] version('master', branch='master') version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0',", "% spec['mpi'].mpifc ]) args.extend([ '-DSTRUMPACK_C_INTERFACE=%s' % on_off('+c_interface'), ]) if spec.satisfies('@4.0.0:'):", "when='@:3.9.999') patch('intel-19-compile.patch', when='@3.1.1') def cmake_args(self): spec = self.spec def on_off(varstr):", "on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s' % on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s' % on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(\";\"),", "sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared', default=False, description='Build", "when='@4.0.0: +cuda') depends_on('zfp', when='+zfp') conflicts('+parmetis', when='~mpi') conflicts('+butterflypack', when='~mpi') conflicts('+butterflypack', when='@:3.2.0')", "'-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' % 
spec['mpi'].mpifc ])", "spec['mpi'].mpifc ]) args.extend([ '-DSTRUMPACK_C_INTERFACE=%s' % on_off('+c_interface'), ]) if spec.satisfies('@4.0.0:'): args.extend([", "on_off('+parmetis'), '-DTPL_ENABLE_SCOTCH=%s' % on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s' % on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s' % on_off('+count_flops'),", "kind of low-rank property. It provides a distributed memory fully", "TODO: add a slate variant depends_on('cmake@3.11:', type='build') depends_on('mpi', when='+mpi') depends_on('blas')", "depends_on('scalapack', when='+mpi') depends_on('metis') depends_on('parmetis', when='+parmetis') depends_on('scotch~metis', when='+scotch') depends_on('scotch~metis+mpi', when='+scotch+mpi') depends_on('butterflypack@1.1.0',", "args.extend([ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc", "depends_on('zfp', when='+zfp') conflicts('+parmetis', when='~mpi') conflicts('+butterflypack', when='~mpi') conflicts('+butterflypack', when='@:3.2.0') conflicts('+cuda', when='@:3.9.999')", "branch='master') version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1',", "% spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx, '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc ]) args.extend([", "description='Build with timers for internal routines') variant('build_dev_tests', default=False, description='Build developer", "when='@3.3.0:3.9.999 +butterflypack+mpi') depends_on('butterflypack@1.2.0:', when='@4.0.0: +butterflypack+mpi') depends_on('cuda', when='@4.0.0: +cuda') depends_on('zfp', when='+zfp')", "sparse solver and 
preconditioner. The preconditioner is mostly aimed at", "default=False, description='Enable use of Scotch') variant('butterflypack', default=True, description='Enable use of", "% on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s' % on_off('+parmetis'), '-DTPL_ENABLE_SCOTCH=%s' % on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s' %", "OR MIT) from spack import * class Strumpack(CMakePackage, CudaPackage): \"\"\"STRUMPACK", "depends_on('parmetis', when='+parmetis') depends_on('scotch~metis', when='+scotch') depends_on('scotch~metis+mpi', when='+scotch+mpi') depends_on('butterflypack@1.1.0', when='@3.3.0:3.9.999 +butterflypack+mpi') depends_on('butterflypack@1.2.0:',", "Lawrence Livermore National Security, LLC and other # Spack Project", "if spec.satisfies('@4.0.0:'): args.extend([ '-DSTRUMPACK_USE_CUDA=%s' % on_off('+cuda') ]) args.extend([ '-DBUILD_SHARED_LIBS=%s' %", "version('master', branch='master') version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0', sha256='499fd3b58656b4b6495496920e5372895861ebf15328be8a7a9354e06c734bc7') version('3.2.0', sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1')", "property. It provides a distributed memory fully algebraic sparse solver", "variant('butterflypack', default=True, description='Enable use of ButterflyPACK') variant('zfp', default=True, description='Build with", "on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s' % on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s' % on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s' % on_off('+build_tests'),", "dense rank-structured matrices, i.e., matrices that exhibit some kind of", "depends_on('lapack') depends_on('scalapack', when='+mpi') depends_on('metis') depends_on('parmetis', when='+parmetis') depends_on('scotch~metis', when='+scotch') depends_on('scotch~metis+mpi', when='+scotch+mpi')", "preconditioner. 
The preconditioner is mostly aimed at large sparse linear", "ButterflyPACK') variant('zfp', default=True, description='Build with support for compression using ZFP')", "variant('build_tests', default=False, description='Build test routines') # TODO: add a slate", "Spack Project Developers. See the top-level COPYRIGHT file for details.", "but is not limited to any particular type of problem.", "= [ '-DSTRUMPACK_USE_MPI=%s' % on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s' % on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s' %", "sparse matrices and for dense rank-structured matrices, i.e., matrices that", "The preconditioner is mostly aimed at large sparse linear systems", "not limited to any particular type of problem. STRUMPACK also", "that exhibit some kind of low-rank property. It provides a", "'-DTPL_ENABLE_SCOTCH=%s' % on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s' % on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s' % on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s'", "default=False, description='Build with flop counters') variant('task_timers', default=False, description='Build with timers", "] if spec.satisfies('@:3.9.999'): if '+mpi' in spec: args.extend([ '-DCMAKE_C_COMPILER=%s' %", "for dense rank-structured matrices, i.e., matrices that exhibit some kind", "of ParMetis') variant('scotch', default=False, description='Enable use of Scotch') variant('butterflypack', default=True,", "Strumpack(CMakePackage, CudaPackage): \"\"\"STRUMPACK -- STRUctured Matrix PACKage - provides linear", "version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared', default=False, description='Build shared libraries') variant('mpi', default=True, description='Use", "default=True, description='Enable CUDA support') variant('parmetis', default=True, description='Enable use of ParMetis')", "= self.spec def on_off(varstr): return 'ON' if varstr in spec", "routines') variant('build_dev_tests', default=False, description='Build developer 
test routines') variant('build_tests', default=False, description='Build", "thread parallellism via tasking with OpenMP') variant('cuda', default=True, description='Enable CUDA", "use of ParMetis') variant('scotch', default=False, description='Enable use of Scotch') variant('butterflypack',", "support for compression using ZFP') variant('c_interface', default=True, description='Enable C interface')", "variant('parmetis', default=True, description='Enable use of ParMetis') variant('scotch', default=False, description='Enable use", "test routines') variant('build_tests', default=False, description='Build test routines') # TODO: add", "of Scotch') variant('butterflypack', default=True, description='Enable use of ButterflyPACK') variant('zfp', default=True,", "result from the discretization of a partial differential equation, but", "in spec else 'OFF' args = [ '-DSTRUMPACK_USE_MPI=%s' % on_off('+mpi'),", "Livermore National Security, LLC and other # Spack Project Developers.", "provides preconditioned GMRES and BiCGStab iterative solvers.\"\"\" homepage = \"http://portal.nersc.gov/project/sparse/strumpack\"", "parallellism via tasking with OpenMP') variant('cuda', default=True, description='Enable CUDA support')", "[ '-DSTRUMPACK_USE_MPI=%s' % on_off('+mpi'), '-DSTRUMPACK_USE_OPENMP=%s' % on_off('+openmp'), '-DTPL_ENABLE_PARMETIS=%s' % on_off('+parmetis'),", "any particular type of problem. 
STRUMPACK also provides preconditioned GMRES", "when='+mpi') depends_on('metis') depends_on('parmetis', when='+parmetis') depends_on('scotch~metis', when='+scotch') depends_on('scotch~metis+mpi', when='+scotch+mpi') depends_on('butterflypack@1.1.0', when='@3.3.0:3.9.999", "on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s' % on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(\";\"), '-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(\";\"),", "when='@3.1.1') def cmake_args(self): spec = self.spec def on_off(varstr): return 'ON'", "on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s' % on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s' % on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s' % on_off('+build_dev_tests'),", "problem. STRUMPACK also provides preconditioned GMRES and BiCGStab iterative solvers.\"\"\"", "sparse linear systems which result from the discretization of a", "]) args.extend([ '-DSTRUMPACK_C_INTERFACE=%s' % on_off('+c_interface'), ]) if spec.satisfies('@4.0.0:'): args.extend([ '-DSTRUMPACK_USE_CUDA=%s'", "'+mpi' in spec: args.extend([ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,", "timers for internal routines') variant('build_dev_tests', default=False, description='Build developer test routines')", "test routines') # TODO: add a slate variant depends_on('cmake@3.11:', type='build')", "provides linear solvers for sparse matrices and for dense rank-structured", "'-DTPL_ENABLE_PARMETIS=%s' % on_off('+parmetis'), '-DTPL_ENABLE_SCOTCH=%s' % on_off('+scotch'), '-DTPL_ENABLE_BPACK=%s' % on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s'", "It provides a distributed memory fully algebraic sparse solver and", "default=True, description='Enable C interface') variant('count_flops', default=False, description='Build with flop counters')", "preconditioned GMRES and BiCGStab iterative solvers.\"\"\" homepage = \"http://portal.nersc.gov/project/sparse/strumpack\" 
url", "sha256='34d93e1b2a3b8908ef89804b7e08c5a884cbbc0b2c9f139061627c0d2de282c1') version('3.1.1', sha256='c1c3446ee023f7b24baa97b24907735e89ce4ae9f5ef516645dfe390165d1778') variant('shared', default=False, description='Build shared libraries') variant('mpi', default=True,", "varstr in spec else 'OFF' args = [ '-DSTRUMPACK_USE_MPI=%s' %", "conflicts('+cuda', when='@:3.9.999') conflicts('+zfp', when='@:3.9.999') patch('intel-19-compile.patch', when='@3.1.1') def cmake_args(self): spec =", "Developers. See the top-level COPYRIGHT file for details. # #", "'-DSTRUMPACK_COUNT_FLOPS=%s' % on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s' % on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s' % on_off('+build_dev_tests'), '-DSTRUMPACK_BUILD_TESTS=%s'", "PACKage - provides linear solvers for sparse matrices and for", "% on_off('+build_tests'), '-DTPL_BLAS_LIBRARIES=%s' % spec['blas'].libs.joined(\";\"), '-DTPL_LAPACK_LIBRARIES=%s' % spec['lapack'].libs.joined(\";\"), '-DTPL_SCALAPACK_LIBRARIES=%s' %", "description='Build with flop counters') variant('task_timers', default=False, description='Build with timers for", "<gh_stars>1-10 # Copyright 2013-2020 Lawrence Livermore National Security, LLC and", "url = \"https://github.com/pghysels/STRUMPACK/archive/v4.0.0.tar.gz\" git = \"https://github.com/pghysels/STRUMPACK.git\" maintainers = ['pghysels'] version('master',", "with OpenMP') variant('cuda', default=True, description='Enable CUDA support') variant('parmetis', default=True, description='Enable", "libraries') variant('mpi', default=True, description='Use MPI') variant('openmp', default=True, description='Enable thread parallellism", "'-DSTRUMPACK_C_INTERFACE=%s' % on_off('+c_interface'), ]) if spec.satisfies('@4.0.0:'): args.extend([ '-DSTRUMPACK_USE_CUDA=%s' % on_off('+cuda')", "+cuda') depends_on('zfp', when='+zfp') conflicts('+parmetis', when='~mpi') conflicts('+butterflypack', when='~mpi') conflicts('+butterflypack', when='@:3.2.0') conflicts('+cuda',", 
"maintainers = ['pghysels'] version('master', branch='master') version('5.0.0', sha256='bdfd1620ff7158d96055059be04ee49466ebaca8213a2fdab33e2d4571019a49') version('4.0.0', sha256='a3629f1f139865c74916f8f69318f53af6319e7f8ec54e85c16466fd7d256938') version('3.3.0',", "description='Enable CUDA support') variant('parmetis', default=True, description='Enable use of ParMetis') variant('scotch',", "description='Build test routines') # TODO: add a slate variant depends_on('cmake@3.11:',", "rank-structured matrices, i.e., matrices that exhibit some kind of low-rank", "def cmake_args(self): spec = self.spec def on_off(varstr): return 'ON' if", "GMRES and BiCGStab iterative solvers.\"\"\" homepage = \"http://portal.nersc.gov/project/sparse/strumpack\" url =", "description='Enable thread parallellism via tasking with OpenMP') variant('cuda', default=True, description='Enable", "if '+mpi' in spec: args.extend([ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc, '-DCMAKE_CXX_COMPILER=%s' %", "% on_off('+butterflypack'), '-DSTRUMPACK_COUNT_FLOPS=%s' % on_off('+count_flops'), '-DSTRUMPACK_TASK_TIMERS=%s' % on_off('+task_timers'), '-DSTRUMPACK_DEV_TESTING=%s' %", "aimed at large sparse linear systems which result from the", "from spack import * class Strumpack(CMakePackage, CudaPackage): \"\"\"STRUMPACK -- STRUctured" ]
[ "from rasa.core.constants import REQUESTED_SLOT from rasa.core.slots import Slot import pandas", "CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> List[Dict]: if", "return [\"rating\"] def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]: \"\"\"A dictionary", "parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": [] }) query_utter", "\"intent\": {\"confidence\": 1.0, \"name\": \"query_init\"}, \"entities\": [] }) return [", "return [UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": []", "{\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": [] }), FollowupAction(name=\"utter_greet\")] class FeedbackForm(FormAction):", "feedbk=feedback) }); else: dispatcher.utter_message(\"Feedback form closed\") li = [SlotSet(\"rating\", None),", "}); return {\"rating\": None, \"feedback_text\": None} except Exception as e:", "def required_slots(tracker): if tracker.get_slot(\"rating\"): return [\"rating\", \"feedback_text\"] else: return [\"rating\"]", "import REQUESTED_SLOT from rasa.core.slots import Slot import pandas as pd", "return [\"rating\", \"feedback_text\"] else: return [\"rating\"] def slot_mappings(self) -> Dict[Text,", "dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Please", "Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: if value", "logger = logging.getLogger(__name__) with open(r'./actionserver/custom_payload.json') as f: frendy_product_menu = json.load(f)", "= tracker.get_slot(\"feedback_text\") feedbackObj = json.load(queriesRef) feedbackObj[\"feedback\"].append({ \"createdOn\": util.timestamp(), \"complaint_area\": rating,", "feedback }) with open(\"./actionserver/customer_queries.json\", \"w\") as queriesRefWrite: json.dump(feedbackObj, queriesRefWrite, indent=4)", "Submitted!Thank You!\".format( rate=rating, 
feedbk=feedback)) dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Your", "def validate_feedback_text( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain:", "self.from_text()], \"feedback_text\": [self.from_text(), self.from_entity(entity=\"navigation\")]} def validate_rating( self, value: Text, dispatcher:", "\"payload\":\"text\", \"text\":\"Your Response :\\n Rating :'{rate}' star \\n Feedback: '{feedbk}'", "[self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]} return {\"rating\": [self.from_entity(\"rating\"), self.from_text()], \"feedback_text\": [self.from_text(), self.from_entity(entity=\"navigation\")]} def validate_rating(", "Rating :'{rate}' star \\n Feedback: '{feedbk}' \\n Submitted!Thank You!\".format( rate=rating,", "- a whole message or a list of them, where", "'{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback) }); else: dispatcher.utter_message(\"Feedback form", "greet_utter, FollowupAction(name=\"utter_greet\"), query_utter, FollowupAction(name=\"utter_query_type\") ] def greet_back(dispatcher): dispatcher.utter_message(\"Going back!!!\") dispatcher.utter_message(json_message", "def query_back(dispatcher): dispatcher.utter_message(\"Going back to queries!!!\") greet_utter = UserUttered(text=\"/greet\", parse_data={", "rasa.core.slots import Slot import pandas as pd import json from", "import Slot import pandas as pd import json from actionserver.utils", "Feedback: '{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback) }); else: dispatcher.utter_message(\"Feedback", "== \"back1\" or value.lower() == \"back\": return {\"rating\": INVALID_VALUE, \"feedback_text\":", "from actionserver.utils.utilities import INVALID_VALUE product_list = [] quant_list = []", "parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": [] }), FollowupAction(name=\"utter_greet\")]", "import * import 
logging from actionserver.utils.utilities import INVALID_VALUE product_list =", "import CollectingDispatcher from rasa_sdk.forms import FormAction from rasa_sdk.events import UserUtteranceReverted,", "UserUttered(text=\"/get_started\", parse_data={ # \"intent\": {\"confidence\": 1.0, \"name\": \"get_started\"}, # \"entities\":", "from actionserver.utils import utilities as util from actionserver.controllers.faqs.faq import FAQ", "'3', '4', '5'] try: value = value.strip() if value ==", "validate_rating( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text,", "Dict[Text, Any], ) -> List[Dict]: if tracker.get_slot(\"rating\") != INVALID_VALUE: with", "= [] quant_list = [] # takes quantity from user", "[ greet_utter, FollowupAction(name=\"utter_greet\"), query_utter, FollowupAction(name=\"utter_query_type\") ] def greet_back(dispatcher): dispatcher.utter_message(\"Going back!!!\")", "value} def submit( self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text,", "required_slots(tracker): if tracker.get_slot(\"rating\"): return [\"rating\", \"feedback_text\"] else: return [\"rating\"] def", "domain: Dict[Text, Any], ) -> Dict[Text, Any]: if value ==", "None, \"feedback_text\": None} except Exception as e: print(e) dispatcher.utter_message(\"Please enter", "queriesRefWrite, indent=4) dispatcher.utter_message(\"Your Response :\\n Rating :'{rate}' star \\n Feedback:", "== \"back\": return {\"rating\": INVALID_VALUE, \"feedback_text\": INVALID_VALUE} # 1-5 it", "back to queries!!!\") greet_utter = UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0,", "or a list of them, where a first match will", "Dict[Text, Union[Dict, List[Dict]]]: \"\"\"A dictionary to map required slots to", "value pairs - a whole message or a list of", "enter valid option\" }); return {\"rating\": None, \"feedback_text\": None} except", "required slots to - an extracted entity - intent: value", "for global back # return 
[Restarted(), UserUttered(text=\"/get_started\", parse_data={ # \"intent\":", "'4', '5'] try: value = value.strip() if value == \"back1\"", "[] }) query_utter = UserUttered(text=\"/query_init\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\":", "\"entities\": [] }) query_utter = UserUttered(text=\"/query_init\", parse_data={ \"intent\": {\"confidence\": 1.0,", "List[Dict]: if tracker.get_slot(\"rating\") != INVALID_VALUE: with open(\"./actionserver/customer_queries.json\", \"r\") as queriesRef:", "Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) ->", "1.0, \"name\": \"greet\"}, \"entities\": [] }), FollowupAction(name=\"utter_greet\")] class FeedbackForm(FormAction): def", "Text, Dict, List, Union from rasa_sdk import Action, Tracker from", "tracker: Tracker, domain: Dict[Text, Any], ) -> List[Dict]: if tracker.get_slot(\"rating\")", "}) query_utter = UserUttered(text=\"/query_init\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"query_init\"},", "if tracker.get_slot(\"rating\"): return [\"rating\", \"feedback_text\"] else: return [\"rating\"] def slot_mappings(self)", "['1', '2', '3', '4', '5'] try: value = value.strip() if", "to - an extracted entity - intent: value pairs -", "elif value in ratings: return {\"rating\": value, \"feedback_text\": None} else:", "= [SlotSet(\"rating\", None), SlotSet(\"feedback_text\", None)] li.extend(query_back(dispatcher)) return li return [SlotSet(\"rating\",", "value.lower() == \"back\": return {\"rating\": None, \"feedback_text\": None} else: return", "dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Welcome back to Frendy Shopping\"", "if value == \"back2\" or value.lower() == \"back\": return {\"rating\":", "message or a list of them, where a first match", "else: dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\",", "# 
takes quantity from user logger = logging.getLogger(__name__) with open(r'./actionserver/custom_payload.json')", "[] # }), FollowupAction(name=\"utter_greet\")] def query_back(dispatcher): dispatcher.utter_message(\"Going back to queries!!!\")", "def submit( self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any],", "{ \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Welcome back to Frendy Shopping\" }); return", "Dict, List, Union from rasa_sdk import Action, Tracker from rasa_sdk.executor", "UserUttered, # ActionExecuted, Event) from rasa_sdk.events import AllSlotsReset, SlotSet from", "[self.from_text(), self.from_entity(entity=\"navigation\")]} def validate_rating( self, value: Text, dispatcher: CollectingDispatcher, tracker:", "\"back\": return {\"rating\": None, \"feedback_text\": None} else: return {\"feedback_text\": value}", "dispatcher.utter_message(\"Your Response :\\n Rating :'{rate}' star \\n Feedback: '{feedbk}' \\n", "import UserUtteranceReverted, UserUttered, FollowupAction # from rasa_core.events import (UserUtteranceReverted, UserUttered,", "rasa_sdk import Action, Tracker from rasa_sdk.executor import CollectingDispatcher from rasa_sdk.forms", "\"greet\"}, \"entities\": [] }), FollowupAction(name=\"utter_greet\")] class FeedbackForm(FormAction): def name(self): return", "enter valid option\" }); return {\"rating\": None, \"feedback_text\": None} def", "integer otherwise rating:None elif value in ratings: return {\"rating\": value,", "back to Frendy Shopping\" }); return [UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\":", "!= INVALID_VALUE: with open(\"./actionserver/customer_queries.json\", \"r\") as queriesRef: rating = tracker.get_slot(\"rating\")", "Any], ) -> Dict[Text, Any]: ratings = ['1', '2', '3',", "tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: ratings", "{\"rating\": None, \"feedback_text\": None} def validate_feedback_text( self, value: Text, dispatcher:", 
"dispatcher.utter_message(\"Feedback form closed\") li = [SlotSet(\"rating\", None), SlotSet(\"feedback_text\", None)] li.extend(query_back(dispatcher))", "[\"rating\"] def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]: \"\"\"A dictionary to", "intent: value pairs - a whole message or a list", "1.0, \"name\": \"query_init\"}, \"entities\": [] }) return [ greet_utter, FollowupAction(name=\"utter_greet\"),", "\"feedback_text\": [self.from_text(), self.from_entity(entity=\"navigation\")]} def validate_rating( self, value: Text, dispatcher: CollectingDispatcher,", "\"back1\" or value.lower() == \"back\": return {\"rating\": INVALID_VALUE, \"feedback_text\": INVALID_VALUE}", "-> Dict[Text, Any]: if value == \"back2\" or value.lower() ==", "= { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Please enter valid option\" }); return", "FormAction from rasa_sdk.events import UserUtteranceReverted, UserUttered, FollowupAction # from rasa_core.events", "queries!!!\") greet_utter = UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"},", "None, \"feedback_text\": None} def validate_feedback_text( self, value: Text, dispatcher: CollectingDispatcher,", "import pandas as pd import json from actionserver.utils import utilities", "UserUtteranceReverted, UserUttered, FollowupAction # from rasa_core.events import (UserUtteranceReverted, UserUttered, #", "value == \"back1\" or value.lower() == \"back\": return {\"rating\": INVALID_VALUE,", "queriesRefWrite: json.dump(feedbackObj, queriesRefWrite, indent=4) dispatcher.utter_message(\"Your Response :\\n Rating :'{rate}' star", "None} else: return {\"feedback_text\": value} def submit( self, dispatcher: CollectingDispatcher,", "INVALID_VALUE} # 1-5 it integer otherwise rating:None elif value in", "self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) ->", "INVALID_VALUE, \"feedback_text\": INVALID_VALUE} # 1-5 it integer otherwise 
rating:None elif", "greet_utter = UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\":", "{\"confidence\": 1.0, \"name\": \"get_started\"}, # \"entities\": [] # }), FollowupAction(name=\"utter_greet\")]", "[] quant_list = [] # takes quantity from user logger", "if value == \"back1\" or value.lower() == \"back\": return {\"rating\":", "FeedbackForm(FormAction): def name(self): return \"feedback_form\" @staticmethod def required_slots(tracker): if tracker.get_slot(\"rating\"):", "rasa_core.events import (UserUtteranceReverted, UserUttered, # ActionExecuted, Event) from rasa_sdk.events import", "return {\"rating\": value, \"feedback_text\": None} else: dispatcher.utter_message(\"Please enter valid option.\")", "product_list = [] quant_list = [] # takes quantity from", "tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: if", "1-5 it integer otherwise rating:None elif value in ratings: return", "FollowupAction(name=\"utter_greet\")] class FeedbackForm(FormAction): def name(self): return \"feedback_form\" @staticmethod def required_slots(tracker):", "pandas as pd import json from actionserver.utils import utilities as", "Any]: ratings = ['1', '2', '3', '4', '5'] try: value", "\"text\":\"Please enter valid option\" }); return {\"rating\": None, \"feedback_text\": None}", "[self.from_entity(\"rating\"), self.from_text()], \"feedback_text\": [self.from_text(), self.from_entity(entity=\"navigation\")]} def validate_rating( self, value: Text,", "of them, where a first match will be picked\"\"\" #", "\"payload\":\"text\", \"text\":\"Please enter valid option\" }); return {\"rating\": None, \"feedback_text\":", "as e: print(e) dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message = {", ") -> Dict[Text, Any]: ratings = ['1', '2', '3', '4',", "}) return [ greet_utter, FollowupAction(name=\"utter_greet\"), query_utter, FollowupAction(name=\"utter_query_type\") ] def 
greet_back(dispatcher):", "f: frendy_product_menu = json.load(f) # Code snippet for global back", "ActionExecuted, Event) from rasa_sdk.events import AllSlotsReset, SlotSet from rasa.core.constants import", "Submitted!Thank You!\".format( rate=rating, feedbk=feedback) }); else: dispatcher.utter_message(\"Feedback form closed\") li", "dispatcher.utter_message(\"Going back!!!\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Welcome back to", "will be picked\"\"\" # return {\"rating\": [self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\": [self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]} return {\"rating\":", "it integer otherwise rating:None elif value in ratings: return {\"rating\":", "pd import json from actionserver.utils import utilities as util from", "to Frendy Shopping\" }); return [UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0,", "return \"feedback_form\" @staticmethod def required_slots(tracker): if tracker.get_slot(\"rating\"): return [\"rating\", \"feedback_text\"]", "query_back(dispatcher): dispatcher.utter_message(\"Going back to queries!!!\") greet_utter = UserUttered(text=\"/greet\", parse_data={ \"intent\":", "rate=rating, feedbk=feedback)) dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Your Response :\\n", "\"feedback_text\": INVALID_VALUE} # 1-5 it integer otherwise rating:None elif value", "domain: Dict[Text, Any], ) -> Dict[Text, Any]: ratings = ['1',", "\\n Feedback: '{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback)) dispatcher.utter_message(json_message =", "util.timestamp(), \"complaint_area\": rating, \"complaint\": feedback }) with open(\"./actionserver/customer_queries.json\", \"w\") as", "\"feedback_form\" @staticmethod def required_slots(tracker): if tracker.get_slot(\"rating\"): return [\"rating\", \"feedback_text\"] 
else:", "utilities as util from actionserver.controllers.faqs.faq import FAQ from actionserver.controllers.constants.orderForm import", "back # return [Restarted(), UserUttered(text=\"/get_started\", parse_data={ # \"intent\": {\"confidence\": 1.0,", "actionserver.controllers.faqs.faq import FAQ from actionserver.controllers.constants.orderForm import * import logging from", "[] }) return [ greet_utter, FollowupAction(name=\"utter_greet\"), query_utter, FollowupAction(name=\"utter_query_type\") ] def", ") -> List[Dict]: if tracker.get_slot(\"rating\") != INVALID_VALUE: with open(\"./actionserver/customer_queries.json\", \"r\")", "[UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": [] }),", "with open(\"./actionserver/customer_queries.json\", \"r\") as queriesRef: rating = tracker.get_slot(\"rating\") feedback =", "valid option\" }); return {\"rating\": None, \"feedback_text\": None} def validate_feedback_text(", "a list of them, where a first match will be", "# from rasa_core.events import (UserUtteranceReverted, UserUttered, # ActionExecuted, Event) from", "def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]: \"\"\"A dictionary to map", "# ActionExecuted, Event) from rasa_sdk.events import AllSlotsReset, SlotSet from rasa.core.constants", "query_utter, FollowupAction(name=\"utter_query_type\") ] def greet_back(dispatcher): dispatcher.utter_message(\"Going back!!!\") dispatcher.utter_message(json_message = {", "}), FollowupAction(name=\"utter_greet\")] def query_back(dispatcher): dispatcher.utter_message(\"Going back to queries!!!\") greet_utter =", "- an extracted entity - intent: value pairs - a", "logging from actionserver.utils.utilities import INVALID_VALUE product_list = [] quant_list =", "return {\"rating\": None, \"feedback_text\": None} except Exception as e: print(e)", "feedbackObj = json.load(queriesRef) feedbackObj[\"feedback\"].append({ \"createdOn\": util.timestamp(), 
\"complaint_area\": rating, \"complaint\": feedback", "return {\"feedback_text\": value} def submit( self, dispatcher: CollectingDispatcher, tracker: Tracker,", "None, \"feedback_text\": None} else: return {\"feedback_text\": value} def submit( self,", "json from actionserver.utils import utilities as util from actionserver.controllers.faqs.faq import", "return {\"rating\": None, \"feedback_text\": None} def validate_feedback_text( self, value: Text,", "extracted entity - intent: value pairs - a whole message", "slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]: \"\"\"A dictionary to map required", "{\"rating\": [self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\": [self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]} return {\"rating\": [self.from_entity(\"rating\"), self.from_text()], \"feedback_text\": [self.from_text(), self.from_entity(entity=\"navigation\")]}", "feedbackObj[\"feedback\"].append({ \"createdOn\": util.timestamp(), \"complaint_area\": rating, \"complaint\": feedback }) with open(\"./actionserver/customer_queries.json\",", "def greet_back(dispatcher): dispatcher.utter_message(\"Going back!!!\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Welcome", "\"name\": \"query_init\"}, \"entities\": [] }) return [ greet_utter, FollowupAction(name=\"utter_greet\"), query_utter,", "Event) from rasa_sdk.events import AllSlotsReset, SlotSet from rasa.core.constants import REQUESTED_SLOT", "valid option\" }); return {\"rating\": None, \"feedback_text\": None} except Exception", "{\"rating\": INVALID_VALUE, \"feedback_text\": INVALID_VALUE} # 1-5 it integer otherwise rating:None", "star \\n Feedback: '{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback)) dispatcher.utter_message(json_message", "closed\") li = [SlotSet(\"rating\", None), SlotSet(\"feedback_text\", None)] li.extend(query_back(dispatcher)) return li", "def 
name(self): return \"feedback_form\" @staticmethod def required_slots(tracker): if tracker.get_slot(\"rating\"): return", "\"entities\": [] # }), FollowupAction(name=\"utter_greet\")] def query_back(dispatcher): dispatcher.utter_message(\"Going back to", "option\" }); return {\"rating\": None, \"feedback_text\": None} def validate_feedback_text( self,", "dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text,", "= UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": []", "Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]: ratings =", "to queries!!!\") greet_utter = UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\":", "as util from actionserver.controllers.faqs.faq import FAQ from actionserver.controllers.constants.orderForm import *", "from actionserver.controllers.faqs.faq import FAQ from actionserver.controllers.constants.orderForm import * import logging", "json.dump(feedbackObj, queriesRefWrite, indent=4) dispatcher.utter_message(\"Your Response :\\n Rating :'{rate}' star \\n", "\"name\": \"greet\"}, \"entities\": [] }), FollowupAction(name=\"utter_greet\")] class FeedbackForm(FormAction): def name(self):", "Any], ) -> List[Dict]: if tracker.get_slot(\"rating\") != INVALID_VALUE: with open(\"./actionserver/customer_queries.json\",", "def validate_rating( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain:", "* import logging from actionserver.utils.utilities import INVALID_VALUE product_list = []", "feedback = tracker.get_slot(\"feedback_text\") feedbackObj = json.load(queriesRef) feedbackObj[\"feedback\"].append({ \"createdOn\": util.timestamp(), \"complaint_area\":", "[Restarted(), UserUttered(text=\"/get_started\", parse_data={ # \"intent\": {\"confidence\": 1.0, \"name\": \"get_started\"}, #", "\"back\": return {\"rating\": INVALID_VALUE, \"feedback_text\": INVALID_VALUE} # 1-5 it integer", 
"\"feedback_text\": None} except Exception as e: print(e) dispatcher.utter_message(\"Please enter valid", "[] }), FollowupAction(name=\"utter_greet\")] class FeedbackForm(FormAction): def name(self): return \"feedback_form\" @staticmethod", "an extracted entity - intent: value pairs - a whole", "= ['1', '2', '3', '4', '5'] try: value = value.strip()", "\"r\") as queriesRef: rating = tracker.get_slot(\"rating\") feedback = tracker.get_slot(\"feedback_text\") feedbackObj", "{\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": [] }) query_utter = UserUttered(text=\"/query_init\",", ") -> Dict[Text, Any]: if value == \"back2\" or value.lower()", "\"text\":\"Welcome back to Frendy Shopping\" }); return [UserUttered(text=\"/greet\", parse_data={ \"intent\":", "[self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\": [self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]} return {\"rating\": [self.from_entity(\"rating\"), self.from_text()], \"feedback_text\": [self.from_text(), self.from_entity(entity=\"navigation\")]} def", "-> Dict[Text, Any]: ratings = ['1', '2', '3', '4', '5']", "(UserUtteranceReverted, UserUttered, # ActionExecuted, Event) from rasa_sdk.events import AllSlotsReset, SlotSet", "= tracker.get_slot(\"rating\") feedback = tracker.get_slot(\"feedback_text\") feedbackObj = json.load(queriesRef) feedbackObj[\"feedback\"].append({ \"createdOn\":", "open(r'./actionserver/custom_payload.json') as f: frendy_product_menu = json.load(f) # Code snippet for", "parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"query_init\"}, \"entities\": [] }) return", "rating, \"complaint\": feedback }) with open(\"./actionserver/customer_queries.json\", \"w\") as queriesRefWrite: json.dump(feedbackObj,", "rasa_sdk.executor import CollectingDispatcher from rasa_sdk.forms import FormAction from rasa_sdk.events import", "{\"confidence\": 1.0, \"name\": \"query_init\"}, \"entities\": [] }) return [ greet_utter,", 
"\"complaint_area\": rating, \"complaint\": feedback }) with open(\"./actionserver/customer_queries.json\", \"w\") as queriesRefWrite:", "them, where a first match will be picked\"\"\" # return", "global back # return [Restarted(), UserUttered(text=\"/get_started\", parse_data={ # \"intent\": {\"confidence\":", "] def greet_back(dispatcher): dispatcher.utter_message(\"Going back!!!\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\",", "actionserver.controllers.constants.orderForm import * import logging from actionserver.utils.utilities import INVALID_VALUE product_list", "ratings = ['1', '2', '3', '4', '5'] try: value =", "from rasa_sdk.executor import CollectingDispatcher from rasa_sdk.forms import FormAction from rasa_sdk.events", "value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], )", "Dict[Text, Any], ) -> Dict[Text, Any]: if value == \"back2\"", ":\\n Rating :'{rate}' star \\n Feedback: '{feedbk}' \\n Submitted!Thank You!\".format(", "user logger = logging.getLogger(__name__) with open(r'./actionserver/custom_payload.json') as f: frendy_product_menu =", "UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": [] })", "\"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": [] }), FollowupAction(name=\"utter_greet\")] class", "rating:None elif value in ratings: return {\"rating\": value, \"feedback_text\": None}", "return {\"rating\": None, \"feedback_text\": None} else: return {\"feedback_text\": value} def", "open(\"./actionserver/customer_queries.json\", \"w\") as queriesRefWrite: json.dump(feedbackObj, queriesRefWrite, indent=4) dispatcher.utter_message(\"Your Response :\\n", "tracker.get_slot(\"feedback_text\") feedbackObj = json.load(queriesRef) feedbackObj[\"feedback\"].append({ \"createdOn\": util.timestamp(), \"complaint_area\": rating, \"complaint\":", "otherwise rating:None elif value in ratings: 
return {\"rating\": value, \"feedback_text\":", "import INVALID_VALUE product_list = [] quant_list = [] # takes", "INVALID_VALUE: with open(\"./actionserver/customer_queries.json\", \"r\") as queriesRef: rating = tracker.get_slot(\"rating\") feedback", "None), SlotSet(\"feedback_text\", None)] li.extend(query_back(dispatcher)) return li return [SlotSet(\"rating\", None), SlotSet(\"feedback_text\",", "'2', '3', '4', '5'] try: value = value.strip() if value", "value.strip() if value == \"back1\" or value.lower() == \"back\": return", "SlotSet(\"feedback_text\", None)] li.extend(query_back(dispatcher)) return li return [SlotSet(\"rating\", None), SlotSet(\"feedback_text\", None)]", "\"feedback_text\": None} else: dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message = {", "actionserver.utils.utilities import INVALID_VALUE product_list = [] quant_list = [] #", "json.load(queriesRef) feedbackObj[\"feedback\"].append({ \"createdOn\": util.timestamp(), \"complaint_area\": rating, \"complaint\": feedback }) with", "return [Restarted(), UserUttered(text=\"/get_started\", parse_data={ # \"intent\": {\"confidence\": 1.0, \"name\": \"get_started\"},", "= { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Welcome back to Frendy Shopping\" });", "{\"rating\": None, \"feedback_text\": None} else: return {\"feedback_text\": value} def submit(", "dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> List[Dict]:", "= [] # takes quantity from user logger = logging.getLogger(__name__)", "first match will be picked\"\"\" # return {\"rating\": [self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\": [self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]}", "== \"back2\" or value.lower() == \"back\": return {\"rating\": None, \"feedback_text\":", "util from actionserver.controllers.faqs.faq import FAQ from actionserver.controllers.constants.orderForm import * 
import", "List, Union from rasa_sdk import Action, Tracker from rasa_sdk.executor import", "@staticmethod def required_slots(tracker): if tracker.get_slot(\"rating\"): return [\"rating\", \"feedback_text\"] else: return", "or value.lower() == \"back\": return {\"rating\": INVALID_VALUE, \"feedback_text\": INVALID_VALUE} #", "\"entities\": [] }) return [ greet_utter, FollowupAction(name=\"utter_greet\"), query_utter, FollowupAction(name=\"utter_query_type\") ]", "\"feedback_text\": None} def validate_feedback_text( self, value: Text, dispatcher: CollectingDispatcher, tracker:", "# \"entities\": [] # }), FollowupAction(name=\"utter_greet\")] def query_back(dispatcher): dispatcher.utter_message(\"Going back", "be picked\"\"\" # return {\"rating\": [self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\": [self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]} return {\"rating\": [self.from_entity(\"rating\"),", "UserUttered(text=\"/query_init\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"query_init\"}, \"entities\": [] })", "REQUESTED_SLOT from rasa.core.slots import Slot import pandas as pd import", "\"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Your Response :\\n Rating :'{rate}' star \\n Feedback:", "tracker.get_slot(\"rating\") != INVALID_VALUE: with open(\"./actionserver/customer_queries.json\", \"r\") as queriesRef: rating =", "# \"intent\": {\"confidence\": 1.0, \"name\": \"get_started\"}, # \"entities\": [] #", "entity - intent: value pairs - a whole message or", "with open(\"./actionserver/customer_queries.json\", \"w\") as queriesRefWrite: json.dump(feedbackObj, queriesRefWrite, indent=4) dispatcher.utter_message(\"Your Response", "quantity from user logger = logging.getLogger(__name__) with open(r'./actionserver/custom_payload.json') as f:", "to map required slots to - an extracted entity -", "Response :\\n Rating :'{rate}' star \\n Feedback: '{feedbk}' \\n Submitted!Thank", 
"pairs - a whole message or a list of them,", "option.\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Please enter valid option\"", "Dict[Text, Any], ) -> Dict[Text, Any]: ratings = ['1', '2',", "Action, Tracker from rasa_sdk.executor import CollectingDispatcher from rasa_sdk.forms import FormAction", ":'{rate}' star \\n Feedback: '{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback))", "quant_list = [] # takes quantity from user logger =", "}); return [UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\":", "List[Dict]]]: \"\"\"A dictionary to map required slots to - an", "dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Please enter valid option\" });", "Shopping\" }); return [UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"greet\"},", "queriesRef: rating = tracker.get_slot(\"rating\") feedback = tracker.get_slot(\"feedback_text\") feedbackObj = json.load(queriesRef)", "= value.strip() if value == \"back1\" or value.lower() == \"back\":", "value.lower() == \"back\": return {\"rating\": INVALID_VALUE, \"feedback_text\": INVALID_VALUE} # 1-5", "FAQ from actionserver.controllers.constants.orderForm import * import logging from actionserver.utils.utilities import", "value, \"feedback_text\": None} else: dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message =", "actionserver.utils import utilities as util from actionserver.controllers.faqs.faq import FAQ from", "from rasa.core.slots import Slot import pandas as pd import json", "Dict[Text, Any]: if value == \"back2\" or value.lower() == \"back\":", "\\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback)) dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\",", "if tracker.get_slot(\"rating\") != 
INVALID_VALUE: with open(\"./actionserver/customer_queries.json\", \"r\") as queriesRef: rating", "Feedback: '{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback)) dispatcher.utter_message(json_message = {", "import AllSlotsReset, SlotSet from rasa.core.constants import REQUESTED_SLOT from rasa.core.slots import", "from rasa_sdk import Action, Tracker from rasa_sdk.executor import CollectingDispatcher from", "match will be picked\"\"\" # return {\"rating\": [self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\": [self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]} return", "\"back2\" or value.lower() == \"back\": return {\"rating\": None, \"feedback_text\": None}", "with open(r'./actionserver/custom_payload.json') as f: frendy_product_menu = json.load(f) # Code snippet", "tracker.get_slot(\"rating\"): return [\"rating\", \"feedback_text\"] else: return [\"rating\"] def slot_mappings(self) ->", "frendy_product_menu = json.load(f) # Code snippet for global back #", "\"entities\": [] }), FollowupAction(name=\"utter_greet\")] class FeedbackForm(FormAction): def name(self): return \"feedback_form\"", "[SlotSet(\"rating\", None), SlotSet(\"feedback_text\", None)] li.extend(query_back(dispatcher)) return li return [SlotSet(\"rating\", None),", "slots to - an extracted entity - intent: value pairs", "from typing import Any, Text, Dict, List, Union from rasa_sdk", "CollectingDispatcher from rasa_sdk.forms import FormAction from rasa_sdk.events import UserUtteranceReverted, UserUttered,", "from rasa_sdk.events import UserUtteranceReverted, UserUttered, FollowupAction # from rasa_core.events import", "dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Your Response :\\n Rating :'{rate}'", "FollowupAction # from rasa_core.events import (UserUtteranceReverted, UserUttered, # ActionExecuted, Event)", "Code snippet for global back # return [Restarted(), 
UserUttered(text=\"/get_started\", parse_data={", "'5'] try: value = value.strip() if value == \"back1\" or", "feedbk=feedback)) dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Your Response :\\n Rating", "= json.load(f) # Code snippet for global back # return", "as queriesRef: rating = tracker.get_slot(\"rating\") feedback = tracker.get_slot(\"feedback_text\") feedbackObj =", "snippet for global back # return [Restarted(), UserUttered(text=\"/get_started\", parse_data={ #", "-> List[Dict]: if tracker.get_slot(\"rating\") != INVALID_VALUE: with open(\"./actionserver/customer_queries.json\", \"r\") as", "\"query_init\"}, \"entities\": [] }) return [ greet_utter, FollowupAction(name=\"utter_greet\"), query_utter, FollowupAction(name=\"utter_query_type\")", "\"\"\"A dictionary to map required slots to - an extracted", "You!\".format( rate=rating, feedbk=feedback) }); else: dispatcher.utter_message(\"Feedback form closed\") li =", "Tracker from rasa_sdk.executor import CollectingDispatcher from rasa_sdk.forms import FormAction from", "= { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Your Response :\\n Rating :'{rate}' star", "import (UserUtteranceReverted, UserUttered, # ActionExecuted, Event) from rasa_sdk.events import AllSlotsReset,", "\"payload\":\"text\", \"text\":\"Welcome back to Frendy Shopping\" }); return [UserUttered(text=\"/greet\", parse_data={", "indent=4) dispatcher.utter_message(\"Your Response :\\n Rating :'{rate}' star \\n Feedback: '{feedbk}'", "import FormAction from rasa_sdk.events import UserUtteranceReverted, UserUttered, FollowupAction # from", "1.0, \"name\": \"greet\"}, \"entities\": [] }) query_utter = UserUttered(text=\"/query_init\", parse_data={", "import Any, Text, Dict, List, Union from rasa_sdk import Action,", "# 1-5 it integer otherwise rating:None elif value in ratings:", "{\"rating\": [self.from_entity(\"rating\"), self.from_text()], \"feedback_text\": 
[self.from_text(), self.from_entity(entity=\"navigation\")]} def validate_rating( self, value:", "\"feedback_text\"] else: return [\"rating\"] def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]:", "return {\"rating\": [self.from_entity(\"rating\"), self.from_text()], \"feedback_text\": [self.from_text(), self.from_entity(entity=\"navigation\")]} def validate_rating( self,", "\"feedback_text\": None} else: return {\"feedback_text\": value} def submit( self, dispatcher:", "rasa_sdk.forms import FormAction from rasa_sdk.events import UserUtteranceReverted, UserUttered, FollowupAction #", "greet_back(dispatcher): dispatcher.utter_message(\"Going back!!!\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Welcome back", "= json.load(queriesRef) feedbackObj[\"feedback\"].append({ \"createdOn\": util.timestamp(), \"complaint_area\": rating, \"complaint\": feedback })", "# return {\"rating\": [self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\": [self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]} return {\"rating\": [self.from_entity(\"rating\"), self.from_text()], \"feedback_text\":", "# }), FollowupAction(name=\"utter_greet\")] def query_back(dispatcher): dispatcher.utter_message(\"Going back to queries!!!\") greet_utter", "1.0, \"name\": \"get_started\"}, # \"entities\": [] # }), FollowupAction(name=\"utter_greet\")] def", "{ \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Your Response :\\n Rating :'{rate}' star \\n", "}); else: dispatcher.utter_message(\"Feedback form closed\") li = [SlotSet(\"rating\", None), SlotSet(\"feedback_text\",", "import json from actionserver.utils import utilities as util from actionserver.controllers.faqs.faq", "back!!!\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Welcome back to Frendy", "try: value = value.strip() if value == \"back1\" or 
value.lower()", "Any, Text, Dict, List, Union from rasa_sdk import Action, Tracker", "SlotSet from rasa.core.constants import REQUESTED_SLOT from rasa.core.slots import Slot import", "value = value.strip() if value == \"back1\" or value.lower() ==", "star \\n Feedback: '{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback) });", "or value.lower() == \"back\": return {\"rating\": None, \"feedback_text\": None} else:", "{\"feedback_text\": value} def submit( self, dispatcher: CollectingDispatcher, tracker: Tracker, domain:", "import logging from actionserver.utils.utilities import INVALID_VALUE product_list = [] quant_list", "picked\"\"\" # return {\"rating\": [self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\": [self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]} return {\"rating\": [self.from_entity(\"rating\"), self.from_text()],", "FollowupAction(name=\"utter_greet\"), query_utter, FollowupAction(name=\"utter_query_type\") ] def greet_back(dispatcher): dispatcher.utter_message(\"Going back!!!\") dispatcher.utter_message(json_message =", "list of them, where a first match will be picked\"\"\"", "json.load(f) # Code snippet for global back # return [Restarted(),", "= logging.getLogger(__name__) with open(r'./actionserver/custom_payload.json') as f: frendy_product_menu = json.load(f) #", "[\"rating\", \"feedback_text\"] else: return [\"rating\"] def slot_mappings(self) -> Dict[Text, Union[Dict,", "else: return [\"rating\"] def slot_mappings(self) -> Dict[Text, Union[Dict, List[Dict]]]: \"\"\"A", "Dict[Text, Any]: ratings = ['1', '2', '3', '4', '5'] try:", "in ratings: return {\"rating\": value, \"feedback_text\": None} else: dispatcher.utter_message(\"Please enter", "as queriesRefWrite: json.dump(feedbackObj, queriesRefWrite, indent=4) dispatcher.utter_message(\"Your Response :\\n Rating :'{rate}'", "import utilities as util from actionserver.controllers.faqs.faq import FAQ from 
actionserver.controllers.constants.orderForm", "rating = tracker.get_slot(\"rating\") feedback = tracker.get_slot(\"feedback_text\") feedbackObj = json.load(queriesRef) feedbackObj[\"feedback\"].append({", "\"complaint\": feedback }) with open(\"./actionserver/customer_queries.json\", \"w\") as queriesRefWrite: json.dump(feedbackObj, queriesRefWrite,", "value == \"back2\" or value.lower() == \"back\": return {\"rating\": None,", "'{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback)) dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\",", "submit( self, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], )", "UserUttered, FollowupAction # from rasa_core.events import (UserUtteranceReverted, UserUttered, # ActionExecuted,", "enter valid option.\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Please enter", "}) with open(\"./actionserver/customer_queries.json\", \"w\") as queriesRefWrite: json.dump(feedbackObj, queriesRefWrite, indent=4) dispatcher.utter_message(\"Your", "takes quantity from user logger = logging.getLogger(__name__) with open(r'./actionserver/custom_payload.json') as", "\"name\": \"greet\"}, \"entities\": [] }) query_utter = UserUttered(text=\"/query_init\", parse_data={ \"intent\":", "validate_feedback_text( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text,", "}); return {\"rating\": None, \"feedback_text\": None} def validate_feedback_text( self, value:", "self.from_entity(entity=\"navigation\")]} def validate_rating( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker,", "rasa_sdk.events import AllSlotsReset, SlotSet from rasa.core.constants import REQUESTED_SLOT from rasa.core.slots", "None} def validate_feedback_text( self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker,", "INVALID_VALUE product_list = [] quant_list = [] # takes quantity", 
"\"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Welcome back to Frendy Shopping\" }); return [UserUttered(text=\"/greet\",", "a first match will be picked\"\"\" # return {\"rating\": [self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\":", "self, value: Text, dispatcher: CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any],", "\"text\":\"Your Response :\\n Rating :'{rate}' star \\n Feedback: '{feedbk}' \\n", "# Code snippet for global back # return [Restarted(), UserUttered(text=\"/get_started\",", "query_utter = UserUttered(text=\"/query_init\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"query_init\"}, \"entities\":", "option\" }); return {\"rating\": None, \"feedback_text\": None} except Exception as", "where a first match will be picked\"\"\" # return {\"rating\":", "\"name\": \"get_started\"}, # \"entities\": [] # }), FollowupAction(name=\"utter_greet\")] def query_back(dispatcher):", "Union from rasa_sdk import Action, Tracker from rasa_sdk.executor import CollectingDispatcher", "else: return {\"feedback_text\": value} def submit( self, dispatcher: CollectingDispatcher, tracker:", "import Action, Tracker from rasa_sdk.executor import CollectingDispatcher from rasa_sdk.forms import", "-> Dict[Text, Union[Dict, List[Dict]]]: \"\"\"A dictionary to map required slots", "{ \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Please enter valid option\" }); return {\"rating\":", "rate=rating, feedbk=feedback) }); else: dispatcher.utter_message(\"Feedback form closed\") li = [SlotSet(\"rating\",", "FollowupAction(name=\"utter_query_type\") ] def greet_back(dispatcher): dispatcher.utter_message(\"Going back!!!\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\",", "import FAQ from actionserver.controllers.constants.orderForm import * import logging from actionserver.utils.utilities", "Slot import pandas as pd import json from actionserver.utils import", "domain: Dict[Text, 
Any], ) -> List[Dict]: if tracker.get_slot(\"rating\") != INVALID_VALUE:", "\"w\") as queriesRefWrite: json.dump(feedbackObj, queriesRefWrite, indent=4) dispatcher.utter_message(\"Your Response :\\n Rating", "[] # takes quantity from user logger = logging.getLogger(__name__) with", "whole message or a list of them, where a first", "as f: frendy_product_menu = json.load(f) # Code snippet for global", "CollectingDispatcher, tracker: Tracker, domain: Dict[Text, Any], ) -> Dict[Text, Any]:", "return [ greet_utter, FollowupAction(name=\"utter_greet\"), query_utter, FollowupAction(name=\"utter_query_type\") ] def greet_back(dispatcher): dispatcher.utter_message(\"Going", "valid option.\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Please enter valid", "rasa_sdk.events import UserUtteranceReverted, UserUttered, FollowupAction # from rasa_core.events import (UserUtteranceReverted,", "open(\"./actionserver/customer_queries.json\", \"r\") as queriesRef: rating = tracker.get_slot(\"rating\") feedback = tracker.get_slot(\"feedback_text\")", "None} else: dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\",", "}), FollowupAction(name=\"utter_greet\")] class FeedbackForm(FormAction): def name(self): return \"feedback_form\" @staticmethod def", "None} except Exception as e: print(e) dispatcher.utter_message(\"Please enter valid option.\")", "class FeedbackForm(FormAction): def name(self): return \"feedback_form\" @staticmethod def required_slots(tracker): if", "\"createdOn\": util.timestamp(), \"complaint_area\": rating, \"complaint\": feedback }) with open(\"./actionserver/customer_queries.json\", \"w\")", "\"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Please enter valid option\" }); return {\"rating\": None,", "\"get_started\"}, # \"entities\": [] # }), FollowupAction(name=\"utter_greet\")] def query_back(dispatcher): 
dispatcher.utter_message(\"Going", "FollowupAction(name=\"utter_greet\")] def query_back(dispatcher): dispatcher.utter_message(\"Going back to queries!!!\") greet_utter = UserUttered(text=\"/greet\",", "value in ratings: return {\"rating\": value, \"feedback_text\": None} else: dispatcher.utter_message(\"Please", "# return [Restarted(), UserUttered(text=\"/get_started\", parse_data={ # \"intent\": {\"confidence\": 1.0, \"name\":", "{\"rating\": None, \"feedback_text\": None} except Exception as e: print(e) dispatcher.utter_message(\"Please", "logging.getLogger(__name__) with open(r'./actionserver/custom_payload.json') as f: frendy_product_menu = json.load(f) # Code", "except Exception as e: print(e) dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message", "== \"back\": return {\"rating\": None, \"feedback_text\": None} else: return {\"feedback_text\":", "e: print(e) dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\",", "tracker.get_slot(\"rating\") feedback = tracker.get_slot(\"feedback_text\") feedbackObj = json.load(queriesRef) feedbackObj[\"feedback\"].append({ \"createdOn\": util.timestamp(),", "AllSlotsReset, SlotSet from rasa.core.constants import REQUESTED_SLOT from rasa.core.slots import Slot", "a whole message or a list of them, where a", "return {\"rating\": INVALID_VALUE, \"feedback_text\": INVALID_VALUE} # 1-5 it integer otherwise", "dictionary to map required slots to - an extracted entity", "Union[Dict, List[Dict]]]: \"\"\"A dictionary to map required slots to -", "from rasa_sdk.forms import FormAction from rasa_sdk.events import UserUtteranceReverted, UserUttered, FollowupAction", "from actionserver.controllers.constants.orderForm import * import logging from actionserver.utils.utilities import INVALID_VALUE", "map required slots to - an extracted entity - intent:", "else: dispatcher.utter_message(\"Feedback form closed\") li = 
[SlotSet(\"rating\", None), SlotSet(\"feedback_text\", None)]", "\\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback) }); else: dispatcher.utter_message(\"Feedback form closed\")", "\"intent\": {\"confidence\": 1.0, \"name\": \"greet\"}, \"entities\": [] }) query_utter =", "return {\"rating\": [self.from_entity(\"rating\"),self.from_entity(\"any_thing\")],\"feedback_text\": [self.from_entity(entity=\"any_thing\"),self.from_entity(entity=\"navigation\")]} return {\"rating\": [self.from_entity(\"rating\"), self.from_text()], \"feedback_text\": [self.from_text(),", "ratings: return {\"rating\": value, \"feedback_text\": None} else: dispatcher.utter_message(\"Please enter valid", "li = [SlotSet(\"rating\", None), SlotSet(\"feedback_text\", None)] li.extend(query_back(dispatcher)) return li return", "\\n Feedback: '{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback) }); else:", "= UserUttered(text=\"/query_init\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\": \"query_init\"}, \"entities\": []", "Frendy Shopping\" }); return [UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\": 1.0, \"name\":", "print(e) dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\",", "Any], ) -> Dict[Text, Any]: if value == \"back2\" or", "typing import Any, Text, Dict, List, Union from rasa_sdk import", "form closed\") li = [SlotSet(\"rating\", None), SlotSet(\"feedback_text\", None)] li.extend(query_back(dispatcher)) return", "Tracker, domain: Dict[Text, Any], ) -> List[Dict]: if tracker.get_slot(\"rating\") !=", "from rasa_core.events import (UserUtteranceReverted, UserUttered, # ActionExecuted, Event) from rasa_sdk.events", "from user logger = logging.getLogger(__name__) with open(r'./actionserver/custom_payload.json') as f: frendy_product_menu", "rasa.core.constants import REQUESTED_SLOT from rasa.core.slots import Slot import pandas 
as", "parse_data={ # \"intent\": {\"confidence\": 1.0, \"name\": \"get_started\"}, # \"entities\": []", "Any]: if value == \"back2\" or value.lower() == \"back\": return", "Exception as e: print(e) dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message =", "dispatcher.utter_message(\"Going back to queries!!!\") greet_utter = UserUttered(text=\"/greet\", parse_data={ \"intent\": {\"confidence\":", "You!\".format( rate=rating, feedbk=feedback)) dispatcher.utter_message(json_message = { \"platform\":\"whatsapp\", \"payload\":\"text\", \"text\":\"Your Response", "from rasa_sdk.events import AllSlotsReset, SlotSet from rasa.core.constants import REQUESTED_SLOT from", ":'{rate}' star \\n Feedback: '{feedbk}' \\n Submitted!Thank You!\".format( rate=rating, feedbk=feedback)", "as pd import json from actionserver.utils import utilities as util", "name(self): return \"feedback_form\" @staticmethod def required_slots(tracker): if tracker.get_slot(\"rating\"): return [\"rating\",", "\"greet\"}, \"entities\": [] }) query_utter = UserUttered(text=\"/query_init\", parse_data={ \"intent\": {\"confidence\":", "\"intent\": {\"confidence\": 1.0, \"name\": \"get_started\"}, # \"entities\": [] # }),", "- intent: value pairs - a whole message or a", "{\"rating\": value, \"feedback_text\": None} else: dispatcher.utter_message(\"Please enter valid option.\") dispatcher.utter_message(json_message" ]
[ "\"p3\"]: series = dataframe[sensor] scatter = go.Scatter(x = dataframe.index, y", "series = dataframe[sensor] scatter = go.Scatter(x = dataframe.index, y =", "title = \"Pressure timeseries\", template = self.template ) ) return", "for a timeseries ''' def generate_timeseries_plot(self, dataframe): pressure_plots = []", "= \"plotly_dark\" ''' Generate a plot for a timeseries '''", "''' def generate_timeseries_plot(self, dataframe): pressure_plots = [] for sensor in", "y = series, name = f\"Sensor {sensor}\", opacity = 0.4)", "plotly.graph_objs as go class GraphsHelper: template = \"plotly_dark\" ''' Generate", "sensor in [\"p1\", \"p2\", \"p3\"]: series = dataframe[sensor] scatter =", "go class GraphsHelper: template = \"plotly_dark\" ''' Generate a plot", "[\"p1\", \"p2\", \"p3\"]: series = dataframe[sensor] scatter = go.Scatter(x =", "= f\"Sensor {sensor}\", opacity = 0.4) pressure_plots.append(scatter) pressure_figure = go.Figure(", "for sensor in [\"p1\", \"p2\", \"p3\"]: series = dataframe[sensor] scatter", "[] for sensor in [\"p1\", \"p2\", \"p3\"]: series = dataframe[sensor]", "pressure_plots.append(scatter) pressure_figure = go.Figure( data = pressure_plots, layout = go.Layout(", "Generate a plot for a timeseries ''' def generate_timeseries_plot(self, dataframe):", "as go class GraphsHelper: template = \"plotly_dark\" ''' Generate a", "layout = go.Layout( title = \"Pressure timeseries\", template = self.template", "= \"Pressure timeseries\", template = self.template ) ) return pressure_figure", "''' Generate a plot for a timeseries ''' def generate_timeseries_plot(self,", "opacity = 0.4) pressure_plots.append(scatter) pressure_figure = go.Figure( data = pressure_plots,", "= go.Figure( data = pressure_plots, layout = go.Layout( title =", "timeseries ''' def generate_timeseries_plot(self, dataframe): pressure_plots = [] for sensor", "scatter = go.Scatter(x = dataframe.index, y = series, name =", "a plot for a timeseries ''' def 
generate_timeseries_plot(self, dataframe): pressure_plots", "= 0.4) pressure_plots.append(scatter) pressure_figure = go.Figure( data = pressure_plots, layout", "a timeseries ''' def generate_timeseries_plot(self, dataframe): pressure_plots = [] for", "pressure_plots = [] for sensor in [\"p1\", \"p2\", \"p3\"]: series", "dataframe.index, y = series, name = f\"Sensor {sensor}\", opacity =", "f\"Sensor {sensor}\", opacity = 0.4) pressure_plots.append(scatter) pressure_figure = go.Figure( data", "go.Figure( data = pressure_plots, layout = go.Layout( title = \"Pressure", "pressure_figure = go.Figure( data = pressure_plots, layout = go.Layout( title", "data = pressure_plots, layout = go.Layout( title = \"Pressure timeseries\",", "= pressure_plots, layout = go.Layout( title = \"Pressure timeseries\", template", "go.Layout( title = \"Pressure timeseries\", template = self.template ) )", "= go.Layout( title = \"Pressure timeseries\", template = self.template )", "pressure_plots, layout = go.Layout( title = \"Pressure timeseries\", template =", "plot for a timeseries ''' def generate_timeseries_plot(self, dataframe): pressure_plots =", "class GraphsHelper: template = \"plotly_dark\" ''' Generate a plot for", "GraphsHelper: template = \"plotly_dark\" ''' Generate a plot for a", "\"p2\", \"p3\"]: series = dataframe[sensor] scatter = go.Scatter(x = dataframe.index,", "name = f\"Sensor {sensor}\", opacity = 0.4) pressure_plots.append(scatter) pressure_figure =", "series, name = f\"Sensor {sensor}\", opacity = 0.4) pressure_plots.append(scatter) pressure_figure", "= [] for sensor in [\"p1\", \"p2\", \"p3\"]: series =", "= dataframe.index, y = series, name = f\"Sensor {sensor}\", opacity", "{sensor}\", opacity = 0.4) pressure_plots.append(scatter) pressure_figure = go.Figure( data =", "in [\"p1\", \"p2\", \"p3\"]: series = dataframe[sensor] scatter = go.Scatter(x", "\"plotly_dark\" ''' Generate a plot for a timeseries ''' def", "generate_timeseries_plot(self, dataframe): 
pressure_plots = [] for sensor in [\"p1\", \"p2\",", "dataframe[sensor] scatter = go.Scatter(x = dataframe.index, y = series, name", "0.4) pressure_plots.append(scatter) pressure_figure = go.Figure( data = pressure_plots, layout =", "def generate_timeseries_plot(self, dataframe): pressure_plots = [] for sensor in [\"p1\",", "template = \"plotly_dark\" ''' Generate a plot for a timeseries", "dataframe): pressure_plots = [] for sensor in [\"p1\", \"p2\", \"p3\"]:", "import plotly.graph_objs as go class GraphsHelper: template = \"plotly_dark\" '''", "= go.Scatter(x = dataframe.index, y = series, name = f\"Sensor", "go.Scatter(x = dataframe.index, y = series, name = f\"Sensor {sensor}\",", "= series, name = f\"Sensor {sensor}\", opacity = 0.4) pressure_plots.append(scatter)", "= dataframe[sensor] scatter = go.Scatter(x = dataframe.index, y = series," ]
[ "kind: str :param location: Required. Resource Location. :type location: str", "Key Vault secret. Possible values include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded', 'CertificateOrderFailed',", "must be populated in order to send to Azure. :ivar", ".resource import Resource class AppServiceCertificateResource(Resource): \"\"\"Key Vault container ARM resource", "server, and will be ignored when sending a request. All", "'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type':", "the code is # regenerated. # -------------------------------------------------------------------------- from .resource import", "Vault container ARM resource for a certificate that is purchased", "{'readonly': True}, 'location': {'required': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly':", "{'readonly': True}, } _attribute_map = { 'id': {'key': 'id', 'type':", "and will be ignored when sending a request. All required", "resource for a certificate that is purchased through Azure. Variables", "the MIT License. See License.txt in the project root for", "id: Resource Id. :vartype id: str :ivar name: Resource Name.", "kind: Kind of resource. :type kind: str :param location: Required.", "dict[str, str] :param key_vault_id: Key Vault resource Id. :type key_vault_id:", "key_vault_id: str :param key_vault_secret_name: Key Vault secret name. :type key_vault_secret_name:", "{'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind':", "'Unknown' :vartype provisioning_state: str or ~azure.mgmt.web.models.KeyVaultSecretStatus \"\"\" _validation = {", "'provisioning_state': {'readonly': True}, } _attribute_map = { 'id': {'key': 'id',", "str :param kind: Kind of resource. 
:type kind: str :param", "True}, 'provisioning_state': {'readonly': True}, } _attribute_map = { 'id': {'key':", "'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'KeyVaultSecretStatus'}, } def __init__(self, **kwargs):", "Resource Location. :type location: str :ivar type: Resource type. :vartype", "'provisioning_state': {'key': 'properties.provisioningState', 'type': 'KeyVaultSecretStatus'}, } def __init__(self, **kwargs): super(AppServiceCertificateResource,", ":type key_vault_id: str :param key_vault_secret_name: Key Vault secret name. :type", "'OperationNotPermittedOnKeyVault', 'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist', 'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey', 'Unknown' :vartype provisioning_state: str", "'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'},", "the project root for # license information. # # Code", "sending a request. All required parameters must be populated in", "{'key': 'type', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'key_vault_id':", "a certificate that is purchased through Azure. Variables are only", "only populated by the server, and will be ignored when", "Generator. # Changes may cause incorrect behavior and will be", ":param key_vault_secret_name: Key Vault secret name. :type key_vault_secret_name: str :ivar", "{'readonly': True}, 'name': {'readonly': True}, 'location': {'required': True}, 'type': {'readonly':", "tags: dict[str, str] :param key_vault_id: Key Vault resource Id. :type", "of resource. :type kind: str :param location: Required. Resource Location.", "'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'},", "a request. All required parameters must be populated in order", "-------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. #", "be populated in order to send to Azure. :ivar id:", "Id. 
:type key_vault_id: str :param key_vault_secret_name: Key Vault secret name.", "license information. # # Code generated by Microsoft (R) AutoRest", "Variables are only populated by the server, and will be", "True}, 'name': {'readonly': True}, 'location': {'required': True}, 'type': {'readonly': True},", "by Microsoft (R) AutoRest Code Generator. # Changes may cause", "provisioning_state: Status of the Key Vault secret. Possible values include:", "} _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name':", "Changes may cause incorrect behavior and will be lost if", "behavior and will be lost if the code is #", "str :ivar provisioning_state: Status of the Key Vault secret. Possible", "'kind': {'key': 'kind', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'},", "'{str}'}, 'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'}, 'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type':", "or ~azure.mgmt.web.models.KeyVaultSecretStatus \"\"\" _validation = { 'id': {'readonly': True}, 'name':", "-------------------------------------------------------------------------- from .resource import Resource class AppServiceCertificateResource(Resource): \"\"\"Key Vault container", "Id. :vartype id: str :ivar name: Resource Name. :vartype name:", "incorrect behavior and will be lost if the code is", "type: Resource type. :vartype type: str :param tags: Resource tags.", "that is purchased through Azure. Variables are only populated by", "Possible values include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded', 'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault', 'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist',", "'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'KeyVaultSecretStatus'},", "'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'tags': {'key':", "MIT License. 
See License.txt in the project root for #", "AutoRest Code Generator. # Changes may cause incorrect behavior and", "'type': '{str}'}, 'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'}, 'key_vault_secret_name': {'key': 'properties.keyVaultSecretName',", "Name. :vartype name: str :param kind: Kind of resource. :type", "location: Required. Resource Location. :type location: str :ivar type: Resource", "may cause incorrect behavior and will be lost if the", "__init__(self, **kwargs): super(AppServiceCertificateResource, self).__init__(**kwargs) self.key_vault_id = kwargs.get('key_vault_id', None) self.key_vault_secret_name =", "'type': 'KeyVaultSecretStatus'}, } def __init__(self, **kwargs): super(AppServiceCertificateResource, self).__init__(**kwargs) self.key_vault_id =", "tags: Resource tags. :type tags: dict[str, str] :param key_vault_id: Key", "'Initialized', 'WaitingOnCertificateOrder', 'Succeeded', 'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault', 'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist', 'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey',", "project root for # license information. # # Code generated", "Azure. Variables are only populated by the server, and will", "See License.txt in the project root for # license information.", "resource. :type kind: str :param location: Required. Resource Location. :type", "def __init__(self, **kwargs): super(AppServiceCertificateResource, self).__init__(**kwargs) self.key_vault_id = kwargs.get('key_vault_id', None) self.key_vault_secret_name", ":ivar id: Resource Id. 
:vartype id: str :ivar name: Resource", "'ExternalPrivateKey', 'Unknown' :vartype provisioning_state: str or ~azure.mgmt.web.models.KeyVaultSecretStatus \"\"\" _validation =", "'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind',", "{'key': 'properties.provisioningState', 'type': 'KeyVaultSecretStatus'}, } def __init__(self, **kwargs): super(AppServiceCertificateResource, self).__init__(**kwargs)", "# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed", ":ivar type: Resource type. :vartype type: str :param tags: Resource", "'location': {'required': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, }", "'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'tags': {'key': 'tags',", "self).__init__(**kwargs) self.key_vault_id = kwargs.get('key_vault_id', None) self.key_vault_secret_name = kwargs.get('key_vault_secret_name', None) self.provisioning_state", "key_vault_secret_name: Key Vault secret name. :type key_vault_secret_name: str :ivar provisioning_state:", ":vartype id: str :ivar name: Resource Name. :vartype name: str", "populated by the server, and will be ignored when sending", "str :param location: Required. Resource Location. :type location: str :ivar", "type. :vartype type: str :param tags: Resource tags. :type tags:", "generated by Microsoft (R) AutoRest Code Generator. # Changes may", "= kwargs.get('key_vault_id', None) self.key_vault_secret_name = kwargs.get('key_vault_secret_name', None) self.provisioning_state = None", "in the project root for # license information. # #", "key_vault_id: Key Vault resource Id. :type key_vault_id: str :param key_vault_secret_name:", "Key Vault secret name. :type key_vault_secret_name: str :ivar provisioning_state: Status", "'id': {'readonly': True}, 'name': {'readonly': True}, 'location': {'required': True}, 'type':", "reserved. # Licensed under the MIT License. See License.txt in", "through Azure. 
Variables are only populated by the server, and", "Resource Id. :vartype id: str :ivar name: Resource Name. :vartype", "'name': {'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'},", "are only populated by the server, and will be ignored", "class AppServiceCertificateResource(Resource): \"\"\"Key Vault container ARM resource for a certificate", "be ignored when sending a request. All required parameters must", ":ivar provisioning_state: Status of the Key Vault secret. Possible values", "# # Code generated by Microsoft (R) AutoRest Code Generator.", "\"\"\"Key Vault container ARM resource for a certificate that is", "= { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name',", "_validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'location':", "Corporation. All rights reserved. # Licensed under the MIT License.", "# Licensed under the MIT License. See License.txt in the", "# -------------------------------------------------------------------------- from .resource import Resource class AppServiceCertificateResource(Resource): \"\"\"Key Vault", "secret. Possible values include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded', 'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault', 'AzureServiceUnauthorizedToAccessKeyVault',", "'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'KeyVaultSecretStatus'}, } def __init__(self,", "'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist', 'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey', 'Unknown' :vartype provisioning_state: str or", "id: str :ivar name: Resource Name. 
:vartype name: str :param", "'type': 'str'}, 'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState',", "by the server, and will be ignored when sending a", "{'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'tags':", "# Changes may cause incorrect behavior and will be lost", "Kind of resource. :type kind: str :param location: Required. Resource", ":type kind: str :param location: Required. Resource Location. :type location:", "{ 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type':", "key_vault_secret_name: str :ivar provisioning_state: Status of the Key Vault secret.", "Azure. :ivar id: Resource Id. :vartype id: str :ivar name:", "order to send to Azure. :ivar id: Resource Id. :vartype", "the Key Vault secret. Possible values include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded',", "{'key': 'tags', 'type': '{str}'}, 'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'}, 'key_vault_secret_name':", "provisioning_state: str or ~azure.mgmt.web.models.KeyVaultSecretStatus \"\"\" _validation = { 'id': {'readonly':", "# -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved.", ":ivar name: Resource Name. :vartype name: str :param kind: Kind", "'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type':", "'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'key_vault_id': {'key': 'properties.keyVaultId', 'type':", "Code generated by Microsoft (R) AutoRest Code Generator. # Changes", "information. # # Code generated by Microsoft (R) AutoRest Code", "coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights", "name: str :param kind: Kind of resource. 
:type kind: str", "'KeyVaultDoesNotExist', 'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey', 'Unknown' :vartype provisioning_state: str or ~azure.mgmt.web.models.KeyVaultSecretStatus", "# regenerated. # -------------------------------------------------------------------------- from .resource import Resource class AppServiceCertificateResource(Resource):", "'kind', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type': {'key':", "True}, 'location': {'required': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True},", "super(AppServiceCertificateResource, self).__init__(**kwargs) self.key_vault_id = kwargs.get('key_vault_id', None) self.key_vault_secret_name = kwargs.get('key_vault_secret_name', None)", "License. See License.txt in the project root for # license", "send to Azure. :ivar id: Resource Id. :vartype id: str", "'name': {'readonly': True}, 'location': {'required': True}, 'type': {'readonly': True}, 'provisioning_state':", "'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, } _attribute_map = {", "'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'location': {'key':", "for a certificate that is purchased through Azure. Variables are", "Resource type. :vartype type: str :param tags: Resource tags. 
:type", "'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey', 'Unknown' :vartype provisioning_state: str or ~azure.mgmt.web.models.KeyVaultSecretStatus \"\"\"", "'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'}, 'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'},", "'str'}, 'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type':", "'properties.keyVaultSecretName', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'KeyVaultSecretStatus'}, } def", ":vartype type: str :param tags: Resource tags. :type tags: dict[str,", "will be lost if the code is # regenerated. #", "tags. :type tags: dict[str, str] :param key_vault_id: Key Vault resource", "required parameters must be populated in order to send to", "Vault resource Id. :type key_vault_id: str :param key_vault_secret_name: Key Vault", "str :param key_vault_secret_name: Key Vault secret name. :type key_vault_secret_name: str", "include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded', 'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault', 'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist', 'KeyVaultSecretDoesNotExist', 'UnknownError',", "lost if the code is # regenerated. # -------------------------------------------------------------------------- from", "ignored when sending a request. All required parameters must be", "\"\"\" _validation = { 'id': {'readonly': True}, 'name': {'readonly': True},", "Resource Name. :vartype name: str :param kind: Kind of resource.", ":param location: Required. Resource Location. :type location: str :ivar type:", "'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type',", "the server, and will be ignored when sending a request.", "type: str :param tags: Resource tags. 
:type tags: dict[str, str]", "container ARM resource for a certificate that is purchased through", "values include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded', 'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault', 'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist', 'KeyVaultSecretDoesNotExist',", "**kwargs): super(AppServiceCertificateResource, self).__init__(**kwargs) self.key_vault_id = kwargs.get('key_vault_id', None) self.key_vault_secret_name = kwargs.get('key_vault_secret_name',", "and will be lost if the code is # regenerated.", "code is # regenerated. # -------------------------------------------------------------------------- from .resource import Resource", "in order to send to Azure. :ivar id: Resource Id.", "request. All required parameters must be populated in order to", ":type key_vault_secret_name: str :ivar provisioning_state: Status of the Key Vault", "AppServiceCertificateResource(Resource): \"\"\"Key Vault container ARM resource for a certificate that", "{'key': 'kind', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type':", "str] :param key_vault_id: Key Vault resource Id. :type key_vault_id: str", "name. :type key_vault_secret_name: str :ivar provisioning_state: Status of the Key", ":vartype provisioning_state: str or ~azure.mgmt.web.models.KeyVaultSecretStatus \"\"\" _validation = { 'id':", "All required parameters must be populated in order to send", "'Succeeded', 'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault', 'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist', 'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey', 'Unknown' :vartype", "'UnknownError', 'ExternalPrivateKey', 'Unknown' :vartype provisioning_state: str or ~azure.mgmt.web.models.KeyVaultSecretStatus \"\"\" _validation", "is purchased through Azure. 
Variables are only populated by the", "'str'}, 'type': {'key': 'type', 'type': 'str'}, 'tags': {'key': 'tags', 'type':", "ARM resource for a certificate that is purchased through Azure.", "under the MIT License. See License.txt in the project root", "regenerated. # -------------------------------------------------------------------------- from .resource import Resource class AppServiceCertificateResource(Resource): \"\"\"Key", "Key Vault resource Id. :type key_vault_id: str :param key_vault_secret_name: Key", "'KeyVaultSecretStatus'}, } def __init__(self, **kwargs): super(AppServiceCertificateResource, self).__init__(**kwargs) self.key_vault_id = kwargs.get('key_vault_id',", "cause incorrect behavior and will be lost if the code", "Resource class AppServiceCertificateResource(Resource): \"\"\"Key Vault container ARM resource for a", "Required. Resource Location. :type location: str :ivar type: Resource type.", "(c) Microsoft Corporation. All rights reserved. # Licensed under the", "All rights reserved. # Licensed under the MIT License. See", "Status of the Key Vault secret. Possible values include: 'Initialized',", "will be ignored when sending a request. All required parameters", "'tags': {'key': 'tags', 'type': '{str}'}, 'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'},", "is # regenerated. # -------------------------------------------------------------------------- from .resource import Resource class", "Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect", ":vartype name: str :param kind: Kind of resource. :type kind:", "location: str :ivar type: Resource type. :vartype type: str :param", "populated in order to send to Azure. 
:ivar id: Resource", "True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, } _attribute_map =", "'WaitingOnCertificateOrder', 'Succeeded', 'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault', 'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist', 'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey', 'Unknown'", "{'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'location':", "'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'location': {'key': 'location', 'type':", "'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'key_vault_id': {'key': 'properties.keyVaultId',", ":type location: str :ivar type: Resource type. :vartype type: str", ":param tags: Resource tags. :type tags: dict[str, str] :param key_vault_id:", "{'required': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, } _attribute_map", "{'key': 'properties.keyVaultSecretName', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'KeyVaultSecretStatus'}, }", "self.key_vault_id = kwargs.get('key_vault_id', None) self.key_vault_secret_name = kwargs.get('key_vault_secret_name', None) self.provisioning_state =", "Location. :type location: str :ivar type: Resource type. :vartype type:", "from .resource import Resource class AppServiceCertificateResource(Resource): \"\"\"Key Vault container ARM", "when sending a request. All required parameters must be populated", "parameters must be populated in order to send to Azure.", "str or ~azure.mgmt.web.models.KeyVaultSecretStatus \"\"\" _validation = { 'id': {'readonly': True},", "'type': {'key': 'type', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'},", "to Azure. :ivar id: Resource Id. :vartype id: str :ivar", "'properties.keyVaultId', 'type': 'str'}, 'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'}, 'provisioning_state': {'key':", "str :ivar type: Resource type. 
:vartype type: str :param tags:", "root for # license information. # # Code generated by", "'type', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'key_vault_id': {'key':", "Microsoft Corporation. All rights reserved. # Licensed under the MIT", "Licensed under the MIT License. See License.txt in the project", "resource Id. :type key_vault_id: str :param key_vault_secret_name: Key Vault secret", "'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'location': {'key': 'location',", "{ 'id': {'readonly': True}, 'name': {'readonly': True}, 'location': {'required': True},", "# Code generated by Microsoft (R) AutoRest Code Generator. #", "purchased through Azure. Variables are only populated by the server,", "rights reserved. # Licensed under the MIT License. See License.txt", "to send to Azure. :ivar id: Resource Id. :vartype id:", "{'readonly': True}, 'provisioning_state': {'readonly': True}, } _attribute_map = { 'id':", "# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All", "Vault secret name. :type key_vault_secret_name: str :ivar provisioning_state: Status of", "{'key': 'properties.keyVaultId', 'type': 'str'}, 'key_vault_secret_name': {'key': 'properties.keyVaultSecretName', 'type': 'str'}, 'provisioning_state':", "License.txt in the project root for # license information. #", "# license information. # # Code generated by Microsoft (R)", "Resource tags. 
:type tags: dict[str, str] :param key_vault_id: Key Vault", "~azure.mgmt.web.models.KeyVaultSecretStatus \"\"\" _validation = { 'id': {'readonly': True}, 'name': {'readonly':", "'tags', 'type': '{str}'}, 'key_vault_id': {'key': 'properties.keyVaultId', 'type': 'str'}, 'key_vault_secret_name': {'key':", "'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind': {'key':", "} def __init__(self, **kwargs): super(AppServiceCertificateResource, self).__init__(**kwargs) self.key_vault_id = kwargs.get('key_vault_id', None)", "Code Generator. # Changes may cause incorrect behavior and will", "import Resource class AppServiceCertificateResource(Resource): \"\"\"Key Vault container ARM resource for", "be lost if the code is # regenerated. # --------------------------------------------------------------------------", "'properties.provisioningState', 'type': 'KeyVaultSecretStatus'}, } def __init__(self, **kwargs): super(AppServiceCertificateResource, self).__init__(**kwargs) self.key_vault_id", "if the code is # regenerated. # -------------------------------------------------------------------------- from .resource", "Vault secret. Possible values include: 'Initialized', 'WaitingOnCertificateOrder', 'Succeeded', 'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault',", "for # license information. # # Code generated by Microsoft", "_attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key':", "True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'},", "str :ivar name: Resource Name. :vartype name: str :param kind:", "(R) AutoRest Code Generator. # Changes may cause incorrect behavior", "str :param tags: Resource tags. :type tags: dict[str, str] :param", ":param key_vault_id: Key Vault resource Id. :type key_vault_id: str :param", "certificate that is purchased through Azure. Variables are only populated", ":type tags: dict[str, str] :param key_vault_id: Key Vault resource Id.", "of the Key Vault secret. 
Possible values include: 'Initialized', 'WaitingOnCertificateOrder',", "= { 'id': {'readonly': True}, 'name': {'readonly': True}, 'location': {'required':", "secret name. :type key_vault_secret_name: str :ivar provisioning_state: Status of the", "'CertificateOrderFailed', 'OperationNotPermittedOnKeyVault', 'AzureServiceUnauthorizedToAccessKeyVault', 'KeyVaultDoesNotExist', 'KeyVaultSecretDoesNotExist', 'UnknownError', 'ExternalPrivateKey', 'Unknown' :vartype provisioning_state:", "Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under", "<reponame>JonathanGailliez/azure-sdk-for-python # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation.", ":param kind: Kind of resource. :type kind: str :param location:", "name: Resource Name. :vartype name: str :param kind: Kind of" ]
[ "class RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID = 0xf7760f51 SUBCLASS_OF_ID = 0x9b704a5a def __init__(self,", "{ '_': 'RemoveStickerFromSetRequest', 'sticker': None if self.sticker is None else", "Instance of StickerSet. \"\"\" self.stickerset = stickerset # type: TypeInputStickerSet", "\"\"\" :param TypeInputStickerSet stickerset: :param TypeInputStickerSetItem sticker: :returns messages.StickerSet: Instance", "__init__(self, user_id, title, short_name, stickers, masks=None): \"\"\" :param TypeInputUser user_id:", "reader.tgread_object() _stickers.append(_x) return cls(user_id=_user_id, title=_title, short_name=_short_name, stickers=_stickers, masks=_masks) class RemoveStickerFromSetRequest(TLRequest):", "StickerSet. \"\"\" self.sticker = sticker # type: TypeInputDocument self.position =", "if self.sticker is None else self.sticker.to_dict() } def __bytes__(self): return", "List, Union, TYPE_CHECKING import os import struct if TYPE_CHECKING: from", "cls(stickerset=_stickerset, sticker=_sticker) class ChangeStickerPositionRequest(TLRequest): CONSTRUCTOR_ID = 0xffb6d4ca SUBCLASS_OF_ID = 0x9b704a5a", "title=_title, short_name=_short_name, stickers=_stickers, masks=_masks) class RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID = 0xf7760f51 SUBCLASS_OF_ID", "\"\"\" self.sticker = sticker # type: TypeInputDocument self.position = position", "import TypeInputStickerSet, TypeInputUser, TypeInputStickerSetItem, TypeInputDocument class AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID = 0x8653febe", "Instance of StickerSet. 
\"\"\" self.sticker = sticker # type: TypeInputDocument", "else self.user_id.to_dict(), 'title': self.title, 'short_name': self.short_name, 'stickers': [] if self.stickers", "_title = reader.tgread_string() _short_name = reader.tgread_string() reader.read_int() _stickers = []", "import Optional, List, Union, TYPE_CHECKING import os import struct if", "else self.stickerset.to_dict(), 'sticker': None if self.sticker is None else self.sticker.to_dict()", "False else 1)), bytes(self.user_id), self.serialize_bytes(self.title), self.serialize_bytes(self.short_name), b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x) for x", "TypeInputUser, TypeInputStickerSetItem, TypeInputDocument class AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID = 0x8653febe SUBCLASS_OF_ID =", "return { '_': 'AddStickerToSetRequest', 'stickerset': None if self.stickerset is None", "is None else self.user_id.to_dict(), 'title': self.title, 'short_name': self.short_name, 'stickers': []", "type: TypeInputDocument self.position = position # type: int def to_dict(self):", "(0 if self.masks is None or self.masks is False else", "_ in range(reader.read_int()): _x = reader.tgread_object() _stickers.append(_x) return cls(user_id=_user_id, title=_title,", ":returns messages.StickerSet: Instance of StickerSet. 
\"\"\" self.user_id = user_id #", "self.sticker.to_dict() } def __bytes__(self): return b''.join(( b'\\xbe\\xfeS\\x86', bytes(self.stickerset), bytes(self.sticker), ))", "} def __bytes__(self): return b''.join(( b'\\xbe\\xfeS\\x86', bytes(self.stickerset), bytes(self.sticker), )) @classmethod", "def from_reader(cls, reader): _stickerset = reader.tgread_object() _sticker = reader.tgread_object() return", "is None else self.stickerset.to_dict(), 'sticker': None if self.sticker is None", "if self.stickers is None else [None if x is None", "def from_reader(cls, reader): _sticker = reader.tgread_object() _position = reader.read_int() return", "= masks # type: Optional[bool] async def resolve(self, client, utils):", "title # type: str self.short_name = short_name # type: str", "x in self.stickers], 'masks': self.masks } def __bytes__(self): return b''.join((", "= stickerset # type: TypeInputStickerSet self.sticker = sticker # type:", "self.title, 'short_name': self.short_name, 'stickers': [] if self.stickers is None else", ")) @classmethod def from_reader(cls, reader): _sticker = reader.tgread_object() return cls(sticker=_sticker)", "= 0x9b704a5a def __init__(self, sticker, position): \"\"\" :param TypeInputDocument sticker:", "[None if x is None else x.to_dict() for x in", "self.sticker is None else self.sticker.to_dict() } def __bytes__(self): return b''.join((", "__init__(self, stickerset, sticker): \"\"\" :param TypeInputStickerSet stickerset: :param TypeInputStickerSetItem sticker:", "is False else 1)), bytes(self.user_id), self.serialize_bytes(self.title), self.serialize_bytes(self.short_name), b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x) for", "self.stickerset = stickerset # type: TypeInputStickerSet self.sticker = sticker #", "if TYPE_CHECKING: from ...tl.types import TypeInputStickerSet, TypeInputUser, TypeInputStickerSetItem, TypeInputDocument class", "else self.sticker.to_dict(), 'position': self.position } def 
__bytes__(self): return b''.join(( b'\\xca\\xd4\\xb6\\xff',", "reader.read_int() _stickers = [] for _ in range(reader.read_int()): _x =", "will be ERASED\"\"\" from ...tl.tlobject import TLRequest from typing import", "Instance of StickerSet. \"\"\" self.user_id = user_id # type: TypeInputUser", "position): \"\"\" :param TypeInputDocument sticker: :param int position: :returns messages.StickerSet:", "if x is None else x.to_dict() for x in self.stickers],", "client.get_input_entity(self.user_id)) def to_dict(self): return { '_': 'CreateStickerSetRequest', 'user_id': None if", "int position: :returns messages.StickerSet: Instance of StickerSet. \"\"\" self.sticker =", "b''.join(( b'Q\\x0fv\\xf7', bytes(self.sticker), )) @classmethod def from_reader(cls, reader): _sticker =", "reader): _stickerset = reader.tgread_object() _sticker = reader.tgread_object() return cls(stickerset=_stickerset, sticker=_sticker)", "0xffb6d4ca SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker, position): \"\"\" :param", "sticker: :returns messages.StickerSet: Instance of StickerSet. \"\"\" self.sticker = sticker", ")) @classmethod def from_reader(cls, reader): _stickerset = reader.tgread_object() _sticker =", "CONSTRUCTOR_ID = 0xffb6d4ca SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker, position):", "user_id: :param str title: :param str short_name: :param List[TypeInputStickerSetItem] stickers:", "{ '_': 'CreateStickerSetRequest', 'user_id': None if self.user_id is None else", "from_reader(cls, reader): flags = reader.read_int() _masks = bool(flags & 1)", "sticker # type: TypeInputDocument def to_dict(self): return { '_': 'RemoveStickerFromSetRequest',", "sticker: :returns messages.StickerSet: Instance of StickerSet. 
\"\"\" self.stickerset = stickerset", "bytes(self.stickerset), bytes(self.sticker), )) @classmethod def from_reader(cls, reader): _stickerset = reader.tgread_object()", "bytes(self.sticker), )) @classmethod def from_reader(cls, reader): _stickerset = reader.tgread_object() _sticker", "return cls(user_id=_user_id, title=_title, short_name=_short_name, stickers=_stickers, masks=_masks) class RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID =", "= sticker # type: TypeInputStickerSetItem def to_dict(self): return { '_':", "type: TypeInputStickerSetItem def to_dict(self): return { '_': 'AddStickerToSetRequest', 'stickerset': None", "sticker): \"\"\" :param TypeInputDocument sticker: :returns messages.StickerSet: Instance of StickerSet.", "of StickerSet. \"\"\" self.user_id = user_id # type: TypeInputUser self.title", ":returns messages.StickerSet: Instance of StickerSet. \"\"\" self.sticker = sticker #", "{ '_': 'AddStickerToSetRequest', 'stickerset': None if self.stickerset is None else", "stickers # type: List[TypeInputStickerSetItem] self.masks = masks # type: Optional[bool]", "TypeInputUser self.title = title # type: str self.short_name = short_name", "= sticker # type: TypeInputDocument self.position = position # type:", "_stickers.append(_x) return cls(user_id=_user_id, title=_title, short_name=_short_name, stickers=_stickers, masks=_masks) class RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID", "reader.tgread_string() reader.read_int() _stickers = [] for _ in range(reader.read_int()): _x", "if self.sticker is None else self.sticker.to_dict(), 'position': self.position } def", ":param TypeInputUser user_id: :param str title: :param str short_name: :param", "utils.get_input_user(await client.get_input_entity(self.user_id)) def to_dict(self): return { '_': 'CreateStickerSetRequest', 'user_id': None", "CONSTRUCTOR_ID = 0x8653febe SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, stickerset, sticker):", "str short_name: :param List[TypeInputStickerSetItem] 
stickers: :param Optional[bool] masks: :returns messages.StickerSet:", "self.stickers is None else [None if x is None else", "= 0xf7760f51 SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker): \"\"\" :param", "Union, TYPE_CHECKING import os import struct if TYPE_CHECKING: from ...tl.types", "TypeInputStickerSet, TypeInputUser, TypeInputStickerSetItem, TypeInputDocument class AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID = 0x8653febe SUBCLASS_OF_ID", "0x9bd86e6a SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, user_id, title, short_name, stickers,", "ERASED\"\"\" from ...tl.tlobject import TLRequest from typing import Optional, List,", "self.stickerset is None else self.stickerset.to_dict(), 'sticker': None if self.sticker is", "0x8653febe SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, stickerset, sticker): \"\"\" :param", "short_name, stickers, masks=None): \"\"\" :param TypeInputUser user_id: :param str title:", "'_': 'ChangeStickerPositionRequest', 'sticker': None if self.sticker is None else self.sticker.to_dict(),", "to_dict(self): return { '_': 'AddStickerToSetRequest', 'stickerset': None if self.stickerset is", "# type: TypeInputDocument self.position = position # type: int def", "List[TypeInputStickerSetItem] self.masks = masks # type: Optional[bool] async def resolve(self,", ":param int position: :returns messages.StickerSet: Instance of StickerSet. 
\"\"\" self.sticker", "struct.pack('<I', (0 if self.masks is None or self.masks is False", "else self.sticker.to_dict() } def __bytes__(self): return b''.join(( b'\\xbe\\xfeS\\x86', bytes(self.stickerset), bytes(self.sticker),", "} def __bytes__(self): return b''.join(( b'Q\\x0fv\\xf7', bytes(self.sticker), )) @classmethod def", "__bytes__(self): return b''.join(( b'Q\\x0fv\\xf7', bytes(self.sticker), )) @classmethod def from_reader(cls, reader):", "= 0x9b704a5a def __init__(self, stickerset, sticker): \"\"\" :param TypeInputStickerSet stickerset:", "short_name # type: str self.stickers = stickers # type: List[TypeInputStickerSetItem]", "import TLRequest from typing import Optional, List, Union, TYPE_CHECKING import", "async def resolve(self, client, utils): self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id)) def", "messages.StickerSet: Instance of StickerSet. \"\"\" self.sticker = sticker # type:", "be ERASED\"\"\" from ...tl.tlobject import TLRequest from typing import Optional,", "\"\"\" self.stickerset = stickerset # type: TypeInputStickerSet self.sticker = sticker", "reader): flags = reader.read_int() _masks = bool(flags & 1) _user_id", "None if self.user_id is None else self.user_id.to_dict(), 'title': self.title, 'short_name':", ":param TypeInputDocument sticker: :returns messages.StickerSet: Instance of StickerSet. 
\"\"\" self.sticker", "reader.tgread_string() _short_name = reader.tgread_string() reader.read_int() _stickers = [] for _", "self.sticker.to_dict() } def __bytes__(self): return b''.join(( b'Q\\x0fv\\xf7', bytes(self.sticker), )) @classmethod", "b'Q\\x0fv\\xf7', bytes(self.sticker), )) @classmethod def from_reader(cls, reader): _sticker = reader.tgread_object()", "None else [None if x is None else x.to_dict() for", "return cls(stickerset=_stickerset, sticker=_sticker) class ChangeStickerPositionRequest(TLRequest): CONSTRUCTOR_ID = 0xffb6d4ca SUBCLASS_OF_ID =", "for x in self.stickers], 'masks': self.masks } def __bytes__(self): return", "= user_id # type: TypeInputUser self.title = title # type:", "b''.join(( b'\\xca\\xd4\\xb6\\xff', bytes(self.sticker), struct.pack('<i', self.position), )) @classmethod def from_reader(cls, reader):", "__init__(self, sticker): \"\"\" :param TypeInputDocument sticker: :returns messages.StickerSet: Instance of", "'CreateStickerSetRequest', 'user_id': None if self.user_id is None else self.user_id.to_dict(), 'title':", "# type: TypeInputUser self.title = title # type: str self.short_name", "__bytes__(self): return b''.join(( b'\\xca\\xd4\\xb6\\xff', bytes(self.sticker), struct.pack('<i', self.position), )) @classmethod def", "of StickerSet. 
\"\"\" self.sticker = sticker # type: TypeInputDocument self.position", "type: Optional[bool] async def resolve(self, client, utils): self.user_id = utils.get_input_user(await", "\"\"\" self.user_id = user_id # type: TypeInputUser self.title = title", "for x in self.stickers), )) @classmethod def from_reader(cls, reader): flags", "SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, stickerset, sticker): \"\"\" :param TypeInputStickerSet", "def to_dict(self): return { '_': 'CreateStickerSetRequest', 'user_id': None if self.user_id", "} def __bytes__(self): return b''.join(( b'\\xca\\xd4\\xb6\\xff', bytes(self.sticker), struct.pack('<i', self.position), ))", "return { '_': 'CreateStickerSetRequest', 'user_id': None if self.user_id is None", "def from_reader(cls, reader): flags = reader.read_int() _masks = bool(flags &", "None or self.masks is False else 1)), bytes(self.user_id), self.serialize_bytes(self.title), self.serialize_bytes(self.short_name),", "user_id # type: TypeInputUser self.title = title # type: str", "bytes(self.sticker), struct.pack('<i', self.position), )) @classmethod def from_reader(cls, reader): _sticker =", "\"\"\" :param TypeInputDocument sticker: :returns messages.StickerSet: Instance of StickerSet. \"\"\"", "class ChangeStickerPositionRequest(TLRequest): CONSTRUCTOR_ID = 0xffb6d4ca SUBCLASS_OF_ID = 0x9b704a5a def __init__(self,", "of StickerSet. \"\"\" self.sticker = sticker # type: TypeInputDocument def", "TLObjects' generator. 
All changes will be ERASED\"\"\" from ...tl.tlobject import", "def resolve(self, client, utils): self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id)) def to_dict(self):", "def __init__(self, stickerset, sticker): \"\"\" :param TypeInputStickerSet stickerset: :param TypeInputStickerSetItem", "is None else self.sticker.to_dict() } def __bytes__(self): return b''.join(( b'\\xbe\\xfeS\\x86',", "return b''.join(( b'Q\\x0fv\\xf7', bytes(self.sticker), )) @classmethod def from_reader(cls, reader): _sticker", "= 0xffb6d4ca SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker, position): \"\"\"", "self.user_id = user_id # type: TypeInputUser self.title = title #", "or self.masks is False else 1)), bytes(self.user_id), self.serialize_bytes(self.title), self.serialize_bytes(self.short_name), b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i',", "0xf7760f51 SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker): \"\"\" :param TypeInputDocument", "CONSTRUCTOR_ID = 0x9bd86e6a SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, user_id, title,", "bool(flags & 1) _user_id = reader.tgread_object() _title = reader.tgread_string() _short_name", "import os import struct if TYPE_CHECKING: from ...tl.types import TypeInputStickerSet,", "None if self.stickerset is None else self.stickerset.to_dict(), 'sticker': None if", "{ '_': 'ChangeStickerPositionRequest', 'sticker': None if self.sticker is None else", "} def __bytes__(self): return b''.join(( b'jn\\xd8\\x9b', struct.pack('<I', (0 if self.masks", "return cls(sticker=_sticker, position=_position) class CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID = 0x9bd86e6a SUBCLASS_OF_ID =", "def __bytes__(self): return b''.join(( b'\\xca\\xd4\\xb6\\xff', bytes(self.sticker), struct.pack('<i', self.position), )) @classmethod", "self.position), )) @classmethod def from_reader(cls, reader): _sticker = reader.tgread_object() _position", "\"\"\" self.sticker = sticker # type: TypeInputDocument def to_dict(self): return", 
"to_dict(self): return { '_': 'ChangeStickerPositionRequest', 'sticker': None if self.sticker is", "None else self.sticker.to_dict() } def __bytes__(self): return b''.join(( b'\\xbe\\xfeS\\x86', bytes(self.stickerset),", "# type: str self.short_name = short_name # type: str self.stickers", "bytes(self.user_id), self.serialize_bytes(self.title), self.serialize_bytes(self.short_name), b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x) for x in self.stickers), ))", "self.serialize_bytes(self.short_name), b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x) for x in self.stickers), )) @classmethod def", "masks: :returns messages.StickerSet: Instance of StickerSet. \"\"\" self.user_id = user_id", "type: TypeInputDocument def to_dict(self): return { '_': 'RemoveStickerFromSetRequest', 'sticker': None", ":param Optional[bool] masks: :returns messages.StickerSet: Instance of StickerSet. \"\"\" self.user_id", "0x9b704a5a def __init__(self, stickerset, sticker): \"\"\" :param TypeInputStickerSet stickerset: :param", "_user_id = reader.tgread_object() _title = reader.tgread_string() _short_name = reader.tgread_string() reader.read_int()", "else [None if x is None else x.to_dict() for x", "self.serialize_bytes(self.title), self.serialize_bytes(self.short_name), b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x) for x in self.stickers), )) @classmethod", "masks=None): \"\"\" :param TypeInputUser user_id: :param str title: :param str", "'user_id': None if self.user_id is None else self.user_id.to_dict(), 'title': self.title,", "AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID = 0x8653febe SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, stickerset,", "if self.user_id is None else self.user_id.to_dict(), 'title': self.title, 'short_name': self.short_name,", "def __init__(self, user_id, title, short_name, stickers, masks=None): \"\"\" :param TypeInputUser", "stickers, masks=None): \"\"\" :param 
TypeInputUser user_id: :param str title: :param", "x in self.stickers), )) @classmethod def from_reader(cls, reader): flags =", "reader.tgread_object() _position = reader.read_int() return cls(sticker=_sticker, position=_position) class CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID", "masks=_masks) class RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID = 0xf7760f51 SUBCLASS_OF_ID = 0x9b704a5a def", "sticker # type: TypeInputStickerSetItem def to_dict(self): return { '_': 'AddStickerToSetRequest',", "None else self.stickerset.to_dict(), 'sticker': None if self.sticker is None else", "= short_name # type: str self.stickers = stickers # type:", "reader.tgread_object() return cls(stickerset=_stickerset, sticker=_sticker) class ChangeStickerPositionRequest(TLRequest): CONSTRUCTOR_ID = 0xffb6d4ca SUBCLASS_OF_ID", "type: List[TypeInputStickerSetItem] self.masks = masks # type: Optional[bool] async def", "from_reader(cls, reader): _sticker = reader.tgread_object() _position = reader.read_int() return cls(sticker=_sticker,", ":param TypeInputStickerSetItem sticker: :returns messages.StickerSet: Instance of StickerSet. 
\"\"\" self.stickerset", "reader.tgread_object() _sticker = reader.tgread_object() return cls(stickerset=_stickerset, sticker=_sticker) class ChangeStickerPositionRequest(TLRequest): CONSTRUCTOR_ID", "def to_dict(self): return { '_': 'AddStickerToSetRequest', 'stickerset': None if self.stickerset", "0x9b704a5a def __init__(self, sticker, position): \"\"\" :param TypeInputDocument sticker: :param", "sticker): \"\"\" :param TypeInputStickerSet stickerset: :param TypeInputStickerSetItem sticker: :returns messages.StickerSet:", "from typing import Optional, List, Union, TYPE_CHECKING import os import", "b'\\xca\\xd4\\xb6\\xff', bytes(self.sticker), struct.pack('<i', self.position), )) @classmethod def from_reader(cls, reader): _sticker", "for _ in range(reader.read_int()): _x = reader.tgread_object() _stickers.append(_x) return cls(user_id=_user_id,", "= reader.tgread_object() _sticker = reader.tgread_object() return cls(stickerset=_stickerset, sticker=_sticker) class ChangeStickerPositionRequest(TLRequest):", "'stickerset': None if self.stickerset is None else self.stickerset.to_dict(), 'sticker': None", "= utils.get_input_user(await client.get_input_entity(self.user_id)) def to_dict(self): return { '_': 'CreateStickerSetRequest', 'user_id':", "sticker: :param int position: :returns messages.StickerSet: Instance of StickerSet. 
\"\"\"", "return b''.join(( b'\\xbe\\xfeS\\x86', bytes(self.stickerset), bytes(self.sticker), )) @classmethod def from_reader(cls, reader):", ":param TypeInputStickerSet stickerset: :param TypeInputStickerSetItem sticker: :returns messages.StickerSet: Instance of", "RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID = 0xf7760f51 SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker):", "to_dict(self): return { '_': 'CreateStickerSetRequest', 'user_id': None if self.user_id is", "is None else self.sticker.to_dict(), 'position': self.position } def __bytes__(self): return", "x.to_dict() for x in self.stickers], 'masks': self.masks } def __bytes__(self):", "cls(sticker=_sticker, position=_position) class CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID = 0x9bd86e6a SUBCLASS_OF_ID = 0x9b704a5a", "reader.read_int() return cls(sticker=_sticker, position=_position) class CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID = 0x9bd86e6a SUBCLASS_OF_ID", "# type: str self.stickers = stickers # type: List[TypeInputStickerSetItem] self.masks", "TYPE_CHECKING: from ...tl.types import TypeInputStickerSet, TypeInputUser, TypeInputStickerSetItem, TypeInputDocument class AddStickerToSetRequest(TLRequest):", "SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker): \"\"\" :param TypeInputDocument sticker:", "= title # type: str self.short_name = short_name # type:", "_sticker = reader.tgread_object() return cls(stickerset=_stickerset, sticker=_sticker) class ChangeStickerPositionRequest(TLRequest): CONSTRUCTOR_ID =", "1) _user_id = reader.tgread_object() _title = reader.tgread_string() _short_name = reader.tgread_string()", "type: TypeInputStickerSet self.sticker = sticker # type: TypeInputStickerSetItem def to_dict(self):", "sticker, position): \"\"\" :param TypeInputDocument sticker: :param int position: :returns", "class CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID = 0x9bd86e6a SUBCLASS_OF_ID = 0x9b704a5a def __init__(self,", "= [] for _ in range(reader.read_int()): 
_x = reader.tgread_object() _stickers.append(_x)", "flags = reader.read_int() _masks = bool(flags & 1) _user_id =", "return b''.join(( b'jn\\xd8\\x9b', struct.pack('<I', (0 if self.masks is None or", "None if self.sticker is None else self.sticker.to_dict(), 'position': self.position }", "messages.StickerSet: Instance of StickerSet. \"\"\" self.stickerset = stickerset # type:", "in self.stickers), )) @classmethod def from_reader(cls, reader): flags = reader.read_int()", "user_id, title, short_name, stickers, masks=None): \"\"\" :param TypeInputUser user_id: :param", "self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id)) def to_dict(self): return { '_': 'CreateStickerSetRequest',", "\"\"\" :param TypeInputDocument sticker: :param int position: :returns messages.StickerSet: Instance", "__bytes__(self): return b''.join(( b'jn\\xd8\\x9b', struct.pack('<I', (0 if self.masks is None", "StickerSet. \"\"\" self.stickerset = stickerset # type: TypeInputStickerSet self.sticker =", "& 1) _user_id = reader.tgread_object() _title = reader.tgread_string() _short_name =", "None else x.to_dict() for x in self.stickers], 'masks': self.masks }", "CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID = 0x9bd86e6a SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, user_id,", "cls(user_id=_user_id, title=_title, short_name=_short_name, stickers=_stickers, masks=_masks) class RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID = 0xf7760f51", "self.sticker = sticker # type: TypeInputStickerSetItem def to_dict(self): return {", "b''.join(( b'\\xbe\\xfeS\\x86', bytes(self.stickerset), bytes(self.sticker), )) @classmethod def from_reader(cls, reader): _stickerset", "'AddStickerToSetRequest', 'stickerset': None if self.stickerset is None else self.stickerset.to_dict(), 'sticker':", ")) @classmethod def from_reader(cls, reader): _sticker = reader.tgread_object() _position =", "in range(reader.read_int()): _x = reader.tgread_object() _stickers.append(_x) return 
cls(user_id=_user_id, title=_title, short_name=_short_name,", "else self.sticker.to_dict() } def __bytes__(self): return b''.join(( b'Q\\x0fv\\xf7', bytes(self.sticker), ))", ":param str title: :param str short_name: :param List[TypeInputStickerSetItem] stickers: :param", "TypeInputStickerSet stickerset: :param TypeInputStickerSetItem sticker: :returns messages.StickerSet: Instance of StickerSet.", "List[TypeInputStickerSetItem] stickers: :param Optional[bool] masks: :returns messages.StickerSet: Instance of StickerSet.", "reader.read_int() _masks = bool(flags & 1) _user_id = reader.tgread_object() _title", "= reader.tgread_string() _short_name = reader.tgread_string() reader.read_int() _stickers = [] for", "self.sticker = sticker # type: TypeInputDocument self.position = position #", "generated by TLObjects' generator. All changes will be ERASED\"\"\" from", "in self.stickers], 'masks': self.masks } def __bytes__(self): return b''.join(( b'jn\\xd8\\x9b',", "'RemoveStickerFromSetRequest', 'sticker': None if self.sticker is None else self.sticker.to_dict() }", "return { '_': 'ChangeStickerPositionRequest', 'sticker': None if self.sticker is None", "= 0x9b704a5a def __init__(self, sticker): \"\"\" :param TypeInputDocument sticker: :returns", "position # type: int def to_dict(self): return { '_': 'ChangeStickerPositionRequest',", "'sticker': None if self.sticker is None else self.sticker.to_dict() } def", "self.position } def __bytes__(self): return b''.join(( b'\\xca\\xd4\\xb6\\xff', bytes(self.sticker), struct.pack('<i', self.position),", "stickerset, sticker): \"\"\" :param TypeInputStickerSet stickerset: :param TypeInputStickerSetItem sticker: :returns", "\"\"\"File generated by TLObjects' generator. 
All changes will be ERASED\"\"\"", "def to_dict(self): return { '_': 'RemoveStickerFromSetRequest', 'sticker': None if self.sticker", "= bool(flags & 1) _user_id = reader.tgread_object() _title = reader.tgread_string()", "type: TypeInputUser self.title = title # type: str self.short_name =", "TypeInputStickerSetItem def to_dict(self): return { '_': 'AddStickerToSetRequest', 'stickerset': None if", "= reader.read_int() _masks = bool(flags & 1) _user_id = reader.tgread_object()", "messages.StickerSet: Instance of StickerSet. \"\"\" self.user_id = user_id # type:", "else 1)), bytes(self.user_id), self.serialize_bytes(self.title), self.serialize_bytes(self.short_name), b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x) for x in", "_position = reader.read_int() return cls(sticker=_sticker, position=_position) class CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID =", "b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x) for x in self.stickers), )) @classmethod def from_reader(cls,", "# type: TypeInputDocument def to_dict(self): return { '_': 'RemoveStickerFromSetRequest', 'sticker':", "is None or self.masks is False else 1)), bytes(self.user_id), self.serialize_bytes(self.title),", ":param TypeInputDocument sticker: :param int position: :returns messages.StickerSet: Instance of", "= 0x9b704a5a def __init__(self, user_id, title, short_name, stickers, masks=None): \"\"\"", "= reader.tgread_object() _title = reader.tgread_string() _short_name = reader.tgread_string() reader.read_int() _stickers", "changes will be ERASED\"\"\" from ...tl.tlobject import TLRequest from typing", "range(reader.read_int()): _x = reader.tgread_object() _stickers.append(_x) return cls(user_id=_user_id, title=_title, short_name=_short_name, stickers=_stickers,", "@classmethod def from_reader(cls, reader): _stickerset = reader.tgread_object() _sticker = reader.tgread_object()", "os import struct if TYPE_CHECKING: from ...tl.types import 
TypeInputStickerSet, TypeInputUser,", "TypeInputStickerSetItem, TypeInputDocument class AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID = 0x8653febe SUBCLASS_OF_ID = 0x9b704a5a", "None else self.sticker.to_dict(), 'position': self.position } def __bytes__(self): return b''.join((", "self.position = position # type: int def to_dict(self): return {", "self.stickerset.to_dict(), 'sticker': None if self.sticker is None else self.sticker.to_dict() }", "= reader.read_int() return cls(sticker=_sticker, position=_position) class CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID = 0x9bd86e6a", "_masks = bool(flags & 1) _user_id = reader.tgread_object() _title =", "from ...tl.tlobject import TLRequest from typing import Optional, List, Union,", "'masks': self.masks } def __bytes__(self): return b''.join(( b'jn\\xd8\\x9b', struct.pack('<I', (0", "is None else [None if x is None else x.to_dict()", "is None else x.to_dict() for x in self.stickers], 'masks': self.masks", "self.sticker = sticker # type: TypeInputDocument def to_dict(self): return {", "Optional[bool] masks: :returns messages.StickerSet: Instance of StickerSet. 
\"\"\" self.user_id =", ")) @classmethod def from_reader(cls, reader): flags = reader.read_int() _masks =", "if self.masks is None or self.masks is False else 1)),", "SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker, position): \"\"\" :param TypeInputDocument", "stickerset # type: TypeInputStickerSet self.sticker = sticker # type: TypeInputStickerSetItem", "= position # type: int def to_dict(self): return { '_':", "from_reader(cls, reader): _stickerset = reader.tgread_object() _sticker = reader.tgread_object() return cls(stickerset=_stickerset,", "from ...tl.types import TypeInputStickerSet, TypeInputUser, TypeInputStickerSetItem, TypeInputDocument class AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID", "_sticker = reader.tgread_object() _position = reader.read_int() return cls(sticker=_sticker, position=_position) class", "def __init__(self, sticker): \"\"\" :param TypeInputDocument sticker: :returns messages.StickerSet: Instance", "# type: TypeInputStickerSet self.sticker = sticker # type: TypeInputStickerSetItem def", ":param List[TypeInputStickerSetItem] stickers: :param Optional[bool] masks: :returns messages.StickerSet: Instance of", "b'jn\\xd8\\x9b', struct.pack('<I', (0 if self.masks is None or self.masks is", "self.title = title # type: str self.short_name = short_name #", "def __bytes__(self): return b''.join(( b'Q\\x0fv\\xf7', bytes(self.sticker), )) @classmethod def from_reader(cls,", "reader): _sticker = reader.tgread_object() _position = reader.read_int() return cls(sticker=_sticker, position=_position)", "@classmethod def from_reader(cls, reader): flags = reader.read_int() _masks = bool(flags", "None else self.user_id.to_dict(), 'title': self.title, 'short_name': self.short_name, 'stickers': [] if", "if self.stickerset is None else self.stickerset.to_dict(), 'sticker': None if self.sticker", "TypeInputDocument class AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID = 0x8653febe SUBCLASS_OF_ID = 0x9b704a5a def", 
"len(self.stickers)),b''.join(bytes(x) for x in self.stickers), )) @classmethod def from_reader(cls, reader):", "self.short_name = short_name # type: str self.stickers = stickers #", "'_': 'CreateStickerSetRequest', 'user_id': None if self.user_id is None else self.user_id.to_dict(),", "_stickers = [] for _ in range(reader.read_int()): _x = reader.tgread_object()", "TypeInputDocument self.position = position # type: int def to_dict(self): return", "# type: Optional[bool] async def resolve(self, client, utils): self.user_id =", "struct.pack('<i', self.position), )) @classmethod def from_reader(cls, reader): _sticker = reader.tgread_object()", "self.masks } def __bytes__(self): return b''.join(( b'jn\\xd8\\x9b', struct.pack('<I', (0 if", "StickerSet. \"\"\" self.user_id = user_id # type: TypeInputUser self.title =", "is None else self.sticker.to_dict() } def __bytes__(self): return b''.join(( b'Q\\x0fv\\xf7',", "'stickers': [] if self.stickers is None else [None if x", "TypeInputDocument def to_dict(self): return { '_': 'RemoveStickerFromSetRequest', 'sticker': None if", "CONSTRUCTOR_ID = 0xf7760f51 SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker): \"\"\"", "= 0x8653febe SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, stickerset, sticker): \"\"\"", "stickers: :param Optional[bool] masks: :returns messages.StickerSet: Instance of StickerSet. \"\"\"", "resolve(self, client, utils): self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id)) def to_dict(self): return", "class AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID = 0x8653febe SUBCLASS_OF_ID = 0x9b704a5a def __init__(self,", "StickerSet. 
\"\"\" self.sticker = sticker # type: TypeInputDocument def to_dict(self):", "client, utils): self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id)) def to_dict(self): return {", "struct if TYPE_CHECKING: from ...tl.types import TypeInputStickerSet, TypeInputUser, TypeInputStickerSetItem, TypeInputDocument", "__bytes__(self): return b''.join(( b'\\xbe\\xfeS\\x86', bytes(self.stickerset), bytes(self.sticker), )) @classmethod def from_reader(cls,", "def __bytes__(self): return b''.join(( b'jn\\xd8\\x9b', struct.pack('<I', (0 if self.masks is", "self.short_name, 'stickers': [] if self.stickers is None else [None if", "= sticker # type: TypeInputDocument def to_dict(self): return { '_':", "# type: TypeInputStickerSetItem def to_dict(self): return { '_': 'AddStickerToSetRequest', 'stickerset':", "...tl.types import TypeInputStickerSet, TypeInputUser, TypeInputStickerSetItem, TypeInputDocument class AddStickerToSetRequest(TLRequest): CONSTRUCTOR_ID =", "sticker # type: TypeInputDocument self.position = position # type: int", "= reader.tgread_object() _position = reader.read_int() return cls(sticker=_sticker, position=_position) class CreateStickerSetRequest(TLRequest):", "masks # type: Optional[bool] async def resolve(self, client, utils): self.user_id", "_stickerset = reader.tgread_object() _sticker = reader.tgread_object() return cls(stickerset=_stickerset, sticker=_sticker) class", "import struct if TYPE_CHECKING: from ...tl.types import TypeInputStickerSet, TypeInputUser, TypeInputStickerSetItem,", "'short_name': self.short_name, 'stickers': [] if self.stickers is None else [None", "_x = reader.tgread_object() _stickers.append(_x) return cls(user_id=_user_id, title=_title, short_name=_short_name, stickers=_stickers, masks=_masks)", "str title: :param str short_name: :param List[TypeInputStickerSetItem] stickers: :param Optional[bool]", "type: int def to_dict(self): return { '_': 'ChangeStickerPositionRequest', 'sticker': None", "title: 
:param str short_name: :param List[TypeInputStickerSetItem] stickers: :param Optional[bool] masks:", "@classmethod def from_reader(cls, reader): _sticker = reader.tgread_object() _position = reader.read_int()", "# type: int def to_dict(self): return { '_': 'ChangeStickerPositionRequest', 'sticker':", "'position': self.position } def __bytes__(self): return b''.join(( b'\\xca\\xd4\\xb6\\xff', bytes(self.sticker), struct.pack('<i',", "SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, user_id, title, short_name, stickers, masks=None):", "'sticker': None if self.sticker is None else self.sticker.to_dict(), 'position': self.position", "reader.tgread_object() _title = reader.tgread_string() _short_name = reader.tgread_string() reader.read_int() _stickers =", "= reader.tgread_string() reader.read_int() _stickers = [] for _ in range(reader.read_int()):", "str self.stickers = stickers # type: List[TypeInputStickerSetItem] self.masks = masks", "[] if self.stickers is None else [None if x is", "int def to_dict(self): return { '_': 'ChangeStickerPositionRequest', 'sticker': None if", ":param str short_name: :param List[TypeInputStickerSetItem] stickers: :param Optional[bool] masks: :returns", "= reader.tgread_object() return cls(stickerset=_stickerset, sticker=_sticker) class ChangeStickerPositionRequest(TLRequest): CONSTRUCTOR_ID = 0xffb6d4ca", "return { '_': 'RemoveStickerFromSetRequest', 'sticker': None if self.sticker is None", "else x.to_dict() for x in self.stickers], 'masks': self.masks } def", "self.masks = masks # type: Optional[bool] async def resolve(self, client,", "b'\\xbe\\xfeS\\x86', bytes(self.stickerset), bytes(self.sticker), )) @classmethod def from_reader(cls, reader): _stickerset =", "title, short_name, stickers, masks=None): \"\"\" :param TypeInputUser user_id: :param str", "__init__(self, sticker, position): \"\"\" :param TypeInputDocument sticker: :param int position:", "self.stickers], 'masks': self.masks } def __bytes__(self): return b''.join(( 
b'jn\\xd8\\x9b', struct.pack('<I',", "Optional, List, Union, TYPE_CHECKING import os import struct if TYPE_CHECKING:", "typing import Optional, List, Union, TYPE_CHECKING import os import struct", "0x9b704a5a def __init__(self, sticker): \"\"\" :param TypeInputDocument sticker: :returns messages.StickerSet:", "of StickerSet. \"\"\" self.stickerset = stickerset # type: TypeInputStickerSet self.sticker", "'_': 'AddStickerToSetRequest', 'stickerset': None if self.stickerset is None else self.stickerset.to_dict(),", "= 0x9bd86e6a SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, user_id, title, short_name,", "def to_dict(self): return { '_': 'ChangeStickerPositionRequest', 'sticker': None if self.sticker", "= stickers # type: List[TypeInputStickerSetItem] self.masks = masks # type:", "TypeInputStickerSet self.sticker = sticker # type: TypeInputStickerSetItem def to_dict(self): return", "self.masks is None or self.masks is False else 1)), bytes(self.user_id),", "short_name: :param List[TypeInputStickerSetItem] stickers: :param Optional[bool] masks: :returns messages.StickerSet: Instance", "b''.join(( b'jn\\xd8\\x9b', struct.pack('<I', (0 if self.masks is None or self.masks", "TLRequest from typing import Optional, List, Union, TYPE_CHECKING import os", "self.stickers = stickers # type: List[TypeInputStickerSetItem] self.masks = masks #", "None else self.sticker.to_dict() } def __bytes__(self): return b''.join(( b'Q\\x0fv\\xf7', bytes(self.sticker),", "short_name=_short_name, stickers=_stickers, masks=_masks) class RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID = 0xf7760f51 SUBCLASS_OF_ID =", "_short_name = reader.tgread_string() reader.read_int() _stickers = [] for _ in", "[] for _ in range(reader.read_int()): _x = reader.tgread_object() _stickers.append(_x) return", "# type: List[TypeInputStickerSetItem] self.masks = masks # type: Optional[bool] async", "type: str self.short_name = short_name # type: str self.stickers =", "ChangeStickerPositionRequest(TLRequest): 
CONSTRUCTOR_ID = 0xffb6d4ca SUBCLASS_OF_ID = 0x9b704a5a def __init__(self, sticker,", "TypeInputDocument sticker: :param int position: :returns messages.StickerSet: Instance of StickerSet.", "self.user_id is None else self.user_id.to_dict(), 'title': self.title, 'short_name': self.short_name, 'stickers':", "self.sticker is None else self.sticker.to_dict(), 'position': self.position } def __bytes__(self):", "stickers=_stickers, masks=_masks) class RemoveStickerFromSetRequest(TLRequest): CONSTRUCTOR_ID = 0xf7760f51 SUBCLASS_OF_ID = 0x9b704a5a", "TypeInputStickerSetItem sticker: :returns messages.StickerSet: Instance of StickerSet. \"\"\" self.stickerset =", "self.sticker.to_dict(), 'position': self.position } def __bytes__(self): return b''.join(( b'\\xca\\xd4\\xb6\\xff', bytes(self.sticker),", "Optional[bool] async def resolve(self, client, utils): self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id))", "All changes will be ERASED\"\"\" from ...tl.tlobject import TLRequest from", "TYPE_CHECKING import os import struct if TYPE_CHECKING: from ...tl.types import", "by TLObjects' generator. All changes will be ERASED\"\"\" from ...tl.tlobject", "self.user_id.to_dict(), 'title': self.title, 'short_name': self.short_name, 'stickers': [] if self.stickers is", "position: :returns messages.StickerSet: Instance of StickerSet. 
\"\"\" self.sticker = sticker", "'_': 'RemoveStickerFromSetRequest', 'sticker': None if self.sticker is None else self.sticker.to_dict()", "def __bytes__(self): return b''.join(( b'\\xbe\\xfeS\\x86', bytes(self.stickerset), bytes(self.sticker), )) @classmethod def", "x is None else x.to_dict() for x in self.stickers], 'masks':", "str self.short_name = short_name # type: str self.stickers = stickers", "...tl.tlobject import TLRequest from typing import Optional, List, Union, TYPE_CHECKING", "'title': self.title, 'short_name': self.short_name, 'stickers': [] if self.stickers is None", "stickerset: :param TypeInputStickerSetItem sticker: :returns messages.StickerSet: Instance of StickerSet. \"\"\"", "self.masks is False else 1)), bytes(self.user_id), self.serialize_bytes(self.title), self.serialize_bytes(self.short_name), b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x)", "self.stickers), )) @classmethod def from_reader(cls, reader): flags = reader.read_int() _masks", "0x9b704a5a def __init__(self, user_id, title, short_name, stickers, masks=None): \"\"\" :param", "return b''.join(( b'\\xca\\xd4\\xb6\\xff', bytes(self.sticker), struct.pack('<i', self.position), )) @classmethod def from_reader(cls,", "type: str self.stickers = stickers # type: List[TypeInputStickerSetItem] self.masks =", ":returns messages.StickerSet: Instance of StickerSet. \"\"\" self.stickerset = stickerset #", "TypeInputDocument sticker: :returns messages.StickerSet: Instance of StickerSet. 
\"\"\" self.sticker =", "bytes(self.sticker), )) @classmethod def from_reader(cls, reader): _sticker = reader.tgread_object() return", "1)), bytes(self.user_id), self.serialize_bytes(self.title), self.serialize_bytes(self.short_name), b'\\x15\\xc4\\xb5\\x1c',struct.pack('<i', len(self.stickers)),b''.join(bytes(x) for x in self.stickers),", "def __init__(self, sticker, position): \"\"\" :param TypeInputDocument sticker: :param int", "TypeInputUser user_id: :param str title: :param str short_name: :param List[TypeInputStickerSetItem]", "None if self.sticker is None else self.sticker.to_dict() } def __bytes__(self):", "= reader.tgread_object() _stickers.append(_x) return cls(user_id=_user_id, title=_title, short_name=_short_name, stickers=_stickers, masks=_masks) class", "position=_position) class CreateStickerSetRequest(TLRequest): CONSTRUCTOR_ID = 0x9bd86e6a SUBCLASS_OF_ID = 0x9b704a5a def", "\"\"\" :param TypeInputUser user_id: :param str title: :param str short_name:", "'ChangeStickerPositionRequest', 'sticker': None if self.sticker is None else self.sticker.to_dict(), 'position':", "sticker=_sticker) class ChangeStickerPositionRequest(TLRequest): CONSTRUCTOR_ID = 0xffb6d4ca SUBCLASS_OF_ID = 0x9b704a5a def", "utils): self.user_id = utils.get_input_user(await client.get_input_entity(self.user_id)) def to_dict(self): return { '_':", "to_dict(self): return { '_': 'RemoveStickerFromSetRequest', 'sticker': None if self.sticker is", "generator. All changes will be ERASED\"\"\" from ...tl.tlobject import TLRequest" ]
[ ": \"VtkOutputProcess\", \"help\" : \"This process writes postprocessing files for", "from KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis import FluidChimeraAnalysis class ChimeraAnalysisBaseTest(UnitTest.TestCase): def setUp(self): # Set", "ChimeraAnalysisBaseTest(UnitTest.TestCase): def setUp(self): # Set to true to get post-process", "settings_file: settings = KratosMultiphysics.Parameters(settings_file.read()) # to check the results: add", "UnitTest import KratosMultiphysics.ChimeraApplication from KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis import FluidChimeraAnalysis class ChimeraAnalysisBaseTest(UnitTest.TestCase): def", "\"ascii\", \"output_precision\" : 3, \"output_sub_model_parts\" : false, \"write_deformed_configuration\" : true,", "\"output_precision\" : 3, \"output_sub_model_parts\" : false, \"write_deformed_configuration\" : true, \"folder_name\"", "settings block if needed if self.print_output: settings.AddValue(\"output_processes\", KratosMultiphysics.Parameters(r'''{ \"vtk_output\" :", "\"model_part_name\" : \"FluidModelPart.Parts_patch_surface\", \"output_control_type\" : \"step\", \"output_frequency\" : 1, \"file_format\"", "if self.print_output: settings.AddValue(\"output_processes\", KratosMultiphysics.Parameters(r'''{ \"vtk_output\" : [{ \"python_module\" : \"vtk_output_process\",", ": [], \"element_flags\" : [\"ACTIVE\"], \"nodal_flags\" : [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" :", "[], \"condition_data_value_variables\" : [] } },{ \"python_module\" : \"vtk_output_process\", \"kratos_module\"", "_run_test(self,settings_file_name): model = KratosMultiphysics.Model() with open(settings_file_name,'r') as settings_file: settings =", "[{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\", \"process_name\" : \"VtkOutputProcess\",", "\"nodal_flags\" : [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" : [], 
\"condition_data_value_variables\" : [] }", "setUp(self): # Set to true to get post-process files for", "true, \"folder_name\" : \"test_vtk_output\", \"save_output_files_in_folder\" : true, \"nodal_solution_step_data_variables\" : [\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"],", "\"model_part_name\" : \"FluidModelPart.Parts_background_surface\", \"output_control_type\" : \"step\", \"output_frequency\" : 1, \"file_format\"", ": \"ascii\", \"output_precision\" : 3, \"output_sub_model_parts\" : false, \"write_deformed_configuration\" :", "\"Parameters\" : { \"model_part_name\" : \"FluidModelPart.Parts_patch_surface\", \"output_control_type\" : \"step\", \"output_frequency\"", "<gh_stars>100-1000 import KratosMultiphysics import KratosMultiphysics.KratosUnittest as UnitTest import KratosMultiphysics.ChimeraApplication from", "KratosMultiphysics.KratosUnittest as UnitTest import KratosMultiphysics.ChimeraApplication from KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis import FluidChimeraAnalysis class", "\"test_vtk_output\", \"save_output_files_in_folder\" : true, \"nodal_solution_step_data_variables\" : [\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"], \"nodal_data_value_variables\" : [],", "self.print_output = False def _run_test(self,settings_file_name): model = KratosMultiphysics.Model() with open(settings_file_name,'r')", "1, \"file_format\" : \"ascii\", \"output_precision\" : 3, \"output_sub_model_parts\" : false,", ": true, \"nodal_solution_step_data_variables\" : [\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"], \"nodal_data_value_variables\" : [], \"element_flags\" :", "files for the test self.print_output = False def _run_test(self,settings_file_name): model", "} },{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\", \"process_name\" :", "settings = KratosMultiphysics.Parameters(settings_file.read()) # to check the results: add output", ": { \"model_part_name\" : 
\"FluidModelPart.Parts_patch_surface\", \"output_control_type\" : \"step\", \"output_frequency\" :", ": [{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\", \"process_name\" :", ": false, \"write_deformed_configuration\" : true, \"folder_name\" : \"test_vtk_output\", \"save_output_files_in_folder\" :", "= KratosMultiphysics.Model() with open(settings_file_name,'r') as settings_file: settings = KratosMultiphysics.Parameters(settings_file.read()) #", "model = KratosMultiphysics.Model() with open(settings_file_name,'r') as settings_file: settings = KratosMultiphysics.Parameters(settings_file.read())", "= KratosMultiphysics.Parameters(settings_file.read()) # to check the results: add output settings", "check the results: add output settings block if needed if", "{ \"model_part_name\" : \"FluidModelPart.Parts_background_surface\", \"output_control_type\" : \"step\", \"output_frequency\" : 1,", "{ \"model_part_name\" : \"FluidModelPart.Parts_patch_surface\", \"output_control_type\" : \"step\", \"output_frequency\" : 1,", "\"element_flags\" : [\"ACTIVE\"], \"nodal_flags\" : [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" : [], \"condition_data_value_variables\"", "to get post-process files for the test self.print_output = False", "files for Paraview\", \"Parameters\" : { \"model_part_name\" : \"FluidModelPart.Parts_patch_surface\", \"output_control_type\"", "\"kratos_module\" : \"KratosMultiphysics\", \"process_name\" : \"VtkOutputProcess\", \"help\" : \"This process", "\"output_control_type\" : \"step\", \"output_frequency\" : 1, \"file_format\" : \"ascii\", \"output_precision\"", "\"Parameters\" : { \"model_part_name\" : \"FluidModelPart.Parts_background_surface\", \"output_control_type\" : \"step\", \"output_frequency\"", "KratosMultiphysics.Parameters(r'''{ \"vtk_output\" : [{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\",", "import FluidChimeraAnalysis class 
ChimeraAnalysisBaseTest(UnitTest.TestCase): def setUp(self): # Set to true", "to check the results: add output settings block if needed", ": 1, \"file_format\" : \"ascii\", \"output_precision\" : 3, \"output_sub_model_parts\" :", "true to get post-process files for the test self.print_output =", "KratosMultiphysics import KratosMultiphysics.KratosUnittest as UnitTest import KratosMultiphysics.ChimeraApplication from KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis import", "post-process files for the test self.print_output = False def _run_test(self,settings_file_name):", "the test self.print_output = False def _run_test(self,settings_file_name): model = KratosMultiphysics.Model()", "import KratosMultiphysics.KratosUnittest as UnitTest import KratosMultiphysics.ChimeraApplication from KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis import FluidChimeraAnalysis", "def _run_test(self,settings_file_name): model = KratosMultiphysics.Model() with open(settings_file_name,'r') as settings_file: settings", "\"nodal_solution_step_data_variables\" : [\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"], \"nodal_data_value_variables\" : [], \"element_flags\" : [\"ACTIVE\"], \"nodal_flags\"", "self.print_output: settings.AddValue(\"output_processes\", KratosMultiphysics.Parameters(r'''{ \"vtk_output\" : [{ \"python_module\" : \"vtk_output_process\", \"kratos_module\"", "KratosMultiphysics.Parameters(settings_file.read()) # to check the results: add output settings block", "\"element_data_value_variables\" : [], \"condition_data_value_variables\" : [] } }] }''')) analysis", "with open(settings_file_name,'r') as settings_file: settings = KratosMultiphysics.Parameters(settings_file.read()) # to check", "\"file_format\" : \"ascii\", \"output_precision\" : 3, \"output_sub_model_parts\" : false, \"write_deformed_configuration\"", "needed if self.print_output: settings.AddValue(\"output_processes\", KratosMultiphysics.Parameters(r'''{ \"vtk_output\" : [{ 
\"python_module\" :", "open(settings_file_name,'r') as settings_file: settings = KratosMultiphysics.Parameters(settings_file.read()) # to check the", "[] } },{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\", \"process_name\"", "true, \"nodal_solution_step_data_variables\" : [\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"], \"nodal_data_value_variables\" : [], \"element_flags\" : [\"ACTIVE\"],", "\"This process writes postprocessing files for Paraview\", \"Parameters\" : {", "\"vtk_output\" : [{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\", \"process_name\"", "KratosMultiphysics.ChimeraApplication from KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis import FluidChimeraAnalysis class ChimeraAnalysisBaseTest(UnitTest.TestCase): def setUp(self): #", "to true to get post-process files for the test self.print_output", ": \"FluidModelPart.Parts_patch_surface\", \"output_control_type\" : \"step\", \"output_frequency\" : 1, \"file_format\" :", "},{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\", \"process_name\" : \"VtkOutputProcess\",", "Paraview\", \"Parameters\" : { \"model_part_name\" : \"FluidModelPart.Parts_background_surface\", \"output_control_type\" : \"step\",", "\"condition_data_value_variables\" : [] } }] }''')) analysis = FluidChimeraAnalysis(model,settings) analysis.Run()", "\"condition_data_value_variables\" : [] } },{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" :", "FluidChimeraAnalysis class ChimeraAnalysisBaseTest(UnitTest.TestCase): def setUp(self): # Set to true to", "if needed if self.print_output: settings.AddValue(\"output_processes\", KratosMultiphysics.Parameters(r'''{ \"vtk_output\" : [{ \"python_module\"", ": [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" : [], \"condition_data_value_variables\" : [] } },{", "for Paraview\", \"Parameters\" : { 
\"model_part_name\" : \"FluidModelPart.Parts_background_surface\", \"output_control_type\" :", "\"help\" : \"This process writes postprocessing files for Paraview\", \"Parameters\"", ": [], \"condition_data_value_variables\" : [] } }] }''')) analysis =", "False def _run_test(self,settings_file_name): model = KratosMultiphysics.Model() with open(settings_file_name,'r') as settings_file:", "\"FluidModelPart.Parts_background_surface\", \"output_control_type\" : \"step\", \"output_frequency\" : 1, \"file_format\" : \"ascii\",", "test self.print_output = False def _run_test(self,settings_file_name): model = KratosMultiphysics.Model() with", "import KratosMultiphysics import KratosMultiphysics.KratosUnittest as UnitTest import KratosMultiphysics.ChimeraApplication from KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis", ": [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" : [], \"condition_data_value_variables\" : [] } }]", "\"folder_name\" : \"test_vtk_output\", \"save_output_files_in_folder\" : true, \"nodal_solution_step_data_variables\" : [\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"], \"nodal_data_value_variables\"", "[\"ACTIVE\"], \"nodal_flags\" : [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" : [], \"condition_data_value_variables\" : []", "for Paraview\", \"Parameters\" : { \"model_part_name\" : \"FluidModelPart.Parts_patch_surface\", \"output_control_type\" :", "files for Paraview\", \"Parameters\" : { \"model_part_name\" : \"FluidModelPart.Parts_background_surface\", \"output_control_type\"", "the results: add output settings block if needed if self.print_output:", "as UnitTest import KratosMultiphysics.ChimeraApplication from KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis import FluidChimeraAnalysis class ChimeraAnalysisBaseTest(UnitTest.TestCase):", "block if needed if self.print_output: settings.AddValue(\"output_processes\", KratosMultiphysics.Parameters(r'''{ 
\"vtk_output\" : [{", "\"python_module\" : \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\", \"process_name\" : \"VtkOutputProcess\", \"help\"", "postprocessing files for Paraview\", \"Parameters\" : { \"model_part_name\" : \"FluidModelPart.Parts_background_surface\",", "process writes postprocessing files for Paraview\", \"Parameters\" : { \"model_part_name\"", "def setUp(self): # Set to true to get post-process files", "false, \"write_deformed_configuration\" : true, \"folder_name\" : \"test_vtk_output\", \"save_output_files_in_folder\" : true,", "import KratosMultiphysics.ChimeraApplication from KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis import FluidChimeraAnalysis class ChimeraAnalysisBaseTest(UnitTest.TestCase): def setUp(self):", "Paraview\", \"Parameters\" : { \"model_part_name\" : \"FluidModelPart.Parts_patch_surface\", \"output_control_type\" : \"step\",", ": [\"ACTIVE\"], \"nodal_flags\" : [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" : [], \"condition_data_value_variables\" :", "[\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" : [], \"condition_data_value_variables\" : [] } }] }'''))", "\"process_name\" : \"VtkOutputProcess\", \"help\" : \"This process writes postprocessing files", "Set to true to get post-process files for the test", ": [], \"condition_data_value_variables\" : [] } },{ \"python_module\" : \"vtk_output_process\",", "KratosMultiphysics.Model() with open(settings_file_name,'r') as settings_file: settings = KratosMultiphysics.Parameters(settings_file.read()) # to", ": { \"model_part_name\" : \"FluidModelPart.Parts_background_surface\", \"output_control_type\" : \"step\", \"output_frequency\" :", "class ChimeraAnalysisBaseTest(UnitTest.TestCase): def setUp(self): # Set to true to get", "\"output_frequency\" : 1, \"file_format\" : \"ascii\", \"output_precision\" : 3, \"output_sub_model_parts\"", "\"step\", \"output_frequency\" : 1, \"file_format\" 
: \"ascii\", \"output_precision\" : 3,", "\"KratosMultiphysics\", \"process_name\" : \"VtkOutputProcess\", \"help\" : \"This process writes postprocessing", ": true, \"folder_name\" : \"test_vtk_output\", \"save_output_files_in_folder\" : true, \"nodal_solution_step_data_variables\" :", "= False def _run_test(self,settings_file_name): model = KratosMultiphysics.Model() with open(settings_file_name,'r') as", "settings.AddValue(\"output_processes\", KratosMultiphysics.Parameters(r'''{ \"vtk_output\" : [{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" :", "writes postprocessing files for Paraview\", \"Parameters\" : { \"model_part_name\" :", "[\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"], \"nodal_data_value_variables\" : [], \"element_flags\" : [\"ACTIVE\"], \"nodal_flags\" : [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"],", ": [\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"], \"nodal_data_value_variables\" : [], \"element_flags\" : [\"ACTIVE\"], \"nodal_flags\" :", ": 3, \"output_sub_model_parts\" : false, \"write_deformed_configuration\" : true, \"folder_name\" :", "output settings block if needed if self.print_output: settings.AddValue(\"output_processes\", KratosMultiphysics.Parameters(r'''{ \"vtk_output\"", ": \"FluidModelPart.Parts_background_surface\", \"output_control_type\" : \"step\", \"output_frequency\" : 1, \"file_format\" :", ": [] } },{ \"python_module\" : \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\",", "3, \"output_sub_model_parts\" : false, \"write_deformed_configuration\" : true, \"folder_name\" : \"test_vtk_output\",", "postprocessing files for Paraview\", \"Parameters\" : { \"model_part_name\" : \"FluidModelPart.Parts_patch_surface\",", "as settings_file: settings = KratosMultiphysics.Parameters(settings_file.read()) # to check the results:", ": \"This process writes postprocessing files for Paraview\", \"Parameters\" :", ": \"test_vtk_output\", \"save_output_files_in_folder\" : true, 
\"nodal_solution_step_data_variables\" : [\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"], \"nodal_data_value_variables\" :", "for the test self.print_output = False def _run_test(self,settings_file_name): model =", "results: add output settings block if needed if self.print_output: settings.AddValue(\"output_processes\",", "# to check the results: add output settings block if", "\"VtkOutputProcess\", \"help\" : \"This process writes postprocessing files for Paraview\",", "\"output_sub_model_parts\" : false, \"write_deformed_configuration\" : true, \"folder_name\" : \"test_vtk_output\", \"save_output_files_in_folder\"", "\"save_output_files_in_folder\" : true, \"nodal_solution_step_data_variables\" : [\"VELOCITY\",\"PRESSURE\",\"DISTANCE\",\"MESH_VELOCITY\"], \"nodal_data_value_variables\" : [], \"element_flags\"", "\"FluidModelPart.Parts_patch_surface\", \"output_control_type\" : \"step\", \"output_frequency\" : 1, \"file_format\" : \"ascii\",", "# Set to true to get post-process files for the", "\"nodal_data_value_variables\" : [], \"element_flags\" : [\"ACTIVE\"], \"nodal_flags\" : [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\"", "[], \"condition_data_value_variables\" : [] } }] }''')) analysis = FluidChimeraAnalysis(model,settings)", "get post-process files for the test self.print_output = False def", ": \"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\", \"process_name\" : \"VtkOutputProcess\", \"help\" :", "add output settings block if needed if self.print_output: settings.AddValue(\"output_processes\", KratosMultiphysics.Parameters(r'''{", ": \"KratosMultiphysics\", \"process_name\" : \"VtkOutputProcess\", \"help\" : \"This process writes", ": \"step\", \"output_frequency\" : 1, \"file_format\" : \"ascii\", \"output_precision\" :", "KratosMultiphysics.ChimeraApplication.fluid_chimera_analysis import FluidChimeraAnalysis class ChimeraAnalysisBaseTest(UnitTest.TestCase): def setUp(self): # Set to", 
"\"write_deformed_configuration\" : true, \"folder_name\" : \"test_vtk_output\", \"save_output_files_in_folder\" : true, \"nodal_solution_step_data_variables\"", "\"vtk_output_process\", \"kratos_module\" : \"KratosMultiphysics\", \"process_name\" : \"VtkOutputProcess\", \"help\" : \"This", "\"element_data_value_variables\" : [], \"condition_data_value_variables\" : [] } },{ \"python_module\" :", "[\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" : [], \"condition_data_value_variables\" : [] } },{ \"python_module\"", "[], \"element_flags\" : [\"ACTIVE\"], \"nodal_flags\" : [\"VISITED\",\"CHIMERA_INTERNAL_BOUNDARY\"], \"element_data_value_variables\" : []," ]
[ "def getlist(self) -> list[dict[str, str]]: ENTRY = r\"{http://www.w3.org/2005/Atom}\" MEDIA =", "-> list[dict[str, str]]: ENTRY = r\"{http://www.w3.org/2005/Atom}\" MEDIA = r\"{http://search.yahoo.com/mrss/}\" YOUTUBE", "res: data = xml.etree.ElementTree.fromstring(res.read()) for child in data.iter(f\"{ENTRY}entry\"): result.append({ \"title\":", "for child in data.iter(f\"{ENTRY}entry\"): result.append({ \"title\": child.find(f\"{ENTRY}title\").text, \"link\": child.find(f\"{ENTRY}link\").attrib[\"href\"], \"description\":", "result.append({ \"title\": child.find(f\"{ENTRY}title\").text, \"link\": child.find(f\"{ENTRY}link\").attrib[\"href\"], \"description\": child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text, }) return result", "__init__(self, url: str) -> None: self.url = url def getlist(self)", "}) return result if __name__ == \"__main__\": import pprint pprint.pprint(RSS10Parser(\"https://www.youtube.com/feeds/videos.xml?playlist_id=PLrPVslFukDQo7l5RCqAZtKDl6tUyMAFWH\").getlist())", "child in data.iter(f\"{ENTRY}entry\"): result.append({ \"title\": child.find(f\"{ENTRY}title\").text, \"link\": child.find(f\"{ENTRY}link\").attrib[\"href\"], \"description\": child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text,", "<filename>parsers/rss10.py import urllib.request import xml.etree.ElementTree class RSS10Parser: def __init__(self, url:", "= r\"{http://www.w3.org/2005/Atom}\" MEDIA = r\"{http://search.yahoo.com/mrss/}\" YOUTUBE = r\"{http://www.youtube.com/xml/schemas/2015}\" result =", "urllib.request import xml.etree.ElementTree class RSS10Parser: def __init__(self, url: str) ->", "url: str) -> None: self.url = url def getlist(self) ->", "data = xml.etree.ElementTree.fromstring(res.read()) for child in data.iter(f\"{ENTRY}entry\"): result.append({ \"title\": child.find(f\"{ENTRY}title\").text,", "import xml.etree.ElementTree class RSS10Parser: def __init__(self, url: str) -> None:", "r\"{http://www.w3.org/2005/Atom}\" MEDIA = 
r\"{http://search.yahoo.com/mrss/}\" YOUTUBE = r\"{http://www.youtube.com/xml/schemas/2015}\" result = []", "child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text, }) return result if __name__ == \"__main__\": import pprint", "MEDIA = r\"{http://search.yahoo.com/mrss/}\" YOUTUBE = r\"{http://www.youtube.com/xml/schemas/2015}\" result = [] with", "with urllib.request.urlopen(self.url) as res: data = xml.etree.ElementTree.fromstring(res.read()) for child in", "child.find(f\"{ENTRY}link\").attrib[\"href\"], \"description\": child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text, }) return result if __name__ == \"__main__\":", "RSS10Parser: def __init__(self, url: str) -> None: self.url = url", "r\"{http://search.yahoo.com/mrss/}\" YOUTUBE = r\"{http://www.youtube.com/xml/schemas/2015}\" result = [] with urllib.request.urlopen(self.url) as", "xml.etree.ElementTree.fromstring(res.read()) for child in data.iter(f\"{ENTRY}entry\"): result.append({ \"title\": child.find(f\"{ENTRY}title\").text, \"link\": child.find(f\"{ENTRY}link\").attrib[\"href\"],", "url def getlist(self) -> list[dict[str, str]]: ENTRY = r\"{http://www.w3.org/2005/Atom}\" MEDIA", "ENTRY = r\"{http://www.w3.org/2005/Atom}\" MEDIA = r\"{http://search.yahoo.com/mrss/}\" YOUTUBE = r\"{http://www.youtube.com/xml/schemas/2015}\" result", "[] with urllib.request.urlopen(self.url) as res: data = xml.etree.ElementTree.fromstring(res.read()) for child", "def __init__(self, url: str) -> None: self.url = url def", "r\"{http://www.youtube.com/xml/schemas/2015}\" result = [] with urllib.request.urlopen(self.url) as res: data =", "data.iter(f\"{ENTRY}entry\"): result.append({ \"title\": child.find(f\"{ENTRY}title\").text, \"link\": child.find(f\"{ENTRY}link\").attrib[\"href\"], \"description\": child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text, }) return", "getlist(self) -> list[dict[str, str]]: ENTRY = r\"{http://www.w3.org/2005/Atom}\" MEDIA = 
r\"{http://search.yahoo.com/mrss/}\"", "str) -> None: self.url = url def getlist(self) -> list[dict[str,", "import urllib.request import xml.etree.ElementTree class RSS10Parser: def __init__(self, url: str)", "urllib.request.urlopen(self.url) as res: data = xml.etree.ElementTree.fromstring(res.read()) for child in data.iter(f\"{ENTRY}entry\"):", "list[dict[str, str]]: ENTRY = r\"{http://www.w3.org/2005/Atom}\" MEDIA = r\"{http://search.yahoo.com/mrss/}\" YOUTUBE =", "as res: data = xml.etree.ElementTree.fromstring(res.read()) for child in data.iter(f\"{ENTRY}entry\"): result.append({", "= xml.etree.ElementTree.fromstring(res.read()) for child in data.iter(f\"{ENTRY}entry\"): result.append({ \"title\": child.find(f\"{ENTRY}title\").text, \"link\":", "YOUTUBE = r\"{http://www.youtube.com/xml/schemas/2015}\" result = [] with urllib.request.urlopen(self.url) as res:", "= r\"{http://search.yahoo.com/mrss/}\" YOUTUBE = r\"{http://www.youtube.com/xml/schemas/2015}\" result = [] with urllib.request.urlopen(self.url)", "= r\"{http://www.youtube.com/xml/schemas/2015}\" result = [] with urllib.request.urlopen(self.url) as res: data", "child.find(f\"{ENTRY}title\").text, \"link\": child.find(f\"{ENTRY}link\").attrib[\"href\"], \"description\": child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text, }) return result if __name__", "-> None: self.url = url def getlist(self) -> list[dict[str, str]]:", "result = [] with urllib.request.urlopen(self.url) as res: data = xml.etree.ElementTree.fromstring(res.read())", "class RSS10Parser: def __init__(self, url: str) -> None: self.url =", "in data.iter(f\"{ENTRY}entry\"): result.append({ \"title\": child.find(f\"{ENTRY}title\").text, \"link\": child.find(f\"{ENTRY}link\").attrib[\"href\"], \"description\": child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text, })", "= url def getlist(self) -> list[dict[str, str]]: ENTRY = r\"{http://www.w3.org/2005/Atom}\"", "\"description\": 
child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text, }) return result if __name__ == \"__main__\": import", "str]]: ENTRY = r\"{http://www.w3.org/2005/Atom}\" MEDIA = r\"{http://search.yahoo.com/mrss/}\" YOUTUBE = r\"{http://www.youtube.com/xml/schemas/2015}\"", "\"link\": child.find(f\"{ENTRY}link\").attrib[\"href\"], \"description\": child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text, }) return result if __name__ ==", "= [] with urllib.request.urlopen(self.url) as res: data = xml.etree.ElementTree.fromstring(res.read()) for", "\"title\": child.find(f\"{ENTRY}title\").text, \"link\": child.find(f\"{ENTRY}link\").attrib[\"href\"], \"description\": child.find(f\"{MEDIA}group\").find(f\"{MEDIA}description\").text, }) return result if", "None: self.url = url def getlist(self) -> list[dict[str, str]]: ENTRY", "self.url = url def getlist(self) -> list[dict[str, str]]: ENTRY =", "xml.etree.ElementTree class RSS10Parser: def __init__(self, url: str) -> None: self.url" ]
[ "PORT = 9559 simulation = False with pepper_interface.get(IP,PORT,simulation) as pepper:", "with pepper_interface.get(IP,PORT,simulation) as pepper: time.sleep(1.0) values,time_stamp = pepper.laser.get() print print", "print \"Left\" print values[\"Left\"] print print \"Right\" print values[\"Right\"] print", "= False with pepper_interface.get(IP,PORT,simulation) as pepper: time.sleep(1.0) values,time_stamp = pepper.laser.get()", "= \"192.168.0.147\" PORT = 9559 simulation = False with pepper_interface.get(IP,PORT,simulation)", "pepper_interface IP = \"192.168.0.147\" PORT = 9559 simulation = False", "time.sleep(1.0) values,time_stamp = pepper.laser.get() print print \"Front\" print values[\"Front\"] print", "as pepper: time.sleep(1.0) values,time_stamp = pepper.laser.get() print print \"Front\" print", "pepper: time.sleep(1.0) values,time_stamp = pepper.laser.get() print print \"Front\" print values[\"Front\"]", "values,time_stamp = pepper.laser.get() print print \"Front\" print values[\"Front\"] print print", "= pepper.laser.get() print print \"Front\" print values[\"Front\"] print print \"Left\"", "9559 simulation = False with pepper_interface.get(IP,PORT,simulation) as pepper: time.sleep(1.0) values,time_stamp", "<gh_stars>0 import math,time,random import pepper_interface IP = \"192.168.0.147\" PORT =", "print print \"Front\" print values[\"Front\"] print print \"Left\" print values[\"Left\"]", "pepper.laser.get() print print \"Front\" print values[\"Front\"] print print \"Left\" print", "IP = \"192.168.0.147\" PORT = 9559 simulation = False with", "pepper_interface.get(IP,PORT,simulation) as pepper: time.sleep(1.0) values,time_stamp = pepper.laser.get() print print \"Front\"", "import math,time,random import pepper_interface IP = \"192.168.0.147\" PORT = 9559", "\"192.168.0.147\" PORT = 9559 simulation = False with pepper_interface.get(IP,PORT,simulation) as", "simulation = False with pepper_interface.get(IP,PORT,simulation) as pepper: time.sleep(1.0) 
values,time_stamp =", "print \"Front\" print values[\"Front\"] print print \"Left\" print values[\"Left\"] print", "math,time,random import pepper_interface IP = \"192.168.0.147\" PORT = 9559 simulation", "print print \"Left\" print values[\"Left\"] print print \"Right\" print values[\"Right\"]", "\"Front\" print values[\"Front\"] print print \"Left\" print values[\"Left\"] print print", "import pepper_interface IP = \"192.168.0.147\" PORT = 9559 simulation =", "= 9559 simulation = False with pepper_interface.get(IP,PORT,simulation) as pepper: time.sleep(1.0)", "False with pepper_interface.get(IP,PORT,simulation) as pepper: time.sleep(1.0) values,time_stamp = pepper.laser.get() print", "values[\"Front\"] print print \"Left\" print values[\"Left\"] print print \"Right\" print", "print values[\"Front\"] print print \"Left\" print values[\"Left\"] print print \"Right\"" ]
[ "o texto doc = ____(\"I like tree kangaroos and narwhals.\")", "classe da língua inglesa (English) e criar um objeto nlp", "# Processar o texto doc = ____(\"I like tree kangaroos", "Selecionar o primeiro token first_token = doc[____] # Imprimir o", "objeto nlp from ____ import ____ nlp = ____ #", "first_token = doc[____] # Imprimir o texto do primeito token", "um objeto nlp from ____ import ____ nlp = ____", "____(\"I like tree kangaroos and narwhals.\") # Selecionar o primeiro", "narwhals.\") # Selecionar o primeiro token first_token = doc[____] #", "= doc[____] # Imprimir o texto do primeito token print(first_token.____)", "doc = ____(\"I like tree kangaroos and narwhals.\") # Selecionar", "língua inglesa (English) e criar um objeto nlp from ____", "____ import ____ nlp = ____ # Processar o texto", "inglesa (English) e criar um objeto nlp from ____ import", "da língua inglesa (English) e criar um objeto nlp from", "# Selecionar o primeiro token first_token = doc[____] # Imprimir", "____ nlp = ____ # Processar o texto doc =", "a classe da língua inglesa (English) e criar um objeto", "criar um objeto nlp from ____ import ____ nlp =", "= ____ # Processar o texto doc = ____(\"I like", "texto doc = ____(\"I like tree kangaroos and narwhals.\") #", "like tree kangaroos and narwhals.\") # Selecionar o primeiro token", "nlp = ____ # Processar o texto doc = ____(\"I", "Importar a classe da língua inglesa (English) e criar um", "# Importar a classe da língua inglesa (English) e criar", "____ # Processar o texto doc = ____(\"I like tree", "kangaroos and narwhals.\") # Selecionar o primeiro token first_token =", "from ____ import ____ nlp = ____ # Processar o", "(English) e criar um objeto nlp from ____ import ____", "import ____ nlp = ____ # Processar o texto doc", "e criar um objeto nlp from ____ import ____ nlp", "= ____(\"I like tree kangaroos and narwhals.\") # Selecionar o", "o primeiro token first_token = doc[____] # Imprimir o texto", "and narwhals.\") # Selecionar 
o primeiro token first_token = doc[____]", "Processar o texto doc = ____(\"I like tree kangaroos and", "token first_token = doc[____] # Imprimir o texto do primeito", "nlp from ____ import ____ nlp = ____ # Processar", "tree kangaroos and narwhals.\") # Selecionar o primeiro token first_token", "primeiro token first_token = doc[____] # Imprimir o texto do" ]
[ "are on the thank you page content = resp.get_data(True) self.assertRegex(content,", "Business Survey - Retail Sales Index</') self.assertRegex(content, \"What are the", "'Please check carefully before submission.') self.assertRegex(content, '>Submit answers<') # We", "follow_redirects=False) self.assertEqual(resp.status_code, 302) block_one_url = resp.location resp = self.client.get(block_one_url, follow_redirects=False)", "resp = self.client.post(block_one_url, data=form_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) # There are", "Index</') self.assertRegex(content, \"What are the dates of the sales period", "the review answers page content = resp.get_data(True) self.assertRegex(content, '<title>Summary</title>') self.assertRegex(content,", "= { # Start Date \"period-from-day\": \"01\", \"period-from-month\": \"4\", \"period-from-year\":", "self.assertRegex(content, '>Monthly Business Survey - Retail Sales Index</') self.assertRegex(content, '>Your", "self.assertEqual(resp.status_code, 302) self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/thank-you$') resp =", "content = resp.get_data(True) self.assertRegex(content, '<title>Summary</title>') self.assertRegex(content, '>Monthly Business Survey -", "'/' + form_type_id + '/789/introduction', data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) block_one_url", "\"action[save_continue]\": \"Save &amp; Continue\" } # We submit the form", "self.client.get(block_one_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) # We are in the Questionnaire", "Survey - Retail Sales Index</') self.assertRegex(content, \"What are the dates", "{ # Start Date \"period-from-day\": \"01\", \"period-from-month\": \"4\", \"period-from-year\": \"2016\",", "the form resp = self.client.post(block_one_url, data=form_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) #", "= resp.location resp = 
self.client.get(summary_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) # We", "self.assertRegex(content, '<title>Summary</title>') self.assertRegex(content, '>Monthly Business Survey - Retail Sales Index</')", "have some guidance self.assertRegex(content, \"alcoholic drink\") # We fill in", "are the dates of the sales period you are reporting", "resp = self.client.post('/questionnaire/' + eq_id + '/' + form_type_id +", "= self.client.post('/questionnaire/' + eq_id + '/' + form_type_id + '/789/introduction',", "from tests.integration.integration_test_case import IntegrationTestCase class TestHappyPath(IntegrationTestCase): def test_happy_path_203(self): self.happy_path('0203', '1')", "Survey - Retail Sales Index') # We proceed to the", "continue<\") # check with have some guidance self.assertRegex(content, \"alcoholic drink\")", "content = resp.get_data(True) self.assertRegex(content, '<title>Introduction</title>') self.assertRegex(content, '>Start survey<') self.assertRegex(content, 'Monthly", "self.assertRegex(content, '>Monthly Business Survey - Retail Sales Index</') self.assertRegex(content, \"What", "+ form_type_id + '/789/introduction', data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) block_one_url =", "# There are no validation errors self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id", "Index') # We proceed to the questionnaire post_data = {", "= resp.location resp = self.client.get(block_one_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) # We", "IntegrationTestCase class TestHappyPath(IntegrationTestCase): def test_happy_path_203(self): self.happy_path('0203', '1') def test_happy_path_205(self): self.happy_path('0205',", "answers post_data = { \"action[submit_answers]\": \"Submit answers\" } resp =", "answers form_data = { # Start Date \"period-from-day\": \"01\", \"period-from-month\":", "'<title>Survey</title>') self.assertRegex(content, '>Monthly Business 
Survey - Retail Sales Index</') self.assertRegex(content,", "= self.client.get(resp.location, follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are on the", "self.assertRegex(content, '>Your responses<') self.assertRegex(content, 'Please check carefully before submission.') self.assertRegex(content,", "Questionnaire content = resp.get_data(True) self.assertRegex(content, '<title>Survey</title>') self.assertRegex(content, '>Monthly Business Survey", "the sales period you are reporting for?\") self.assertRegex(content, \">Save and", "\"4\", \"period-from-year\": \"2016\", # End Date \"period-to-day\": \"30\", \"period-to-month\": \"04\",", "\"01\", \"period-from-month\": \"4\", \"period-from-year\": \"2016\", # End Date \"period-to-day\": \"30\",", "Questionnaire' } resp = self.client.post('/questionnaire/' + eq_id + '/' +", "'<title>Submission Successful</title>') self.assertRegex(content, '(?s)Monthly Business Survey - Retail Sales Index.*?Monthly", "self.assertRegex(content, 'Please check carefully before submission.') self.assertRegex(content, '>Submit answers<') #", "'1') def test_happy_path_205(self): self.happy_path('0205', '1') def happy_path(self, form_type_id, eq_id): #", "'1') def happy_path(self, form_type_id, eq_id): # Get a token token", "and continue<\") # check with have some guidance self.assertRegex(content, \"alcoholic", "follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are on the landing page", "} resp = self.client.post(summary_url, data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/'", "form_type_id + r'\\/789\\/thank-you$') resp = self.client.get(resp.location, follow_redirects=True) self.assertEqual(resp.status_code, 200) #", "landing page content = resp.get_data(True) self.assertRegex(content, '<title>Introduction</title>') self.assertRegex(content, '>Start survey<')", "summary_url = resp.location resp = 
self.client.get(summary_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) #", "class TestHappyPath(IntegrationTestCase): def test_happy_path_203(self): self.happy_path('0203', '1') def test_happy_path_205(self): self.happy_path('0205', '1')", "form_type_id + r'\\/789\\/summary$') summary_url = resp.location resp = self.client.get(summary_url, follow_redirects=False)", "resp.get_data(True) self.assertRegex(content, '<title>Submission Successful</title>') self.assertRegex(content, '(?s)Monthly Business Survey - Retail", "\"period-to-month\": \"04\", \"period-to-year\": \"2016\", # Total Turnover \"total-retail-turnover\": \"100000\", #", "in the Questionnaire content = resp.get_data(True) self.assertRegex(content, '<title>Survey</title>') self.assertRegex(content, '>Monthly", "questionnaire post_data = { 'action[start_questionnaire]': 'Start Questionnaire' } resp =", "self.assertRegex(content, \"What are the dates of the sales period you", "\"Save &amp; Continue\" } # We submit the form resp", "302) self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/thank-you$') resp = self.client.get(resp.location,", "# End Date \"period-to-day\": \"30\", \"period-to-month\": \"04\", \"period-to-year\": \"2016\", #", "End Date \"period-to-day\": \"30\", \"period-to-month\": \"04\", \"period-to-year\": \"2016\", # Total", "- Retail Sales Index</') self.assertRegex(content, '>Your responses<') self.assertRegex(content, 'Please check", "\"30\", \"period-to-month\": \"04\", \"period-to-year\": \"2016\", # Total Turnover \"total-retail-turnover\": \"100000\",", "r'\\/789\\/summary$') summary_url = resp.location resp = self.client.get(summary_url, follow_redirects=False) self.assertEqual(resp.status_code, 200)", "\"period-from-year\": \"2016\", # End Date \"period-to-day\": \"30\", \"period-to-month\": \"04\", \"period-to-year\":", "\"Submit answers\" } resp = self.client.post(summary_url, data=post_data, follow_redirects=False) 
self.assertEqual(resp.status_code, 302)", "self.client.get(resp.location, follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are on the thank", "\"2016\", # Total Turnover \"total-retail-turnover\": \"100000\", # User Action \"action[save_continue]\":", "= { \"action[submit_answers]\": \"Submit answers\" } resp = self.client.post(summary_url, data=post_data,", "are in the Questionnaire content = resp.get_data(True) self.assertRegex(content, '<title>Survey</title>') self.assertRegex(content,", "User Action \"action[save_continue]\": \"Save &amp; Continue\" } # We submit", "are on the landing page content = resp.get_data(True) self.assertRegex(content, '<title>Introduction</title>')", "# We are in the Questionnaire content = resp.get_data(True) self.assertRegex(content,", "form_type_id + '/789/introduction', data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) block_one_url = resp.location", "happy_path(self, form_type_id, eq_id): # Get a token token = create_token(form_type_id,", "# We are on the thank you page content =", "of the sales period you are reporting for?\") self.assertRegex(content, \">Save", "tests.integration.integration_test_case import IntegrationTestCase class TestHappyPath(IntegrationTestCase): def test_happy_path_203(self): self.happy_path('0203', '1') def", "- Retail Sales Index.*?Monthly Business Survey - Retail Sales Index')", "Date \"period-from-day\": \"01\", \"period-from-month\": \"4\", \"period-from-year\": \"2016\", # End Date", "test_happy_path_205(self): self.happy_path('0205', '1') def happy_path(self, form_type_id, eq_id): # Get a", "you are reporting for?\") self.assertRegex(content, \">Save and continue<\") # check", "'Start Questionnaire' } resp = self.client.post('/questionnaire/' + eq_id + '/'", "import create_token from tests.integration.integration_test_case import IntegrationTestCase class TestHappyPath(IntegrationTestCase): def test_happy_path_203(self):", 
"'<title>Introduction</title>') self.assertRegex(content, '>Start survey<') self.assertRegex(content, 'Monthly Business Survey - Retail", "eq_id + '/' + form_type_id + '/789/introduction', data=post_data, follow_redirects=False) self.assertEqual(resp.status_code,", "# We fill in our answers form_data = { #", "proceed to the questionnaire post_data = { 'action[start_questionnaire]': 'Start Questionnaire'", "errors self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/summary$') summary_url = resp.location", "# check with have some guidance self.assertRegex(content, \"alcoholic drink\") #", "answers\" } resp = self.client.post(summary_url, data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) self.assertRegex(resp.location,", "responses<') self.assertRegex(content, 'Please check carefully before submission.') self.assertRegex(content, '>Submit answers<')", "you page content = resp.get_data(True) self.assertRegex(content, '<title>Submission Successful</title>') self.assertRegex(content, '(?s)Monthly", "def test_happy_path_203(self): self.happy_path('0203', '1') def test_happy_path_205(self): self.happy_path('0205', '1') def happy_path(self,", "content = resp.get_data(True) self.assertRegex(content, '<title>Survey</title>') self.assertRegex(content, '>Monthly Business Survey -", "check with have some guidance self.assertRegex(content, \"alcoholic drink\") # We", "= create_token(form_type_id, eq_id) resp = self.client.get('/session?token=' + token.decode(), follow_redirects=True) self.assertEqual(resp.status_code,", "the landing page content = resp.get_data(True) self.assertRegex(content, '<title>Introduction</title>') self.assertRegex(content, '>Start", "to the questionnaire post_data = { 'action[start_questionnaire]': 'Start Questionnaire' }", "test_happy_path_203(self): self.happy_path('0203', '1') def test_happy_path_205(self): self.happy_path('0205', '1') def happy_path(self, form_type_id,", "302) 
block_one_url = resp.location resp = self.client.get(block_one_url, follow_redirects=False) self.assertEqual(resp.status_code, 200)", "\"alcoholic drink\") # We fill in our answers form_data =", "+ form_type_id + r'\\/789\\/thank-you$') resp = self.client.get(resp.location, follow_redirects=True) self.assertEqual(resp.status_code, 200)", "page content = resp.get_data(True) self.assertRegex(content, '<title>Introduction</title>') self.assertRegex(content, '>Start survey<') self.assertRegex(content,", "the thank you page content = resp.get_data(True) self.assertRegex(content, '<title>Submission Successful</title>')", "page content = resp.get_data(True) self.assertRegex(content, '<title>Submission Successful</title>') self.assertRegex(content, '(?s)Monthly Business", "# Total Turnover \"total-retail-turnover\": \"100000\", # User Action \"action[save_continue]\": \"Save", "TestHappyPath(IntegrationTestCase): def test_happy_path_203(self): self.happy_path('0203', '1') def test_happy_path_205(self): self.happy_path('0205', '1') def", "content = resp.get_data(True) self.assertRegex(content, '<title>Submission Successful</title>') self.assertRegex(content, '(?s)Monthly Business Survey", "follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are on the thank you", "# Get a token token = create_token(form_type_id, eq_id) resp =", "token = create_token(form_type_id, eq_id) resp = self.client.get('/session?token=' + token.decode(), follow_redirects=True)", "follow_redirects=False) self.assertEqual(resp.status_code, 200) # We are on the review answers", "There are no validation errors self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id +", "= self.client.get('/session?token=' + token.decode(), follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are", "\"period-from-day\": \"01\", \"period-from-month\": \"4\", \"period-from-year\": \"2016\", # End Date \"period-to-day\":", "Total Turnover \"total-retail-turnover\": \"100000\", 
# User Action \"action[save_continue]\": \"Save &amp;", "self.client.post(block_one_url, data=form_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) # There are no validation", "# We submit our answers post_data = { \"action[submit_answers]\": \"Submit", "resp.get_data(True) self.assertRegex(content, '<title>Survey</title>') self.assertRegex(content, '>Monthly Business Survey - Retail Sales", "resp.location resp = self.client.get(block_one_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) # We are", "\"period-to-year\": \"2016\", # Total Turnover \"total-retail-turnover\": \"100000\", # User Action", "+ '/789/introduction', data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) block_one_url = resp.location resp", "'Monthly Business Survey - Retail Sales Index') # We proceed", "\"total-retail-turnover\": \"100000\", # User Action \"action[save_continue]\": \"Save &amp; Continue\" }", "our answers form_data = { # Start Date \"period-from-day\": \"01\",", "self.happy_path('0203', '1') def test_happy_path_205(self): self.happy_path('0205', '1') def happy_path(self, form_type_id, eq_id):", "} resp = self.client.post('/questionnaire/' + eq_id + '/' + form_type_id", "data=form_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) # There are no validation errors", "# We are on the review answers page content =", "self.assertEqual(resp.status_code, 200) # We are in the Questionnaire content =", "block_one_url = resp.location resp = self.client.get(block_one_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) #", "Retail Sales Index</') self.assertRegex(content, '>Your responses<') self.assertRegex(content, 'Please check carefully", "carefully before submission.') self.assertRegex(content, '>Submit answers<') # We submit our", "Sales Index</') self.assertRegex(content, '>Your responses<') self.assertRegex(content, 'Please check carefully before", "\"What are the dates of the 
sales period you are", "'>Start survey<') self.assertRegex(content, 'Monthly Business Survey - Retail Sales Index')", "\"period-from-month\": \"4\", \"period-from-year\": \"2016\", # End Date \"period-to-day\": \"30\", \"period-to-month\":", "We submit the form resp = self.client.post(block_one_url, data=form_data, follow_redirects=False) self.assertEqual(resp.status_code,", "are reporting for?\") self.assertRegex(content, \">Save and continue<\") # check with", "- Retail Sales Index</') self.assertRegex(content, \"What are the dates of", "self.assertRegex(content, '>Start survey<') self.assertRegex(content, 'Monthly Business Survey - Retail Sales", "for?\") self.assertRegex(content, \">Save and continue<\") # check with have some", "# User Action \"action[save_continue]\": \"Save &amp; Continue\" } # We", "resp.get_data(True) self.assertRegex(content, '<title>Summary</title>') self.assertRegex(content, '>Monthly Business Survey - Retail Sales", "self.assertRegex(content, \">Save and continue<\") # check with have some guidance", "= resp.get_data(True) self.assertRegex(content, '<title>Submission Successful</title>') self.assertRegex(content, '(?s)Monthly Business Survey -", "survey<') self.assertRegex(content, 'Monthly Business Survey - Retail Sales Index') #", "= { 'action[start_questionnaire]': 'Start Questionnaire' } resp = self.client.post('/questionnaire/' +", "+ r'\\/789\\/summary$') summary_url = resp.location resp = self.client.get(summary_url, follow_redirects=False) self.assertEqual(resp.status_code,", "check carefully before submission.') self.assertRegex(content, '>Submit answers<') # We submit", "before submission.') self.assertRegex(content, '>Submit answers<') # We submit our answers", "# We proceed to the questionnaire post_data = { 'action[start_questionnaire]':", "'(?s)Monthly Business Survey - Retail Sales Index.*?Monthly Business Survey -", "200) # We are in the Questionnaire content = resp.get_data(True)", "with have some guidance 
self.assertRegex(content, \"alcoholic drink\") # We fill", "&amp; Continue\" } # We submit the form resp =", "page content = resp.get_data(True) self.assertRegex(content, '<title>Summary</title>') self.assertRegex(content, '>Monthly Business Survey", "the dates of the sales period you are reporting for?\")", "form_type_id, eq_id): # Get a token token = create_token(form_type_id, eq_id)", "Sales Index') # We proceed to the questionnaire post_data =", "= self.client.get(summary_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) # We are on the", "= resp.get_data(True) self.assertRegex(content, '<title>Survey</title>') self.assertRegex(content, '>Monthly Business Survey - Retail", "We are on the thank you page content = resp.get_data(True)", "self.assertRegex(content, \"alcoholic drink\") # We fill in our answers form_data", "our answers post_data = { \"action[submit_answers]\": \"Submit answers\" } resp", "self.client.get(summary_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) # We are on the review", "import IntegrationTestCase class TestHappyPath(IntegrationTestCase): def test_happy_path_203(self): self.happy_path('0203', '1') def test_happy_path_205(self):", "'>Your responses<') self.assertRegex(content, 'Please check carefully before submission.') self.assertRegex(content, '>Submit", "validation errors self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/summary$') summary_url =", "self.client.get('/session?token=' + token.decode(), follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are on", "# We submit the form resp = self.client.post(block_one_url, data=form_data, follow_redirects=False)", "token.decode(), follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are on the landing", "We are in the Questionnaire content = resp.get_data(True) self.assertRegex(content, '<title>Survey</title>')", "resp = self.client.get(summary_url, follow_redirects=False) 
self.assertEqual(resp.status_code, 200) # We are on", "Get a token token = create_token(form_type_id, eq_id) resp = self.client.get('/session?token='", "+ '/' + form_type_id + '/789/introduction', data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302)", "resp = self.client.get(block_one_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) # We are in", "self.assertEqual(resp.status_code, 200) # We are on the review answers page", "answers page content = resp.get_data(True) self.assertRegex(content, '<title>Summary</title>') self.assertRegex(content, '>Monthly Business", "self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/thank-you$') resp = self.client.get(resp.location, follow_redirects=True)", "resp = self.client.get(resp.location, follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are on", "eq_id) resp = self.client.get('/session?token=' + token.decode(), follow_redirects=True) self.assertEqual(resp.status_code, 200) #", "+ r'\\/789\\/thank-you$') resp = self.client.get(resp.location, follow_redirects=True) self.assertEqual(resp.status_code, 200) # We", "'>Monthly Business Survey - Retail Sales Index</') self.assertRegex(content, \"What are", "\"100000\", # User Action \"action[save_continue]\": \"Save &amp; Continue\" } #", "resp = self.client.post(summary_url, data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' +", "post_data = { 'action[start_questionnaire]': 'Start Questionnaire' } resp = self.client.post('/questionnaire/'", "self.assertEqual(resp.status_code, 302) # There are no validation errors self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/'", "the Questionnaire content = resp.get_data(True) self.assertRegex(content, '<title>Survey</title>') self.assertRegex(content, '>Monthly Business", "create_token(form_type_id, eq_id) resp = self.client.get('/session?token=' + 
token.decode(), follow_redirects=True) self.assertEqual(resp.status_code, 200)", "} # We submit the form resp = self.client.post(block_one_url, data=form_data,", "Date \"period-to-day\": \"30\", \"period-to-month\": \"04\", \"period-to-year\": \"2016\", # Total Turnover", "drink\") # We fill in our answers form_data = {", "302) # There are no validation errors self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' +", "token token = create_token(form_type_id, eq_id) resp = self.client.get('/session?token=' + token.decode(),", "We are on the landing page content = resp.get_data(True) self.assertRegex(content,", "resp = self.client.get('/session?token=' + token.decode(), follow_redirects=True) self.assertEqual(resp.status_code, 200) # We", "200) # We are on the landing page content =", "self.assertRegex(content, 'Monthly Business Survey - Retail Sales Index') # We", "# We are on the landing page content = resp.get_data(True)", "self.client.post('/questionnaire/' + eq_id + '/' + form_type_id + '/789/introduction', data=post_data,", "= self.client.post(summary_url, data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id", "self.assertEqual(resp.status_code, 200) # We are on the thank you page", "'<title>Summary</title>') self.assertRegex(content, '>Monthly Business Survey - Retail Sales Index</') self.assertRegex(content,", "= resp.get_data(True) self.assertRegex(content, '<title>Summary</title>') self.assertRegex(content, '>Monthly Business Survey - Retail", "\"period-to-day\": \"30\", \"period-to-month\": \"04\", \"period-to-year\": \"2016\", # Total Turnover \"total-retail-turnover\":", "review answers page content = resp.get_data(True) self.assertRegex(content, '<title>Summary</title>') self.assertRegex(content, '>Monthly", "r'\\/789\\/thank-you$') resp = self.client.get(resp.location, follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are", 
"Survey - Retail Sales Index.*?Monthly Business Survey - Retail Sales", "Sales Index</') self.assertRegex(content, \"What are the dates of the sales", "'/789/introduction', data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) block_one_url = resp.location resp =", "self.assertEqual(resp.status_code, 302) block_one_url = resp.location resp = self.client.get(block_one_url, follow_redirects=False) self.assertEqual(resp.status_code,", "Survey - Retail Sales Index</') self.assertRegex(content, '>Your responses<') self.assertRegex(content, 'Please", "follow_redirects=False) self.assertEqual(resp.status_code, 302) self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/thank-you$') resp", "self.assertRegex(content, '<title>Submission Successful</title>') self.assertRegex(content, '(?s)Monthly Business Survey - Retail Sales", "self.assertRegex(content, '<title>Introduction</title>') self.assertRegex(content, '>Start survey<') self.assertRegex(content, 'Monthly Business Survey -", "from tests.integration.create_token import create_token from tests.integration.integration_test_case import IntegrationTestCase class TestHappyPath(IntegrationTestCase):", "self.happy_path('0205', '1') def happy_path(self, form_type_id, eq_id): # Get a token", "+ eq_id + '/' + form_type_id + '/789/introduction', data=post_data, follow_redirects=False)", "Continue\" } # We submit the form resp = self.client.post(block_one_url,", "r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/thank-you$') resp = self.client.get(resp.location, follow_redirects=True) self.assertEqual(resp.status_code,", "self.assertRegex(content, '(?s)Monthly Business Survey - Retail Sales Index.*?Monthly Business Survey", "Business Survey - Retail Sales Index.*?Monthly Business Survey - Retail", "submit our answers post_data = { \"action[submit_answers]\": \"Submit answers\" }", "on the review answers page content = resp.get_data(True) self.assertRegex(content, 
'<title>Summary</title>')", "on the landing page content = resp.get_data(True) self.assertRegex(content, '<title>Introduction</title>') self.assertRegex(content,", "Retail Sales Index</') self.assertRegex(content, \"What are the dates of the", "We proceed to the questionnaire post_data = { 'action[start_questionnaire]': 'Start", "are no validation errors self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/summary$')", "\"action[submit_answers]\": \"Submit answers\" } resp = self.client.post(summary_url, data=post_data, follow_redirects=False) self.assertEqual(resp.status_code,", "form resp = self.client.post(block_one_url, data=form_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) # There", "resp.location resp = self.client.get(summary_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) # We are", "\"2016\", # End Date \"period-to-day\": \"30\", \"period-to-month\": \"04\", \"period-to-year\": \"2016\",", "self.client.post(summary_url, data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id +", "self.assertEqual(resp.status_code, 200) # We are on the landing page content", "Turnover \"total-retail-turnover\": \"100000\", # User Action \"action[save_continue]\": \"Save &amp; Continue\"", "on the thank you page content = resp.get_data(True) self.assertRegex(content, '<title>Submission", "r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/summary$') summary_url = resp.location resp =", "tests.integration.create_token import create_token from tests.integration.integration_test_case import IntegrationTestCase class TestHappyPath(IntegrationTestCase): def", "def happy_path(self, form_type_id, eq_id): # Get a token token =", "= resp.get_data(True) self.assertRegex(content, '<title>Introduction</title>') self.assertRegex(content, '>Start survey<') self.assertRegex(content, 'Monthly Business", "We are 
on the review answers page content = resp.get_data(True)", "sales period you are reporting for?\") self.assertRegex(content, \">Save and continue<\")", "data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) block_one_url = resp.location resp = self.client.get(block_one_url,", "reporting for?\") self.assertRegex(content, \">Save and continue<\") # check with have", "guidance self.assertRegex(content, \"alcoholic drink\") # We fill in our answers", "submission.') self.assertRegex(content, '>Submit answers<') # We submit our answers post_data", "'>Submit answers<') # We submit our answers post_data = {", "We submit our answers post_data = { \"action[submit_answers]\": \"Submit answers\"", "\"04\", \"period-to-year\": \"2016\", # Total Turnover \"total-retail-turnover\": \"100000\", # User", "follow_redirects=False) self.assertEqual(resp.status_code, 302) # There are no validation errors self.assertRegex(resp.location,", "200) # We are on the thank you page content", "= self.client.post(block_one_url, data=form_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) # There are no", "Retail Sales Index') # We proceed to the questionnaire post_data", "{ 'action[start_questionnaire]': 'Start Questionnaire' } resp = self.client.post('/questionnaire/' + eq_id", "We fill in our answers form_data = { # Start", "eq_id): # Get a token token = create_token(form_type_id, eq_id) resp", "a token token = create_token(form_type_id, eq_id) resp = self.client.get('/session?token=' +", "\">Save and continue<\") # check with have some guidance self.assertRegex(content,", "self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/summary$') summary_url = resp.location resp", "the questionnaire post_data = { 'action[start_questionnaire]': 'Start Questionnaire' } resp", "dates of the sales period you are reporting for?\") self.assertRegex(content,", "# Start Date \"period-from-day\": \"01\", \"period-from-month\": \"4\", 
\"period-from-year\": \"2016\", #", "are on the review answers page content = resp.get_data(True) self.assertRegex(content,", "+ form_type_id + r'\\/789\\/summary$') summary_url = resp.location resp = self.client.get(summary_url,", "answers<') # We submit our answers post_data = { \"action[submit_answers]\":", "no validation errors self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/summary$') summary_url", "fill in our answers form_data = { # Start Date", "200) # We are on the review answers page content", "form_data = { # Start Date \"period-from-day\": \"01\", \"period-from-month\": \"4\",", "def test_happy_path_205(self): self.happy_path('0205', '1') def happy_path(self, form_type_id, eq_id): # Get", "create_token from tests.integration.integration_test_case import IntegrationTestCase class TestHappyPath(IntegrationTestCase): def test_happy_path_203(self): self.happy_path('0203',", "Business Survey - Retail Sales Index') # We proceed to", "submit the form resp = self.client.post(block_one_url, data=form_data, follow_redirects=False) self.assertEqual(resp.status_code, 302)", "post_data = { \"action[submit_answers]\": \"Submit answers\" } resp = self.client.post(summary_url,", "Successful</title>') self.assertRegex(content, '(?s)Monthly Business Survey - Retail Sales Index.*?Monthly Business", "+ token.decode(), follow_redirects=True) self.assertEqual(resp.status_code, 200) # We are on the", "{ \"action[submit_answers]\": \"Submit answers\" } resp = self.client.post(summary_url, data=post_data, follow_redirects=False)", "'action[start_questionnaire]': 'Start Questionnaire' } resp = self.client.post('/questionnaire/' + eq_id +", "self.assertRegex(content, '<title>Survey</title>') self.assertRegex(content, '>Monthly Business Survey - Retail Sales Index</')", "resp.get_data(True) self.assertRegex(content, '<title>Introduction</title>') self.assertRegex(content, '>Start survey<') self.assertRegex(content, 'Monthly Business Survey", 
"in our answers form_data = { # Start Date \"period-from-day\":", "- Retail Sales Index') # We proceed to the questionnaire", "Start Date \"period-from-day\": \"01\", \"period-from-month\": \"4\", \"period-from-year\": \"2016\", # End", "'>Monthly Business Survey - Retail Sales Index</') self.assertRegex(content, '>Your responses<')", "data=post_data, follow_redirects=False) self.assertEqual(resp.status_code, 302) self.assertRegex(resp.location, r'\\/questionnaire\\/1\\/' + form_type_id + r'\\/789\\/thank-you$')", "thank you page content = resp.get_data(True) self.assertRegex(content, '<title>Submission Successful</title>') self.assertRegex(content,", "some guidance self.assertRegex(content, \"alcoholic drink\") # We fill in our", "period you are reporting for?\") self.assertRegex(content, \">Save and continue<\") #", "follow_redirects=False) self.assertEqual(resp.status_code, 200) # We are in the Questionnaire content", "Index</') self.assertRegex(content, '>Your responses<') self.assertRegex(content, 'Please check carefully before submission.')", "self.assertRegex(content, '>Submit answers<') # We submit our answers post_data =", "= self.client.get(block_one_url, follow_redirects=False) self.assertEqual(resp.status_code, 200) # We are in the", "Business Survey - Retail Sales Index</') self.assertRegex(content, '>Your responses<') self.assertRegex(content,", "Action \"action[save_continue]\": \"Save &amp; Continue\" } # We submit the" ]
[ "transform batch_indices to pair_indices pair_indices = tf.transpose(tf.concat([broad_casted_batch_dims, tf.reshape(batch_indices, [1, -1])],", "input IDs?](../glossary#input-ids) attention_mask (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*):", "its model (such as downloading or saving, resizing the input", "is useful when using [`tf.keras.Model.fit`] method which currently requires having", "bool, int, ModelOutput, tuple, list, dict, np.ndarray) for k, v", "hidden_states = self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer with", "tf.constant([0, 1]) def _init_norm(self): \"\"\"Set the norm of the weight", "ValueError( f\"`mask_length` has to be smaller than `sequence_length`, but got", "internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not", "hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer with Wav2Vec2->Hubert class TFHubertEncoderLayer(tf.keras.layers.Layer): def", "axis=2) else: # self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states", "Model configuration class with all the parameters of the model.", "bsz) elif past_key_value is not None: # reuse k, v,", "def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) logger.warning( f\"\\n{self.__class__.__name__}", "_conv_out_length(input_lengths, kernel_size, stride) return input_lengths def _mask_hidden_states(self, hidden_states: tf.Tensor, mask_time_indices:", "self._create_input_spec(input_shape) self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape) self.built = True super().build(input_shape) def call(self, inputs):", "super().__init__(**kwargs) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.projection = tf.keras.layers.Dense( units=config.hidden_size, 
kernel_initializer=get_initializer(config.initializer_range),", "def _init_norm(self): \"\"\"Set the norm of the weight vector.\"\"\" kernel_norm", "cross_attentions key_states = past_key_value[0] value_states = past_key_value[1] elif is_cross_attention: #", "defaults to `False``): Whether or not to use the model", "seq_len, self.num_heads, self.head_dim)), (0, 2, 1, 3)) def call( self,", "License for the specific language governing permissions and # limitations", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer with Wav2Vec2->Hubert class TFHubertGroupNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id:", "// self.groups) == 1 if not is_instance_norm: broadcast_shape[self.axis] = input_shape[self.axis]", "= 1 if num_conv_pos_embeddings % 2 == 0 else 0", "= self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"],", "accepted for {parameter_names[0]}.\" ) for name in parameter_names: if name", "self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"],", "HUBERT_INPUTS_DOCSTRING = r\"\"\" Args: input_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]` ``Dict[str, tf.Tensor]`", "axes={self.axis: dim}) def _add_gamma_weight(self, input_shape): dim = input_shape[self.axis] shape =", "group_shape) return reshaped_inputs, group_shape else: return inputs, group_shape def 
_apply_normalization(self,", "class TFHubertFeatureProjection(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.layer_norm =", "# cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states),", "attentions tensors of all attention layers. See `attentions` under returned", "disabled in other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(layer_head_mask),", "token, - 1 corresponds to a *sentence B* token. [What", "+ str(self.groups) + \") cannot be \" \"more than the", "target_transcription = \"A MAN SAID TO THE UNIVERSE SIR I", "TFCausalLMOutput) -> TFCausalLMOutput: hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None", "masked indices <= sequence_length if num_masked_spans * mask_length > sequence_length:", "else output return TFCausalLMOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def", "transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement def _sample_without_replacement(distribution, num_samples): \"\"\" Categorical sampling without replacement is", "(self.head_dim * num_heads) != self.embed_dim: raise ValueError( f\"embed_dim must be", "groups self.axis = axis self.epsilon = epsilon self.center = center", "accordingly to the parameters name, i.e. 
`input_values = tf.keras.Input(shape=(128,), dtype='float32',", "*inputs, **kwargs) logger.warning( f\"\\n{self.__class__.__name__} has backpropagation operations that are NOT", "False if not getattr(self.config, \"apply_spec_augment\", True): return hidden_states if mask_time_indices", "for i in range(config.num_feat_extract_layers - 1) ] elif config.feat_extract_norm ==", "(decoder) save Tuple(tf.Tensor, tf.Tensor) of # all previous decoder key/value_states.", "transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing def input_values_processing(func, config, input_values, **kwargs): \"\"\" Process the input", "OF ANY KIND, either express or implied. # See the", "= (dim,) if self.center: self.beta = self.add_weight( shape=shape, name=\"beta\", initializer=self.beta_initializer,", "self.feature_extractor = TFHubertFeatureEncoder(config, name=\"feature_extractor\") self.feature_projection = TFHubertFeatureProjection(config, name=\"feature_projection\") if config.do_stable_layer_norm:", "**kwargs) # So to respect the proper output we have", "number of timesteps divided by length of mask span to", "(batch_size, sequence_length), mask_prob=self.config.mask_time_prob, mask_length=self.config.mask_time_length, min_masks=2, ) hidden_states = tf.where( tf.cast(mask_time_indices[:,", "* tf.expand_dims(attention_mask, -1) attention_mask = _expand_mask(attention_mask) else: attention_mask = None", "Wav2Vec2Processor, TFHubertModel >>> from datasets import load_dataset >>> import soundfile", "proj_shape = (bsz * self.num_heads, -1, self.head_dim) query_states = tf.reshape(self._shape(query_states,", "to cross_attention layer can then reuse all cross-attention # key/value_states", "__init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config self.feature_extractor =", "transformers import Wav2Vec2Processor, TFHubertForCTC >>> from datasets import load_dataset >>>", "def _reshape_into_groups(self, inputs, input_shape, 
tensor_input_shape): group_shape = [tensor_input_shape[i] for i", "only {allowed_types} is accepted for {k}.\") if isinstance(input_values, (tuple, list)):", "self.explicit_padding = explicit_padding self.filter_axis = 2 self.initialized = False self.kernel_norm_axes", "License. \"\"\" TensorFlow Hubert model.\"\"\" import inspect import warnings from", "outputing raw hidden-states without any specific head on top.\", HUBERT_START_DOCSTRING,", "= False @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFCausalLMOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_values: tf.Tensor,", "TFHubertAttention(tf.keras.layers.Layer): \"\"\"Multi-headed attention from \"Attention Is All You Need\"\"\" def", "of type {type(v)} is not allowed only {allowed_types} is accepted", "= tf.keras.initializers.get(gamma_initializer) self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer) self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint =", "`[0, 1]`: - 0 corresponds to a *sentence A* token,", "= None, gamma_constraint: tf.keras.constraints.Constraint = None, **kwargs, ): super().__init__(**kwargs) self.supports_masking", "1 for tokens that are **not masked**, - 0 for", "None, return_dict: Optional[bool] = None, training: bool = False, **kwargs:", "layers \"\"\" def _conv_out_length(input_length, kernel_size, stride): # 1D convolutional layer", "= tf.keras.constraints.get(beta_constraint) self.gamma_constraint = tf.keras.constraints.get(gamma_constraint) self._check_axis() def build(self, input_shape): self._check_if_input_shape_is_none(input_shape)", "sf >>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\") >>>", "tf.ones_like(spec_aug_mask_idxs), spec_aug_mask_idxs, spec_aug_mask.shape ) return spec_aug_mask # Copied from 
transformers.models.bart.modeling_tf_bart._expand_mask", "def __init__(self, config: HubertConfig, **kwargs: Any) -> None: super().__init__(**kwargs) if", "= tf.sequence_mask( output_lengths, maxlen=shape_list(hidden_states)[1], dtype=hidden_states.dtype ) hidden_states = self.feature_projection(hidden_states, training=inputs[\"training\"])", "not use this file except in compliance with the License.", "TFHubertFeedForward(config, name=\"feed_forward\") self.final_layer_norm = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name=\"final_layer_norm\" ) def call(", "f\"`config.feat_extract_norm` is {config.feat_extract_norm}, but has to be one of ['group',", "k not in parameter_names and \"args\" not in parameter_names: logger.warning(", "tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) # apply SpecAugment", "self.dropout = tf.keras.layers.Dropout(dropout) self.head_dim = embed_dim // num_heads if (self.head_dim", "accepted for {k}.\") else: if isinstance(input_values, tf.Tensor) or input_values is", "v in output.items() if k in [\"return_dict\", \"output_attentions\", \"output_hidden_states\", \"use_cache\"]", "three possibilities you can use to gather all the input", "has to be one of ['group', 'layer']\" ) self.conv_layers =", "or input is None: output[parameter_names[i]] = input else: raise ValueError(", "= kwargs.get(\"mask_time_indices\", None) if inputs[\"training\"]: hidden_states = self._mask_hidden_states(hidden_states, mask_time_indices=mask_time_indices) encoder_outputs", "pad_token = 0.0 input_values = tf.convert_to_tensor(np.random.rand(1, 16000), tf.float32) dummy_inputs =", "proj if is_cross_attention and past_key_value is not None: # reuse", "Tuple[tf.Tensor]]: all_hidden_states = () if output_hidden_states else None all_self_attentions =", "self.in_conv_dim = config.conv_dim[layer_id] if layer_id > 0 else 1 self.out_conv_dim", "Indices are selected in `[0, 1]`: - 0 corresponds to", 
"tensor_input_shape) normalized_inputs = self._apply_normalization(reshaped_inputs, input_shape) is_instance_norm = (input_shape[self.axis] // self.groups)", "2 self.initialized = False self.kernel_norm_axes = tf.constant([0, 1]) def _init_norm(self):", "the gradient computation for the feature encoder so that its", "= None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None,", "return dummy_inputs def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs)", "to return the hidden states of all layers. See `hidden_states`", "config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config self.feature_extractor = TFHubertFeatureEncoder(config,", "= None, **kwargs, ): super().__init__(**kwargs) self.supports_masking = True self.groups =", "# `name` part tensor_name = input.name.split(\":\")[0] if tensor_name in parameter_names:", "hidden_states = self.layer_norm(hidden_states) hidden_states = self.dropout(hidden_states, training=training) for i, layer_module", "is accepted for {parameter_names[i]}.\" ) elif isinstance(input_values, (dict, BatchEncoding)): if", "HUBERT_START_DOCSTRING, ) class TFHubertModel(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs, **kwargs):", "deprecated and will be removed in Transformers v5.\" \"Please use", "function will disable the gradient computation for the feature encoder", "outputs[1:] return ((loss,) + output) if loss is not None", "TFHubertLayerNormConvLayer(config, layer_id=i, name=f\"conv_layers.{i}\") for i in range(config.num_feat_extract_layers) ] else: raise", "= tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_weights = tf.nn.softmax(attn_weights,", "len(group_shape))) is_instance_norm = (input_shape[self.axis] // self.groups) == 1 if not", "Optional[tf.Tensor] = None, return_dict: Optional[bool] = None, training: bool =", "hidden_states = hidden_states[:, : -self.num_pad_remove, :] return 
hidden_states class TFHubertFeatureEncoder(tf.keras.layers.Layer):", "= None, output_attentions: Optional[tf.Tensor] = None, output_hidden_states: Optional[tf.Tensor] = None,", "input_shape): dim = input_shape[self.axis] if dim < self.groups: raise ValueError(", "self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None", "normalized_inputs = self._apply_normalization(reshaped_inputs, input_shape) is_instance_norm = (input_shape[self.axis] // self.groups) ==", "f\"The parameter {k} does not belongs to the parameter list", "= input_shape[self.axis] // self.groups broadcast_shape.insert(self.axis, self.groups) else: broadcast_shape[self.axis] = self.groups", ") hidden_states = outputs[0] hidden_states = self.dropout(hidden_states, training=inputs[\"training\"]) logits =", "shape for which to compute masks. should be of size", "shape = (dim,) if self.center: self.beta = self.add_weight( shape=shape, name=\"beta\",", "Hubert models at https://huggingface.co/models?filter=hubert ] LARGE_NEGATIVE = -1e8 # Copied", "UNIVERSE SIR I EXIST\" >>> # wrap processor as target", "self.explicit_padding), (0, 0))) output = super().call(padded_inputs) return output # Copied", "self.intermediate_dropout = tf.keras.layers.Dropout(config.activation_dropout) self.intermediate_dense = tf.keras.layers.Dense( units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"intermediate_dense\",", "1: raise ValueError(\"`mask_length` has to be bigger than 0.\") if", "that its parameter will not be updated during training. \"\"\"", "Channel\"\"\" # if key_value_states are provided this layer is used", "[SpecAugment](https://arxiv.org/abs/1904.08779). 
\"\"\" batch_size, sequence_length, hidden_size = shape_list(hidden_states) # `config.apply_spec_augment` can", "at build time since TF 2.7, so we need to", "bool = False) -> tf.Tensor: hidden_states = self.intermediate_dense(hidden_states) hidden_states =", "than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_weights), [bsz * self.num_heads, tgt_len,", "padded tokens are filled with -100 # when not being", "*inputs, **kwargs) self.hubert = TFHubertMainLayer(config, name=\"hubert\") self.dropout = tf.keras.layers.Dropout(config.final_dropout) self.lm_head", "= past_key_value[1] elif is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1,", "= self.encoder( hidden_states, attention_mask=attention_mask, output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states", "\"\"\" batch_size, sequence_length, hidden_size = shape_list(hidden_states) # `config.apply_spec_augment` can set", "are filled with -100 # when not being attended to", "class TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int = 0,", "= input_shape[self.axis] self.input_spec = tf.keras.layers.InputSpec(ndim=len(input_shape), axes={self.axis: dim}) def _add_gamma_weight(self, input_shape):", "Indices of positions of each input sequence tokens in the", "return the hidden states of all layers. See `hidden_states` under", "The tf.debugging asserts are not compliant with XLA then they", "def _add_gamma_weight(self, input_shape): dim = input_shape[self.axis] shape = (dim,) if", "Wav2Vec2->Hubert class TFHubertEncoderLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.attention", "// mask_length # SpecAugment mask to fill spec_aug_mask = tf.zeros((batch_size,", "batch[\"speech\"] = speech ... 
return batch >>> ds = load_dataset(\"hf-internal-testing/librispeech_asr_dummy\",", "self.layer_norm(hidden_states) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not", "masking padded elements mask_prob: probability for each token to be", "mask_feature_indices = _compute_mask_indices( (batch_size, hidden_size), mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length, ) hidden_states =", "hidden_states = self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer with", "tf.reshape(self.gamma, broadcast_shape) if self.center: beta = tf.reshape(self.beta, broadcast_shape) return gamma,", "_check_if_input_shape_is_none(self, input_shape): dim = input_shape[self.axis] if dim is None: raise", "None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFCausalLMOutput(logits=output.logits,", "\"use_cache\"] } output.update(booleans_processing(config=config, **boolean_dict)) return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement", "config: HubertConfig, layer_id: int = 0, **kwargs: Any) -> None:", "class TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config =", "explicit_padding, **kwargs): super().__init__( filters=filters, kernel_size=kernel_size, groups=groups, padding=\"valid\", use_bias=True, bias_initializer=\"he_normal\", **kwargs,", "- see https://github.com/tensorflow/tensorflow/issues/9260 for more info \"\"\" z = -tf.math.log(tf.random.uniform(shape_list(distribution),", "tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [ TFHubertEncoderLayerStableLayerNorm(config, name=f\"layers.{i}\")", 
"tf.reshape(spec_aug_mask_idxs, (batch_size, num_masked_spans * mask_length)) offsets = tf.range(mask_length)[tf.newaxis, tf.newaxis, :]", "of a list of symbolic inputs, each input has to", "instead.\", FutureWarning, ) output[\"input_values\"] = input_values.pop(\"inputs\") if \"decoder_cached_states\" in input_values:", ") inputs[\"output_hidden_states\"] = ( inputs[\"output_hidden_states\"] if inputs[\"output_hidden_states\"] else self.config.output_hidden_states )", "Authors and the HuggingFace Inc. team. All rights reserved. #", "kwargs.items(): if isinstance(v, allowed_types) or v is None: output[k] =", "_get_reshaped_weights(self, input_shape): broadcast_shape = self._create_broadcast_shape(input_shape) gamma = None beta =", "{allowed_types} is accepted for {parameter_names[i]}.\" ) elif isinstance(input_values, (dict, BatchEncoding)):", "def _apply_normalization(self, reshaped_inputs, input_shape): group_shape = tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes = list(range(1,", "output[\"kwargs\"] boolean_dict = { k: v for k, v in", "does not belongs to the parameter list {parameter_names} and will", "logging.get_logger(__name__) _CONFIG_FOR_DOC = \"HubertConfig\" TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ \"facebook/hubert-base-ls960\", # See", "labels in `[0, ..., config.vocab_size]` Returns: Example: ```python >>> import", ") @tf.function def serving(self, inputs): output = self.call(input_values=inputs, training=False) return", "def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int =", "Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... 
speech,", "tgt_len, src_len], message=f\"Attention mask should be of size {(bsz, 1,", "= tf.Variable(tf.transpose(self.kernel), name=\"weight_v\", trainable=True) self.weight_v = self.kernel self.weight_g = self.add_weight(", "given in the docstring: `model([input_values, attention_mask])` or `model([input_values, attention_mask, token_type_ids])`", "if num_conv_pos_embeddings % 2 == 0 else 0 def call(self,", "offset=beta, variance_epsilon=self.epsilon, ) return normalized_inputs def _get_reshaped_weights(self, input_shape): broadcast_shape =", "of varying length with one or several input Tensors IN", "Optional[bool] = None, labels: Optional[tf.Tensor] = None, output_hidden_states: Optional[bool] =", "transpose_b=True) # The tf.debugging asserts are not compliant with XLA", "self.num_pad_remove = 1 if num_conv_pos_embeddings % 2 == 0 else", "= conv_layers def call(self, input_values): hidden_states = tf.expand_dims(input_values, -1) for", "(0, 2, 1, 3) ) attn_output = tf.reshape(attn_output, (bsz, tgt_len,", "def _compute_mask_indices( shape: Tuple[int, int], mask_prob: float, mask_length: int, min_masks:", "value in the config will be used instead. return_dict (`bool`,", "shape_list(hidden_states) # `config.apply_spec_augment` can set masking to False if not", "not to return the hidden states of all layers. See", "superclass documentation for the generic methods the library implements for", "span to mask approximately this percentage of all elements. 
however", "in training mode (some modules like dropout modules have different", "or several input Tensors IN THE ORDER given in the", "the layer received an input with shape \" + str(input_shape)", "model.\"\"\" import inspect import warnings from typing import Any, Dict,", "in output.items() if k in [\"return_dict\", \"output_attentions\", \"output_hidden_states\", \"use_cache\"] }", "tgt_len, 1)) return (one_cst - expanded_mask) * LARGE_NEGATIVE # Copied", "self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}\", ) attn_output = tf.transpose(", "- 0 corresponds to a *sentence A* token, - 1", "(first \"if\" case) # if uni-directional self-attention (decoder) save Tuple(tf.Tensor,", "with XLA then they # have to be disabled in", "implied. # See the License for the specific language governing", "output[parameter_names[i]] = input else: raise ValueError( f\"Data of type {type(input)}", "False, ) -> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states = self.layer_norm(hidden_states)", "for which to compute masks. 
should be of size 2", "tf.reshape(values, [-1]), output_shape) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices def _compute_mask_indices( shape:", "mean, variance = tf.nn.moments(reshaped_inputs, group_reduction_axes, keepdims=True) gamma, beta = self._get_reshaped_weights(input_shape)", "input_shape: tf.TensorShape): self.masked_spec_embed = self.add_weight( shape=(self.config.hidden_size,), initializer=\"uniform\", trainable=True, name=\"masked_spec_embed\" )", "isinstance(v, allowed_types) or v is None: output[k] = v else:", ") -> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states,", "tf.newaxis, tf.newaxis]) def _normalize_kernel(self): \"\"\"Generate normalized weights.\"\"\" kernel = tf.nn.l2_normalize(self.weight_v,", "(\" + str(dim) + \").\" ) def _check_axis(self): if self.axis", "for the decoder is_cross_attention = key_value_states is not None bsz,", "TFHubertPositionalConvEmbedding(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs: Any) -> None: super().__init__(**kwargs)", "+ tf.random.uniform((1,))) num_masked_spans = max(num_masked_spans, min_masks) # make sure num", "attention_mask])` or `model([input_values, attention_mask, token_type_ids])` - a dictionary with one", "{ k: v for k, v in output.items() if k", "type {type(input_values)} is not allowed only {allowed_types} is accepted for", "replacement is currently not implemented. 
The gumbel-max trick will do", "= self._shape(self.v_proj(hidden_states), -1, bsz) key_states = tf.concat([past_key_value[0], key_states], axis=2) value_states", "training=inputs[\"training\"] ) if inputs[\"attention_mask\"] is not None: # compute real", "tf.tile(offsets, (batch_size, num_masked_spans, 1)) offsets = tf.reshape(offsets, (batch_size, num_masked_spans *", "hidden_states + self.feed_forward(hidden_states) hidden_states = self.final_layer_norm(hidden_states) outputs = (hidden_states,) if", "`name` part tensor_name = input.name.split(\":\")[0] if tensor_name in parameter_names: output[tensor_name]", "name=\"weight_v\", trainable=True) self.weight_v = self.kernel self.weight_g = self.add_weight( name=\"weight_g\", shape=(int(self.weight_v.shape[self.filter_axis]),", "of masked spans in batch num_masked_spans = int(mask_prob * sequence_length", "= tf.keras.layers.Dropout(config.final_dropout) self.lm_head = tf.keras.layers.Dense(config.vocab_size, name=\"lm_head\") def freeze_feature_extractor(self): \"\"\" Calling", "if is_cross_attention and past_key_value is not None: # reuse k,v,", "updated during training. \"\"\" warnings.warn( \"The method `freeze_feature_extractor` is deprecated", "previous decoder key/value_states to current projected key/value_states (third \"elif\" case)", "in input_values: warnings.warn( \"The `decoder_cached_states` argument is deprecated and will", "the specific language governing permissions and # limitations under the", "masked spans Adapted from [fairseq's data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376). 
\"\"\" batch_size, sequence_length =", "parameter_names: if name not in list(output.keys()) and name != \"args\":", "of the same size as shape, which will prevent masking", "= shape_list(key_states)[1] attn_weights = tf.matmul(query_states, key_states, transpose_b=True) # The tf.debugging", "name=\"final_layer_norm\" ) def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] =", "\"center\": self.center, \"scale\": self.scale, \"beta_initializer\": tf.keras.initializers.serialize(self.beta_initializer), \"gamma_initializer\": tf.keras.initializers.serialize(self.gamma_initializer), \"beta_regularizer\": tf.keras.regularizers.serialize(self.beta_regularizer),", "transformers.models.bart.modeling_tf_bart._expand_mask def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int", "= tf.zeros((batch_size, sequence_length), dtype=tf.int32) # uniform distribution to sample from,", "Mask to avoid performing attention on padding token indices. Mask", "to True. training (`bool`, *optional*, defaults to `False``): Whether or", "False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: \"\"\" Returns: Example: ```python >>>", "self.explicit_padding * 2 super().build(input_shape) self.kernel = tf.Variable(tf.transpose(self.kernel), name=\"weight_v\", trainable=True) self.weight_v", "size 1 >>> logits = model(input_values).logits >>> predicted_ids = tf.argmax(logits,", "def __init__( self, embed_dim: int, num_heads: int, dropout: float =", "key_states = tf.concat([past_key_value[0], key_states], axis=2) value_states = tf.concat([past_key_value[1], value_states], axis=2)", "other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_weights), [bsz *", "hidden_states + position_embeddings hidden_states = self.dropout(hidden_states, training=training) for i, layer_module", "timesteps divided by length of mask span to mask approximately", "parameters name, i.e. 
`input_values = tf.keras.Input(shape=(128,), dtype='float32', name=\"input_values\")` otherwise the", "self.head_dim], message=f\"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)},", "# get random indices to mask spec_aug_mask_idxs = _sample_without_replacement(uniform_dist, num_masked_spans)", "Wav2Vec2->Hubert class TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config", "def serving_output(self, output: TFCausalLMOutput) -> TFCausalLMOutput: hs = tf.convert_to_tensor(output.hidden_states) if", "and refer to the TF 2.0 documentation for all matter", "for i in range(config.num_feat_extract_layers) ] else: raise ValueError( f\"`config.feat_extract_norm` is", "): super().__init__(**kwargs) self.embed_dim = embed_dim self.num_heads = num_heads self.dropout =", "\"\"\" Masks extracted features along time axis and/or along feature", "if not self.built: input_shape = input_shape.as_list() # Conv1D output shapes", "tf.Tensor, mask_time_indices: Optional[tf.Tensor] = None): \"\"\" Masks extracted features along", "np import tensorflow as tf from ...activations_tf import get_tf_activation from", "-1, self.head_dim) query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape) key_states =", "= 32, axis: int = -1, epsilon: float = 1e-3,", "= shape_list(mask)[1] tgt_len = tgt_len if tgt_len is not None", "self.num_heads, tgt_len, src_len], message=f\"Attention weights should be of size {(bsz", "num_conv_pos_embeddings, **kwargs): super().__init__(**kwargs) self.num_pad_remove = 1 if num_conv_pos_embeddings % 2", "return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer with Wav2Vec2->Hubert class TFHubertEncoderLayer(tf.keras.layers.Layer):", "== \"sum\": loss = tf.reduce_sum(loss) if self.config.ctc_loss_reduction == \"mean\": loss", "Tokens with indices set to `-100` are ignored 
(masked), the", "mask_time_indices: Optional[tf.Tensor] = None): \"\"\" Masks extracted features along time", "tuple or dict in the first positional arguments. This second", "-1 else self.axis - 1 else: axis = -1 if", ") attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim)) attn_output = self.out_proj(attn_output)", "be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.__call__`] and [`PreTrainedTokenizer.encode`] for details.", ") def _create_input_spec(self, input_shape): dim = input_shape[self.axis] self.input_spec = tf.keras.layers.InputSpec(ndim=len(input_shape),", "num_masked_spans * mask_length)) offsets = tf.range(mask_length)[tf.newaxis, tf.newaxis, :] offsets =", "tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) #", "dtype=hidden_states.dtype ) hidden_states = self.feature_projection(hidden_states, training=inputs[\"training\"]) mask_time_indices = kwargs.get(\"mask_time_indices\", None)", "= None, past_key_values_length: int = 0): \"\"\" Expands attention_mask from", "not belongs to the parameter list {parameter_names} and will be", "https://arxiv.org/abs/1909.11556 for description) dropout_probability = np.random.uniform(0, 1) if training and", "attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, )", "how to convert `input_values` indices into associated vectors than the", "= -2 if self.axis == -1 else self.axis - 1", "extracted features along time axis and/or along feature axis according", "sequence_length, hidden_size = shape_list(hidden_states) # `config.apply_spec_augment` can set masking to", "if loss is not None else output return TFCausalLMOutput( loss=loss,", "pretrained models. 
\"\"\" config_class = HubertConfig base_model_prefix = \"hubert\" main_input_name", "have always the pattern `name:id` then we check only the", "= epsilon self.center = center self.scale = scale self.beta_initializer =", "2nd is timesteps attention_mask: optional padding mask of the same", "tensors for more detail. This argument can be used only", "self.center, \"scale\": self.scale, \"beta_initializer\": tf.keras.initializers.serialize(self.beta_initializer), \"gamma_initializer\": tf.keras.initializers.serialize(self.gamma_initializer), \"beta_regularizer\": tf.keras.regularizers.serialize(self.beta_regularizer), \"gamma_regularizer\":", "\"layer\": conv_layers = [ TFHubertLayerNormConvLayer(config, layer_id=i, name=f\"conv_layers.{i}\") for i in", "Model with a `language modeling` head on top for Connectionist", "str(dim) + \").\" ) if dim % self.groups != 0:", "self.encoder( hidden_states, attention_mask=attention_mask, output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states =", ") self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj = tf.keras.layers.Dense(embed_dim,", "groups=groups, padding=\"valid\", use_bias=True, bias_initializer=\"he_normal\", **kwargs, ) self.explicit_padding = explicit_padding self.filter_axis", "['group', 'layer']\" ) self.conv_layers = conv_layers def call(self, input_values): hidden_states", "processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch):", "more info \"\"\" z = -tf.math.log(tf.random.uniform(shape_list(distribution), 0, 1)) _, indices", "`embed_dim`: {self.embed_dim}\" f\" and `num_heads`: {num_heads}).\" ) self.scaling = self.head_dim**-0.5", "f\"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}\" f\"", "for the unexpected 
layers. \"\"\" signature = dict(inspect.signature(func).parameters) signature.pop(\"kwargs\", None)", "= tf.reshape(value_states, proj_shape) src_len = shape_list(key_states)[1] attn_weights = tf.matmul(query_states, key_states,", "of the TensorFlow model. config ([`PretrainedConfig`]): The config of the", "v elif k not in parameter_names and \"args\" not in", "attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], )", "name in parameter_names: if name not in list(output.keys()) and name", "processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch):", "creating a SavedModel TF calls the method with LayerCall.__call__(args, **kwargs)", "key/value_states to current projected key/value_states (third \"elif\" case) # if", "argument can be used only in eager mode, in graph", "False, return_dict: Optional[bool] = True, training: Optional[bool] = False, )", "self.call(input_values=inputs, training=False) return self.serving_output(output) HUBERT_START_DOCSTRING = r\"\"\" This model inherits", "tensor: tf.Tensor, seq_len: int, bsz: int): return tf.transpose(tf.reshape(tensor, (bsz, seq_len,", "`tf.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to", "@add_start_docstrings( \"The bare TFHubert Model transformer outputing raw hidden-states without", "input_shape): dim = input_shape[self.axis] shape = (dim,) if self.scale: self.gamma", "not is_instance_norm: axis = -2 if self.axis == -1 else", "def _mask_hidden_states(self, hidden_states: tf.Tensor, mask_time_indices: Optional[tf.Tensor] = None): \"\"\" Masks", 
"tf.keras.layers.Dense(config.vocab_size, name=\"lm_head\") def freeze_feature_extractor(self): \"\"\" Calling this function will disable", "is not None: # compute real output lengths according to", "= TFHubertEncoder(config, name=\"encoder\") def build(self, input_shape: tf.TensorShape): self.masked_spec_embed = self.add_weight(", "`({0})`, *optional*): Indices of positions of each input sequence tokens", "second option, there are three possibilities you can use to", "if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value =", "v for k, v in output.items() if k in [\"return_dict\",", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement def _sample_without_replacement(distribution, num_samples): \"\"\" Categorical sampling", "config.do_stable_layer_norm: self.encoder = TFHubertEncoderStableLayerNorm(config, name=\"encoder\") else: self.encoder = TFHubertEncoder(config, name=\"encoder\")", "> sequence_length: raise ValueError( f\"`mask_length` has to be smaller than", "super().__init__(**kwargs) self.num_pad_remove = 1 if num_conv_pos_embeddings % 2 == 0", "`tf.Tensor`, `List[tf.Tensor]` ``Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and each example", "<Tip> TF 2.0 models accepts two formats as inputs: -", "mode the value will always be set to True. 
training", "= tf.keras.Input(shape=(128,), dtype='float32', name=\"input_values\")` otherwise the order of the tensors", "for each token to be chosen as start of the", "if inputs[\"attention_mask\"] is not None: # compute real output lengths", "def build(self, input_shape): self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape) self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape) self.built", "but got `mask_length`: {mask_length} and `sequence_length`: {sequence_length}`\" ) # compute", "for conv_layer in self.conv_layers: hidden_states = conv_layer(hidden_states) return hidden_states class", "mask_time_indices hidden_states = tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis,", "isinstance(input_values, tf.Tensor) or input_values is None: output[parameter_names[0]] = input_values else:", "PyTorch with indices in format (batch_dim, indixes) \"\"\" indices_shape =", "= { \"input_values\": input_values, \"attention_mask\": tf.cast(tf.not_equal(input_values, pad_token), tf.float32), } return", "# EagerTensors don't allow to use the .name property so", "return_dict: Optional[bool] = True, training: Optional[bool] = False, ) ->", "`Dict[str, np.ndarray]` and each example must have the shape `({0})`):", "def get_config(self): config = { \"groups\": self.groups, \"axis\": self.axis, \"epsilon\":", "None beta = None if self.scale: gamma = tf.reshape(self.gamma, broadcast_shape)", "tgt_len, self.head_dim)}, but is {shape_list(attn_output)}\", ) attn_output = tf.transpose( tf.reshape(attn_output,", "set masking to False if not getattr(self.config, \"apply_spec_augment\", True): return", "\"\"\" HUBERT_INPUTS_DOCSTRING = r\"\"\" Args: input_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]` ``Dict[str,", "= None, labels: Optional[tf.Tensor] = None, 
output_hidden_states: Optional[bool] = None,", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm with Wav2Vec2->Hubert class TFHubertGroupNorm(tf.keras.layers.Layer): \"\"\" From tensorflow-addons https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization", "shape_list from ...tokenization_utils_base import BatchEncoding from ...utils import ( ModelOutput,", "training=inputs[\"training\"], ) return outputs def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states)", "range(config.num_hidden_layers)] def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] = None,", "masked language modeling loss. Indices should be in `[-100, 0,", "a list, tuple or dict in the first positional arguments.", "training=inputs[\"training\"]) logits = self.lm_head(hidden_states) if labels is not None: if", "tf.newaxis]) def _normalize_kernel(self): \"\"\"Generate normalized weights.\"\"\" kernel = tf.nn.l2_normalize(self.weight_v, axis=self.kernel_norm_axes)", "replace_return_docstrings, ) from .configuration_hubert import HubertConfig logger = logging.get_logger(__name__) _CONFIG_FOR_DOC", "to nullify selected heads of the self-attention modules. Mask values", "The Fairseq Authors and the HuggingFace Inc. team. 
All rights", "use_bias=bias, name=\"q_proj\") self.v_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"v_proj\") self.out_proj = tf.keras.layers.Dense(embed_dim,", "Dict, Optional, Tuple, Union import numpy as np import tensorflow", "num_heads)`, *optional*): Mask to nullify selected heads of the self-attention", "encoder_outputs[1:] return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) class TFHubertPreTrainedModel(TFPreTrainedModel): \"\"\"", "self.bias = self.add_weight(name=\"bias\", shape=(self.filters,), initializer=\"zeros\", trainable=True) def call(self, inputs): if", "selected in `[0, 1]`: - 0 corresponds to a *sentence", "= self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder: # if cross_attention save", "bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states = tf.concat([past_key_value[0], key_states],", "assuming that padded tokens are filled with -100 # when", "along feature axis if self.config.mask_feature_prob > 0: mask_feature_indices = _compute_mask_indices(", "tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [TFHubertEncoderLayer(config, name=f\"layers.{i}\") for i in range(config.num_hidden_layers)] def", "0: # generate indices & apply SpecAugment along time axis", "output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"],", "**kwargs): super().__init__(**kwargs) self.config = config self.feature_extractor = TFHubertFeatureEncoder(config, name=\"feature_extractor\") self.feature_projection", "encoder bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_states,", "divisible by num_heads (got `embed_dim`: {self.embed_dim}\" f\" and `num_heads`: {num_heads}).\"", "BASIS, # WITHOUT 
WARRANTIES OR CONDITIONS OF ANY KIND, either", "f\"Use `{self.__class__.__bases__[0].__name__}` instead.\", FutureWarning, ) class TFHubertFeatureProjection(tf.keras.layers.Layer): def __init__(self, config:", "outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder with Wav2Vec2->Hubert class TFHubertEncoder(tf.keras.layers.Layer): def", "or `tf.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask", "ValueError( f\"`config.feat_extract_norm` is {config.feat_extract_norm}, but has to be one of", "{type(input)} is not allowed only {allowed_types} is accepted for {parameter_names[i]}.\"", "@keras_serializable class TFHubertMainLayer(tf.keras.layers.Layer): config_class = HubertConfig def __init__(self, config: HubertConfig,", "output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) inputs[\"output_hidden_states\"] = ( inputs[\"output_hidden_states\"] if inputs[\"output_hidden_states\"]", "= self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states", "TFHubertMainLayer(config, name=\"hubert\") self.dropout = tf.keras.layers.Dropout(config.final_dropout) self.lm_head = tf.keras.layers.Dense(config.vocab_size, name=\"lm_head\") def", "`input_values` docstring) Tokens with indices set to `-100` are ignored", "is not allowed only {allowed_types} is accepted for {parameter_names[i]}.\" )", "self.initialized = False self.kernel_norm_axes = tf.constant([0, 1]) def _init_norm(self): \"\"\"Set", "hidden_states, attention_mask=attention_mask, training=training ) hidden_states = self.dropout(hidden_states, training=training) hidden_states =", "...tf_utils import shape_list from ...tokenization_utils_base import BatchEncoding from ...utils import", "\" of \" \"input tensor should have a defined dimension", "attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim)) attn_output = self.out_proj(attn_output) 
attn_weights:", "return a [`~file_utils.ModelOutput`] instead of a plain tuple. This argument", "tf.Tensor): \"\"\" Computes the output length of the convolutional layers", "# compute real output lengths according to convolution formula output_lengths", "beta_regularizer: tf.keras.regularizers.Regularizer = None, gamma_regularizer: tf.keras.regularizers.Regularizer = None, beta_constraint: tf.keras.constraints.Constraint", ") self.intermediate_act_fn = get_tf_activation(config.hidden_act) self.output_dense = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\",", "= get_tf_activation(config.hidden_act) self.output_dense = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"output_dense\", )", "is_instance_norm: group_shape[self.axis] = input_shape[self.axis] // self.groups group_shape.insert(self.axis, self.groups) group_shape =", "shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected", "indices = tf.nn.top_k(distribution + z, num_samples) return indices # Copied", "def __init__( self, groups: int = 32, axis: int =", "None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states:", "all_hidden_states = () if output_hidden_states else None all_self_attentions = ()", "input_shape, tensor_input_shape) normalized_inputs = self._apply_normalization(reshaped_inputs, input_shape) is_instance_norm = (input_shape[self.axis] //", "attention_mask = ( inputs[\"attention_mask\"] if inputs[\"attention_mask\"] is not None else", "but is {shape_list(attention_mask)}\", ) attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype) attn_weights =", "i in range(config.num_feat_extract_layers) ] else: raise ValueError( f\"`config.feat_extract_norm` is {config.feat_extract_norm},", "name=\"q_proj\") self.v_proj = 
tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"v_proj\") self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias,", "for downloading and loading pretrained models. \"\"\" config_class = HubertConfig", "Wav2Vec2->Hubert class TFHubertSamePadLayer(tf.keras.layers.Layer): def __init__(self, num_conv_pos_embeddings, **kwargs): super().__init__(**kwargs) self.num_pad_remove =", "{(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}\", ) attention_mask =", "save Tuple(tf.Tensor, tf.Tensor) of # all previous decoder key/value_states. Further", "attn_output = tf.transpose( tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2,", "= processor(transcription, return_tensors=\"tf\").input_values >>> loss = model(input_values, labels=labels).loss ```\"\"\" inputs", "size of the mask min_masks: minimum number of masked spans", "the License is distributed on an \"AS IS\" BASIS, #", "spans spec_aug_mask_idxs = tf.expand_dims(spec_aug_mask_idxs, -1) spec_aug_mask_idxs = tf.tile(spec_aug_mask_idxs, (1, 1,", "# scatter values to pair indices return tf.scatter_nd(pair_indices, tf.reshape(values, [-1]),", "sequence_length uniform_dist = tf.ones((batch_size, sequence_length - (mask_length - 1))) #", "with processor.as_target_processor(): ... 
labels = processor(transcription, return_tensors=\"tf\").input_values >>> loss =", "attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None, training: Optional[bool]", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing def input_values_processing(func, config, input_values, **kwargs): \"\"\" Process", "not self.initialized: self._init_norm() self.initialized = True self._normalize_kernel() padded_inputs = tf.pad(inputs,", "Returns: Example: ```python >>> from transformers import Wav2Vec2Processor, TFHubertModel >>>", "\"beta_regularizer\": tf.keras.regularizers.serialize(self.beta_regularizer), \"gamma_regularizer\": tf.keras.regularizers.serialize(self.gamma_regularizer), \"beta_constraint\": tf.keras.constraints.serialize(self.beta_constraint), \"gamma_constraint\": tf.keras.constraints.serialize(self.gamma_constraint), } base_config", "False, ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: \"\"\"Input shape: Batch x Time", "are selected in `[0, 1]`: - 0 corresponds to a", "None, output_hidden_states: Optional[tf.Tensor] = None, return_dict: Optional[bool] = None, training:", "= self.final_layer_norm(hidden_states) outputs = (hidden_states,) if output_attentions: outputs += (attn_weights,)", "GPU or a TPU\" ) @tf.function def serving(self, inputs): output", "-1, bsz) if self.is_decoder: # if cross_attention save Tuple(tf.Tensor, tf.Tensor)", "self._normalize_kernel() padded_inputs = tf.pad(inputs, ((0, 0), (self.explicit_padding, self.explicit_padding), (0, 0)))", "= self._get_reshaped_weights(input_shape) normalized_inputs = tf.nn.batch_normalization( reshaped_inputs, mean=mean, variance=variance, scale=gamma, offset=beta,", "None: output[parameter_names[0]] = input_values else: raise ValueError( f\"Data of type", "past_key_value is not None: # reuse k, v, self_attention key_states", "attentions=all_self_attentions, ) # Copied from 
transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer):", "to current projected key/value_states (third \"elif\" case) # if encoder", "sequence_length // mask_length # SpecAugment mask to fill spec_aug_mask =", "being attended to labels_mask = tf.cast(labels >= 0, tf.int32) target_lengths", "== \"mean\": loss = tf.reduce_mean(loss) else: loss = None if", "hidden_states, attn_weights, _ = self.attention( hidden_states, attention_mask=attention_mask, training=training ) hidden_states", "= tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) return attn_output, attn_weights, past_key_value", "input Tensors in the first positional argument : - a", "\"args\" not in parameter_names: logger.warning( f\"The parameter {k} does not", "this file except in compliance with the License. # You", ":], hidden_states, ) # apply SpecAugment along feature axis if", "tf.keras.initializers.serialize(self.gamma_initializer), \"beta_regularizer\": tf.keras.regularizers.serialize(self.beta_regularizer), \"gamma_regularizer\": tf.keras.regularizers.serialize(self.gamma_regularizer), \"beta_constraint\": tf.keras.constraints.serialize(self.beta_constraint), \"gamma_constraint\": tf.keras.constraints.serialize(self.gamma_constraint), }", "\"facebook/hubert-base-ls960\", # See all Hubert models at https://huggingface.co/models?filter=hubert ] LARGE_NEGATIVE", "+ str(dim) + \").\" ) if dim % self.groups !=", ":] offsets = tf.tile(offsets, (batch_size, num_masked_spans, 1)) offsets = tf.reshape(offsets,", "key_states, transpose_b=True) # The tf.debugging asserts are not compliant with", "name=\"layer_norm\") self.feed_forward = TFHubertFeedForward(config, name=\"feed_forward\") self.final_layer_norm = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name=\"final_layer_norm\"", "inputs[\"return_dict\"]: output = (logits,) + outputs[1:] return ((loss,) + 
output)", "dropout: float = 0.0, is_decoder: bool = False, bias: bool", "tf.where(mask_feature_indices[:, tf.newaxis, :], hidden_states, 0) return hidden_states def call( self,", "have the shape `({0})`): Indices of input sequence tokens in", "Wav2Vec2->Hubert class TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.attention", "EagerTensors don't allow to use the .name property so we", "names given in the docstring: `model({\"input_values\": input_values, \"token_type_ids\": token_type_ids})` </Tip>", "tf.transpose(tf.concat([broad_casted_batch_dims, tf.reshape(batch_indices, [1, -1])], 0)) # scatter values to pair", "each input has to be named accordingly to the parameters", "is {config.feat_extract_norm}, but has to be one of ['group', 'layer']\"", "training. \"\"\" self.hubert.feature_extractor.trainable = False @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFCausalLMOutput, config_class=_CONFIG_FOR_DOC) def call(", "of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify", "mask for a single layer should be of size {(self.num_heads)},", "bool = False, **kwargs: Any, ): inputs = input_values_processing( func=self.call,", "**kwargs) self.config = config self.hubert = TFHubertMainLayer(config, name=\"hubert\") @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFBaseModelOutput,", "kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.layer_norm = tf.keras.layers.LayerNormalization(name=\"layer_norm\", epsilon=config.layer_norm_eps) self.activation", "tf.Tensor, attention_mask: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = False, training:", "(\" + str(self.groups) + \") must be a \" \"multiple", "the unexpected layers. 
\"\"\" signature = dict(inspect.signature(func).parameters) signature.pop(\"kwargs\", None) signature.pop(\"self\",", "resizing the input embeddings, pruning heads etc.) This model is", "sample from, make sure that offset samples are < sequence_length", "# coding=utf-8 # Copyright 2021 The Fairseq Authors and the", ") inputs[\"output_attentions\"] = ( inputs[\"output_attentions\"] if inputs[\"output_attentions\"] else self.config.output_attentions )", "input sequence tokens in the position embeddings. Selected in the", "super().__init__(**kwargs) self.attention = TFHubertAttention( embed_dim=config.hidden_size, num_heads=config.num_attention_heads, dropout=config.attention_dropout, is_decoder=False, name=\"attention\", )", "TFCausalLMOutput from ...modeling_tf_utils import TFPreTrainedModel, booleans_processing, get_initializer, keras_serializable from ...tf_utils", "all the tensors in the first argument of the model", "value_states = tf.concat([past_key_value[1], value_states], axis=2) else: # self_attention key_states =", "= tf.keras.layers.Dropout(rate=config.feat_proj_dropout) def call(self, hidden_states: tf.Tensor, training: bool = False)", "use_bias=bias, name=\"v_proj\") self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"out_proj\") def _shape(self, tensor:", "sequence_length = shape if mask_length < 1: raise ValueError(\"`mask_length` has", "min_masks: int = 0, ) -> tf.Tensor: \"\"\" Computes random", "length with one or several input Tensors IN THE ORDER", "size 2 where first element is batch size and 2nd", "we check for a real Tensor if type(input) == tf.Tensor:", "# See all Hubert models at https://huggingface.co/models?filter=hubert ] LARGE_NEGATIVE =", "for i, input in enumerate(input_values): # EagerTensors don't allow to", "for i, layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states", "will be used instead. 
return_dict (`bool`, *optional*): Whether or not", "name=\"beta\", initializer=self.beta_initializer, regularizer=self.beta_regularizer, constraint=self.beta_constraint, ) else: self.beta = None def", "The inputs of the model. Returns: Two lists, one for", "class TFHubertForCTC(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs,", "__init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) logger.warning( f\"\\n{self.__class__.__name__} has", "training=training, ) outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"],", "tf.keras.layers.Dropout(config.final_dropout) self.lm_head = tf.keras.layers.Dense(config.vocab_size, name=\"lm_head\") def freeze_feature_extractor(self): \"\"\" Calling this", "selected in `[0, 1]`: - 1 for tokens that are", "= tf.reduce_mean(loss) else: loss = None if not inputs[\"return_dict\"]: output", "else: outputs = normalized_inputs return outputs def get_config(self): config =", "key_states], axis=2) value_states = tf.concat([past_key_value[1], value_states], axis=2) else: # self_attention", "attention_mask, token_type_ids])` - a dictionary with one or several input", "with indices set to `-100` are ignored (masked), the loss", "self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value", "HubertConfig, **kwargs: Any) -> None: super().__init__(**kwargs) if config.feat_extract_norm == \"group\":", "= attn_residual + hidden_states hidden_states = hidden_states + self.feed_forward(self.final_layer_norm(hidden_states)) outputs", "having all inputs as keyword arguments (like PyTorch models), or", "axis=-1), indices_shape), [1, -1] ) # transform batch_indices to pair_indices", "[bsz * self.num_heads, 
tgt_len, src_len], message=f\"Attention weights should be of", "= self._mask_hidden_states(hidden_states, mask_time_indices=mask_time_indices) encoder_outputs = self.encoder( hidden_states, attention_mask=attention_mask, output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"],", "\" \"multiple of the number of channels (\" + str(dim)", "all_self_attentions = () if output_attentions else None if attention_mask is", "TFHubertFeatureProjection(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps,", ") attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape(", "# broadcast batch dim to indices_shape broad_casted_batch_dims = tf.reshape( tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]),", "embed_dim=config.hidden_size, num_heads=config.num_attention_heads, dropout=config.attention_dropout, is_decoder=False, name=\"attention\", ) self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer_norm", ") outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"],", "from ...tf_utils import shape_list from ...tokenization_utils_base import BatchEncoding from ...utils", "bool = True, scale: bool = True, beta_initializer: tf.keras.initializers.Initializer =", "all the parameters of the model. Initializing with a config", "TFPreTrainedModel, booleans_processing, get_initializer, keras_serializable from ...tf_utils import shape_list from ...tokenization_utils_base", "previous decoder key/value_states. 
Further calls to uni-directional self-attention # can", "head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) return outputs def", "axis=-1) if layer_head_mask is not None: # The tf.debugging asserts", "0)) # scatter values to pair indices return tf.scatter_nd(pair_indices, tf.reshape(values,", "Optional[tf.Tensor] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] =", "by number of timesteps divided by length of mask span", "then `input_values` output[\"input_values\"] = output[\"args\"] del output[\"args\"] if \"kwargs\" in", "be one of ['group', 'layer']\" ) self.conv_layers = conv_layers def", "3) ) attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim)) attn_output =", "tf.reshape(normalized_inputs, tensor_input_shape) else: outputs = normalized_inputs return outputs def get_config(self):", "can use to gather all the input Tensors in the", "[TFHubertEncoderLayer(config, name=f\"layers.{i}\") for i in range(config.num_hidden_layers)] def call( self, hidden_states:", "= output[\"args\"] else: # `args` in this case is always", "span to be masked. this will be multiplied by number", "self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states", "bsz) key_states = tf.concat([past_key_value[0], key_states], axis=2) value_states = tf.concat([past_key_value[1], value_states],", "EXIST\" >>> # wrap processor as target processor to encode", "None: output[parameter_names[i]] = input else: raise ValueError( f\"Data of type", "= input_shape[self.axis] // self.groups group_shape.insert(self.axis, self.groups) group_shape = tf.stack(group_shape) reshaped_inputs", "parameter will not be updated during training. 
\"\"\" self.hubert.feature_extractor.trainable =", "given in the docstring: `model({\"input_values\": input_values, \"token_type_ids\": token_type_ids})` </Tip> Args:", "model = TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... speech, _ =", "return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement def _sample_without_replacement(distribution, num_samples): \"\"\"", "= r\"\"\" This model inherits from [`TFPreTrainedModel`]. Check the superclass", "dropout_probability = np.random.uniform(0, 1) if training and (dropout_probability < self.config.layerdrop):", "\") must be a \" \"multiple of the number of", "mask_length > sequence_length: raise ValueError( f\"`mask_length` has to be smaller", "layer_outputs = layer_module( hidden_states=hidden_states, attention_mask=attention_mask, output_attentions=output_attentions, training=training, ) hidden_states =", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm with Wav2Vec2->Hubert class TFHubertGroupNorm(tf.keras.layers.Layer): \"\"\" From", "output[tensor_name] = input else: output[parameter_names[i]] = input elif isinstance(input, allowed_types)", "of the mask min_masks: minimum number of masked spans Adapted", "in self.conv_layers: hidden_states = conv_layer(hidden_states) return hidden_states class TFHubertFeatureExtractor(TFHubertFeatureEncoder): def", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer with Wav2Vec2->Hubert class TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config:", "else: self.encoder = TFHubertEncoder(config, name=\"encoder\") def build(self, input_shape: tf.TensorShape): self.masked_spec_embed", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "= self.layer_norm(hidden_states) hidden_states = self.dropout(hidden_states, training=training) for i, layer_module in", "License, Version 2.0 (the 
\"License\"); # you may not use", "input_shape): dim = input_shape[self.axis] if dim is None: raise ValueError(", "can concat previous decoder key/value_states to current projected key/value_states (third", "projected key/value_states (third \"elif\" case) # if encoder bi-directional self-attention", "norm of the weight vector.\"\"\" kernel_norm = tf.sqrt(tf.reduce_sum(tf.square(self.weight_v), axis=self.kernel_norm_axes)) self.weight_g.assign(kernel_norm[:,", "(masked), the loss is only computed for the tokens with", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer with Wav2Vec2->Hubert class TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id:", "to be one of ['group', 'layer']\" ) self.conv_layers = conv_layers", "attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_probs =", "if self.num_pad_remove > 0: hidden_states = hidden_states[:, : -self.num_pad_remove, :]", "a SavedModel TF calls the method with LayerCall.__call__(args, **kwargs) #", "is_instance_norm = (input_shape[self.axis] // self.groups) == 1 if not is_instance_norm:", "embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or not to", "Optional[bool] = False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: all_hidden_states = ()", "False, training: bool = False, ) -> Tuple[tf.Tensor]: attn_residual =", "tf.keras.layers.Dropout(dropout) self.head_dim = embed_dim // num_heads if (self.head_dim * num_heads)", "= tf.reshape(spec_aug_mask_idxs, (batch_size, num_masked_spans * mask_length)) offsets = tf.range(mask_length)[tf.newaxis, tf.newaxis,", "-1) spec_aug_mask_idxs = tf.tile(spec_aug_mask_idxs, (1, 1, mask_length)) spec_aug_mask_idxs = tf.reshape(spec_aug_mask_idxs,", "Optional[tf.Tensor] = None): \"\"\" Masks extracted features along time axis", "detail. This argument can be used only in eager mode,", "unexpected layers. 
\"\"\" signature = dict(inspect.signature(func).parameters) signature.pop(\"kwargs\", None) signature.pop(\"self\", None)", "self-attention modules. Mask values selected in `[0, 1]`: - 1", "hidden_states = attn_residual + hidden_states hidden_states = hidden_states + self.feed_forward(self.final_layer_norm(hidden_states))", "-1] ) # transform batch_indices to pair_indices pair_indices = tf.transpose(tf.concat([broad_casted_batch_dims,", "a list of varying length with one or several input", "name=\"input_values\")` otherwise the order of the tensors will not be", "super().build(input_shape) def call(self, inputs): input_shape = tf.keras.backend.int_shape(inputs) tensor_input_shape = tf.shape(inputs)", "output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement def _sample_without_replacement(distribution, num_samples): \"\"\" Categorical", "0: raise ValueError( \"Number of groups (\" + str(self.groups) +", "+ outputs[1:] return ((loss,) + output) if loss is not", "in the position embeddings. 
Selected in the range `[0, config.max_position_embeddings", "list(output.keys()) and name != \"args\": output[name] = kwargs.pop(name, signature[name].default) #", "super().__init__(**kwargs) self.supports_masking = True self.groups = groups self.axis = axis", "{shape_list(attn_weights)}\", ) if attention_mask is not None: # The tf.debugging", "sequence_length), mask_prob=self.config.mask_time_prob, mask_length=self.config.mask_time_length, min_masks=2, ) hidden_states = tf.where( tf.cast(mask_time_indices[:, :,", "shape `({0})`, *optional*): Mask to avoid performing attention on padding", "tf.expand_dims(attention_mask, -1) attention_mask = _expand_mask(attention_mask) else: attention_mask = None position_embeddings", ") self.padding = TFHubertSamePadLayer(config.num_conv_pos_embeddings) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states:", "0, tf.int32) target_lengths = tf.reduce_sum(labels_mask, axis=-1) loss = tf.nn.ctc_loss( logits=logits,", "**masked**. inputs_embeds (`np.ndarray` or `tf.Tensor` of shape `({0}, hidden_size)`, *optional*):", "with -100 # when not being attended to labels_mask =", "modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_weights), [bsz * self.num_heads,", "tensor should have a defined dimension \" \"but the layer", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", ">>> with processor.as_target_processor(): ... 
labels = processor(transcription, return_tensors=\"tf\").input_values >>> loss", "the mask min_masks: minimum number of masked spans Adapted from", "TFHubertModel(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs)", "Wav2Vec2->Hubert class TFHubertGroupNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int =", "None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask: Optional[tf.Tensor] = None, layer_head_mask:", "k, v in output.items() if k in [\"return_dict\", \"output_attentions\", \"output_hidden_states\",", "to `False``): Whether or not to use the model in", "input_values: warnings.warn( \"The `inputs` argument is deprecated and will be", "(batch_size, num_masked_spans * mask_length)) offsets = tf.range(mask_length)[tf.newaxis, tf.newaxis, :] offsets", "class TFHubertPositionalConvEmbedding(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs: Any) -> None:", "used in eager mode, in graph mode the value will", "input_values is None: output[parameter_names[0]] = input_values else: raise ValueError( f\"Data", "int, min_masks: int = 0, ) -> tf.Tensor: \"\"\" Computes", "convolution formula output_lengths = self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"], -1)) attention_mask = tf.sequence_mask( output_lengths,", "logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"HubertConfig\" TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ \"facebook/hubert-base-ls960\",", "= get_tf_activation(config.feat_extract_activation) self.layer_norm = TFHubertGroupNorm(groups=self.out_conv_dim, epsilon=config.layer_norm_eps, name=\"layer_norm\") def call(self, hidden_states:", "computation for the feature encoder so that its parameters will", "+ str(input_shape) + \".\" ) def _set_number_of_groups_for_instance_norm(self, input_shape): dim =", "hidden_states = self.layer_norm(hidden_states) 
hidden_states = hidden_states + self.feed_forward(hidden_states) hidden_states =", "tf.debugging.assert_equal( shape_list(attention_mask), [bsz, 1, tgt_len, src_len], message=f\"Attention mask should be", "tensors of all attention layers. See `attentions` under returned tensors", "attn_output = tf.matmul(attn_probs, value_states) # The tf.debugging asserts are not", "tf.nn.moments(reshaped_inputs, group_reduction_axes, keepdims=True) gamma, beta = self._get_reshaped_weights(input_shape) normalized_inputs = tf.nn.batch_normalization(", "inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) inputs[\"output_hidden_states\"] = ( inputs[\"output_hidden_states\"]", "with LayerCall.__call__(args, **kwargs) # So to respect the proper output", "None if not inputs[\"return_dict\"]: output = (logits,) + outputs[1:] return", "None position_embeddings = self.pos_conv_embed(hidden_states) hidden_states = hidden_states + position_embeddings hidden_states", "config of the running model. **kwargs: The inputs of the", "for the missing layers, and another one for the unexpected", "v in kwargs.items(): if isinstance(v, allowed_types) or v is None:", "tgt_len, src_len)}, but is {shape_list(attention_mask)}\", ) attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype)", "tf.sqrt(tf.reduce_sum(tf.square(self.weight_v), axis=self.kernel_norm_axes)) self.weight_g.assign(kernel_norm[:, tf.newaxis, tf.newaxis]) def _normalize_kernel(self): \"\"\"Generate normalized weights.\"\"\"", "with `input_values` only and nothing else: `model(inputs_ids)` - a list", "attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. 
\"\"\"", "name=\"out_proj\") def _shape(self, tensor: tf.Tensor, seq_len: int, bsz: int): return", "tf.Tensor: hidden_states = self.intermediate_dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) hidden_states = self.intermediate_dropout(hidden_states,", "value_states], axis=2) else: # self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz)", "that offset samples are < sequence_length uniform_dist = tf.ones((batch_size, sequence_length", "provided this layer is used as a cross-attention layer #", "always be set to True. training (`bool`, *optional*, defaults to", "to the parameter list {parameter_names} and will be ignored.\" )", "model weights. \"\"\" HUBERT_INPUTS_DOCSTRING = r\"\"\" Args: input_values (`np.ndarray`, `tf.Tensor`,", "group_shape = tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes = list(range(1, len(group_shape))) is_instance_norm = (input_shape[self.axis]", "token type IDs?](../glossary#token-type-ids) position_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`,", "tgt_len, src_len], message=f\"Attention weights should be of size {(bsz *", "super().__init__(**kwargs) self.config = config self.feature_extractor = TFHubertFeatureEncoder(config, name=\"feature_extractor\") self.feature_projection =", "now - see https://github.com/tensorflow/tensorflow/issues/9260 for more info \"\"\" z =", "{num_heads}).\" ) self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj =", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer with Wav2Vec2->Hubert class TFHubertGroupNormConvLayer(tf.keras.layers.Layer): def __init__(self, config:", "Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None, training: Optional[bool] =", "TFHubertLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int = 0, **kwargs:", "self.output_dense = tf.keras.layers.Dense( units=config.hidden_size, 
kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"output_dense\", ) self.output_dropout =", "= tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [TFHubertEncoderLayer(config, name=f\"layers.{i}\") for i in range(config.num_hidden_layers)]", "input_shape): dim = input_shape[self.axis] shape = (dim,) if self.center: self.beta", "the .name property so we check for a real Tensor", "calls to cross_attention layer can then reuse all cross-attention #", "Optional[bool] = None, training: bool = False, ) -> Union[TFBaseModelOutput,", "mask_length=self.config.mask_feature_length, ) hidden_states = tf.where(mask_feature_indices[:, tf.newaxis, :], hidden_states, 0) return", ") class TFHubertForCTC(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config,", "config self.hubert = TFHubertMainLayer(config, name=\"hubert\") @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC) def call(", "_add_beta_weight(self, input_shape): dim = input_shape[self.axis] shape = (dim,) if self.center:", "the configuration. 
Check out the [`~PreTrainedModel.from_pretrained`] method to load the", "as downloading or saving, resizing the input embeddings, pruning heads", "self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor] = None, token_type_ids: Optional[tf.Tensor] =", "= True self._normalize_kernel() padded_inputs = tf.pad(inputs, ((0, 0), (self.explicit_padding, self.explicit_padding),", "return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) class TFHubertPreTrainedModel(TFPreTrainedModel): \"\"\" An", "output return TFCausalLMOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def serving_output(self,", "tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]), axis=-1), indices_shape), [1, -1] ) # transform batch_indices to", "logger.warning( f\"\\n{self.__class__.__name__} has backpropagation operations that are NOT supported on", "indices. Mask values selected in `[0, 1]`: - 1 for", "**config} def compute_output_shape(self, input_shape): return input_shape def _reshape_into_groups(self, inputs, input_shape,", "See all Hubert models at https://huggingface.co/models?filter=hubert ] LARGE_NEGATIVE = -1e8", "- 0 indicates the head is **masked**. 
inputs_embeds (`np.ndarray` or", "or `model([input_values, attention_mask, token_type_ids])` - a dictionary with one or", "tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions", "at https://huggingface.co/models?filter=hubert ] LARGE_NEGATIVE = -1e8 # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing", ": -self.num_pad_remove, :] return hidden_states class TFHubertFeatureEncoder(tf.keras.layers.Layer): def __init__(self, config:", "Optional[bool] = True, training: Optional[bool] = False, ) -> Union[TFBaseModelOutput,", "with Wav2Vec2->Hubert class TFHubertGroupNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int", "tf.reduce_sum(labels_mask, axis=-1) loss = tf.nn.ctc_loss( logits=logits, labels=labels, logit_length=input_lengths, label_length=target_lengths, blank_index=self.config.pad_token_id,", "Wav2Vec2->Hubert class TFHubertWeightNormConv1D(tf.keras.layers.Conv1D): \"\"\"Adapted from https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\" def __init__(self, filters, kernel_size,", "= self.feature_extractor( tf.cast(inputs[\"input_values\"], tf.float32), training=inputs[\"training\"] ) if inputs[\"attention_mask\"] is not", "(`tf.Tensor` or `np.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Labels for", "{(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}\", ) attn_output =", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config:", "self.intermediate_dense = tf.keras.layers.Dense( units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"intermediate_dense\", 
) self.intermediate_act_fn =", "initialization and a simple interface for downloading and loading pretrained", "tf.Tensor]: pad_token = 0.0 input_values = tf.convert_to_tensor(np.random.rand(1, 16000), tf.float32) dummy_inputs", "encode labels >>> with processor.as_target_processor(): ... labels = processor(transcription, return_tensors=\"tf\").input_values", "numpy as np import tensorflow as tf from ...activations_tf import", "variance=variance, scale=gamma, offset=beta, variance_epsilon=self.epsilon, ) return normalized_inputs def _get_reshaped_weights(self, input_shape):", "None, beta_constraint: tf.keras.constraints.Constraint = None, gamma_constraint: tf.keras.constraints.Constraint = None, **kwargs,", "src_len)}, but is {shape_list(attn_weights)}\", ) if attention_mask is not None:", "is only computed for the tokens with labels in `[0,", "Optional[tf.Tensor]]: \"\"\"Input shape: Batch x Time x Channel\"\"\" # if", "of all attention layers. See `attentions` under returned tensors for", "\").\" ) def _check_axis(self): if self.axis == 0: raise ValueError(", "output_hidden_states: Optional[tf.Tensor] = None, return_dict: Optional[bool] = None, training: bool", "is_cross_attention = key_value_states is not None bsz, tgt_len, embed_dim =", "to a *sentence B* token. [What are token type IDs?](../glossary#token-type-ids)", "*optional*): Optionally, instead of passing `input_values` you can choose to", "import shape_list from ...tokenization_utils_base import BatchEncoding from ...utils import (", "return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm", "`input_values` indices into associated vectors than the model's internal embedding", "the training. 
Args: func (`callable`): The callable function of the", "accepted for {parameter_names[i]}.\" ) elif isinstance(input_values, (dict, BatchEncoding)): if \"inputs\"", "= self.layer_norm(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied from", "writing, software # distributed under the License is distributed on", "Wav2Vec2->Hubert class TFHubertPositionalConvEmbedding(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs: Any) ->", "isinstance(v, allowed_types) or v is None: output[k] = v elif", "```python >>> from transformers import Wav2Vec2Processor, TFHubertModel >>> from datasets", "HubertConfig base_model_prefix = \"hubert\" main_input_name = \"input_values\" @property def dummy_inputs(self)", "TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... speech, _ = sf.read(batch[\"file\"]) ...", "_create_broadcast_shape(self, input_shape): broadcast_shape = [1] * len(input_shape) is_instance_norm = (input_shape[self.axis]", "booleans_processing, get_initializer, keras_serializable from ...tf_utils import shape_list from ...tokenization_utils_base import", "= False self.kernel_norm_axes = tf.constant([0, 1]) def _init_norm(self): \"\"\"Set the", "True. training (`bool`, *optional*, defaults to `False``): Whether or not", "filters=config.hidden_size, kernel_size=config.num_conv_pos_embeddings, groups=config.num_conv_pos_embedding_groups, explicit_padding=config.num_conv_pos_embeddings // 2, name=\"conv\", ) self.padding =", "= self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if", "in the first positional arguments. This second option is useful", "tf.Tensor) of # all previous decoder key/value_states. 
Further calls to", "\"Number of groups (\" + str(self.groups) + \") cannot be", "axis=self.kernel_norm_axes)) self.weight_g.assign(kernel_norm[:, tf.newaxis, tf.newaxis]) def _normalize_kernel(self): \"\"\"Generate normalized weights.\"\"\" kernel", "tf.debugging asserts are not compliant with XLA then they #", "max(num_masked_spans, min_masks) # make sure num masked indices <= sequence_length", "super().__init__(**kwargs) self.in_conv_dim = config.conv_dim[layer_id] if layer_id > 0 else 1", "HUBERT_START_DOCSTRING = r\"\"\" This model inherits from [`TFPreTrainedModel`]. Check the", "model = TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... speech, _ =", "self.groups = dim def _check_size_of_dimensions(self, input_shape): dim = input_shape[self.axis] if", "tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"q_proj\") self.v_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"v_proj\") self.out_proj =", "self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) elif self.config.mask_time_prob > 0: #", "in the first positional argument : - a single Tensor", "gamma_initializer: tf.keras.initializers.Initializer = \"ones\", beta_regularizer: tf.keras.regularizers.Regularizer = None, gamma_regularizer: tf.keras.regularizers.Regularizer", "percentage of all elements. 
however due to overlaps, the actual", "to a *sentence A* token, - 1 corresponds to a", "- (mask_length - 1))) # get random indices to mask", "IDs?](../glossary#input-ids) attention_mask (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*): Mask", "ValueError( \"Number of groups (\" + str(self.groups) + \") cannot", "name=\"conv\", ) self.padding = TFHubertSamePadLayer(config.num_conv_pos_embeddings) self.activation = get_tf_activation(config.feat_extract_activation) def call(self,", "`num_heads`: {num_heads}).\" ) self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj", "\"The `inputs` argument is deprecated and will be removed in", "**kwargs, ) self.explicit_padding = explicit_padding self.filter_axis = 2 self.initialized =", "a TPU\" ) @tf.function def serving(self, inputs): output = self.call(input_values=inputs,", "= self.add_weight( shape=shape, name=\"gamma\", initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, constraint=self.gamma_constraint, ) else: self.gamma", "channels (\" + str(dim) + \").\" ) def _check_axis(self): if", "self.groups broadcast_shape.insert(self.axis, self.groups) else: broadcast_shape[self.axis] = self.groups return broadcast_shape #", "type(input) == tf.Tensor: # Tensor names have always the pattern", "= 0): \"\"\" Expands attention_mask from `[bsz, seq_len]` to `[bsz,", "not is_instance_norm: broadcast_shape[self.axis] = input_shape[self.axis] // self.groups broadcast_shape.insert(self.axis, self.groups) else:", "to pair_indices pair_indices = tf.transpose(tf.concat([broad_casted_batch_dims, tf.reshape(batch_indices, [1, -1])], 0)) #", "the attentions tensors of all attention layers. See `attentions` under", "of size 2 where first element is batch size and", "related to general usage and behavior. <Tip> TF 2.0 models", "and # limitations under the License. 
\"\"\" TensorFlow Hubert model.\"\"\"", "spec_aug_mask_idxs = tf.expand_dims(spec_aug_mask_idxs, -1) spec_aug_mask_idxs = tf.tile(spec_aug_mask_idxs, (1, 1, mask_length))", "`args` in this case is always the first parameter, then", "This argument can be used in eager mode, in graph", "of shape `({0})`, *optional*): Indices of positions of each input", "return indices # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices def _scatter_values_on_batch_indices(values, batch_indices, output_shape):", "= tf.reshape(self.beta, broadcast_shape) return gamma, beta def _check_if_input_shape_is_none(self, input_shape): dim", "f\"The class `{self.__class__.__name__}` has been depreciated \" \"and will be", "self.config.mask_feature_prob > 0: mask_feature_indices = _compute_mask_indices( (batch_size, hidden_size), mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length,", "else self.config.output_hidden_states ) inputs[\"output_attentions\"] = ( inputs[\"output_attentions\"] if inputs[\"output_attentions\"] else", "ModelOutput, tuple, list, dict, np.ndarray) for k, v in kwargs.items():", "during training. 
\"\"\" self.hubert.feature_extractor.trainable = False @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFCausalLMOutput, config_class=_CONFIG_FOR_DOC) def", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward with Wav2Vec2->Hubert class TFHubertFeedForward(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "padding input_shape[-2] += self.explicit_padding * 2 super().build(input_shape) self.kernel = tf.Variable(tf.transpose(self.kernel),", "self.hubert = TFHubertMainLayer(config, name=\"hubert\") self.dropout = tf.keras.layers.Dropout(config.final_dropout) self.lm_head = tf.keras.layers.Dense(config.vocab_size,", "tf.keras.initializers.Initializer = \"zeros\", gamma_initializer: tf.keras.initializers.Initializer = \"ones\", beta_regularizer: tf.keras.regularizers.Regularizer =", "a simple interface for downloading and loading pretrained models. \"\"\"", "self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"k_proj\") self.q_proj", "layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}\", )", "# all previous decoder key/value_states. 
Further calls to uni-directional self-attention", "k in [\"return_dict\", \"output_attentions\", \"output_hidden_states\", \"use_cache\"] } output.update(booleans_processing(config=config, **boolean_dict)) return", "self.conv = TFHubertWeightNormConv1D( filters=config.hidden_size, kernel_size=config.num_conv_pos_embeddings, groups=config.num_conv_pos_embedding_groups, explicit_padding=config.num_conv_pos_embeddings // 2, name=\"conv\",", "self.final_layer_norm(hidden_states) outputs = (hidden_states,) if output_attentions: outputs += (attn_weights,) return", "raise ValueError( f\"`mask_length` has to be smaller than `sequence_length`, but", "which will prevent masking padded elements mask_prob: probability for each", "not be guaranteed during the training. Args: func (`callable`): The", "TF 2.0 documentation for all matter related to general usage", "in other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_weights), [bsz", "src_len = shape_list(key_states)[1] attn_weights = tf.matmul(query_states, key_states, transpose_b=True) # The", "hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer with Wav2Vec2->Hubert class TFHubertSamePadLayer(tf.keras.layers.Layer): def", "rights reserved. 
# # Licensed under the Apache License, Version", "*optional*): Mask to nullify selected heads of the self-attention modules.", "self.layer_norm(hidden_states) hidden_states, attn_weights, _ = self.attention( hidden_states, attention_mask=attention_mask, training=training )", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer with Wav2Vec2->Hubert class TFHubertLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id:", "not None: if tf.reduce_max(labels) >= self.config.vocab_size: raise ValueError(f\"Label values must", "else: # `args` in this case is always the first", "self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape) self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape) self.built = True super().build(input_shape) def", "the model. Returns: Two lists, one for the missing layers,", "tf.shape(inputs) reshaped_inputs, group_shape = self._reshape_into_groups(inputs, input_shape, tensor_input_shape) normalized_inputs = self._apply_normalization(reshaped_inputs,", "= self.feature_projection(hidden_states, training=inputs[\"training\"]) mask_time_indices = kwargs.get(\"mask_time_indices\", None) if inputs[\"training\"]: hidden_states", "else self.config.return_dict outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"],", "num_heads=config.num_attention_heads, dropout=config.attention_dropout, is_decoder=False, name=\"attention\", ) self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer_norm =", "= _expand_mask(attention_mask) else: attention_mask = None position_embeddings = self.pos_conv_embed(hidden_states) hidden_states", "= _compute_mask_indices( (batch_size, sequence_length), mask_prob=self.config.mask_time_prob, 
mask_length=self.config.mask_time_length, min_masks=2, ) hidden_states =", "# from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return (input_length - kernel_size) // stride +", "self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] = None, output_attentions: Optional[bool] =", "None: # reuse k,v, cross_attentions key_states = past_key_value[0] value_states =", "def _conv_out_length(input_length, kernel_size, stride): # 1D convolutional layer output length", "load_dataset(\"hf-internal-testing/librispeech_asr_dummy\", \"clean\", split=\"validation\") >>> ds = ds.map(map_to_array) >>> input_values =", "is not None else src_len one_cst = tf.constant(1.0) mask =", "according to [SpecAugment](https://arxiv.org/abs/1904.08779). \"\"\" batch_size, sequence_length, hidden_size = shape_list(hidden_states) #", "head is **masked**. inputs_embeds (`np.ndarray` or `tf.Tensor` of shape `({0},", "cross-attention layer # for the decoder is_cross_attention = key_value_states is", "key/value_states (third \"elif\" case) # if encoder bi-directional self-attention `past_key_value`", "IDs?](../glossary#token-type-ids) position_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*): Indices", "to be smaller than `sequence_length`, but got `mask_length`: {mask_length} and", "use to gather all the input Tensors in the first", "parameter_names: logger.warning( f\"The parameter {k} does not belongs to the", "as sf >>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\")", "= self.groups return broadcast_shape # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D with Wav2Vec2->Hubert", "past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor]", "= tf.reduce_sum(loss) if self.config.ctc_loss_reduction == \"mean\": loss = 
tf.reduce_mean(loss) else:", "hidden_states = self.dropout(hidden_states, training=inputs[\"training\"]) logits = self.lm_head(hidden_states) if labels is", "weights should be of size {(bsz * self.num_heads, tgt_len, src_len)},", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "output lengths according to convolution formula output_lengths = self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"], -1))", "self.dropout(hidden_states, training=training) for i, layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states", "(bsz * self.num_heads, -1, self.head_dim) query_states = tf.reshape(self._shape(query_states, tgt_len, bsz),", "add this exception if \"args\" in output: if output[\"args\"] is", ") if inputs[\"attention_mask\"] is not None: # compute real output", "self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"], -1)) attention_mask = tf.sequence_mask( output_lengths, maxlen=shape_list(hidden_states)[1], dtype=hidden_states.dtype ) hidden_states", "groups=config.num_conv_pos_embedding_groups, explicit_padding=config.num_conv_pos_embeddings // 2, name=\"conv\", ) self.padding = TFHubertSamePadLayer(config.num_conv_pos_embeddings) self.activation", "beta def _check_if_input_shape_is_none(self, input_shape): dim = input_shape[self.axis] if dim is", "all inputs as a list, tuple or dict in the", "Indices should be in `[-100, 0, ..., config.vocab_size]` (see `input_values`", "((0, 0), (self.explicit_padding, self.explicit_padding), (0, 0))) output = super().call(padded_inputs) return", "we need to account for padding input_shape[-2] += self.explicit_padding *", "self.layer_norm = tf.keras.layers.LayerNormalization(name=\"layer_norm\", epsilon=config.layer_norm_eps) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states:", "self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder: # if cross_attention save 
Tuple(tf.Tensor,", "make sure num masked indices <= sequence_length if num_masked_spans *", "representation. This is useful if you want more control over", "under the Apache License, Version 2.0 (the \"License\"); # you", "if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_weights), [bsz * self.num_heads, tgt_len, src_len], message=f\"Attention", ") -> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states, attn_weights, _ =", "group_shape[self.axis] = input_shape[self.axis] // self.groups group_shape.insert(self.axis, self.groups) group_shape = tf.stack(group_shape)", "single Tensor with `input_values` only and nothing else: `model(inputs_ids)` -", "with all the parameters of the model. Initializing with a", "= spec_aug_mask_idxs + offsets # scatter indices to mask spec_aug_mask", "return spec_aug_mask # Copied from transformers.models.bart.modeling_tf_bart._expand_mask def _expand_mask(mask: tf.Tensor, tgt_len:", "= TFHubertAttention( embed_dim=config.hidden_size, num_heads=config.num_attention_heads, dropout=config.attention_dropout, is_decoder=False, name=\"attention\", ) self.dropout =", "feature encoder so that its parameter will not be updated", "be divisible by num_heads (got `embed_dim`: {self.embed_dim}\" f\" and `num_heads`:", "{self.config.vocab_size}\") attention_mask = ( inputs[\"attention_mask\"] if inputs[\"attention_mask\"] is not None", "( inputs[\"attention_mask\"] if inputs[\"attention_mask\"] is not None else tf.ones_like(inputs[\"input_values\"], dtype=tf.float32)", "is not None) return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) @keras_serializable", "int, ModelOutput, tuple, list, dict, np.ndarray) for k, v in", "its parameter will not be updated during training. 
\"\"\" self.hubert.feature_extractor.trainable", "spec_aug_mask = _scatter_values_on_batch_indices( tf.ones_like(spec_aug_mask_idxs), spec_aug_mask_idxs, spec_aug_mask.shape ) return spec_aug_mask #", "output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"],", "self.conv = tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.activation", "to avoid performing attention on padding token indices. Mask values", "outputs += (attn_weights,) return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder with", "= HubertConfig base_model_prefix = \"hubert\" main_input_name = \"input_values\" @property def", "model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether or", "be chosen as start of the span to be masked.", "Tensor names have always the pattern `name:id` then we check", "uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of # all previous", "signature.pop(\"self\", None) parameter_names = list(signature.keys()) output = {} allowed_types =", "training=inputs[\"training\"], ) hidden_states = outputs[0] hidden_states = self.dropout(hidden_states, training=inputs[\"training\"]) logits", "tgt_len, src_len) ) attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len,", "bool = False) -> tf.Tensor: hidden_states = self.layer_norm(hidden_states) hidden_states =", "(mask_length - 1))) # get random indices to mask spec_aug_mask_idxs", "offsets = tf.range(mask_length)[tf.newaxis, tf.newaxis, :] offsets = tf.tile(offsets, (batch_size, num_masked_spans,", "# scatter indices to mask spec_aug_mask = _scatter_values_on_batch_indices( tf.ones_like(spec_aug_mask_idxs), 
spec_aug_mask_idxs,", "value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states = tf.concat([past_key_value[0], key_states], axis=2)", "parameter list {parameter_names} and will be ignored.\" ) continue else:", "batch axis. Do you want to \" \"use tf.layer.batch_normalization instead\"", "input_shape): dim = input_shape[self.axis] if self.groups == -1: self.groups =", "sequence_length: num_masked_spans = sequence_length // mask_length # SpecAugment mask to", "Time x Channel\"\"\" # if key_value_states are provided this layer", "2, 1, 3) ) attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim))", "for the feature encoder so that its parameter will not", "0: mask_feature_indices = _compute_mask_indices( (batch_size, hidden_size), mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length, ) hidden_states", "inputs[\"return_dict\"] if inputs[\"return_dict\"] else self.config.return_dict outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"],", "src_len) ) attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))", "return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) @keras_serializable class TFHubertMainLayer(tf.keras.layers.Layer): config_class", "concat previous decoder key/value_states to current projected key/value_states (third \"elif\"", "False self.kernel_norm_axes = tf.constant([0, 1]) def _init_norm(self): \"\"\"Set the norm", "option is useful when using [`tf.keras.Model.fit`] method which currently requires", "from transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices def _scatter_values_on_batch_indices(values, batch_indices, output_shape): \"\"\" Scatter function as", "either express or implied. 
# See the License for the", "= \"input_values\" @property def dummy_inputs(self) -> Dict[str, tf.Tensor]: pad_token =", "= hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states, attn_weights, _ = self.attention(", "for v in [hidden_states, all_hidden_states, all_self_attentions] if v is not", "all inputs as keyword arguments (like PyTorch models), or -", "for more detail. This argument can be used only in", "tensor_input_shape) else: outputs = normalized_inputs return outputs def get_config(self): config", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward with Wav2Vec2->Hubert class TFHubertFeedForward(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs):", "to load the model weights. \"\"\" HUBERT_INPUTS_DOCSTRING = r\"\"\" Args:", "if config.feat_extract_norm == \"group\": conv_layers = [TFHubertGroupNormConvLayer(config, layer_id=0, name=f\"conv_layers.{0}\")] +", ">= self.config.vocab_size: raise ValueError(f\"Label values must be <= vocab_size: {self.config.vocab_size}\")", "training=training) for i, layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states =", "position_embeddings hidden_states = self.layer_norm(hidden_states) hidden_states = self.dropout(hidden_states, training=training) for i,", "allowed_types) or v is None: output[k] = v else: raise", "self.output_dense(hidden_states) hidden_states = self.output_dropout(hidden_states, training=training) return hidden_states # Copied from", "kernel = tf.nn.l2_normalize(self.weight_v, axis=self.kernel_norm_axes) * tf.transpose(self.weight_g) self.kernel = tf.transpose(kernel) def", "one_cst = tf.constant(1.0) mask = tf.cast(mask, dtype=one_cst.dtype) expanded_mask = tf.tile(mask[:,", "tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( attn_weights, (bsz, self.num_heads,", "mask spec_aug_mask = _scatter_values_on_batch_indices( tf.ones_like(spec_aug_mask_idxs), spec_aug_mask_idxs, spec_aug_mask.shape ) return 
spec_aug_mask", "None, token_type_ids: Optional[tf.Tensor] = None, position_ids: Optional[tf.Tensor] = None, head_mask:", "isinstance(input, allowed_types) or input is None: output[parameter_names[i]] = input else:", "Tuple(tf.Tensor, tf.Tensor) of # all previous decoder key/value_states. Further calls", "the weight vector.\"\"\" kernel_norm = tf.sqrt(tf.reduce_sum(tf.square(self.weight_v), axis=self.kernel_norm_axes)) self.weight_g.assign(kernel_norm[:, tf.newaxis, tf.newaxis])", "== -1 else self.axis - 1 else: axis = -1", "hidden_states = layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],)", "= self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask, axis=-1)) # assuming that padded tokens are filled", "// self.groups) == 1 if not is_instance_norm: outputs = tf.reshape(normalized_inputs,", "head_mask: Optional[tf.Tensor] = None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[bool]", "TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int = 0, **kwargs:", "evaluation). \"\"\" @add_start_docstrings( \"The bare TFHubert Model transformer outputing raw", "position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, kwargs_call=kwargs, ) hidden_states", "group_shape = tf.stack(group_shape) reshaped_inputs = tf.reshape(inputs, group_shape) return reshaped_inputs, group_shape", "selected heads of the self-attention modules. Mask values selected in", "inherits from [`TFPreTrainedModel`]. 
Check the superclass documentation for the generic", "tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes = list(range(1, len(group_shape))) is_instance_norm = (input_shape[self.axis] // self.groups)", "False, ) -> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states, attn_weights, _", "mask should be of size {(bsz, 1, tgt_len, src_len)}, but", "\"\"\" def __init__( self, groups: int = 32, axis: int", "split=\"validation\") >>> ds = ds.map(map_to_array) >>> input_values = processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values", "# get key, value proj if is_cross_attention and past_key_value is", "vectors than the model's internal embedding lookup matrix. output_attentions (`bool`,", "when not being attended to labels_mask = tf.cast(labels >= 0,", "-> Dict[str, tf.Tensor]: pad_token = 0.0 input_values = tf.convert_to_tensor(np.random.rand(1, 16000),", "and (dropout_probability < self.config.layerdrop): # skip the layer continue layer_outputs", "allowed_types) or v is None: output[k] = v elif k", "is **masked**. inputs_embeds (`np.ndarray` or `tf.Tensor` of shape `({0}, hidden_size)`,", "[bsz, 1, tgt_len, src_len], message=f\"Attention mask should be of size", "0 indicates the head is **masked**. 
inputs_embeds (`np.ndarray` or `tf.Tensor`", "return_tensors=\"tf\").input_values # Batch size 1 >>> hidden_states = model(input_values).last_hidden_state ```\"\"\"", "BatchEncoding)): if \"inputs\" in input_values: warnings.warn( \"The `inputs` argument is", "PyTorch models), or - having all inputs as a list,", "@add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFCausalLMOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor]", "XLA then they # have to be disabled in other", "layer if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not", "if not is_instance_norm: outputs = tf.reshape(normalized_inputs, tensor_input_shape) else: outputs =", "in a future version, use `past_key_values` instead.\", FutureWarning, ) output[\"past_key_values\"]", "`[bsz, 1, tgt_seq_len, src_seq_len]`. \"\"\" src_len = shape_list(mask)[1] tgt_len =", "\"axis\": self.axis, \"epsilon\": self.epsilon, \"center\": self.center, \"scale\": self.scale, \"beta_initializer\": tf.keras.initializers.serialize(self.beta_initializer),", "self.dropout(hidden_states, training=training) hidden_states = attn_residual + hidden_states hidden_states = self.layer_norm(hidden_states)", "use this file except in compliance with the License. #", ">>> import tensorflow as tf >>> from transformers import Wav2Vec2Processor,", "center: bool = True, scale: bool = True, beta_initializer: tf.keras.initializers.Initializer", "not None else tf.ones_like(inputs[\"input_values\"], dtype=tf.float32) ) input_lengths = self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask, axis=-1))", "hidden states of all layers. 
See `hidden_states` under returned tensors", "indices set to `-100` are ignored (masked), the loss is", "be bigger than 0.\") if mask_length > sequence_length: raise ValueError(", "head on top for Connectionist Temporal Classification (CTC).\"\"\", HUBERT_START_DOCSTRING, )", "training: bool = False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: \"\"\" Returns:", "future version, use `input_values` instead.\", FutureWarning, ) output[\"input_values\"] = input_values.pop(\"inputs\")", "hidden_states = self.output_dense(hidden_states) hidden_states = self.output_dropout(hidden_states, training=training) return hidden_states #", ") def serving_output(self, output: TFCausalLMOutput) -> TFCausalLMOutput: hs = tf.convert_to_tensor(output.hidden_states)", "we check only the # `name` part tensor_name = input.name.split(\":\")[0]", "-> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.layer_norm(hidden_states) hidden_states =", "None, return_dict: Optional[bool] = None, training: Optional[bool] = False, )", "Indices can be obtained using [`BertTokenizer`]. 
See [`PreTrainedTokenizer.__call__`] and [`PreTrainedTokenizer.encode`]", "x Time x Channel\"\"\" # if key_value_states are provided this", "TFHubertModel >>> from datasets import load_dataset >>> import soundfile as", "# uniform distribution to sample from, make sure that offset", "raise ValueError(f\"Label values must be <= vocab_size: {self.config.vocab_size}\") attention_mask =", "config.vocab_size]` Returns: Example: ```python >>> import tensorflow as tf >>>", "the docstring: `model({\"input_values\": input_values, \"token_type_ids\": token_type_ids})` </Tip> Args: config ([`HubertConfig`]):", "-1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states = tf.concat([past_key_value[0],", "gamma = tf.reshape(self.gamma, broadcast_shape) if self.center: beta = tf.reshape(self.beta, broadcast_shape)", "with indices in format (batch_dim, indixes) \"\"\" indices_shape = shape_list(batch_indices)", "dim is None: raise ValueError( \"Axis \" + str(self.axis) +", "len(input_shape) is_instance_norm = (input_shape[self.axis] // self.groups) == 1 if not", "the parameter list {parameter_names} and will be ignored.\" ) continue", "if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) hidden_states = self.layer_norm(hidden_states)", "self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is not None: # reuse", "LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = np.random.uniform(0, 1) if", "will be smaller (unless no_overlap is True) mask_length: size of", "key_value_states: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask: Optional[tf.Tensor]", "mask_length < 1: raise ValueError(\"`mask_length` has to be bigger than", "(batch_size, hidden_size), mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length, ) hidden_states = tf.where(mask_feature_indices[:, tf.newaxis, :],", "None: # The 
tf.debugging asserts are not compliant with XLA", "num_heads: int, dropout: float = 0.0, is_decoder: bool = False,", "src_len)}, but is {shape_list(attention_mask)}\", ) attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype) attn_weights", "model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for the", "in kwargs.items(): if isinstance(v, allowed_types) or v is None: output[k]", "values selected in `[0, 1]`: - 1 indicates the head", "= tf.expand_dims(input_values, -1) for conv_layer in self.conv_layers: hidden_states = conv_layer(hidden_states)", "be used instead. return_dict (`bool`, *optional*): Whether or not to", "None) if inputs[\"training\"]: hidden_states = self._mask_hidden_states(hidden_states, mask_time_indices=mask_time_indices) encoder_outputs = self.encoder(", "+ offsets # scatter indices to mask spec_aug_mask = _scatter_values_on_batch_indices(", "`tf.Tensor` of shape `({0})`, *optional*): Indices of positions of each", "not None: # reuse k, v, self_attention key_states = self._shape(self.k_proj(hidden_states),", "stride): # 1D convolutional layer output length formula taken #", "def input_values_processing(func, config, input_values, **kwargs): \"\"\" Process the input of", "along feature axis according to [SpecAugment](https://arxiv.org/abs/1904.08779). 
\"\"\" batch_size, sequence_length, hidden_size", "reshaped_inputs, group_shape else: return inputs, group_shape def _apply_normalization(self, reshaped_inputs, input_shape):", "attention_mask: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = False, output_hidden_states: Optional[bool]", "min_masks=2, ) hidden_states = tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis,", "= _scatter_values_on_batch_indices( tf.ones_like(spec_aug_mask_idxs), spec_aug_mask_idxs, spec_aug_mask.shape ) return spec_aug_mask # Copied", "filters, kernel_size, groups, explicit_padding, **kwargs): super().__init__( filters=filters, kernel_size=kernel_size, groups=groups, padding=\"valid\",", "tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [ TFHubertEncoderLayerStableLayerNorm(config, name=f\"layers.{i}\") for i in range(config.num_hidden_layers)", "layer continue layer_outputs = layer_module( hidden_states=hidden_states, attention_mask=attention_mask, output_attentions=output_attentions, training=training, )", "each TensorFlow model including the booleans. 
In case of a", "one or several input Tensors associated to the input names", "broadcast_shape[self.axis] = self.groups return broadcast_shape # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D with", "logits=logits, labels=labels, logit_length=input_lengths, label_length=target_lengths, blank_index=self.config.pad_token_id, logits_time_major=False, ) if self.config.ctc_loss_reduction ==", "of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}\", )", "Add last layer if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,)", "= shape_list(hidden_states) # get query proj query_states = self.q_proj(hidden_states) *", "tf.debugging.assert_equal( shape_list(layer_head_mask), [self.num_heads], message=f\"Head mask for a single layer should", "hidden_states hidden_states = hidden_states + self.feed_forward(self.final_layer_norm(hidden_states)) outputs = (hidden_states,) if", "you can choose to directly pass an embedded representation. 
This", "input_values.pop(\"inputs\") if \"decoder_cached_states\" in input_values: warnings.warn( \"The `decoder_cached_states` argument is", "self.pos_conv_embed(hidden_states) hidden_states = hidden_states + position_embeddings hidden_states = self.layer_norm(hidden_states) hidden_states", "self.lm_head(hidden_states) if labels is not None: if tf.reduce_max(labels) >= self.config.vocab_size:", "hidden_states = tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :],", "else tf.ones_like(inputs[\"input_values\"], dtype=tf.float32) ) input_lengths = self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask, axis=-1)) # assuming", "(`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]` ``Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and each", "__init__( self, groups: int = 32, axis: int = -1,", ">>> logits = model(input_values).logits >>> predicted_ids = tf.argmax(logits, axis=-1) >>>", "import ( ModelOutput, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_hubert", "inputs, group_shape def _apply_normalization(self, reshaped_inputs, input_shape): group_shape = tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes", "def dummy_inputs(self) -> Dict[str, tf.Tensor]: pad_token = 0.0 input_values =", "input_values: tf.Tensor, attention_mask: Optional[tf.Tensor] = None, token_type_ids: Optional[tf.Tensor] = None,", "and `num_heads`: {num_heads}).\" ) self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder", "== 1 if not is_instance_norm: outputs = tf.reshape(normalized_inputs, tensor_input_shape) else:", "self.groups) else: broadcast_shape[self.axis] = self.groups return broadcast_shape # Copied from", "loss = tf.nn.ctc_loss( logits=logits, labels=labels, logit_length=input_lengths, label_length=target_lengths, blank_index=self.config.pad_token_id, logits_time_major=False, )", "have a defined dimension \" \"but the 
layer received an", "= attn_residual + hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states = hidden_states", "a single Tensor with `input_values` only and nothing else: `model(inputs_ids)`", "None: raise ValueError( \"Axis \" + str(self.axis) + \" of", "\"\"\" TensorFlow Hubert model.\"\"\" import inspect import warnings from typing", "= None, output_attentions: Optional[bool] = None, labels: Optional[tf.Tensor] = None,", "**kwargs): \"\"\" Process the input of each TensorFlow model including", "= input else: raise ValueError( f\"Data of type {type(input)} is", "1, 1)) * tf.reshape( attn_weights, (bsz, self.num_heads, tgt_len, src_len) )", "output.update(booleans_processing(config=config, **boolean_dict)) return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement def _sample_without_replacement(distribution,", "not is_instance_norm: outputs = tf.reshape(normalized_inputs, tensor_input_shape) else: outputs = normalized_inputs", "def __init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.config", "So to respect the proper output we have to add", "isinstance(input_values, (dict, BatchEncoding)): if \"inputs\" in input_values: warnings.warn( \"The `inputs`", "training=training) attn_output = tf.matmul(attn_probs, value_states) # The tf.debugging asserts are", "([`HubertConfig`]): Model configuration class with all the parameters of the", "a plain tuple. 
This argument can be used in eager", "self._shape(self.v_proj(hidden_states), -1, bsz) key_states = tf.concat([past_key_value[0], key_states], axis=2) value_states =", "= \"ones\", beta_regularizer: tf.keras.regularizers.Regularizer = None, gamma_regularizer: tf.keras.regularizers.Regularizer = None,", "cross-attention # key/value_states (first \"if\" case) # if uni-directional self-attention", "if not getattr(self.config, \"apply_spec_augment\", True): return hidden_states if mask_time_indices is", "training=training) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer with Wav2Vec2->Hubert class", "import TFPreTrainedModel, booleans_processing, get_initializer, keras_serializable from ...tf_utils import shape_list from", "downloading or saving, resizing the input embeddings, pruning heads etc.)", "Whether or not to return the hidden states of all", "TF 2.0 models accepts two formats as inputs: - having", "+= (attn_weights,) return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm with Wav2Vec2->Hubert", "layer is used as a cross-attention layer # for the", "in this case is always the first parameter, then `input_values`", "[\"return_dict\", \"output_attentions\", \"output_hidden_states\", \"use_cache\"] } output.update(booleans_processing(config=config, **boolean_dict)) return output #", "= input_shape.as_list() # Conv1D output shapes are checked at build", "None, gamma_regularizer: tf.keras.regularizers.Regularizer = None, beta_constraint: tf.keras.constraints.Constraint = None, gamma_constraint:", "need to account for padding input_shape[-2] += self.explicit_padding * 2", "hidden_states: tf.Tensor, mask_time_indices: Optional[tf.Tensor] = None): \"\"\" Masks extracted features", "Classification (CTC).\"\"\", HUBERT_START_DOCSTRING, ) class TFHubertForCTC(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig,", "out the 
[`~PreTrainedModel.from_pretrained`] method to load the model weights. \"\"\"", "class TFHubertEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config =", "SavedModel TF calls the method with LayerCall.__call__(args, **kwargs) # So", "message=f\"`attn_output` should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but", "`freeze_feature_extractor` is deprecated and will be removed in Transformers v5.\"", "backpropagation operations that are NOT supported on CPU. If you", "(hidden_states,) if output_attentions: outputs += (attn_weights,) return outputs # Copied", "... batch[\"speech\"] = speech ... return batch >>> ds =", "+ self.feed_forward(hidden_states) hidden_states = self.final_layer_norm(hidden_states) outputs = (hidden_states,) if output_attentions:", "since TF 2.7, so we need to account for padding", "in `[0, 1]`: - 1 for tokens that are **not", "be <= vocab_size: {self.config.vocab_size}\") attention_mask = ( inputs[\"attention_mask\"] if inputs[\"attention_mask\"]", "`model({\"input_values\": input_values, \"token_type_ids\": token_type_ids})` </Tip> Args: config ([`HubertConfig`]): Model configuration", "bias_initializer=\"zeros\", name=\"projection\", ) self.dropout = tf.keras.layers.Dropout(rate=config.feat_proj_dropout) def call(self, hidden_states: tf.Tensor,", "eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_output), [bsz * self.num_heads, tgt_len, self.head_dim],", "int = 0, **kwargs: Any) -> None: super().__init__(**kwargs) self.in_conv_dim =", "self.initialized = True self._normalize_kernel() padded_inputs = tf.pad(inputs, ((0, 0), (self.explicit_padding,", "team. All rights reserved. 
# # Licensed under the Apache", "selected in `[0, 1]`: - 1 indicates the head is", "= (input_shape[self.axis] // self.groups) == 1 if not is_instance_norm: axis", "logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def serving_output(self, output: TFCausalLMOutput) -> TFCausalLMOutput:", ") self.freeze_feature_encoder() def freeze_feature_encoder(self): \"\"\" Calling this function will disable", "message=f\"Attention mask should be of size {(bsz, 1, tgt_len, src_len)},", "the order of the tensors will not be guaranteed during", "self-attention `past_key_value` is always `None` past_key_value = (key_states, value_states) proj_shape", "name=\"layer_norm\") def call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states)", "input_shape): group_shape = tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes = list(range(1, len(group_shape))) is_instance_norm =", "and evaluation). \"\"\" @add_start_docstrings( \"The bare TFHubert Model transformer outputing", "else: return inputs, group_shape def _apply_normalization(self, reshaped_inputs, input_shape): group_shape =", "self.out_conv_dim = config.conv_dim[layer_id] self.conv = tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias,", "\"Please use the equivalent `freeze_feature_encoder` method instead.\", FutureWarning, ) self.freeze_feature_encoder()", "output length of the convolutional layers \"\"\" def _conv_out_length(input_length, kernel_size,", "will not be updated during training. 
\"\"\" warnings.warn( \"The method", "= tf.keras.layers.InputSpec(ndim=len(input_shape), axes={self.axis: dim}) def _add_gamma_weight(self, input_shape): dim = input_shape[self.axis]", "if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) # add LayerDrop", "np.random.uniform(0, 1) if training and (dropout_probability < self.config.layerdrop): # skip", "self, embed_dim: int, num_heads: int, dropout: float = 0.0, is_decoder:", "be a \" \"multiple of the number of channels (\"", "gumbel-max trick will do for now - see https://github.com/tensorflow/tensorflow/issues/9260 for", "if tensor_name in parameter_names: output[tensor_name] = input else: output[parameter_names[i]] =", "message=f\"Attention weights should be of size {(bsz * self.num_heads, tgt_len,", "inputs[\"output_hidden_states\"] = ( inputs[\"output_hidden_states\"] if inputs[\"output_hidden_states\"] else self.config.output_hidden_states ) inputs[\"output_attentions\"]", "(some modules like dropout modules have different behaviors between training", "= input_shape[self.axis] if dim < self.groups: raise ValueError( \"Number of", "exception if \"args\" in output: if output[\"args\"] is not None", "= (dim,) if self.scale: self.gamma = self.add_weight( shape=shape, name=\"gamma\", initializer=self.gamma_initializer,", "formats as inputs: - having all inputs as keyword arguments", "dtype=tf.float32) ) input_lengths = self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask, axis=-1)) # assuming that padded", "function of the TensorFlow model. config ([`PretrainedConfig`]): The config of", "HubertConfig, layer_id: int = 0, **kwargs: Any) -> None: super().__init__(**kwargs)", "0))) output = super().call(padded_inputs) return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer", "of groups (\" + str(self.groups) + \") cannot be \"", "a [`~file_utils.ModelOutput`] instead of a plain tuple. 
This argument can", "input_shape[self.axis] self.input_spec = tf.keras.layers.InputSpec(ndim=len(input_shape), axes={self.axis: dim}) def _add_gamma_weight(self, input_shape): dim", "the License. \"\"\" TensorFlow Hubert model.\"\"\" import inspect import warnings", "dict, np.ndarray) for k, v in kwargs.items(): if isinstance(v, allowed_types)", "(tuple, list)): for i, input in enumerate(input_values): # EagerTensors don't", "tuple. This argument can be used in eager mode, in", "func (`callable`): The callable function of the TensorFlow model. config", "self.beta_initializer = tf.keras.initializers.get(beta_initializer) self.gamma_initializer = tf.keras.initializers.get(gamma_initializer) self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer) self.gamma_regularizer", "inputs[\"output_attentions\"] if inputs[\"output_attentions\"] else self.config.output_attentions ) inputs[\"return_dict\"] = inputs[\"return_dict\"] if", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer with Wav2Vec2->Hubert class TFHubertEncoderLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "disabled in other modes than eager. 
if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_output),", "self-attention # can concat previous decoder key/value_states to current projected", "tf.Tensor: \"\"\" Computes random mask spans for a given shape", "axis=-1) loss = tf.nn.ctc_loss( logits=logits, labels=labels, logit_length=input_lengths, label_length=target_lengths, blank_index=self.config.pad_token_id, logits_time_major=False,", "indices into associated vectors than the model's internal embedding lookup", "zip(self.config.conv_kernel, self.config.conv_stride): input_lengths = _conv_out_length(input_lengths, kernel_size, stride) return input_lengths def", "continue layer_outputs = layer_module( hidden_states=hidden_states, attention_mask=attention_mask, output_attentions=output_attentions, training=training, ) hidden_states", "that are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids (`np.ndarray` or", "due to overlaps, the actual number will be smaller (unless", "as a cross-attention layer # for the decoder is_cross_attention =", "tf.cast(inputs[\"input_values\"], tf.float32), training=inputs[\"training\"] ) if inputs[\"attention_mask\"] is not None: #", "tgt_len, self.head_dim)), (0, 2, 1, 3) ) attn_output = tf.reshape(attn_output,", "broadcast_shape[self.axis] = input_shape[self.axis] // self.groups broadcast_shape.insert(self.axis, self.groups) else: broadcast_shape[self.axis] =", "def call(self, hidden_states): if self.num_pad_remove > 0: hidden_states = hidden_states[:,", "hidden_states = self.feature_projection(hidden_states, training=inputs[\"training\"]) mask_time_indices = kwargs.get(\"mask_time_indices\", None) if inputs[\"training\"]:", "are < sequence_length uniform_dist = tf.ones((batch_size, sequence_length - (mask_length -", "= tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.activation =", "model. 
**kwargs: The inputs of the model. Returns: Two lists,", "CPU. If you wish \" \"to train/fine-tine this model, you", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices def _compute_mask_indices( shape: Tuple[int, int], mask_prob: float,", "[What are position IDs?](../glossary#position-ids) head_mask (`np.ndarray` or `tf.Tensor` of shape", "(bsz, self.num_heads, tgt_len, src_len) ) attn_weights = tf.reshape(attn_weights, (bsz *", "dtype='float32', name=\"input_values\")` otherwise the order of the tensors will not", ") hidden_states = encoder_outputs[0] if not inputs[\"return_dict\"]: return (hidden_states,) +", "{k} does not belongs to the parameter list {parameter_names} and", "= tf.reshape(offsets, (batch_size, num_masked_spans * mask_length)) spec_aug_mask_idxs = spec_aug_mask_idxs +", "src_len], message=f\"Attention mask should be of size {(bsz, 1, tgt_len,", "'layer']\" ) self.conv_layers = conv_layers def call(self, input_values): hidden_states =", "masked**, - 0 for tokens that are **masked**. 
[What are", "self.add_weight( shape=shape, name=\"beta\", initializer=self.beta_initializer, regularizer=self.beta_regularizer, constraint=self.beta_constraint, ) else: self.beta =", "but is {shape_list(attn_output)}\", ) attn_output = tf.transpose( tf.reshape(attn_output, (bsz, self.num_heads,", "beta = tf.reshape(self.beta, broadcast_shape) return gamma, beta def _check_if_input_shape_is_none(self, input_shape):", "= tf.reshape(normalized_inputs, tensor_input_shape) else: outputs = normalized_inputs return outputs def", "= ( inputs[\"output_hidden_states\"] if inputs[\"output_hidden_states\"] else self.config.output_hidden_states ) inputs[\"output_attentions\"] =", "name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor) ->", "self.conv(hidden_states) hidden_states = self.padding(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states #", "None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[tf.Tensor] = None, output_hidden_states:", "attention_mask=attention_mask, training=training ) hidden_states = self.dropout(hidden_states, training=training) hidden_states = attn_residual", "None: hidden_states = hidden_states * tf.expand_dims(attention_mask, -1) attention_mask = _expand_mask(attention_mask)", "transformer outputing raw hidden-states without any specific head on top.\",", "TFBart->TFHubert class TFHubertAttention(tf.keras.layers.Layer): \"\"\"Multi-headed attention from \"Attention Is All You", "config self.feature_extractor = TFHubertFeatureEncoder(config, name=\"feature_extractor\") self.feature_projection = TFHubertFeatureProjection(config, name=\"feature_projection\") if", "attention_mask attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_weights", "= np.random.uniform(0, 1) if training and (dropout_probability < self.config.layerdrop): #", "for computing the masked language modeling loss. 
Indices should be", "= model(input_values).logits >>> predicted_ids = tf.argmax(logits, axis=-1) >>> transcription =", "+ output) if loss is not None else output return", "All You Need\"\"\" def __init__( self, embed_dim: int, num_heads: int,", "to \" \"use tf.layer.batch_normalization instead\" ) def _create_input_spec(self, input_shape): dim", "of the convolutional layers \"\"\" def _conv_out_length(input_length, kernel_size, stride): #", "can be used only in eager mode, in graph mode", "Scatter function as in PyTorch with indices in format (batch_dim,", "only and nothing else: `model(inputs_ids)` - a list of varying", "has been depreciated \" \"and will be removed in Transformers", "- 1))) # get random indices to mask spec_aug_mask_idxs =", "config_class = HubertConfig def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config", "== tf.Tensor: # Tensor names have always the pattern `name:id`", ">>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\") >>> def", "tf.ones((batch_size, sequence_length - (mask_length - 1))) # get random indices", "the config will be used instead. output_hidden_states (`bool`, *optional*): Whether", "guaranteed during the training. Args: func (`callable`): The callable function", "): super().__init__(**kwargs) self.supports_masking = True self.groups = groups self.axis =", "supported on CPU. If you wish \" \"to train/fine-tine this", "epsilon: float = 1e-3, center: bool = True, scale: bool", "config.max_position_embeddings - 1]`. 
[What are position IDs?](../glossary#position-ids) head_mask (`np.ndarray` or", "self.beta_constraint = tf.keras.constraints.get(beta_constraint) self.gamma_constraint = tf.keras.constraints.get(gamma_constraint) self._check_axis() def build(self, input_shape):", "elif isinstance(input_values, (dict, BatchEncoding)): if \"inputs\" in input_values: warnings.warn( \"The", "__init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.hubert =", "output = {} allowed_types = (tf.Tensor, bool, int, ModelOutput, tuple,", "def compute_output_shape(self, input_shape): return input_shape def _reshape_into_groups(self, inputs, input_shape, tensor_input_shape):", "None, **kwargs, ): super().__init__(**kwargs) self.supports_masking = True self.groups = groups", "only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load", "THE UNIVERSE SIR I EXIST\" >>> # wrap processor as", "kernel_size, groups, explicit_padding, **kwargs): super().__init__( filters=filters, kernel_size=kernel_size, groups=groups, padding=\"valid\", use_bias=True,", "1 if not is_instance_norm: broadcast_shape[self.axis] = input_shape[self.axis] // self.groups broadcast_shape.insert(self.axis,", "spec_aug_mask_idxs, spec_aug_mask.shape ) return spec_aug_mask # Copied from transformers.models.bart.modeling_tf_bart._expand_mask def", "and [`PreTrainedTokenizer.encode`] for details. 
[What are input IDs?](../glossary#input-ids) attention_mask (`np.ndarray`", "call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states =", "instead\" ) def _create_input_spec(self, input_shape): dim = input_shape[self.axis] self.input_spec =", "# You may obtain a copy of the License at", "**kwargs) self.hubert = TFHubertMainLayer(config, name=\"hubert\") self.dropout = tf.keras.layers.Dropout(config.final_dropout) self.lm_head =", "== 1 if not is_instance_norm: broadcast_shape[self.axis] = input_shape[self.axis] // self.groups", "hidden_states = self.conv(hidden_states) hidden_states = self.layer_norm(hidden_states) hidden_states = self.activation(hidden_states) return", "offsets = tf.tile(offsets, (batch_size, num_masked_spans, 1)) offsets = tf.reshape(offsets, (batch_size,", "output_attentions: outputs += (attn_weights,) return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder", "> sequence_length: num_masked_spans = sequence_length // mask_length # SpecAugment mask", "attn_output = self.out_proj(attn_output) attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len,", "in the first argument of the model call function: `model(inputs)`.", "the decoder is_cross_attention = key_value_states is not None bsz, tgt_len,", "(tf.Tensor, bool, int, ModelOutput, tuple, list, dict, np.ndarray) for k,", "v is None: output[k] = v else: raise ValueError(f\"Data of", "self.epsilon, \"center\": self.center, \"scale\": self.scale, \"beta_initializer\": tf.keras.initializers.serialize(self.beta_initializer), \"gamma_initializer\": tf.keras.initializers.serialize(self.gamma_initializer), \"beta_regularizer\":", "(self.explicit_padding, self.explicit_padding), (0, 0))) output = super().call(padded_inputs) return output #", "None: super().__init__(**kwargs) if config.feat_extract_norm == \"group\": conv_layers = [TFHubertGroupNormConvLayer(config, layer_id=0,", "not 
implemented. The gumbel-max trick will do for now -", "sequence_length)`, *optional*): Labels for computing the masked language modeling loss.", "elif self.config.mask_time_prob > 0: # generate indices & apply SpecAugment", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer with Wav2Vec2->Hubert class TFHubertSamePadLayer(tf.keras.layers.Layer): def __init__(self,", "must be a \" \"multiple of the number of channels", "in other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_output), [bsz", "if mask_time_indices is not None: # apply SpecAugment along time", "use the .name property so we check for a real", "Optional[bool] = False, output_hidden_states: Optional[bool] = False, return_dict: Optional[bool] =", "tf.reduce_mean(loss) else: loss = None if not inputs[\"return_dict\"]: output =", "input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training,", "-1])], 0)) # scatter values to pair indices return tf.scatter_nd(pair_indices,", "tf.matmul(attn_probs, value_states) # The tf.debugging asserts are not compliant with", ") -> Union[TFCausalLMOutput, Tuple[tf.Tensor]]: r\"\"\" labels (`tf.Tensor` or `np.ndarray` of", "int = -1, epsilon: float = 1e-3, center: bool =", "return hidden_states class TFHubertFeatureExtractor(TFHubertFeatureEncoder): def __init__(self, config, **kwargs): super().__init__(config, **kwargs)", "super().build(input_shape) self.kernel = tf.Variable(tf.transpose(self.kernel), name=\"weight_v\", trainable=True) self.weight_v = self.kernel self.weight_g", "outputs[0] hidden_states = self.dropout(hidden_states, training=inputs[\"training\"]) logits = self.lm_head(hidden_states) if labels", "None) return TFBaseModelOutput( 
last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) # Copied from", "to account for padding input_shape[-2] += self.explicit_padding * 2 super().build(input_shape)", "input_values = processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values # Batch size 1 >>> logits", "to directly pass an embedded representation. This is useful if", "this layer is used as a cross-attention layer # for", "and past_key_value is not None: # reuse k,v, cross_attentions key_states", "else: output[parameter_names[i]] = input elif isinstance(input, allowed_types) or input is", "broadcast_shape) if self.center: beta = tf.reshape(self.beta, broadcast_shape) return gamma, beta", "TO THE UNIVERSE SIR I EXIST\" >>> # wrap processor", "model(input_values).logits >>> predicted_ids = tf.argmax(logits, axis=-1) >>> transcription = processor.decode(predicted_ids[0])", "= self.out_proj(attn_output) attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len))", "= tgt_len if tgt_len is not None else src_len one_cst", "raise ValueError( f\"`config.feat_extract_norm` is {config.feat_extract_norm}, but has to be one", ") elif isinstance(input_values, (dict, BatchEncoding)): if \"inputs\" in input_values: warnings.warn(", "= explicit_padding self.filter_axis = 2 self.initialized = False self.kernel_norm_axes =", "2.0 (the \"License\"); # you may not use this file", "num_heads self.dropout = tf.keras.layers.Dropout(dropout) self.head_dim = embed_dim // num_heads if", "self.kernel = tf.Variable(tf.transpose(self.kernel), name=\"weight_v\", trainable=True) self.weight_v = self.kernel self.weight_g =", "layer_id=0, name=f\"conv_layers.{0}\")] + [ TFHubertNoLayerNormConvLayer(config, layer_id=i + 1, name=f\"conv_layers.{i+1}\") for", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D with Wav2Vec2->Hubert class TFHubertWeightNormConv1D(tf.keras.layers.Conv1D): \"\"\"Adapted from", "= 
tf.argmax(logits, axis=-1) >>> transcription = processor.decode(predicted_ids[0]) >>> # compute", "parameters of the model. Initializing with a config file does", "for a given shape Args: shape: the the shape for", "call function: `model(inputs)`. If you choose this second option, there", "the first argument of the model call function: `model(inputs)`. If", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer): def __init__(self,", "tf.keras.constraints.Constraint = None, **kwargs, ): super().__init__(**kwargs) self.supports_masking = True self.groups", "return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer with Wav2Vec2->Hubert class TFHubertLayerNormConvLayer(tf.keras.layers.Layer):", "Computes random mask spans for a given shape Args: shape:", "tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.feed_forward = TFHubertFeedForward(config, name=\"feed_forward\") self.final_layer_norm = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps,", "must have the shape `({0})`): Indices of input sequence tokens", "this second option, there are three possibilities you can use", "kernel_size=config.num_conv_pos_embeddings, groups=config.num_conv_pos_embedding_groups, explicit_padding=config.num_conv_pos_embeddings // 2, name=\"conv\", ) self.padding = TFHubertSamePadLayer(config.num_conv_pos_embeddings)", "attention_mask is not None: # The tf.debugging asserts are not", "2, 1, 3)) def call( self, hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor]", "tf.transpose(self.weight_g) self.kernel = tf.transpose(kernel) def build(self, input_shape): if not self.built:", "key_states = tf.reshape(key_states, proj_shape) value_states = tf.reshape(value_states, proj_shape) src_len =", "last_hidden_state=hidden_states, 
hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) class TFHubertPreTrainedModel(TFPreTrainedModel): \"\"\" An abstract class", "corresponds to a *sentence A* token, - 1 corresponds to", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config:", "- 1 else: axis = -1 if self.axis == -1", "hidden_states = self.layer_norm(hidden_states) hidden_states = self.projection(hidden_states) hidden_states = self.dropout(hidden_states, training=training)", ") # apply SpecAugment along feature axis if self.config.mask_feature_prob >", "= hidden_states + position_embeddings hidden_states = self.layer_norm(hidden_states) hidden_states = self.dropout(hidden_states,", "dummy_inputs def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) logger.warning(", "self.epsilon = epsilon self.center = center self.scale = scale self.beta_initializer", ":, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) # apply", "!= \"args\": output[name] = kwargs.pop(name, signature[name].default) # When creating a", "expanded_mask) * LARGE_NEGATIVE # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm with Wav2Vec2->Hubert class", "self.intermediate_act_fn = get_tf_activation(config.hidden_act) self.output_dense = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"output_dense\",", "self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [ TFHubertEncoderLayerStableLayerNorm(config, name=f\"layers.{i}\") for i", "processor.decode(predicted_ids[0]) >>> # compute loss >>> target_transcription = \"A MAN", "layer_id: int = 0, **kwargs: Any) -> None: super().__init__(**kwargs) self.in_conv_dim", "stride + 1 for 
kernel_size, stride in zip(self.config.conv_kernel, self.config.conv_stride): input_lengths", "mask = tf.cast(mask, dtype=one_cst.dtype) expanded_mask = tf.tile(mask[:, None, None, :],", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding with Wav2Vec2->Hubert class TFHubertPositionalConvEmbedding(tf.keras.layers.Layer): def __init__(self,", "self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer with Wav2Vec2->Hubert class", ":], hidden_states, 0) return hidden_states def call( self, input_values: tf.Tensor,", "__init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.config =", "+ z, num_samples) return indices # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices def", "[TFHubertGroupNormConvLayer(config, layer_id=0, name=f\"conv_layers.{0}\")] + [ TFHubertNoLayerNormConvLayer(config, layer_id=i + 1, name=f\"conv_layers.{i+1}\")", "...utils import ( ModelOutput, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from", "(`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*): Mask to avoid", "= self.pos_conv_embed(hidden_states) hidden_states = hidden_states + position_embeddings hidden_states = self.dropout(hidden_states,", "(`bool`, *optional*): Whether or not to return the hidden states", "using [`tf.keras.Model.fit`] method which currently requires having all the tensors", "of the self-attention modules. 
Mask values selected in `[0, 1]`:", "value_states) proj_shape = (bsz * self.num_heads, -1, self.head_dim) query_states =", "is_cross_attention and past_key_value is not None: # reuse k,v, cross_attentions", "hidden_states + self.feed_forward(self.final_layer_norm(hidden_states)) outputs = (hidden_states,) if output_attentions: outputs +=", "= hidden_states + self.feed_forward(self.final_layer_norm(hidden_states)) outputs = (hidden_states,) if output_attentions: outputs", "we have to add this exception if \"args\" in output:", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices def _scatter_values_on_batch_indices(values, batch_indices, output_shape): \"\"\" Scatter", "mode, in graph mode the value in the config will", "self.padding(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer", "See the License for the specific language governing permissions and", "{k}.\") if isinstance(input_values, (tuple, list)): for i, input in enumerate(input_values):", "= self.call(input_values=inputs, training=False) return self.serving_output(output) HUBERT_START_DOCSTRING = r\"\"\" This model", "== 0: raise ValueError( \"You are trying to normalize your", "to in writing, software # distributed under the License is", "= ( inputs[\"output_attentions\"] if inputs[\"output_attentions\"] else self.config.output_attentions ) inputs[\"return_dict\"] =", "= -1e8 # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing def input_values_processing(func, config, input_values,", "*inputs, **kwargs): super().__init__(config, *inputs, **kwargs) logger.warning( f\"\\n{self.__class__.__name__} has backpropagation operations", "= self.dropout(hidden_states, training=training) return hidden_states # Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention with", "tf.Tensor) of all cross attention 
key/value_states. # Further calls to", "broadcast_shape = self._create_broadcast_shape(input_shape) gamma = None beta = None if", "tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids (`np.ndarray`", "output = (logits,) + outputs[1:] return ((loss,) + output) if", "actual number will be smaller (unless no_overlap is True) mask_length:", "num_masked_spans = max(num_masked_spans, min_masks) # make sure num masked indices", "tf.keras.regularizers.serialize(self.gamma_regularizer), \"beta_constraint\": tf.keras.constraints.serialize(self.beta_constraint), \"gamma_constraint\": tf.keras.constraints.serialize(self.gamma_constraint), } base_config = super().get_config() return", "compliance with the License. # You may obtain a copy", "TFHubertMainLayer(config, name=\"hubert\") @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_values: tf.Tensor,", "= int(mask_prob * sequence_length / mask_length + tf.random.uniform((1,))) num_masked_spans =", "inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, kwargs_call=kwargs, ) hidden_states = self.feature_extractor(", "(`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead", "hidden_size = shape_list(hidden_states) # `config.apply_spec_augment` can set masking to False", "= self.output_dense(hidden_states) hidden_states = self.output_dropout(hidden_states, training=training) return hidden_states # Copied", "tf.reshape(attn_output, (bsz, tgt_len, embed_dim)) attn_output = self.out_proj(attn_output) attn_weights: tf.Tensor =", "```python >>> import tensorflow as tf >>> from transformers import", "(\" + str(self.groups) + \") cannot be \" \"more than", "Tuple[int, int], mask_prob: float, mask_length: int, min_masks: int = 0,", "= 
TFHubertFeedForward(config, name=\"feed_forward\") self.final_layer_norm = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name=\"final_layer_norm\" ) def", "indices in format (batch_dim, indixes) \"\"\" indices_shape = shape_list(batch_indices) #", "output[\"past_key_values\"] = input_values.pop(\"decoder_cached_states\") for k, v in dict(input_values).items(): if isinstance(v,", "dtype=one_cst.dtype) expanded_mask = tf.tile(mask[:, None, None, :], (1, 1, tgt_len,", "number of channels (\" + str(dim) + \").\" ) if", "# The tf.debugging asserts are not compliant with XLA then", "gamma, beta = self._get_reshaped_weights(input_shape) normalized_inputs = tf.nn.batch_normalization( reshaped_inputs, mean=mean, variance=variance,", "of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked", "you wish \" \"to train/fine-tine this model, you need a", "Union[TFBaseModelOutput, Tuple[tf.Tensor]]: \"\"\" Returns: Example: ```python >>> from transformers import", "_create_input_spec(self, input_shape): dim = input_shape[self.axis] self.input_spec = tf.keras.layers.InputSpec(ndim=len(input_shape), axes={self.axis: dim})", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices def _scatter_values_on_batch_indices(values, batch_indices, output_shape): \"\"\" Scatter function", "Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None, labels: Optional[tf.Tensor] =", ") -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: all_hidden_states = () if output_hidden_states else", "pattern `name:id` then we check only the # `name` part", "< self.groups: raise ValueError( \"Number of groups (\" + str(self.groups)", "% self.groups != 0: raise ValueError( \"Number of groups (\"", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", ">= 0, tf.int32) target_lengths = tf.reduce_sum(labels_mask, axis=-1) loss = tf.nn.ctc_loss(", "if self.center: self.beta = self.add_weight( shape=shape, 
name=\"beta\", initializer=self.beta_initializer, regularizer=self.beta_regularizer, constraint=self.beta_constraint,", "batch >>> ds = load_dataset(\"hf-internal-testing/librispeech_asr_dummy\", \"clean\", split=\"validation\") >>> ds =", "TFHubertWeightNormConv1D(tf.keras.layers.Conv1D): \"\"\"Adapted from https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\" def __init__(self, filters, kernel_size, groups, explicit_padding,", "is accepted for {parameter_names[0]}.\" ) for name in parameter_names: if", "bsz: int): return tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads, self.head_dim)), (0, 2,", "= self.pos_conv_embed(hidden_states) hidden_states = hidden_states + position_embeddings hidden_states = self.layer_norm(hidden_states)", "output_lengths, maxlen=shape_list(hidden_states)[1], dtype=hidden_states.dtype ) hidden_states = self.feature_projection(hidden_states, training=inputs[\"training\"]) mask_time_indices =", "*optional*): Indices of positions of each input sequence tokens in", "# skip the layer continue layer_outputs = layer_module( hidden_states=hidden_states, attention_mask=attention_mask,", "used only in eager mode, in graph mode the value", "= hidden_states + self.feed_forward(hidden_states) hidden_states = self.final_layer_norm(hidden_states) outputs = (hidden_states,)", "input_shape, tensor_input_shape): group_shape = [tensor_input_shape[i] for i in range(len(input_shape))] is_instance_norm", "base_model_prefix = \"hubert\" main_input_name = \"input_values\" @property def dummy_inputs(self) ->", "this exception if \"args\" in output: if output[\"args\"] is not", "): inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids,", "to overlaps, the actual number will be smaller (unless no_overlap", "input_shape def _reshape_into_groups(self, inputs, input_shape, 
tensor_input_shape): group_shape = [tensor_input_shape[i] for", "should be of size 2 where first element is batch", "hidden-states without any specific head on top.\", HUBERT_START_DOCSTRING, ) class", "coding=utf-8 # Copyright 2021 The Fairseq Authors and the HuggingFace", "\"\"\" z = -tf.math.log(tf.random.uniform(shape_list(distribution), 0, 1)) _, indices = tf.nn.top_k(distribution", "= super().call(padded_inputs) return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer with Wav2Vec2->Hubert", "= tf.transpose(tf.concat([broad_casted_batch_dims, tf.reshape(batch_indices, [1, -1])], 0)) # scatter values to", "tf.Tensor, training: bool = False) -> tf.Tensor: hidden_states = self.layer_norm(hidden_states)", "spec_aug_mask_idxs = _sample_without_replacement(uniform_dist, num_masked_spans) # expand masked indices to masked", "def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config self.pos_conv_embed", "== \"group\": conv_layers = [TFHubertGroupNormConvLayer(config, layer_id=0, name=f\"conv_layers.{0}\")] + [ TFHubertNoLayerNormConvLayer(config,", "\"scale\": self.scale, \"beta_initializer\": tf.keras.initializers.serialize(self.beta_initializer), \"gamma_initializer\": tf.keras.initializers.serialize(self.gamma_initializer), \"beta_regularizer\": tf.keras.regularizers.serialize(self.beta_regularizer), \"gamma_regularizer\": tf.keras.regularizers.serialize(self.gamma_regularizer),", "group_reduction_axes.pop(axis) mean, variance = tf.nn.moments(reshaped_inputs, group_reduction_axes, keepdims=True) gamma, beta =", "equivalent `freeze_feature_encoder` method instead.\", FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self): \"\"\"", "list, dict, np.ndarray) for k, v in kwargs.items(): if isinstance(v,", "refer to the TF 2.0 documentation for all matter related", "beta = self._get_reshaped_weights(input_shape) normalized_inputs = 
tf.nn.batch_normalization( reshaped_inputs, mean=mean, variance=variance, scale=gamma,", "self._init_norm() self.initialized = True self._normalize_kernel() padded_inputs = tf.pad(inputs, ((0, 0),", "= None, output_attentions: Optional[bool] = False, output_hidden_states: Optional[bool] = False,", "indices_shape), [1, -1] ) # transform batch_indices to pair_indices pair_indices", "head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, kwargs_call=kwargs, ) hidden_states =", "k, v in dict(input_values).items(): if isinstance(v, allowed_types) or v is", "-2 if self.axis == -1 else self.axis - 1 else:", "with given mask_time_indices hidden_states = tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool),", "tf.convert_to_tensor(np.random.rand(1, 16000), tf.float32) dummy_inputs = { \"input_values\": input_values, \"attention_mask\": tf.cast(tf.not_equal(input_values,", "shape: Tuple[int, int], mask_prob: float, mask_length: int, min_masks: int =", "token_type_ids})` </Tip> Args: config ([`HubertConfig`]): Model configuration class with all", "on top.\", HUBERT_START_DOCSTRING, ) class TFHubertModel(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig,", "random mask spans for a given shape Args: shape: the", "= self.intermediate_dropout(hidden_states, training=training) hidden_states = self.output_dense(hidden_states) hidden_states = self.output_dropout(hidden_states, training=training)", "having all the tensors in the first argument of the", "loss. Indices should be in `[-100, 0, ..., config.vocab_size]` (see", "... speech, _ = sf.read(batch[\"file\"]) ... batch[\"speech\"] = speech ...", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "will always be set to True. 
training (`bool`, *optional*, defaults", "elements mask_prob: probability for each token to be chosen as", "approximately this percentage of all elements. however due to overlaps,", "self.dropout = tf.keras.layers.Dropout(rate=config.feat_proj_dropout) def call(self, hidden_states: tf.Tensor, training: bool =", "**kwargs) warnings.warn( f\"The class `{self.__class__.__name__}` has been depreciated \" \"and", "output_shape) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices def _compute_mask_indices( shape: Tuple[int, int],", "0 def call(self, hidden_states): if self.num_pad_remove > 0: hidden_states =", ") if attention_mask is not None: # The tf.debugging asserts", "else: self.beta = None def _create_broadcast_shape(self, input_shape): broadcast_shape = [1]", "training: bool = False) -> tf.Tensor: hidden_states = self.intermediate_dense(hidden_states) hidden_states", "# compute loss >>> target_transcription = \"A MAN SAID TO", "input of each TensorFlow model including the booleans. In case", "shape if mask_length < 1: raise ValueError(\"`mask_length` has to be", "= None, training: bool = False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]:", "hidden_states = attn_residual + hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states =", "not allowed only {allowed_types} is accepted for {parameter_names[i]}.\" ) elif", "= tf.range(mask_length)[tf.newaxis, tf.newaxis, :] offsets = tf.tile(offsets, (batch_size, num_masked_spans, 1))", "model(input_values).last_hidden_state ```\"\"\" inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids,", "group_shape else: return inputs, group_shape def _apply_normalization(self, reshaped_inputs, input_shape): group_shape", "config will be used instead. 
return_dict (`bool`, *optional*): Whether or", "are position IDs?](../glossary#position-ids) head_mask (`np.ndarray` or `tf.Tensor` of shape `(num_heads,)`", "values to pair indices return tf.scatter_nd(pair_indices, tf.reshape(values, [-1]), output_shape) #", "file does not load the weights associated with the model,", "* self.num_heads, tgt_len, self.head_dim], message=f\"`attn_output` should be of size {(bsz,", "the output length of the convolutional layers \"\"\" def _conv_out_length(input_length,", "@tf.function def serving(self, inputs): output = self.call(input_values=inputs, training=False) return self.serving_output(output)", "class TFHubertPreTrainedModel(TFPreTrainedModel): \"\"\" An abstract class to handle weights initialization", "all_self_attentions + (layer_outputs[1],) hidden_states = self.layer_norm(hidden_states) if output_hidden_states: all_hidden_states =", "\" \"use tf.layer.batch_normalization instead\" ) def _create_input_spec(self, input_shape): dim =", "num_masked_spans * mask_length)) spec_aug_mask_idxs = spec_aug_mask_idxs + offsets # scatter", "**kwargs: Any) -> None: super().__init__(**kwargs) self.in_conv_dim = config.conv_dim[layer_id] if layer_id", "Any) -> None: super().__init__(**kwargs) self.in_conv_dim = config.conv_dim[layer_id] if layer_id >", "self.v_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"v_proj\") self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"out_proj\")", "get_tf_activation(config.hidden_act) self.output_dense = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"output_dense\", ) self.output_dropout", "inputs[\"attention_mask\"] is not None else tf.ones_like(inputs[\"input_values\"], dtype=tf.float32) ) input_lengths =", "KIND, either express or implied. 
# See the License for", "to use the model in training mode (some modules like", "is accepted for {k}.\") if isinstance(input_values, (tuple, list)): for i,", "float = 0.0, is_decoder: bool = False, bias: bool =", "training and evaluation). \"\"\" @add_start_docstrings( \"The bare TFHubert Model transformer", "batch num_masked_spans = int(mask_prob * sequence_length / mask_length + tf.random.uniform((1,)))", "transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices def _compute_mask_indices( shape: Tuple[int, int], mask_prob: float, mask_length: int,", "Optional[bool] = None, training: Optional[bool] = False, ) -> Union[TFCausalLMOutput,", "boolean_dict = { k: v for k, v in output.items()", "Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask: Optional[tf.Tensor] =", "= self._apply_normalization(reshaped_inputs, input_shape) is_instance_norm = (input_shape[self.axis] // self.groups) == 1", "heads etc.) This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. 
Use", "(the \"License\"); # you may not use this file except", "input Tensors IN THE ORDER given in the docstring: `model([input_values,", "probability for each token to be chosen as start of", "self.groups) == 1 if not is_instance_norm: outputs = tf.reshape(normalized_inputs, tensor_input_shape)", "be of size {(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}\",", "is True) mask_length: size of the mask min_masks: minimum number", "list(signature.keys()) output = {} allowed_types = (tf.Tensor, bool, int, ModelOutput,", "# # Unless required by applicable law or agreed to", "dim % self.groups != 0: raise ValueError( \"Number of groups", "def _create_broadcast_shape(self, input_shape): broadcast_shape = [1] * len(input_shape) is_instance_norm =", "mask_length > sequence_length: num_masked_spans = sequence_length // mask_length # SpecAugment", "self.scale, \"beta_initializer\": tf.keras.initializers.serialize(self.beta_initializer), \"gamma_initializer\": tf.keras.initializers.serialize(self.gamma_initializer), \"beta_regularizer\": tf.keras.regularizers.serialize(self.beta_regularizer), \"gamma_regularizer\": tf.keras.regularizers.serialize(self.gamma_regularizer), \"beta_constraint\":", "Mask to nullify selected heads of the self-attention modules. Mask", "loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def serving_output(self, output: TFCausalLMOutput) ->", "= tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes = list(range(1, len(group_shape))) is_instance_norm = (input_shape[self.axis] //", "= layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) hidden_states", "if self.axis == -1 else self.axis - 1 else: axis", "0, 1)) _, indices = tf.nn.top_k(distribution + z, num_samples) return", "second portions of the inputs. 
Indices are selected in `[0,", "# expand masked indices to masked spans spec_aug_mask_idxs = tf.expand_dims(spec_aug_mask_idxs,", "all_hidden_states, all_self_attentions] if v is not None) return TFBaseModelOutput( last_hidden_state=hidden_states,", "belongs to the parameter list {parameter_names} and will be ignored.\"", "layer_module( hidden_states=hidden_states, attention_mask=attention_mask, output_attentions=output_attentions, training=training, ) hidden_states = layer_outputs[0] if", "output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, training: bool", "tf.keras.initializers.get(gamma_initializer) self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer) self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint = tf.keras.constraints.get(beta_constraint)", "the first positional arguments. This second option is useful when", "\"multiple of the number of channels (\" + str(dim) +", "config.feat_extract_norm == \"group\": conv_layers = [TFHubertGroupNormConvLayer(config, layer_id=0, name=f\"conv_layers.{0}\")] + [", "sequence_length if num_masked_spans * mask_length > sequence_length: num_masked_spans = sequence_length", "None, None, :], (1, 1, tgt_len, 1)) return (one_cst -", "all_hidden_states + (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description)", "2, name=\"conv\", ) self.padding = TFHubertSamePadLayer(config.num_conv_pos_embeddings) self.activation = get_tf_activation(config.feat_extract_activation) def", "allowed only {allowed_types} is accepted for {parameter_names[i]}.\" ) elif isinstance(input_values,", "- 1 group_reduction_axes.pop(axis) mean, variance = tf.nn.moments(reshaped_inputs, group_reduction_axes, keepdims=True) gamma,", "(bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2, 1, 3) ) attn_output", "parameter_names: output[tensor_name] = input else: output[parameter_names[i]] = input elif isinstance(input,", "attn_weights = 
tf.matmul(query_states, key_states, transpose_b=True) # The tf.debugging asserts are", "hidden_states = tf.expand_dims(input_values, -1) for conv_layer in self.conv_layers: hidden_states =", "graph mode the value will always be set to True.", "{ \"input_values\": input_values, \"attention_mask\": tf.cast(tf.not_equal(input_values, pad_token), tf.float32), } return dummy_inputs", "hidden_states=hs, attentions=attns) @add_start_docstrings( \"\"\"TFHubert Model with a `language modeling` head", "attn_residual = hidden_states hidden_states, attn_weights, _ = self.attention( hidden_states, attention_mask=attention_mask,", "a config file does not load the weights associated with", "disabled in other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_weights),", "explicit_padding=config.num_conv_pos_embeddings // 2, name=\"conv\", ) self.padding = TFHubertSamePadLayer(config.num_conv_pos_embeddings) self.activation =", "2 where first element is batch size and 2nd is", "token_type_ids])` - a dictionary with one or several input Tensors", "of ['group', 'layer']\" ) self.conv_layers = conv_layers def call(self, input_values):", ">>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\") >>> def", "parameter_names and \"args\" not in parameter_names: logger.warning( f\"The parameter {k}", "version, use `past_key_values` instead.\", FutureWarning, ) output[\"past_key_values\"] = input_values.pop(\"decoder_cached_states\") for", "or `tf.Tensor` of shape `({0})`, *optional*): Indices of positions of", "*sentence B* token. 
[What are token type IDs?](../glossary#token-type-ids) position_ids (`np.ndarray`", "tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.padding(hidden_states) hidden_states = self.activation(hidden_states)", "processor(transcription, return_tensors=\"tf\").input_values >>> loss = model(input_values, labels=labels).loss ```\"\"\" inputs =", "config = { \"groups\": self.groups, \"axis\": self.axis, \"epsilon\": self.epsilon, \"center\":", "+ [ TFHubertNoLayerNormConvLayer(config, layer_id=i + 1, name=f\"conv_layers.{i+1}\") for i in", "of shape `({0})`, *optional*): Mask to avoid performing attention on", "_mask_hidden_states(self, hidden_states: tf.Tensor, mask_time_indices: Optional[tf.Tensor] = None): \"\"\" Masks extracted", "masking to False if not getattr(self.config, \"apply_spec_augment\", True): return hidden_states", "from https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\" def __init__(self, filters, kernel_size, groups, explicit_padding, **kwargs): super().__init__(", "0, **kwargs: Any) -> None: super().__init__(**kwargs) self.in_conv_dim = config.conv_dim[layer_id] if", "`[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. \"\"\" src_len =", "expanded_mask = tf.tile(mask[:, None, None, :], (1, 1, tgt_len, 1))", "single layer should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}\",", "= tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFCausalLMOutput(logits=output.logits, hidden_states=hs, attentions=attns)", "if not return_dict: return tuple(v for v in [hidden_states, all_hidden_states,", "used instead. 
output_hidden_states (`bool`, *optional*): Whether or not to return", "= tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"projection\", ) self.dropout = tf.keras.layers.Dropout(rate=config.feat_proj_dropout)", "features along time axis and/or along feature axis according to", "method `freeze_feature_extractor` is deprecated and will be removed in Transformers", "tokens that are **not masked**, - 0 for tokens that", "inputs of the model. Returns: Two lists, one for the", "value_states = past_key_value[1] elif is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states),", "_scatter_values_on_batch_indices( tf.ones_like(spec_aug_mask_idxs), spec_aug_mask_idxs, spec_aug_mask.shape ) return spec_aug_mask # Copied from", "wrap processor as target processor to encode labels >>> with", "- 1 corresponds to a *sentence B* token. [What are", "= self.output_dropout(hidden_states, training=training) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer with", "\"\"\"Set the norm of the weight vector.\"\"\" kernel_norm = tf.sqrt(tf.reduce_sum(tf.square(self.weight_v),", "= tf.cast(mask, dtype=one_cst.dtype) expanded_mask = tf.tile(mask[:, None, None, :], (1,", "be used only in eager mode, in graph mode the", "on top for Connectionist Temporal Classification (CTC).\"\"\", HUBERT_START_DOCSTRING, ) class", "time axis and/or along feature axis according to [SpecAugment](https://arxiv.org/abs/1904.08779). 
\"\"\"", ") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.feed_forward =", "def call( self, hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor] = None, past_key_value:", "class TFHubertEncoderLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.attention =", "TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... speech, _ = sf.read(batch[\"file\"]) ...", "tf.nn.l2_normalize(self.weight_v, axis=self.kernel_norm_axes) * tf.transpose(self.weight_g) self.kernel = tf.transpose(kernel) def build(self, input_shape):", "\"args\" in output: if output[\"args\"] is not None and type(output[\"args\"])", "self.kernel_norm_axes = tf.constant([0, 1]) def _init_norm(self): \"\"\"Set the norm of", "ORDER given in the docstring: `model([input_values, attention_mask])` or `model([input_values, attention_mask,", "of the weight vector.\"\"\" kernel_norm = tf.sqrt(tf.reduce_sum(tf.square(self.weight_v), axis=self.kernel_norm_axes)) self.weight_g.assign(kernel_norm[:, tf.newaxis,", "associated with the model, only the configuration. 
Check out the", "super().build(input_shape) def _get_feat_extract_output_lengths(self, input_lengths: tf.Tensor): \"\"\" Computes the output length", "with TFBart->TFHubert class TFHubertAttention(tf.keras.layers.Layer): \"\"\"Multi-headed attention from \"Attention Is All", "tf.random.uniform((1,))) num_masked_spans = max(num_masked_spans, min_masks) # make sure num masked", "1, name=f\"conv_layers.{i+1}\") for i in range(config.num_feat_extract_layers - 1) ] elif", "tf.debugging.assert_equal( shape_list(attn_weights), [bsz * self.num_heads, tgt_len, src_len], message=f\"Attention weights should", "kernel_size, stride) return input_lengths def _mask_hidden_states(self, hidden_states: tf.Tensor, mask_time_indices: Optional[tf.Tensor]", "input embeddings, pruning heads etc.) This model is also a", "* LARGE_NEGATIVE # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm with Wav2Vec2->Hubert class TFHubertGroupNorm(tf.keras.layers.Layer):", "None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFBaseModelOutput(last_hidden_state=output.last_hidden_state,", ") hidden_states = self.feature_extractor( tf.cast(inputs[\"input_values\"], tf.float32), training=inputs[\"training\"] ) if inputs[\"attention_mask\"]", "be masked. 
this will be multiplied by number of timesteps", "_apply_normalization(self, reshaped_inputs, input_shape): group_shape = tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes = list(range(1, len(group_shape)))", "input_lengths def _mask_hidden_states(self, hidden_states: tf.Tensor, mask_time_indices: Optional[tf.Tensor] = None): \"\"\"", "self.embed_dim = embed_dim self.num_heads = num_heads self.dropout = tf.keras.layers.Dropout(dropout) self.head_dim", "= _compute_mask_indices( (batch_size, hidden_size), mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length, ) hidden_states = tf.where(mask_feature_indices[:,", "attention masks?](../glossary#attention-mask) token_type_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*):", "for now - see https://github.com/tensorflow/tensorflow/issues/9260 for more info \"\"\" z", "not None) return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) @keras_serializable class", "than 0.\") if mask_length > sequence_length: raise ValueError( f\"`mask_length` has", "with shape \" + str(input_shape) + \".\" ) def _set_number_of_groups_for_instance_norm(self,", "signature.pop(\"kwargs\", None) signature.pop(\"self\", None) parameter_names = list(signature.keys()) output = {}", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer with Wav2Vec2->Hubert class TFHubertSamePadLayer(tf.keras.layers.Layer): def __init__(self, num_conv_pos_embeddings,", "super().__init__(config, *inputs, **kwargs) self.config = config self.hubert = TFHubertMainLayer(config, name=\"hubert\")", "__init__(self, num_conv_pos_embeddings, **kwargs): super().__init__(**kwargs) self.num_pad_remove = 1 if num_conv_pos_embeddings %", "-> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states, attn_weights, _ = self.attention(", "behaviors between training and evaluation). 
\"\"\" @add_start_docstrings( \"The bare TFHubert", "self.layer_norm(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer", "tgt_len, embed_dim = shape_list(hidden_states) # get query proj query_states =", "in `[0, ..., config.vocab_size]` Returns: Example: ```python >>> import tensorflow", "sequence tokens in the vocabulary. Indices can be obtained using", "option, there are three possibilities you can use to gather", "to be named accordingly to the parameters name, i.e. `input_values", "= (bsz * self.num_heads, -1, self.head_dim) query_states = tf.reshape(self._shape(query_states, tgt_len,", "return input_shape def _reshape_into_groups(self, inputs, input_shape, tensor_input_shape): group_shape = [tensor_input_shape[i]", "This is useful if you want more control over how", "feature axis if self.config.mask_feature_prob > 0: mask_feature_indices = _compute_mask_indices( (batch_size,", "modeling` head on top for Connectionist Temporal Classification (CTC).\"\"\", HUBERT_START_DOCSTRING,", "return_dict: Optional[bool] = None, training: Optional[bool] = False, ) ->", "positions of each input sequence tokens in the position embeddings.", "mask_prob: float, mask_length: int, min_masks: int = 0, ) ->", "@property def dummy_inputs(self) -> Dict[str, tf.Tensor]: pad_token = 0.0 input_values", "spans in batch num_masked_spans = int(mask_prob * sequence_length / mask_length", "output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool]", "weights initialization and a simple interface for downloading and loading", "modes than eager. 
if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(layer_head_mask), [self.num_heads], message=f\"Head mask", "def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\")", "as shape, which will prevent masking padded elements mask_prob: probability", "\"\"\" @add_start_docstrings( \"The bare TFHubert Model transformer outputing raw hidden-states", "size {(bsz * self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}\", )", "self.attention( hidden_states, attention_mask=attention_mask, training=training ) hidden_states = self.dropout(hidden_states, training=training) hidden_states", "output = super().call(padded_inputs) return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer with", "modules. Mask values selected in `[0, 1]`: - 1 indicates", "between training and evaluation). \"\"\" @add_start_docstrings( \"The bare TFHubert Model", "then reuse all cross-attention # key/value_states (first \"if\" case) #", "= 0, **kwargs: Any) -> None: super().__init__(**kwargs) self.in_conv_dim = config.conv_dim[layer_id]", "num_conv_pos_embeddings % 2 == 0 else 0 def call(self, hidden_states):", "def _get_reshaped_weights(self, input_shape): broadcast_shape = self._create_broadcast_shape(input_shape) gamma = None beta", "else: axis = -1 if self.axis == -1 else self.axis", "to mask spec_aug_mask = _scatter_values_on_batch_indices( tf.ones_like(spec_aug_mask_idxs), spec_aug_mask_idxs, spec_aug_mask.shape ) return", "= normalized_inputs return outputs def get_config(self): config = { \"groups\":", "TF 2.7, so we need to account for padding input_shape[-2]", "1]) def _init_norm(self): \"\"\"Set the norm of the weight vector.\"\"\"", "return_dict: Optional[bool] = None, training: bool = False, **kwargs: Any,", "tokens in the vocabulary. 
Indices can be obtained using [`BertTokenizer`].", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder with Wav2Vec2->Hubert class TFHubertEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "and `sequence_length`: {sequence_length}`\" ) # compute number of masked spans", "config ([`PretrainedConfig`]): The config of the running model. **kwargs: The", "spec_aug_mask.shape ) return spec_aug_mask # Copied from transformers.models.bart.modeling_tf_bart._expand_mask def _expand_mask(mask:", "of channels (\" + str(dim) + \").\" ) if dim", "last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm with Wav2Vec2->Hubert", "got `mask_length`: {mask_length} and `sequence_length`: {sequence_length}`\" ) # compute number", "None, return_dict: Optional[bool] = None, training: bool = False, )", "past_key_value # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward with Wav2Vec2->Hubert class TFHubertFeedForward(tf.keras.layers.Layer): def", "* tf.reshape( attn_weights, (bsz, self.num_heads, tgt_len, src_len) ) attn_weights =", "dimension \" \"but the layer received an input with shape", "**kwargs): super().__init__(**kwargs) self.attention = TFHubertAttention( embed_dim=config.hidden_size, num_heads=config.num_attention_heads, dropout=config.attention_dropout, is_decoder=False, name=\"attention\",", "past_key_values_length: int = 0): \"\"\" Expands attention_mask from `[bsz, seq_len]`", "else: raise ValueError( f\"Data of type {type(input)} is not allowed", "{allowed_types} is accepted for {k}.\") if isinstance(input_values, (tuple, list)): for", "(dict, BatchEncoding)): if \"inputs\" in input_values: warnings.warn( \"The `inputs` argument", "True super().build(input_shape) def call(self, inputs): input_shape = tf.keras.backend.int_shape(inputs) tensor_input_shape 
=", "with one or several input Tensors IN THE ORDER given", "dim def _check_size_of_dimensions(self, input_shape): dim = input_shape[self.axis] if dim <", "freeze_feature_extractor(self): \"\"\" Calling this function will disable the gradient computation", "model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it as a", "axis: int = -1, epsilon: float = 1e-3, center: bool", "training: Optional[bool] = False, ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: \"\"\"Input shape:", "case) # if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of", "not is_instance_norm: group_shape[self.axis] = input_shape[self.axis] // self.groups group_shape.insert(self.axis, self.groups) group_shape", "\"epsilon\": self.epsilon, \"center\": self.center, \"scale\": self.scale, \"beta_initializer\": tf.keras.initializers.serialize(self.beta_initializer), \"gamma_initializer\": tf.keras.initializers.serialize(self.gamma_initializer),", "self.feed_forward(self.final_layer_norm(hidden_states)) outputs = (hidden_states,) if output_attentions: outputs += (attn_weights,) return", "etc.) This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. 
Use it", "apply SpecAugment along feature axis if self.config.mask_feature_prob > 0: mask_feature_indices", "shape_list(batch_indices) # broadcast batch dim to indices_shape broad_casted_batch_dims = tf.reshape(", "target_lengths = tf.reduce_sum(labels_mask, axis=-1) loss = tf.nn.ctc_loss( logits=logits, labels=labels, logit_length=input_lengths,", "= input_shape[self.axis] shape = (dim,) if self.scale: self.gamma = self.add_weight(", "Do you want to \" \"use tf.layer.batch_normalization instead\" ) def", "None if attention_mask is not None: hidden_states = hidden_states *", "first parameter, then `input_values` output[\"input_values\"] = output[\"args\"] del output[\"args\"] if", "strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.layer_norm = tf.keras.layers.LayerNormalization(name=\"layer_norm\", epsilon=config.layer_norm_eps) self.activation =", "can be used in eager mode, in graph mode the", "= tf.shape(inputs) reshaped_inputs, group_shape = self._reshape_into_groups(inputs, input_shape, tensor_input_shape) normalized_inputs =", "def build(self, input_shape: tf.TensorShape): self.masked_spec_embed = self.add_weight( shape=(self.config.hidden_size,), initializer=\"uniform\", trainable=True,", ") inputs[\"return_dict\"] = inputs[\"return_dict\"] if inputs[\"return_dict\"] else self.config.return_dict outputs =", "have different behaviors between training and evaluation). 
\"\"\" @add_start_docstrings( \"The", "Optional[tf.Tensor] = None, output_attentions: Optional[bool] = False, output_hidden_states: Optional[bool] =", "output_hidden_states else None all_self_attentions = () if output_attentions else None", "v is not None) return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, )", "Temporal Classification (CTC).\"\"\", HUBERT_START_DOCSTRING, ) class TFHubertForCTC(TFHubertPreTrainedModel): def __init__(self, config:", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "from ...modeling_tf_outputs import TFBaseModelOutput, TFCausalLMOutput from ...modeling_tf_utils import TFPreTrainedModel, booleans_processing,", "kernel_size, stride): # 1D convolutional layer output length formula taken", "\"hubert\" main_input_name = \"input_values\" @property def dummy_inputs(self) -> Dict[str, tf.Tensor]:", "bigger than 0.\") if mask_length > sequence_length: raise ValueError( f\"`mask_length`", "tf.Tensor: tensor_name = output[\"args\"].name.split(\":\")[0] output[tensor_name] = output[\"args\"] else: # `args`", "= Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ...", "name=f\"conv_layers.{i+1}\") for i in range(config.num_feat_extract_layers - 1) ] elif config.feat_extract_norm", "base_config = super().get_config() return {**base_config, **config} def compute_output_shape(self, input_shape): return", "filters=filters, kernel_size=kernel_size, groups=groups, padding=\"valid\", use_bias=True, bias_initializer=\"he_normal\", **kwargs, ) self.explicit_padding =", "((loss,) + output) if loss is not None else output", "the model weights. 
\"\"\" HUBERT_INPUTS_DOCSTRING = r\"\"\" Args: input_values (`np.ndarray`,", "= self.padding(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied from", "a cross-attention layer # for the decoder is_cross_attention = key_value_states", "are checked at build time since TF 2.7, so we", "= None, token_type_ids: Optional[tf.Tensor] = None, position_ids: Optional[tf.Tensor] = None,", "are token type IDs?](../glossary#token-type-ids) position_ids (`np.ndarray` or `tf.Tensor` of shape", "= layer_module( hidden_states=hidden_states, attention_mask=attention_mask, output_attentions=output_attentions, training=training, ) hidden_states = layer_outputs[0]", "= center self.scale = scale self.beta_initializer = tf.keras.initializers.get(beta_initializer) self.gamma_initializer =", "{**base_config, **config} def compute_output_shape(self, input_shape): return input_shape def _reshape_into_groups(self, inputs,", "`[-100, 0, ..., config.vocab_size]` (see `input_values` docstring) Tokens with indices", "hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states = hidden_states + self.feed_forward(hidden_states) hidden_states", "bi-directional self-attention `past_key_value` is always `None` past_key_value = (key_states, value_states)", "model in training mode (some modules like dropout modules have", "if inputs[\"output_attentions\"] else self.config.output_attentions ) inputs[\"return_dict\"] = inputs[\"return_dict\"] if inputs[\"return_dict\"]", "self.padding = TFHubertSamePadLayer(config.num_conv_pos_embeddings) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor)", "tensors in the first argument of the model call function:", "shape_list(layer_head_mask), [self.num_heads], message=f\"Head mask for a single layer should be", "mask_time_indices = _compute_mask_indices( (batch_size, sequence_length), mask_prob=self.config.mask_time_prob, 
mask_length=self.config.mask_time_length, min_masks=2, ) hidden_states", "signature = dict(inspect.signature(func).parameters) signature.pop(\"kwargs\", None) signature.pop(\"self\", None) parameter_names = list(signature.keys())", "1)) offsets = tf.reshape(offsets, (batch_size, num_masked_spans * mask_length)) spec_aug_mask_idxs =", "with Wav2Vec2->Hubert class TFHubertEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs)", "num_masked_spans = sequence_length // mask_length # SpecAugment mask to fill", "# can concat previous decoder key/value_states to current projected key/value_states", "input sequence tokens in the vocabulary. Indices can be obtained", "shape `(batch_size, sequence_length)`, *optional*): Labels for computing the masked language", "input else: output[parameter_names[i]] = input elif isinstance(input, allowed_types) or input", "(dim,) if self.center: self.beta = self.add_weight( shape=shape, name=\"beta\", initializer=self.beta_initializer, regularizer=self.beta_regularizer,", "None: output[k] = v else: raise ValueError(f\"Data of type {type(v)}", "def call(self, hidden_states: tf.Tensor, training: bool = False) -> tf.Tensor:", "Wav2Vec2->Hubert class TFHubertEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config", "*optional*, defaults to `False``): Whether or not to use the", "the the shape for which to compute masks. 
should be", "must be divisible by num_heads (got `embed_dim`: {self.embed_dim}\" f\" and", "\"\"\" indices_shape = shape_list(batch_indices) # broadcast batch dim to indices_shape", "if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of # all", "TFBaseModelOutput, TFCausalLMOutput from ...modeling_tf_utils import TFPreTrainedModel, booleans_processing, get_initializer, keras_serializable from", "for k, v in dict(input_values).items(): if isinstance(v, allowed_types) or v", "return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer with Wav2Vec2->Hubert class TFHubertGroupNormConvLayer(tf.keras.layers.Layer):", "does not load the weights associated with the model, only", "src_seq_len]`. \"\"\" src_len = shape_list(mask)[1] tgt_len = tgt_len if tgt_len", "all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict: return tuple(v", "group_shape = [tensor_input_shape[i] for i in range(len(input_shape))] is_instance_norm = (input_shape[self.axis]", "input_lengths: tf.Tensor): \"\"\" Computes the output length of the convolutional", "**boolean_dict)) return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement def _sample_without_replacement(distribution, num_samples):", "get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states)", "by num_heads (got `embed_dim`: {self.embed_dim}\" f\" and `num_heads`: {num_heads}).\" )", "- having all inputs as keyword arguments (like PyTorch models),", "input_shape) is_instance_norm = (input_shape[self.axis] // self.groups) == 1 if not", "tf.Tensor, training: bool = False) -> tf.Tensor: hidden_states = self.intermediate_dense(hidden_states)", "# SpecAugment mask to fill spec_aug_mask = tf.zeros((batch_size, sequence_length), dtype=tf.int32)", "input_shape[self.axis] shape = (dim,) if self.scale: self.gamma = 
self.add_weight( shape=shape,", "parameters will not be updated during training. \"\"\" warnings.warn( \"The", "0 corresponds to a *sentence A* token, - 1 corresponds", "} base_config = super().get_config() return {**base_config, **config} def compute_output_shape(self, input_shape):", "useful if you want more control over how to convert", "in input_values: warnings.warn( \"The `inputs` argument is deprecated and will", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D with Wav2Vec2->Hubert class TFHubertWeightNormConv1D(tf.keras.layers.Conv1D): \"\"\"Adapted from https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\"", "attention from \"Attention Is All You Need\"\"\" def __init__( self,", "only {allowed_types} is accepted for {parameter_names[0]}.\" ) for name in", "values selected in `[0, 1]`: - 1 for tokens that", "to pair indices return tf.scatter_nd(pair_indices, tf.reshape(values, [-1]), output_shape) # Copied", "return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer with Wav2Vec2->Hubert class TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer):", "super().__init__(**kwargs) self.config = config self.pos_conv_embed = TFHubertPositionalConvEmbedding(config, name=\"pos_conv_embed\") self.layer_norm =", "time axis with given mask_time_indices hidden_states = tf.where( tf.cast(mask_time_indices[:, :,", "input_values = processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values # Batch size 1 >>> hidden_states", "law or agreed to in writing, software # distributed under", "= -tf.math.log(tf.random.uniform(shape_list(distribution), 0, 1)) _, indices = tf.nn.top_k(distribution + z,", "\"kwargs\" in output: del output[\"kwargs\"] boolean_dict = { k: v", "each input sequence tokens in the position embeddings. 
Selected in", "HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.config = config self.hubert", "[ TFHubertNoLayerNormConvLayer(config, layer_id=i + 1, name=f\"conv_layers.{i+1}\") for i in range(config.num_feat_extract_layers", "\"beta_constraint\": tf.keras.constraints.serialize(self.beta_constraint), \"gamma_constraint\": tf.keras.constraints.serialize(self.gamma_constraint), } base_config = super().get_config() return {**base_config,", "shape_list(key_states)[1] attn_weights = tf.matmul(query_states, key_states, transpose_b=True) # The tf.debugging asserts", "number of masked spans Adapted from [fairseq's data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376). \"\"\" batch_size,", "train/fine-tine this model, you need a GPU or a TPU\"", "`[0, 1]`: - 1 for tokens that are **not masked**,", "input_shape): broadcast_shape = [1] * len(input_shape) is_instance_norm = (input_shape[self.axis] //", "don't allow to use the .name property so we check", "import BatchEncoding from ...utils import ( ModelOutput, add_start_docstrings, add_start_docstrings_to_model_forward, logging,", "broadcast_shape # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D with Wav2Vec2->Hubert class TFHubertWeightNormConv1D(tf.keras.layers.Conv1D): \"\"\"Adapted", "name=\"encoder\") def build(self, input_shape: tf.TensorShape): self.masked_spec_embed = self.add_weight( shape=(self.config.hidden_size,), initializer=\"uniform\",", "= shape if mask_length < 1: raise ValueError(\"`mask_length` has to", "tf.cast(attention_mask, dtype=attn_weights.dtype) attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) +", "output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, kwargs_call=kwargs, ) hidden_states = self.feature_extractor( tf.cast(inputs[\"input_values\"],", 
"output_shape): \"\"\" Scatter function as in PyTorch with indices in", "self.add_weight( shape=shape, name=\"gamma\", initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, constraint=self.gamma_constraint, ) else: self.gamma =", "method instead.\", FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self): \"\"\" Calling this", "output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = encoder_outputs[0] if not", "import Any, Dict, Optional, Tuple, Union import numpy as np", "attn_weights, _ = self.attention( hidden_states, attention_mask=attention_mask, training=training ) hidden_states =", "in eager mode, in graph mode the value will always", "attn_residual + hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states = hidden_states +", "(dropout_probability < self.config.layerdrop): # skip the layer continue layer_outputs =", "for kernel_size, stride in zip(self.config.conv_kernel, self.config.conv_stride): input_lengths = _conv_out_length(input_lengths, kernel_size,", "hidden_states = hidden_states + position_embeddings hidden_states = self.dropout(hidden_states, training=training) for", "the tensors will not be guaranteed during the training. 
Args:", "def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] = None, output_attentions:", "given mask_time_indices hidden_states = tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis,", "tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) # apply SpecAugment along", "= input_shape[self.axis] if dim is None: raise ValueError( \"Axis \"", "bias_initializer=\"zeros\", name=\"intermediate_dense\", ) self.intermediate_act_fn = get_tf_activation(config.hidden_act) self.output_dense = tf.keras.layers.Dense( units=config.hidden_size,", "_compute_mask_indices( (batch_size, sequence_length), mask_prob=self.config.mask_time_prob, mask_length=self.config.mask_time_length, min_masks=2, ) hidden_states = tf.where(", ") attn_output = tf.transpose( tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0,", "2.7, so we need to account for padding input_shape[-2] +=", "self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}\", ) if attention_mask is", "name=\"hubert\") @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_values: tf.Tensor, attention_mask:", "or implied. # See the License for the specific language", "under returned tensors for more detail. 
This argument can be", "documentation for all matter related to general usage and behavior.", "head_mask (`np.ndarray` or `tf.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`,", "# add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = np.random.uniform(0,", "as inputs: - having all inputs as keyword arguments (like", "= tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name=\"final_layer_norm\" ) def call( self, hidden_states: tf.Tensor,", "embed_dim self.num_heads = num_heads self.dropout = tf.keras.layers.Dropout(dropout) self.head_dim = embed_dim", ">>> from transformers import Wav2Vec2Processor, TFHubertForCTC >>> from datasets import", "self.groups != 0: raise ValueError( \"Number of groups (\" +", ") attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_probs", "Optional, Tuple, Union import numpy as np import tensorflow as", "the model in training mode (some modules like dropout modules", "computed for the tokens with labels in `[0, ..., config.vocab_size]`", "0): \"\"\" Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1,", "the value in the config will be used instead. 
output_hidden_states", "this function will disable the gradient computation for the feature", "if \"inputs\" in input_values: warnings.warn( \"The `inputs` argument is deprecated", "& apply SpecAugment along time axis mask_time_indices = _compute_mask_indices( (batch_size,", "-tf.math.log(tf.random.uniform(shape_list(distribution), 0, 1)) _, indices = tf.nn.top_k(distribution + z, num_samples)", "1 if not is_instance_norm: group_shape[self.axis] = input_shape[self.axis] // self.groups group_shape.insert(self.axis,", "attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) + attention_mask attn_weights", "Any, Dict, Optional, Tuple, Union import numpy as np import", "all_self_attentions = all_self_attentions + (layer_outputs[1],) # Add last layer if", "True): return hidden_states if mask_time_indices is not None: # apply", "None, labels: Optional[tf.Tensor] = None, output_hidden_states: Optional[bool] = None, return_dict:", "case of a list of symbolic inputs, each input has", "import Wav2Vec2Processor, TFHubertModel >>> from datasets import load_dataset >>> import", "HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.hubert = TFHubertMainLayer(config, name=\"hubert\")", "v in dict(input_values).items(): if isinstance(v, allowed_types) or v is None:", "-1 else self.axis - 1 group_reduction_axes.pop(axis) mean, variance = tf.nn.moments(reshaped_inputs,", "= None beta = None if self.scale: gamma = tf.reshape(self.gamma,", "TFHubertSamePadLayer(config.num_conv_pos_embeddings) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor) -> tf.Tensor:", "\"group\": conv_layers = [TFHubertGroupNormConvLayer(config, layer_id=0, name=f\"conv_layers.{0}\")] + [ TFHubertNoLayerNormConvLayer(config, layer_id=i", "input has to be named accordingly to the parameters name,", "dummy_inputs = { \"input_values\": input_values, \"attention_mask\": tf.cast(tf.not_equal(input_values, 
pad_token), tf.float32), }", "1 if not is_instance_norm: axis = -2 if self.axis ==", "inputs[\"output_attentions\"] else self.config.output_attentions ) inputs[\"return_dict\"] = inputs[\"return_dict\"] if inputs[\"return_dict\"] else", "`np.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing the", "method with LayerCall.__call__(args, **kwargs) # So to respect the proper", "self.layer_norm(hidden_states) hidden_states = self.dropout(hidden_states, training=training) for i, layer_module in enumerate(self.layer):", "input_shape[self.axis] shape = (dim,) if self.center: self.beta = self.add_weight( shape=shape,", "```\"\"\" inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids,", ") hidden_states = layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions +", "self.groups == -1: self.groups = dim def _check_size_of_dimensions(self, input_shape): dim", "name=f\"conv_layers.{0}\")] + [ TFHubertNoLayerNormConvLayer(config, layer_id=i + 1, name=f\"conv_layers.{i+1}\") for i", "name=\"feed_forward\") self.final_layer_norm = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name=\"final_layer_norm\" ) def call( self,", "return {**base_config, **config} def compute_output_shape(self, input_shape): return input_shape def _reshape_into_groups(self,", "(attn_weights,) return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm with Wav2Vec2->Hubert class", "if config.do_stable_layer_norm: self.encoder = TFHubertEncoderStableLayerNorm(config, name=\"encoder\") else: self.encoder = TFHubertEncoder(config,", "= tf.transpose( tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2, 1,", "matrix. 
output_attentions (`bool`, *optional*): Whether or not to return the", ") attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype) attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads,", "conv_layers def call(self, input_values): hidden_states = tf.expand_dims(input_values, -1) for conv_layer", "all Hubert models at https://huggingface.co/models?filter=hubert ] LARGE_NEGATIVE = -1e8 #", "* num_heads) != self.embed_dim: raise ValueError( f\"embed_dim must be divisible", "An abstract class to handle weights initialization and a simple", "# 1D convolutional layer output length formula taken # from", "= tf.keras.layers.LayerNormalization(name=\"layer_norm\", epsilon=config.layer_norm_eps) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor)", "tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states #", "hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states, attn_weights, _ = self.attention( hidden_states,", "hidden_states hidden_states, attn_weights, _ = self.attention( hidden_states, attention_mask=attention_mask, training=training )", "eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_weights), [bsz * self.num_heads, tgt_len, src_len],", "**kwargs): super().__init__(**kwargs) self.config = config self.pos_conv_embed = TFHubertPositionalConvEmbedding(config, name=\"pos_conv_embed\") self.layer_norm", "= None, training: Optional[bool] = False, ) -> Union[TFCausalLMOutput, Tuple[tf.Tensor]]:", "hidden_states): if self.num_pad_remove > 0: hidden_states = hidden_states[:, : -self.num_pad_remove,", "_check_size_of_dimensions(self, input_shape): dim = input_shape[self.axis] if dim < self.groups: raise", "heads of the self-attention modules. 
Mask values selected in `[0,", "ModelOutput, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_hubert import HubertConfig", "to return the attentions tensors of all attention layers. See", "tf.sequence_mask( output_lengths, maxlen=shape_list(hidden_states)[1], dtype=hidden_states.dtype ) hidden_states = self.feature_projection(hidden_states, training=inputs[\"training\"]) mask_time_indices", "\"\"\" src_len = shape_list(mask)[1] tgt_len = tgt_len if tgt_len is", "output we have to add this exception if \"args\" in", "ds = ds.map(map_to_array) >>> input_values = processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values # Batch", "first element is batch size and 2nd is timesteps attention_mask:", "num_heads) != self.embed_dim: raise ValueError( f\"embed_dim must be divisible by", "self.out_proj(attn_output) attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) return", "key_value_states are provided this layer is used as a cross-attention", "attention_mask: optional padding mask of the same size as shape,", "always the first parameter, then `input_values` output[\"input_values\"] = output[\"args\"] del", "- 1 for tokens that are **not masked**, - 0", "(key_states, value_states) proj_shape = (bsz * self.num_heads, -1, self.head_dim) query_states", "if inputs[\"return_dict\"] else self.config.return_dict outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"],", ") self.output_dropout = tf.keras.layers.Dropout(config.hidden_dropout) def call(self, hidden_states: tf.Tensor, training: bool", "labels=labels).loss ```\"\"\" inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids,", "is timesteps attention_mask: optional padding mask of the same size", "+ 
position_embeddings hidden_states = self.layer_norm(hidden_states) hidden_states = self.dropout(hidden_states, training=training) for", "i.e. `input_values = tf.keras.Input(shape=(128,), dtype='float32', name=\"input_values\")` otherwise the order of", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer with Wav2Vec2->Hubert class TFHubertGroupNormConvLayer(tf.keras.layers.Layer): def __init__(self,", "type {type(input)} is not allowed only {allowed_types} is accepted for", "# when not being attended to labels_mask = tf.cast(labels >=", "config: HubertConfig, **kwargs): super().__init__(**kwargs) self.attention = TFHubertAttention( embed_dim=config.hidden_size, num_heads=config.num_attention_heads, dropout=config.attention_dropout,", "1]`. [What are position IDs?](../glossary#position-ids) head_mask (`np.ndarray` or `tf.Tensor` of", "the License for the specific language governing permissions and #", "`({0})`, *optional*): Mask to avoid performing attention on padding token", "token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states", "according to convolution formula output_lengths = self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"], -1)) attention_mask =", "if mask_length < 1: raise ValueError(\"`mask_length` has to be bigger", "`-100` are ignored (masked), the loss is only computed for", "else: # self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states =", "hidden_states * tf.expand_dims(attention_mask, -1) attention_mask = _expand_mask(attention_mask) else: attention_mask =", "self.activation(hidden_states) return hidden_states # Copied from 
transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer with Wav2Vec2->Hubert class", "config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict,", "else None all_self_attentions = () if output_attentions else None if", "__init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.attention = TFHubertAttention( embed_dim=config.hidden_size, num_heads=config.num_attention_heads,", "None, layer_head_mask: Optional[tf.Tensor] = None, training: Optional[bool] = False, )", "Tensors IN THE ORDER given in the docstring: `model([input_values, attention_mask])`", "spec_aug_mask = tf.zeros((batch_size, sequence_length), dtype=tf.int32) # uniform distribution to sample", "is None: output[k] = v else: raise ValueError(f\"Data of type", "head on top.\", HUBERT_START_DOCSTRING, ) class TFHubertModel(TFHubertPreTrainedModel): def __init__(self, config:", "each example must have the shape `({0})`): Indices of input", "the HuggingFace Inc. team. All rights reserved. # # Licensed", "None if self.scale: gamma = tf.reshape(self.gamma, broadcast_shape) if self.center: beta", "running model. **kwargs: The inputs of the model. Returns: Two", "# distributed under the License is distributed on an \"AS", "src_len)) + attention_mask attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len,", "in the config will be used instead. output_hidden_states (`bool`, *optional*):", "bool = True, **kwargs, ): super().__init__(**kwargs) self.embed_dim = embed_dim self.num_heads", "speech ... 
return batch >>> ds = load_dataset(\"hf-internal-testing/librispeech_asr_dummy\", \"clean\", split=\"validation\")", "having all inputs as a list, tuple or dict in", "self.filter_axis = 2 self.initialized = False self.kernel_norm_axes = tf.constant([0, 1])", "attention_mask: Optional[tf.Tensor] = None, token_type_ids: Optional[tf.Tensor] = None, position_ids: Optional[tf.Tensor]", "self.config.vocab_size: raise ValueError(f\"Label values must be <= vocab_size: {self.config.vocab_size}\") attention_mask", "other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attention_mask), [bsz, 1,", ") input_lengths = self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask, axis=-1)) # assuming that padded tokens", "all matter related to general usage and behavior. <Tip> TF", "in a future version, use `input_values` instead.\", FutureWarning, ) output[\"input_values\"]", "pair indices return tf.scatter_nd(pair_indices, tf.reshape(values, [-1]), output_shape) # Copied from", "into associated vectors than the model's internal embedding lookup matrix.", "be updated during training. \"\"\" warnings.warn( \"The method `freeze_feature_extractor` is", "models), or - having all inputs as a list, tuple", "past_key_value[1] elif is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz)", "elif k not in parameter_names and \"args\" not in parameter_names:", "encoder so that its parameters will not be updated during", "value in the config will be used instead. 
output_hidden_states (`bool`,", "name=\"feature_projection\") if config.do_stable_layer_norm: self.encoder = TFHubertEncoderStableLayerNorm(config, name=\"encoder\") else: self.encoder =", "= self.layer_norm(hidden_states) hidden_states, attn_weights, _ = self.attention( hidden_states, attention_mask=attention_mask, training=training", "Further calls to uni-directional self-attention # can concat previous decoder", "groups (\" + str(self.groups) + \") cannot be \" \"more", "self.attention = TFHubertAttention( embed_dim=config.hidden_size, num_heads=config.num_attention_heads, dropout=config.attention_dropout, is_decoder=False, name=\"attention\", ) self.dropout", "head is **not masked**, - 0 indicates the head is", "self.add_weight(name=\"bias\", shape=(self.filters,), initializer=\"zeros\", trainable=True) def call(self, inputs): if not self.initialized:", "is deprecated and will be removed in Transformers v5.\" \"Please", "return_tensors=\"tf\").input_values >>> loss = model(input_values, labels=labels).loss ```\"\"\" inputs = input_values_processing(", "self.head_dim = embed_dim // num_heads if (self.head_dim * num_heads) !=", "to general usage and behavior. <Tip> TF 2.0 models accepts", "[`TFPreTrainedModel`]. 
Check the superclass documentation for the generic methods the", "but is {shape_list(layer_head_mask)}\", ) attn_weights = tf.reshape(layer_head_mask, (1, -1, 1,", "= None def _add_beta_weight(self, input_shape): dim = input_shape[self.axis] shape =", "Optional[bool] = False, return_dict: Optional[bool] = True, training: Optional[bool] =", "name=f\"layers.{i}\") for i in range(config.num_hidden_layers)] def call( self, hidden_states: tf.Tensor,", "names have always the pattern `name:id` then we check only", "attention_mask = None position_embeddings = self.pos_conv_embed(hidden_states) hidden_states = hidden_states +", "= r\"\"\" Args: input_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]` ``Dict[str, tf.Tensor]` or", ") hidden_states = self.dropout(hidden_states, training=training) hidden_states = attn_residual + hidden_states", "that are NOT supported on CPU. If you wish \"", "as sf >>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\")", ") class TFHubertPreTrainedModel(TFPreTrainedModel): \"\"\" An abstract class to handle weights", "output[name] = kwargs.pop(name, signature[name].default) # When creating a SavedModel TF", "has to be named accordingly to the parameters name, i.e.", "output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) hidden_states = self.layer_norm(hidden_states) if", "under the License is distributed on an \"AS IS\" BASIS,", "Wav2Vec2Processor, TFHubertForCTC >>> from datasets import load_dataset >>> import soundfile", "be \" \"more than the number of channels (\" +", "initializer=\"ones\", dtype=self.weight_v.dtype, trainable=True, ) self.bias = self.add_weight(name=\"bias\", shape=(self.filters,), initializer=\"zeros\", trainable=True)", "= \"HubertConfig\" TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ \"facebook/hubert-base-ls960\", # See all Hubert", "TFHubertEncoderLayerStableLayerNorm(config, 
name=f\"layers.{i}\") for i in range(config.num_hidden_layers) ] def call( self,", "inputs[\"training\"]: hidden_states = self._mask_hidden_states(hidden_states, mask_time_indices=mask_time_indices) encoder_outputs = self.encoder( hidden_states, attention_mask=attention_mask,", "sequence_length), dtype=tf.int32) # uniform distribution to sample from, make sure", "gamma = None beta = None if self.scale: gamma =", "\"output_hidden_states\", \"use_cache\"] } output.update(booleans_processing(config=config, **boolean_dict)) return output # Copied from", "layer can then reuse all cross-attention # key/value_states (first \"if\"", "layer_id=i, name=f\"conv_layers.{i}\") for i in range(config.num_feat_extract_layers) ] else: raise ValueError(", "the running model. **kwargs: The inputs of the model. Returns:", "is {shape_list(attn_weights)}\", ) if attention_mask is not None: # The", "token. [What are token type IDs?](../glossary#token-type-ids) position_ids (`np.ndarray` or `tf.Tensor`", "_init_norm(self): \"\"\"Set the norm of the weight vector.\"\"\" kernel_norm =", ">>> loss = model(input_values, labels=labels).loss ```\"\"\" inputs = input_values_processing( func=self.call,", "class TFHubertModel(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs,", "seq_len: int, bsz: int): return tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads, self.head_dim)),", "name=\"lm_head\") def freeze_feature_extractor(self): \"\"\" Calling this function will disable the", "False) -> tf.Tensor: hidden_states = self.intermediate_dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) hidden_states", "import Wav2Vec2Processor, TFHubertForCTC >>> from datasets import load_dataset >>> import", "smaller (unless no_overlap is True) mask_length: size of the mask", "SpecAugment mask to fill spec_aug_mask = tf.zeros((batch_size, sequence_length), dtype=tf.int32) #", "list(range(1, 
len(group_shape))) is_instance_norm = (input_shape[self.axis] // self.groups) == 1 if", "import HubertConfig logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"HubertConfig\" TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST =", "inputs[\"return_dict\"] = inputs[\"return_dict\"] if inputs[\"return_dict\"] else self.config.return_dict outputs = self.hubert(", "layers. See `hidden_states` under returned tensors for more detail. This", "warnings.warn( \"The `inputs` argument is deprecated and will be removed", "* len(input_shape) is_instance_norm = (input_shape[self.axis] // self.groups) == 1 if", "-1, bsz) elif past_key_value is not None: # reuse k,", "if output_attentions: outputs += (attn_weights,) return outputs # Copied from", "self.config = config self.hubert = TFHubertMainLayer(config, name=\"hubert\") @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC)", "the License. # You may obtain a copy of the", "can set masking to False if not getattr(self.config, \"apply_spec_augment\", True):", "return tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads, self.head_dim)), (0, 2, 1, 3))", "# if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention", "future version, use `past_key_values` instead.\", FutureWarning, ) output[\"past_key_values\"] = input_values.pop(\"decoder_cached_states\")", "tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int = 0): \"\"\"", "and a simple interface for downloading and loading pretrained models.", "not None and type(output[\"args\"]) == tf.Tensor: tensor_name = output[\"args\"].name.split(\":\")[0] output[tensor_name]", "of the tensors will not be guaranteed during the training.", "tgt_seq_len, src_seq_len]`. 
\"\"\" src_len = shape_list(mask)[1] tgt_len = tgt_len if", "not None else src_len one_cst = tf.constant(1.0) mask = tf.cast(mask,", "TFHubertForCTC >>> from datasets import load_dataset >>> import soundfile as", ">>> from transformers import Wav2Vec2Processor, TFHubertModel >>> from datasets import", ") def _set_number_of_groups_for_instance_norm(self, input_shape): dim = input_shape[self.axis] if self.groups ==", "axis mask_time_indices = _compute_mask_indices( (batch_size, sequence_length), mask_prob=self.config.mask_time_prob, mask_length=self.config.mask_time_length, min_masks=2, )", "(got `embed_dim`: {self.embed_dim}\" f\" and `num_heads`: {num_heads}).\" ) self.scaling =", "None: # compute real output lengths according to convolution formula", "# key/value_states (first \"if\" case) # if uni-directional self-attention (decoder)", "self.projection = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"projection\", ) self.dropout =", "self.groups: raise ValueError( \"Number of groups (\" + str(self.groups) +", "units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"projection\", ) self.dropout = tf.keras.layers.Dropout(rate=config.feat_proj_dropout) def call(self,", "Wav2Vec2->Hubert class TFHubertLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int =", "TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm with", "self.axis - 1 group_reduction_axes.pop(axis) mean, variance = tf.nn.moments(reshaped_inputs, group_reduction_axes, keepdims=True)", "the input Tensors in the first positional argument : -", "return input_lengths def _mask_hidden_states(self, hidden_states: tf.Tensor, mask_time_indices: 
Optional[tf.Tensor] = None):", "updated during training. \"\"\" self.hubert.feature_extractor.trainable = False @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFCausalLMOutput, config_class=_CONFIG_FOR_DOC)", "if name not in list(output.keys()) and name != \"args\": output[name]", "scatter indices to mask spec_aug_mask = _scatter_values_on_batch_indices( tf.ones_like(spec_aug_mask_idxs), spec_aug_mask_idxs, spec_aug_mask.shape", "are input IDs?](../glossary#input-ids) attention_mask (`np.ndarray` or `tf.Tensor` of shape `({0})`,", "0.0 input_values = tf.convert_to_tensor(np.random.rand(1, 16000), tf.float32) dummy_inputs = { \"input_values\":", "cross attention key/value_states. # Further calls to cross_attention layer can", "else: self.gamma = None def _add_beta_weight(self, input_shape): dim = input_shape[self.axis]", "embed_dim // num_heads if (self.head_dim * num_heads) != self.embed_dim: raise", "all attention layers. See `attentions` under returned tensors for more", "group_shape.insert(self.axis, self.groups) group_shape = tf.stack(group_shape) reshaped_inputs = tf.reshape(inputs, group_shape) return", "tf.transpose(kernel) def build(self, input_shape): if not self.built: input_shape = input_shape.as_list()", "token_type_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*): Segment token", "attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype) attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len,", "hidden_states = model(input_values).last_hidden_state ```\"\"\" inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values,", "hidden_states: tf.Tensor, training: bool = False) -> tf.Tensor: hidden_states =", "of timesteps divided by length of mask span to mask", "else: loss = None if not inputs[\"return_dict\"]: output = (logits,)", "(batch_size, num_masked_spans, 1)) offsets = tf.reshape(offsets, (batch_size, num_masked_spans * 
mask_length))", "# Copied from transformers.models.bart.modeling_tf_bart._expand_mask def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] =", "tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_output), [bsz * self.num_heads, tgt_len, self.head_dim], message=f\"`attn_output` should", "tf.keras.layers.Dropout(config.hidden_dropout) def call(self, hidden_states: tf.Tensor, training: bool = False) ->", "eager mode, in graph mode the value will always be", "// num_heads if (self.head_dim * num_heads) != self.embed_dim: raise ValueError(", "Optional[tf.Tensor] = None, output_attentions: Optional[bool] = False, training: bool =", "1))) # get random indices to mask spec_aug_mask_idxs = _sample_without_replacement(uniform_dist,", "of the inputs. Indices are selected in `[0, 1]`: -", "A* token, - 1 corresponds to a *sentence B* token.", "**kwargs): super().__init__(**kwargs) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.projection = tf.keras.layers.Dense( units=config.hidden_size,", ":] return hidden_states class TFHubertFeatureEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs:", "for name in parameter_names: if name not in list(output.keys()) and", "else self.config.output_attentions ) inputs[\"return_dict\"] = inputs[\"return_dict\"] if inputs[\"return_dict\"] else self.config.return_dict", "Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] =", "language governing permissions and # limitations under the License. \"\"\"", "shape: the the shape for which to compute masks. should", "Transformers v5. 
\" f\"Use `{self.__class__.__bases__[0].__name__}` instead.\", FutureWarning, ) class TFHubertFeatureProjection(tf.keras.layers.Layer):", "all the input Tensors in the first positional argument :", "self.feature_projection(hidden_states, training=inputs[\"training\"]) mask_time_indices = kwargs.get(\"mask_time_indices\", None) if inputs[\"training\"]: hidden_states =", "[`~file_utils.ModelOutput`] instead of a plain tuple. This argument can be", "and will be removed in a future version, use `past_key_values`", "modeling loss. Indices should be in `[-100, 0, ..., config.vocab_size]`", "Tensors associated to the input names given in the docstring:", "`name:id` then we check only the # `name` part tensor_name", "uni-directional self-attention # can concat previous decoder key/value_states to current", "when using [`tf.keras.Model.fit`] method which currently requires having all the", "v is None: output[k] = v elif k not in", "tensor_name = output[\"args\"].name.split(\":\")[0] output[tensor_name] = output[\"args\"] else: # `args` in", "raise ValueError(\"`mask_length` has to be bigger than 0.\") if mask_length", "kernel_size) // stride + 1 for kernel_size, stride in zip(self.config.conv_kernel,", "class TFHubertAttention(tf.keras.layers.Layer): \"\"\"Multi-headed attention from \"Attention Is All You Need\"\"\"", "= tf.tile(mask[:, None, None, :], (1, 1, tgt_len, 1)) return", "Adapted from [fairseq's data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376). \"\"\" batch_size, sequence_length = shape if", "return_dict=return_dict, training=training, ) outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"],", "blank_index=self.config.pad_token_id, logits_time_major=False, ) if self.config.ctc_loss_reduction == \"sum\": loss = tf.reduce_sum(loss)", "key/value_states. 
Further calls to uni-directional self-attention # can concat previous", "output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, kwargs_call=kwargs, ) hidden_states = self.feature_extractor( tf.cast(inputs[\"input_values\"], tf.float32),", "reuse k, v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states", "+ (layer_outputs[1],) hidden_states = self.layer_norm(hidden_states) if output_hidden_states: all_hidden_states = all_hidden_states", "inputs[\"output_hidden_states\"] if inputs[\"output_hidden_states\"] else self.config.output_hidden_states ) inputs[\"output_attentions\"] = ( inputs[\"output_attentions\"]", "mask of the same size as shape, which will prevent", "if self.config.mask_feature_prob > 0: mask_feature_indices = _compute_mask_indices( (batch_size, hidden_size), mask_prob=self.config.mask_feature_prob,", "= TFHubertPositionalConvEmbedding(config, name=\"pos_conv_embed\") self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout)", "del output[\"kwargs\"] boolean_dict = { k: v for k, v", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer with Wav2Vec2->Hubert class TFHubertEncoderLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs):", "TFHubertEncoderStableLayerNorm(config, name=\"encoder\") else: self.encoder = TFHubertEncoder(config, name=\"encoder\") def build(self, input_shape:", "-1) attention_mask = _expand_mask(attention_mask) else: attention_mask = None position_embeddings =", ") output[\"input_values\"] = input_values.pop(\"inputs\") if \"decoder_cached_states\" in input_values: warnings.warn( \"The", "(1, 1, tgt_len, 1)) return (one_cst - expanded_mask) * LARGE_NEGATIVE", "will be removed in Transformers v5. 
\" f\"Use `{self.__class__.__bases__[0].__name__}` instead.\",", "= False) -> tf.Tensor: hidden_states = self.intermediate_dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states)", "call(self, input_values): hidden_states = tf.expand_dims(input_values, -1) for conv_layer in self.conv_layers:", "self.config.output_attentions else None return TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs, attentions=attns) @add_start_docstrings( \"\"\"TFHubert Model", "tf.reshape( attn_weights, (bsz, self.num_heads, tgt_len, src_len) ) attn_weights = tf.reshape(attn_weights,", "attention_mask (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*): Mask to", "i, layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states +", "obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.__call__`] and [`PreTrainedTokenizer.encode`] for details. [What", "filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) def", "{sequence_length}`\" ) # compute number of masked spans in batch", "the same size as shape, which will prevent masking padded", "None: # apply SpecAugment along time axis with given mask_time_indices", "prevent masking padded elements mask_prob: probability for each token to", "axis. 
Do you want to \" \"use tf.layer.batch_normalization instead\" )", "hidden_states = self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer with", "] elif config.feat_extract_norm == \"layer\": conv_layers = [ TFHubertLayerNormConvLayer(config, layer_id=i,", "*inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.hubert = TFHubertMainLayer(config, name=\"hubert\") self.dropout", "input_shape[self.axis] // self.groups group_shape.insert(self.axis, self.groups) group_shape = tf.stack(group_shape) reshaped_inputs =", "self.pos_conv_embed = TFHubertPositionalConvEmbedding(config, name=\"pos_conv_embed\") self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout =", "tf.reshape(key_states, proj_shape) value_states = tf.reshape(value_states, proj_shape) src_len = shape_list(key_states)[1] attn_weights", "serving(self, inputs): output = self.call(input_values=inputs, training=False) return self.serving_output(output) HUBERT_START_DOCSTRING =", "real output lengths according to convolution formula output_lengths = self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"],", "`model([input_values, attention_mask])` or `model([input_values, attention_mask, token_type_ids])` - a dictionary with", "should be in `[-100, 0, ..., config.vocab_size]` (see `input_values` docstring)", "position_ids: Optional[tf.Tensor] = None, head_mask: Optional[tf.Tensor] = None, inputs_embeds: Optional[tf.Tensor]", "input_values): hidden_states = tf.expand_dims(input_values, -1) for conv_layer in self.conv_layers: hidden_states", "initializer=\"uniform\", trainable=True, name=\"masked_spec_embed\" ) super().build(input_shape) def _get_feat_extract_output_lengths(self, input_lengths: tf.Tensor): \"\"\"", "output = self.call(input_values=inputs, training=False) return self.serving_output(output) HUBERT_START_DOCSTRING = 
r\"\"\" This", "specific language governing permissions and # limitations under the License.", "= None def _create_broadcast_shape(self, input_shape): broadcast_shape = [1] * len(input_shape)", "TFHubertSamePadLayer(tf.keras.layers.Layer): def __init__(self, num_conv_pos_embeddings, **kwargs): super().__init__(**kwargs) self.num_pad_remove = 1 if", "with the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`]", "(batch_dim, indixes) \"\"\" indices_shape = shape_list(batch_indices) # broadcast batch dim", "during training. \"\"\" warnings.warn( \"The method `freeze_feature_extractor` is deprecated and", "= None, training: bool = False, **kwargs: Any, ): inputs", "training=training) hidden_states = attn_residual + hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states", "model including the booleans. In case of a list of", "position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) return outputs", "\" \"more than the number of channels (\" + str(dim)", "value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder: # if cross_attention", "hidden_states = tf.where(mask_feature_indices[:, tf.newaxis, :], hidden_states, 0) return hidden_states def", "states of all layers. See `hidden_states` under returned tensors for", "indices to mask spec_aug_mask_idxs = _sample_without_replacement(uniform_dist, num_masked_spans) # expand masked", "self.num_heads, tgt_len, self.head_dim)), (0, 2, 1, 3) ) attn_output =", "and will be removed in a future version, use `input_values`", "the input embeddings, pruning heads etc.) 
This model is also", "tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs, attentions=attns) @add_start_docstrings(", "if attention_mask is not None: # The tf.debugging asserts are", "documentation for the generic methods the library implements for all", "attentions=outputs.attentions, ) def serving_output(self, output: TFCausalLMOutput) -> TFCausalLMOutput: hs =", "- a single Tensor with `input_values` only and nothing else:", "not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.", "# wrap processor as target processor to encode labels >>>", "check only the # `name` part tensor_name = input.name.split(\":\")[0] if", "inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None, labels: Optional[tf.Tensor]", "(CTC).\"\"\", HUBERT_START_DOCSTRING, ) class TFHubertForCTC(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs,", "to be masked. 
this will be multiplied by number of", "epsilon=config.layer_norm_eps) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor) -> tf.Tensor:", "last layer if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if", "= hidden_states hidden_states, attn_weights, _ = self.attention( hidden_states, attention_mask=attention_mask, training=training", "shape = (dim,) if self.scale: self.gamma = self.add_weight( shape=shape, name=\"gamma\",", "offsets = tf.reshape(offsets, (batch_size, num_masked_spans * mask_length)) spec_aug_mask_idxs = spec_aug_mask_idxs", "self.num_heads, tgt_len, src_len)) attn_weights = tf.nn.softmax(attn_weights, axis=-1) if layer_head_mask is", "# reuse k,v, cross_attentions key_states = past_key_value[0] value_states = past_key_value[1]", "= None, head_mask: Optional[tf.Tensor] = None, inputs_embeds: Optional[tf.Tensor] = None,", "() if output_hidden_states else None all_self_attentions = () if output_attentions", "else self.axis - 1 else: axis = -1 if self.axis", "int = 0, ) -> tf.Tensor: \"\"\" Computes random mask", "\"more than the number of channels (\" + str(dim) +", "maxlen=shape_list(hidden_states)[1], dtype=hidden_states.dtype ) hidden_states = self.feature_projection(hidden_states, training=inputs[\"training\"]) mask_time_indices = kwargs.get(\"mask_time_indices\",", "inputs as keyword arguments (like PyTorch models), or - having", "axis=-1) >>> transcription = processor.decode(predicted_ids[0]) >>> # compute loss >>>", "return (input_length - kernel_size) // stride + 1 for kernel_size,", "is not None: # apply SpecAugment along time axis with", "= processor.decode(predicted_ids[0]) >>> # compute loss >>> target_transcription = \"A", "_, indices = tf.nn.top_k(distribution + z, num_samples) return indices #", "account for padding input_shape[-2] += self.explicit_padding * 2 super().build(input_shape) self.kernel", "for the generic methods the library implements 
for all its", "+ \" of \" \"input tensor should have a defined", "self.center = center self.scale = scale self.beta_initializer = tf.keras.initializers.get(beta_initializer) self.gamma_initializer", "src_len)) attn_weights = tf.nn.softmax(attn_weights, axis=-1) if layer_head_mask is not None:", "= tf.reshape(key_states, proj_shape) value_states = tf.reshape(value_states, proj_shape) src_len = shape_list(key_states)[1]", "// stride + 1 for kernel_size, stride in zip(self.config.conv_kernel, self.config.conv_stride):", "for more info \"\"\" z = -tf.math.log(tf.random.uniform(shape_list(distribution), 0, 1)) _,", "z = -tf.math.log(tf.random.uniform(shape_list(distribution), 0, 1)) _, indices = tf.nn.top_k(distribution +", "\"zeros\", gamma_initializer: tf.keras.initializers.Initializer = \"ones\", beta_regularizer: tf.keras.regularizers.Regularizer = None, gamma_regularizer:", "if k in [\"return_dict\", \"output_attentions\", \"output_hidden_states\", \"use_cache\"] } output.update(booleans_processing(config=config, **boolean_dict))", "(`bool`, *optional*): Whether or not to return the attentions tensors", "config: HubertConfig, **kwargs): super().__init__(**kwargs) self.intermediate_dropout = tf.keras.layers.Dropout(config.activation_dropout) self.intermediate_dense = tf.keras.layers.Dense(", "the first parameter, then `input_values` output[\"input_values\"] = output[\"args\"] del output[\"args\"]", "`sequence_length`: {sequence_length}`\" ) # compute number of masked spans in", "* self.num_heads, tgt_len, src_len)) attn_probs = self.dropout(attn_weights, training=training) attn_output =", "self.config = config self.pos_conv_embed = TFHubertPositionalConvEmbedding(config, name=\"pos_conv_embed\") self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps,", "name=\"masked_spec_embed\" ) super().build(input_shape) def _get_feat_extract_output_lengths(self, input_lengths: tf.Tensor): \"\"\" Computes the", "sequence_length / mask_length + 
tf.random.uniform((1,))) num_masked_spans = max(num_masked_spans, min_masks) #", "if \"kwargs\" in output: del output[\"kwargs\"] boolean_dict = { k:", "1 if not is_instance_norm: outputs = tf.reshape(normalized_inputs, tensor_input_shape) else: outputs", "If you choose this second option, there are three possibilities", "for i in range(config.num_hidden_layers) ] def call( self, hidden_states: tf.Tensor,", "% 2 == 0 else 0 def call(self, hidden_states): if", "loss >>> target_transcription = \"A MAN SAID TO THE UNIVERSE", "return outputs def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states", "an embedded representation. This is useful if you want more", "range(config.num_feat_extract_layers) ] else: raise ValueError( f\"`config.feat_extract_norm` is {config.feat_extract_norm}, but has", "the docstring: `model([input_values, attention_mask])` or `model([input_values, attention_mask, token_type_ids])` - a", "compute number of masked spans in batch num_masked_spans = int(mask_prob", "self.conv(hidden_states) hidden_states = self.layer_norm(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states #", "..., config.vocab_size]` Returns: Example: ```python >>> import tensorflow as tf", "== 0 else 0 def call(self, hidden_states): if self.num_pad_remove >", "`freeze_feature_encoder` method instead.\", FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self): \"\"\" Calling", "to the input names given in the docstring: `model({\"input_values\": input_values,", "check for a real Tensor if type(input) == tf.Tensor: #", "tensorflow-addons https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization \"\"\" def __init__( self, groups: int = 32,", "hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = False,", "from ...activations_tf import get_tf_activation from ...modeling_tf_outputs import 
TFBaseModelOutput, TFCausalLMOutput from", ") if dim % self.groups != 0: raise ValueError( \"Number", "size 1 >>> hidden_states = model(input_values).last_hidden_state ```\"\"\" inputs = input_values_processing(", "0) return hidden_states def call( self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor]", "with Wav2Vec2->Hubert class TFHubertLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int", "padding token indices. Mask values selected in `[0, 1]`: -", "scale=gamma, offset=beta, variance_epsilon=self.epsilon, ) return normalized_inputs def _get_reshaped_weights(self, input_shape): broadcast_shape", "= tf.keras.backend.int_shape(inputs) tensor_input_shape = tf.shape(inputs) reshaped_inputs, group_shape = self._reshape_into_groups(inputs, input_shape,", "or - having all inputs as a list, tuple or", "self.head_dim)), (0, 2, 1, 3) ) attn_output = tf.reshape(attn_output, (bsz,", "docstring: `model([input_values, attention_mask])` or `model([input_values, attention_mask, token_type_ids])` - a dictionary", "def _add_beta_weight(self, input_shape): dim = input_shape[self.axis] shape = (dim,) if", "attention_mask = _expand_mask(attention_mask) else: attention_mask = None position_embeddings = self.pos_conv_embed(hidden_states)", "nothing else: `model(inputs_ids)` - a list of varying length with", "src_len)) return attn_output, attn_weights, past_key_value # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward with", "See `hidden_states` under returned tensors for more detail. This argument", "of the span to be masked. 
this will be multiplied", "output_attentions: Optional[bool] = None, labels: Optional[tf.Tensor] = None, output_hidden_states: Optional[bool]", "if key_value_states are provided this layer is used as a", "+ 1, name=f\"conv_layers.{i+1}\") for i in range(config.num_feat_extract_layers - 1) ]", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding with Wav2Vec2->Hubert class TFHubertPositionalConvEmbedding(tf.keras.layers.Layer): def __init__(self, config:", "method which currently requires having all the tensors in the", "\"use tf.layer.batch_normalization instead\" ) def _create_input_spec(self, input_shape): dim = input_shape[self.axis]", "config ([`HubertConfig`]): Model configuration class with all the parameters of", "return attn_output, attn_weights, past_key_value # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward with Wav2Vec2->Hubert", "all_self_attentions = all_self_attentions + (layer_outputs[1],) hidden_states = self.layer_norm(hidden_states) if output_hidden_states:", "lookup matrix. output_attentions (`bool`, *optional*): Whether or not to return", "accepts two formats as inputs: - having all inputs as", "instead.\", FutureWarning, ) output[\"past_key_values\"] = input_values.pop(\"decoder_cached_states\") for k, v in", "for {parameter_names[i]}.\" ) elif isinstance(input_values, (dict, BatchEncoding)): if \"inputs\" in", "* self.scaling # get key, value proj if is_cross_attention and", "masked. 
this will be multiplied by number of timesteps divided", "``Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and each example must have", "second option is useful when using [`tf.keras.Model.fit`] method which currently", "call(self, inputs): input_shape = tf.keras.backend.int_shape(inputs) tensor_input_shape = tf.shape(inputs) reshaped_inputs, group_shape", "= { k: v for k, v in output.items() if", "= self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer with Wav2Vec2->Hubert", "None: # reuse k, v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1,", "so that its parameter will not be updated during training.", "group_shape def _apply_normalization(self, reshaped_inputs, input_shape): group_shape = tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes =", "else: broadcast_shape[self.axis] = self.groups return broadcast_shape # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "the library implements for all its model (such as downloading", "[fairseq's data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376). \"\"\" batch_size, sequence_length = shape if mask_length <", "tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_weights), [bsz * self.num_heads, tgt_len, src_len], message=f\"Attention weights", "tf.keras.constraints.get(beta_constraint) self.gamma_constraint = tf.keras.constraints.get(gamma_constraint) self._check_axis() def build(self, input_shape): self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape)", "to `[bsz, 1, tgt_seq_len, src_seq_len]`. 
\"\"\" src_len = shape_list(mask)[1] tgt_len", "input.name.split(\":\")[0] if tensor_name in parameter_names: output[tensor_name] = input else: output[parameter_names[i]]", "inspect import warnings from typing import Any, Dict, Optional, Tuple,", "the head is **masked**. inputs_embeds (`np.ndarray` or `tf.Tensor` of shape", "-> Union[TFCausalLMOutput, Tuple[tf.Tensor]]: r\"\"\" labels (`tf.Tensor` or `np.ndarray` of shape", "self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask, axis=-1)) # assuming that padded tokens are filled with", "Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states. # Further calls", "sequence_length: raise ValueError( f\"`mask_length` has to be smaller than `sequence_length`,", "indices return tf.scatter_nd(pair_indices, tf.reshape(values, [-1]), output_shape) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices", "in output: del output[\"kwargs\"] boolean_dict = { k: v for", "from transformers.models.bart.modeling_tf_bart.TFBartAttention with TFBart->TFHubert class TFHubertAttention(tf.keras.layers.Layer): \"\"\"Multi-headed attention from \"Attention", "hidden_states = self.dropout(hidden_states, training=training) for i, layer_module in enumerate(self.layer): if", "{parameter_names[0]}.\" ) for name in parameter_names: if name not in", "`input_values` you can choose to directly pass an embedded representation.", "generic methods the library implements for all its model (such", "if dim < self.groups: raise ValueError( \"Number of groups (\"", "only {allowed_types} is accepted for {parameter_names[i]}.\" ) elif isinstance(input_values, (dict,", "== -1: self.groups = dim def _check_size_of_dimensions(self, input_shape): dim =", "if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict:", "conv_layers = [ TFHubertLayerNormConvLayer(config, layer_id=i, name=f\"conv_layers.{i}\") for i in range(config.num_feat_extract_layers)", "size 
{(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}\", ) attention_mask", "self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape) self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape) self.built = True super().build(input_shape) def call(self,", "\"ones\", beta_regularizer: tf.keras.regularizers.Regularizer = None, gamma_regularizer: tf.keras.regularizers.Regularizer = None, beta_constraint:", "to fill spec_aug_mask = tf.zeros((batch_size, sequence_length), dtype=tf.int32) # uniform distribution", "instead.\", FutureWarning, ) class TFHubertFeatureProjection(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs):", "scatter values to pair indices return tf.scatter_nd(pair_indices, tf.reshape(values, [-1]), output_shape)", "# `args` in this case is always the first parameter,", "// self.groups) == 1 if not is_instance_norm: group_shape[self.axis] = input_shape[self.axis]", "tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, )", "= tf.constant([0, 1]) def _init_norm(self): \"\"\"Set the norm of the", "self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder:", "\"gamma_initializer\": tf.keras.initializers.serialize(self.gamma_initializer), \"beta_regularizer\": tf.keras.regularizers.serialize(self.beta_regularizer), \"gamma_regularizer\": tf.keras.regularizers.serialize(self.gamma_regularizer), \"beta_constraint\": tf.keras.constraints.serialize(self.beta_constraint), \"gamma_constraint\": tf.keras.constraints.serialize(self.gamma_constraint),", "hidden_states: tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.padding(hidden_states)", "first argument of the model call function: `model(inputs)`. 
If you", "raise ValueError( f\"Data of type {type(input)} is not allowed only", "Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states, attn_weights, _ = self.attention( hidden_states,", "Tuple[tf.Tensor]]: \"\"\" Returns: Example: ```python >>> from transformers import Wav2Vec2Processor,", "query_states = self.q_proj(hidden_states) * self.scaling # get key, value proj", "self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer) self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint = tf.keras.constraints.get(beta_constraint) self.gamma_constraint", "self.center: self.beta = self.add_weight( shape=shape, name=\"beta\", initializer=self.beta_initializer, regularizer=self.beta_regularizer, constraint=self.beta_constraint, )", "token to be chosen as start of the span to", "hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None,", "token indices. Mask values selected in `[0, 1]`: - 1", "the first positional argument : - a single Tensor with", "\"Axis \" + str(self.axis) + \" of \" \"input tensor", ">>> hidden_states = model(input_values).last_hidden_state ```\"\"\" inputs = input_values_processing( func=self.call, config=self.config,", "[ TFHubertLayerNormConvLayer(config, layer_id=i, name=f\"conv_layers.{i}\") for i in range(config.num_feat_extract_layers) ] else:", "continue else: raise ValueError(f\"Data of type {type(v)} is not allowed", "`language modeling` head on top for Connectionist Temporal Classification (CTC).\"\"\",", "possibilities you can use to gather all the input Tensors", "1)) _, indices = tf.nn.top_k(distribution + z, num_samples) return indices", "= tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"out_proj\") def _shape(self, tensor: tf.Tensor, seq_len: int,", "def __init__(self, config: HubertConfig, layer_id: int = 0, **kwargs: Any)", ") from .configuration_hubert import HubertConfig logger = 
logging.get_logger(__name__) _CONFIG_FOR_DOC =", "this will be multiplied by number of timesteps divided by", "asserts are not compliant with XLA then they # have", "software # distributed under the License is distributed on an", "**kwargs): super().__init__(config, *inputs, **kwargs) logger.warning( f\"\\n{self.__class__.__name__} has backpropagation operations that", "a GPU or a TPU\" ) @tf.function def serving(self, inputs):", "inputs): output = self.call(input_values=inputs, training=False) return self.serving_output(output) HUBERT_START_DOCSTRING = r\"\"\"", "computation for the feature encoder so that its parameter will", "reshaped_inputs, group_shape = self._reshape_into_groups(inputs, input_shape, tensor_input_shape) normalized_inputs = self._apply_normalization(reshaped_inputs, input_shape)", "config: HubertConfig, **kwargs): super().__init__(**kwargs) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.projection =", "in the range `[0, config.max_position_embeddings - 1]`. 
[What are position", "a `language modeling` head on top for Connectionist Temporal Classification", "= tf.stack(group_shape) reshaped_inputs = tf.reshape(inputs, group_shape) return reshaped_inputs, group_shape else:", "= self._reshape_into_groups(inputs, input_shape, tensor_input_shape) normalized_inputs = self._apply_normalization(reshaped_inputs, input_shape) is_instance_norm =", "bsz) if self.is_decoder: # if cross_attention save Tuple(tf.Tensor, tf.Tensor) of", "self.gamma = self.add_weight( shape=shape, name=\"gamma\", initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, constraint=self.gamma_constraint, ) else:", "self.axis == 0: raise ValueError( \"You are trying to normalize", "Optional[bool] = False, training: bool = False, ) -> Tuple[tf.Tensor]:", "variance_epsilon=self.epsilon, ) return normalized_inputs def _get_reshaped_weights(self, input_shape): broadcast_shape = self._create_broadcast_shape(input_shape)", "logger.warning( f\"The parameter {k} does not belongs to the parameter", "input_shape): return input_shape def _reshape_into_groups(self, inputs, input_shape, tensor_input_shape): group_shape =", "src_len)) attn_probs = self.dropout(attn_weights, training=training) attn_output = tf.matmul(attn_probs, value_states) #", "batch_indices to pair_indices pair_indices = tf.transpose(tf.concat([broad_casted_batch_dims, tf.reshape(batch_indices, [1, -1])], 0))", "self.layer = [TFHubertEncoderLayer(config, name=f\"layers.{i}\") for i in range(config.num_hidden_layers)] def call(", "scale self.beta_initializer = tf.keras.initializers.get(beta_initializer) self.gamma_initializer = tf.keras.initializers.get(gamma_initializer) self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer)", "removed in a future version, use `past_key_values` instead.\", FutureWarning, )", "smaller than `sequence_length`, but got `mask_length`: {mask_length} and `sequence_length`: {sequence_length}`\"", "r\"\"\" This model inherits from 
[`TFPreTrainedModel`]. Check the superclass documentation", "output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) return outputs def serving_output(self, output): hs", ") hidden_states = tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis,", "self.projection(hidden_states) hidden_states = self.dropout(hidden_states, training=training) return hidden_states # Copied from", "labels >>> with processor.as_target_processor(): ... labels = processor(transcription, return_tensors=\"tf\").input_values >>>", "None else output return TFCausalLMOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, )", ":], hidden_states, ) elif self.config.mask_time_prob > 0: # generate indices", "_conv_out_length(input_length, kernel_size, stride): # 1D convolutional layer output length formula", "be used instead. output_hidden_states (`bool`, *optional*): Whether or not to", "to labels_mask = tf.cast(labels >= 0, tf.int32) target_lengths = tf.reduce_sum(labels_mask,", "attentions=encoder_outputs.attentions, ) class TFHubertPreTrainedModel(TFPreTrainedModel): \"\"\" An abstract class to handle", "nullify selected heads of the self-attention modules. Mask values selected", "is {shape_list(layer_head_mask)}\", ) attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1))", "\"attention_mask\": tf.cast(tf.not_equal(input_values, pad_token), tf.float32), } return dummy_inputs def __init__(self, config,", "Tensor if type(input) == tf.Tensor: # Tensor names have always", "sure that offset samples are < sequence_length uniform_dist = tf.ones((batch_size,", "= () if output_hidden_states else None all_self_attentions = () if", "avoid performing attention on padding token indices. Mask values selected", "need a GPU or a TPU\" ) @tf.function def serving(self,", "or saving, resizing the input embeddings, pruning heads etc.) 
This", "vocabulary. Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.__call__`] and", "(0, 2, 1, 3)) def call( self, hidden_states: tf.Tensor, key_value_states:", "or v is None: output[k] = v else: raise ValueError(f\"Data", "different behaviors between training and evaluation). \"\"\" @add_start_docstrings( \"The bare", "with Wav2Vec2->Hubert class TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs)", "self.intermediate_act_fn(hidden_states) hidden_states = self.intermediate_dropout(hidden_states, training=training) hidden_states = self.output_dense(hidden_states) hidden_states =", "- kernel_size) // stride + 1 for kernel_size, stride in", "tf.keras.layers.Dropout(rate=config.feat_proj_dropout) def call(self, hidden_states: tf.Tensor, training: bool = False) ->", "self.groups, \"axis\": self.axis, \"epsilon\": self.epsilon, \"center\": self.center, \"scale\": self.scale, \"beta_initializer\":", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder with Wav2Vec2->Hubert class TFHubertEncoder(tf.keras.layers.Layer):", "is not None: # reuse k, v, self_attention key_states =", "eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attention_mask), [bsz, 1, tgt_len, src_len], message=f\"Attention", "indices & apply SpecAugment along time axis mask_time_indices = _compute_mask_indices(", "0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask)", "\"to train/fine-tine this model, you need a GPU or a", "if labels is not None: if tf.reduce_max(labels) >= self.config.vocab_size: raise", "config, input_values, **kwargs): \"\"\" Process the input of each TensorFlow", "governing permissions and # limitations under the License. 
\"\"\" TensorFlow", "= tf.nn.top_k(distribution + z, num_samples) return indices # Copied from", "is not allowed only {allowed_types} is accepted for {k}.\") else:", "self.feed_forward(hidden_states) hidden_states = self.final_layer_norm(hidden_states) outputs = (hidden_states,) if output_attentions: outputs", "dropout modules have different behaviors between training and evaluation). \"\"\"", "Mask values selected in `[0, 1]`: - 1 for tokens", "= tf.concat([past_key_value[1], value_states], axis=2) else: # self_attention key_states = self._shape(self.k_proj(hidden_states),", "`hidden_states` under returned tensors for more detail. This argument can", "if self.axis == -1 else self.axis - 1 group_reduction_axes.pop(axis) mean,", "str(self.groups) + \") cannot be \" \"more than the number", "Further calls to cross_attention layer can then reuse all cross-attention", "return the attentions tensors of all attention layers. See `attentions`", "input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"],", "another one for the unexpected layers. 
\"\"\" signature = dict(inspect.signature(func).parameters)", "self.config.layerdrop): # skip the layer continue layer_outputs = layer_module( hidden_states=hidden_states,", "not in parameter_names: logger.warning( f\"The parameter {k} does not belongs", "hidden_states = self.output_dropout(hidden_states, training=training) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer", "= self.attention( hidden_states, attention_mask=attention_mask, training=training ) hidden_states = self.dropout(hidden_states, training=training)", "performing attention on padding token indices. Mask values selected in", "with a config file does not load the weights associated", "-1, 1, 1)) * tf.reshape( attn_weights, (bsz, self.num_heads, tgt_len, src_len)", "\"elif\" case) # if encoder bi-directional self-attention `past_key_value` is always", "mask spans for a given shape Args: shape: the the", "dim}) def _add_gamma_weight(self, input_shape): dim = input_shape[self.axis] shape = (dim,)", "is_instance_norm: outputs = tf.reshape(normalized_inputs, tensor_input_shape) else: outputs = normalized_inputs return", "embed_dim = shape_list(hidden_states) # get query proj query_states = self.q_proj(hidden_states)", "of the model call function: `model(inputs)`. 
If you choose this", "be removed in Transformers v5.\" \"Please use the equivalent `freeze_feature_encoder`", "in [\"return_dict\", \"output_attentions\", \"output_hidden_states\", \"use_cache\"] } output.update(booleans_processing(config=config, **boolean_dict)) return output", "for i in range(len(input_shape))] is_instance_norm = (input_shape[self.axis] // self.groups) ==", "tgt_len, src_len)) attn_probs = self.dropout(attn_weights, training=training) attn_output = tf.matmul(attn_probs, value_states)", "= input_values.pop(\"inputs\") if \"decoder_cached_states\" in input_values: warnings.warn( \"The `decoder_cached_states` argument", "`tf.Tensor` of shape `({0})`, *optional*): Mask to avoid performing attention", "spec_aug_mask_idxs = tf.reshape(spec_aug_mask_idxs, (batch_size, num_masked_spans * mask_length)) offsets = tf.range(mask_length)[tf.newaxis,", "transformers.models.bart.modeling_tf_bart.TFBartAttention with TFBart->TFHubert class TFHubertAttention(tf.keras.layers.Layer): \"\"\"Multi-headed attention from \"Attention Is", "TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs, attentions=attns) @add_start_docstrings( \"\"\"TFHubert Model with a `language modeling`", "element is batch size and 2nd is timesteps attention_mask: optional", "apply SpecAugment along time axis with given mask_time_indices hidden_states =", "size and 2nd is timesteps attention_mask: optional padding mask of", "\"\"\" An abstract class to handle weights initialization and a", "\"A MAN SAID TO THE UNIVERSE SIR I EXIST\" >>>", "all_hidden_states = all_hidden_states + (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556", "+= self.explicit_padding * 2 super().build(input_shape) self.kernel = tf.Variable(tf.transpose(self.kernel), name=\"weight_v\", trainable=True)", "Tuple, Union import numpy as np import tensorflow as tf", "if not inputs[\"return_dict\"]: output = (logits,) + outputs[1:] return ((loss,)", "self.conv_layers: hidden_states 
= conv_layer(hidden_states) return hidden_states class TFHubertFeatureExtractor(TFHubertFeatureEncoder): def __init__(self,", "method to load the model weights. \"\"\" HUBERT_INPUTS_DOCSTRING = r\"\"\"", "None, output_attentions: Optional[bool] = False, output_hidden_states: Optional[bool] = False, return_dict:", "{shape_list(layer_head_mask)}\", ) attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) *", "taken # from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return (input_length - kernel_size) // stride", "False, ) -> Union[TFCausalLMOutput, Tuple[tf.Tensor]]: r\"\"\" labels (`tf.Tensor` or `np.ndarray`", "each token to be chosen as start of the span", "= self.conv(hidden_states) hidden_states = self.layer_norm(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states", "= [ \"facebook/hubert-base-ls960\", # See all Hubert models at https://huggingface.co/models?filter=hubert", "elif past_key_value is not None: # reuse k, v, self_attention", "query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape) key_states = tf.reshape(key_states, proj_shape)", "tf.keras.layers.Dense( units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"intermediate_dense\", ) self.intermediate_act_fn = get_tf_activation(config.hidden_act) self.output_dense", "= False, training: bool = False, ) -> Tuple[tf.Tensor]: attn_residual", "embed_dim)) attn_output = self.out_proj(attn_output) attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads,", "kwargs.get(\"mask_time_indices\", None) if inputs[\"training\"]: hidden_states = self._mask_hidden_states(hidden_states, mask_time_indices=mask_time_indices) encoder_outputs =", "= 0.0 input_values = tf.convert_to_tensor(np.random.rand(1, 16000), tf.float32) dummy_inputs = {", "Fairseq Authors and the HuggingFace Inc. team. 
All rights reserved.", "with Wav2Vec2->Hubert class TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs)", "expand masked indices to masked spans spec_aug_mask_idxs = tf.expand_dims(spec_aug_mask_idxs, -1)", "int = 32, axis: int = -1, epsilon: float =", "you can use to gather all the input Tensors in", "is {shape_list(attn_output)}\", ) attn_output = tf.transpose( tf.reshape(attn_output, (bsz, self.num_heads, tgt_len,", "None, output_attentions: Optional[bool] = False, training: bool = False, )", "\"and will be removed in Transformers v5. \" f\"Use `{self.__class__.__bases__[0].__name__}`", "The callable function of the TensorFlow model. config ([`PretrainedConfig`]): The", "1 indicates the head is **not masked**, - 0 indicates", "tf.tile(mask[:, None, None, :], (1, 1, tgt_len, 1)) return (one_cst", "name=\"projection\", ) self.dropout = tf.keras.layers.Dropout(rate=config.feat_proj_dropout) def call(self, hidden_states: tf.Tensor, training:", "not self.built: input_shape = input_shape.as_list() # Conv1D output shapes are", "x Channel\"\"\" # if key_value_states are provided this layer is", "from [`TFPreTrainedModel`]. 
Check the superclass documentation for the generic methods", "timesteps attention_mask: optional padding mask of the same size as", "a regular TF 2.0 Keras Model and refer to the", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing def input_values_processing(func, config, input_values, **kwargs): \"\"\" Process the", "message=f\"Head mask for a single layer should be of size", "top.\", HUBERT_START_DOCSTRING, ) class TFHubertModel(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs,", "models accepts two formats as inputs: - having all inputs", "= tf.keras.regularizers.get(beta_regularizer) self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint = tf.keras.constraints.get(beta_constraint) self.gamma_constraint =", "[self.num_heads], message=f\"Head mask for a single layer should be of", "vocab_size: {self.config.vocab_size}\") attention_mask = ( inputs[\"attention_mask\"] if inputs[\"attention_mask\"] is not", "in list(output.keys()) and name != \"args\": output[name] = kwargs.pop(name, signature[name].default)", "any specific head on top.\", HUBERT_START_DOCSTRING, ) class TFHubertModel(TFHubertPreTrainedModel): def", "self.conv = tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.layer_norm", "broadcast_shape.insert(self.axis, self.groups) else: broadcast_shape[self.axis] = self.groups return broadcast_shape # Copied", "def _sample_without_replacement(distribution, num_samples): \"\"\" Categorical sampling without replacement is currently", "self.beta = None def _create_broadcast_shape(self, input_shape): broadcast_shape = [1] *", "class TFHubertSamePadLayer(tf.keras.layers.Layer): def __init__(self, num_conv_pos_embeddings, **kwargs): super().__init__(**kwargs) self.num_pad_remove = 1", "or `Dict[str, np.ndarray]` and each example must have the 
shape", "https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\" def __init__(self, filters, kernel_size, groups, explicit_padding, **kwargs): super().__init__( filters=filters,", "min_masks: minimum number of masked spans Adapted from [fairseq's data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376).", "def _create_input_spec(self, input_shape): dim = input_shape[self.axis] self.input_spec = tf.keras.layers.InputSpec(ndim=len(input_shape), axes={self.axis:", "*optional*): Segment token indices to indicate first and second portions", "tf.concat([past_key_value[0], key_states], axis=2) value_states = tf.concat([past_key_value[1], value_states], axis=2) else: #", "def _normalize_kernel(self): \"\"\"Generate normalized weights.\"\"\" kernel = tf.nn.l2_normalize(self.weight_v, axis=self.kernel_norm_axes) *", "size {(self.num_heads)}, but is {shape_list(layer_head_mask)}\", ) attn_weights = tf.reshape(layer_head_mask, (1,", "pair_indices = tf.transpose(tf.concat([broad_casted_batch_dims, tf.reshape(batch_indices, [1, -1])], 0)) # scatter values", "from ...tokenization_utils_base import BatchEncoding from ...utils import ( ModelOutput, add_start_docstrings,", "= tf.reshape(inputs, group_shape) return reshaped_inputs, group_shape else: return inputs, group_shape", "return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding with Wav2Vec2->Hubert class TFHubertPositionalConvEmbedding(tf.keras.layers.Layer):", "length of the convolutional layers \"\"\" def _conv_out_length(input_length, kernel_size, stride):", "Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`.", "self.feature_projection = TFHubertFeatureProjection(config, name=\"feature_projection\") if config.do_stable_layer_norm: self.encoder = TFHubertEncoderStableLayerNorm(config, name=\"encoder\")", "TensorFlow 
Hubert model.\"\"\" import inspect import warnings from typing import", "hidden_states = self.intermediate_act_fn(hidden_states) hidden_states = self.intermediate_dropout(hidden_states, training=training) hidden_states = self.output_dense(hidden_states)", "input_values.pop(\"decoder_cached_states\") for k, v in dict(input_values).items(): if isinstance(v, allowed_types) or", "self.axis == -1 else self.axis - 1 group_reduction_axes.pop(axis) mean, variance", "key_value_states is not None bsz, tgt_len, embed_dim = shape_list(hidden_states) #", "**not masked**, - 0 for tokens that are **masked**. [What", "__init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config self.pos_conv_embed =", "num_samples) return indices # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices def _scatter_values_on_batch_indices(values, batch_indices,", "None, position_ids: Optional[tf.Tensor] = None, head_mask: Optional[tf.Tensor] = None, inputs_embeds:", "initializer=self.beta_initializer, regularizer=self.beta_regularizer, constraint=self.beta_constraint, ) else: self.beta = None def _create_broadcast_shape(self,", "padded elements mask_prob: probability for each token to be chosen", "a *sentence A* token, - 1 corresponds to a *sentence", "to respect the proper output we have to add this", "0, ) -> tf.Tensor: \"\"\" Computes random mask spans for", "config: HubertConfig, **kwargs: Any) -> None: super().__init__(**kwargs) self.conv = TFHubertWeightNormConv1D(", "= tf.nn.l2_normalize(self.weight_v, axis=self.kernel_norm_axes) * tf.transpose(self.weight_g) self.kernel = tf.transpose(kernel) def build(self,", "build time since TF 2.7, so we need to account", "class TFHubertGroupNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int = 0,", "if isinstance(v, allowed_types) or v is None: output[k] = v", "input Tensors associated to the input names given in the", "if \"args\" in 
output: if output[\"args\"] is not None and", "_compute_mask_indices( shape: Tuple[int, int], mask_prob: float, mask_length: int, min_masks: int", "# Copyright 2021 The Fairseq Authors and the HuggingFace Inc.", "= False, ) -> Union[TFCausalLMOutput, Tuple[tf.Tensor]]: r\"\"\" labels (`tf.Tensor` or", "get_tf_activation from ...modeling_tf_outputs import TFBaseModelOutput, TFCausalLMOutput from ...modeling_tf_utils import TFPreTrainedModel,", "axis self.epsilon = epsilon self.center = center self.scale = scale", "than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(layer_head_mask), [self.num_heads], message=f\"Head mask for", "of mask span to mask approximately this percentage of all", "False, output_hidden_states: Optional[bool] = False, return_dict: Optional[bool] = True, training:", "tgt_len if tgt_len is not None else src_len one_cst =", "_expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length: int = 0):", "return TFCausalLMOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def serving_output(self, output:", "training (`bool`, *optional*, defaults to `False``): Whether or not to", "not to use the model in training mode (some modules", "def _check_if_input_shape_is_none(self, input_shape): dim = input_shape[self.axis] if dim is None:", "output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) # add LayerDrop (see", "mask_length)) spec_aug_mask_idxs = tf.reshape(spec_aug_mask_idxs, (batch_size, num_masked_spans * mask_length)) offsets =", "\"apply_spec_augment\", True): return hidden_states if mask_time_indices is not None: #", "token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) outputs", "instead of passing `input_values` you can choose to directly pass", 
"allowed_types) or input is None: output[parameter_names[i]] = input else: raise", "should be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}\", ) attn_weights", "* self.num_heads, -1, self.head_dim) query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape)", "(bsz, tgt_len, embed_dim)) attn_output = self.out_proj(attn_output) attn_weights: tf.Tensor = tf.reshape(attn_weights,", "if self.scale: self.gamma = self.add_weight( shape=shape, name=\"gamma\", initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, constraint=self.gamma_constraint,", "_set_number_of_groups_for_instance_norm(self, input_shape): dim = input_shape[self.axis] if self.groups == -1: self.groups", "groups (\" + str(self.groups) + \") must be a \"", "or `tf.Tensor` of shape `({0})`, *optional*): Mask to avoid performing", "input_shape[self.axis] if dim < self.groups: raise ValueError( \"Number of groups", "the booleans. In case of a list of symbolic inputs,", "for k, v in output.items() if k in [\"return_dict\", \"output_attentions\",", "= True, training: Optional[bool] = False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]:", ".name property so we check for a real Tensor if", "ds.map(map_to_array) >>> input_values = processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values # Batch size 1", "= self.conv(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied from", "@add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor]", "\"\"\" Scatter function as in PyTorch with indices in format", "add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability = np.random.uniform(0, 1)", "tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], 
use_bias=config.conv_bias, name=\"conv\", ) self.layer_norm = tf.keras.layers.LayerNormalization(name=\"layer_norm\",", "tf.reshape(inputs, group_shape) return reshaped_inputs, group_shape else: return inputs, group_shape def", "else None if attention_mask is not None: hidden_states = hidden_states", "use_bias=config.conv_bias, name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor)", "type(output[\"args\"]) == tf.Tensor: tensor_name = output[\"args\"].name.split(\":\")[0] output[tensor_name] = output[\"args\"] else:", "@add_start_docstrings( \"\"\"TFHubert Model with a `language modeling` head on top", "= self.layer_norm(hidden_states) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if", "in other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attention_mask), [bsz,", "trying to normalize your batch axis. Do you want to", "value will always be set to True. training (`bool`, *optional*,", "... labels = processor(transcription, return_tensors=\"tf\").input_values >>> loss = model(input_values, labels=labels).loss", "1 group_reduction_axes.pop(axis) mean, variance = tf.nn.moments(reshaped_inputs, group_reduction_axes, keepdims=True) gamma, beta", "**kwargs) logger.warning( f\"\\n{self.__class__.__name__} has backpropagation operations that are NOT supported", "inputs[\"output_attentions\"] = ( inputs[\"output_attentions\"] if inputs[\"output_attentions\"] else self.config.output_attentions ) inputs[\"return_dict\"]", "self.hubert = TFHubertMainLayer(config, name=\"hubert\") @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC) def call( self,", "all its model (such as downloading or saving, resizing the", "can then reuse all cross-attention # key/value_states (first \"if\" case)", "than eager. 
if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_output), [bsz * self.num_heads, tgt_len,", "raise ValueError( \"You are trying to normalize your batch axis.", "configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to load the model", "tf.cast(tf.not_equal(input_values, pad_token), tf.float32), } return dummy_inputs def __init__(self, config, *inputs,", "= tf.constant(1.0) mask = tf.cast(mask, dtype=one_cst.dtype) expanded_mask = tf.tile(mask[:, None,", "super().call(padded_inputs) return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer with Wav2Vec2->Hubert class", "v else: raise ValueError(f\"Data of type {type(v)} is not allowed", "indices to indicate first and second portions of the inputs.", "*inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.config = config self.hubert =", "is not None: hidden_states = hidden_states * tf.expand_dims(attention_mask, -1) attention_mask", ") return outputs def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if", "model. 
Initializing with a config file does not load the", "\" + str(self.axis) + \" of \" \"input tensor should", "return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer with Wav2Vec2->Hubert class TFHubertSamePadLayer(tf.keras.layers.Layer):", "= self.lm_head(hidden_states) if labels is not None: if tf.reduce_max(labels) >=", "outputs = (hidden_states,) if output_attentions: outputs += (attn_weights,) return outputs", "masked spans spec_aug_mask_idxs = tf.expand_dims(spec_aug_mask_idxs, -1) spec_aug_mask_idxs = tf.tile(spec_aug_mask_idxs, (1,", "= self.add_weight( name=\"weight_g\", shape=(int(self.weight_v.shape[self.filter_axis]), 1, 1), initializer=\"ones\", dtype=self.weight_v.dtype, trainable=True, )", "mask_prob: probability for each token to be chosen as start", "= dim def _check_size_of_dimensions(self, input_shape): dim = input_shape[self.axis] if dim", "if dim is None: raise ValueError( \"Axis \" + str(self.axis)", "in output: if output[\"args\"] is not None and type(output[\"args\"]) ==", "attn_residual + hidden_states hidden_states = hidden_states + self.feed_forward(self.final_layer_norm(hidden_states)) outputs =", "= tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.feed_forward = TFHubertFeedForward(config, name=\"feed_forward\") self.final_layer_norm = tf.keras.layers.LayerNormalization(", "range `[0, config.max_position_embeddings - 1]`. 
[What are position IDs?](../glossary#position-ids) head_mask", "SpecAugment along time axis with given mask_time_indices hidden_states = tf.where(", "mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length, ) hidden_states = tf.where(mask_feature_indices[:, tf.newaxis, :], hidden_states, 0)", "reshaped_inputs, input_shape): group_shape = tf.keras.backend.int_shape(reshaped_inputs) group_reduction_axes = list(range(1, len(group_shape))) is_instance_norm", "like dropout modules have different behaviors between training and evaluation).", "hidden_states if mask_time_indices is not None: # apply SpecAugment along", "all previous decoder key/value_states. Further calls to uni-directional self-attention #", "...modeling_tf_utils import TFPreTrainedModel, booleans_processing, get_initializer, keras_serializable from ...tf_utils import shape_list", "isinstance(input_values, (tuple, list)): for i, input in enumerate(input_values): # EagerTensors", "shape_list(attn_output), [bsz * self.num_heads, tgt_len, self.head_dim], message=f\"`attn_output` should be of", "spans Adapted from [fairseq's data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376). \"\"\" batch_size, sequence_length = shape", "formula taken # from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return (input_length - kernel_size) //", "True, **kwargs, ): super().__init__(**kwargs) self.embed_dim = embed_dim self.num_heads = num_heads", "input_shape.as_list() # Conv1D output shapes are checked at build time", "output_attentions else None if attention_mask is not None: hidden_states =", "None: super().__init__(**kwargs) self.conv = TFHubertWeightNormConv1D( filters=config.hidden_size, kernel_size=config.num_conv_pos_embeddings, groups=config.num_conv_pos_embedding_groups, explicit_padding=config.num_conv_pos_embeddings //", "See `attentions` under returned tensors for more detail. 
This argument", "{k}.\") else: if isinstance(input_values, tf.Tensor) or input_values is None: output[parameter_names[0]]", "output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, training: Optional[bool]", "over how to convert `input_values` indices into associated vectors than", "hidden_states = self.projection(hidden_states) hidden_states = self.dropout(hidden_states, training=training) return hidden_states #", "shape_list(mask)[1] tgt_len = tgt_len if tgt_len is not None else", "mode the value in the config will be used instead.", "for tokens that are **not masked**, - 0 for tokens", "reshaped_inputs = tf.reshape(inputs, group_shape) return reshaped_inputs, group_shape else: return inputs,", "the weights associated with the model, only the configuration. Check", "is not None else tf.ones_like(inputs[\"input_values\"], dtype=tf.float32) ) input_lengths = self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask,", "mask to fill spec_aug_mask = tf.zeros((batch_size, sequence_length), dtype=tf.int32) # uniform", "Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] =", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs):", "self.dropout(hidden_states, training=training) return hidden_states # Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention with TFBart->TFHubert", "None, output_attentions: Optional[bool] = None, labels: Optional[tf.Tensor] = None, output_hidden_states:", "= all_hidden_states + (hidden_states,) if not return_dict: return tuple(v for", "*optional*): Labels for computing the masked language modeling loss. 
Indices", "`None` past_key_value = (key_states, value_states) proj_shape = (bsz * self.num_heads,", "as target processor to encode labels >>> with processor.as_target_processor(): ...", "IN THE ORDER given in the docstring: `model([input_values, attention_mask])` or", "call(self, hidden_states: tf.Tensor, training: bool = False) -> tf.Tensor: hidden_states", "with Wav2Vec2->Hubert class TFHubertGroupNorm(tf.keras.layers.Layer): \"\"\" From tensorflow-addons https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization \"\"\" def", "subclass. Use it as a regular TF 2.0 Keras Model", "all_self_attentions] if v is not None) return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states,", "will disable the gradient computation for the feature encoder so", "tf.nn.top_k(distribution + z, num_samples) return indices # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices", "del output[\"args\"] if \"kwargs\" in output: del output[\"kwargs\"] boolean_dict =", "modules have different behaviors between training and evaluation). \"\"\" @add_start_docstrings(", "head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) inputs[\"output_hidden_states\"] = (", "do for now - see https://github.com/tensorflow/tensorflow/issues/9260 for more info \"\"\"", "axis=2) value_states = tf.concat([past_key_value[1], value_states], axis=2) else: # self_attention key_states", "the parameters name, i.e. `input_values = tf.keras.Input(shape=(128,), dtype='float32', name=\"input_values\")` otherwise", "on CPU. 
If you wish \" \"to train/fine-tine this model,", "not to return the attentions tensors of all attention layers.", "shape, which will prevent masking padded elements mask_prob: probability for", "of channels (\" + str(dim) + \").\" ) def _check_axis(self):", "self.conv_layers = conv_layers def call(self, input_values): hidden_states = tf.expand_dims(input_values, -1)", "training=training) return hidden_states # Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention with TFBart->TFHubert class", "warnings.warn( \"The method `freeze_feature_extractor` is deprecated and will be removed", "# apply SpecAugment along feature axis if self.config.mask_feature_prob > 0:", "return_tensors=\"tf\").input_values # Batch size 1 >>> logits = model(input_values).logits >>>", "*inputs, **kwargs) self.config = config self.hubert = TFHubertMainLayer(config, name=\"hubert\") @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING)", "None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None, labels:", "else self.axis - 1 group_reduction_axes.pop(axis) mean, variance = tf.nn.moments(reshaped_inputs, group_reduction_axes,", "tf.Tensor, attention_mask: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = False, output_hidden_states:", "tf.expand_dims(spec_aug_mask_idxs, -1) spec_aug_mask_idxs = tf.tile(spec_aug_mask_idxs, (1, 1, mask_length)) spec_aug_mask_idxs =", "self.input_spec = tf.keras.layers.InputSpec(ndim=len(input_shape), axes={self.axis: dim}) def _add_gamma_weight(self, input_shape): dim =", "self.num_heads, -1, self.head_dim) query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape) key_states", "= tf.nn.ctc_loss( logits=logits, labels=labels, logit_length=input_lengths, label_length=target_lengths, blank_index=self.config.pad_token_id, logits_time_major=False, ) if", "to indices_shape broad_casted_batch_dims = tf.reshape( tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]), axis=-1), 
indices_shape), [1, -1]", "int(mask_prob * sequence_length / mask_length + tf.random.uniform((1,))) num_masked_spans = max(num_masked_spans,", "or agreed to in writing, software # distributed under the", "inputs[\"attention_mask\"] is not None: # compute real output lengths according", "of a plain tuple. This argument can be used in", "name=f\"layers.{i}\") for i in range(config.num_hidden_layers) ] def call( self, hidden_states:", "skip the layer continue layer_outputs = layer_module( hidden_states=hidden_states, attention_mask=attention_mask, output_attentions=output_attentions,", "pair_indices pair_indices = tf.transpose(tf.concat([broad_casted_batch_dims, tf.reshape(batch_indices, [1, -1])], 0)) # scatter", "tf.keras.initializers.serialize(self.beta_initializer), \"gamma_initializer\": tf.keras.initializers.serialize(self.gamma_initializer), \"beta_regularizer\": tf.keras.regularizers.serialize(self.beta_regularizer), \"gamma_regularizer\": tf.keras.regularizers.serialize(self.gamma_regularizer), \"beta_constraint\": tf.keras.constraints.serialize(self.beta_constraint), \"gamma_constraint\":", "to use the .name property so we check for a", "bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder: # if", "{parameter_names[i]}.\" ) elif isinstance(input_values, (dict, BatchEncoding)): if \"inputs\" in input_values:", "else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return", "uniform_dist = tf.ones((batch_size, sequence_length - (mask_length - 1))) # get", "k: v for k, v in output.items() if k in", "-> tf.Tensor: hidden_states = self.layer_norm(hidden_states) hidden_states = self.projection(hidden_states) hidden_states =", "self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer with Wav2Vec2->Hubert class", "Initializing with a config file does not load the weights", "def map_to_array(batch): 
... speech, _ = sf.read(batch[\"file\"]) ... batch[\"speech\"] =", "of passing `input_values` you can choose to directly pass an", "if self.config.ctc_loss_reduction == \"mean\": loss = tf.reduce_mean(loss) else: loss =", "be of size 2 where first element is batch size", "which currently requires having all the tensors in the first", "tokens are filled with -100 # when not being attended", "currently not implemented. The gumbel-max trick will do for now", "False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: all_hidden_states = () if output_hidden_states", "self.num_heads, tgt_len, src_len) ) attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads,", "Wav2Vec2->Hubert class TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int =", "axis and/or along feature axis according to [SpecAugment](https://arxiv.org/abs/1904.08779). \"\"\" batch_size,", "you need a GPU or a TPU\" ) @tf.function def", "= tf.keras.layers.Dropout(config.hidden_dropout) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.feed_forward = TFHubertFeedForward(config, name=\"feed_forward\")", "if type(input) == tf.Tensor: # Tensor names have always the", "\"\"\" Expands attention_mask from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len,", "0: hidden_states = hidden_states[:, : -self.num_pad_remove, :] return hidden_states class", "You Need\"\"\" def __init__( self, embed_dim: int, num_heads: int, dropout:", "depreciated \" \"and will be removed in Transformers v5. \"", "= () if output_attentions else None if attention_mask is not", "= False, bias: bool = True, **kwargs, ): super().__init__(**kwargs) self.embed_dim", "= (input_shape[self.axis] // self.groups) == 1 if not is_instance_norm: broadcast_shape[self.axis]", "used instead. 
return_dict (`bool`, *optional*): Whether or not to return", "False, bias: bool = True, **kwargs, ): super().__init__(**kwargs) self.embed_dim =", "src_len], message=f\"Attention weights should be of size {(bsz * self.num_heads,", "def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None", ") elif self.config.mask_time_prob > 0: # generate indices & apply", "# if key_value_states are provided this layer is used as", "broadcast batch dim to indices_shape broad_casted_batch_dims = tf.reshape( tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]), axis=-1),", "\"input_values\" @property def dummy_inputs(self) -> Dict[str, tf.Tensor]: pad_token = 0.0", "class TFHubertGroupNorm(tf.keras.layers.Layer): \"\"\" From tensorflow-addons https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization \"\"\" def __init__( self,", "...tokenization_utils_base import BatchEncoding from ...utils import ( ModelOutput, add_start_docstrings, add_start_docstrings_to_model_forward,", "argument can be used in eager mode, in graph mode", "loss = tf.reduce_mean(loss) else: loss = None if not inputs[\"return_dict\"]:", "output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) # Add last layer", "eager mode, in graph mode the value in the config", "embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. [What", "dtype=attn_weights.dtype) attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) + attention_mask", "self._apply_normalization(reshaped_inputs, input_shape) is_instance_norm = (input_shape[self.axis] // self.groups) == 1 if", "v5. 
\" f\"Use `{self.__class__.__bases__[0].__name__}` instead.\", FutureWarning, ) class TFHubertFeatureProjection(tf.keras.layers.Layer): def", "] LARGE_NEGATIVE = -1e8 # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing def input_values_processing(func,", "will be multiplied by number of timesteps divided by length", "**not masked**, - 0 indicates the head is **masked**. inputs_embeds", "i in range(len(input_shape))] is_instance_norm = (input_shape[self.axis] // self.groups) == 1", "from transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices def _compute_mask_indices( shape: Tuple[int, int], mask_prob: float, mask_length:", "= num_heads self.dropout = tf.keras.layers.Dropout(dropout) self.head_dim = embed_dim // num_heads", "hidden_size)`, *optional*): Optionally, instead of passing `input_values` you can choose", "super().__init__(**kwargs) self.conv = TFHubertWeightNormConv1D( filters=config.hidden_size, kernel_size=config.num_conv_pos_embeddings, groups=config.num_conv_pos_embedding_groups, explicit_padding=config.num_conv_pos_embeddings // 2,", "model. Returns: Two lists, one for the missing layers, and", "offsets # scatter indices to mask spec_aug_mask = _scatter_values_on_batch_indices( tf.ones_like(spec_aug_mask_idxs),", "attn_residual = hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states, attn_weights, _ =", "ignored.\" ) continue else: raise ValueError(f\"Data of type {type(v)} is", "input_shape = tf.keras.backend.int_shape(inputs) tensor_input_shape = tf.shape(inputs) reshaped_inputs, group_shape = self._reshape_into_groups(inputs,", "`List[tf.Tensor]` ``Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and each example must", ") -> Tuple[tf.Tensor, Optional[tf.Tensor]]: \"\"\"Input shape: Batch x Time x", "target processor to encode labels >>> with processor.as_target_processor(): ... 
labels", "and 2nd is timesteps attention_mask: optional padding mask of the", "// self.groups) == 1 if not is_instance_norm: axis = -2", "dropout=config.attention_dropout, is_decoder=False, name=\"attention\", ) self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps,", "format (batch_dim, indixes) \"\"\" indices_shape = shape_list(batch_indices) # broadcast batch", "TFHubertAttention( embed_dim=config.hidden_size, num_heads=config.num_attention_heads, dropout=config.attention_dropout, is_decoder=False, name=\"attention\", ) self.dropout = tf.keras.layers.Dropout(config.hidden_dropout)", "eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(layer_head_mask), [self.num_heads], message=f\"Head mask for a", "{self.embed_dim}\" f\" and `num_heads`: {num_heads}).\" ) self.scaling = self.head_dim**-0.5 self.is_decoder", "be of size {(bsz * self.num_heads, tgt_len, src_len)}, but is", "# generate indices & apply SpecAugment along time axis mask_time_indices", "then they # have to be disabled in other modes", "0.\") if mask_length > sequence_length: raise ValueError( f\"`mask_length` has to", "tf.keras.layers.InputSpec(ndim=len(input_shape), axes={self.axis: dim}) def _add_gamma_weight(self, input_shape): dim = input_shape[self.axis] shape", "to compute masks. 
should be of size 2 where first", "= tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape) key_states = tf.reshape(key_states, proj_shape) value_states", "\"\"\" Process the input of each TensorFlow model including the", "= output[\"args\"] del output[\"args\"] if \"kwargs\" in output: del output[\"kwargs\"]", "Use it as a regular TF 2.0 Keras Model and", "part tensor_name = input.name.split(\":\")[0] if tensor_name in parameter_names: output[tensor_name] =", "Whether or not to return the attentions tensors of all", "the missing layers, and another one for the unexpected layers.", "not be updated during training. \"\"\" warnings.warn( \"The method `freeze_feature_extractor`", "When creating a SavedModel TF calls the method with LayerCall.__call__(args,", "matter related to general usage and behavior. <Tip> TF 2.0", "= None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None,", "_CONFIG_FOR_DOC = \"HubertConfig\" TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ \"facebook/hubert-base-ls960\", # See all", "if isinstance(input_values, (tuple, list)): for i, input in enumerate(input_values): #", "if self.center: beta = tf.reshape(self.beta, broadcast_shape) return gamma, beta def", "tf.expand_dims(input_values, -1) for conv_layer in self.conv_layers: hidden_states = conv_layer(hidden_states) return", "compute loss >>> target_transcription = \"A MAN SAID TO THE", "= past_key_value[0] value_states = past_key_value[1] elif is_cross_attention: # cross_attentions key_states", "proj_shape) src_len = shape_list(key_states)[1] attn_weights = tf.matmul(query_states, key_states, transpose_b=True) #", "hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer with Wav2Vec2->Hubert class TFHubertLayerNormConvLayer(tf.keras.layers.Layer): def", "tf.stack(group_shape) reshaped_inputs = tf.reshape(inputs, group_shape) return reshaped_inputs, group_shape else: return", "bsz) value_states = 
self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is not", "for a real Tensor if type(input) == tf.Tensor: # Tensor", "Process the input of each TensorFlow model including the booleans.", "self.head_dim) query_states = tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape) key_states = tf.reshape(key_states,", "tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"v_proj\") self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"out_proj\") def _shape(self,", "I EXIST\" >>> # wrap processor as target processor to", "def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.intermediate_dropout = tf.keras.layers.Dropout(config.activation_dropout) self.intermediate_dense", "config_class=_CONFIG_FOR_DOC) def call( self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor] = None,", "and loading pretrained models. \"\"\" config_class = HubertConfig base_model_prefix =", "`(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads", "training=training ) hidden_states = self.dropout(hidden_states, training=training) hidden_states = attn_residual +", "checked at build time since TF 2.7, so we need", "tgt_len, src_len)}, but is {shape_list(attn_weights)}\", ) if attention_mask is not", "def _check_size_of_dimensions(self, input_shape): dim = input_shape[self.axis] if dim < self.groups:", "**kwargs): super().__init__( filters=filters, kernel_size=kernel_size, groups=groups, padding=\"valid\", use_bias=True, bias_initializer=\"he_normal\", **kwargs, )", "= hidden_states[:, : -self.num_pad_remove, :] return hidden_states class TFHubertFeatureEncoder(tf.keras.layers.Layer): def", "Tensors in the first positional argument : - a single", "@replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor] =", "call(self, hidden_states): if self.num_pad_remove > 0: 
hidden_states = hidden_states[:, :", "\"input_values\": input_values, \"attention_mask\": tf.cast(tf.not_equal(input_values, pad_token), tf.float32), } return dummy_inputs def", "= max(num_masked_spans, min_masks) # make sure num masked indices <=", "self._get_reshaped_weights(input_shape) normalized_inputs = tf.nn.batch_normalization( reshaped_inputs, mean=mean, variance=variance, scale=gamma, offset=beta, variance_epsilon=self.epsilon,", "ValueError( f\"Data of type {type(input)} is not allowed only {allowed_types}", "if output_hidden_states else None all_self_attentions = () if output_attentions else", "tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads, self.head_dim)), (0, 2, 1, 3)) def", "self.activation = get_tf_activation(config.feat_extract_activation) self.layer_norm = TFHubertGroupNorm(groups=self.out_conv_dim, epsilon=config.layer_norm_eps, name=\"layer_norm\") def call(self,", "return_dict: return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if", "weights associated with the model, only the configuration. Check out", "model. config ([`PretrainedConfig`]): The config of the running model. **kwargs:", "the inputs. 
Indices are selected in `[0, 1]`: - 0", "\"\"\" signature = dict(inspect.signature(func).parameters) signature.pop(\"kwargs\", None) signature.pop(\"self\", None) parameter_names =", "as tf >>> from transformers import Wav2Vec2Processor, TFHubertForCTC >>> from", "**kwargs: Any, ): inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask,", "r\"\"\" Args: input_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]` ``Dict[str, tf.Tensor]` or `Dict[str,", "= None, beta_constraint: tf.keras.constraints.Constraint = None, gamma_constraint: tf.keras.constraints.Constraint = None,", "Hubert model.\"\"\" import inspect import warnings from typing import Any,", "Unless required by applicable law or agreed to in writing,", "from [fairseq's data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376). \"\"\" batch_size, sequence_length = shape if mask_length", "True self._normalize_kernel() padded_inputs = tf.pad(inputs, ((0, 0), (self.explicit_padding, self.explicit_padding), (0,", "cross_attention layer can then reuse all cross-attention # key/value_states (first", "class TFHubertMainLayer(tf.keras.layers.Layer): config_class = HubertConfig def __init__(self, config: HubertConfig, **kwargs):", "channels (\" + str(dim) + \").\" ) if dim %", "for Connectionist Temporal Classification (CTC).\"\"\", HUBERT_START_DOCSTRING, ) class TFHubertForCTC(TFHubertPreTrainedModel): def", "attention_mask is not None: hidden_states = hidden_states * tf.expand_dims(attention_mask, -1)", "name=\"encoder\") else: self.encoder = TFHubertEncoder(config, name=\"encoder\") def build(self, input_shape: tf.TensorShape):", "of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}\", ) attn_weights = tf.reshape(layer_head_mask,", "loss is only computed for the tokens with labels in", "= TFHubertEncoderStableLayerNorm(config, name=\"encoder\") else: 
self.encoder = TFHubertEncoder(config, name=\"encoder\") def build(self,", "in zip(self.config.conv_kernel, self.config.conv_stride): input_lengths = _conv_out_length(input_lengths, kernel_size, stride) return input_lengths", "{} allowed_types = (tf.Tensor, bool, int, ModelOutput, tuple, list, dict,", "super().__init__(config, *inputs, **kwargs) self.hubert = TFHubertMainLayer(config, name=\"hubert\") self.dropout = tf.keras.layers.Dropout(config.final_dropout)", "self.head_dim)}, but is {shape_list(attn_output)}\", ) attn_output = tf.transpose( tf.reshape(attn_output, (bsz,", "_ = self.attention( hidden_states, attention_mask=attention_mask, training=training ) hidden_states = self.dropout(hidden_states,", "= axis self.epsilon = epsilon self.center = center self.scale =", "been depreciated \" \"and will be removed in Transformers v5.", "loss is not None else output return TFCausalLMOutput( loss=loss, logits=logits,", ") def _check_axis(self): if self.axis == 0: raise ValueError( \"You", "*optional*): Whether or not to return the attentions tensors of", "3)) def call( self, hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor] = None,", "= all_self_attentions + (layer_outputs[1],) # Add last layer if output_hidden_states:", "so that its parameters will not be updated during training.", "Copyright 2021 The Fairseq Authors and the HuggingFace Inc. team.", "= tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) + attention_mask attn_weights =", "name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [TFHubertEncoderLayer(config, name=f\"layers.{i}\") for i", "token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, kwargs_call=kwargs, )", "named accordingly to the parameters name, i.e. 
`input_values = tf.keras.Input(shape=(128,),", "Optional[tf.Tensor] = None, token_type_ids: Optional[tf.Tensor] = None, position_ids: Optional[tf.Tensor] =", "self.scale: self.gamma = self.add_weight( shape=shape, name=\"gamma\", initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, constraint=self.gamma_constraint, )", "attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_weights =", "hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) class TFHubertPreTrainedModel(TFPreTrainedModel): \"\"\" An abstract class to", "set to True. training (`bool`, *optional*, defaults to `False``): Whether", "**kwargs): super().__init__(config, *inputs, **kwargs) self.config = config self.hubert = TFHubertMainLayer(config,", "= config.conv_dim[layer_id] if layer_id > 0 else 1 self.out_conv_dim =", "return hidden_states class TFHubertFeatureEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs: Any)", "= tf.keras.layers.Dense( units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"intermediate_dense\", ) self.intermediate_act_fn = get_tf_activation(config.hidden_act)", "None def _create_broadcast_shape(self, input_shape): broadcast_shape = [1] * len(input_shape) is_instance_norm", "is not allowed only {allowed_types} is accepted for {parameter_names[0]}.\" )", "dict(inspect.signature(func).parameters) signature.pop(\"kwargs\", None) signature.pop(\"self\", None) parameter_names = list(signature.keys()) output =", "# have to be disabled in other modes than eager.", "output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) if not return_dict: return", "mean=mean, variance=variance, scale=gamma, offset=beta, variance_epsilon=self.epsilon, ) return normalized_inputs def _get_reshaped_weights(self,", "self.axis, \"epsilon\": self.epsilon, \"center\": self.center, \"scale\": 
self.scale, \"beta_initializer\": tf.keras.initializers.serialize(self.beta_initializer), \"gamma_initializer\":", "= True, **kwargs, ): super().__init__(**kwargs) self.embed_dim = embed_dim self.num_heads =", "lengths according to convolution formula output_lengths = self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"], -1)) attention_mask", "[What are attention masks?](../glossary#attention-mask) token_type_ids (`np.ndarray` or `tf.Tensor` of shape", "warnings.warn( \"The `decoder_cached_states` argument is deprecated and will be removed", "Masks extracted features along time axis and/or along feature axis", "tokens in the position embeddings. Selected in the range `[0,", "(bsz * self.num_heads, tgt_len, src_len)) attn_weights = tf.nn.softmax(attn_weights, axis=-1) if", "`({0})`): Indices of input sequence tokens in the vocabulary. Indices", "def __init__(self, num_conv_pos_embeddings, **kwargs): super().__init__(**kwargs) self.num_pad_remove = 1 if num_conv_pos_embeddings", "self.weight_v = self.kernel self.weight_g = self.add_weight( name=\"weight_g\", shape=(int(self.weight_v.shape[self.filter_axis]), 1, 1),", "class TFHubertWeightNormConv1D(tf.keras.layers.Conv1D): \"\"\"Adapted from https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\" def __init__(self, filters, kernel_size, groups,", "tgt_len, src_len)) return attn_output, attn_weights, past_key_value # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward", "output: TFCausalLMOutput) -> TFCausalLMOutput: hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else", "SIR I EXIST\" >>> # wrap processor as target processor", "bool = False, bias: bool = True, **kwargs, ): super().__init__(**kwargs)", "to the TF 2.0 documentation for all matter related to", "= [TFHubertGroupNormConvLayer(config, layer_id=0, name=f\"conv_layers.{0}\")] + [ TFHubertNoLayerNormConvLayer(config, layer_id=i 
+ 1,", "hidden_states = self.layer_norm(hidden_states) if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,)", "constraint=self.beta_constraint, ) else: self.beta = None def _create_broadcast_shape(self, input_shape): broadcast_shape", "make sure that offset samples are < sequence_length uniform_dist =", "not inputs[\"return_dict\"]: output = (logits,) + outputs[1:] return ((loss,) +", "TFHubertNoLayerNormConvLayer(config, layer_id=i + 1, name=f\"conv_layers.{i+1}\") for i in range(config.num_feat_extract_layers -", "self.config.output_hidden_states ) inputs[\"output_attentions\"] = ( inputs[\"output_attentions\"] if inputs[\"output_attentions\"] else self.config.output_attentions", "to the parameters name, i.e. `input_values = tf.keras.Input(shape=(128,), dtype='float32', name=\"input_values\")`", "tf.argmax(logits, axis=-1) >>> transcription = processor.decode(predicted_ids[0]) >>> # compute loss", "inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) outputs = self.hubert( input_values=inputs[\"input_values\"],", "is used as a cross-attention layer # for the decoder", "# get query proj query_states = self.q_proj(hidden_states) * self.scaling #", "-> Tuple[tf.Tensor, Optional[tf.Tensor]]: \"\"\"Input shape: Batch x Time x Channel\"\"\"", "axis if self.config.mask_feature_prob > 0: mask_feature_indices = _compute_mask_indices( (batch_size, hidden_size),", "https://github.com/tensorflow/tensorflow/issues/9260 for more info \"\"\" z = -tf.math.log(tf.random.uniform(shape_list(distribution), 0, 1))", "You may obtain a copy of the License at #", "self.final_layer_norm = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name=\"final_layer_norm\" ) def call( self, hidden_states:", "self.is_decoder = is_decoder self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"k_proj\") self.q_proj =", 
"`decoder_cached_states` argument is deprecated and will be removed in a", "case is always the first parameter, then `input_values` output[\"input_values\"] =", "Optional[tf.Tensor] = None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[tf.Tensor] =", "1, mask_length)) spec_aug_mask_idxs = tf.reshape(spec_aug_mask_idxs, (batch_size, num_masked_spans * mask_length)) offsets", "import load_dataset >>> import soundfile as sf >>> processor =", "z, num_samples) return indices # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices def _scatter_values_on_batch_indices(values,", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding with Wav2Vec2->Hubert class TFHubertPositionalConvEmbedding(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs:", "in range(config.num_hidden_layers)] def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] =", "*optional*): Mask to avoid performing attention on padding token indices.", "= model(input_values, labels=labels).loss ```\"\"\" inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values,", "Args: shape: the the shape for which to compute masks.", "self.num_heads, tgt_len, src_len)) return attn_output, attn_weights, past_key_value # Copied from", "Optional[bool] = None, return_dict: Optional[bool] = None, training: bool =", "= input_shape[self.axis] shape = (dim,) if self.center: self.beta = self.add_weight(", "call( self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor] = None, token_type_ids: Optional[tf.Tensor]", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "Args: input_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]` ``Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]`", "self.masked_spec_embed = self.add_weight( shape=(self.config.hidden_size,), initializer=\"uniform\", trainable=True, 
name=\"masked_spec_embed\" ) super().build(input_shape) def", "def _scatter_values_on_batch_indices(values, batch_indices, output_shape): \"\"\" Scatter function as in PyTorch", "input_shape): if not self.built: input_shape = input_shape.as_list() # Conv1D output", "in the config will be used instead. return_dict (`bool`, *optional*):", "= self.projection(hidden_states) hidden_states = self.dropout(hidden_states, training=training) return hidden_states # Copied", "speech, _ = sf.read(batch[\"file\"]) ... batch[\"speech\"] = speech ... return", "argument : - a single Tensor with `input_values` only and", "transformers import Wav2Vec2Processor, TFHubertModel >>> from datasets import load_dataset >>>", "range(config.num_feat_extract_layers - 1) ] elif config.feat_extract_norm == \"layer\": conv_layers =", "is not None) return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) #", "of each TensorFlow model including the booleans. In case of", "\"beta_initializer\": tf.keras.initializers.serialize(self.beta_initializer), \"gamma_initializer\": tf.keras.initializers.serialize(self.gamma_initializer), \"beta_regularizer\": tf.keras.regularizers.serialize(self.beta_regularizer), \"gamma_regularizer\": tf.keras.regularizers.serialize(self.gamma_regularizer), \"beta_constraint\": tf.keras.constraints.serialize(self.beta_constraint),", "from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return (input_length - kernel_size) // stride + 1", "self.feature_extractor( tf.cast(inputs[\"input_values\"], tf.float32), training=inputs[\"training\"] ) if inputs[\"attention_mask\"] is not None:", "Dict[str, tf.Tensor]: pad_token = 0.0 input_values = tf.convert_to_tensor(np.random.rand(1, 16000), tf.float32)", "will not be guaranteed during the training. Args: func (`callable`):", "to encode labels >>> with processor.as_target_processor(): ... 
labels = processor(transcription,", "+ \") cannot be \" \"more than the number of", "0: raise ValueError( \"You are trying to normalize your batch", "TPU\" ) @tf.function def serving(self, inputs): output = self.call(input_values=inputs, training=False)", "config.conv_dim[layer_id] self.conv = tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", )", "else: if isinstance(input_values, tf.Tensor) or input_values is None: output[parameter_names[0]] =", "= self.layer_norm(hidden_states) hidden_states = self.projection(hidden_states) hidden_states = self.dropout(hidden_states, training=training) return", ".configuration_hubert import HubertConfig logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"HubertConfig\" TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST", "* sequence_length / mask_length + tf.random.uniform((1,))) num_masked_spans = max(num_masked_spans, min_masks)", "NOT supported on CPU. 
If you wish \" \"to train/fine-tine", "in range(config.num_hidden_layers) ] def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor]", "elif is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states", "# if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of #", "THE ORDER given in the docstring: `model([input_values, attention_mask])` or `model([input_values,", "a defined dimension \" \"but the layer received an input", "is always `None` past_key_value = (key_states, value_states) proj_shape = (bsz", "layer # for the decoder is_cross_attention = key_value_states is not", "tf.nn.ctc_loss( logits=logits, labels=labels, logit_length=input_lengths, label_length=target_lengths, blank_index=self.config.pad_token_id, logits_time_major=False, ) if self.config.ctc_loss_reduction", "output_attentions: Optional[tf.Tensor] = None, output_hidden_states: Optional[tf.Tensor] = None, return_dict: Optional[bool]", "if dim % self.groups != 0: raise ValueError( \"Number of", "soundfile as sf >>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model =", "tf.keras.Input(shape=(128,), dtype='float32', name=\"input_values\")` otherwise the order of the tensors will", "hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer with Wav2Vec2->Hubert class TFHubertGroupNormConvLayer(tf.keras.layers.Layer): def", "of size {(bsz * self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}\",", "..., config.vocab_size]` (see `input_values` docstring) Tokens with indices set to", "\"HubertConfig\" TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ \"facebook/hubert-base-ls960\", # See all Hubert models", "( inputs[\"output_hidden_states\"] if inputs[\"output_hidden_states\"] else self.config.output_hidden_states ) inputs[\"output_attentions\"] = (", "attended to labels_mask = tf.cast(labels >= 0, tf.int32) 
target_lengths =", "= embed_dim // num_heads if (self.head_dim * num_heads) != self.embed_dim:", "= tf.ones((batch_size, sequence_length - (mask_length - 1))) # get random", ": - a single Tensor with `input_values` only and nothing", "batch dim to indices_shape broad_casted_batch_dims = tf.reshape( tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]), axis=-1), indices_shape),", "hidden_states = self.conv(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied", "`{self.__class__.__name__}` has been depreciated \" \"and will be removed in", "= dict(inspect.signature(func).parameters) signature.pop(\"kwargs\", None) signature.pop(\"self\", None) parameter_names = list(signature.keys()) output", "TFHubert Model transformer outputing raw hidden-states without any specific head", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer with Wav2Vec2->Hubert class TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "self.scale = scale self.beta_initializer = tf.keras.initializers.get(beta_initializer) self.gamma_initializer = tf.keras.initializers.get(gamma_initializer) self.beta_regularizer", "batch size and 2nd is timesteps attention_mask: optional padding mask", "import tensorflow as tf >>> from transformers import Wav2Vec2Processor, TFHubertForCTC", "removed in a future version, use `input_values` instead.\", FutureWarning, )", "= tf.cast(labels >= 0, tf.int32) target_lengths = tf.reduce_sum(labels_mask, axis=-1) loss", "are provided this layer is used as a cross-attention layer", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement def _sample_without_replacement(distribution, num_samples): \"\"\" Categorical sampling without", "None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict:", "sequence_length - (mask_length - 1))) # get random indices to", "distributed 
under the License is distributed on an \"AS IS\"", "- expanded_mask) * LARGE_NEGATIVE # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm with Wav2Vec2->Hubert", "as a list, tuple or dict in the first positional", "output[\"args\"] if \"kwargs\" in output: del output[\"kwargs\"] boolean_dict = {", "True) mask_length: size of the mask min_masks: minimum number of", "= input else: output[parameter_names[i]] = input elif isinstance(input, allowed_types) or", "= list(signature.keys()) output = {} allowed_types = (tf.Tensor, bool, int,", "list {parameter_names} and will be ignored.\" ) continue else: raise", "dim = input_shape[self.axis] shape = (dim,) if self.center: self.beta =", "version, use `input_values` instead.\", FutureWarning, ) output[\"input_values\"] = input_values.pop(\"inputs\") if", "as start of the span to be masked. this will", "key, value proj if is_cross_attention and past_key_value is not None:", "number will be smaller (unless no_overlap is True) mask_length: size", "transcription = processor.decode(predicted_ids[0]) >>> # compute loss >>> target_transcription =", "Two lists, one for the missing layers, and another one", "= None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask: Optional[tf.Tensor] = None,", "tf.tile(spec_aug_mask_idxs, (1, 1, mask_length)) spec_aug_mask_idxs = tf.reshape(spec_aug_mask_idxs, (batch_size, num_masked_spans *", "import tensorflow as tf from ...activations_tf import get_tf_activation from ...modeling_tf_outputs", "None bsz, tgt_len, embed_dim = shape_list(hidden_states) # get query proj", "Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states, attn_weights, _", "it as a regular TF 2.0 Keras Model and refer", "that padded tokens are filled with -100 # when not", "1), initializer=\"ones\", dtype=self.weight_v.dtype, trainable=True, ) self.bias = self.add_weight(name=\"bias\", shape=(self.filters,), 
initializer=\"zeros\",", "varying length with one or several input Tensors IN THE", "= None, position_ids: Optional[tf.Tensor] = None, head_mask: Optional[tf.Tensor] = None,", "cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states. #", "optional padding mask of the same size as shape, which", "and \"args\" not in parameter_names: logger.warning( f\"The parameter {k} does", "is **not masked**, - 0 indicates the head is **masked**.", "== tf.Tensor: tensor_name = output[\"args\"].name.split(\":\")[0] output[tensor_name] = output[\"args\"] else: #", "Wav2Vec2->Hubert class TFHubertGroupNorm(tf.keras.layers.Layer): \"\"\" From tensorflow-addons https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization \"\"\" def __init__(", "hidden_states = hidden_states + self.feed_forward(self.final_layer_norm(hidden_states)) outputs = (hidden_states,) if output_attentions:", "parameter, then `input_values` output[\"input_values\"] = output[\"args\"] del output[\"args\"] if \"kwargs\"", "= HubertConfig def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config =", "of symbolic inputs, each input has to be named accordingly", "f\"\\n{self.__class__.__name__} has backpropagation operations that are NOT supported on CPU.", "`input_values = tf.keras.Input(shape=(128,), dtype='float32', name=\"input_values\")` otherwise the order of the", "position_embeddings = self.pos_conv_embed(hidden_states) hidden_states = hidden_states + position_embeddings hidden_states =", "name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [ TFHubertEncoderLayerStableLayerNorm(config, name=f\"layers.{i}\") for", "training. 
Args: func (`callable`): The callable function of the TensorFlow", "> 0: hidden_states = hidden_states[:, : -self.num_pad_remove, :] return hidden_states", "there are three possibilities you can use to gather all", "number of masked spans in batch num_masked_spans = int(mask_prob *", "behavior. <Tip> TF 2.0 models accepts two formats as inputs:", "mask_time_indices = kwargs.get(\"mask_time_indices\", None) if inputs[\"training\"]: hidden_states = self._mask_hidden_states(hidden_states, mask_time_indices=mask_time_indices)", "keyword arguments (like PyTorch models), or - having all inputs", "tf.float32), training=inputs[\"training\"] ) if inputs[\"attention_mask\"] is not None: # compute", "given shape Args: shape: the the shape for which to", "may obtain a copy of the License at # #", "weight vector.\"\"\" kernel_norm = tf.sqrt(tf.reduce_sum(tf.square(self.weight_v), axis=self.kernel_norm_axes)) self.weight_g.assign(kernel_norm[:, tf.newaxis, tf.newaxis]) def", "\"The `decoder_cached_states` argument is deprecated and will be removed in", "class to handle weights initialization and a simple interface for", "(hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability =", "return_dict (`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`]", "Any) -> None: super().__init__(**kwargs) if config.feat_extract_norm == \"group\": conv_layers =", "hidden_states = conv_layer(hidden_states) return hidden_states class TFHubertFeatureExtractor(TFHubertFeatureEncoder): def __init__(self, config,", "group_reduction_axes = list(range(1, len(group_shape))) is_instance_norm = (input_shape[self.axis] // self.groups) ==", "dtype=tf.int32) # uniform distribution to sample from, make sure that", "* mask_length)) spec_aug_mask_idxs = spec_aug_mask_idxs + offsets # scatter indices", "self.groups return broadcast_shape # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D with 
Wav2Vec2->Hubert class", "position IDs?](../glossary#position-ids) head_mask (`np.ndarray` or `tf.Tensor` of shape `(num_heads,)` or", "are NOT supported on CPU. If you wish \" \"to", "(bsz, seq_len, self.num_heads, self.head_dim)), (0, 2, 1, 3)) def call(", "IDs?](../glossary#position-ids) head_mask (`np.ndarray` or `tf.Tensor` of shape `(num_heads,)` or `(num_layers,", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "instead. return_dict (`bool`, *optional*): Whether or not to return a", "= (tf.Tensor, bool, int, ModelOutput, tuple, list, dict, np.ndarray) for", "parameter_names = list(signature.keys()) output = {} allowed_types = (tf.Tensor, bool,", "the input of each TensorFlow model including the booleans. In", "hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions)", "# Further calls to cross_attention layer can then reuse all", "False) -> tf.Tensor: hidden_states = self.layer_norm(hidden_states) hidden_states = self.projection(hidden_states) hidden_states", "formula output_lengths = self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"], -1)) attention_mask = tf.sequence_mask( output_lengths, maxlen=shape_list(hidden_states)[1],", "ValueError( \"Axis \" + str(self.axis) + \" of \" \"input", "or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of", "i, input in enumerate(input_values): # EagerTensors don't allow to use", "for details. 
[What are input IDs?](../glossary#input-ids) attention_mask (`np.ndarray` or `tf.Tensor`", "respect the proper output we have to add this exception", "= tf.concat([past_key_value[0], key_states], axis=2) value_states = tf.concat([past_key_value[1], value_states], axis=2) else:", "self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.projection = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\",", "labels: Optional[tf.Tensor] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool]", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder with Wav2Vec2->Hubert class TFHubertEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs):", "tf.keras.initializers.Initializer = \"ones\", beta_regularizer: tf.keras.regularizers.Regularizer = None, gamma_regularizer: tf.keras.regularizers.Regularizer =", "load_dataset >>> import soundfile as sf >>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\")", "= None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[tf.Tensor] = None,", "of type {type(input)} is not allowed only {allowed_types} is accepted", "self.output_dropout = tf.keras.layers.Dropout(config.hidden_dropout) def call(self, hidden_states: tf.Tensor, training: bool =", "= TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... 
speech, _ = sf.read(batch[\"file\"])", "\"output_attentions\", \"output_hidden_states\", \"use_cache\"] } output.update(booleans_processing(config=config, **boolean_dict)) return output # Copied", "token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) inputs[\"output_hidden_states\"]", "self.pos_conv_embed(hidden_states) hidden_states = hidden_states + position_embeddings hidden_states = self.dropout(hidden_states, training=training)", "= tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if", "= tf.where(mask_feature_indices[:, tf.newaxis, :], hidden_states, 0) return hidden_states def call(", "int, dropout: float = 0.0, is_decoder: bool = False, bias:", "if isinstance(input_values, tf.Tensor) or input_values is None: output[parameter_names[0]] = input_values", "models. \"\"\" config_class = HubertConfig base_model_prefix = \"hubert\" main_input_name =", "strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states:", "SpecAugment along feature axis if self.config.mask_feature_prob > 0: mask_feature_indices =", "hidden_states, 0) return hidden_states def call( self, input_values: tf.Tensor, attention_mask:", "{type(input_values)} is not allowed only {allowed_types} is accepted for {parameter_names[0]}.\"", "bsz), proj_shape) key_states = tf.reshape(key_states, proj_shape) value_states = tf.reshape(value_states, proj_shape)", "hidden_states = self.layer_norm(hidden_states) hidden_states, attn_weights, _ = self.attention( hidden_states, attention_mask=attention_mask,", "of all elements. 
however due to overlaps, the actual number", "add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_hubert import HubertConfig logger =", "= 0, ) -> tf.Tensor: \"\"\" Computes random mask spans", "hidden_states: tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.layer_norm(hidden_states)", "or `tf.Tensor` of shape `({0}, hidden_size)`, *optional*): Optionally, instead of", "= get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states =", "corresponds to a *sentence B* token. [What are token type", "hidden_states + position_embeddings hidden_states = self.layer_norm(hidden_states) hidden_states = self.dropout(hidden_states, training=training)", "i in range(config.num_hidden_layers)] def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor]", "The config of the running model. **kwargs: The inputs of", "TFHubertWeightNormConv1D( filters=config.hidden_size, kernel_size=config.num_conv_pos_embeddings, groups=config.num_conv_pos_embedding_groups, explicit_padding=config.num_conv_pos_embeddings // 2, name=\"conv\", ) self.padding", "-> TFCausalLMOutput: hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns", "self.encoder = TFHubertEncoderStableLayerNorm(config, name=\"encoder\") else: self.encoder = TFHubertEncoder(config, name=\"encoder\") def", "of type {type(input_values)} is not allowed only {allowed_types} is accepted", "from datasets import load_dataset >>> import soundfile as sf >>>", "is not None and type(output[\"args\"]) == tf.Tensor: tensor_name = output[\"args\"].name.split(\":\")[0]", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer with Wav2Vec2->Hubert class TFHubertLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "modules like dropout modules have different behaviors between 
training and", "= None, layer_head_mask: Optional[tf.Tensor] = None, training: Optional[bool] = False,", "will prevent masking padded elements mask_prob: probability for each token", "https://huggingface.co/models?filter=hubert ] LARGE_NEGATIVE = -1e8 # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing def", "= shape_list(batch_indices) # broadcast batch dim to indices_shape broad_casted_batch_dims =", "key/value_states. # Further calls to cross_attention layer can then reuse", "this case is always the first parameter, then `input_values` output[\"input_values\"]", "Optional[tf.Tensor] = None, output_attentions: Optional[tf.Tensor] = None, output_hidden_states: Optional[tf.Tensor] =", "tf.Tensor) or input_values is None: output[parameter_names[0]] = input_values else: raise", "if self.groups == -1: self.groups = dim def _check_size_of_dimensions(self, input_shape):", "tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_weights = tf.nn.softmax(attn_weights, axis=-1)", "of the number of channels (\" + str(dim) + \").\"", "See [`PreTrainedTokenizer.__call__`] and [`PreTrainedTokenizer.encode`] for details. 
[What are input IDs?](../glossary#input-ids)", "keepdims=True) gamma, beta = self._get_reshaped_weights(input_shape) normalized_inputs = tf.nn.batch_normalization( reshaped_inputs, mean=mean,", "if output_attentions else None if attention_mask is not None: hidden_states", "\" f\"Use `{self.__class__.__bases__[0].__name__}` instead.\", FutureWarning, ) class TFHubertFeatureProjection(tf.keras.layers.Layer): def __init__(self,", "defined dimension \" \"but the layer received an input with", "tensor_name in parameter_names: output[tensor_name] = input else: output[parameter_names[i]] = input", "If you wish \" \"to train/fine-tine this model, you need", ">>> from datasets import load_dataset >>> import soundfile as sf", "Whether or not to return a [`~file_utils.ModelOutput`] instead of a", ") class TFHubertModel(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config,", "only computed for the tokens with labels in `[0, ...,", "return (one_cst - expanded_mask) * LARGE_NEGATIVE # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm", "\"Number of groups (\" + str(self.groups) + \") must be", "be removed in a future version, use `past_key_values` instead.\", FutureWarning,", "= TFHubertMainLayer(config, name=\"hubert\") self.dropout = tf.keras.layers.Dropout(config.final_dropout) self.lm_head = tf.keras.layers.Dense(config.vocab_size, name=\"lm_head\")", "_ = sf.read(batch[\"file\"]) ... batch[\"speech\"] = speech ... 
return batch", "tf.keras.layers.Dropout(config.activation_dropout) self.intermediate_dense = tf.keras.layers.Dense( units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"intermediate_dense\", ) self.intermediate_act_fn", "past_key_value[0] value_states = past_key_value[1] elif is_cross_attention: # cross_attentions key_states =", "self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape) self.built = True super().build(input_shape) def call(self, inputs): input_shape", "using [`BertTokenizer`]. See [`PreTrainedTokenizer.__call__`] and [`PreTrainedTokenizer.encode`] for details. [What are", "= key_value_states is not None bsz, tgt_len, embed_dim = shape_list(hidden_states)", "config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config self.pos_conv_embed = TFHubertPositionalConvEmbedding(config,", "- 1) ] elif config.feat_extract_norm == \"layer\": conv_layers = [", "trainable=True, ) self.bias = self.add_weight(name=\"bias\", shape=(self.filters,), initializer=\"zeros\", trainable=True) def call(self,", "hidden_states def call( self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor] = None,", "bias_initializer=\"he_normal\", **kwargs, ) self.explicit_padding = explicit_padding self.filter_axis = 2 self.initialized", "have to be disabled in other modes than eager. 
if", "constraint=self.gamma_constraint, ) else: self.gamma = None def _add_beta_weight(self, input_shape): dim", "of size {(bsz, 1, tgt_len, src_len)}, but is {shape_list(attention_mask)}\", )", "tf.Variable(tf.transpose(self.kernel), name=\"weight_v\", trainable=True) self.weight_v = self.kernel self.weight_g = self.add_weight( name=\"weight_g\",", "trick will do for now - see https://github.com/tensorflow/tensorflow/issues/9260 for more", "class TFHubertFeedForward(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.intermediate_dropout =", "list of varying length with one or several input Tensors", "= v elif k not in parameter_names and \"args\" not", "and each example must have the shape `({0})`): Indices of", "be smaller (unless no_overlap is True) mask_length: size of the", "def freeze_feature_encoder(self): \"\"\" Calling this function will disable the gradient", "argument is deprecated and will be removed in a future", "name=\"attention\", ) self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.feed_forward", "for k, v in kwargs.items(): if isinstance(v, allowed_types) or v", "2.0 models accepts two formats as inputs: - having all", "= None, output_hidden_states: Optional[tf.Tensor] = None, return_dict: Optional[bool] = None,", "tf.int32) target_lengths = tf.reduce_sum(labels_mask, axis=-1) loss = tf.nn.ctc_loss( logits=logits, labels=labels,", "apply SpecAugment along time axis mask_time_indices = _compute_mask_indices( (batch_size, sequence_length),", "attentions=all_self_attentions, ) @keras_serializable class TFHubertMainLayer(tf.keras.layers.Layer): config_class = HubertConfig def __init__(self,", "will be removed in a future version, use `past_key_values` instead.\",", "indices_shape = shape_list(batch_indices) # broadcast batch dim to indices_shape broad_casted_batch_dims", "for 
padding input_shape[-2] += self.explicit_padding * 2 super().build(input_shape) self.kernel =", "hidden_states = self.feature_extractor( tf.cast(inputs[\"input_values\"], tf.float32), training=inputs[\"training\"] ) if inputs[\"attention_mask\"] is", "16000), tf.float32) dummy_inputs = { \"input_values\": input_values, \"attention_mask\": tf.cast(tf.not_equal(input_values, pad_token),", "to `-100` are ignored (masked), the loss is only computed", "`model([input_values, attention_mask, token_type_ids])` - a dictionary with one or several", "Optional[bool] = None, return_dict: Optional[bool] = None, training: Optional[bool] =", "of shape `({0}, hidden_size)`, *optional*): Optionally, instead of passing `input_values`", "Tensor with `input_values` only and nothing else: `model(inputs_ids)` - a", ") def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] = None,", "tf >>> from transformers import Wav2Vec2Processor, TFHubertForCTC >>> from datasets", "tf.keras.backend.int_shape(inputs) tensor_input_shape = tf.shape(inputs) reshaped_inputs, group_shape = self._reshape_into_groups(inputs, input_shape, tensor_input_shape)", "= tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs, attentions=attns)", "return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = encoder_outputs[0] if not inputs[\"return_dict\"]: return", "dim < self.groups: raise ValueError( \"Number of groups (\" +", "explicit_padding self.filter_axis = 2 self.initialized = False self.kernel_norm_axes = tf.constant([0,", "(input_shape[self.axis] // self.groups) == 1 if not is_instance_norm: axis =", "= _conv_out_length(input_lengths, kernel_size, stride) return input_lengths def _mask_hidden_states(self, hidden_states: tf.Tensor,", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "specific head on top.\", 
HUBERT_START_DOCSTRING, ) class TFHubertModel(TFHubertPreTrainedModel): def __init__(self,", "self.layer_norm(hidden_states) hidden_states = self.projection(hidden_states) hidden_states = self.dropout(hidden_states, training=training) return hidden_states", "in enumerate(input_values): # EagerTensors don't allow to use the .name", "is None: output[parameter_names[0]] = input_values else: raise ValueError( f\"Data of", "= None, return_dict: Optional[bool] = None, training: Optional[bool] = False,", "self.groups) group_shape = tf.stack(group_shape) reshaped_inputs = tf.reshape(inputs, group_shape) return reshaped_inputs,", "tf.transpose( tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2, 1, 3)", "output.items() if k in [\"return_dict\", \"output_attentions\", \"output_hidden_states\", \"use_cache\"] } output.update(booleans_processing(config=config,", "-1) for conv_layer in self.conv_layers: hidden_states = conv_layer(hidden_states) return hidden_states", "= True self.groups = groups self.axis = axis self.epsilon =", "is_decoder self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"k_proj\") self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias,", "output_attentions: Optional[bool] = False, output_hidden_states: Optional[bool] = False, return_dict: Optional[bool]", "inputs): if not self.initialized: self._init_norm() self.initialized = True self._normalize_kernel() padded_inputs", "np.ndarray]` and each example must have the shape `({0})`): Indices", "label_length=target_lengths, blank_index=self.config.pad_token_id, logits_time_major=False, ) if self.config.ctc_loss_reduction == \"sum\": loss =", "a *sentence B* token. 
[What are token type IDs?](../glossary#token-type-ids) position_ids", "tf.keras.constraints.serialize(self.beta_constraint), \"gamma_constraint\": tf.keras.constraints.serialize(self.gamma_constraint), } base_config = super().get_config() return {**base_config, **config}", "its parameters will not be updated during training. \"\"\" warnings.warn(", "class TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.attention =", "mask_length: size of the mask min_masks: minimum number of masked", "value_states) # The tf.debugging asserts are not compliant with XLA", ") self.bias = self.add_weight(name=\"bias\", shape=(self.filters,), initializer=\"zeros\", trainable=True) def call(self, inputs):", "(`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*): Segment token indices", "+ encoder_outputs[1:] return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) class TFHubertPreTrainedModel(TFPreTrainedModel):", "hidden_states = outputs[0] hidden_states = self.dropout(hidden_states, training=inputs[\"training\"]) logits = self.lm_head(hidden_states)", "Calling this function will disable the gradient computation for the", "from .configuration_hubert import HubertConfig logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"HubertConfig\"", "self.num_heads, self.head_dim)), (0, 2, 1, 3)) def call( self, hidden_states:", "not load the weights associated with the model, only the", "self, hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] =", "None): \"\"\" Masks extracted features along time axis and/or along", "Union import numpy as np import tensorflow as tf from", "directly pass an embedded representation. This is useful if you", "a dictionary with one or several input Tensors associated to", "B* token. 
[What are token type IDs?](../glossary#token-type-ids) position_ids (`np.ndarray` or", "super().__init__(config, **kwargs) warnings.warn( f\"The class `{self.__class__.__name__}` has been depreciated \"", "if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(layer_head_mask), [self.num_heads], message=f\"Head mask for a single", "attention_mask=attention_mask, output_attentions=output_attentions, training=training, ) hidden_states = layer_outputs[0] if output_attentions: all_self_attentions", "= super().get_config() return {**base_config, **config} def compute_output_shape(self, input_shape): return input_shape", "in `[-100, 0, ..., config.vocab_size]` (see `input_values` docstring) Tokens with", "for all its model (such as downloading or saving, resizing", "the layer continue layer_outputs = layer_module( hidden_states=hidden_states, attention_mask=attention_mask, output_attentions=output_attentions, training=training,", "from transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement def _sample_without_replacement(distribution, num_samples): \"\"\" Categorical sampling without replacement", "1]`: - 1 indicates the head is **not masked**, -", "__init__(self, config, **kwargs): super().__init__(config, **kwargs) warnings.warn( f\"The class `{self.__class__.__name__}` has", "self.add_weight( shape=(self.config.hidden_size,), initializer=\"uniform\", trainable=True, name=\"masked_spec_embed\" ) super().build(input_shape) def _get_feat_extract_output_lengths(self, input_lengths:", "`attentions` under returned tensors for more detail. 
This argument can", "build(self, input_shape): self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape) self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape) self.built =", "= \"zeros\", gamma_initializer: tf.keras.initializers.Initializer = \"ones\", beta_regularizer: tf.keras.regularizers.Regularizer = None,", "= input.name.split(\":\")[0] if tensor_name in parameter_names: output[tensor_name] = input else:", "+ str(self.axis) + \" of \" \"input tensor should have", "= self.kernel self.weight_g = self.add_weight( name=\"weight_g\", shape=(int(self.weight_v.shape[self.filter_axis]), 1, 1), initializer=\"ones\",", "hidden_states = encoder_outputs[0] if not inputs[\"return_dict\"]: return (hidden_states,) + encoder_outputs[1:]", "= [tensor_input_shape[i] for i in range(len(input_shape))] is_instance_norm = (input_shape[self.axis] //", "tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation)", "details. [What are input IDs?](../glossary#input-ids) attention_mask (`np.ndarray` or `tf.Tensor` of", "# `config.apply_spec_augment` can set masking to False if not getattr(self.config,", "= self.q_proj(hidden_states) * self.scaling # get key, value proj if", "input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions,", "Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... 
speech,", "`model(inputs_ids)` - a list of varying length with one or", "= 1e-3, center: bool = True, scale: bool = True,", "super().__init__(**kwargs) self.embed_dim = embed_dim self.num_heads = num_heads self.dropout = tf.keras.layers.Dropout(dropout)", "minimum number of masked spans Adapted from [fairseq's data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376). \"\"\"", "def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config self.feature_extractor", "self.layer = [ TFHubertEncoderLayerStableLayerNorm(config, name=f\"layers.{i}\") for i in range(config.num_hidden_layers) ]", "= (hidden_states,) if output_attentions: outputs += (attn_weights,) return outputs #", "-> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states, attn_weights,", ") # transform batch_indices to pair_indices pair_indices = tf.transpose(tf.concat([broad_casted_batch_dims, tf.reshape(batch_indices,", "logits_time_major=False, ) if self.config.ctc_loss_reduction == \"sum\": loss = tf.reduce_sum(loss) if", "self.axis = axis self.epsilon = epsilon self.center = center self.scale", "kernel_norm = tf.sqrt(tf.reduce_sum(tf.square(self.weight_v), axis=self.kernel_norm_axes)) self.weight_g.assign(kernel_norm[:, tf.newaxis, tf.newaxis]) def _normalize_kernel(self): \"\"\"Generate", "\"input tensor should have a defined dimension \" \"but the", "self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [", "`config.apply_spec_augment` can set masking to False if not getattr(self.config, \"apply_spec_augment\",", "tf.keras.initializers.get(beta_initializer) self.gamma_initializer = tf.keras.initializers.get(gamma_initializer) self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer) 
self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer)", "is_instance_norm: axis = -2 if self.axis == -1 else self.axis", "\").\" ) if dim % self.groups != 0: raise ValueError(", "ANY KIND, either express or implied. # See the License", "under the License. \"\"\" TensorFlow Hubert model.\"\"\" import inspect import", "the shape `({0})`): Indices of input sequence tokens in the", "shape=shape, name=\"beta\", initializer=self.beta_initializer, regularizer=self.beta_regularizer, constraint=self.beta_constraint, ) else: self.beta = None", "output) if loss is not None else output return TFCausalLMOutput(", "# See the License for the specific language governing permissions", "output_hidden_states (`bool`, *optional*): Whether or not to return the hidden", "units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"output_dense\", ) self.output_dropout = tf.keras.layers.Dropout(config.hidden_dropout) def call(self,", "epsilon=config.layer_norm_eps, name=\"layer_norm\") def call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states =", "input_shape): self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape) self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape) self.built = True", "- a list of varying length with one or several", "list of symbolic inputs, each input has to be named", "a given shape Args: shape: the the shape for which", "tf.cast(mask, dtype=one_cst.dtype) expanded_mask = tf.tile(mask[:, None, None, :], (1, 1,", "= self.dropout(hidden_states, training=training) hidden_states = attn_residual + hidden_states hidden_states =", "\"if\" case) # if uni-directional self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor)", "hidden_size), mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length, ) hidden_states 
= tf.where(mask_feature_indices[:, tf.newaxis, :], hidden_states,", "tf.float32) dummy_inputs = { \"input_values\": input_values, \"attention_mask\": tf.cast(tf.not_equal(input_values, pad_token), tf.float32),", "TFHubertFeatureExtractor(TFHubertFeatureEncoder): def __init__(self, config, **kwargs): super().__init__(config, **kwargs) warnings.warn( f\"The class", "proper output we have to add this exception if \"args\"", "other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_output), [bsz *", "* mask_length)) offsets = tf.range(mask_length)[tf.newaxis, tf.newaxis, :] offsets = tf.tile(offsets,", "from \"Attention Is All You Need\"\"\" def __init__( self, embed_dim:", "2 super().build(input_shape) self.kernel = tf.Variable(tf.transpose(self.kernel), name=\"weight_v\", trainable=True) self.weight_v = self.kernel", "inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[tf.Tensor] = None, output_hidden_states: Optional[tf.Tensor]", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer with Wav2Vec2->Hubert class TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self,", "\"\"\"Generate normalized weights.\"\"\" kernel = tf.nn.l2_normalize(self.weight_v, axis=self.kernel_norm_axes) * tf.transpose(self.weight_g) self.kernel", "hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def serving_output(self, output: TFCausalLMOutput) -> TFCausalLMOutput: hs", "Union[TFBaseModelOutput, Tuple[tf.Tensor]]: all_hidden_states = () if output_hidden_states else None all_self_attentions", "in graph mode the value in the config will be", "= self.conv(hidden_states) hidden_states = self.padding(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "operations that are NOT supported on CPU. 
If you wish", "the method with LayerCall.__call__(args, **kwargs) # So to respect the", "(attn_weights,) return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder with Wav2Vec2->Hubert class", "beta = None if self.scale: gamma = tf.reshape(self.gamma, broadcast_shape) if", "] def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] = None,", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer with Wav2Vec2->Hubert class TFHubertEncoderLayer(tf.keras.layers.Layer): def __init__(self,", "**kwargs: The inputs of the model. Returns: Two lists, one", "= tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"v_proj\") self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"out_proj\") def", "SpecAugment along time axis mask_time_indices = _compute_mask_indices( (batch_size, sequence_length), mask_prob=self.config.mask_time_prob,", "def _shape(self, tensor: tf.Tensor, seq_len: int, bsz: int): return tf.transpose(tf.reshape(tensor,", "Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention with TFBart->TFHubert class TFHubertAttention(tf.keras.layers.Layer): \"\"\"Multi-headed attention from", "output length formula taken # from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return (input_length -", "loss = tf.reduce_sum(loss) if self.config.ctc_loss_reduction == \"mean\": loss = tf.reduce_mean(loss)", "= self.add_weight( shape=shape, name=\"beta\", initializer=self.beta_initializer, regularizer=self.beta_regularizer, constraint=self.beta_constraint, ) else: self.beta", "load the weights associated with the model, only the configuration.", "logits = self.lm_head(hidden_states) if labels is not None: if tf.reduce_max(labels)", "self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"q_proj\") self.v_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"v_proj\")", "import numpy as np import tensorflow as tf from 
...activations_tf", "def __init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.hubert", "instead of a plain tuple. This argument can be used", "map_to_array(batch): ... speech, _ = sf.read(batch[\"file\"]) ... batch[\"speech\"] = speech", "tf.keras.regularizers.Regularizer = None, gamma_regularizer: tf.keras.regularizers.Regularizer = None, beta_constraint: tf.keras.constraints.Constraint =", "hidden_states=hidden_states, attention_mask=attention_mask, output_attentions=output_attentions, training=training, ) hidden_states = layer_outputs[0] if output_attentions:", "(bsz, self.num_heads, tgt_len, src_len)) return attn_output, attn_weights, past_key_value # Copied", "def __init__(self, config: HubertConfig, **kwargs: Any) -> None: super().__init__(**kwargs) self.conv", "attn_weights = tf.nn.softmax(attn_weights, axis=-1) if layer_head_mask is not None: #", "(see `input_values` docstring) Tokens with indices set to `-100` are", "// self.groups broadcast_shape.insert(self.axis, self.groups) else: broadcast_shape[self.axis] = self.groups return broadcast_shape", "implemented. The gumbel-max trick will do for now - see", "hidden_states = self.dropout(hidden_states, training=training) hidden_states = attn_residual + hidden_states hidden_states", "Args: func (`callable`): The callable function of the TensorFlow model.", "hidden_states = self._mask_hidden_states(hidden_states, mask_time_indices=mask_time_indices) encoder_outputs = self.encoder( hidden_states, attention_mask=attention_mask, output_attentions=inputs[\"output_attentions\"],", "be guaranteed during the training. 
Args: func (`callable`): The callable", "str(self.axis) + \" of \" \"input tensor should have a", "self.config.ctc_loss_reduction == \"mean\": loss = tf.reduce_mean(loss) else: loss = None", "tf.range(mask_length)[tf.newaxis, tf.newaxis, :] offsets = tf.tile(offsets, (batch_size, num_masked_spans, 1)) offsets", "} return dummy_inputs def __init__(self, config, *inputs, **kwargs): super().__init__(config, *inputs,", "= tf.reshape(self.gamma, broadcast_shape) if self.center: beta = tf.reshape(self.beta, broadcast_shape) return", "tf.Tensor, key_value_states: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]] = None, attention_mask:", "= None if not inputs[\"return_dict\"]: output = (logits,) + outputs[1:]", "super().__init__( filters=filters, kernel_size=kernel_size, groups=groups, padding=\"valid\", use_bias=True, bias_initializer=\"he_normal\", **kwargs, ) self.explicit_padding", "1D convolutional layer output length formula taken # from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html", "only in eager mode, in graph mode the value in", "feature axis according to [SpecAugment](https://arxiv.org/abs/1904.08779). 
\"\"\" batch_size, sequence_length, hidden_size =", "add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_hubert import HubertConfig logger", "position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states =", "return batch >>> ds = load_dataset(\"hf-internal-testing/librispeech_asr_dummy\", \"clean\", split=\"validation\") >>> ds", "output[parameter_names[i]] = input elif isinstance(input, allowed_types) or input is None:", "def call(self, inputs): input_shape = tf.keras.backend.int_shape(inputs) tensor_input_shape = tf.shape(inputs) reshaped_inputs,", "are ignored (masked), the loss is only computed for the", "2 == 0 else 0 def call(self, hidden_states): if self.num_pad_remove", "HubertConfig, **kwargs): super().__init__(**kwargs) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.projection = tf.keras.layers.Dense(", "HubertConfig, **kwargs): super().__init__(**kwargs) self.intermediate_dropout = tf.keras.layers.Dropout(config.activation_dropout) self.intermediate_dense = tf.keras.layers.Dense( units=config.intermediate_size,", "getattr(self.config, \"apply_spec_augment\", True): return hidden_states if mask_time_indices is not None:", "associated to the input names given in the docstring: `model({\"input_values\":", "value_states = self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is not None:", "token_type_ids: Optional[tf.Tensor] = None, position_ids: Optional[tf.Tensor] = None, head_mask: Optional[tf.Tensor]", "= config self.pos_conv_embed = TFHubertPositionalConvEmbedding(config, name=\"pos_conv_embed\") self.layer_norm = 
tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\")", "training: bool = False, ) -> Tuple[tf.Tensor]: attn_residual = hidden_states", "= \"A MAN SAID TO THE UNIVERSE SIR I EXIST\"", "return_dict=return_dict, training=training, ) inputs[\"output_hidden_states\"] = ( inputs[\"output_hidden_states\"] if inputs[\"output_hidden_states\"] else", "int, bsz: int): return tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads, self.head_dim)), (0,", "HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config self.pos_conv_embed = TFHubertPositionalConvEmbedding(config, name=\"pos_conv_embed\")", "def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.attention = TFHubertAttention( embed_dim=config.hidden_size,", "labels_mask = tf.cast(labels >= 0, tf.int32) target_lengths = tf.reduce_sum(labels_mask, axis=-1)", "positional arguments. This second option is useful when using [`tf.keras.Model.fit`]", "docstring: `model({\"input_values\": input_values, \"token_type_ids\": token_type_ids})` </Tip> Args: config ([`HubertConfig`]): Model", "Transformers v5.\" \"Please use the equivalent `freeze_feature_encoder` method instead.\", FutureWarning,", "units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"intermediate_dense\", ) self.intermediate_act_fn = get_tf_activation(config.hidden_act) self.output_dense =", "in parameter_names: logger.warning( f\"The parameter {k} does not belongs to", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "is_decoder=False, name=\"attention\", ) self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\")", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder with Wav2Vec2->Hubert class 
TFHubertEncoder(tf.keras.layers.Layer): def __init__(self,", "None: if tf.reduce_max(labels) >= self.config.vocab_size: raise ValueError(f\"Label values must be", "= ( inputs[\"attention_mask\"] if inputs[\"attention_mask\"] is not None else tf.ones_like(inputs[\"input_values\"],", "stride) return input_lengths def _mask_hidden_states(self, hidden_states: tf.Tensor, mask_time_indices: Optional[tf.Tensor] =", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder with Wav2Vec2->Hubert class TFHubertEncoder(tf.keras.layers.Layer): def __init__(self, config:", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "**kwargs): super().__init__(**kwargs) self.intermediate_dropout = tf.keras.layers.Dropout(config.activation_dropout) self.intermediate_dense = tf.keras.layers.Dense( units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range),", "a single layer should be of size {(self.num_heads)}, but is", "not None: # compute real output lengths according to convolution", "Args: config ([`HubertConfig`]): Model configuration class with all the parameters", "0 else 0 def call(self, hidden_states): if self.num_pad_remove > 0:", "= self.dropout(hidden_states, training=inputs[\"training\"]) logits = self.lm_head(hidden_states) if labels is not", "__init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.projection", "a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it as a regular TF 2.0", "self.kernel self.weight_g = self.add_weight( name=\"weight_g\", shape=(int(self.weight_v.shape[self.filter_axis]), 1, 1), initializer=\"ones\", dtype=self.weight_v.dtype,", "to normalize your batch axis. Do you want to \"", "_sample_without_replacement(distribution, num_samples): \"\"\" Categorical sampling without replacement is currently not", "- 1]`. 
[What are position IDs?](../glossary#position-ids) head_mask (`np.ndarray` or `tf.Tensor`", ") for name in parameter_names: if name not in list(output.keys())", "removed in Transformers v5.\" \"Please use the equivalent `freeze_feature_encoder` method", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "graph mode the value in the config will be used", "- a dictionary with one or several input Tensors associated", "TensorFlow model. config ([`PretrainedConfig`]): The config of the running model.", "hidden_states = self.intermediate_dropout(hidden_states, training=training) hidden_states = self.output_dense(hidden_states) hidden_states = self.output_dropout(hidden_states,", "library implements for all its model (such as downloading or", "= tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint = tf.keras.constraints.get(beta_constraint) self.gamma_constraint = tf.keras.constraints.get(gamma_constraint) self._check_axis() def", "tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.activation(hidden_states) return", "if self.is_decoder: # if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all", "1]`: - 1 for tokens that are **not masked**, -", "= tf.keras.initializers.get(beta_initializer) self.gamma_initializer = tf.keras.initializers.get(gamma_initializer) self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer) self.gamma_regularizer =", "def freeze_feature_extractor(self): \"\"\" Calling this function will disable the gradient", "return hidden_states if mask_time_indices is not None: # apply SpecAugment", "is {shape_list(attention_mask)}\", ) attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype) attn_weights = tf.reshape(attn_weights,", "more detail. 
This argument can be used only in eager", "tf.Tensor]` or `Dict[str, np.ndarray]` and each example must have the", "# you may not use this file except in compliance", "def call( self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor] = None, token_type_ids:", "(input_length - kernel_size) // stride + 1 for kernel_size, stride", "Connectionist Temporal Classification (CTC).\"\"\", HUBERT_START_DOCSTRING, ) class TFHubertForCTC(TFHubertPreTrainedModel): def __init__(self,", "float = 1e-3, center: bool = True, scale: bool =", "name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) self.layer_norm = TFHubertGroupNorm(groups=self.out_conv_dim, epsilon=config.layer_norm_eps, name=\"layer_norm\")", "a future version, use `input_values` instead.\", FutureWarning, ) output[\"input_values\"] =", "is_instance_norm: broadcast_shape[self.axis] = input_shape[self.axis] // self.groups broadcast_shape.insert(self.axis, self.groups) else: broadcast_shape[self.axis]", "1, 3) ) attn_output = tf.reshape(attn_output, (bsz, tgt_len, embed_dim)) attn_output", "for description) dropout_probability = np.random.uniform(0, 1) if training and (dropout_probability", "1) if training and (dropout_probability < self.config.layerdrop): # skip the", "pruning heads etc.) This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass.", "# compute number of masked spans in batch num_masked_spans =", ") self.activation = get_tf_activation(config.feat_extract_activation) self.layer_norm = TFHubertGroupNorm(groups=self.out_conv_dim, epsilon=config.layer_norm_eps, name=\"layer_norm\") def", "broadcast_shape) return gamma, beta def _check_if_input_shape_is_none(self, input_shape): dim = input_shape[self.axis]", "spec_aug_mask_idxs = spec_aug_mask_idxs + offsets # scatter indices to mask", "tgt_len, src_len)) + attention_mask attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads,", "are **masked**. 
[What are attention masks?](../glossary#attention-mask) token_type_ids (`np.ndarray` or `tf.Tensor`", "if you want more control over how to convert `input_values`", "+ (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for description) dropout_probability", "without replacement is currently not implemented. The gumbel-max trick will", "min_masks) # make sure num masked indices <= sequence_length if", "key/value_states (first \"if\" case) # if uni-directional self-attention (decoder) save", "= None if self.scale: gamma = tf.reshape(self.gamma, broadcast_shape) if self.center:", "k, v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states =", "key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1, bsz)", "logit_length=input_lengths, label_length=target_lengths, blank_index=self.config.pad_token_id, logits_time_major=False, ) if self.config.ctc_loss_reduction == \"sum\": loss", "start of the span to be masked. this will be", "to be disabled in other modes than eager. if tf.executing_eagerly():", "Keras Model and refer to the TF 2.0 documentation for", "value_states = tf.reshape(value_states, proj_shape) src_len = shape_list(key_states)[1] attn_weights = tf.matmul(query_states,", "self.layer_norm(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding", "= layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) #", "pass an embedded representation. This is useful if you want", "attention key/value_states. # Further calls to cross_attention layer can then", "can choose to directly pass an embedded representation. 
This is", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm with Wav2Vec2->Hubert class TFHubertGroupNorm(tf.keras.layers.Layer): \"\"\" From tensorflow-addons https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization \"\"\"", "self.gamma_initializer = tf.keras.initializers.get(gamma_initializer) self.beta_regularizer = tf.keras.regularizers.get(beta_regularizer) self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint", "class TFHubertFeatureEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs: Any) -> None:", "if layer_head_mask is not None: # The tf.debugging asserts are", "inputs. Indices are selected in `[0, 1]`: - 0 corresponds", "Check out the [`~PreTrainedModel.from_pretrained`] method to load the model weights.", "-1)) attention_mask = tf.sequence_mask( output_lengths, maxlen=shape_list(hidden_states)[1], dtype=hidden_states.dtype ) hidden_states =", "compliant with XLA then they # have to be disabled", "for a single layer should be of size {(self.num_heads)}, but", "enumerate(input_values): # EagerTensors don't allow to use the .name property", "or not to use the model in training mode (some", "which to compute masks. 
should be of size 2 where", "shape=shape, name=\"gamma\", initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, constraint=self.gamma_constraint, ) else: self.gamma = None", "else 1 self.out_conv_dim = config.conv_dim[layer_id] self.conv = tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id],", "for the tokens with labels in `[0, ..., config.vocab_size]` Returns:", "\"Attention Is All You Need\"\"\" def __init__( self, embed_dim: int,", "config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.hubert = TFHubertMainLayer(config,", "+ self.feed_forward(self.final_layer_norm(hidden_states)) outputs = (hidden_states,) if output_attentions: outputs += (attn_weights,)", "... return batch >>> ds = load_dataset(\"hf-internal-testing/librispeech_asr_dummy\", \"clean\", split=\"validation\") >>>", "tensor_input_shape = tf.shape(inputs) reshaped_inputs, group_shape = self._reshape_into_groups(inputs, input_shape, tensor_input_shape) normalized_inputs", "position_embeddings hidden_states = self.dropout(hidden_states, training=training) for i, layer_module in enumerate(self.layer):", "Mask values selected in `[0, 1]`: - 1 indicates the", "bias_initializer=\"zeros\", name=\"output_dense\", ) self.output_dropout = tf.keras.layers.Dropout(config.hidden_dropout) def call(self, hidden_states: tf.Tensor,", "one or several input Tensors IN THE ORDER given in", "usage and behavior. 
<Tip> TF 2.0 models accepts two formats", "_scatter_values_on_batch_indices(values, batch_indices, output_shape): \"\"\" Scatter function as in PyTorch with", "float, mask_length: int, min_masks: int = 0, ) -> tf.Tensor:", "list)): for i, input in enumerate(input_values): # EagerTensors don't allow", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "0.0, is_decoder: bool = False, bias: bool = True, **kwargs,", "name != \"args\": output[name] = kwargs.pop(name, signature[name].default) # When creating", "attn_weights = tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( attn_weights,", "layer_head_mask is not None: # The tf.debugging asserts are not", "training mode (some modules like dropout modules have different behaviors", "[tensor_input_shape[i] for i in range(len(input_shape))] is_instance_norm = (input_shape[self.axis] // self.groups)", "= input_values else: raise ValueError( f\"Data of type {type(input_values)} is", "sure num masked indices <= sequence_length if num_masked_spans * mask_length", "= tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [ TFHubertEncoderLayerStableLayerNorm(config, name=f\"layers.{i}\") for i in", "Apache License, Version 2.0 (the \"License\"); # you may not", "trainable=True, name=\"masked_spec_embed\" ) super().build(input_shape) def _get_feat_extract_output_lengths(self, input_lengths: tf.Tensor): \"\"\" Computes", "be removed in Transformers v5. 
\" f\"Use `{self.__class__.__bases__[0].__name__}` instead.\", FutureWarning,", "is not None: # The tf.debugging asserts are not compliant", "inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = None, output_hidden_states: Optional[bool]", "def call(self, inputs): if not self.initialized: self._init_norm() self.initialized = True", "class TFHubertFeatureExtractor(TFHubertFeatureEncoder): def __init__(self, config, **kwargs): super().__init__(config, **kwargs) warnings.warn( f\"The", "None: super().__init__(**kwargs) self.in_conv_dim = config.conv_dim[layer_id] if layer_id > 0 else", "Tuple[tf.Tensor, Optional[tf.Tensor]]: \"\"\"Input shape: Batch x Time x Channel\"\"\" #", "decoder key/value_states. Further calls to uni-directional self-attention # can concat", "self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) key_states =", "bool = True, beta_initializer: tf.keras.initializers.Initializer = \"zeros\", gamma_initializer: tf.keras.initializers.Initializer =", "= tf.keras.layers.Dropout(config.activation_dropout) self.intermediate_dense = tf.keras.layers.Dense( units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"intermediate_dense\", )", "-1 if self.axis == -1 else self.axis - 1 group_reduction_axes.pop(axis)", "ValueError(\"`mask_length` has to be bigger than 0.\") if mask_length >", "input_values_processing(func, config, input_values, **kwargs): \"\"\" Process the input of each", "normalized_inputs return outputs def get_config(self): config = { \"groups\": self.groups,", "where first element is batch size and 2nd is timesteps", "inputs[\"output_hidden_states\"] else self.config.output_hidden_states ) inputs[\"output_attentions\"] = ( inputs[\"output_attentions\"] if inputs[\"output_attentions\"]", "[`BertTokenizer`]. See [`PreTrainedTokenizer.__call__`] and [`PreTrainedTokenizer.encode`] for details. 
[What are input", "tf.scatter_nd(pair_indices, tf.reshape(values, [-1]), output_shape) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices def _compute_mask_indices(", "2.0 documentation for all matter related to general usage and", "= all_self_attentions + (layer_outputs[1],) hidden_states = self.layer_norm(hidden_states) if output_hidden_states: all_hidden_states", "two formats as inputs: - having all inputs as keyword", "if v is not None) return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions,", "= model(input_values).last_hidden_state ```\"\"\" inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask,", "allowed only {allowed_types} is accepted for {k}.\") if isinstance(input_values, (tuple,", "gradient computation for the feature encoder so that its parameter", "as a regular TF 2.0 Keras Model and refer to", "tf.Tensor: # Tensor names have always the pattern `name:id` then", "self.is_decoder: # if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross", "ValueError(f\"Data of type {type(v)} is not allowed only {allowed_types} is", "control over how to convert `input_values` indices into associated vectors", "outputs def serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else", "input names given in the docstring: `model({\"input_values\": input_values, \"token_type_ids\": token_type_ids})`", "input_shape[self.axis] if self.groups == -1: self.groups = dim def _check_size_of_dimensions(self,", "the self-attention modules. 
Mask values selected in `[0, 1]`: -", "initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, constraint=self.gamma_constraint, ) else: self.gamma = None def _add_beta_weight(self,", "enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) # add", "reuse all cross-attention # key/value_states (first \"if\" case) # if", "attentions=attns) @add_start_docstrings( \"\"\"TFHubert Model with a `language modeling` head on", "self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.feed_forward = TFHubertFeedForward(config,", "return self.serving_output(output) HUBERT_START_DOCSTRING = r\"\"\" This model inherits from [`TFPreTrainedModel`].", "parameter {k} does not belongs to the parameter list {parameter_names}", "head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) outputs = self.hubert(", "in other modes than eager. 
if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(layer_head_mask), [self.num_heads],", "set to `-100` are ignored (masked), the loss is only", "[1, -1])], 0)) # scatter values to pair indices return", "output[\"args\"].name.split(\":\")[0] output[tensor_name] = output[\"args\"] else: # `args` in this case", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward with Wav2Vec2->Hubert class TFHubertFeedForward(tf.keras.layers.Layer): def __init__(self,", "in graph mode the value will always be set to", "super().__init__(**kwargs) self.intermediate_dropout = tf.keras.layers.Dropout(config.activation_dropout) self.intermediate_dense = tf.keras.layers.Dense( units=config.intermediate_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\",", "training=training) hidden_states = attn_residual + hidden_states hidden_states = hidden_states +", "in format (batch_dim, indixes) \"\"\" indices_shape = shape_list(batch_indices) # broadcast", "will be used instead. output_hidden_states (`bool`, *optional*): Whether or not", "only the # `name` part tensor_name = input.name.split(\":\")[0] if tensor_name", "tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"k_proj\") self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"q_proj\") self.v_proj =", "for all matter related to general usage and behavior. <Tip>", "TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config", "reserved. 
# # Licensed under the Apache License, Version 2.0", "1)) return (one_cst - expanded_mask) * LARGE_NEGATIVE # Copied from", "Union[TFCausalLMOutput, Tuple[tf.Tensor]]: r\"\"\" labels (`tf.Tensor` or `np.ndarray` of shape `(batch_size,", "+ \").\" ) if dim % self.groups != 0: raise", "- 1 indicates the head is **not masked**, - 0", "num_masked_spans * mask_length > sequence_length: num_masked_spans = sequence_length // mask_length", "= self._create_broadcast_shape(input_shape) gamma = None beta = None if self.scale:", "FutureWarning, ) output[\"input_values\"] = input_values.pop(\"inputs\") if \"decoder_cached_states\" in input_values: warnings.warn(", "instead.\", FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self): \"\"\" Calling this function", "self.weight_g.assign(kernel_norm[:, tf.newaxis, tf.newaxis]) def _normalize_kernel(self): \"\"\"Generate normalized weights.\"\"\" kernel =", "TFHubertFeedForward(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.intermediate_dropout = tf.keras.layers.Dropout(config.activation_dropout)", "\"\"\"Input shape: Batch x Time x Channel\"\"\" # if key_value_states", "return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer):", "not be updated during training. \"\"\" self.hubert.feature_extractor.trainable = False @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING)", "masked indices to masked spans spec_aug_mask_idxs = tf.expand_dims(spec_aug_mask_idxs, -1) spec_aug_mask_idxs", "of masked spans Adapted from [fairseq's data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376). \"\"\" batch_size, sequence_length", "indicate first and second portions of the inputs. 
Indices are", "output[\"input_values\"] = input_values.pop(\"inputs\") if \"decoder_cached_states\" in input_values: warnings.warn( \"The `decoder_cached_states`", "config file does not load the weights associated with the", "in compliance with the License. # You may obtain a", ":, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) elif self.config.mask_time_prob", "tf.reduce_sum(loss) if self.config.ctc_loss_reduction == \"mean\": loss = tf.reduce_mean(loss) else: loss", "layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) hidden_states =", "= config self.hubert = TFHubertMainLayer(config, name=\"hubert\") @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC) def", "padding mask of the same size as shape, which will", "typing import Any, Dict, Optional, Tuple, Union import numpy as", "inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = outputs[0] hidden_states", "past_key_value = (key_states, value_states) proj_shape = (bsz * self.num_heads, -1,", "{config.feat_extract_norm}, but has to be one of ['group', 'layer']\" )", "shape=(self.config.hidden_size,), initializer=\"uniform\", trainable=True, name=\"masked_spec_embed\" ) super().build(input_shape) def _get_feat_extract_output_lengths(self, input_lengths: tf.Tensor):", "tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.projection = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"projection\", )", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm with Wav2Vec2->Hubert class 
TFHubertGroupNorm(tf.keras.layers.Layer): \"\"\" From tensorflow-addons", "want more control over how to convert `input_values` indices into", "trainable=True) self.weight_v = self.kernel self.weight_g = self.add_weight( name=\"weight_g\", shape=(int(self.weight_v.shape[self.filter_axis]), 1,", "not None else output return TFCausalLMOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions,", "data_utils.py](https://github.com/pytorch/fairseq/blob/e0788f7007a8473a76db573985031f3c94201e79/fairseq/data/data_utils.py#L376). \"\"\" batch_size, sequence_length = shape if mask_length < 1:", "= input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds,", "permissions and # limitations under the License. \"\"\" TensorFlow Hubert", "in parameter_names: if name not in list(output.keys()) and name !=", "_shape(self, tensor: tf.Tensor, seq_len: int, bsz: int): return tf.transpose(tf.reshape(tensor, (bsz,", "hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding with Wav2Vec2->Hubert class TFHubertPositionalConvEmbedding(tf.keras.layers.Layer): def", "with the License. 
# You may obtain a copy of", "dtype=self.weight_v.dtype, trainable=True, ) self.bias = self.add_weight(name=\"bias\", shape=(self.filters,), initializer=\"zeros\", trainable=True) def", "{mask_length} and `sequence_length`: {sequence_length}`\" ) # compute number of masked", "tf.matmul(query_states, key_states, transpose_b=True) # The tf.debugging asserts are not compliant", "self.groups group_shape.insert(self.axis, self.groups) group_shape = tf.stack(group_shape) reshaped_inputs = tf.reshape(inputs, group_shape)", "tgt_len: Optional[int] = None, past_key_values_length: int = 0): \"\"\" Expands", "from ...modeling_tf_utils import TFPreTrainedModel, booleans_processing, get_initializer, keras_serializable from ...tf_utils import", "None, :], (1, 1, tgt_len, 1)) return (one_cst - expanded_mask)", "from transformers import Wav2Vec2Processor, TFHubertModel >>> from datasets import load_dataset", "{(bsz * self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}\", ) if", "chosen as start of the span to be masked. this", "hidden_states=all_hidden_states, attentions=all_self_attentions, ) @keras_serializable class TFHubertMainLayer(tf.keras.layers.Layer): config_class = HubertConfig def", "attn_weights, past_key_value # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward with Wav2Vec2->Hubert class TFHubertFeedForward(tf.keras.layers.Layer):", "= tf.tile(offsets, (batch_size, num_masked_spans, 1)) offsets = tf.reshape(offsets, (batch_size, num_masked_spans", "except in compliance with the License. 
# You may obtain", "the tokens with labels in `[0, ..., config.vocab_size]` Returns: Example:", "outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"],", "indices_shape broad_casted_batch_dims = tf.reshape( tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]), axis=-1), indices_shape), [1, -1] )", "self-attention (decoder) save Tuple(tf.Tensor, tf.Tensor) of # all previous decoder", "return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v", "\"\"\" Calling this function will disable the gradient computation for", "`False``): Whether or not to use the model in training", "ValueError( f\"Data of type {type(input_values)} is not allowed only {allowed_types}", "positional argument : - a single Tensor with `input_values` only", "return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = outputs[0] hidden_states = self.dropout(hidden_states, training=inputs[\"training\"])", "`[0, ..., config.vocab_size]` Returns: Example: ```python >>> import tensorflow as", "model, you need a GPU or a TPU\" ) @tf.function", "\"sum\": loss = tf.reduce_sum(loss) if self.config.ctc_loss_reduction == \"mean\": loss =", "tf.keras.regularizers.Regularizer = None, beta_constraint: tf.keras.constraints.Constraint = None, gamma_constraint: tf.keras.constraints.Constraint =", "= None, output_attentions: Optional[bool] = False, training: bool = False,", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "inputs[\"attention_mask\"] if inputs[\"attention_mask\"] is not None else tf.ones_like(inputs[\"input_values\"], dtype=tf.float32) )", "TFHubertEncoder(config, name=\"encoder\") def build(self, input_shape: tf.TensorShape): self.masked_spec_embed = self.add_weight( shape=(self.config.hidden_size,),", "TFHubertPreTrainedModel(TFPreTrainedModel): \"\"\" An abstract class to handle weights initialization and", "= (key_states, value_states) proj_shape = (bsz * self.num_heads, -1, self.head_dim)", "in range(config.num_feat_extract_layers) ] else: raise ValueError( f\"`config.feat_extract_norm` is {config.feat_extract_norm}, but", "None, training: bool = False, **kwargs: Any, ): inputs =", "= tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.layer_norm =", "\"token_type_ids\": token_type_ids})` </Tip> Args: config ([`HubertConfig`]): Model configuration class with", "+ str(dim) + \").\" ) def _check_axis(self): if self.axis ==", "(0, 0))) output = super().call(padded_inputs) return output # Copied from", "tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [TFHubertEncoderLayer(config, name=f\"layers.{i}\") for", "+ \") must be a \" \"multiple of the number", "* self.num_heads, tgt_len, src_len], message=f\"Attention weights should be of size", "np.ndarray) for k, v in kwargs.items(): if isinstance(v, allowed_types) or", "= tf.keras.constraints.get(gamma_constraint) self._check_axis() def build(self, input_shape): self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape)", "call(self, inputs): if not self.initialized: self._init_norm() self.initialized = True self._normalize_kernel()", "than eager. 
if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attention_mask), [bsz, 1, tgt_len, src_len],", "use_bias=config.conv_bias, name=\"conv\", ) self.layer_norm = tf.keras.layers.LayerNormalization(name=\"layer_norm\", epsilon=config.layer_norm_eps) self.activation = get_tf_activation(config.feat_extract_activation)", "be set to True. training (`bool`, *optional*, defaults to `False``):", "TFCausalLMOutput: hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns =", "always `None` past_key_value = (key_states, value_states) proj_shape = (bsz *", "self.q_proj(hidden_states) * self.scaling # get key, value proj if is_cross_attention", "model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method to", "the shape for which to compute masks. should be of", "a list of symbolic inputs, each input has to be", "1 self.out_conv_dim = config.conv_dim[layer_id] self.conv = tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id],", "ValueError( f\"embed_dim must be divisible by num_heads (got `embed_dim`: {self.embed_dim}\"", "!= 0: raise ValueError( \"Number of groups (\" + str(self.groups)", "predicted_ids = tf.argmax(logits, axis=-1) >>> transcription = processor.decode(predicted_ids[0]) >>> #", "layers. See `attentions` under returned tensors for more detail. This", "fill spec_aug_mask = tf.zeros((batch_size, sequence_length), dtype=tf.int32) # uniform distribution to", "Example: ```python >>> from transformers import Wav2Vec2Processor, TFHubertModel >>> from", "indicates the head is **not masked**, - 0 indicates the", "[hidden_states, all_hidden_states, all_self_attentions] if v is not None) return TFBaseModelOutput(", "axis=-1)) # assuming that padded tokens are filled with -100", "tf.reduce_max(labels) >= self.config.vocab_size: raise ValueError(f\"Label values must be <= vocab_size:", "is currently not implemented. 
The gumbel-max trick will do for", "False @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFCausalLMOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_values: tf.Tensor, attention_mask:", "inputs[\"return_dict\"] else self.config.return_dict outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"],", "self.encoder = TFHubertEncoder(config, name=\"encoder\") def build(self, input_shape: tf.TensorShape): self.masked_spec_embed =", "for i in range(config.num_hidden_layers)] def call( self, hidden_states: tf.Tensor, attention_mask:", "a real Tensor if type(input) == tf.Tensor: # Tensor names", "[`PreTrainedTokenizer.__call__`] and [`PreTrainedTokenizer.encode`] for details. [What are input IDs?](../glossary#input-ids) attention_mask", "output: del output[\"kwargs\"] boolean_dict = { k: v for k,", "shape_list(hidden_states) # get query proj query_states = self.q_proj(hidden_states) * self.scaling", "of all layers. 
See `hidden_states` under returned tensors for more", "if inputs[\"attention_mask\"] is not None else tf.ones_like(inputs[\"input_values\"], dtype=tf.float32) ) input_lengths", "i in range(config.num_hidden_layers) ] def call( self, hidden_states: tf.Tensor, attention_mask:", "in range(len(input_shape))] is_instance_norm = (input_shape[self.axis] // self.groups) == 1 if", "one for the missing layers, and another one for the", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer with Wav2Vec2->Hubert class TFHubertLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config:", "`(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the", "\"\"\"Adapted from https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\" def __init__(self, filters, kernel_size, groups, explicit_padding, **kwargs):", "shape `({0})`): Indices of input sequence tokens in the vocabulary.", "the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*): Whether", "* mask_length > sequence_length: num_masked_spans = sequence_length // mask_length #", "sequence tokens in the position embeddings. Selected in the range", "= False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: \"\"\" Returns: Example: ```python", "cannot be \" \"more than the number of channels (\"", "weights. 
\"\"\" HUBERT_INPUTS_DOCSTRING = r\"\"\" Args: input_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]`", "attention_mask: Optional[tf.Tensor] = None, output_attentions: Optional[bool] = False, training: bool", "(bsz, self.num_heads, tgt_len, src_len)) + attention_mask attn_weights = tf.reshape(attn_weights, (bsz", "Tuple[tf.Tensor]]: r\"\"\" labels (`tf.Tensor` or `np.ndarray` of shape `(batch_size, sequence_length)`,", "generate indices & apply SpecAugment along time axis mask_time_indices =", "to be chosen as start of the span to be", "is_decoder: bool = False, bias: bool = True, **kwargs, ):", "several input Tensors IN THE ORDER given in the docstring:", "TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) class TFHubertPreTrainedModel(TFPreTrainedModel): \"\"\" An abstract", "not None: hidden_states = hidden_states * tf.expand_dims(attention_mask, -1) attention_mask =", "= tf.keras.layers.Dense(config.vocab_size, name=\"lm_head\") def freeze_feature_extractor(self): \"\"\" Calling this function will", "spans for a given shape Args: shape: the the shape", "self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [TFHubertEncoderLayer(config, name=f\"layers.{i}\") for i in", "(`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*): Indices of positions", "general usage and behavior. 
<Tip> TF 2.0 models accepts two", "cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1,", "sf >>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\") >>>", "output[\"args\"] else: # `args` in this case is always the", "is batch size and 2nd is timesteps attention_mask: optional padding", "to mask spec_aug_mask_idxs = _sample_without_replacement(uniform_dist, num_masked_spans) # expand masked indices", "broadcast_shape = [1] * len(input_shape) is_instance_norm = (input_shape[self.axis] // self.groups)", "output_attentions: outputs += (attn_weights,) return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm", "import soundfile as sf >>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model", "feature encoder so that its parameters will not be updated", "tf.Tensor: hidden_states = self.layer_norm(hidden_states) hidden_states = self.projection(hidden_states) hidden_states = self.dropout(hidden_states,", "+ \".\" ) def _set_number_of_groups_for_instance_norm(self, input_shape): dim = input_shape[self.axis] if", "language modeling loss. Indices should be in `[-100, 0, ...,", "implements for all its model (such as downloading or saving,", "and/or along feature axis according to [SpecAugment](https://arxiv.org/abs/1904.08779). 
\"\"\" batch_size, sequence_length,", "\"gamma_regularizer\": tf.keras.regularizers.serialize(self.gamma_regularizer), \"beta_constraint\": tf.keras.constraints.serialize(self.beta_constraint), \"gamma_constraint\": tf.keras.constraints.serialize(self.gamma_constraint), } base_config = super().get_config()", "def call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states", "_get_feat_extract_output_lengths(self, input_lengths: tf.Tensor): \"\"\" Computes the output length of the", "for the feature encoder so that its parameters will not", "of the model. Returns: Two lists, one for the missing", "1 if num_conv_pos_embeddings % 2 == 0 else 0 def", "self.groups = groups self.axis = axis self.epsilon = epsilon self.center", "bool = False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: \"\"\" Returns: Example:", "\"\"\" config_class = HubertConfig base_model_prefix = \"hubert\" main_input_name = \"input_values\"", "be smaller than `sequence_length`, but got `mask_length`: {mask_length} and `sequence_length`:", "one for the unexpected layers. 
\"\"\" signature = dict(inspect.signature(func).parameters) signature.pop(\"kwargs\",", "several input Tensors associated to the input names given in", "-100 # when not being attended to labels_mask = tf.cast(labels", "wish \" \"to train/fine-tine this model, you need a GPU", "**kwargs, ): super().__init__(**kwargs) self.supports_masking = True self.groups = groups self.axis", "or `tf.Tensor` of shape `({0})`, *optional*): Segment token indices to", "raw hidden-states without any specific head on top.\", HUBERT_START_DOCSTRING, )", "length of mask span to mask approximately this percentage of", "Optional[tf.Tensor] = None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[bool] =", "tensor_name = input.name.split(\":\")[0] if tensor_name in parameter_names: output[tensor_name] = input", "\"but the layer received an input with shape \" +", "tf.reshape(self.beta, broadcast_shape) return gamma, beta def _check_if_input_shape_is_none(self, input_shape): dim =", "# Add last layer if output_hidden_states: all_hidden_states = all_hidden_states +", "TFHubertFeatureProjection(config, name=\"feature_projection\") if config.do_stable_layer_norm: self.encoder = TFHubertEncoderStableLayerNorm(config, name=\"encoder\") else: self.encoder", "dict(input_values).items(): if isinstance(v, allowed_types) or v is None: output[k] =", "https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return (input_length - kernel_size) // stride + 1 for", "_expand_mask(attention_mask) else: attention_mask = None position_embeddings = self.pos_conv_embed(hidden_states) hidden_states =", "samples are < sequence_length uniform_dist = tf.ones((batch_size, sequence_length - (mask_length", "encoder_outputs[0] if not inputs[\"return_dict\"]: return (hidden_states,) + encoder_outputs[1:] return TFBaseModelOutput(", ">>> transcription = processor.decode(predicted_ids[0]) >>> # compute loss >>> target_transcription", "(`np.ndarray` or `tf.Tensor` of shape `({0}, hidden_size)`, 
*optional*): Optionally, instead", "padded_inputs = tf.pad(inputs, ((0, 0), (self.explicit_padding, self.explicit_padding), (0, 0))) output", "self.num_heads = num_heads self.dropout = tf.keras.layers.Dropout(dropout) self.head_dim = embed_dim //", "= None, gamma_regularizer: tf.keras.regularizers.Regularizer = None, beta_constraint: tf.keras.constraints.Constraint = None,", "modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_output), [bsz * self.num_heads,", "num masked indices <= sequence_length if num_masked_spans * mask_length >", ") return normalized_inputs def _get_reshaped_weights(self, input_shape): broadcast_shape = self._create_broadcast_shape(input_shape) gamma", "output[tensor_name] = output[\"args\"] else: # `args` in this case is", "HUBERT_START_DOCSTRING, ) class TFHubertForCTC(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs, **kwargs):", "= inputs[\"return_dict\"] if inputs[\"return_dict\"] else self.config.return_dict outputs = self.hubert( input_values=inputs[\"input_values\"],", "inputs[\"return_dict\"]: return (hidden_states,) + encoder_outputs[1:] return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions,", "None, training: Optional[bool] = False, ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: \"\"\"Input", "type {type(v)} is not allowed only {allowed_types} is accepted for", "None, head_mask: Optional[tf.Tensor] = None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions:", "by applicable law or agreed to in writing, software #", "description) dropout_probability = np.random.uniform(0, 1) if training and (dropout_probability <", "if not is_instance_norm: axis = -2 if self.axis == -1", "if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) # Add last", "@replace_return_docstrings(output_type=TFCausalLMOutput, config_class=_CONFIG_FOR_DOC) def call( self, 
input_values: tf.Tensor, attention_mask: Optional[tf.Tensor] =", "> 0: mask_feature_indices = _compute_mask_indices( (batch_size, hidden_size), mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length, )", "property so we check for a real Tensor if type(input)", "= False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: all_hidden_states = () if", "is useful if you want more control over how to", "\"\"\" Computes the output length of the convolutional layers \"\"\"", "the loss is only computed for the tokens with labels", "None, attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None, training:", "outputs def get_config(self): config = { \"groups\": self.groups, \"axis\": self.axis,", "\"mean\": loss = tf.reduce_mean(loss) else: loss = None if not", "hidden_states, ) elif self.config.mask_time_prob > 0: # generate indices &", "all cross-attention # key/value_states (first \"if\" case) # if uni-directional", "raise ValueError( f\"embed_dim must be divisible by num_heads (got `embed_dim`:", "training=inputs[\"training\"], ) hidden_states = encoder_outputs[0] if not inputs[\"return_dict\"]: return (hidden_states,)", "num_masked_spans = int(mask_prob * sequence_length / mask_length + tf.random.uniform((1,))) num_masked_spans", "(see https://arxiv.org/abs/1909.11556 for description) dropout_probability = np.random.uniform(0, 1) if training", "if self.axis == 0: raise ValueError( \"You are trying to", "the generic methods the library implements for all its model", "inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask,", "`past_key_values` instead.\", FutureWarning, ) output[\"past_key_values\"] = input_values.pop(\"decoder_cached_states\") for k, v", "input_lengths = self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask, axis=-1)) # assuming 
that padded tokens are", "as in PyTorch with indices in format (batch_dim, indixes) \"\"\"", "= tf.matmul(attn_probs, value_states) # The tf.debugging asserts are not compliant", "* self.num_heads, tgt_len, src_len)) attn_weights = tf.nn.softmax(attn_weights, axis=-1) if layer_head_mask", "applicable law or agreed to in writing, software # distributed", "not getattr(self.config, \"apply_spec_augment\", True): return hidden_states if mask_time_indices is not", "configuration class with all the parameters of the model. Initializing", "processor to encode labels >>> with processor.as_target_processor(): ... labels =", "LARGE_NEGATIVE = -1e8 # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing def input_values_processing(func, config,", "if not inputs[\"return_dict\"]: return (hidden_states,) + encoder_outputs[1:] return TFBaseModelOutput( last_hidden_state=hidden_states,", "inputs: - having all inputs as keyword arguments (like PyTorch", "inputs, input_shape, tensor_input_shape): group_shape = [tensor_input_shape[i] for i in range(len(input_shape))]", "only {allowed_types} is accepted for {k}.\") else: if isinstance(input_values, tf.Tensor)", "= self.intermediate_act_fn(hidden_states) hidden_states = self.intermediate_dropout(hidden_states, training=training) hidden_states = self.output_dense(hidden_states) hidden_states", "[`PreTrainedTokenizer.encode`] for details. 
[What are input IDs?](../glossary#input-ids) attention_mask (`np.ndarray` or", "output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns =", "proj_shape) value_states = tf.reshape(value_states, proj_shape) src_len = shape_list(key_states)[1] attn_weights =", "for {parameter_names[0]}.\" ) for name in parameter_names: if name not", "layer_module in enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,)", "TFHubertFeatureEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs: Any) -> None: super().__init__(**kwargs)", "return_dict: Optional[bool] = None, training: bool = False, ) ->", "tgt_len, self.head_dim], message=f\"`attn_output` should be of size {(bsz, self.num_heads, tgt_len,", "1) ] elif config.feat_extract_norm == \"layer\": conv_layers = [ TFHubertLayerNormConvLayer(config,", "= processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values # Batch size 1 >>> hidden_states =", "return hidden_states def call( self, input_values: tf.Tensor, attention_mask: Optional[tf.Tensor] =", "tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"out_proj\") def _shape(self, tensor: tf.Tensor, seq_len: int, bsz:", ">>> # wrap processor as target processor to encode labels", "tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) elif self.config.mask_time_prob > 0:", "input elif isinstance(input, allowed_types) or input is None: output[parameter_names[i]] =", "= TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... 
speech, _ = sf.read(batch[\"file\"])", "{allowed_types} is accepted for {k}.\") else: if isinstance(input_values, tf.Tensor) or", "-1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz) if self.is_decoder: #", "None def _add_beta_weight(self, input_shape): dim = input_shape[self.axis] shape = (dim,)", "Model and refer to the TF 2.0 documentation for all", "config, **kwargs): super().__init__(config, **kwargs) warnings.warn( f\"The class `{self.__class__.__name__}` has been", "in enumerate(self.layer): if output_hidden_states: all_hidden_states = all_hidden_states + (hidden_states,) #", "num_heads (got `embed_dim`: {self.embed_dim}\" f\" and `num_heads`: {num_heads}).\" ) self.scaling", "input in enumerate(input_values): # EagerTensors don't allow to use the", "\".\" ) def _set_number_of_groups_for_instance_norm(self, input_shape): dim = input_shape[self.axis] if self.groups", "abstract class to handle weights initialization and a simple interface", "= tf.expand_dims(spec_aug_mask_idxs, -1) spec_aug_mask_idxs = tf.tile(spec_aug_mask_idxs, (1, 1, mask_length)) spec_aug_mask_idxs", "# if encoder bi-directional self-attention `past_key_value` is always `None` past_key_value", "first and second portions of the inputs. Indices are selected", "the value will always be set to True. training (`bool`,", ">>> target_transcription = \"A MAN SAID TO THE UNIVERSE SIR", "\"License\"); # you may not use this file except in", "<= sequence_length if num_masked_spans * mask_length > sequence_length: num_masked_spans =", "mode (some modules like dropout modules have different behaviors between", "input_values, \"attention_mask\": tf.cast(tf.not_equal(input_values, pad_token), tf.float32), } return dummy_inputs def __init__(self,", "\"\"\"Multi-headed attention from \"Attention Is All You Need\"\"\" def __init__(", "**masked**. 
[What are attention masks?](../glossary#attention-mask) token_type_ids (`np.ndarray` or `tf.Tensor` of", "then we check only the # `name` part tensor_name =", "\" + str(input_shape) + \".\" ) def _set_number_of_groups_for_instance_norm(self, input_shape): dim", "= False, ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: \"\"\"Input shape: Batch x", "dim = input_shape[self.axis] if dim is None: raise ValueError( \"Axis", "Copied from transformers.models.bart.modeling_tf_bart._expand_mask def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None,", "filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.layer_norm = tf.keras.layers.LayerNormalization(name=\"layer_norm\", epsilon=config.layer_norm_eps)", "be disabled in other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal(", "that its parameters will not be updated during training. \"\"\"", "num_masked_spans) # expand masked indices to masked spans spec_aug_mask_idxs =", "name=f\"conv_layers.{i}\") for i in range(config.num_feat_extract_layers) ] else: raise ValueError( f\"`config.feat_extract_norm`", "is deprecated and will be removed in a future version,", "(1, -1, 1, 1)) * tf.reshape( attn_weights, (bsz, self.num_heads, tgt_len,", "if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attn_output), [bsz * self.num_heads, tgt_len, self.head_dim], message=f\"`attn_output`", "along time axis and/or along feature axis according to [SpecAugment](https://arxiv.org/abs/1904.08779).", "output_hidden_states: Optional[bool] = False, return_dict: Optional[bool] = True, training: Optional[bool]", "shapes are checked at build time since TF 2.7, so", "datasets import load_dataset >>> import soundfile as sf >>> processor", "= tf.reduce_sum(labels_mask, axis=-1) loss = tf.nn.ctc_loss( logits=logits, labels=labels, logit_length=input_lengths, label_length=target_lengths,", "src_len = 
shape_list(mask)[1] tgt_len = tgt_len if tgt_len is not", "= False, output_hidden_states: Optional[bool] = False, return_dict: Optional[bool] = True,", "the # `name` part tensor_name = input.name.split(\":\")[0] if tensor_name in", ") @keras_serializable class TFHubertMainLayer(tf.keras.layers.Layer): config_class = HubertConfig def __init__(self, config:", "self.lm_head = tf.keras.layers.Dense(config.vocab_size, name=\"lm_head\") def freeze_feature_extractor(self): \"\"\" Calling this function", "uniform distribution to sample from, make sure that offset samples", "return gamma, beta def _check_if_input_shape_is_none(self, input_shape): dim = input_shape[self.axis] if", "1, tgt_len, 1)) return (one_cst - expanded_mask) * LARGE_NEGATIVE #", "self.output_dropout(hidden_states, training=training) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer with Wav2Vec2->Hubert", "input_values else: raise ValueError( f\"Data of type {type(input_values)} is not", "> 0 else 1 self.out_conv_dim = config.conv_dim[layer_id] self.conv = tf.keras.layers.Conv1D(", "self.dropout(hidden_states, training=inputs[\"training\"]) logits = self.lm_head(hidden_states) if labels is not None:", "TFHubertEncoderLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.attention = TFHubertAttention(", "mask_length: int, min_masks: int = 0, ) -> tf.Tensor: \"\"\"", "to convolution formula output_lengths = self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"], -1)) attention_mask = tf.sequence_mask(", "if not self.initialized: self._init_norm() self.initialized = True self._normalize_kernel() padded_inputs =", "__init__(self, filters, kernel_size, groups, explicit_padding, **kwargs): super().__init__( filters=filters, kernel_size=kernel_size, groups=groups,", ">>> input_values = processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values # Batch size 1 >>>", 
"class `{self.__class__.__name__}` has been depreciated \" \"and will be removed", "of # all previous decoder key/value_states. Further calls to uni-directional", "the head is **not masked**, - 0 indicates the head", "= None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None,", "config: HubertConfig, **kwargs: Any) -> None: super().__init__(**kwargs) if config.feat_extract_norm ==", "output[parameter_names[0]] = input_values else: raise ValueError( f\"Data of type {type(input_values)}", "is always the first parameter, then `input_values` output[\"input_values\"] = output[\"args\"]", "`input_values` output[\"input_values\"] = output[\"args\"] del output[\"args\"] if \"kwargs\" in output:", "tf.keras.constraints.serialize(self.gamma_constraint), } base_config = super().get_config() return {**base_config, **config} def compute_output_shape(self,", "self.config.ctc_loss_reduction == \"sum\": loss = tf.reduce_sum(loss) if self.config.ctc_loss_reduction == \"mean\":", "(one_cst - expanded_mask) * LARGE_NEGATIVE # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm with", "initializer=\"zeros\", trainable=True) def call(self, inputs): if not self.initialized: self._init_norm() self.initialized", "`sequence_length`, but got `mask_length`: {mask_length} and `sequence_length`: {sequence_length}`\" ) #", "self.serving_output(output) HUBERT_START_DOCSTRING = r\"\"\" This model inherits from [`TFPreTrainedModel`]. Check", "plain tuple. This argument can be used in eager mode,", "normalize your batch axis. 
Do you want to \" \"use", "dim = input_shape[self.axis] if dim < self.groups: raise ValueError( \"Number", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing def input_values_processing(func, config, input_values, **kwargs): \"\"\"", "+ 1 for kernel_size, stride in zip(self.config.conv_kernel, self.config.conv_stride): input_lengths =", "return_dict=return_dict, training=training, kwargs_call=kwargs, ) hidden_states = self.feature_extractor( tf.cast(inputs[\"input_values\"], tf.float32), training=inputs[\"training\"]", "type IDs?](../glossary#token-type-ids) position_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*):", "not return_dict: return tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions]", "disabled in other modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attention_mask),", "sf.read(batch[\"file\"]) ... batch[\"speech\"] = speech ... return batch >>> ds", "mask span to mask approximately this percentage of all elements.", "if (self.head_dim * num_heads) != self.embed_dim: raise ValueError( f\"embed_dim must", "(`callable`): The callable function of the TensorFlow model. config ([`PretrainedConfig`]):", "= TFHubertWeightNormConv1D( filters=config.hidden_size, kernel_size=config.num_conv_pos_embeddings, groups=config.num_conv_pos_embedding_groups, explicit_padding=config.num_conv_pos_embeddings // 2, name=\"conv\", )", "and name != \"args\": output[name] = kwargs.pop(name, signature[name].default) # When", "to mask approximately this percentage of all elements. however due", "tf.TensorShape): self.masked_spec_embed = self.add_weight( shape=(self.config.hidden_size,), initializer=\"uniform\", trainable=True, name=\"masked_spec_embed\" ) super().build(input_shape)", "tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.layer_norm(hidden_states) hidden_states", "([`PretrainedConfig`]): The config of the running model. 
**kwargs: The inputs", "Labels for computing the masked language modeling loss. Indices should", "-1, epsilon: float = 1e-3, center: bool = True, scale:", "__init__(self, config: HubertConfig, layer_id: int = 0, **kwargs: Any) ->", ") else: self.gamma = None def _add_beta_weight(self, input_shape): dim =", "attention_mask = tf.sequence_mask( output_lengths, maxlen=shape_list(hidden_states)[1], dtype=hidden_states.dtype ) hidden_states = self.feature_projection(hidden_states,", "get_tf_activation(config.feat_extract_activation) self.layer_norm = TFHubertGroupNorm(groups=self.out_conv_dim, epsilon=config.layer_norm_eps, name=\"layer_norm\") def call(self, hidden_states: tf.Tensor)", "axis = -1 if self.axis == -1 else self.axis -", "tf.reshape(batch_indices, [1, -1])], 0)) # scatter values to pair indices", "mode, in graph mode the value will always be set", "the model, only the configuration. Check out the [`~PreTrainedModel.from_pretrained`] method", "attention on padding token indices. Mask values selected in `[0,", "training=training, ) hidden_states = layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions", "/ mask_length + tf.random.uniform((1,))) num_masked_spans = max(num_masked_spans, min_masks) # make", "= \"hubert\" main_input_name = \"input_values\" @property def dummy_inputs(self) -> Dict[str,", "not compliant with XLA then they # have to be", "training. 
\"\"\" warnings.warn( \"The method `freeze_feature_extractor` is deprecated and will", "tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint = tf.keras.constraints.get(beta_constraint) self.gamma_constraint = tf.keras.constraints.get(gamma_constraint) self._check_axis() def build(self,", "attn_probs = self.dropout(attn_weights, training=training) attn_output = tf.matmul(attn_probs, value_states) # The", "self.head_dim)), (0, 2, 1, 3)) def call( self, hidden_states: tf.Tensor,", "self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding with Wav2Vec2->Hubert class", "layer_outputs[0] if output_attentions: all_self_attentions = all_self_attentions + (layer_outputs[1],) # Add", "str(dim) + \").\" ) def _check_axis(self): if self.axis == 0:", "- 0 for tokens that are **masked**. [What are attention", "output: if output[\"args\"] is not None and type(output[\"args\"]) == tf.Tensor:", "# reuse k, v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz)", "along time axis mask_time_indices = _compute_mask_indices( (batch_size, sequence_length), mask_prob=self.config.mask_time_prob, mask_length=self.config.mask_time_length,", "(hidden_states,) + encoder_outputs[1:] return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) class", "current projected key/value_states (third \"elif\" case) # if encoder bi-directional", "`({0})`, *optional*): Segment token indices to indicate first and second", "super().__init__(**kwargs) if config.feat_extract_norm == \"group\": conv_layers = [TFHubertGroupNormConvLayer(config, layer_id=0, name=f\"conv_layers.{0}\")]", "...modeling_tf_outputs import TFBaseModelOutput, TFCausalLMOutput from ...modeling_tf_utils import TFPreTrainedModel, booleans_processing, get_initializer,", "HubertConfig, **kwargs: Any) -> None: 
super().__init__(**kwargs) self.conv = TFHubertWeightNormConv1D( filters=config.hidden_size,", "= tf.keras.layers.Dropout(dropout) self.head_dim = embed_dim // num_heads if (self.head_dim *", "tf.keras.constraints.get(gamma_constraint) self._check_axis() def build(self, input_shape): self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape) self._add_gamma_weight(input_shape)", "= self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"k_proj\")", "= self._shape(self.k_proj(key_value_states), -1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1, bsz) elif", "import get_tf_activation from ...modeling_tf_outputs import TFBaseModelOutput, TFCausalLMOutput from ...modeling_tf_utils import", "# Conv1D output shapes are checked at build time since", "**kwargs: Any) -> None: super().__init__(**kwargs) self.conv = TFHubertWeightNormConv1D( filters=config.hidden_size, kernel_size=config.num_conv_pos_embeddings,", "config.vocab_size]` (see `input_values` docstring) Tokens with indices set to `-100`", "self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape) self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape) self.built = True super().build(input_shape)", "you may not use this file except in compliance with", "methods the library implements for all its model (such as", "= self.add_weight( shape=(self.config.hidden_size,), initializer=\"uniform\", trainable=True, name=\"masked_spec_embed\" ) super().build(input_shape) def _get_feat_extract_output_lengths(self,", "config self.pos_conv_embed = TFHubertPositionalConvEmbedding(config, name=\"pos_conv_embed\") self.layer_norm = 
tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout", "the input names given in the docstring: `model({\"input_values\": input_values, \"token_type_ids\":", "of \" \"input tensor should have a defined dimension \"", "* 2 super().build(input_shape) self.kernel = tf.Variable(tf.transpose(self.kernel), name=\"weight_v\", trainable=True) self.weight_v =", "has to be smaller than `sequence_length`, but got `mask_length`: {mask_length}", "if cross_attention save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states.", "\"\"\" Categorical sampling without replacement is currently not implemented. The", "(input_shape[self.axis] // self.groups) == 1 if not is_instance_norm: outputs =", "are trying to normalize your batch axis. Do you want", "if not is_instance_norm: group_shape[self.axis] = input_shape[self.axis] // self.groups group_shape.insert(self.axis, self.groups)", "else 0 def call(self, hidden_states): if self.num_pad_remove > 0: hidden_states", "use_bias=bias, name=\"out_proj\") def _shape(self, tensor: tf.Tensor, seq_len: int, bsz: int):", "Optional[int] = None, past_key_values_length: int = 0): \"\"\" Expands attention_mask", "None else tf.ones_like(inputs[\"input_values\"], dtype=tf.float32) ) input_lengths = self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask, axis=-1)) #", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward with Wav2Vec2->Hubert class TFHubertFeedForward(tf.keras.layers.Layer): def __init__(self, config:", "or not to return the attentions tensors of all attention", "= input elif isinstance(input, allowed_types) or input is None: output[parameter_names[i]]", "of input sequence tokens in the vocabulary. Indices can be", "choose to directly pass an embedded representation. 
This is useful", "int], mask_prob: float, mask_length: int, min_masks: int = 0, )", "\"\"\" Computes random mask spans for a given shape Args:", ":], (1, 1, tgt_len, 1)) return (one_cst - expanded_mask) *", "but has to be one of ['group', 'layer']\" ) self.conv_layers", "ValueError(f\"Label values must be <= vocab_size: {self.config.vocab_size}\") attention_mask = (", "from `[bsz, seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. \"\"\" src_len", "the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids)", "decoder is_cross_attention = key_value_states is not None bsz, tgt_len, embed_dim", "with one or several input Tensors associated to the input", "= output[\"args\"].name.split(\":\")[0] output[tensor_name] = output[\"args\"] else: # `args` in this", "instead. output_hidden_states (`bool`, *optional*): Whether or not to return the", "= groups self.axis = axis self.epsilon = epsilon self.center =", "Segment token indices to indicate first and second portions of", "= tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [ TFHubertEncoderLayerStableLayerNorm(config,", "# transform batch_indices to pair_indices pair_indices = tf.transpose(tf.concat([broad_casted_batch_dims, tf.reshape(batch_indices, [1,", "beta_constraint: tf.keras.constraints.Constraint = None, gamma_constraint: tf.keras.constraints.Constraint = None, **kwargs, ):", "TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ \"facebook/hubert-base-ls960\", # See all Hubert models at", "Optional[tf.Tensor] = None, position_ids: Optional[tf.Tensor] = None, head_mask: Optional[tf.Tensor] =", "compute masks. 
should be of size 2 where first element", "is not None: # reuse k,v, cross_attentions key_states = past_key_value[0]", "(`bool`, *optional*, defaults to `False``): Whether or not to use", "hidden_states = self.dropout(hidden_states, training=training) return hidden_states # Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention", "training: bool = False, **kwargs: Any, ): inputs = input_values_processing(", "indices to mask spec_aug_mask = _scatter_values_on_batch_indices( tf.ones_like(spec_aug_mask_idxs), spec_aug_mask_idxs, spec_aug_mask.shape )", "= encoder_outputs[0] if not inputs[\"return_dict\"]: return (hidden_states,) + encoder_outputs[1:] return", "**kwargs, ): super().__init__(**kwargs) self.embed_dim = embed_dim self.num_heads = num_heads self.dropout", "v5.\" \"Please use the equivalent `freeze_feature_encoder` method instead.\", FutureWarning, )", "will do for now - see https://github.com/tensorflow/tensorflow/issues/9260 for more info", "(`np.ndarray` or `tf.Tensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):", "None, gamma_constraint: tf.keras.constraints.Constraint = None, **kwargs, ): super().__init__(**kwargs) self.supports_masking =", "tensor_input_shape): group_shape = [tensor_input_shape[i] for i in range(len(input_shape))] is_instance_norm =", "= True super().build(input_shape) def call(self, inputs): input_shape = tf.keras.backend.int_shape(inputs) tensor_input_shape", "file except in compliance with the License. 
# You may", "mask_time_indices=mask_time_indices) encoder_outputs = self.encoder( hidden_states, attention_mask=attention_mask, output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"],", "= tf.nn.batch_normalization( reshaped_inputs, mean=mean, variance=variance, scale=gamma, offset=beta, variance_epsilon=self.epsilon, ) return", "None all_self_attentions = () if output_attentions else None if attention_mask", "be updated during training. \"\"\" self.hubert.feature_extractor.trainable = False @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFCausalLMOutput,", "hidden_states # Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention with TFBart->TFHubert class TFHubertAttention(tf.keras.layers.Layer): \"\"\"Multi-headed", "[-1]), output_shape) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices def _compute_mask_indices( shape: Tuple[int,", "as keyword arguments (like PyTorch models), or - having all", "processor as target processor to encode labels >>> with processor.as_target_processor():", "name=\"pos_conv_embed\") self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer =", "*optional*): Whether or not to return the hidden states of", "to uni-directional self-attention # can concat previous decoder key/value_states to", "self.beta = self.add_weight( shape=shape, name=\"beta\", initializer=self.beta_initializer, regularizer=self.beta_regularizer, constraint=self.beta_constraint, ) else:", "have to add this exception if \"args\" in output: if", ") # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer): def", 
"hidden_states, ) # apply SpecAugment along feature axis if self.config.mask_feature_prob", "output_attentions (`bool`, *optional*): Whether or not to return the attentions", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "the TF 2.0 documentation for all matter related to general", "in parameter_names and \"args\" not in parameter_names: logger.warning( f\"The parameter", "with Wav2Vec2->Hubert class TFHubertEncoderLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs)", "attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, kwargs_call=kwargs,", "1 corresponds to a *sentence B* token. 
[What are token", "num_masked_spans, 1)) offsets = tf.reshape(offsets, (batch_size, num_masked_spans * mask_length)) spec_aug_mask_idxs", "kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) self.layer_norm =", "model (such as downloading or saving, resizing the input embeddings,", "() if output_attentions else None if attention_mask is not None:", "FutureWarning, ) class TFHubertFeatureProjection(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs)", "1, tgt_len, src_len)}, but is {shape_list(attention_mask)}\", ) attention_mask = tf.cast(attention_mask,", "self.center: beta = tf.reshape(self.beta, broadcast_shape) return gamma, beta def _check_if_input_shape_is_none(self,", "not inputs[\"return_dict\"]: return (hidden_states,) + encoder_outputs[1:] return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_outputs.hidden_states,", "and second portions of the inputs. 
Indices are selected in", "indices # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices def _scatter_values_on_batch_indices(values, batch_indices, output_shape): \"\"\"", "= tf.cast(attention_mask, dtype=attn_weights.dtype) attn_weights = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len))", "**kwargs: Any) -> None: super().__init__(**kwargs) if config.feat_extract_norm == \"group\": conv_layers", "batch_size, sequence_length = shape if mask_length < 1: raise ValueError(\"`mask_length`", "Wav2Vec2->Hubert class TFHubertFeedForward(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.intermediate_dropout", "else: `model(inputs_ids)` - a list of varying length with one", "`({0}, hidden_size)`, *optional*): Optionally, instead of passing `input_values` you can", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer with Wav2Vec2->Hubert class TFHubertLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self,", "HubertConfig def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config", "to handle weights initialization and a simple interface for downloading", "# When creating a SavedModel TF calls the method with", "this model, you need a GPU or a TPU\" )", "output shapes are checked at build time since TF 2.7,", "# limitations under the License. \"\"\" TensorFlow Hubert model.\"\"\" import", "= TFHubertMainLayer(config, name=\"hubert\") @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFBaseModelOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_values:", "layer_id > 0 else 1 self.out_conv_dim = config.conv_dim[layer_id] self.conv =", "= sf.read(batch[\"file\"]) ... batch[\"speech\"] = speech ... return batch >>>", "your batch axis. 
Do you want to \" \"use tf.layer.batch_normalization", "MAN SAID TO THE UNIVERSE SIR I EXIST\" >>> #", "= None, attention_mask: Optional[tf.Tensor] = None, layer_head_mask: Optional[tf.Tensor] = None,", "= False) -> tf.Tensor: hidden_states = self.layer_norm(hidden_states) hidden_states = self.projection(hidden_states)", "= tf.reshape(attn_output, (bsz, tgt_len, embed_dim)) attn_output = self.out_proj(attn_output) attn_weights: tf.Tensor", "= tf.reshape(layer_head_mask, (1, -1, 1, 1)) * tf.reshape( attn_weights, (bsz,", "tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_probs = self.dropout(attn_weights, training=training)", "hidden_states = hidden_states + position_embeddings hidden_states = self.layer_norm(hidden_states) hidden_states =", "tf.keras.layers.LayerNormalization(name=\"layer_norm\", epsilon=config.layer_norm_eps) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor) ->", "call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] = None, output_attentions: Optional[bool]", "be of size {(self.num_heads)}, but is {shape_list(layer_head_mask)}\", ) attn_weights =", "= tf.pad(inputs, ((0, 0), (self.explicit_padding, self.explicit_padding), (0, 0))) output =", "is not allowed only {allowed_types} is accepted for {k}.\") if", "if self.config.output_attentions else None return TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs, attentions=attns) @add_start_docstrings( \"\"\"TFHubert", "associated vectors than the model's internal embedding lookup matrix. 
output_attentions", "masked spans in batch num_masked_spans = int(mask_prob * sequence_length /", "self.dropout(hidden_states, training=training) hidden_states = attn_residual + hidden_states hidden_states = hidden_states", "get_config(self): config = { \"groups\": self.groups, \"axis\": self.axis, \"epsilon\": self.epsilon,", "first positional argument : - a single Tensor with `input_values`", "The gumbel-max trick will do for now - see https://github.com/tensorflow/tensorflow/issues/9260", "axis with given mask_time_indices hidden_states = tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis],", "case) # if encoder bi-directional self-attention `past_key_value` is always `None`", "limitations under the License. \"\"\" TensorFlow Hubert model.\"\"\" import inspect", "= tf.nn.moments(reshaped_inputs, group_reduction_axes, keepdims=True) gamma, beta = self._get_reshaped_weights(input_shape) normalized_inputs =", "the proper output we have to add this exception if", "LARGE_NEGATIVE # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNorm with Wav2Vec2->Hubert class TFHubertGroupNorm(tf.keras.layers.Layer): \"\"\"", "tf.float32), } return dummy_inputs def __init__(self, config, *inputs, **kwargs): super().__init__(config,", "None, training: Optional[bool] = False, ) -> Union[TFCausalLMOutput, Tuple[tf.Tensor]]: r\"\"\"", "use_bias=config.conv_bias, name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) self.layer_norm = TFHubertGroupNorm(groups=self.out_conv_dim, epsilon=config.layer_norm_eps,", "* tf.transpose(self.weight_g) self.kernel = tf.transpose(kernel) def build(self, input_shape): if not", "weights.\"\"\" kernel = tf.nn.l2_normalize(self.weight_v, axis=self.kernel_norm_axes) * tf.transpose(self.weight_g) self.kernel = tf.transpose(kernel)", "def serving(self, inputs): output = self.call(input_values=inputs, training=False) return self.serving_output(output) HUBERT_START_DOCSTRING", "HuggingFace Inc. 
team. All rights reserved. # # Licensed under", "if mask_length > sequence_length: raise ValueError( f\"`mask_length` has to be", "tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"projection\", ) self.dropout = tf.keras.layers.Dropout(rate=config.feat_proj_dropout) def", "in Transformers v5.\" \"Please use the equivalent `freeze_feature_encoder` method instead.\",", "self.add_weight( name=\"weight_g\", shape=(int(self.weight_v.shape[self.filter_axis]), 1, 1), initializer=\"ones\", dtype=self.weight_v.dtype, trainable=True, ) self.bias", "= [ TFHubertLayerNormConvLayer(config, layer_id=i, name=f\"conv_layers.{i}\") for i in range(config.num_feat_extract_layers) ]", "normalized_inputs def _get_reshaped_weights(self, input_shape): broadcast_shape = self._create_broadcast_shape(input_shape) gamma = None", "TFHubertGroupNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int = 0, **kwargs:", "`tf.Tensor` of shape `({0})`, *optional*): Segment token indices to indicate", "= processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values # Batch size 1 >>> logits =", "indices <= sequence_length if num_masked_spans * mask_length > sequence_length: num_masked_spans", "not allowed only {allowed_types} is accepted for {parameter_names[0]}.\" ) for", "1, tgt_seq_len, src_seq_len]`. 
\"\"\" src_len = shape_list(mask)[1] tgt_len = tgt_len", "tf.pad(inputs, ((0, 0), (self.explicit_padding, self.explicit_padding), (0, 0))) output = super().call(padded_inputs)", "center self.scale = scale self.beta_initializer = tf.keras.initializers.get(beta_initializer) self.gamma_initializer = tf.keras.initializers.get(gamma_initializer)", "name=\"k_proj\") self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"q_proj\") self.v_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias,", "top for Connectionist Temporal Classification (CTC).\"\"\", HUBERT_START_DOCSTRING, ) class TFHubertForCTC(TFHubertPreTrainedModel):", "HubertConfig logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"HubertConfig\" TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [", "= TFHubertFeatureEncoder(config, name=\"feature_extractor\") self.feature_projection = TFHubertFeatureProjection(config, name=\"feature_projection\") if config.do_stable_layer_norm: self.encoder", "output_lengths = self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"], -1)) attention_mask = tf.sequence_mask( output_lengths, maxlen=shape_list(hidden_states)[1], dtype=hidden_states.dtype", "kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"projection\", ) self.dropout = tf.keras.layers.Dropout(rate=config.feat_proj_dropout) def call(self, hidden_states:", "Optional[tf.Tensor] = None, head_mask: Optional[tf.Tensor] = None, inputs_embeds: Optional[tf.Tensor] =", "conv_layer(hidden_states) return hidden_states class TFHubertFeatureExtractor(TFHubertFeatureEncoder): def __init__(self, config, **kwargs): super().__init__(config,", "labels (`tf.Tensor` or `np.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Labels", "\"The bare TFHubert Model transformer outputing raw hidden-states without any", "key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1, bsz)", 
"otherwise the order of the tensors will not be guaranteed", "dim to indices_shape broad_casted_batch_dims = tf.reshape( tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]), axis=-1), indices_shape), [1,", "along time axis with given mask_time_indices hidden_states = tf.where( tf.cast(mask_time_indices[:,", "= {} allowed_types = (tf.Tensor, bool, int, ModelOutput, tuple, list,", "and type(output[\"args\"]) == tf.Tensor: tensor_name = output[\"args\"].name.split(\":\")[0] output[tensor_name] = output[\"args\"]", "self.config = config self.feature_extractor = TFHubertFeatureEncoder(config, name=\"feature_extractor\") self.feature_projection = TFHubertFeatureProjection(config,", "to add this exception if \"args\" in output: if output[\"args\"]", "< sequence_length uniform_dist = tf.ones((batch_size, sequence_length - (mask_length - 1)))", "src_len one_cst = tf.constant(1.0) mask = tf.cast(mask, dtype=one_cst.dtype) expanded_mask =", "self.gamma_constraint = tf.keras.constraints.get(gamma_constraint) self._check_axis() def build(self, input_shape): self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape)", "`{self.__class__.__bases__[0].__name__}` instead.\", FutureWarning, ) class TFHubertFeatureProjection(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "= tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"output_dense\", ) self.output_dropout = tf.keras.layers.Dropout(config.hidden_dropout)", "tf.reshape(self._shape(query_states, tgt_len, bsz), proj_shape) key_states = tf.reshape(key_states, proj_shape) value_states =", "masks. 
should be of size 2 where first element is", "regularizer=self.beta_regularizer, constraint=self.beta_constraint, ) else: self.beta = None def _create_broadcast_shape(self, input_shape):", "the norm of the weight vector.\"\"\" kernel_norm = tf.sqrt(tf.reduce_sum(tf.square(self.weight_v), axis=self.kernel_norm_axes))", "be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}\",", "hidden_states = hidden_states * tf.expand_dims(attention_mask, -1) attention_mask = _expand_mask(attention_mask) else:", "self._reshape_into_groups(inputs, input_shape, tensor_input_shape) normalized_inputs = self._apply_normalization(reshaped_inputs, input_shape) is_instance_norm = (input_shape[self.axis]", "func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids, position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states,", "tf.keras.regularizers.serialize(self.beta_regularizer), \"gamma_regularizer\": tf.keras.regularizers.serialize(self.gamma_regularizer), \"beta_constraint\": tf.keras.constraints.serialize(self.beta_constraint), \"gamma_constraint\": tf.keras.constraints.serialize(self.gamma_constraint), } base_config =", "calls to uni-directional self-attention # can concat previous decoder key/value_states", "be multiplied by number of timesteps divided by length of", "<gh_stars>1-10 # coding=utf-8 # Copyright 2021 The Fairseq Authors and", "= speech ... return batch >>> ds = load_dataset(\"hf-internal-testing/librispeech_asr_dummy\", \"clean\",", "config will be used instead. 
output_hidden_states (`bool`, *optional*): Whether or", "name=\"conv\", ) self.layer_norm = tf.keras.layers.LayerNormalization(name=\"layer_norm\", epsilon=config.layer_norm_eps) self.activation = get_tf_activation(config.feat_extract_activation) def", "return reshaped_inputs, group_shape else: return inputs, group_shape def _apply_normalization(self, reshaped_inputs,", "order of the tensors will not be guaranteed during the", ") self.layer_norm = tf.keras.layers.LayerNormalization(name=\"layer_norm\", epsilon=config.layer_norm_eps) self.activation = get_tf_activation(config.feat_extract_activation) def call(self,", "time axis mask_time_indices = _compute_mask_indices( (batch_size, sequence_length), mask_prob=self.config.mask_time_prob, mask_length=self.config.mask_time_length, min_masks=2,", "name=\"gamma\", initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, constraint=self.gamma_constraint, ) else: self.gamma = None def", "FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self): \"\"\" Calling this function will", "TFHubertGroupNorm(groups=self.out_conv_dim, epsilon=config.layer_norm_eps, name=\"layer_norm\") def call(self, hidden_states: tf.Tensor) -> tf.Tensor: hidden_states", "int): return tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads, self.head_dim)), (0, 2, 1,", "Selected in the range `[0, config.max_position_embeddings - 1]`. 
[What are", "= True, beta_initializer: tf.keras.initializers.Initializer = \"zeros\", gamma_initializer: tf.keras.initializers.Initializer = \"ones\",", "raise ValueError( \"Number of groups (\" + str(self.groups) + \")", "config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) self.config = config", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "with Wav2Vec2->Hubert class TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int", "< 1: raise ValueError(\"`mask_length` has to be bigger than 0.\")", "License. # You may obtain a copy of the License", "= tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len)) attn_probs = self.dropout(attn_weights,", "layers, and another one for the unexpected layers. \"\"\" signature", "one of ['group', 'layer']\" ) self.conv_layers = conv_layers def call(self,", "token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) return", "useful when using [`tf.keras.Model.fit`] method which currently requires having all", "= tf.nn.softmax(attn_weights, axis=-1) if layer_head_mask is not None: # The", "self.scaling = self.head_dim**-0.5 self.is_decoder = is_decoder self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias,", "of positions of each input sequence tokens in the position", "tf.nn.batch_normalization( reshaped_inputs, mean=mean, variance=variance, scale=gamma, offset=beta, variance_epsilon=self.epsilon, ) return normalized_inputs", "1)) * tf.reshape( attn_weights, (bsz, self.num_heads, tgt_len, src_len) ) attn_weights", "use `input_values` instead.\", FutureWarning, ) output[\"input_values\"] = input_values.pop(\"inputs\") if 
\"decoder_cached_states\"", "None) return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) @keras_serializable class TFHubertMainLayer(tf.keras.layers.Layer):", "tf.keras.layers.Dropout(config.hidden_dropout) self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.feed_forward = TFHubertFeedForward(config, name=\"feed_forward\") self.final_layer_norm", "self.intermediate_dropout(hidden_states, training=training) hidden_states = self.output_dense(hidden_states) hidden_states = self.output_dropout(hidden_states, training=training) return", "the model call function: `model(inputs)`. If you choose this second", "offset samples are < sequence_length uniform_dist = tf.ones((batch_size, sequence_length -", "to indicate first and second portions of the inputs. Indices", "int, num_heads: int, dropout: float = 0.0, is_decoder: bool =", "> 0: # generate indices & apply SpecAugment along time", "\"\"\"TFHubert Model with a `language modeling` head on top for", "from typing import Any, Dict, Optional, Tuple, Union import numpy", "as np import tensorflow as tf from ...activations_tf import get_tf_activation", "passing `input_values` you can choose to directly pass an embedded", "or dict in the first positional arguments. This second option", "name not in list(output.keys()) and name != \"args\": output[name] =", ">>> model = TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... speech, _", "axis = -2 if self.axis == -1 else self.axis -", "in `[0, 1]`: - 1 indicates the head is **not", "list, tuple or dict in the first positional arguments. 
This", "self.layer_norm = TFHubertGroupNorm(groups=self.out_conv_dim, epsilon=config.layer_norm_eps, name=\"layer_norm\") def call(self, hidden_states: tf.Tensor) ->", "\"groups\": self.groups, \"axis\": self.axis, \"epsilon\": self.epsilon, \"center\": self.center, \"scale\": self.scale,", "TFHubertMainLayer(tf.keras.layers.Layer): config_class = HubertConfig def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs)", "self._check_axis() def build(self, input_shape): self._check_if_input_shape_is_none(input_shape) self._set_number_of_groups_for_instance_norm(input_shape) self._check_size_of_dimensions(input_shape) self._create_input_spec(input_shape) self._add_gamma_weight(input_shape) self._add_beta_weight(input_shape)", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "TF calls the method with LayerCall.__call__(args, **kwargs) # So to", "labels is not None: if tf.reduce_max(labels) >= self.config.vocab_size: raise ValueError(f\"Label", "= kwargs.pop(name, signature[name].default) # When creating a SavedModel TF calls", "will be ignored.\" ) continue else: raise ValueError(f\"Data of type", "__init__(self, config: HubertConfig, **kwargs: Any) -> None: super().__init__(**kwargs) if config.feat_extract_norm", "deprecated and will be removed in a future version, use", "not None bsz, tgt_len, embed_dim = shape_list(hidden_states) # get query", "required by applicable law or agreed to in writing, software", "= _sample_without_replacement(uniform_dist, num_masked_spans) # expand masked indices to masked spans", "self._mask_hidden_states(hidden_states, mask_time_indices=mask_time_indices) encoder_outputs = self.encoder( hidden_states, attention_mask=attention_mask, output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"],", "{(self.num_heads)}, but is {shape_list(layer_head_mask)}\", ) attn_weights = tf.reshape(layer_head_mask, (1, -1,", "= 2 
self.initialized = False self.kernel_norm_axes = tf.constant([0, 1]) def", "SAID TO THE UNIVERSE SIR I EXIST\" >>> # wrap", "trainable=True) def call(self, inputs): if not self.initialized: self._init_norm() self.initialized =", "Indices of input sequence tokens in the vocabulary. Indices can", "agreed to in writing, software # distributed under the License", "has to be bigger than 0.\") if mask_length > sequence_length:", "{allowed_types} is accepted for {parameter_names[0]}.\" ) for name in parameter_names:", "warnings.warn( f\"The class `{self.__class__.__name__}` has been depreciated \" \"and will", "__init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.intermediate_dropout = tf.keras.layers.Dropout(config.activation_dropout) self.intermediate_dense =", "token indices to indicate first and second portions of the", "of the running model. **kwargs: The inputs of the model.", "__init__(self, config: HubertConfig, **kwargs: Any) -> None: super().__init__(**kwargs) self.conv =", "kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"output_dense\", ) self.output_dropout = tf.keras.layers.Dropout(config.hidden_dropout) def call(self, hidden_states:", "= (input_shape[self.axis] // self.groups) == 1 if not is_instance_norm: group_shape[self.axis]", "0), (self.explicit_padding, self.explicit_padding), (0, 0))) output = super().call(padded_inputs) return output", "= outputs[0] hidden_states = self.dropout(hidden_states, training=inputs[\"training\"]) logits = self.lm_head(hidden_states) if", "inputs as a list, tuple or dict in the first", "= TFHubertFeatureProjection(config, name=\"feature_projection\") if config.do_stable_layer_norm: self.encoder = TFHubertEncoderStableLayerNorm(config, name=\"encoder\") else:", "input_shape[-2] += self.explicit_padding * 2 super().build(input_shape) self.kernel = tf.Variable(tf.transpose(self.kernel), name=\"weight_v\",", "tf.cast(mask_time_indices[:, :, tf.newaxis], 
tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) elif", "with Wav2Vec2->Hubert class TFHubertSamePadLayer(tf.keras.layers.Layer): def __init__(self, num_conv_pos_embeddings, **kwargs): super().__init__(**kwargs) self.num_pad_remove", "or not to return the hidden states of all layers.", ">>> # compute loss >>> target_transcription = \"A MAN SAID", "from transformers import Wav2Vec2Processor, TFHubertForCTC >>> from datasets import load_dataset", "tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.padding(hidden_states) hidden_states", "\" \"and will be removed in Transformers v5. \" f\"Use", "tf from ...activations_tf import get_tf_activation from ...modeling_tf_outputs import TFBaseModelOutput, TFCausalLMOutput", "This model inherits from [`TFPreTrainedModel`]. Check the superclass documentation for", "def _set_number_of_groups_for_instance_norm(self, input_shape): dim = input_shape[self.axis] if self.groups == -1:", "loss = model(input_values, labels=labels).loss ```\"\"\" inputs = input_values_processing( func=self.call, config=self.config,", "tokens with labels in `[0, ..., config.vocab_size]` Returns: Example: ```python", "keras_serializable from ...tf_utils import shape_list from ...tokenization_utils_base import BatchEncoding from", "= tf.matmul(query_states, key_states, transpose_b=True) # The tf.debugging asserts are not", "bare TFHubert Model transformer outputing raw hidden-states without any specific", "True self.groups = groups self.axis = axis self.epsilon = epsilon", "reshaped_inputs, mean=mean, variance=variance, scale=gamma, offset=beta, variance_epsilon=self.epsilon, ) return normalized_inputs def", "position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) inputs[\"output_hidden_states\"] =", "*sentence A* token, - 1 corresponds to a 
*sentence B*", "tensorflow as tf >>> from transformers import Wav2Vec2Processor, TFHubertForCTC >>>", "1, 1), initializer=\"ones\", dtype=self.weight_v.dtype, trainable=True, ) self.bias = self.add_weight(name=\"bias\", shape=(self.filters,),", "return hidden_states # Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention with TFBart->TFHubert class TFHubertAttention(tf.keras.layers.Layer):", "config.conv_dim[layer_id] if layer_id > 0 else 1 self.out_conv_dim = config.conv_dim[layer_id]", "TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) @keras_serializable class TFHubertMainLayer(tf.keras.layers.Layer): config_class =", "tf.debugging.assert_equal( shape_list(attn_output), [bsz * self.num_heads, tgt_len, self.head_dim], message=f\"`attn_output` should be", "import TFBaseModelOutput, TFCausalLMOutput from ...modeling_tf_utils import TFPreTrainedModel, booleans_processing, get_initializer, keras_serializable", "Optional[tf.Tensor] = None, output_hidden_states: Optional[tf.Tensor] = None, return_dict: Optional[bool] =", "Any) -> None: super().__init__(**kwargs) self.conv = TFHubertWeightNormConv1D( filters=config.hidden_size, kernel_size=config.num_conv_pos_embeddings, groups=config.num_conv_pos_embedding_groups,", "if tgt_len is not None else src_len one_cst = tf.constant(1.0)", "tf.reshape(value_states, proj_shape) src_len = shape_list(key_states)[1] attn_weights = tf.matmul(query_states, key_states, transpose_b=True)", "Returns: Two lists, one for the missing layers, and another", "OR CONDITIONS OF ANY KIND, either express or implied. 
#", "dummy_inputs(self) -> Dict[str, tf.Tensor]: pad_token = 0.0 input_values = tf.convert_to_tensor(np.random.rand(1,", "Batch x Time x Channel\"\"\" # if key_value_states are provided", "</Tip> Args: config ([`HubertConfig`]): Model configuration class with all the", "= self.intermediate_dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) hidden_states = self.intermediate_dropout(hidden_states, training=training) hidden_states", "self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"out_proj\") def _shape(self, tensor: tf.Tensor, seq_len:", "head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = outputs[0]", "tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) + attention_mask attn_weights = tf.reshape(attn_weights,", "(batch_size, num_masked_spans * mask_length)) spec_aug_mask_idxs = spec_aug_mask_idxs + offsets #", "conv_layer in self.conv_layers: hidden_states = conv_layer(hidden_states) return hidden_states class TFHubertFeatureExtractor(TFHubertFeatureEncoder):", "groups, explicit_padding, **kwargs): super().__init__( filters=filters, kernel_size=kernel_size, groups=groups, padding=\"valid\", use_bias=True, bias_initializer=\"he_normal\",", "self.scale: gamma = tf.reshape(self.gamma, broadcast_shape) if self.center: beta = tf.reshape(self.beta,", "the feature encoder so that its parameter will not be", "output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) inputs[\"output_hidden_states\"] = ( inputs[\"output_hidden_states\"] if", "[ TFHubertEncoderLayerStableLayerNorm(config, name=f\"layers.{i}\") for i in range(config.num_hidden_layers) ] def call(", "allowed_types = (tf.Tensor, bool, int, ModelOutput, tuple, list, dict, 
np.ndarray)", "mask_length + tf.random.uniform((1,))) num_masked_spans = max(num_masked_spans, min_masks) # make sure", "In case of a list of symbolic inputs, each input", "hidden_states = self.intermediate_dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) hidden_states = self.intermediate_dropout(hidden_states, training=training)", "than the model's internal embedding lookup matrix. output_attentions (`bool`, *optional*):", "random indices to mask spec_aug_mask_idxs = _sample_without_replacement(uniform_dist, num_masked_spans) # expand", "bsz, tgt_len, embed_dim = shape_list(hidden_states) # get query proj query_states", "pad_token), tf.float32), } return dummy_inputs def __init__(self, config, *inputs, **kwargs):", "input_shape[self.axis] if dim is None: raise ValueError( \"Axis \" +", "to sample from, make sure that offset samples are <", "`tf.Tensor` of shape `({0}, hidden_size)`, *optional*): Optionally, instead of passing", "Whether or not to use the model in training mode", "layers. \"\"\" signature = dict(inspect.signature(func).parameters) signature.pop(\"kwargs\", None) signature.pop(\"self\", None) parameter_names", "0, ..., config.vocab_size]` (see `input_values` docstring) Tokens with indices set", "self.num_heads, tgt_len, src_len)) attn_probs = self.dropout(attn_weights, training=training) attn_output = tf.matmul(attn_probs,", "embeddings, pruning heads etc.) 
This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model)", "(bsz * self.num_heads, tgt_len, src_len)) attn_probs = self.dropout(attn_weights, training=training) attn_output", "= self._get_feat_extract_output_lengths(tf.reduce_sum(inputs[\"attention_mask\"], -1)) attention_mask = tf.sequence_mask( output_lengths, maxlen=shape_list(hidden_states)[1], dtype=hidden_states.dtype )", "self.supports_masking = True self.groups = groups self.axis = axis self.epsilon", "_compute_mask_indices( (batch_size, hidden_size), mask_prob=self.config.mask_feature_prob, mask_length=self.config.mask_feature_length, ) hidden_states = tf.where(mask_feature_indices[:, tf.newaxis,", "proj query_states = self.q_proj(hidden_states) * self.scaling # get key, value", "tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name=\"final_layer_norm\" ) def call( self, hidden_states: tf.Tensor, attention_mask:", "processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values # Batch size 1 >>> hidden_states = model(input_values).last_hidden_state", "= [ TFHubertEncoderLayerStableLayerNorm(config, name=f\"layers.{i}\") for i in range(config.num_hidden_layers) ] def", "shape `({0}, hidden_size)`, *optional*): Optionally, instead of passing `input_values` you", "you choose this second option, there are three possibilities you", "may not use this file except in compliance with the", "be used in eager mode, in graph mode the value", "function: `model(inputs)`. 
If you choose this second option, there are", "* self.num_heads, tgt_len, src_len)}, but is {shape_list(attn_weights)}\", ) if attention_mask", "(1, 1, mask_length)) spec_aug_mask_idxs = tf.reshape(spec_aug_mask_idxs, (batch_size, num_masked_spans * mask_length))", "str(self.groups) + \") must be a \" \"multiple of the", "\"\"\" def _conv_out_length(input_length, kernel_size, stride): # 1D convolutional layer output", "# # Licensed under the Apache License, Version 2.0 (the", "in Transformers v5. \" f\"Use `{self.__class__.__bases__[0].__name__}` instead.\", FutureWarning, ) class", "Conv1D output shapes are checked at build time since TF", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D with Wav2Vec2->Hubert class TFHubertWeightNormConv1D(tf.keras.layers.Conv1D): \"\"\"Adapted from https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\" def", "dim = input_shape[self.axis] self.input_spec = tf.keras.layers.InputSpec(ndim=len(input_shape), axes={self.axis: dim}) def _add_gamma_weight(self,", "(such as downloading or saving, resizing the input embeddings, pruning", "return ((loss,) + output) if loss is not None else", "all elements. 
however due to overlaps, the actual number will", "is_cross_attention: # cross_attentions key_states = self._shape(self.k_proj(key_value_states), -1, bsz) value_states =", "or a TPU\" ) @tf.function def serving(self, inputs): output =", "[`tf.keras.Model.fit`] method which currently requires having all the tensors in", "import warnings from typing import Any, Dict, Optional, Tuple, Union", "= self.add_weight(name=\"bias\", shape=(self.filters,), initializer=\"zeros\", trainable=True) def call(self, inputs): if not", "tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) return attn_output, attn_weights,", "freeze_feature_encoder(self): \"\"\" Calling this function will disable the gradient computation", "labels=labels, logit_length=input_lengths, label_length=target_lengths, blank_index=self.config.pad_token_id, logits_time_major=False, ) if self.config.ctc_loss_reduction == \"sum\":", "# Tensor names have always the pattern `name:id` then we", "+ str(self.groups) + \") must be a \" \"multiple of", "1]`: - 0 corresponds to a *sentence A* token, -", "loading pretrained models. \"\"\" config_class = HubertConfig base_model_prefix = \"hubert\"", "or v is None: output[k] = v elif k not", "the vocabulary. Indices can be obtained using [`BertTokenizer`]. 
See [`PreTrainedTokenizer.__call__`]", "mask_length)) offsets = tf.range(mask_length)[tf.newaxis, tf.newaxis, :] offsets = tf.tile(offsets, (batch_size,", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices def _compute_mask_indices( shape: Tuple[int, int], mask_prob:", "\"\"\" Returns: Example: ```python >>> from transformers import Wav2Vec2Processor, TFHubertModel", "and nothing else: `model(inputs_ids)` - a list of varying length", "= self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is not None: #", "+= (attn_weights,) return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2Encoder with Wav2Vec2->Hubert", "-> tf.Tensor: hidden_states = self.intermediate_dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) hidden_states =", "= tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"k_proj\") self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"q_proj\") self.v_proj", "-> tf.Tensor: \"\"\" Computes random mask spans for a given", "= tf.reshape( tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]), axis=-1), indices_shape), [1, -1] ) # transform", "None, output_attentions: Optional[tf.Tensor] = None, output_hidden_states: Optional[tf.Tensor] = None, return_dict:", "lists, one for the missing layers, and another one for", "hidden_states[:, : -self.num_pad_remove, :] return hidden_states class TFHubertFeatureEncoder(tf.keras.layers.Layer): def __init__(self,", ") return spec_aug_mask # Copied from transformers.models.bart.modeling_tf_bart._expand_mask def _expand_mask(mask: tf.Tensor,", "used as a cross-attention layer # for the decoder is_cross_attention", "tf.layer.batch_normalization instead\" ) def _create_input_spec(self, input_shape): dim = input_shape[self.axis] self.input_spec", "elif isinstance(input, allowed_types) or input is None: output[parameter_names[i]] = input", "= tf.tile(spec_aug_mask_idxs, (1, 1, mask_length)) 
spec_aug_mask_idxs = tf.reshape(spec_aug_mask_idxs, (batch_size, num_masked_spans", "num_samples): \"\"\" Categorical sampling without replacement is currently not implemented.", "self.groups) == 1 if not is_instance_norm: group_shape[self.axis] = input_shape[self.axis] //", "all_hidden_states + (hidden_states,) if not return_dict: return tuple(v for v", "TFHubertForCTC(TFHubertPreTrainedModel): def __init__(self, config: HubertConfig, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs)", "1, tgt_len, src_len], message=f\"Attention mask should be of size {(bsz,", "FutureWarning, ) output[\"past_key_values\"] = input_values.pop(\"decoder_cached_states\") for k, v in dict(input_values).items():", "beta_initializer: tf.keras.initializers.Initializer = \"zeros\", gamma_initializer: tf.keras.initializers.Initializer = \"ones\", beta_regularizer: tf.keras.regularizers.Regularizer", "the masked language modeling loss. Indices should be in `[-100,", "attention_mask=attention_mask, output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = encoder_outputs[0] if", "use the equivalent `freeze_feature_encoder` method instead.\", FutureWarning, ) self.freeze_feature_encoder() def", "info \"\"\" z = -tf.math.log(tf.random.uniform(shape_list(distribution), 0, 1)) _, indices =", "tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) elif self.config.mask_time_prob >", "saving, resizing the input embeddings, pruning heads etc.) 
This model", "return tf.scatter_nd(pair_indices, tf.reshape(values, [-1]), output_shape) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._compute_mask_indices def", "not None) return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) # Copied", "must be <= vocab_size: {self.config.vocab_size}\") attention_mask = ( inputs[\"attention_mask\"] if", "self.embed_dim: raise ValueError( f\"embed_dim must be divisible by num_heads (got", "shape `({0})`, *optional*): Segment token indices to indicate first and", "get_initializer, keras_serializable from ...tf_utils import shape_list from ...tokenization_utils_base import BatchEncoding", "number of channels (\" + str(dim) + \").\" ) def", "mask min_masks: minimum number of masked spans Adapted from [fairseq's", "= config.conv_dim[layer_id] self.conv = tf.keras.layers.Conv1D( filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\",", "\"clean\", split=\"validation\") >>> ds = ds.map(map_to_array) >>> input_values = processor(ds[\"speech\"][0],", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D with Wav2Vec2->Hubert class TFHubertWeightNormConv1D(tf.keras.layers.Conv1D): \"\"\"Adapted from https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\" def __init__(self,", "All rights reserved. 
# # Licensed under the Apache License,", "a \" \"multiple of the number of channels (\" +", "self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states), -1,", "with Wav2Vec2->Hubert class TFHubertWeightNormConv1D(tf.keras.layers.Conv1D): \"\"\"Adapted from https://www.tensorflow.org/probability/api_docs/python/tfp/layers/weight_norm/WeightNorm\"\"\" def __init__(self, filters,", "return broadcast_shape # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2WeightNormConv1D with Wav2Vec2->Hubert class TFHubertWeightNormConv1D(tf.keras.layers.Conv1D):", "-> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.padding(hidden_states) hidden_states =", "self.initialized: self._init_norm() self.initialized = True self._normalize_kernel() padded_inputs = tf.pad(inputs, ((0,", "convolutional layers \"\"\" def _conv_out_length(input_length, kernel_size, stride): # 1D convolutional", "if tf.reduce_max(labels) >= self.config.vocab_size: raise ValueError(f\"Label values must be <=", "currently requires having all the tensors in the first argument", "from transformers.models.bart.modeling_tf_bart._expand_mask def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int] = None, past_key_values_length:", "(like PyTorch models), or - having all inputs as a", "if inputs[\"training\"]: hidden_states = self._mask_hidden_states(hidden_states, mask_time_indices=mask_time_indices) encoder_outputs = self.encoder( hidden_states,", "None and type(output[\"args\"]) == tf.Tensor: tensor_name = output[\"args\"].name.split(\":\")[0] output[tensor_name] =", "input with shape \" + str(input_shape) + \".\" ) def", "layer_id=i + 1, name=f\"conv_layers.{i+1}\") for i in range(config.num_feat_extract_layers - 1)", "{parameter_names} and will be ignored.\" ) continue else: raise ValueError(f\"Data", "seq_len]` to `[bsz, 1, tgt_seq_len, src_seq_len]`. 
\"\"\" src_len = shape_list(mask)[1]", "received an input with shape \" + str(input_shape) + \".\"", "of groups (\" + str(self.groups) + \") must be a", "should be of size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is", "-1e8 # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.input_values_processing def input_values_processing(func, config, input_values, **kwargs):", "[What are input IDs?](../glossary#input-ids) attention_mask (`np.ndarray` or `tf.Tensor` of shape", "serving_output(self, output: TFCausalLMOutput) -> TFCausalLMOutput: hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states", "self.dropout(attn_weights, training=training) attn_output = tf.matmul(attn_probs, value_states) # The tf.debugging asserts", "in dict(input_values).items(): if isinstance(v, allowed_types) or v is None: output[k]", "spec_aug_mask_idxs + offsets # scatter indices to mask spec_aug_mask =", "the config will be used instead. return_dict (`bool`, *optional*): Whether", "in writing, software # distributed under the License is distributed", "attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) return attn_output,", "if output[\"args\"] is not None and type(output[\"args\"]) == tf.Tensor: tensor_name", "+ (layer_outputs[1],) # Add last layer if output_hidden_states: all_hidden_states =", "always the pattern `name:id` then we check only the #", "input_shape = input_shape.as_list() # Conv1D output shapes are checked at", "from, make sure that offset samples are < sequence_length uniform_dist", "or several input Tensors associated to the input names given", "if self.config.output_hidden_states else None attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else", "same size as shape, which will prevent masking padded elements", "hidden_states class TFHubertFeatureEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs: Any) ->", "of all cross 
attention key/value_states. # Further calls to cross_attention", "position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`.", "indixes) \"\"\" indices_shape = shape_list(batch_indices) # broadcast batch dim to", "2021 The Fairseq Authors and the HuggingFace Inc. team. All", "< self.config.layerdrop): # skip the layer continue layer_outputs = layer_module(", "the span to be masked. this will be multiplied by", "[What are token type IDs?](../glossary#token-type-ids) position_ids (`np.ndarray` or `tf.Tensor` of", "= list(range(1, len(group_shape))) is_instance_norm = (input_shape[self.axis] // self.groups) == 1", "_check_axis(self): if self.axis == 0: raise ValueError( \"You are trying", "training: bool = False) -> tf.Tensor: hidden_states = self.layer_norm(hidden_states) hidden_states", "function as in PyTorch with indices in format (batch_dim, indixes)", "outputs = normalized_inputs return outputs def get_config(self): config = {", "TFHubertPositionalConvEmbedding(config, name=\"pos_conv_embed\") self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer", "dim = input_shape[self.axis] if self.groups == -1: self.groups = dim", "= self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer with Wav2Vec2->Hubert", "model call function: `model(inputs)`. 
If you choose this second option,", "if inputs[\"output_hidden_states\"] else self.config.output_hidden_states ) inputs[\"output_attentions\"] = ( inputs[\"output_attentions\"] if", "last_hidden_state=hidden_states, hidden_states=all_hidden_states, attentions=all_self_attentions, ) @keras_serializable class TFHubertMainLayer(tf.keras.layers.Layer): config_class = HubertConfig", ") # compute number of masked spans in batch num_masked_spans", "self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"k_proj\") self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"q_proj\")", "want to \" \"use tf.layer.batch_normalization instead\" ) def _create_input_spec(self, input_shape):", "name=\"layer_norm\") self.projection = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"projection\", ) self.dropout", "( ModelOutput, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, ) from .configuration_hubert import", "`past_key_value` is always `None` past_key_value = (key_states, value_states) proj_shape =", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "the value in the config will be used instead. 
return_dict", "# apply SpecAugment along time axis with given mask_time_indices hidden_states", "regular TF 2.0 Keras Model and refer to the TF", "self.layer_norm(hidden_states) hidden_states = hidden_states + self.feed_forward(hidden_states) hidden_states = self.final_layer_norm(hidden_states) outputs", "# Batch size 1 >>> hidden_states = model(input_values).last_hidden_state ```\"\"\" inputs", "if \"decoder_cached_states\" in input_values: warnings.warn( \"The `decoder_cached_states` argument is deprecated", "is None: raise ValueError( \"Axis \" + str(self.axis) + \"", "tf.Tensor, attention_mask: Optional[tf.Tensor] = None, token_type_ids: Optional[tf.Tensor] = None, position_ids:", "_reshape_into_groups(self, inputs, input_shape, tensor_input_shape): group_shape = [tensor_input_shape[i] for i in", ") -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: \"\"\" Returns: Example: ```python >>> from", "1 for kernel_size, stride in zip(self.config.conv_kernel, self.config.conv_stride): input_lengths = _conv_out_length(input_lengths,", "epsilon=config.layer_norm_eps, name=\"final_layer_norm\" ) def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor]", "False, **kwargs: Any, ): inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values,", "HubertConfig, **kwargs): super().__init__(**kwargs) self.attention = TFHubertAttention( embed_dim=config.hidden_size, num_heads=config.num_attention_heads, dropout=config.attention_dropout, is_decoder=False,", "loss = None if not inputs[\"return_dict\"]: output = (logits,) +", "\"\"\" batch_size, sequence_length = shape if mask_length < 1: raise", "# Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer): def __init__(self,", "_sample_without_replacement(uniform_dist, num_masked_spans) # expand masked indices to masked spans spec_aug_mask_idxs", "= 
input_values.pop(\"decoder_cached_states\") for k, v in dict(input_values).items(): if isinstance(v, allowed_types)", "should be of size {(bsz, 1, tgt_len, src_len)}, but is", "else: raise ValueError( f\"Data of type {type(input_values)} is not allowed", "the hidden states of all layers. See `hidden_states` under returned", "by length of mask span to mask approximately this percentage", "+ hidden_states hidden_states = hidden_states + self.feed_forward(self.final_layer_norm(hidden_states)) outputs = (hidden_states,)", "# Unless required by applicable law or agreed to in", "input_shape): broadcast_shape = self._create_broadcast_shape(input_shape) gamma = None beta = None", "but is {shape_list(attn_weights)}\", ) if attention_mask is not None: #", "self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states, ) # apply SpecAugment along feature", "TensorFlow model including the booleans. In case of a list", ") hidden_states = self.feature_projection(hidden_states, training=inputs[\"training\"]) mask_time_indices = kwargs.get(\"mask_time_indices\", None) if", "proj_shape) key_states = tf.reshape(key_states, proj_shape) value_states = tf.reshape(value_states, proj_shape) src_len", "Computes the output length of the convolutional layers \"\"\" def", "position_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*): Indices of", "not None: # reuse k,v, cross_attentions key_states = past_key_value[0] value_states", "if not is_instance_norm: broadcast_shape[self.axis] = input_shape[self.axis] // self.groups broadcast_shape.insert(self.axis, self.groups)", "the Apache License, Version 2.0 (the \"License\"); # you may", "-> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: all_hidden_states = () if output_hidden_states else None", "tf.reshape( tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]), axis=-1), indices_shape), [1, -1] ) # transform batch_indices", "None) signature.pop(\"self\", None) parameter_names = list(signature.keys()) output = {} allowed_types", 
"output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) return outputs def serving_output(self, output):", "k,v, cross_attentions key_states = past_key_value[0] value_states = past_key_value[1] elif is_cross_attention:", ") self.conv_layers = conv_layers def call(self, input_values): hidden_states = tf.expand_dims(input_values,", "= hidden_states * tf.expand_dims(attention_mask, -1) attention_mask = _expand_mask(attention_mask) else: attention_mask", "serving_output(self, output): hs = tf.convert_to_tensor(output.hidden_states) if self.config.output_hidden_states else None attns", "use_bias=True, bias_initializer=\"he_normal\", **kwargs, ) self.explicit_padding = explicit_padding self.filter_axis = 2", ">>> ds = load_dataset(\"hf-internal-testing/librispeech_asr_dummy\", \"clean\", split=\"validation\") >>> ds = ds.map(map_to_array)", "elements. however due to overlaps, the actual number will be", ") hidden_states = tf.where(mask_feature_indices[:, tf.newaxis, :], hidden_states, 0) return hidden_states", "None: output[k] = v elif k not in parameter_names and", "else src_len one_cst = tf.constant(1.0) mask = tf.cast(mask, dtype=one_cst.dtype) expanded_mask", "if training and (dropout_probability < self.config.layerdrop): # skip the layer", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer with Wav2Vec2->Hubert class TFHubertGroupNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "= [TFHubertEncoderLayer(config, name=f\"layers.{i}\") for i in range(config.num_hidden_layers)] def call( self,", "== 1 if not is_instance_norm: axis = -2 if self.axis", "accepted for {k}.\") if isinstance(input_values, (tuple, list)): for i, input", "not allowed only {allowed_types} is accepted for {k}.\") if isinstance(input_values,", "in eager mode, in graph mode the value in the", "to gather all the input 
Tensors in the first positional", "removed in Transformers v5. \" f\"Use `{self.__class__.__bases__[0].__name__}` instead.\", FutureWarning, )", "None) parameter_names = list(signature.keys()) output = {} allowed_types = (tf.Tensor,", "tf.newaxis, :], hidden_states, ) # apply SpecAugment along feature axis", "use the model in training mode (some modules like dropout", "= v else: raise ValueError(f\"Data of type {type(v)} is not", "https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization \"\"\" def __init__( self, groups: int = 32, axis:", "= input_shape[self.axis] if self.groups == -1: self.groups = dim def", "None return TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs, attentions=attns) @add_start_docstrings( \"\"\"TFHubert Model with a", "input_values: warnings.warn( \"The `decoder_cached_states` argument is deprecated and will be", "layer_head_mask: Optional[tf.Tensor] = None, training: Optional[bool] = False, ) ->", "= [1] * len(input_shape) is_instance_norm = (input_shape[self.axis] // self.groups) ==", "_normalize_kernel(self): \"\"\"Generate normalized weights.\"\"\" kernel = tf.nn.l2_normalize(self.weight_v, axis=self.kernel_norm_axes) * tf.transpose(self.weight_g)", "\" \"input tensor should have a defined dimension \" \"but", "v in [hidden_states, all_hidden_states, all_self_attentions] if v is not None)", "== \"layer\": conv_layers = [ TFHubertLayerNormConvLayer(config, layer_id=i, name=f\"conv_layers.{i}\") for i", "if self.scale: gamma = tf.reshape(self.gamma, broadcast_shape) if self.center: beta =", "-1: self.groups = dim def _check_size_of_dimensions(self, input_shape): dim = input_shape[self.axis]", "This model is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. 
Use it as", "hidden_states = self.layer_norm(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied", "head_mask: Optional[tf.Tensor] = None, inputs_embeds: Optional[tf.Tensor] = None, output_attentions: Optional[tf.Tensor]", "v, self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states),", "\"inputs\" in input_values: warnings.warn( \"The `inputs` argument is deprecated and", "so we need to account for padding input_shape[-2] += self.explicit_padding", "= self.layer_norm(hidden_states) hidden_states = hidden_states + self.feed_forward(hidden_states) hidden_states = self.final_layer_norm(hidden_states)", "inputs, each input has to be named accordingly to the", "(input_shape[self.axis] // self.groups) == 1 if not is_instance_norm: broadcast_shape[self.axis] =", "// self.groups group_shape.insert(self.axis, self.groups) group_shape = tf.stack(group_shape) reshaped_inputs = tf.reshape(inputs,", "name=\"weight_g\", shape=(int(self.weight_v.shape[self.filter_axis]), 1, 1), initializer=\"ones\", dtype=self.weight_v.dtype, trainable=True, ) self.bias =", "all layers. See `hidden_states` under returned tensors for more detail.", "f\" and `num_heads`: {num_heads}).\" ) self.scaling = self.head_dim**-0.5 self.is_decoder =", "= (logits,) + outputs[1:] return ((loss,) + output) if loss", "if layer_id > 0 else 1 self.out_conv_dim = config.conv_dim[layer_id] self.conv", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer with Wav2Vec2->Hubert class TFHubertSamePadLayer(tf.keras.layers.Layer): def __init__(self, num_conv_pos_embeddings, **kwargs): super().__init__(**kwargs)", "missing layers, and another one for the unexpected layers. 
\"\"\"", ") else: self.beta = None def _create_broadcast_shape(self, input_shape): broadcast_shape =", "choose this second option, there are three possibilities you can", "reuse k,v, cross_attentions key_states = past_key_value[0] value_states = past_key_value[1] elif", "Need\"\"\" def __init__( self, embed_dim: int, num_heads: int, dropout: float", "convolutional layer output length formula taken # from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return", "`[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) head_mask (`np.ndarray`", "Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayer with Wav2Vec2->Hubert class TFHubertEncoderLayer(tf.keras.layers.Layer): def __init__(self, config:", "+ hidden_states hidden_states = self.layer_norm(hidden_states) hidden_states = hidden_states + self.feed_forward(hidden_states)", "None, training: bool = False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: \"\"\"", "get key, value proj if is_cross_attention and past_key_value is not", "gradient computation for the feature encoder so that its parameters", "from ...utils import ( ModelOutput, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings, )", "not None: # The tf.debugging asserts are not compliant with", "super().__init__(config, *inputs, **kwargs) logger.warning( f\"\\n{self.__class__.__name__} has backpropagation operations that are", "(\" + str(dim) + \").\" ) if dim % self.groups", "self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.feed_forward = TFHubertFeedForward(config, name=\"feed_forward\") self.final_layer_norm =", "= True, scale: bool = True, beta_initializer: tf.keras.initializers.Initializer = \"zeros\",", ") output[\"past_key_values\"] = input_values.pop(\"decoder_cached_states\") for k, v in dict(input_values).items(): if", "dict in the first positional arguments. 
This second option is", "you want to \" \"use tf.layer.batch_normalization instead\" ) def _create_input_spec(self,", "first positional arguments. This second option is useful when using", "self.weight_g = self.add_weight( name=\"weight_g\", shape=(int(self.weight_v.shape[self.filter_axis]), 1, 1), initializer=\"ones\", dtype=self.weight_v.dtype, trainable=True,", "mask spec_aug_mask_idxs = _sample_without_replacement(uniform_dist, num_masked_spans) # expand masked indices to", "tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(layer_head_mask), [self.num_heads], message=f\"Head mask for a single layer", "config.feat_extract_norm == \"layer\": conv_layers = [ TFHubertLayerNormConvLayer(config, layer_id=i, name=f\"conv_layers.{i}\") for", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2SamePadLayer with Wav2Vec2->Hubert class TFHubertSamePadLayer(tf.keras.layers.Layer): def __init__(self, num_conv_pos_embeddings, **kwargs):", "build(self, input_shape): if not self.built: input_shape = input_shape.as_list() # Conv1D", "input_values (`np.ndarray`, `tf.Tensor`, `List[tf.Tensor]` ``Dict[str, tf.Tensor]` or `Dict[str, np.ndarray]` and", "labels = processor(transcription, return_tensors=\"tf\").input_values >>> loss = model(input_values, labels=labels).loss ```\"\"\"", "tuple(v for v in [hidden_states, all_hidden_states, all_self_attentions] if v is", "outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer): def", "Optionally, instead of passing `input_values` you can choose to directly", "return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) return outputs def serving_output(self, output): hs =", ">>> model = TFHubertModel.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ... 
speech, _", "config, *inputs, **kwargs): super().__init__(config, *inputs, **kwargs) logger.warning( f\"\\n{self.__class__.__name__} has backpropagation", "tensorflow as tf from ...activations_tf import get_tf_activation from ...modeling_tf_outputs import", "inputs_embeds=inputs[\"inputs_embeds\"], output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) return outputs def serving_output(self,", "processor.as_target_processor(): ... labels = processor(transcription, return_tensors=\"tf\").input_values >>> loss = model(input_values,", "arguments (like PyTorch models), or - having all inputs as", "= Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>> model = TFHubertForCTC.from_pretrained(\"facebook/hubert-base-960h\") >>> def map_to_array(batch): ...", "self.config.output_attentions ) inputs[\"return_dict\"] = inputs[\"return_dict\"] if inputs[\"return_dict\"] else self.config.return_dict outputs", "self.conv(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2LayerNormConvLayer", "BatchEncoding from ...utils import ( ModelOutput, add_start_docstrings, add_start_docstrings_to_model_forward, logging, replace_return_docstrings,", "0 else 1 self.out_conv_dim = config.conv_dim[layer_id] self.conv = tf.keras.layers.Conv1D( filters=self.out_conv_dim,", "= tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [TFHubertEncoderLayer(config, name=f\"layers.{i}\")", "embed_dim: int, num_heads: int, dropout: float = 0.0, is_decoder: bool", "training=False) return self.serving_output(output) HUBERT_START_DOCSTRING = r\"\"\" This model inherits from", "= self.dropout(hidden_states, training=training) for i, layer_module in 
enumerate(self.layer): if output_hidden_states:", "with labels in `[0, ..., config.vocab_size]` Returns: Example: ```python >>>", "is accepted for {k}.\") else: if isinstance(input_values, tf.Tensor) or input_values", "int = 0): \"\"\" Expands attention_mask from `[bsz, seq_len]` to", "Any, ): inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask, token_type_ids=token_type_ids,", "tgt_len, bsz), proj_shape) key_states = tf.reshape(key_states, proj_shape) value_states = tf.reshape(value_states,", "input_shape[self.axis] // self.groups broadcast_shape.insert(self.axis, self.groups) else: broadcast_shape[self.axis] = self.groups return", "self.scaling # get key, value proj if is_cross_attention and past_key_value", "(third \"elif\" case) # if encoder bi-directional self-attention `past_key_value` is", "\") cannot be \" \"more than the number of channels", "r\"\"\" labels (`tf.Tensor` or `np.ndarray` of shape `(batch_size, sequence_length)`, *optional*):", "hidden_states = self.padding(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states # Copied", "inputs): input_shape = tf.keras.backend.int_shape(inputs) tensor_input_shape = tf.shape(inputs) reshaped_inputs, group_shape =", "{ \"groups\": self.groups, \"axis\": self.axis, \"epsilon\": self.epsilon, \"center\": self.center, \"scale\":", "self.feed_forward = TFHubertFeedForward(config, name=\"feed_forward\") self.final_layer_norm = tf.keras.layers.LayerNormalization( epsilon=config.layer_norm_eps, name=\"final_layer_norm\" )", "that are **not masked**, - 0 for tokens that are", "the actual number will be smaller (unless no_overlap is True)", "is not None bsz, tgt_len, embed_dim = shape_list(hidden_states) # get", "is not None else output return TFCausalLMOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states,", "= ds.map(map_to_array) >>> input_values = processor(ds[\"speech\"][0], 
return_tensors=\"tf\").input_values # Batch size", "# assuming that padded tokens are filled with -100 #", "def __init__(self, config, **kwargs): super().__init__(config, **kwargs) warnings.warn( f\"The class `{self.__class__.__name__}`", "= None, return_dict: Optional[bool] = None, training: bool = False,", "to return a [`~file_utils.ModelOutput`] instead of a plain tuple. This", "-> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.activation(hidden_states) return hidden_states", "during the training. Args: func (`callable`): The callable function of", "be ignored.\" ) continue else: raise ValueError(f\"Data of type {type(v)}", "can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.__call__`] and [`PreTrainedTokenizer.encode`] for", "return TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs, attentions=attns) @add_start_docstrings( \"\"\"TFHubert Model with a `language", "-1, bsz) value_states = self._shape(self.v_proj(key_value_states), -1, bsz) elif past_key_value is", "get random indices to mask spec_aug_mask_idxs = _sample_without_replacement(uniform_dist, num_masked_spans) #", "save Tuple(tf.Tensor, tf.Tensor) of all cross attention key/value_states. # Further", "tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attention_mask), [bsz, 1, tgt_len, src_len], message=f\"Attention mask should", "callable function of the TensorFlow model. config ([`PretrainedConfig`]): The config", "in `[0, 1]`: - 0 corresponds to a *sentence A*", "see https://github.com/tensorflow/tensorflow/issues/9260 for more info \"\"\" z = -tf.math.log(tf.random.uniform(shape_list(distribution), 0,", "argument of the model call function: `model(inputs)`. 
If you choose", "masks?](../glossary#attention-mask) token_type_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`, *optional*): Segment", "padding=\"valid\", use_bias=True, bias_initializer=\"he_normal\", **kwargs, ) self.explicit_padding = explicit_padding self.filter_axis =", "+ attention_mask attn_weights = tf.reshape(attn_weights, (bsz * self.num_heads, tgt_len, src_len))", "tf.keras.constraints.Constraint = None, gamma_constraint: tf.keras.constraints.Constraint = None, **kwargs, ): super().__init__(**kwargs)", "the number of channels (\" + str(dim) + \").\" )", "are three possibilities you can use to gather all the", "= tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool), self.masked_spec_embed[tf.newaxis, tf.newaxis, :], hidden_states,", "output[\"args\"] del output[\"args\"] if \"kwargs\" in output: del output[\"kwargs\"] boolean_dict", "be in `[-100, 0, ..., config.vocab_size]` (see `input_values` docstring) Tokens", "self.groups) == 1 if not is_instance_norm: axis = -2 if", "= None position_embeddings = self.pos_conv_embed(hidden_states) hidden_states = hidden_states + position_embeddings", "processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values # Batch size 1 >>> logits = model(input_values).logits", "is not None: if tf.reduce_max(labels) >= self.config.vocab_size: raise ValueError(f\"Label values", "tf.reshape(offsets, (batch_size, num_masked_spans * mask_length)) spec_aug_mask_idxs = spec_aug_mask_idxs + offsets", "should have a defined dimension \" \"but the layer received", "the tensors in the first argument of the model call", "size as shape, which will prevent masking padded elements mask_prob:", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "shape=(int(self.weight_v.shape[self.filter_axis]), 1, 1), initializer=\"ones\", dtype=self.weight_v.dtype, trainable=True, ) self.bias = self.add_weight(name=\"bias\",", "of each input sequence tokens in the position embeddings. 
Selected", ">>> def map_to_array(batch): ... speech, _ = sf.read(batch[\"file\"]) ... batch[\"speech\"]", "decoder key/value_states to current projected key/value_states (third \"elif\" case) #", "[tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it as a regular TF 2.0 Keras", "= False, **kwargs: Any, ): inputs = input_values_processing( func=self.call, config=self.config,", "(input_shape[self.axis] // self.groups) == 1 if not is_instance_norm: group_shape[self.axis] =", "= sequence_length // mask_length # SpecAugment mask to fill spec_aug_mask", "than the number of channels (\" + str(dim) + \").\"", "(unless no_overlap is True) mask_length: size of the mask min_masks:", "or `np.ndarray` of shape `(batch_size, sequence_length)`, *optional*): Labels for computing", "input_values, **kwargs): \"\"\" Process the input of each TensorFlow model", "= -1, epsilon: float = 1e-3, center: bool = True,", "name=\"hubert\") self.dropout = tf.keras.layers.Dropout(config.final_dropout) self.lm_head = tf.keras.layers.Dense(config.vocab_size, name=\"lm_head\") def freeze_feature_extractor(self):", "Categorical sampling without replacement is currently not implemented. The gumbel-max", "def _get_feat_extract_output_lengths(self, input_lengths: tf.Tensor): \"\"\" Computes the output length of", "the [`~PreTrainedModel.from_pretrained`] method to load the model weights. 
\"\"\" HUBERT_INPUTS_DOCSTRING", "gamma_regularizer: tf.keras.regularizers.Regularizer = None, beta_constraint: tf.keras.constraints.Constraint = None, gamma_constraint: tf.keras.constraints.Constraint", "the feature encoder so that its parameters will not be", ">>> import soundfile as sf >>> processor = Wav2Vec2Processor.from_pretrained(\"facebook/hubert-base-960h\") >>>", "allowed only {allowed_types} is accepted for {parameter_names[0]}.\" ) for name", "hidden_states: tf.Tensor) -> tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.activation(hidden_states)", "TFHubertFeatureEncoder(config, name=\"feature_extractor\") self.feature_projection = TFHubertFeatureProjection(config, name=\"feature_projection\") if config.do_stable_layer_norm: self.encoder =", "will be removed in Transformers v5.\" \"Please use the equivalent", "kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) def call(self,", "is None: output[parameter_names[i]] = input else: raise ValueError( f\"Data of", "will be removed in a future version, use `input_values` instead.\",", "None else src_len one_cst = tf.constant(1.0) mask = tf.cast(mask, dtype=one_cst.dtype)", "= conv_layer(hidden_states) return hidden_states class TFHubertFeatureExtractor(TFHubertFeatureEncoder): def __init__(self, config, **kwargs):", "+ position_embeddings hidden_states = self.dropout(hidden_states, training=training) for i, layer_module in", "2.0 Keras Model and refer to the TF 2.0 documentation", "Check the superclass documentation for the generic methods the library", "Batch size 1 >>> hidden_states = model(input_values).last_hidden_state ```\"\"\" inputs =", "[bsz * self.num_heads, tgt_len, self.head_dim], message=f\"`attn_output` should be of size", "**kwargs): super().__init__(**kwargs) self.num_pad_remove = 1 if num_conv_pos_embeddings % 2 ==", 
"filters=self.out_conv_dim, kernel_size=config.conv_kernel[layer_id], strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) self.layer_norm", "ignored (masked), the loss is only computed for the tokens", "shape `({0})`, *optional*): Indices of positions of each input sequence", "self.num_pad_remove > 0: hidden_states = hidden_states[:, : -self.num_pad_remove, :] return", "f\"Data of type {type(input_values)} is not allowed only {allowed_types} is", "-1, bsz) key_states = tf.concat([past_key_value[0], key_states], axis=2) value_states = tf.concat([past_key_value[1],", "query proj query_states = self.q_proj(hidden_states) * self.scaling # get key,", "raise ValueError( \"Axis \" + str(self.axis) + \" of \"", "more control over how to convert `input_values` indices into associated", "tgt_len = tgt_len if tgt_len is not None else src_len", "model(input_values, labels=labels).loss ```\"\"\" inputs = input_values_processing( func=self.call, config=self.config, input_values=input_values, attention_mask=attention_mask,", "gather all the input Tensors in the first positional argument", "batch_indices, output_shape): \"\"\" Scatter function as in PyTorch with indices", "real Tensor if type(input) == tf.Tensor: # Tensor names have", "not in list(output.keys()) and name != \"args\": output[name] = kwargs.pop(name,", "Optional[bool] = None, training: bool = False, **kwargs: Any, ):", "dictionary with one or several input Tensors associated to the", "= TFHubertSamePadLayer(config.num_conv_pos_embeddings) self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor) ->", "Returns: Example: ```python >>> import tensorflow as tf >>> from", "{shape_list(attention_mask)}\", ) attention_mask = tf.cast(attention_mask, dtype=attn_weights.dtype) attn_weights = tf.reshape(attn_weights, (bsz,", "(logits,) + outputs[1:] return ((loss,) + output) if 
loss is", "output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = outputs[0] hidden_states =", "signature[name].default) # When creating a SavedModel TF calls the method", "of shape `({0})`, *optional*): Segment token indices to indicate first", "config_class = HubertConfig base_model_prefix = \"hubert\" main_input_name = \"input_values\" @property", "ds = load_dataset(\"hf-internal-testing/librispeech_asr_dummy\", \"clean\", split=\"validation\") >>> ds = ds.map(map_to_array) >>>", "input_values = tf.convert_to_tensor(np.random.rand(1, 16000), tf.float32) dummy_inputs = { \"input_values\": input_values,", "\"decoder_cached_states\" in input_values: warnings.warn( \"The `decoder_cached_states` argument is deprecated and", "shape Args: shape: the the shape for which to compute", "disable the gradient computation for the feature encoder so that", "tf.nn.softmax(attn_weights, axis=-1) if layer_head_mask is not None: # The tf.debugging", "encoder_outputs = self.encoder( hidden_states, attention_mask=attention_mask, output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], )", "will not be updated during training. \"\"\" self.hubert.feature_extractor.trainable = False", "the convolutional layers \"\"\" def _conv_out_length(input_length, kernel_size, stride): # 1D", "without any specific head on top.\", HUBERT_START_DOCSTRING, ) class TFHubertModel(TFHubertPreTrainedModel):", "{shape_list(attn_output)}\", ) attn_output = tf.transpose( tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)),", "and another one for the unexpected layers. 
\"\"\" signature =", "HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config self.feature_extractor = TFHubertFeatureEncoder(config, name=\"feature_extractor\")", "training=training, kwargs_call=kwargs, ) hidden_states = self.feature_extractor( tf.cast(inputs[\"input_values\"], tf.float32), training=inputs[\"training\"] )", "inputs_embeds (`np.ndarray` or `tf.Tensor` of shape `({0}, hidden_size)`, *optional*): Optionally,", "= config self.feature_extractor = TFHubertFeatureEncoder(config, name=\"feature_extractor\") self.feature_projection = TFHubertFeatureProjection(config, name=\"feature_projection\")", "the TensorFlow model. config ([`PretrainedConfig`]): The config of the running", "self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint = tf.keras.constraints.get(beta_constraint) self.gamma_constraint = tf.keras.constraints.get(gamma_constraint) self._check_axis()", "indices to masked spans spec_aug_mask_idxs = tf.expand_dims(spec_aug_mask_idxs, -1) spec_aug_mask_idxs =", "if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attention_mask), [bsz, 1, tgt_len, src_len], message=f\"Attention mask", "= tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"q_proj\") self.v_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"v_proj\") self.out_proj", "bool = False, ) -> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states,", "input else: raise ValueError( f\"Data of type {type(input)} is not", "`inputs` argument is deprecated and will be removed in a", "gamma, beta def _check_if_input_shape_is_none(self, input_shape): dim = input_shape[self.axis] if dim", "True, training: Optional[bool] = False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: all_hidden_states", "self.dropout = tf.keras.layers.Dropout(config.final_dropout) self.lm_head = tf.keras.layers.Dense(config.vocab_size, name=\"lm_head\") def freeze_feature_extractor(self): \"\"\"", "filled with -100 # when not being 
attended to labels_mask", "\" \"to train/fine-tine this model, you need a GPU or", "time since TF 2.7, so we need to account for", "dim = input_shape[self.axis] shape = (dim,) if self.scale: self.gamma =", "output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = encoder_outputs[0] if not inputs[\"return_dict\"]:", "(hidden_states,) if not return_dict: return tuple(v for v in [hidden_states,", "= scale self.beta_initializer = tf.keras.initializers.get(beta_initializer) self.gamma_initializer = tf.keras.initializers.get(gamma_initializer) self.beta_regularizer =", "attn_output, attn_weights, past_key_value # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2FeedForward with Wav2Vec2->Hubert class", "you want more control over how to convert `input_values` indices", "= self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding with Wav2Vec2->Hubert", "Inc. team. All rights reserved. 
# # Licensed under the", "name=\"feature_extractor\") self.feature_projection = TFHubertFeatureProjection(config, name=\"feature_projection\") if config.do_stable_layer_norm: self.encoder = TFHubertEncoderStableLayerNorm(config,", "True, scale: bool = True, beta_initializer: tf.keras.initializers.Initializer = \"zeros\", gamma_initializer:", "else: raise ValueError(f\"Data of type {type(v)} is not allowed only", "tgt_len, src_len)) attn_weights = tf.nn.softmax(attn_weights, axis=-1) if layer_head_mask is not", "is None: output[k] = v elif k not in parameter_names", "output_attentions=output_attentions, training=training, ) hidden_states = layer_outputs[0] if output_attentions: all_self_attentions =", "self._create_broadcast_shape(input_shape) gamma = None beta = None if self.scale: gamma", "= False, ) -> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states =", "= tf.transpose(kernel) def build(self, input_shape): if not self.built: input_shape =", "= None, training: Optional[bool] = False, ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]:", ">>> ds = ds.map(map_to_array) >>> input_values = processor(ds[\"speech\"][0], return_tensors=\"tf\").input_values #", "= self.dropout(attn_weights, training=training) attn_output = tf.matmul(attn_probs, value_states) # The tf.debugging", "else: attention_mask = None position_embeddings = self.pos_conv_embed(hidden_states) hidden_states = hidden_states", "the position embeddings. 
Selected in the range `[0, config.max_position_embeddings -", "TFHubertEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.config = config", "with Wav2Vec2->Hubert class TFHubertPositionalConvEmbedding(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs: Any)", "mask_time_indices is not None: # apply SpecAugment along time axis", "[ \"facebook/hubert-base-ls960\", # See all Hubert models at https://huggingface.co/models?filter=hubert ]", "returned tensors for more detail. This argument can be used", "ValueError( \"Number of groups (\" + str(self.groups) + \") must", "shape=(self.filters,), initializer=\"zeros\", trainable=True) def call(self, inputs): if not self.initialized: self._init_norm()", "example must have the shape `({0})`): Indices of input sequence", "-> None: super().__init__(**kwargs) self.in_conv_dim = config.conv_dim[layer_id] if layer_id > 0", "training=training, ) inputs[\"output_hidden_states\"] = ( inputs[\"output_hidden_states\"] if inputs[\"output_hidden_states\"] else self.config.output_hidden_states", "Model transformer outputing raw hidden-states without any specific head on", "name=\"intermediate_dense\", ) self.intermediate_act_fn = get_tf_activation(config.hidden_act) self.output_dense = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range),", "i in range(config.num_feat_extract_layers - 1) ] elif config.feat_extract_norm == \"layer\":", "\"gamma_constraint\": tf.keras.constraints.serialize(self.gamma_constraint), } base_config = super().get_config() return {**base_config, **config} def", "length formula taken # from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return (input_length - kernel_size)", "True, beta_initializer: tf.keras.initializers.Initializer = \"zeros\", gamma_initializer: tf.keras.initializers.Initializer = \"ones\", beta_regularizer:", "def call(self, 
input_values): hidden_states = tf.expand_dims(input_values, -1) for conv_layer in", "tf.newaxis, :] offsets = tf.tile(offsets, (batch_size, num_masked_spans, 1)) offsets =", "are not compliant with XLA then they # have to", "self.config.conv_stride): input_lengths = _conv_out_length(input_lengths, kernel_size, stride) return input_lengths def _mask_hidden_states(self,", "output[k] = v elif k not in parameter_names and \"args\"", "output[k] = v else: raise ValueError(f\"Data of type {type(v)} is", "are **not masked**, - 0 for tokens that are **masked**.", "= tf.keras.layers.Dropout(config.hidden_dropout) def call(self, hidden_states: tf.Tensor, training: bool = False)", "logits = model(input_values).logits >>> predicted_ids = tf.argmax(logits, axis=-1) >>> transcription", "(dim,) if self.scale: self.gamma = self.add_weight( shape=shape, name=\"gamma\", initializer=self.gamma_initializer, regularizer=self.gamma_regularizer,", "else: raise ValueError( f\"`config.feat_extract_norm` is {config.feat_extract_norm}, but has to be", "output_attentions: Optional[bool] = False, training: bool = False, ) ->", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding with Wav2Vec2->Hubert class TFHubertPositionalConvEmbedding(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "= all_hidden_states + (hidden_states,) # add LayerDrop (see https://arxiv.org/abs/1909.11556 for", "self.axis - 1 else: axis = -1 if self.axis ==", "<= vocab_size: {self.config.vocab_size}\") attention_mask = ( inputs[\"attention_mask\"] if inputs[\"attention_mask\"] is", "return outputs def get_config(self): config = { \"groups\": self.groups, \"axis\":", "k, v in kwargs.items(): if isinstance(v, allowed_types) or v is", "allowed only {allowed_types} is accepted for {k}.\") else: if isinstance(input_values,", "as tf from ...activations_tf import get_tf_activation from ...modeling_tf_outputs import TFBaseModelOutput,", "output[\"args\"] is not None and 
type(output[\"args\"]) == tf.Tensor: tensor_name =", "gamma_constraint: tf.keras.constraints.Constraint = None, **kwargs, ): super().__init__(**kwargs) self.supports_masking = True", "return (hidden_states,) + encoder_outputs[1:] return TFBaseModelOutput( last_hidden_state=hidden_states, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, )", "-> None: super().__init__(**kwargs) if config.feat_extract_norm == \"group\": conv_layers = [TFHubertGroupNormConvLayer(config,", "!= self.embed_dim: raise ValueError( f\"embed_dim must be divisible by num_heads", "if num_masked_spans * mask_length > sequence_length: num_masked_spans = sequence_length //", "build(self, input_shape: tf.TensorShape): self.masked_spec_embed = self.add_weight( shape=(self.config.hidden_size,), initializer=\"uniform\", trainable=True, name=\"masked_spec_embed\"", "} output.update(booleans_processing(config=config, **boolean_dict)) return output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2._sample_without_replacement def", "{type(v)} is not allowed only {allowed_types} is accepted for {k}.\")", "tf.constant(1.0) mask = tf.cast(mask, dtype=one_cst.dtype) expanded_mask = tf.tile(mask[:, None, None,", "all_self_attentions + (layer_outputs[1],) # Add last layer if output_hidden_states: all_hidden_states", "tf.ones_like(inputs[\"input_values\"], dtype=tf.float32) ) input_lengths = self.hubert._get_feat_extract_output_lengths(tf.reduce_sum(attention_mask, axis=-1)) # assuming that", "bool = False, ) -> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states", "calls the method with LayerCall.__call__(args, **kwargs) # So to respect", "(layer_outputs[1],) hidden_states = self.layer_norm(hidden_states) if output_hidden_states: all_hidden_states = all_hidden_states +", "1 else: axis = -1 if self.axis == -1 else", "tf.Tensor: hidden_states = self.conv(hidden_states) hidden_states = self.layer_norm(hidden_states) hidden_states = 
self.activation(hidden_states)", ") super().build(input_shape) def _get_feat_extract_output_lengths(self, input_lengths: tf.Tensor): \"\"\" Computes the output", "Batch size 1 >>> logits = model(input_values).logits >>> predicted_ids =", "outputs = tf.reshape(normalized_inputs, tensor_input_shape) else: outputs = normalized_inputs return outputs", "range(len(input_shape))] is_instance_norm = (input_shape[self.axis] // self.groups) == 1 if not", "kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"intermediate_dense\", ) self.intermediate_act_fn = get_tf_activation(config.hidden_act) self.output_dense = tf.keras.layers.Dense(", "an input with shape \" + str(input_shape) + \".\" )", "models at https://huggingface.co/models?filter=hubert ] LARGE_NEGATIVE = -1e8 # Copied from", "elif config.feat_extract_norm == \"layer\": conv_layers = [ TFHubertLayerNormConvLayer(config, layer_id=i, name=f\"conv_layers.{i}\")", "compute_output_shape(self, input_shape): return input_shape def _reshape_into_groups(self, inputs, input_shape, tensor_input_shape): group_shape", "and will be ignored.\" ) continue else: raise ValueError(f\"Data of", "...activations_tf import get_tf_activation from ...modeling_tf_outputs import TFBaseModelOutput, TFCausalLMOutput from ...modeling_tf_utils", "a future version, use `past_key_values` instead.\", FutureWarning, ) output[\"past_key_values\"] =", "broad_casted_batch_dims = tf.reshape( tf.broadcast_to(tf.expand_dims(tf.range(indices_shape[0]), axis=-1), indices_shape), [1, -1] ) #", "not None: # apply SpecAugment along time axis with given", "spec_aug_mask # Copied from transformers.models.bart.modeling_tf_bart._expand_mask def _expand_mask(mask: tf.Tensor, tgt_len: Optional[int]", "`[0, 1]`: - 1 indicates the head is **not masked**,", "and the HuggingFace Inc. team. All rights reserved. # #", "allow to use the .name property so we check for", "mask approximately this percentage of all elements. 
however due to", "1 >>> logits = model(input_values).logits >>> predicted_ids = tf.argmax(logits, axis=-1)", "class with all the parameters of the model. Initializing with", "def _check_axis(self): if self.axis == 0: raise ValueError( \"You are", "\" \"but the layer received an input with shape \"", "tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"output_dense\", ) self.output_dropout = tf.keras.layers.Dropout(config.hidden_dropout) def", "32, axis: int = -1, epsilon: float = 1e-3, center:", "= self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2GroupNormConvLayer with Wav2Vec2->Hubert", "this percentage of all elements. however due to overlaps, the", "mask_prob=self.config.mask_time_prob, mask_length=self.config.mask_time_length, min_masks=2, ) hidden_states = tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis],", "to be bigger than 0.\") if mask_length > sequence_length: raise", "tf.newaxis, :], hidden_states, 0) return hidden_states def call( self, input_values:", "None, past_key_values_length: int = 0): \"\"\" Expands attention_mask from `[bsz,", "ValueError( \"You are trying to normalize your batch axis. Do", "tensors will not be guaranteed during the training. 
Args: func", ") self.dropout = tf.keras.layers.Dropout(rate=config.feat_proj_dropout) def call(self, hidden_states: tf.Tensor, training: bool", "# Copied from transformers.models.bart.modeling_tf_bart.TFBartAttention with TFBart->TFHubert class TFHubertAttention(tf.keras.layers.Layer): \"\"\"Multi-headed attention", "== -1 else self.axis - 1 group_reduction_axes.pop(axis) mean, variance =", "range(config.num_hidden_layers) ] def call( self, hidden_states: tf.Tensor, attention_mask: Optional[tf.Tensor] =", "regularizer=self.gamma_regularizer, constraint=self.gamma_constraint, ) else: self.gamma = None def _add_beta_weight(self, input_shape):", "in range(config.num_feat_extract_layers - 1) ] elif config.feat_extract_norm == \"layer\": conv_layers", "convert `input_values` indices into associated vectors than the model's internal", "on padding token indices. Mask values selected in `[0, 1]`:", "input_values, \"token_type_ids\": token_type_ids})` </Tip> Args: config ([`HubertConfig`]): Model configuration class", "and behavior. <Tip> TF 2.0 models accepts two formats as", "self.hubert.feature_extractor.trainable = False @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=TFCausalLMOutput, config_class=_CONFIG_FOR_DOC) def call( self, input_values:", "class TFHubertLayerNormConvLayer(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, layer_id: int = 0,", "# So to respect the proper output we have to", "other modes than eager. 
if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(layer_head_mask), [self.num_heads], message=f\"Head", "mask_length=self.config.mask_time_length, min_masks=2, ) hidden_states = tf.where( tf.cast(mask_time_indices[:, :, tf.newaxis], tf.bool),", "str(input_shape) + \".\" ) def _set_number_of_groups_for_instance_norm(self, input_shape): dim = input_shape[self.axis]", "layer output length formula taken # from https://pytorch.org/docs/stable/generated/torch.nn.Conv1d.html return (input_length", "= False, return_dict: Optional[bool] = True, training: Optional[bool] = False,", "variance = tf.nn.moments(reshaped_inputs, group_reduction_axes, keepdims=True) gamma, beta = self._get_reshaped_weights(input_shape) normalized_inputs", "position_ids=position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, training=training, ) outputs =", "transformers.models.wav2vec2.modeling_tf_wav2vec2._scatter_values_on_batch_indices def _scatter_values_on_batch_indices(values, batch_indices, output_shape): \"\"\" Scatter function as in", "-self.num_pad_remove, :] return hidden_states class TFHubertFeatureEncoder(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] = None, training:", "hidden_states class TFHubertFeatureExtractor(TFHubertFeatureEncoder): def __init__(self, config, **kwargs): super().__init__(config, **kwargs) warnings.warn(", "in the docstring: `model([input_values, attention_mask])` or `model([input_values, attention_mask, token_type_ids])` -", "= is_decoder self.k_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"k_proj\") self.q_proj = tf.keras.layers.Dense(embed_dim,", "to [SpecAugment](https://arxiv.org/abs/1904.08779). 
\"\"\" batch_size, sequence_length, hidden_size = shape_list(hidden_states) # `config.apply_spec_augment`", "`input_values` instead.\", FutureWarning, ) output[\"input_values\"] = input_values.pop(\"inputs\") if \"decoder_cached_states\" in", "simple interface for downloading and loading pretrained models. \"\"\" config_class", "all cross attention key/value_states. # Further calls to cross_attention layer", "Optional[bool] = None, output_hidden_states: Optional[bool] = None, return_dict: Optional[bool] =", "_add_gamma_weight(self, input_shape): dim = input_shape[self.axis] shape = (dim,) if self.scale:", "\"The method `freeze_feature_extractor` is deprecated and will be removed in", "however due to overlaps, the actual number will be smaller", "LayerCall.__call__(args, **kwargs) # So to respect the proper output we", "has backpropagation operations that are NOT supported on CPU. If", "outputs += (attn_weights,) return outputs # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderLayerStableLayerNorm with", "Optional[bool] = False, ) -> Tuple[tf.Tensor, Optional[tf.Tensor]]: \"\"\"Input shape: Batch", "self.num_heads, tgt_len, src_len)) + attention_mask attn_weights = tf.reshape(attn_weights, (bsz *", "= (input_shape[self.axis] // self.groups) == 1 if not is_instance_norm: outputs", "`mask_length`: {mask_length} and `sequence_length`: {sequence_length}`\" ) # compute number of", "# Batch size 1 >>> logits = model(input_values).logits >>> predicted_ids", "past_key_value is not None: # reuse k,v, cross_attentions key_states =", "requires having all the tensors in the first argument of", "express or implied. 
# See the License for the specific", "with Wav2Vec2->Hubert class TFHubertFeedForward(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs)", "the equivalent `freeze_feature_encoder` method instead.\", FutureWarning, ) self.freeze_feature_encoder() def freeze_feature_encoder(self):", "group_reduction_axes, keepdims=True) gamma, beta = self._get_reshaped_weights(input_shape) normalized_inputs = tf.nn.batch_normalization( reshaped_inputs,", "super().get_config() return {**base_config, **config} def compute_output_shape(self, input_shape): return input_shape def", "embedded representation. This is useful if you want more control", "sampling without replacement is currently not implemented. The gumbel-max trick", "def build(self, input_shape): if not self.built: input_shape = input_shape.as_list() #", "TF 2.0 Keras Model and refer to the TF 2.0", ") self.activation = get_tf_activation(config.feat_extract_activation) def call(self, hidden_states: tf.Tensor) -> tf.Tensor:", "num_heads if (self.head_dim * num_heads) != self.embed_dim: raise ValueError( f\"embed_dim", "hidden_states = hidden_states + self.feed_forward(hidden_states) hidden_states = self.final_layer_norm(hidden_states) outputs =", "self.config.return_dict outputs = self.hubert( input_values=inputs[\"input_values\"], attention_mask=inputs[\"attention_mask\"], token_type_ids=inputs[\"token_type_ids\"], position_ids=inputs[\"position_ids\"], head_mask=inputs[\"head_mask\"], inputs_embeds=inputs[\"inputs_embeds\"],", "including the booleans. 
In case of a list of symbolic", "group_shape = self._reshape_into_groups(inputs, input_shape, tensor_input_shape) normalized_inputs = self._apply_normalization(reshaped_inputs, input_shape) is_instance_norm", "= -1 if self.axis == -1 else self.axis - 1", "if self.config.ctc_loss_reduction == \"sum\": loss = tf.reduce_sum(loss) if self.config.ctc_loss_reduction ==", "strides=config.conv_stride[layer_id], use_bias=config.conv_bias, name=\"conv\", ) self.activation = get_tf_activation(config.feat_extract_activation) self.layer_norm = TFHubertGroupNorm(groups=self.out_conv_dim,", "shape \" + str(input_shape) + \".\" ) def _set_number_of_groups_for_instance_norm(self, input_shape):", "than `sequence_length`, but got `mask_length`: {mask_length} and `sequence_length`: {sequence_length}`\" )", "or not to return a [`~file_utils.ModelOutput`] instead of a plain", "= tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.projection = tf.keras.layers.Dense( units=config.hidden_size, kernel_initializer=get_initializer(config.initializer_range), bias_initializer=\"zeros\", name=\"projection\",", "or input_values is None: output[parameter_names[0]] = input_values else: raise ValueError(", ") class TFHubertFeatureProjection(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.layer_norm", "Optional[tf.Tensor] = None, training: Optional[bool] = False, ) -> Tuple[tf.Tensor,", "downloading and loading pretrained models. \"\"\" config_class = HubertConfig base_model_prefix", "with a `language modeling` head on top for Connectionist Temporal", "training=training) hidden_states = self.output_dense(hidden_states) hidden_states = self.output_dropout(hidden_states, training=training) return hidden_states", "indicates the head is **masked**. 
inputs_embeds (`np.ndarray` or `tf.Tensor` of", "use `past_key_values` instead.\", FutureWarning, ) output[\"past_key_values\"] = input_values.pop(\"decoder_cached_states\") for k,", "normalized_inputs = tf.nn.batch_normalization( reshaped_inputs, mean=mean, variance=variance, scale=gamma, offset=beta, variance_epsilon=self.epsilon, )", "the parameters of the model. Initializing with a config file", "attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs,", "# make sure num masked indices <= sequence_length if num_masked_spans", "not allowed only {allowed_types} is accepted for {k}.\") else: if", "tf.reshape(attn_output, (bsz, self.num_heads, tgt_len, self.head_dim)), (0, 2, 1, 3) )", "self.config.mask_time_prob > 0: # generate indices & apply SpecAugment along", "kwargs_call=kwargs, ) hidden_states = self.feature_extractor( tf.cast(inputs[\"input_values\"], tf.float32), training=inputs[\"training\"] ) if", "output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = outputs[0] hidden_states = self.dropout(hidden_states,", "shape_list(attn_weights), [bsz * self.num_heads, tgt_len, src_len], message=f\"Attention weights should be", "value proj if is_cross_attention and past_key_value is not None: #", "`input_values` only and nothing else: `model(inputs_ids)` - a list of", "shape_list(attention_mask), [bsz, 1, tgt_len, src_len], message=f\"Attention mask should be of", "From tensorflow-addons https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization \"\"\" def __init__( self, groups: int =", "mask_length # SpecAugment mask to fill spec_aug_mask = tf.zeros((batch_size, sequence_length),", "\"\"\" self.hubert.feature_extractor.trainable = False @add_start_docstrings_to_model_forward(HUBERT_INPUTS_DOCSTRING) 
@replace_return_docstrings(output_type=TFCausalLMOutput, config_class=_CONFIG_FOR_DOC) def call( self,", "the superclass documentation for the generic methods the library implements", "for {k}.\") else: if isinstance(input_values, tf.Tensor) or input_values is None:", "epsilon self.center = center self.scale = scale self.beta_initializer = tf.keras.initializers.get(beta_initializer)", "tf.Tensor, seq_len: int, bsz: int): return tf.transpose(tf.reshape(tensor, (bsz, seq_len, self.num_heads,", "get query proj query_states = self.q_proj(hidden_states) * self.scaling # get", "name, i.e. `input_values = tf.keras.Input(shape=(128,), dtype='float32', name=\"input_values\")` otherwise the order", "hidden_states = self.conv(hidden_states) hidden_states = self.padding(hidden_states) hidden_states = self.activation(hidden_states) return", "if attention_mask is not None: hidden_states = hidden_states * tf.expand_dims(attention_mask,", "-> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: \"\"\" Returns: Example: ```python >>> from transformers", "+ \").\" ) def _check_axis(self): if self.axis == 0: raise", ">>> predicted_ids = tf.argmax(logits, axis=-1) >>> transcription = processor.decode(predicted_ids[0]) >>>", "main_input_name = \"input_values\" @property def dummy_inputs(self) -> Dict[str, tf.Tensor]: pad_token", "This second option is useful when using [`tf.keras.Model.fit`] method which", ") if self.config.ctc_loss_reduction == \"sum\": loss = tf.reduce_sum(loss) if self.config.ctc_loss_reduction", "input is None: output[parameter_names[i]] = input else: raise ValueError( f\"Data", "output # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2NoLayerNormConvLayer with Wav2Vec2->Hubert class TFHubertNoLayerNormConvLayer(tf.keras.layers.Layer): def", "else None return TFBaseModelOutput(last_hidden_state=output.last_hidden_state, hidden_states=hs, attentions=attns) @add_start_docstrings( \"\"\"TFHubert Model with", "key_states = past_key_value[0] value_states 
= past_key_value[1] elif is_cross_attention: # cross_attentions", "overlaps, the actual number will be smaller (unless no_overlap is", "// 2, name=\"conv\", ) self.padding = TFHubertSamePadLayer(config.num_conv_pos_embeddings) self.activation = get_tf_activation(config.feat_extract_activation)", "-> None: super().__init__(**kwargs) self.conv = TFHubertWeightNormConv1D( filters=config.hidden_size, kernel_size=config.num_conv_pos_embeddings, groups=config.num_conv_pos_embedding_groups, explicit_padding=config.num_conv_pos_embeddings", "[1] * len(input_shape) is_instance_norm = (input_shape[self.axis] // self.groups) == 1", "tf.reshape(attn_weights, (bsz, self.num_heads, tgt_len, src_len)) return attn_output, attn_weights, past_key_value #", "in the vocabulary. Indices can be obtained using [`BertTokenizer`]. See", "values must be <= vocab_size: {self.config.vocab_size}\") attention_mask = ( inputs[\"attention_mask\"]", "= load_dataset(\"hf-internal-testing/librispeech_asr_dummy\", \"clean\", split=\"validation\") >>> ds = ds.map(map_to_array) >>> input_values", "to False if not getattr(self.config, \"apply_spec_augment\", True): return hidden_states if", "normalized weights.\"\"\" kernel = tf.nn.l2_normalize(self.weight_v, axis=self.kernel_norm_axes) * tf.transpose(self.weight_g) self.kernel =", "*optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead of", "axis=self.kernel_norm_axes) * tf.transpose(self.weight_g) self.kernel = tf.transpose(kernel) def build(self, input_shape): if", "arguments. 
This second option is useful when using [`tf.keras.Model.fit`] method", "self.num_heads, tgt_len, self.head_dim], message=f\"`attn_output` should be of size {(bsz, self.num_heads,", "from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig,", "return normalized_inputs def _get_reshaped_weights(self, input_shape): broadcast_shape = self._create_broadcast_shape(input_shape) gamma =", "`(batch_size, sequence_length)`, *optional*): Labels for computing the masked language modeling", "= logging.get_logger(__name__) _CONFIG_FOR_DOC = \"HubertConfig\" TF_HUBERT_PRETRAINED_MODEL_ARCHIVE_LIST = [ \"facebook/hubert-base-ls960\", #", "raise ValueError( f\"Data of type {type(input_values)} is not allowed only", "kernel_size, stride in zip(self.config.conv_kernel, self.config.conv_stride): input_lengths = _conv_out_length(input_lengths, kernel_size, stride)", "= None): \"\"\" Masks extracted features along time axis and/or", "bias: bool = True, **kwargs, ): super().__init__(**kwargs) self.embed_dim = embed_dim", "1e-3, center: bool = True, scale: bool = True, beta_initializer:", "the model. Initializing with a config file does not load", "output[\"input_values\"] = output[\"args\"] del output[\"args\"] if \"kwargs\" in output: del", "training=inputs[\"training\"]) mask_time_indices = kwargs.get(\"mask_time_indices\", None) if inputs[\"training\"]: hidden_states = self._mask_hidden_states(hidden_states,", "masked**, - 0 indicates the head is **masked**. 
inputs_embeds (`np.ndarray`", "tuple, list, dict, np.ndarray) for k, v in kwargs.items(): if", "**kwargs): super().__init__(config, *inputs, **kwargs) self.hubert = TFHubertMainLayer(config, name=\"hubert\") self.dropout =", "in PyTorch with indices in format (batch_dim, indixes) \"\"\" indices_shape", "hidden_states = self.final_layer_norm(hidden_states) outputs = (hidden_states,) if output_attentions: outputs +=", "to masked spans spec_aug_mask_idxs = tf.expand_dims(spec_aug_mask_idxs, -1) spec_aug_mask_idxs = tf.tile(spec_aug_mask_idxs,", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "name=\"v_proj\") self.out_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"out_proj\") def _shape(self, tensor: tf.Tensor,", "in [hidden_states, all_hidden_states, all_self_attentions] if v is not None) return", "use_bias=bias, name=\"k_proj\") self.q_proj = tf.keras.layers.Dense(embed_dim, use_bias=bias, name=\"q_proj\") self.v_proj = tf.keras.layers.Dense(embed_dim,", "mask_length)) spec_aug_mask_idxs = spec_aug_mask_idxs + offsets # scatter indices to", "portions of the inputs. Indices are selected in `[0, 1]`:", "( inputs[\"output_attentions\"] if inputs[\"output_attentions\"] else self.config.output_attentions ) inputs[\"return_dict\"] = inputs[\"return_dict\"]", "hidden_states = self.activation(hidden_states) return hidden_states # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2PositionalConvEmbedding with", "shape: Batch x Time x Channel\"\"\" # if key_value_states are", "attns = tf.convert_to_tensor(output.attentions) if self.config.output_attentions else None return TFCausalLMOutput(logits=output.logits, hidden_states=hs,", "no_overlap is True) mask_length: size of the mask min_masks: minimum", "attn_weights, (bsz, self.num_heads, tgt_len, src_len) ) attn_weights = tf.reshape(attn_weights, (bsz", "`model(inputs)`. If you choose this second option, there are three", "load the model weights. 
\"\"\" HUBERT_INPUTS_DOCSTRING = r\"\"\" Args: input_values", "scale: bool = True, beta_initializer: tf.keras.initializers.Initializer = \"zeros\", gamma_initializer: tf.keras.initializers.Initializer", "[1, -1] ) # transform batch_indices to pair_indices pair_indices =", "also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. Use it as a regular TF", "should be of size {(bsz * self.num_heads, tgt_len, src_len)}, but", "in batch num_masked_spans = int(mask_prob * sequence_length / mask_length +", "This argument can be used only in eager mode, in", "encoder so that its parameter will not be updated during", "\"args\": output[name] = kwargs.pop(name, signature[name].default) # When creating a SavedModel", "warnings from typing import Any, Dict, Optional, Tuple, Union import", "tf.cast(labels >= 0, tf.int32) target_lengths = tf.reduce_sum(labels_mask, axis=-1) loss =", "be named accordingly to the parameters name, i.e. `input_values =", "not being attended to labels_mask = tf.cast(labels >= 0, tf.int32)", "] else: raise ValueError( f\"`config.feat_extract_norm` is {config.feat_extract_norm}, but has to", "input_lengths = _conv_out_length(input_lengths, kernel_size, stride) return input_lengths def _mask_hidden_states(self, hidden_states:", "return inputs, group_shape def _apply_normalization(self, reshaped_inputs, input_shape): group_shape = tf.keras.backend.int_shape(reshaped_inputs)", "import inspect import warnings from typing import Any, Dict, Optional,", "== 1 if not is_instance_norm: group_shape[self.axis] = input_shape[self.axis] // self.groups", "Version 2.0 (the \"License\"); # you may not use this", "are attention masks?](../glossary#attention-mask) token_type_ids (`np.ndarray` or `tf.Tensor` of shape `({0})`,", "in parameter_names: output[tensor_name] = input else: output[parameter_names[i]] = input elif", "input_shape): dim = input_shape[self.axis] self.input_spec = tf.keras.layers.InputSpec(ndim=len(input_shape), 
axes={self.axis: dim}) def", "Example: ```python >>> import tensorflow as tf >>> from transformers", "= TFHubertGroupNorm(groups=self.out_conv_dim, epsilon=config.layer_norm_eps, name=\"layer_norm\") def call(self, hidden_states: tf.Tensor) -> tf.Tensor:", "= 0.0, is_decoder: bool = False, bias: bool = True,", "(layer_outputs[1],) # Add last layer if output_hidden_states: all_hidden_states = all_hidden_states", "TFCausalLMOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, ) def serving_output(self, output: TFCausalLMOutput)", "layer received an input with shape \" + str(input_shape) +", "= False, ) -> Tuple[tf.Tensor]: attn_residual = hidden_states hidden_states, attn_weights,", "raise ValueError(f\"Data of type {type(v)} is not allowed only {allowed_types}", "= embed_dim self.num_heads = num_heads self.dropout = tf.keras.layers.Dropout(dropout) self.head_dim =", "= shape_list(hidden_states) # `config.apply_spec_augment` can set masking to False if", "name=\"output_dense\", ) self.output_dropout = tf.keras.layers.Dropout(config.hidden_dropout) def call(self, hidden_states: tf.Tensor, training:", "the pattern `name:id` then we check only the # `name`", "vector.\"\"\" kernel_norm = tf.sqrt(tf.reduce_sum(tf.square(self.weight_v), axis=self.kernel_norm_axes)) self.weight_g.assign(kernel_norm[:, tf.newaxis, tf.newaxis]) def _normalize_kernel(self):", "and will be removed in Transformers v5.\" \"Please use the", "for {k}.\") if isinstance(input_values, (tuple, list)): for i, input in", "kwargs.pop(name, signature[name].default) # When creating a SavedModel TF calls the", "- having all inputs as a list, tuple or dict", "of the model. Initializing with a config file does not", "is also a [tf.keras.Model](https://www.tensorflow.org/api_docs/python/tf/keras/Model) subclass. 
Use it as a regular", "call( self, hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor] = None, past_key_value: Optional[Tuple[Tuple[tf.Tensor]]]", "Is All You Need\"\"\" def __init__( self, embed_dim: int, num_heads:", "__init__( self, embed_dim: int, num_heads: int, dropout: float = 0.0,", "in the docstring: `model({\"input_values\": input_values, \"token_type_ids\": token_type_ids})` </Tip> Args: config", "self.gamma = None def _add_beta_weight(self, input_shape): dim = input_shape[self.axis] shape", "handle weights initialization and a simple interface for downloading and", "multiplied by number of timesteps divided by length of mask", "\"\"\" From tensorflow-addons https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization \"\"\" def __init__( self, groups: int", "TFHubertGroupNorm(tf.keras.layers.Layer): \"\"\" From tensorflow-addons https://www.tensorflow.org/addons/api_docs/python/tfa/layers/GroupNormalization \"\"\" def __init__( self, groups:", "= { \"groups\": self.groups, \"axis\": self.axis, \"epsilon\": self.epsilon, \"center\": self.center,", "interface for downloading and loading pretrained models. 
\"\"\" config_class =", "= tf.sqrt(tf.reduce_sum(tf.square(self.weight_v), axis=self.kernel_norm_axes)) self.weight_g.assign(kernel_norm[:, tf.newaxis, tf.newaxis]) def _normalize_kernel(self): \"\"\"Generate normalized", "they # have to be disabled in other modes than", "batch_size, sequence_length, hidden_size = shape_list(hidden_states) # `config.apply_spec_augment` can set masking", "# for the decoder is_cross_attention = key_value_states is not None", "1 >>> hidden_states = model(input_values).last_hidden_state ```\"\"\" inputs = input_values_processing( func=self.call,", "self, groups: int = 32, axis: int = -1, epsilon:", "training: Optional[bool] = False, ) -> Union[TFCausalLMOutput, Tuple[tf.Tensor]]: r\"\"\" labels", "+ (hidden_states,) if not return_dict: return tuple(v for v in", "self.built = True super().build(input_shape) def call(self, inputs): input_shape = tf.keras.backend.int_shape(inputs)", "Optional[bool] = False, ) -> Union[TFCausalLMOutput, Tuple[tf.Tensor]]: r\"\"\" labels (`tf.Tensor`", "self.freeze_feature_encoder() def freeze_feature_encoder(self): \"\"\" Calling this function will disable the", "for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids", "booleans. In case of a list of symbolic inputs, each", "TFHubertEncoderLayerStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs): super().__init__(**kwargs) self.attention = TFHubertAttention(", "self.axis == -1 else self.axis - 1 else: axis =", "computing the masked language modeling loss. 
Indices should be in", "1, 3)) def call( self, hidden_states: tf.Tensor, key_value_states: Optional[tf.Tensor] =", "logging, replace_return_docstrings, ) from .configuration_hubert import HubertConfig logger = logging.get_logger(__name__)", "size {(bsz, self.num_heads, tgt_len, self.head_dim)}, but is {shape_list(attn_output)}\", ) attn_output", "transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm with Wav2Vec2->Hubert class TFHubertEncoderStableLayerNorm(tf.keras.layers.Layer): def __init__(self, config: HubertConfig, **kwargs):", "[`~PreTrainedModel.from_pretrained`] method to load the model weights. \"\"\" HUBERT_INPUTS_DOCSTRING =", "tf.concat([past_key_value[1], value_states], axis=2) else: # self_attention key_states = self._shape(self.k_proj(hidden_states), -1,", "self.layer_norm = tf.keras.layers.LayerNormalization(epsilon=config.layer_norm_eps, name=\"layer_norm\") self.dropout = tf.keras.layers.Dropout(config.hidden_dropout) self.layer = [TFHubertEncoderLayer(config,", "self.groups) == 1 if not is_instance_norm: broadcast_shape[self.axis] = input_shape[self.axis] //", "axis according to [SpecAugment](https://arxiv.org/abs/1904.08779). 
\"\"\" batch_size, sequence_length, hidden_size = shape_list(hidden_states)", "def __init__(self, filters, kernel_size, groups, explicit_padding, **kwargs): super().__init__( filters=filters, kernel_size=kernel_size,", "f\"`mask_length` has to be smaller than `sequence_length`, but got `mask_length`:", "f\"Data of type {type(input)} is not allowed only {allowed_types} is", "self.intermediate_dense(hidden_states) hidden_states = self.intermediate_act_fn(hidden_states) hidden_states = self.intermediate_dropout(hidden_states, training=training) hidden_states =", "training and (dropout_probability < self.config.layerdrop): # skip the layer continue", "self.kernel = tf.transpose(kernel) def build(self, input_shape): if not self.built: input_shape", "spec_aug_mask_idxs = tf.tile(spec_aug_mask_idxs, (1, 1, mask_length)) spec_aug_mask_idxs = tf.reshape(spec_aug_mask_idxs, (batch_size,", "compute real output lengths according to convolution formula output_lengths =", ") continue else: raise ValueError(f\"Data of type {type(v)} is not", "tf.keras.regularizers.get(beta_regularizer) self.gamma_regularizer = tf.keras.regularizers.get(gamma_regularizer) self.beta_constraint = tf.keras.constraints.get(beta_constraint) self.gamma_constraint = tf.keras.constraints.get(gamma_constraint)", "= hidden_states + position_embeddings hidden_states = self.dropout(hidden_states, training=training) for i,", "**kwargs): super().__init__(config, **kwargs) warnings.warn( f\"The class `{self.__class__.__name__}` has been depreciated", "tf.newaxis, :], hidden_states, ) elif self.config.mask_time_prob > 0: # generate", "\"\"\" warnings.warn( \"The method `freeze_feature_extractor` is deprecated and will be", "# self_attention key_states = self._shape(self.k_proj(hidden_states), -1, bsz) value_states = self._shape(self.v_proj(hidden_states),", "self._add_beta_weight(input_shape) self.built = True super().build(input_shape) def call(self, inputs): input_shape =", "tgt_len, embed_dim)) attn_output = 
self.out_proj(attn_output) attn_weights: tf.Tensor = tf.reshape(attn_weights, (bsz,", "distribution to sample from, make sure that offset samples are", "hidden_states=all_hidden_states, attentions=all_self_attentions, ) # Copied from transformers.models.wav2vec2.modeling_tf_wav2vec2.TFWav2Vec2EncoderStableLayerNorm with Wav2Vec2->Hubert class", "tgt_len is not None else src_len one_cst = tf.constant(1.0) mask", "to convert `input_values` indices into associated vectors than the model's", "tf.zeros((batch_size, sequence_length), dtype=tf.int32) # uniform distribution to sample from, make", "attention layers. See `attentions` under returned tensors for more detail.", "modes than eager. if tf.executing_eagerly(): tf.debugging.assert_equal( shape_list(attention_mask), [bsz, 1, tgt_len,", "stride in zip(self.config.conv_kernel, self.config.conv_stride): input_lengths = _conv_out_length(input_lengths, kernel_size, stride) return", "= tf.convert_to_tensor(np.random.rand(1, 16000), tf.float32) dummy_inputs = { \"input_values\": input_values, \"attention_mask\":", "symbolic inputs, each input has to be named accordingly to", "divided by length of mask span to mask approximately this", "self.built: input_shape = input_shape.as_list() # Conv1D output shapes are checked", "groups: int = 32, axis: int = -1, epsilon: float", "not in parameter_names and \"args\" not in parameter_names: logger.warning( f\"The", "so we check for a real Tensor if type(input) ==", "docstring) Tokens with indices set to `-100` are ignored (masked),", "hidden_states, attention_mask=attention_mask, output_attentions=inputs[\"output_attentions\"], output_hidden_states=inputs[\"output_hidden_states\"], return_dict=inputs[\"return_dict\"], training=inputs[\"training\"], ) hidden_states = encoder_outputs[0]", "be removed in a future version, use `input_values` instead.\", FutureWarning,", "training: Optional[bool] = False, ) -> Union[TFBaseModelOutput, Tuple[tf.Tensor]]: all_hidden_states =", ") 
self.explicit_padding = explicit_padding self.filter_axis = 2 self.initialized = False", "kernel_size=kernel_size, groups=groups, padding=\"valid\", use_bias=True, bias_initializer=\"he_normal\", **kwargs, ) self.explicit_padding = explicit_padding", ") -> tf.Tensor: \"\"\" Computes random mask spans for a", "\"You are trying to normalize your batch axis. Do you", "conv_layers = [TFHubertGroupNormConvLayer(config, layer_id=0, name=f\"conv_layers.{0}\")] + [ TFHubertNoLayerNormConvLayer(config, layer_id=i +" ]
[ "guess a letter and respond to the # user based", "result = result + letter else: result = result +", "wordlist: list of strings wordlist = string.split(line) print \" \",", "user to guess a letter and respond to the #", "you will have to know how to use the functions", "from wordlist at random \"\"\" return random.choice(wordlist) # end of", "= '' for letter in secret_word: if letter in guessed_letters:", "Collaborators : <your collaborators> # Time spent : <total time>", "else: available_letters.remove(guess) guessed_letters += guess print 'Good guess: ' +", "elif guess not in secret_word: num_guesses -= 1 available_letters.remove(guess) print", "Hangman # Name : Solutions # Collaborators : <your collaborators>", "if secret_word == partial_word(secret_word, guessed_letters): word_guessed = True if word_guessed:", "You don't need to understand this helper code, # but", "<your collaborators> # Time spent : <total time> # -----------------------------------", "choose_word(wordlist) print 'I am thinking of a word that is", "available_letters: print 'Oops! You\\'ve already guessed that letter: ' +", "# load the list of words into the wordlist variable", "Returns a list of valid words. Words are strings of", "words. Words are strings of lowercase letters. Depending on the", "is not in my word: ' + partial_word(secret_word, guessed_letters) else:", ": <your collaborators> # Time spent : <total time> #", "words into the wordlist variable # so that it can", "\"\"\" wordlist (list): list of words (strings) Returns a word", "'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p',", "\"\"\" print 'Welcome to the game, Hangman!' secret_word = choose_word(wordlist)", "' guesses left.' 
print 'Available letters: ' + ''.join(available_letters) guess", "# Name : Solutions # Collaborators : <your collaborators> #", "word list, this function may take a while to finish.", "code, # but you will have to know how to", "not in secret_word: num_guesses -= 1 available_letters.remove(guess) print 'Oops! That", "1 available_letters.remove(guess) print 'Oops! That letter is not in my", "yet been guessed. \"\"\" result = '' for letter in", "user-visible format, with underscores used to replace characters that have", "list of valid words. Words are strings of lowercase letters.", "letter: ' + partial_word(secret_word, guessed_letters) elif guess not in secret_word:", "will have to know how to use the functions import", "'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']", "+ partial_word(secret_word, guessed_letters) elif guess not in secret_word: num_guesses -=", "be accessed from anywhere in the program wordlist = load_words()", "words (strings) Returns a word from wordlist at random \"\"\"", "result + letter else: result = result + '_' return", "to the # user based on whether the word has", "am thinking of a word that is ' + str(len(secret_word))", "strings of lowercase letters. Depending on the size of the", "long.' num_guesses = 8 word_guessed = False guessed_letters = ''", "not in my word: ' + partial_word(secret_word, guessed_letters) else: available_letters.remove(guess)", "take a while to finish. \"\"\" print \"Loading word list", "num_guesses -= 1 available_letters.remove(guess) print 'Oops! That letter is not", "from file...\" # inFile: file inFile = open(WORDLIST_FILENAME, 'r', 0)", "helper code, # but you will have to know how", "thinking of a word that is ' + str(len(secret_word)) +", "'t', 'u', 'v', 'w', 'x', 'y', 'z'] # Letter-guessing loop.", "word from wordlist at random \"\"\" return random.choice(wordlist) # end", "not in available_letters: print 'Oops! 
You\\'ve already guessed that letter:", "size of the word list, this function may take a", "= True if word_guessed: print 'Congratulations, you won!' else: print", "Set 2 # # Hangman # Name : Solutions #", "a letter:') if guess not in available_letters: print 'Oops! You\\'ve", "num_guesses > 0 and not word_guessed: print '-------------' print 'You", "may take a while to finish. \"\"\" print \"Loading word", "# but you will have to know how to use", "Depending on the size of the word list, this function", "time> # ----------------------------------- # Helper code # You don't need", "\"\"\" Runs the hangman game. \"\"\" print 'Welcome to the", "guessed that letter: ' + partial_word(secret_word, guessed_letters) elif guess not", "guessed_letters += guess print 'Good guess: ' + partial_word(secret_word, guessed_letters)", "need to understand this helper code, # but you will", "list of strings wordlist = string.split(line) print \" \", len(wordlist),", "True if word_guessed: print 'Congratulations, you won!' else: print 'Game", "hangman game. \"\"\" print 'Welcome to the game, Hangman!' secret_word", "+ partial_word(secret_word, guessed_letters) else: available_letters.remove(guess) guessed_letters += guess print 'Good", "file...\" # inFile: file inFile = open(WORDLIST_FILENAME, 'r', 0) #", "= 8 word_guessed = False guessed_letters = '' available_letters =", "used to replace characters that have not yet been guessed.", "+ str(len(secret_word)) + ' letters long.' num_guesses = 8 word_guessed", "guess = raw_input('Please guess a letter:') if guess not in", "list from file...\" # inFile: file inFile = open(WORDLIST_FILENAME, 'r',", "print 'You have ' + str(num_guesses) + ' guesses left.'", "wordlist variable # so that it can be accessed from", "don't need to understand this helper code, # but you", "Runs the hangman game. 
\"\"\" print 'Welcome to the game,", "2 # # Hangman # Name : Solutions # Collaborators", "'_' return result def hangman(): \"\"\" Runs the hangman game.", "of lowercase letters. Depending on the size of the word", "string line = inFile.readline() # wordlist: list of strings wordlist", "'-------------' print 'You have ' + str(num_guesses) + ' guesses", "You\\'ve already guessed that letter: ' + partial_word(secret_word, guessed_letters) elif", "can be accessed from anywhere in the program wordlist =", "result = result + '_' return result def hangman(): \"\"\"", "def hangman(): \"\"\" Runs the hangman game. \"\"\" print 'Welcome", "+ ' guesses left.' print 'Available letters: ' + ''.join(available_letters)", "+ str(num_guesses) + ' guesses left.' print 'Available letters: '", "= open(WORDLIST_FILENAME, 'r', 0) # line: string line = inFile.readline()", "Name : Solutions # Collaborators : <your collaborators> # Time", "not word_guessed: print '-------------' print 'You have ' + str(num_guesses)", "# Helper code # You don't need to understand this", "functions import random import string WORDLIST_FILENAME = \"words.txt\" def load_words():", "= result + '_' return result def hangman(): \"\"\" Runs", "import string WORDLIST_FILENAME = \"words.txt\" def load_words(): \"\"\" Returns a", "while to finish. \"\"\" print \"Loading word list from file...\"", "in secret_word: if letter in guessed_letters: result = result +", "Problem Set 2 # # Hangman # Name : Solutions", "WORDLIST_FILENAME = \"words.txt\" def load_words(): \"\"\" Returns a list of", "and respond to the # user based on whether the", "Return the secret_word in user-visible format, with underscores used to", "'r', 0) # line: string line = inFile.readline() # wordlist:", "load_words(): \"\"\" Returns a list of valid words. 
Words are", "the list of words into the wordlist variable # so", "+ partial_word(secret_word, guessed_letters) if secret_word == partial_word(secret_word, guessed_letters): word_guessed =", "if letter in guessed_letters: result = result + letter else:", "num_guesses = 8 word_guessed = False guessed_letters = '' available_letters", "to finish. \"\"\" print \"Loading word list from file...\" #", "'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x',", "list of words (strings) Returns a word from wordlist at", "underscores used to replace characters that have not yet been", "['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j',", "# 6.00 Problem Set 2 # # Hangman # Name", "wordlist (list): list of words (strings) Returns a word from", "the hangman game. \"\"\" print 'Welcome to the game, Hangman!'", "whether the word has yet been correctly guessed. while num_guesses", "'w', 'x', 'y', 'z'] # Letter-guessing loop. Ask the user", ": <total time> # ----------------------------------- # Helper code # You", "\"\"\" Returns a list of valid words. Words are strings", "my word: ' + partial_word(secret_word, guessed_letters) else: available_letters.remove(guess) guessed_letters +=", "to replace characters that have not yet been guessed. \"\"\"", "== partial_word(secret_word, guessed_letters): word_guessed = True if word_guessed: print 'Congratulations,", "= raw_input('Please guess a letter:') if guess not in available_letters:", "6.00 Problem Set 2 # # Hangman # Name :", "guessed_letters) elif guess not in secret_word: num_guesses -= 1 available_letters.remove(guess)", "accessed from anywhere in the program wordlist = load_words() def", "on the size of the word list, this function may", "import random import string WORDLIST_FILENAME = \"words.txt\" def load_words(): \"\"\"", "load the list of words into the wordlist variable #", "= ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i',", "letter in guessed_letters: result = result + letter else: result", "been guessed. 
\"\"\" result = '' for letter in secret_word:", "a while to finish. \"\"\" print \"Loading word list from", "of a word that is ' + str(len(secret_word)) + '", "variable # so that it can be accessed from anywhere", "'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y',", "print 'I am thinking of a word that is '", "' + partial_word(secret_word, guessed_letters) else: available_letters.remove(guess) guessed_letters += guess print", "letter and respond to the # user based on whether", "Letter-guessing loop. Ask the user to guess a letter and", "program wordlist = load_words() def partial_word(secret_word, guessed_letters): \"\"\" Return the", "inFile: file inFile = open(WORDLIST_FILENAME, 'r', 0) # line: string", "# user based on whether the word has yet been", "game. \"\"\" print 'Welcome to the game, Hangman!' secret_word =", "code # ----------------------------------- # load the list of words into", "+ ' letters long.' num_guesses = 8 word_guessed = False", "str(num_guesses) + ' guesses left.' print 'Available letters: ' +", "guess print 'Good guess: ' + partial_word(secret_word, guessed_letters) if secret_word", "'Welcome to the game, Hangman!' secret_word = choose_word(wordlist) print 'I", "= False guessed_letters = '' available_letters = ['a', 'b', 'c',", "the # user based on whether the word has yet", "'m', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v',", "have not yet been guessed. \"\"\" result = '' for", "on whether the word has yet been correctly guessed. while", "Hangman!' 
secret_word = choose_word(wordlist) print 'I am thinking of a", "partial_word(secret_word, guessed_letters) if secret_word == partial_word(secret_word, guessed_letters): word_guessed = True", "# Time spent : <total time> # ----------------------------------- # Helper", "secret_word == partial_word(secret_word, guessed_letters): word_guessed = True if word_guessed: print", "\"\"\" result = '' for letter in secret_word: if letter", "partial_word(secret_word, guessed_letters): word_guessed = True if word_guessed: print 'Congratulations, you", "'Available letters: ' + ''.join(available_letters) guess = raw_input('Please guess a", "'s', 't', 'u', 'v', 'w', 'x', 'y', 'z'] # Letter-guessing", "(list): list of words (strings) Returns a word from wordlist", "8 word_guessed = False guessed_letters = '' available_letters = ['a',", "= string.split(line) print \" \", len(wordlist), \"words loaded.\" return wordlist", "'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l',", "of strings wordlist = string.split(line) print \" \", len(wordlist), \"words", "'' available_letters = ['a', 'b', 'c', 'd', 'e', 'f', 'g',", "print 'Good guess: ' + partial_word(secret_word, guessed_letters) if secret_word ==", "code # You don't need to understand this helper code,", "to use the functions import random import string WORDLIST_FILENAME =", "'v', 'w', 'x', 'y', 'z'] # Letter-guessing loop. Ask the", "guessed_letters) else: available_letters.remove(guess) guessed_letters += guess print 'Good guess: '", "characters that have not yet been guessed. \"\"\" result =", "of helper code # ----------------------------------- # load the list of", "# # Hangman # Name : Solutions # Collaborators :", "string WORDLIST_FILENAME = \"words.txt\" def load_words(): \"\"\" Returns a list", "loop. Ask the user to guess a letter and respond", "'d', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',", "for letter in secret_word: if letter in guessed_letters: result =", "left.' 
print 'Available letters: ' + ''.join(available_letters) guess = raw_input('Please", "yet been correctly guessed. while num_guesses > 0 and not", "from anywhere in the program wordlist = load_words() def partial_word(secret_word,", "= result + letter else: result = result + '_'", "<total time> # ----------------------------------- # Helper code # You don't", "0) # line: string line = inFile.readline() # wordlist: list", "in the program wordlist = load_words() def partial_word(secret_word, guessed_letters): \"\"\"", "\"words loaded.\" return wordlist def choose_word(wordlist): \"\"\" wordlist (list): list", "available_letters.remove(guess) print 'Oops! That letter is not in my word:", "word: ' + partial_word(secret_word, guessed_letters) else: available_letters.remove(guess) guessed_letters += guess", "know how to use the functions import random import string", "'u', 'v', 'w', 'x', 'y', 'z'] # Letter-guessing loop. Ask", "respond to the # user based on whether the word", "open(WORDLIST_FILENAME, 'r', 0) # line: string line = inFile.readline() #", "print 'Oops! You\\'ve already guessed that letter: ' + partial_word(secret_word,", "+= guess print 'Good guess: ' + partial_word(secret_word, guessed_letters) if", "strings wordlist = string.split(line) print \" \", len(wordlist), \"words loaded.\"", "letters: ' + ''.join(available_letters) guess = raw_input('Please guess a letter:')", "letters. Depending on the size of the word list, this", "'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k',", "valid words. Words are strings of lowercase letters. Depending on", "the word has yet been correctly guessed. while num_guesses >", "hangman(): \"\"\" Runs the hangman game. \"\"\" print 'Welcome to", "' + partial_word(secret_word, guessed_letters) if secret_word == partial_word(secret_word, guessed_letters): word_guessed", "str(len(secret_word)) + ' letters long.' 
num_guesses = 8 word_guessed =", "string.split(line) print \" \", len(wordlist), \"words loaded.\" return wordlist def", "That letter is not in my word: ' + partial_word(secret_word,", "while num_guesses > 0 and not word_guessed: print '-------------' print", "result = '' for letter in secret_word: if letter in", "a word that is ' + str(len(secret_word)) + ' letters", "print \" \", len(wordlist), \"words loaded.\" return wordlist def choose_word(wordlist):", "print \"Loading word list from file...\" # inFile: file inFile", "random \"\"\" return random.choice(wordlist) # end of helper code #", "'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o',", "''.join(available_letters) guess = raw_input('Please guess a letter:') if guess not", "are strings of lowercase letters. Depending on the size of", "helper code # ----------------------------------- # load the list of words", "guessed_letters: result = result + letter else: result = result", "'y', 'z'] # Letter-guessing loop. Ask the user to guess", "in user-visible format, with underscores used to replace characters that", "have ' + str(num_guesses) + ' guesses left.' print 'Available", "# end of helper code # ----------------------------------- # load the", "guess not in available_letters: print 'Oops! You\\'ve already guessed that", "guessed_letters = '' available_letters = ['a', 'b', 'c', 'd', 'e',", "letter else: result = result + '_' return result def", "'x', 'y', 'z'] # Letter-guessing loop. Ask the user to", "' letters long.' 
num_guesses = 8 word_guessed = False guessed_letters", "the program wordlist = load_words() def partial_word(secret_word, guessed_letters): \"\"\" Return", "how to use the functions import random import string WORDLIST_FILENAME", "return wordlist def choose_word(wordlist): \"\"\" wordlist (list): list of words", "list of words into the wordlist variable # so that", "wordlist = load_words() def partial_word(secret_word, guessed_letters): \"\"\" Return the secret_word", "that is ' + str(len(secret_word)) + ' letters long.' num_guesses", "+ '_' return result def hangman(): \"\"\" Runs the hangman", "\" \", len(wordlist), \"words loaded.\" return wordlist def choose_word(wordlist): \"\"\"", "random import string WORDLIST_FILENAME = \"words.txt\" def load_words(): \"\"\" Returns", "'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u',", "len(wordlist), \"words loaded.\" return wordlist def choose_word(wordlist): \"\"\" wordlist (list):", "wordlist = string.split(line) print \" \", len(wordlist), \"words loaded.\" return", "the word list, this function may take a while to", "that it can be accessed from anywhere in the program", "is ' + str(len(secret_word)) + ' letters long.' num_guesses =", "\"\"\" print \"Loading word list from file...\" # inFile: file", "# Letter-guessing loop. 
Ask the user to guess a letter", "available_letters.remove(guess) guessed_letters += guess print 'Good guess: ' + partial_word(secret_word,", "anywhere in the program wordlist = load_words() def partial_word(secret_word, guessed_letters):", "# Hangman # Name : Solutions # Collaborators : <your", "----------------------------------- # Helper code # You don't need to understand", "line = inFile.readline() # wordlist: list of strings wordlist =", "with underscores used to replace characters that have not yet", "word that is ' + str(len(secret_word)) + ' letters long.'", "# ----------------------------------- # load the list of words into the", "based on whether the word has yet been correctly guessed.", "' + partial_word(secret_word, guessed_letters) elif guess not in secret_word: num_guesses", "a list of valid words. Words are strings of lowercase", "# inFile: file inFile = open(WORDLIST_FILENAME, 'r', 0) # line:", "line: string line = inFile.readline() # wordlist: list of strings", "word list from file...\" # inFile: file inFile = open(WORDLIST_FILENAME,", "and not word_guessed: print '-------------' print 'You have ' +", "# wordlist: list of strings wordlist = string.split(line) print \"", "+ ''.join(available_letters) guess = raw_input('Please guess a letter:') if guess", "\"\"\" Return the secret_word in user-visible format, with underscores used", "to the game, Hangman!' secret_word = choose_word(wordlist) print 'I am", "user based on whether the word has yet been correctly", "that letter: ' + partial_word(secret_word, guessed_letters) elif guess not in", "guesses left.' print 'Available letters: ' + ''.join(available_letters) guess =", "letter:') if guess not in available_letters: print 'Oops! 
You\\'ve already", "spent : <total time> # ----------------------------------- # Helper code #", "partial_word(secret_word, guessed_letters) elif guess not in secret_word: num_guesses -= 1", "secret_word: if letter in guessed_letters: result = result + letter", "understand this helper code, # but you will have to", "in secret_word: num_guesses -= 1 available_letters.remove(guess) print 'Oops! That letter", "letter is not in my word: ' + partial_word(secret_word, guessed_letters)", "print 'Available letters: ' + ''.join(available_letters) guess = raw_input('Please guess", "the wordlist variable # so that it can be accessed", "this function may take a while to finish. \"\"\" print", "in guessed_letters: result = result + letter else: result =", "at random \"\"\" return random.choice(wordlist) # end of helper code", "list, this function may take a while to finish. \"\"\"", "raw_input('Please guess a letter:') if guess not in available_letters: print", "the size of the word list, this function may take", "to know how to use the functions import random import", "a word from wordlist at random \"\"\" return random.choice(wordlist) #", "secret_word = choose_word(wordlist) print 'I am thinking of a word", "' + ''.join(available_letters) guess = raw_input('Please guess a letter:') if", "'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',", "'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r',", "= \"words.txt\" def load_words(): \"\"\" Returns a list of valid", "that have not yet been guessed. \"\"\" result = ''", "+ letter else: result = result + '_' return result", "the secret_word in user-visible format, with underscores used to replace", "= inFile.readline() # wordlist: list of strings wordlist = string.split(line)", "'You have ' + str(num_guesses) + ' guesses left.' 
print", "available_letters = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h',", "format, with underscores used to replace characters that have not", "> 0 and not word_guessed: print '-------------' print 'You have", "already guessed that letter: ' + partial_word(secret_word, guessed_letters) elif guess", "load_words() def partial_word(secret_word, guessed_letters): \"\"\" Return the secret_word in user-visible", "if word_guessed: print 'Congratulations, you won!' else: print 'Game over.'", "\"\"\" return random.choice(wordlist) # end of helper code # -----------------------------------", "in available_letters: print 'Oops! You\\'ve already guessed that letter: '", "# so that it can be accessed from anywhere in", "# ----------------------------------- # Helper code # You don't need to", "# line: string line = inFile.readline() # wordlist: list of", "Time spent : <total time> # ----------------------------------- # Helper code", "but you will have to know how to use the", "lowercase letters. Depending on the size of the word list,", "in my word: ' + partial_word(secret_word, guessed_letters) else: available_letters.remove(guess) guessed_letters", "'Good guess: ' + partial_word(secret_word, guessed_letters) if secret_word == partial_word(secret_word,", "random.choice(wordlist) # end of helper code # ----------------------------------- # load", "secret_word in user-visible format, with underscores used to replace characters", "\"words.txt\" def load_words(): \"\"\" Returns a list of valid words.", "'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',", "Ask the user to guess a letter and respond to", "the functions import random import string WORDLIST_FILENAME = \"words.txt\" def", "of the word list, this function may take a while", "guess a letter:') if guess not in available_letters: print 'Oops!", "of words (strings) Returns a word from wordlist at random", "# You don't need to understand this helper code, #", "of valid words. Words are strings of lowercase letters. 
Depending", "finish. \"\"\" print \"Loading word list from file...\" # inFile:", "letter in secret_word: if letter in guessed_letters: result = result", "this helper code, # but you will have to know", "def choose_word(wordlist): \"\"\" wordlist (list): list of words (strings) Returns", "= choose_word(wordlist) print 'I am thinking of a word that", "into the wordlist variable # so that it can be", "def load_words(): \"\"\" Returns a list of valid words. Words", "print '-------------' print 'You have ' + str(num_guesses) + '", "= '' available_letters = ['a', 'b', 'c', 'd', 'e', 'f',", "----------------------------------- # load the list of words into the wordlist", "guess not in secret_word: num_guesses -= 1 available_letters.remove(guess) print 'Oops!", "(strings) Returns a word from wordlist at random \"\"\" return", "partial_word(secret_word, guessed_letters): \"\"\" Return the secret_word in user-visible format, with", "a letter and respond to the # user based on", "secret_word: num_guesses -= 1 available_letters.remove(guess) print 'Oops! That letter is", "Returns a word from wordlist at random \"\"\" return random.choice(wordlist)", "'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q',", "guessed_letters): word_guessed = True if word_guessed: print 'Congratulations, you won!'", "word_guessed = True if word_guessed: print 'Congratulations, you won!' else:", "file inFile = open(WORDLIST_FILENAME, 'r', 0) # line: string line", "return random.choice(wordlist) # end of helper code # ----------------------------------- #", "end of helper code # ----------------------------------- # load the list", "so that it can be accessed from anywhere in the", "correctly guessed. while num_guesses > 0 and not word_guessed: print", "guessed. 
while num_guesses > 0 and not word_guessed: print '-------------'", "\", len(wordlist), \"words loaded.\" return wordlist def choose_word(wordlist): \"\"\" wordlist", "word_guessed: print '-------------' print 'You have ' + str(num_guesses) +", "has yet been correctly guessed. while num_guesses > 0 and", "been correctly guessed. while num_guesses > 0 and not word_guessed:", "use the functions import random import string WORDLIST_FILENAME = \"words.txt\"", "loaded.\" return wordlist def choose_word(wordlist): \"\"\" wordlist (list): list of", "'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n',", "'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z'] #", "partial_word(secret_word, guessed_letters) else: available_letters.remove(guess) guessed_letters += guess print 'Good guess:", "have to know how to use the functions import random", "inFile.readline() # wordlist: list of strings wordlist = string.split(line) print", "the game, Hangman!' secret_word = choose_word(wordlist) print 'I am thinking", "inFile = open(WORDLIST_FILENAME, 'r', 0) # line: string line =", "Solutions # Collaborators : <your collaborators> # Time spent :", "collaborators> # Time spent : <total time> # ----------------------------------- #", "if guess not in available_letters: print 'Oops! You\\'ve already guessed", "word_guessed = False guessed_letters = '' available_letters = ['a', 'b',", "result + '_' return result def hangman(): \"\"\" Runs the", "'Oops! You\\'ve already guessed that letter: ' + partial_word(secret_word, guessed_letters)", ": Solutions # Collaborators : <your collaborators> # Time spent", "guessed_letters) if secret_word == partial_word(secret_word, guessed_letters): word_guessed = True if", "guessed_letters): \"\"\" Return the secret_word in user-visible format, with underscores", "game, Hangman!' 
secret_word = choose_word(wordlist) print 'I am thinking of", "0 and not word_guessed: print '-------------' print 'You have '", "= load_words() def partial_word(secret_word, guessed_letters): \"\"\" Return the secret_word in", "choose_word(wordlist): \"\"\" wordlist (list): list of words (strings) Returns a", "print 'Oops! That letter is not in my word: '", "letters long.' num_guesses = 8 word_guessed = False guessed_letters =", "the user to guess a letter and respond to the", "result def hangman(): \"\"\" Runs the hangman game. \"\"\" print", "replace characters that have not yet been guessed. \"\"\" result", "return result def hangman(): \"\"\" Runs the hangman game. \"\"\"", "print 'Welcome to the game, Hangman!' secret_word = choose_word(wordlist) print", "'I am thinking of a word that is ' +", "Words are strings of lowercase letters. Depending on the size", "of words into the wordlist variable # so that it", "'Oops! That letter is not in my word: ' +", "guessed. \"\"\" result = '' for letter in secret_word: if", "-= 1 available_letters.remove(guess) print 'Oops! That letter is not in", "# Collaborators : <your collaborators> # Time spent : <total", "guess: ' + partial_word(secret_word, guessed_letters) if secret_word == partial_word(secret_word, guessed_letters):", "to understand this helper code, # but you will have", "'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w',", "function may take a while to finish. \"\"\" print \"Loading", "' + str(len(secret_word)) + ' letters long.' num_guesses = 8", "word has yet been correctly guessed. 
while num_guesses > 0", "Helper code # You don't need to understand this helper", "wordlist def choose_word(wordlist): \"\"\" wordlist (list): list of words (strings)", "wordlist at random \"\"\" return random.choice(wordlist) # end of helper", "else: result = result + '_' return result def hangman():", "def partial_word(secret_word, guessed_letters): \"\"\" Return the secret_word in user-visible format,", "it can be accessed from anywhere in the program wordlist", "\"Loading word list from file...\" # inFile: file inFile =", "' + str(num_guesses) + ' guesses left.' print 'Available letters:", "'' for letter in secret_word: if letter in guessed_letters: result", "to guess a letter and respond to the # user", "False guessed_letters = '' available_letters = ['a', 'b', 'c', 'd',", "'z'] # Letter-guessing loop. Ask the user to guess a", "not yet been guessed. \"\"\" result = '' for letter" ]
[ "port) self.image = None self.message_content = None self.sub_order_id = None", "Created by auto_sdk on 2016.04.13 ''' from top.api.base import RestApi", "by auto_sdk on 2016.04.13 ''' from top.api.base import RestApi class", "''' Created by auto_sdk on 2016.04.13 ''' from top.api.base import", "''' from top.api.base import RestApi class FenxiaoRefundMessageAddRequest(RestApi): def __init__(self,domain='gw.api.taobao.com',port=80): RestApi.__init__(self,domain,", "2016.04.13 ''' from top.api.base import RestApi class FenxiaoRefundMessageAddRequest(RestApi): def __init__(self,domain='gw.api.taobao.com',port=80):", "None self.message_content = None self.sub_order_id = None def getapiname(self): return", "FenxiaoRefundMessageAddRequest(RestApi): def __init__(self,domain='gw.api.taobao.com',port=80): RestApi.__init__(self,domain, port) self.image = None self.message_content =", "= None self.sub_order_id = None def getapiname(self): return 'taobao.fenxiao.refund.message.add' def", "top.api.base import RestApi class FenxiaoRefundMessageAddRequest(RestApi): def __init__(self,domain='gw.api.taobao.com',port=80): RestApi.__init__(self,domain, port) self.image", "None self.sub_order_id = None def getapiname(self): return 'taobao.fenxiao.refund.message.add' def getMultipartParas(self):", "= None def getapiname(self): return 'taobao.fenxiao.refund.message.add' def getMultipartParas(self): return ['image']", "from top.api.base import RestApi class FenxiaoRefundMessageAddRequest(RestApi): def __init__(self,domain='gw.api.taobao.com',port=80): RestApi.__init__(self,domain, port)", "on 2016.04.13 ''' from top.api.base import RestApi class FenxiaoRefundMessageAddRequest(RestApi): def", "RestApi.__init__(self,domain, port) self.image = None self.message_content = None self.sub_order_id =", "def __init__(self,domain='gw.api.taobao.com',port=80): RestApi.__init__(self,domain, port) self.image = None self.message_content = None", "__init__(self,domain='gw.api.taobao.com',port=80): 
RestApi.__init__(self,domain, port) self.image = None self.message_content = None self.sub_order_id", "<reponame>forestsheep/middleman ''' Created by auto_sdk on 2016.04.13 ''' from top.api.base", "self.image = None self.message_content = None self.sub_order_id = None def", "self.sub_order_id = None def getapiname(self): return 'taobao.fenxiao.refund.message.add' def getMultipartParas(self): return", "auto_sdk on 2016.04.13 ''' from top.api.base import RestApi class FenxiaoRefundMessageAddRequest(RestApi):", "= None self.message_content = None self.sub_order_id = None def getapiname(self):", "class FenxiaoRefundMessageAddRequest(RestApi): def __init__(self,domain='gw.api.taobao.com',port=80): RestApi.__init__(self,domain, port) self.image = None self.message_content", "RestApi class FenxiaoRefundMessageAddRequest(RestApi): def __init__(self,domain='gw.api.taobao.com',port=80): RestApi.__init__(self,domain, port) self.image = None", "self.message_content = None self.sub_order_id = None def getapiname(self): return 'taobao.fenxiao.refund.message.add'", "import RestApi class FenxiaoRefundMessageAddRequest(RestApi): def __init__(self,domain='gw.api.taobao.com',port=80): RestApi.__init__(self,domain, port) self.image =" ]
[ "be used. When -1 is set all data is used.\")", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "# limitations under the License. def get_args(batch_size=8, image_size=256, max_iter=100000): \"\"\"", "parameters from\") parser.add_argument(\"--train-samples\", type=int, default=-1, help=\"Number of data to be", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "file except in compliance with the License. # You may", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "Unless required by applicable law or agreed to in writing,", "Arguments set the default values of command line arguments. \"\"\"", "\"half\".') parser.add_argument(\"--img-path\", type=str, default=\"~/AnimalFace-dog\", help=\"Image path.\") parser.add_argument(\"--image-size\", type=int, default=image_size, help=\"Image", "parser.add_argument(\"--img-path\", type=str, default=\"~/AnimalFace-dog\", help=\"Image path.\") parser.add_argument(\"--image-size\", type=int, default=image_size, help=\"Image size.\")", "help=\"Number of latent variables.\") parser.add_argument(\"--monitor-path\", type=str, default=\"./result/tmp\", help=\"Monitor path.\") parser.add_argument(\"--model-load-path\",", "default=-1, help=\"Number of data to be used. When -1 is", "mode=\"train\"): from nnabla import logger import os if not os.path.exists(args.monitor_path):", "os if not os.path.exists(args.monitor_path): os.makedirs(args.monitor_path) path = \"{}/Arguments-{}.txt\".format(args.monitor_path, mode) logger.info(\"Arguments", "<filename>image-generation/slegan/args.py # Copyright 2021 Sony Corporation. 
# Copyright 2021 Sony", "distributed under the License is distributed on an \"AS IS\"", "\"{}/Arguments-{}.txt\".format(args.monitor_path, mode) logger.info(\"Arguments are saved to {}.\".format(path)) with open(path, \"w\")", "of computation. e.g. \"float\", \"half\".') parser.add_argument(\"--img-path\", type=str, default=\"~/AnimalFace-dog\", help=\"Image path.\")", "type=str, default=\"~/AnimalFace-dog\", help=\"Image path.\") parser.add_argument(\"--image-size\", type=int, default=image_size, help=\"Image size.\") parser.add_argument(\"--batch-size\",", "parser.add_argument(\"--image-size\", type=int, default=image_size, help=\"Image size.\") parser.add_argument(\"--batch-size\", \"-b\", type=int, default=batch_size, help=\"Batch", "help=\"Interval for testing models.\") parser.add_argument(\"--latent\", type=int, default=256, help=\"Number of latent", "help=\"Monitor path.\") parser.add_argument(\"--model-load-path\", type=str, default=\".\", help=\"Path to load parameters from\")", "nargs=\"+\", default=[\"lrflip\", \"translation\", \"color\"]) args = parser.parse_args() return args def", "os.path.exists(args.monitor_path): os.makedirs(args.monitor_path) path = \"{}/Arguments-{}.txt\".format(args.monitor_path, mode) logger.info(\"Arguments are saved to", "the specific language governing permissions and # limitations under the", "set the default values of command line arguments. \"\"\" import", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "logger import os if not os.path.exists(args.monitor_path): os.makedirs(args.monitor_path) path = \"{}/Arguments-{}.txt\".format(args.monitor_path,", "get_args(batch_size=8, image_size=256, max_iter=100000): \"\"\" Get command line arguments. 
Arguments set", "\"--context\", type=str, default=\"cudnn\", help=\"Context.\") parser.add_argument(\"--type-config\", \"-t\", type=str, default='float', help='Type of", "of Lightweight GAN.\" parser = argparse.ArgumentParser(description) parser.add_argument(\"-d\", \"--device-id\", type=str, default=\"0\",", "default=5000, help=\"Interval for testing models.\") parser.add_argument(\"--latent\", type=int, default=256, help=\"Number of", "default=50000, help=\"Interval for saving models.\") parser.add_argument(\"--test-interval\", type=int, default=5000, help=\"Interval for", "parser.parse_args() return args def save_args(args, mode=\"train\"): from nnabla import logger", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "open(path, \"w\") as fp: for k, v in sorted(vars(args).items()): logger.info(\"{}={}\".format(k,", "except in compliance with the License. # You may obtain", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "is used.\") parser.add_argument(\"--lr\", type=float, default=2e-4, help=\"Learning rate\") parser.add_argument(\"--aug-list\", nargs=\"+\", default=[\"lrflip\",", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "max_iter=100000): \"\"\" Get command line arguments. Arguments set the default", "not use this file except in compliance with the License.", "size.\") parser.add_argument(\"--batch-size\", \"-b\", type=int, default=batch_size, help=\"Batch size.\") parser.add_argument(\"--max-iter\", \"-i\", type=int,", "Sony Corporation. # Copyright 2021 Sony Group Corporation. # #", "help=\"Interval for saving models.\") parser.add_argument(\"--test-interval\", type=int, default=5000, help=\"Interval for testing", "limitations under the License. def get_args(batch_size=8, image_size=256, max_iter=100000): \"\"\" Get", "Get command line arguments. 
Arguments set the default values of", "writing, software # distributed under the License is distributed on", "permissions and # limitations under the License. def get_args(batch_size=8, image_size=256,", "help=\"Number of data to be used. When -1 is set", "in writing, software # distributed under the License is distributed", "as fp: for k, v in sorted(vars(args).items()): logger.info(\"{}={}\".format(k, v)) fp.write(\"{}={}\\n\".format(k,", "parser.add_argument(\"--lr\", type=float, default=2e-4, help=\"Learning rate\") parser.add_argument(\"--aug-list\", nargs=\"+\", default=[\"lrflip\", \"translation\", \"color\"])", "import logger import os if not os.path.exists(args.monitor_path): os.makedirs(args.monitor_path) path =", "logger.info(\"Arguments are saved to {}.\".format(path)) with open(path, \"w\") as fp:", "you may not use this file except in compliance with", "and # limitations under the License. def get_args(batch_size=8, image_size=256, max_iter=100000):", "default=\".\", help=\"Path to load parameters from\") parser.add_argument(\"--train-samples\", type=int, default=-1, help=\"Number", "\"translation\", \"color\"]) args = parser.parse_args() return args def save_args(args, mode=\"train\"):", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "set all data is used.\") parser.add_argument(\"--lr\", type=float, default=2e-4, help=\"Learning rate\")", "rate\") parser.add_argument(\"--aug-list\", nargs=\"+\", default=[\"lrflip\", \"translation\", \"color\"]) args = parser.parse_args() return", "use this file except in compliance with the License. #", "Copyright 2021 Sony Corporation. 
# Copyright 2021 Sony Group Corporation.", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "description = \"Example of Lightweight GAN.\" parser = argparse.ArgumentParser(description) parser.add_argument(\"-d\",", "variables.\") parser.add_argument(\"--monitor-path\", type=str, default=\"./result/tmp\", help=\"Monitor path.\") parser.add_argument(\"--model-load-path\", type=str, default=\".\", help=\"Path", "of latent variables.\") parser.add_argument(\"--monitor-path\", type=str, default=\"./result/tmp\", help=\"Monitor path.\") parser.add_argument(\"--model-load-path\", type=str,", "import os description = \"Example of Lightweight GAN.\" parser =", "\"\"\" Get command line arguments. Arguments set the default values", "default=[\"lrflip\", \"translation\", \"color\"]) args = parser.parse_args() return args def save_args(args,", "latent variables.\") parser.add_argument(\"--monitor-path\", type=str, default=\"./result/tmp\", help=\"Monitor path.\") parser.add_argument(\"--model-load-path\", type=str, default=\".\",", "CONDITIONS OF ANY KIND, either express or implied. # See", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "\"-i\", type=int, default=max_iter, help=\"Max iterations.\") parser.add_argument(\"--save-interval\", type=int, default=50000, help=\"Interval for", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "Copyright 2021 Sony Group Corporation. # # Licensed under the", "language governing permissions and # limitations under the License. def", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "help='Type of computation. e.g. 
\"float\", \"half\".') parser.add_argument(\"--img-path\", type=str, default=\"~/AnimalFace-dog\", help=\"Image", "help=\"Image path.\") parser.add_argument(\"--image-size\", type=int, default=image_size, help=\"Image size.\") parser.add_argument(\"--batch-size\", \"-b\", type=int,", "the default values of command line arguments. \"\"\" import argparse", "for testing models.\") parser.add_argument(\"--latent\", type=int, default=256, help=\"Number of latent variables.\")", "= parser.parse_args() return args def save_args(args, mode=\"train\"): from nnabla import", "# You may obtain a copy of the License at", "\"\"\" import argparse import os description = \"Example of Lightweight", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "type=str, default=\"./result/tmp\", help=\"Monitor path.\") parser.add_argument(\"--model-load-path\", type=str, default=\".\", help=\"Path to load", "used.\") parser.add_argument(\"--lr\", type=float, default=2e-4, help=\"Learning rate\") parser.add_argument(\"--aug-list\", nargs=\"+\", default=[\"lrflip\", \"translation\",", "Corporation. # Copyright 2021 Sony Group Corporation. # # Licensed", "arguments. Arguments set the default values of command line arguments.", "are saved to {}.\".format(path)) with open(path, \"w\") as fp: for", "Sony Group Corporation. # # Licensed under the Apache License,", "under the License is distributed on an \"AS IS\" BASIS,", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "help=\"Context.\") parser.add_argument(\"--type-config\", \"-t\", type=str, default='float', help='Type of computation. e.g. \"float\",", "path.\") parser.add_argument(\"--model-load-path\", type=str, default=\".\", help=\"Path to load parameters from\") parser.add_argument(\"--train-samples\",", "License for the specific language governing permissions and # limitations", "the License. 
def get_args(batch_size=8, image_size=256, max_iter=100000): \"\"\" Get command line", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "type=str, default=\".\", help=\"Path to load parameters from\") parser.add_argument(\"--train-samples\", type=int, default=-1,", "type=int, default=256, help=\"Number of latent variables.\") parser.add_argument(\"--monitor-path\", type=str, default=\"./result/tmp\", help=\"Monitor", "type=int, default=image_size, help=\"Image size.\") parser.add_argument(\"--batch-size\", \"-b\", type=int, default=batch_size, help=\"Batch size.\")", "values of command line arguments. \"\"\" import argparse import os", "id.\") parser.add_argument(\"-c\", \"--context\", type=str, default=\"cudnn\", help=\"Context.\") parser.add_argument(\"--type-config\", \"-t\", type=str, default='float',", "type=int, default=batch_size, help=\"Batch size.\") parser.add_argument(\"--max-iter\", \"-i\", type=int, default=max_iter, help=\"Max iterations.\")", "help=\"Max iterations.\") parser.add_argument(\"--save-interval\", type=int, default=50000, help=\"Interval for saving models.\") parser.add_argument(\"--test-interval\",", "parser.add_argument(\"--latent\", type=int, default=256, help=\"Number of latent variables.\") parser.add_argument(\"--monitor-path\", type=str, default=\"./result/tmp\",", "parser.add_argument(\"--model-load-path\", type=str, default=\".\", help=\"Path to load parameters from\") parser.add_argument(\"--train-samples\", type=int,", "parser = argparse.ArgumentParser(description) parser.add_argument(\"-d\", \"--device-id\", type=str, default=\"0\", help=\"Device id.\") parser.add_argument(\"-c\",", "e.g. 
\"float\", \"half\".') parser.add_argument(\"--img-path\", type=str, default=\"~/AnimalFace-dog\", help=\"Image path.\") parser.add_argument(\"--image-size\", type=int,", "default=2e-4, help=\"Learning rate\") parser.add_argument(\"--aug-list\", nargs=\"+\", default=[\"lrflip\", \"translation\", \"color\"]) args =", "parser.add_argument(\"--train-samples\", type=int, default=-1, help=\"Number of data to be used. When", "args def save_args(args, mode=\"train\"): from nnabla import logger import os", "image_size=256, max_iter=100000): \"\"\" Get command line arguments. Arguments set the", "size.\") parser.add_argument(\"--max-iter\", \"-i\", type=int, default=max_iter, help=\"Max iterations.\") parser.add_argument(\"--save-interval\", type=int, default=50000,", "data is used.\") parser.add_argument(\"--lr\", type=float, default=2e-4, help=\"Learning rate\") parser.add_argument(\"--aug-list\", nargs=\"+\",", "mode) logger.info(\"Arguments are saved to {}.\".format(path)) with open(path, \"w\") as", "the License for the specific language governing permissions and #", "parser.add_argument(\"--test-interval\", type=int, default=5000, help=\"Interval for testing models.\") parser.add_argument(\"--latent\", type=int, default=256,", "to {}.\".format(path)) with open(path, \"w\") as fp: for k, v", "= \"Example of Lightweight GAN.\" parser = argparse.ArgumentParser(description) parser.add_argument(\"-d\", \"--device-id\",", "(the \"License\"); # you may not use this file except", "Apache License, Version 2.0 (the \"License\"); # you may not", "type=str, default=\"0\", help=\"Device id.\") parser.add_argument(\"-c\", \"--context\", type=str, default=\"cudnn\", help=\"Context.\") parser.add_argument(\"--type-config\",", "# you may not use this file except in compliance", "command line arguments. Arguments set the default values of command", "either express or implied. 
# See the License for the", "\"float\", \"half\".') parser.add_argument(\"--img-path\", type=str, default=\"~/AnimalFace-dog\", help=\"Image path.\") parser.add_argument(\"--image-size\", type=int, default=image_size,", "all data is used.\") parser.add_argument(\"--lr\", type=float, default=2e-4, help=\"Learning rate\") parser.add_argument(\"--aug-list\",", "return args def save_args(args, mode=\"train\"): from nnabla import logger import", "\"Example of Lightweight GAN.\" parser = argparse.ArgumentParser(description) parser.add_argument(\"-d\", \"--device-id\", type=str,", "OR CONDITIONS OF ANY KIND, either express or implied. #", "2021 Sony Group Corporation. # # Licensed under the Apache", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "type=str, default='float', help='Type of computation. e.g. \"float\", \"half\".') parser.add_argument(\"--img-path\", type=str,", "the License is distributed on an \"AS IS\" BASIS, #", "default=max_iter, help=\"Max iterations.\") parser.add_argument(\"--save-interval\", type=int, default=50000, help=\"Interval for saving models.\")", "help=\"Device id.\") parser.add_argument(\"-c\", \"--context\", type=str, default=\"cudnn\", help=\"Context.\") parser.add_argument(\"--type-config\", \"-t\", type=str,", "under the License. def get_args(batch_size=8, image_size=256, max_iter=100000): \"\"\" Get command", "in compliance with the License. # You may obtain a", "parser.add_argument(\"--monitor-path\", type=str, default=\"./result/tmp\", help=\"Monitor path.\") parser.add_argument(\"--model-load-path\", type=str, default=\".\", help=\"Path to", "software # distributed under the License is distributed on an", "of command line arguments. \"\"\" import argparse import os description", "args = parser.parse_args() return args def save_args(args, mode=\"train\"): from nnabla", "# Copyright 2021 Sony Group Corporation. 
# # Licensed under", "default=image_size, help=\"Image size.\") parser.add_argument(\"--batch-size\", \"-b\", type=int, default=batch_size, help=\"Batch size.\") parser.add_argument(\"--max-iter\",", "type=str, default=\"cudnn\", help=\"Context.\") parser.add_argument(\"--type-config\", \"-t\", type=str, default='float', help='Type of computation.", "# Copyright 2021 Sony Corporation. # Copyright 2021 Sony Group", "# # Unless required by applicable law or agreed to", "path.\") parser.add_argument(\"--image-size\", type=int, default=image_size, help=\"Image size.\") parser.add_argument(\"--batch-size\", \"-b\", type=int, default=batch_size,", "-1 is set all data is used.\") parser.add_argument(\"--lr\", type=float, default=2e-4,", "\"w\") as fp: for k, v in sorted(vars(args).items()): logger.info(\"{}={}\".format(k, v))", "path = \"{}/Arguments-{}.txt\".format(args.monitor_path, mode) logger.info(\"Arguments are saved to {}.\".format(path)) with", "Lightweight GAN.\" parser = argparse.ArgumentParser(description) parser.add_argument(\"-d\", \"--device-id\", type=str, default=\"0\", help=\"Device", "type=float, default=2e-4, help=\"Learning rate\") parser.add_argument(\"--aug-list\", nargs=\"+\", default=[\"lrflip\", \"translation\", \"color\"]) args", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "governing permissions and # limitations under the License. def get_args(batch_size=8,", "nnabla import logger import os if not os.path.exists(args.monitor_path): os.makedirs(args.monitor_path) path", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "type=int, default=-1, help=\"Number of data to be used. When -1", "fp: for k, v in sorted(vars(args).items()): logger.info(\"{}={}\".format(k, v)) fp.write(\"{}={}\\n\".format(k, v))", "line arguments. 
Arguments set the default values of command line", "parser.add_argument(\"-d\", \"--device-id\", type=str, default=\"0\", help=\"Device id.\") parser.add_argument(\"-c\", \"--context\", type=str, default=\"cudnn\",", "Version 2.0 (the \"License\"); # you may not use this", "help=\"Image size.\") parser.add_argument(\"--batch-size\", \"-b\", type=int, default=batch_size, help=\"Batch size.\") parser.add_argument(\"--max-iter\", \"-i\",", "if not os.path.exists(args.monitor_path): os.makedirs(args.monitor_path) path = \"{}/Arguments-{}.txt\".format(args.monitor_path, mode) logger.info(\"Arguments are", "with open(path, \"w\") as fp: for k, v in sorted(vars(args).items()):", "\"color\"]) args = parser.parse_args() return args def save_args(args, mode=\"train\"): from", "save_args(args, mode=\"train\"): from nnabla import logger import os if not", "parser.add_argument(\"--batch-size\", \"-b\", type=int, default=batch_size, help=\"Batch size.\") parser.add_argument(\"--max-iter\", \"-i\", type=int, default=max_iter,", "saving models.\") parser.add_argument(\"--test-interval\", type=int, default=5000, help=\"Interval for testing models.\") parser.add_argument(\"--latent\",", "law or agreed to in writing, software # distributed under", "for saving models.\") parser.add_argument(\"--test-interval\", type=int, default=5000, help=\"Interval for testing models.\")", "data to be used. When -1 is set all data", "command line arguments. \"\"\" import argparse import os description =", "{}.\".format(path)) with open(path, \"w\") as fp: for k, v in", "type=int, default=5000, help=\"Interval for testing models.\") parser.add_argument(\"--latent\", type=int, default=256, help=\"Number", "\"-t\", type=str, default='float', help='Type of computation. e.g. \"float\", \"half\".') parser.add_argument(\"--img-path\",", "line arguments. \"\"\" import argparse import os description = \"Example", "implied. 
# See the License for the specific language governing", "parser.add_argument(\"--aug-list\", nargs=\"+\", default=[\"lrflip\", \"translation\", \"color\"]) args = parser.parse_args() return args", "from nnabla import logger import os if not os.path.exists(args.monitor_path): os.makedirs(args.monitor_path)", "under the Apache License, Version 2.0 (the \"License\"); # you", "\"License\"); # you may not use this file except in", "Group Corporation. # # Licensed under the Apache License, Version", "parser.add_argument(\"--type-config\", \"-t\", type=str, default='float', help='Type of computation. e.g. \"float\", \"half\".')", "\"-b\", type=int, default=batch_size, help=\"Batch size.\") parser.add_argument(\"--max-iter\", \"-i\", type=int, default=max_iter, help=\"Max", "default=256, help=\"Number of latent variables.\") parser.add_argument(\"--monitor-path\", type=str, default=\"./result/tmp\", help=\"Monitor path.\")", "os.makedirs(args.monitor_path) path = \"{}/Arguments-{}.txt\".format(args.monitor_path, mode) logger.info(\"Arguments are saved to {}.\".format(path))", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "from\") parser.add_argument(\"--train-samples\", type=int, default=-1, help=\"Number of data to be used.", "type=int, default=max_iter, help=\"Max iterations.\") parser.add_argument(\"--save-interval\", type=int, default=50000, help=\"Interval for saving", "type=int, default=50000, help=\"Interval for saving models.\") parser.add_argument(\"--test-interval\", type=int, default=5000, help=\"Interval", "parser.add_argument(\"-c\", \"--context\", type=str, default=\"cudnn\", help=\"Context.\") parser.add_argument(\"--type-config\", \"-t\", type=str, default='float', help='Type", "default=\"cudnn\", help=\"Context.\") parser.add_argument(\"--type-config\", \"-t\", type=str, default='float', help='Type of computation. e.g.", "default values of command line arguments. \"\"\" import argparse import", "License. 
def get_args(batch_size=8, image_size=256, max_iter=100000): \"\"\" Get command line arguments.", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "GAN.\" parser = argparse.ArgumentParser(description) parser.add_argument(\"-d\", \"--device-id\", type=str, default=\"0\", help=\"Device id.\")", "argparse import os description = \"Example of Lightweight GAN.\" parser", "= argparse.ArgumentParser(description) parser.add_argument(\"-d\", \"--device-id\", type=str, default=\"0\", help=\"Device id.\") parser.add_argument(\"-c\", \"--context\",", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "used. When -1 is set all data is used.\") parser.add_argument(\"--lr\",", "is set all data is used.\") parser.add_argument(\"--lr\", type=float, default=2e-4, help=\"Learning", "import os if not os.path.exists(args.monitor_path): os.makedirs(args.monitor_path) path = \"{}/Arguments-{}.txt\".format(args.monitor_path, mode)", "def get_args(batch_size=8, image_size=256, max_iter=100000): \"\"\" Get command line arguments. Arguments", "the License. 
# You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "default=batch_size, help=\"Batch size.\") parser.add_argument(\"--max-iter\", \"-i\", type=int, default=max_iter, help=\"Max iterations.\") parser.add_argument(\"--save-interval\",", "load parameters from\") parser.add_argument(\"--train-samples\", type=int, default=-1, help=\"Number of data to", "When -1 is set all data is used.\") parser.add_argument(\"--lr\", type=float,", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "to in writing, software # distributed under the License is", "testing models.\") parser.add_argument(\"--latent\", type=int, default=256, help=\"Number of latent variables.\") parser.add_argument(\"--monitor-path\",", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "# See the License for the specific language governing permissions", "to load parameters from\") parser.add_argument(\"--train-samples\", type=int, default=-1, help=\"Number of data", "of data to be used. When -1 is set all", "models.\") parser.add_argument(\"--latent\", type=int, default=256, help=\"Number of latent variables.\") parser.add_argument(\"--monitor-path\", type=str,", "import argparse import os description = \"Example of Lightweight GAN.\"", "You may obtain a copy of the License at #", "default='float', help='Type of computation. e.g. \"float\", \"half\".') parser.add_argument(\"--img-path\", type=str, default=\"~/AnimalFace-dog\",", "models.\") parser.add_argument(\"--test-interval\", type=int, default=5000, help=\"Interval for testing models.\") parser.add_argument(\"--latent\", type=int,", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "arguments. 
\"\"\" import argparse import os description = \"Example of", "default=\"./result/tmp\", help=\"Monitor path.\") parser.add_argument(\"--model-load-path\", type=str, default=\".\", help=\"Path to load parameters", "= \"{}/Arguments-{}.txt\".format(args.monitor_path, mode) logger.info(\"Arguments are saved to {}.\".format(path)) with open(path,", "not os.path.exists(args.monitor_path): os.makedirs(args.monitor_path) path = \"{}/Arguments-{}.txt\".format(args.monitor_path, mode) logger.info(\"Arguments are saved", "required by applicable law or agreed to in writing, software", "iterations.\") parser.add_argument(\"--save-interval\", type=int, default=50000, help=\"Interval for saving models.\") parser.add_argument(\"--test-interval\", type=int,", "parser.add_argument(\"--save-interval\", type=int, default=50000, help=\"Interval for saving models.\") parser.add_argument(\"--test-interval\", type=int, default=5000,", "help=\"Learning rate\") parser.add_argument(\"--aug-list\", nargs=\"+\", default=[\"lrflip\", \"translation\", \"color\"]) args = parser.parse_args()", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "computation. e.g. \"float\", \"half\".') parser.add_argument(\"--img-path\", type=str, default=\"~/AnimalFace-dog\", help=\"Image path.\") parser.add_argument(\"--image-size\",", "2021 Sony Corporation. # Copyright 2021 Sony Group Corporation. #", "help=\"Batch size.\") parser.add_argument(\"--max-iter\", \"-i\", type=int, default=max_iter, help=\"Max iterations.\") parser.add_argument(\"--save-interval\", type=int,", "default=\"0\", help=\"Device id.\") parser.add_argument(\"-c\", \"--context\", type=str, default=\"cudnn\", help=\"Context.\") parser.add_argument(\"--type-config\", \"-t\",", "with the License. 
# You may obtain a copy of", "default=\"~/AnimalFace-dog\", help=\"Image path.\") parser.add_argument(\"--image-size\", type=int, default=image_size, help=\"Image size.\") parser.add_argument(\"--batch-size\", \"-b\",", "this file except in compliance with the License. # You", "help=\"Path to load parameters from\") parser.add_argument(\"--train-samples\", type=int, default=-1, help=\"Number of", "def save_args(args, mode=\"train\"): from nnabla import logger import os if", "the Apache License, Version 2.0 (the \"License\"); # you may", "argparse.ArgumentParser(description) parser.add_argument(\"-d\", \"--device-id\", type=str, default=\"0\", help=\"Device id.\") parser.add_argument(\"-c\", \"--context\", type=str,", "os description = \"Example of Lightweight GAN.\" parser = argparse.ArgumentParser(description)", "to be used. When -1 is set all data is", "parser.add_argument(\"--max-iter\", \"-i\", type=int, default=max_iter, help=\"Max iterations.\") parser.add_argument(\"--save-interval\", type=int, default=50000, help=\"Interval", "saved to {}.\".format(path)) with open(path, \"w\") as fp: for k,", "\"--device-id\", type=str, default=\"0\", help=\"Device id.\") parser.add_argument(\"-c\", \"--context\", type=str, default=\"cudnn\", help=\"Context.\")", "Corporation. # # Licensed under the Apache License, Version 2.0" ]
[ "my_content = f.readlines() print(\"\\nUse readlines method\") print(\"-\" * 60) for", "print(\"-\" * 60) for line in my_content: print(line.strip()) print(\"-\" *", "# READ #### f = open(\"my_file.txt\") print(\"\\nLoop directly over file\")", "file\") print(\"-\" * 60) for line in f: print(line.strip()) print(\"-\"", "f = open(\"my_file.txt\") print(\"\\nLoop directly over file\") print(\"-\" * 60)", "for line in my_content.splitlines(): print(line) print(\"-\" * 60) f.close() with", "print(line.strip()) print(\"-\" * 60) # WRITE #### print(\"\\nWriting file.\") f", "APPEND #### print(\"\\nAppending file.\") with open(\"new_file.txt\", \"a\") as f: f.write(\"something", "as f: print(\"\\nUse with and loop over file\") print(\"-\" *", "readlines method\") print(\"-\" * 60) for line in my_content: print(line.strip())", "for line in f: print(line.strip()) print(\"-\" * 60) f.seek(0) my_content", "= open(\"my_file.txt\") print(\"\\nLoop directly over file\") print(\"-\" * 60) for", "with open(\"my_file.txt\") as f: print(\"\\nUse with and loop over file\")", "my_content: print(line.strip()) print(\"-\" * 60) f.seek(0) my_content = f.read() print(\"\\nUse", "f.close() with open(\"my_file.txt\") as f: print(\"\\nUse with and loop over", "print(line.strip()) print(\"-\" * 60) f.seek(0) my_content = f.readlines() print(\"\\nUse readlines", "60) for line in my_content.splitlines(): print(line) print(\"-\" * 60) f.close()", "in my_content.splitlines(): print(line) print(\"-\" * 60) f.close() with open(\"my_file.txt\") as", "f: print(\"\\nUse with and loop over file\") print(\"-\" * 60)", "splitlines\") print(\"-\" * 60) for line in my_content.splitlines(): print(line) print(\"-\"", "print(line) print(\"-\" * 60) f.close() with open(\"my_file.txt\") as f: print(\"\\nUse", "f: print(line.strip()) print(\"-\" * 60) f.seek(0) my_content = f.readlines() print(\"\\nUse", "= open(\"new_file.txt\", \"w\") f.write(\"whatever2\\n\") f.close() # APPEND #### print(\"\\nAppending 
file.\")", "# WRITE #### print(\"\\nWriting file.\") f = open(\"new_file.txt\", \"w\") f.write(\"whatever2\\n\")", "f.readlines() print(\"\\nUse readlines method\") print(\"-\" * 60) for line in", "over file\") print(\"-\" * 60) for line in f: print(line.strip())", "60) for line in f: print(line.strip()) print(\"-\" * 60) f.seek(0)", "line in my_content.splitlines(): print(line) print(\"-\" * 60) f.close() with open(\"my_file.txt\")", "open(\"new_file.txt\", \"w\") f.write(\"whatever2\\n\") f.close() # APPEND #### print(\"\\nAppending file.\") with", "print(\"\\nUse with and loop over file\") print(\"-\" * 60) for", "60) f.seek(0) my_content = f.read() print(\"\\nUse read + splitlines\") print(\"-\"", "print(line.strip()) print(\"-\" * 60) f.seek(0) my_content = f.read() print(\"\\nUse read", "* 60) for line in f: print(line.strip()) print(\"-\" * 60)", "f.close() # APPEND #### print(\"\\nAppending file.\") with open(\"new_file.txt\", \"a\") as", "from __future__ import print_function # READ #### f = open(\"my_file.txt\")", "60) for line in f: print(line.strip()) print(\"-\" * 60) #", "* 60) f.seek(0) my_content = f.read() print(\"\\nUse read + splitlines\")", "<gh_stars>0 #!/usr/bin/env python from __future__ import print_function # READ ####", "60) for line in my_content: print(line.strip()) print(\"-\" * 60) f.seek(0)", "line in f: print(line.strip()) print(\"-\" * 60) f.seek(0) my_content =", "for line in f: print(line.strip()) print(\"-\" * 60) # WRITE", "f: print(line.strip()) print(\"-\" * 60) # WRITE #### print(\"\\nWriting file.\")", "my_content.splitlines(): print(line) print(\"-\" * 60) f.close() with open(\"my_file.txt\") as f:", "* 60) for line in my_content: print(line.strip()) print(\"-\" * 60)", "python from __future__ import print_function # READ #### f =", "file.\") f = open(\"new_file.txt\", \"w\") f.write(\"whatever2\\n\") f.close() # APPEND ####", "print(\"\\nAppending file.\") with open(\"new_file.txt\", \"a\") as f: f.write(\"something 
else\\n\") print()", "import print_function # READ #### f = open(\"my_file.txt\") print(\"\\nLoop directly", "WRITE #### print(\"\\nWriting file.\") f = open(\"new_file.txt\", \"w\") f.write(\"whatever2\\n\") f.close()", "with and loop over file\") print(\"-\" * 60) for line", "open(\"my_file.txt\") print(\"\\nLoop directly over file\") print(\"-\" * 60) for line", "60) f.close() with open(\"my_file.txt\") as f: print(\"\\nUse with and loop", "read + splitlines\") print(\"-\" * 60) for line in my_content.splitlines():", "print(\"\\nLoop directly over file\") print(\"-\" * 60) for line in", "f = open(\"new_file.txt\", \"w\") f.write(\"whatever2\\n\") f.close() # APPEND #### print(\"\\nAppending", "open(\"my_file.txt\") as f: print(\"\\nUse with and loop over file\") print(\"-\"", "#### print(\"\\nWriting file.\") f = open(\"new_file.txt\", \"w\") f.write(\"whatever2\\n\") f.close() #", "f.read() print(\"\\nUse read + splitlines\") print(\"-\" * 60) for line", "print(\"\\nWriting file.\") f = open(\"new_file.txt\", \"w\") f.write(\"whatever2\\n\") f.close() # APPEND", "and loop over file\") print(\"-\" * 60) for line in", "print(\"-\" * 60) f.seek(0) my_content = f.readlines() print(\"\\nUse readlines method\")", "print_function # READ #### f = open(\"my_file.txt\") print(\"\\nLoop directly over", "print(\"-\" * 60) for line in f: print(line.strip()) print(\"-\" *", "loop over file\") print(\"-\" * 60) for line in f:", "line in my_content: print(line.strip()) print(\"-\" * 60) f.seek(0) my_content =", "READ #### f = open(\"my_file.txt\") print(\"\\nLoop directly over file\") print(\"-\"", "* 60) f.close() with open(\"my_file.txt\") as f: print(\"\\nUse with and", "= f.readlines() print(\"\\nUse readlines method\") print(\"-\" * 60) for line", "f.write(\"whatever2\\n\") f.close() # APPEND #### print(\"\\nAppending file.\") with open(\"new_file.txt\", \"a\")", "60) f.seek(0) my_content = f.readlines() print(\"\\nUse readlines method\") print(\"-\" *", "print(\"-\" * 
60) for line in my_content.splitlines(): print(line) print(\"-\" *", "print(\"-\" * 60) f.close() with open(\"my_file.txt\") as f: print(\"\\nUse with", "print(\"\\nUse readlines method\") print(\"-\" * 60) for line in my_content:", "print(\"\\nUse read + splitlines\") print(\"-\" * 60) for line in", "in my_content: print(line.strip()) print(\"-\" * 60) f.seek(0) my_content = f.read()", "60) # WRITE #### print(\"\\nWriting file.\") f = open(\"new_file.txt\", \"w\")", "f.seek(0) my_content = f.read() print(\"\\nUse read + splitlines\") print(\"-\" *", "print(\"-\" * 60) # WRITE #### print(\"\\nWriting file.\") f =", "+ splitlines\") print(\"-\" * 60) for line in my_content.splitlines(): print(line)", "for line in my_content: print(line.strip()) print(\"-\" * 60) f.seek(0) my_content", "line in f: print(line.strip()) print(\"-\" * 60) # WRITE ####", "\"w\") f.write(\"whatever2\\n\") f.close() # APPEND #### print(\"\\nAppending file.\") with open(\"new_file.txt\",", "#!/usr/bin/env python from __future__ import print_function # READ #### f", "print(\"-\" * 60) f.seek(0) my_content = f.read() print(\"\\nUse read +", "* 60) for line in my_content.splitlines(): print(line) print(\"-\" * 60)", "directly over file\") print(\"-\" * 60) for line in f:", "in f: print(line.strip()) print(\"-\" * 60) f.seek(0) my_content = f.readlines()", "__future__ import print_function # READ #### f = open(\"my_file.txt\") print(\"\\nLoop", "my_content = f.read() print(\"\\nUse read + splitlines\") print(\"-\" * 60)", "f.seek(0) my_content = f.readlines() print(\"\\nUse readlines method\") print(\"-\" * 60)", "#### f = open(\"my_file.txt\") print(\"\\nLoop directly over file\") print(\"-\" *", "# APPEND #### print(\"\\nAppending file.\") with open(\"new_file.txt\", \"a\") as f:", "in f: print(line.strip()) print(\"-\" * 60) # WRITE #### print(\"\\nWriting", "#### print(\"\\nAppending file.\") with open(\"new_file.txt\", \"a\") as f: f.write(\"something else\\n\")", "* 60) # WRITE #### 
print(\"\\nWriting file.\") f = open(\"new_file.txt\",", "method\") print(\"-\" * 60) for line in my_content: print(line.strip()) print(\"-\"", "* 60) f.seek(0) my_content = f.readlines() print(\"\\nUse readlines method\") print(\"-\"", "= f.read() print(\"\\nUse read + splitlines\") print(\"-\" * 60) for" ]
[ "int] = { 'limit': 20, 'offset': 0 } datasets: List[DatasetType]", "name=cls.unique_name, dataset_format='ini', content='aGVsbG8sIHdvcmxkIQ==', tags=['test_tag'] ) @classmethod def tearDownClass(cls) -> None:", "because Self-Host will return HTTP 429 Too Many Requests otherwise.", ") cls.unique_name: str = str(uuid.uuid4()) cls.created_dataset: DatasetType = cls.client.create_dataset( name=cls.unique_name,", "self.unique_name) def test_get_dataset(self) -> None: fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'],", "cls.client.create_dataset( name=cls.unique_name, dataset_format='ini', content='aGVsbG8sIHdvcmxkIQ==', tags=['test_tag'] ) @classmethod def tearDownClass(cls) ->", "self.assertEqual(fetched_dataset['name'], self.created_dataset['name']) def test_update_dataset(self) -> None: self.client.update_dataset( dataset_uuid=self.created_dataset['uuid'], name=f'{self.created_dataset[\"name\"]} Updated',", "SelfHostClient = SelfHostClient( base_url='http://127.0.0.1:8080', username='test', password='<PASSWORD>' ) cls.unique_name: str =", "will return HTTP 429 Too Many Requests otherwise. \"\"\" @classmethod", "-> None: # Create and delete happens in setup and", "typing import List, Dict, Any import unittest from selfhost_client import", "dataset_uuid=self.created_dataset['uuid'], name=f'{self.created_dataset[\"name\"]} Updated', dataset_format='json', tags=['updated'] ) fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid'])", "happens in setup and teardown methods. self.assertEqual(self.created_dataset['name'], self.unique_name) def test_get_dataset(self)", "dataset_format='ini', content='aGVsbG8sIHdvcmxkIQ==', tags=['test_tag'] ) @classmethod def tearDownClass(cls) -> None: cls.client.delete_dataset(cls.created_dataset['uuid'])", "Many Requests otherwise. 
\"\"\" @classmethod def setUpClass(cls) -> None: cls.client:", "# Create and delete happens in setup and teardown methods.", "from typing import List, Dict, Any import unittest from selfhost_client", "delete happens in setup and teardown methods. self.assertEqual(self.created_dataset['name'], self.unique_name) def", "@classmethod def setUpClass(cls) -> None: cls.client: SelfHostClient = SelfHostClient( base_url='http://127.0.0.1:8080',", "None: cls.client: SelfHostClient = SelfHostClient( base_url='http://127.0.0.1:8080', username='test', password='<PASSWORD>' ) cls.unique_name:", "otherwise. \"\"\" @classmethod def setUpClass(cls) -> None: cls.client: SelfHostClient =", "None: # Create and delete happens in setup and teardown", "\"\"\" @classmethod def setUpClass(cls) -> None: cls.client: SelfHostClient = SelfHostClient(", "methods. self.assertEqual(self.created_dataset['name'], self.unique_name) def test_get_dataset(self) -> None: fetched_dataset: DatasetType =", "test_create_and_delete_dataset(self) -> None: # Create and delete happens in setup", "datasets: List[DatasetType] = self.client.get_datasets(**params) self.assertIsNotNone(datasets) def test_create_and_delete_dataset(self) -> None: #", "Self-Host will return HTTP 429 Too Many Requests otherwise. 
\"\"\"", "@classmethod def tearDownClass(cls) -> None: cls.client.delete_dataset(cls.created_dataset['uuid']) def test_get_datasets(self) -> None:", "tags=['updated'] ) fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], f'{self.created_dataset[\"name\"]} Updated') self.assertEqual(fetched_dataset['format'],", "def test_create_and_delete_dataset(self) -> None: # Create and delete happens in", "individually because Self-Host will return HTTP 429 Too Many Requests", "base_url='http://127.0.0.1:8080', username='test', password='<PASSWORD>' ) cls.unique_name: str = str(uuid.uuid4()) cls.created_dataset: DatasetType", "= str(uuid.uuid4()) cls.created_dataset: DatasetType = cls.client.create_dataset( name=cls.unique_name, dataset_format='ini', content='aGVsbG8sIHdvcmxkIQ==', tags=['test_tag']", "\"\"\" Run these tests individually because Self-Host will return HTTP", "= SelfHostClient( base_url='http://127.0.0.1:8080', username='test', password='<PASSWORD>' ) cls.unique_name: str = str(uuid.uuid4())", "from selfhost_client import SelfHostClient, DatasetType class TestIntegrationDatasetsClient(unittest.TestCase): \"\"\" Run these", "429 Too Many Requests otherwise. 
\"\"\" @classmethod def setUpClass(cls) ->", "test_get_datasets(self) -> None: params: Dict[str, int] = { 'limit': 20,", "-> None: cls.client: SelfHostClient = SelfHostClient( base_url='http://127.0.0.1:8080', username='test', password='<PASSWORD>' )", "Updated') self.assertEqual(fetched_dataset['format'], 'json') self.assertEqual(fetched_dataset['tags'], ['updated']) def test_get_dataset_raw_content(self) -> None: fetched_content:", "DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], f'{self.created_dataset[\"name\"]} Updated') self.assertEqual(fetched_dataset['format'], 'json') self.assertEqual(fetched_dataset['tags'], ['updated'])", "SelfHostClient, DatasetType class TestIntegrationDatasetsClient(unittest.TestCase): \"\"\" Run these tests individually because", "unittest from selfhost_client import SelfHostClient, DatasetType class TestIntegrationDatasetsClient(unittest.TestCase): \"\"\" Run", "['updated']) def test_get_dataset_raw_content(self) -> None: fetched_content: Any = self.client.get_dataset_raw_content(self.created_dataset['uuid']) self.assertIsNotNone(fetched_content)", "{ 'limit': 20, 'offset': 0 } datasets: List[DatasetType] = self.client.get_datasets(**params)", "Create and delete happens in setup and teardown methods. self.assertEqual(self.created_dataset['name'],", "Dict, Any import unittest from selfhost_client import SelfHostClient, DatasetType class", "these tests individually because Self-Host will return HTTP 429 Too", "cls.client: SelfHostClient = SelfHostClient( base_url='http://127.0.0.1:8080', username='test', password='<PASSWORD>' ) cls.unique_name: str", "Too Many Requests otherwise. 
\"\"\" @classmethod def setUpClass(cls) -> None:", "password='<PASSWORD>' ) cls.unique_name: str = str(uuid.uuid4()) cls.created_dataset: DatasetType = cls.client.create_dataset(", ") @classmethod def tearDownClass(cls) -> None: cls.client.delete_dataset(cls.created_dataset['uuid']) def test_get_datasets(self) ->", "'offset': 0 } datasets: List[DatasetType] = self.client.get_datasets(**params) self.assertIsNotNone(datasets) def test_create_and_delete_dataset(self)", "= self.client.get_datasets(**params) self.assertIsNotNone(datasets) def test_create_and_delete_dataset(self) -> None: # Create and", "self.assertEqual(self.created_dataset['name'], self.unique_name) def test_get_dataset(self) -> None: fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid'])", "= cls.client.create_dataset( name=cls.unique_name, dataset_format='ini', content='aGVsbG8sIHdvcmxkIQ==', tags=['test_tag'] ) @classmethod def tearDownClass(cls)", "params: Dict[str, int] = { 'limit': 20, 'offset': 0 }", "List[DatasetType] = self.client.get_datasets(**params) self.assertIsNotNone(datasets) def test_create_and_delete_dataset(self) -> None: # Create", "cls.unique_name: str = str(uuid.uuid4()) cls.created_dataset: DatasetType = cls.client.create_dataset( name=cls.unique_name, dataset_format='ini',", "self.client.get_datasets(**params) self.assertIsNotNone(datasets) def test_create_and_delete_dataset(self) -> None: # Create and delete", "DatasetType class TestIntegrationDatasetsClient(unittest.TestCase): \"\"\" Run these tests individually because Self-Host", "20, 'offset': 0 } datasets: List[DatasetType] = self.client.get_datasets(**params) self.assertIsNotNone(datasets) def", "f'{self.created_dataset[\"name\"]} Updated') self.assertEqual(fetched_dataset['format'], 'json') self.assertEqual(fetched_dataset['tags'], ['updated']) def test_get_dataset_raw_content(self) -> None:", "'limit': 20, 'offset': 0 } datasets: List[DatasetType] = self.client.get_datasets(**params) 
self.assertIsNotNone(datasets)", "None: self.client.update_dataset( dataset_uuid=self.created_dataset['uuid'], name=f'{self.created_dataset[\"name\"]} Updated', dataset_format='json', tags=['updated'] ) fetched_dataset: DatasetType", "Any import unittest from selfhost_client import SelfHostClient, DatasetType class TestIntegrationDatasetsClient(unittest.TestCase):", "Updated', dataset_format='json', tags=['updated'] ) fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], f'{self.created_dataset[\"name\"]}", "cls.client.delete_dataset(cls.created_dataset['uuid']) def test_get_datasets(self) -> None: params: Dict[str, int] = {", "content='aGVsbG8sIHdvcmxkIQ==', tags=['test_tag'] ) @classmethod def tearDownClass(cls) -> None: cls.client.delete_dataset(cls.created_dataset['uuid']) def", "self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], self.created_dataset['name']) def test_update_dataset(self) -> None: self.client.update_dataset( dataset_uuid=self.created_dataset['uuid'], name=f'{self.created_dataset[\"name\"]}", "username='test', password='<PASSWORD>' ) cls.unique_name: str = str(uuid.uuid4()) cls.created_dataset: DatasetType =", "def test_update_dataset(self) -> None: self.client.update_dataset( dataset_uuid=self.created_dataset['uuid'], name=f'{self.created_dataset[\"name\"]} Updated', dataset_format='json', tags=['updated']", "-> None: cls.client.delete_dataset(cls.created_dataset['uuid']) def test_get_datasets(self) -> None: params: Dict[str, int]", "and delete happens in setup and teardown methods. 
self.assertEqual(self.created_dataset['name'], self.unique_name)", "self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], f'{self.created_dataset[\"name\"]} Updated') self.assertEqual(fetched_dataset['format'], 'json') self.assertEqual(fetched_dataset['tags'], ['updated']) def test_get_dataset_raw_content(self)", "= self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], f'{self.created_dataset[\"name\"]} Updated') self.assertEqual(fetched_dataset['format'], 'json') self.assertEqual(fetched_dataset['tags'], ['updated']) def", "teardown methods. self.assertEqual(self.created_dataset['name'], self.unique_name) def test_get_dataset(self) -> None: fetched_dataset: DatasetType", "import List, Dict, Any import unittest from selfhost_client import SelfHostClient,", "uuid from typing import List, Dict, Any import unittest from", "None: cls.client.delete_dataset(cls.created_dataset['uuid']) def test_get_datasets(self) -> None: params: Dict[str, int] =", "def setUpClass(cls) -> None: cls.client: SelfHostClient = SelfHostClient( base_url='http://127.0.0.1:8080', username='test',", "'json') self.assertEqual(fetched_dataset['tags'], ['updated']) def test_get_dataset_raw_content(self) -> None: fetched_content: Any =", "tearDownClass(cls) -> None: cls.client.delete_dataset(cls.created_dataset['uuid']) def test_get_datasets(self) -> None: params: Dict[str,", "SelfHostClient( base_url='http://127.0.0.1:8080', username='test', password='<PASSWORD>' ) cls.unique_name: str = str(uuid.uuid4()) cls.created_dataset:", "} datasets: List[DatasetType] = self.client.get_datasets(**params) self.assertIsNotNone(datasets) def test_create_and_delete_dataset(self) -> None:", "return HTTP 429 Too Many Requests otherwise. 
\"\"\" @classmethod def", "selfhost_client import SelfHostClient, DatasetType class TestIntegrationDatasetsClient(unittest.TestCase): \"\"\" Run these tests", "tests individually because Self-Host will return HTTP 429 Too Many", "= { 'limit': 20, 'offset': 0 } datasets: List[DatasetType] =", "dataset_format='json', tags=['updated'] ) fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], f'{self.created_dataset[\"name\"]} Updated')", "def test_get_datasets(self) -> None: params: Dict[str, int] = { 'limit':", "HTTP 429 Too Many Requests otherwise. \"\"\" @classmethod def setUpClass(cls)", "class TestIntegrationDatasetsClient(unittest.TestCase): \"\"\" Run these tests individually because Self-Host will", "in setup and teardown methods. self.assertEqual(self.created_dataset['name'], self.unique_name) def test_get_dataset(self) ->", "and teardown methods. self.assertEqual(self.created_dataset['name'], self.unique_name) def test_get_dataset(self) -> None: fetched_dataset:", "List, Dict, Any import unittest from selfhost_client import SelfHostClient, DatasetType", "Dict[str, int] = { 'limit': 20, 'offset': 0 } datasets:", "fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], f'{self.created_dataset[\"name\"]} Updated') self.assertEqual(fetched_dataset['format'], 'json') self.assertEqual(fetched_dataset['tags'],", "Requests otherwise. 
\"\"\" @classmethod def setUpClass(cls) -> None: cls.client: SelfHostClient", "cls.created_dataset: DatasetType = cls.client.create_dataset( name=cls.unique_name, dataset_format='ini', content='aGVsbG8sIHdvcmxkIQ==', tags=['test_tag'] ) @classmethod", "str(uuid.uuid4()) cls.created_dataset: DatasetType = cls.client.create_dataset( name=cls.unique_name, dataset_format='ini', content='aGVsbG8sIHdvcmxkIQ==', tags=['test_tag'] )", "self.assertEqual(fetched_dataset['name'], f'{self.created_dataset[\"name\"]} Updated') self.assertEqual(fetched_dataset['format'], 'json') self.assertEqual(fetched_dataset['tags'], ['updated']) def test_get_dataset_raw_content(self) ->", "import SelfHostClient, DatasetType class TestIntegrationDatasetsClient(unittest.TestCase): \"\"\" Run these tests individually", "Run these tests individually because Self-Host will return HTTP 429", "self.assertEqual(fetched_dataset['format'], 'json') self.assertEqual(fetched_dataset['tags'], ['updated']) def test_get_dataset_raw_content(self) -> None: fetched_content: Any", "-> None: self.client.update_dataset( dataset_uuid=self.created_dataset['uuid'], name=f'{self.created_dataset[\"name\"]} Updated', dataset_format='json', tags=['updated'] ) fetched_dataset:", "self.created_dataset['name']) def test_update_dataset(self) -> None: self.client.update_dataset( dataset_uuid=self.created_dataset['uuid'], name=f'{self.created_dataset[\"name\"]} Updated', dataset_format='json',", "str = str(uuid.uuid4()) cls.created_dataset: DatasetType = cls.client.create_dataset( name=cls.unique_name, dataset_format='ini', content='aGVsbG8sIHdvcmxkIQ==',", "self.assertIsNotNone(datasets) def test_create_and_delete_dataset(self) -> None: # Create and delete happens", "None: params: Dict[str, int] = { 'limit': 20, 'offset': 0", "tags=['test_tag'] ) @classmethod def tearDownClass(cls) -> None: cls.client.delete_dataset(cls.created_dataset['uuid']) def test_get_datasets(self)", "test_update_dataset(self) -> None: 
self.client.update_dataset( dataset_uuid=self.created_dataset['uuid'], name=f'{self.created_dataset[\"name\"]} Updated', dataset_format='json', tags=['updated'] )", "name=f'{self.created_dataset[\"name\"]} Updated', dataset_format='json', tags=['updated'] ) fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'],", "def test_get_dataset(self) -> None: fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], self.created_dataset['name'])", "0 } datasets: List[DatasetType] = self.client.get_datasets(**params) self.assertIsNotNone(datasets) def test_create_and_delete_dataset(self) ->", "def tearDownClass(cls) -> None: cls.client.delete_dataset(cls.created_dataset['uuid']) def test_get_datasets(self) -> None: params:", "fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], self.created_dataset['name']) def test_update_dataset(self) -> None:", "setUpClass(cls) -> None: cls.client: SelfHostClient = SelfHostClient( base_url='http://127.0.0.1:8080', username='test', password='<PASSWORD>'", "-> None: fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], self.created_dataset['name']) def test_update_dataset(self)", "setup and teardown methods. 
self.assertEqual(self.created_dataset['name'], self.unique_name) def test_get_dataset(self) -> None:", "None: fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], self.created_dataset['name']) def test_update_dataset(self) ->", "test_get_dataset(self) -> None: fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], self.created_dataset['name']) def", "self.assertEqual(fetched_dataset['tags'], ['updated']) def test_get_dataset_raw_content(self) -> None: fetched_content: Any = self.client.get_dataset_raw_content(self.created_dataset['uuid'])", "-> None: params: Dict[str, int] = { 'limit': 20, 'offset':", "import uuid from typing import List, Dict, Any import unittest", "import unittest from selfhost_client import SelfHostClient, DatasetType class TestIntegrationDatasetsClient(unittest.TestCase): \"\"\"", ") fetched_dataset: DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], f'{self.created_dataset[\"name\"]} Updated') self.assertEqual(fetched_dataset['format'], 'json')", "self.client.update_dataset( dataset_uuid=self.created_dataset['uuid'], name=f'{self.created_dataset[\"name\"]} Updated', dataset_format='json', tags=['updated'] ) fetched_dataset: DatasetType =", "DatasetType = self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], self.created_dataset['name']) def test_update_dataset(self) -> None: self.client.update_dataset(", "DatasetType = cls.client.create_dataset( name=cls.unique_name, dataset_format='ini', content='aGVsbG8sIHdvcmxkIQ==', tags=['test_tag'] ) @classmethod def", "TestIntegrationDatasetsClient(unittest.TestCase): \"\"\" Run these tests individually because Self-Host will return", "= self.client.get_dataset(self.created_dataset['uuid']) self.assertEqual(fetched_dataset['name'], self.created_dataset['name']) 
def test_update_dataset(self) -> None: self.client.update_dataset( dataset_uuid=self.created_dataset['uuid']," ]