code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def render_path(self, template_path, *context, **kwargs): """ Render the template at the given path using the given context. Read the render() docstring for more information. """ loader = self._make_loader() template = loader.read(template_path) return self._render_string(template, *context, **kwargs)
def function[render_path, parameter[self, template_path]]: constant[ Render the template at the given path using the given context. Read the render() docstring for more information. ] variable[loader] assign[=] call[name[self]._make_loader, parameter[]] variable[template] assign[=] call[name[loader].read, parameter[name[template_path]]] return[call[name[self]._render_string, parameter[name[template], <ast.Starred object at 0x7da2044c10f0>]]]
keyword[def] identifier[render_path] ( identifier[self] , identifier[template_path] ,* identifier[context] ,** identifier[kwargs] ): literal[string] identifier[loader] = identifier[self] . identifier[_make_loader] () identifier[template] = identifier[loader] . identifier[read] ( identifier[template_path] ) keyword[return] identifier[self] . identifier[_render_string] ( identifier[template] ,* identifier[context] ,** identifier[kwargs] )
def render_path(self, template_path, *context, **kwargs): """ Render the template at the given path using the given context. Read the render() docstring for more information. """ loader = self._make_loader() template = loader.read(template_path) return self._render_string(template, *context, **kwargs)
def fill(self, *args): '''Sets a fill color, applying it to new paths. :param args: color in supported format ''' if args is not None: self._canvas.fillcolor = self.color(*args) return self._canvas.fillcolor
def function[fill, parameter[self]]: constant[Sets a fill color, applying it to new paths. :param args: color in supported format ] if compare[name[args] is_not constant[None]] begin[:] name[self]._canvas.fillcolor assign[=] call[name[self].color, parameter[<ast.Starred object at 0x7da18bcc9ba0>]] return[name[self]._canvas.fillcolor]
keyword[def] identifier[fill] ( identifier[self] ,* identifier[args] ): literal[string] keyword[if] identifier[args] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_canvas] . identifier[fillcolor] = identifier[self] . identifier[color] (* identifier[args] ) keyword[return] identifier[self] . identifier[_canvas] . identifier[fillcolor]
def fill(self, *args): """Sets a fill color, applying it to new paths. :param args: color in supported format """ if args is not None: self._canvas.fillcolor = self.color(*args) # depends on [control=['if'], data=['args']] return self._canvas.fillcolor
def parse_section_extras_require(self, section_options): """Parses `extras_require` configuration file section. :param dict section_options: """ parse_list = partial(self._parse_list, separator=';') self['extras_require'] = self._parse_section_to_dict( section_options, parse_list)
def function[parse_section_extras_require, parameter[self, section_options]]: constant[Parses `extras_require` configuration file section. :param dict section_options: ] variable[parse_list] assign[=] call[name[partial], parameter[name[self]._parse_list]] call[name[self]][constant[extras_require]] assign[=] call[name[self]._parse_section_to_dict, parameter[name[section_options], name[parse_list]]]
keyword[def] identifier[parse_section_extras_require] ( identifier[self] , identifier[section_options] ): literal[string] identifier[parse_list] = identifier[partial] ( identifier[self] . identifier[_parse_list] , identifier[separator] = literal[string] ) identifier[self] [ literal[string] ]= identifier[self] . identifier[_parse_section_to_dict] ( identifier[section_options] , identifier[parse_list] )
def parse_section_extras_require(self, section_options): """Parses `extras_require` configuration file section. :param dict section_options: """ parse_list = partial(self._parse_list, separator=';') self['extras_require'] = self._parse_section_to_dict(section_options, parse_list)
def _ParseEventData(self, variable_length_section): """Parses the event data form a variable-length data section. Args: variable_length_section (job_variable_length_data_section): a Windows Scheduled Task job variable-length data section. Returns: WinJobEventData: event data of the job file. """ event_data = WinJobEventData() event_data.application = ( variable_length_section.application_name.rstrip('\x00')) event_data.comment = variable_length_section.comment.rstrip('\x00') event_data.parameters = ( variable_length_section.parameters.rstrip('\x00')) event_data.username = variable_length_section.author.rstrip('\x00') event_data.working_directory = ( variable_length_section.working_directory.rstrip('\x00')) return event_data
def function[_ParseEventData, parameter[self, variable_length_section]]: constant[Parses the event data form a variable-length data section. Args: variable_length_section (job_variable_length_data_section): a Windows Scheduled Task job variable-length data section. Returns: WinJobEventData: event data of the job file. ] variable[event_data] assign[=] call[name[WinJobEventData], parameter[]] name[event_data].application assign[=] call[name[variable_length_section].application_name.rstrip, parameter[constant[]]] name[event_data].comment assign[=] call[name[variable_length_section].comment.rstrip, parameter[constant[]]] name[event_data].parameters assign[=] call[name[variable_length_section].parameters.rstrip, parameter[constant[]]] name[event_data].username assign[=] call[name[variable_length_section].author.rstrip, parameter[constant[]]] name[event_data].working_directory assign[=] call[name[variable_length_section].working_directory.rstrip, parameter[constant[]]] return[name[event_data]]
keyword[def] identifier[_ParseEventData] ( identifier[self] , identifier[variable_length_section] ): literal[string] identifier[event_data] = identifier[WinJobEventData] () identifier[event_data] . identifier[application] =( identifier[variable_length_section] . identifier[application_name] . identifier[rstrip] ( literal[string] )) identifier[event_data] . identifier[comment] = identifier[variable_length_section] . identifier[comment] . identifier[rstrip] ( literal[string] ) identifier[event_data] . identifier[parameters] =( identifier[variable_length_section] . identifier[parameters] . identifier[rstrip] ( literal[string] )) identifier[event_data] . identifier[username] = identifier[variable_length_section] . identifier[author] . identifier[rstrip] ( literal[string] ) identifier[event_data] . identifier[working_directory] =( identifier[variable_length_section] . identifier[working_directory] . identifier[rstrip] ( literal[string] )) keyword[return] identifier[event_data]
def _ParseEventData(self, variable_length_section): """Parses the event data form a variable-length data section. Args: variable_length_section (job_variable_length_data_section): a Windows Scheduled Task job variable-length data section. Returns: WinJobEventData: event data of the job file. """ event_data = WinJobEventData() event_data.application = variable_length_section.application_name.rstrip('\x00') event_data.comment = variable_length_section.comment.rstrip('\x00') event_data.parameters = variable_length_section.parameters.rstrip('\x00') event_data.username = variable_length_section.author.rstrip('\x00') event_data.working_directory = variable_length_section.working_directory.rstrip('\x00') return event_data
def b58encode_check(v): """Encode a string using Base58 with a 4 character checksum""" digest = sha256(sha256(v).digest()).digest() return b58encode(v + digest[:4])
def function[b58encode_check, parameter[v]]: constant[Encode a string using Base58 with a 4 character checksum] variable[digest] assign[=] call[call[name[sha256], parameter[call[call[name[sha256], parameter[name[v]]].digest, parameter[]]]].digest, parameter[]] return[call[name[b58encode], parameter[binary_operation[name[v] + call[name[digest]][<ast.Slice object at 0x7da2054a7160>]]]]]
keyword[def] identifier[b58encode_check] ( identifier[v] ): literal[string] identifier[digest] = identifier[sha256] ( identifier[sha256] ( identifier[v] ). identifier[digest] ()). identifier[digest] () keyword[return] identifier[b58encode] ( identifier[v] + identifier[digest] [: literal[int] ])
def b58encode_check(v): """Encode a string using Base58 with a 4 character checksum""" digest = sha256(sha256(v).digest()).digest() return b58encode(v + digest[:4])
def select(self, column, agg=None, _as=None, distinct=False): """ What columnns to select in query. :column should be a column name or equation to produce column **not** aggregated :agg should be a valid aggregate method used to producte the figure :_as should be a string used to represent the column. Required when agg present """ if agg and not _as: raise ValueError("Aggregate colunns require `_as` to be specified") if column is False: self._selects = {} else: self._selects.setdefault((_as or column), (column, agg, _as, distinct))
def function[select, parameter[self, column, agg, _as, distinct]]: constant[ What columnns to select in query. :column should be a column name or equation to produce column **not** aggregated :agg should be a valid aggregate method used to producte the figure :_as should be a string used to represent the column. Required when agg present ] if <ast.BoolOp object at 0x7da1b15b2800> begin[:] <ast.Raise object at 0x7da1b15b3880> if compare[name[column] is constant[False]] begin[:] name[self]._selects assign[=] dictionary[[], []]
keyword[def] identifier[select] ( identifier[self] , identifier[column] , identifier[agg] = keyword[None] , identifier[_as] = keyword[None] , identifier[distinct] = keyword[False] ): literal[string] keyword[if] identifier[agg] keyword[and] keyword[not] identifier[_as] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[column] keyword[is] keyword[False] : identifier[self] . identifier[_selects] ={} keyword[else] : identifier[self] . identifier[_selects] . identifier[setdefault] (( identifier[_as] keyword[or] identifier[column] ),( identifier[column] , identifier[agg] , identifier[_as] , identifier[distinct] ))
def select(self, column, agg=None, _as=None, distinct=False): """ What columnns to select in query. :column should be a column name or equation to produce column **not** aggregated :agg should be a valid aggregate method used to producte the figure :_as should be a string used to represent the column. Required when agg present """ if agg and (not _as): raise ValueError('Aggregate colunns require `_as` to be specified') # depends on [control=['if'], data=[]] if column is False: self._selects = {} # depends on [control=['if'], data=[]] else: self._selects.setdefault(_as or column, (column, agg, _as, distinct))
def _get_block_matches(self, attributes_a, attributes_b, filter_set_a=None, filter_set_b=None, delta=(0, 0, 0), tiebreak_with_block_similarity=False): """ :param attributes_a: A dict of blocks to their attributes :param attributes_b: A dict of blocks to their attributes The following parameters are optional. :param filter_set_a: A set to limit attributes_a to the blocks in this set. :param filter_set_b: A set to limit attributes_b to the blocks in this set. :param delta: An offset to add to each vector in attributes_a. :returns: A list of tuples of matching objects. """ # get the attributes that are in the sets if filter_set_a is None: filtered_attributes_a = {k: v for k, v in attributes_a.items()} else: filtered_attributes_a = {k: v for k, v in attributes_a.items() if k in filter_set_a} if filter_set_b is None: filtered_attributes_b = {k: v for k, v in attributes_b.items()} else: filtered_attributes_b = {k: v for k, v in attributes_b.items() if k in filter_set_b} # add delta for k in filtered_attributes_a: filtered_attributes_a[k] = tuple((i+j) for i, j in zip(filtered_attributes_a[k], delta)) for k in filtered_attributes_b: filtered_attributes_b[k] = tuple((i+j) for i, j in zip(filtered_attributes_b[k], delta)) # get closest closest_a = _get_closest_matches(filtered_attributes_a, filtered_attributes_b) closest_b = _get_closest_matches(filtered_attributes_b, filtered_attributes_a) if tiebreak_with_block_similarity: # use block similarity to break ties in the first set for a in closest_a: if len(closest_a[a]) > 1: best_similarity = 0 best = [] for x in closest_a[a]: similarity = self.block_similarity(a, x) if similarity > best_similarity: best_similarity = similarity best = [x] elif similarity == best_similarity: best.append(x) closest_a[a] = best # use block similarity to break ties in the second set for b in closest_b: if len(closest_b[b]) > 1: best_similarity = 0 best = [] for x in closest_b[b]: similarity = self.block_similarity(x, b) if similarity > best_similarity: best_similarity = similarity best = [x] elif similarity == best_similarity: best.append(x) closest_b[b] = best # a match (x,y) is good if x is the closest to y and y is the closest to x matches = [] for a in closest_a: if len(closest_a[a]) == 1: match = closest_a[a][0] if len(closest_b[match]) == 1 and closest_b[match][0] == a: matches.append((a, match)) return matches
def function[_get_block_matches, parameter[self, attributes_a, attributes_b, filter_set_a, filter_set_b, delta, tiebreak_with_block_similarity]]: constant[ :param attributes_a: A dict of blocks to their attributes :param attributes_b: A dict of blocks to their attributes The following parameters are optional. :param filter_set_a: A set to limit attributes_a to the blocks in this set. :param filter_set_b: A set to limit attributes_b to the blocks in this set. :param delta: An offset to add to each vector in attributes_a. :returns: A list of tuples of matching objects. ] if compare[name[filter_set_a] is constant[None]] begin[:] variable[filtered_attributes_a] assign[=] <ast.DictComp object at 0x7da204344cd0> if compare[name[filter_set_b] is constant[None]] begin[:] variable[filtered_attributes_b] assign[=] <ast.DictComp object at 0x7da204344a00> for taget[name[k]] in starred[name[filtered_attributes_a]] begin[:] call[name[filtered_attributes_a]][name[k]] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da204346bc0>]] for taget[name[k]] in starred[name[filtered_attributes_b]] begin[:] call[name[filtered_attributes_b]][name[k]] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da204344400>]] variable[closest_a] assign[=] call[name[_get_closest_matches], parameter[name[filtered_attributes_a], name[filtered_attributes_b]]] variable[closest_b] assign[=] call[name[_get_closest_matches], parameter[name[filtered_attributes_b], name[filtered_attributes_a]]] if name[tiebreak_with_block_similarity] begin[:] for taget[name[a]] in starred[name[closest_a]] begin[:] if compare[call[name[len], parameter[call[name[closest_a]][name[a]]]] greater[>] constant[1]] begin[:] variable[best_similarity] assign[=] constant[0] variable[best] assign[=] list[[]] for taget[name[x]] in starred[call[name[closest_a]][name[a]]] begin[:] variable[similarity] assign[=] call[name[self].block_similarity, parameter[name[a], name[x]]] if compare[name[similarity] greater[>] name[best_similarity]] begin[:] variable[best_similarity] assign[=] name[similarity] variable[best] assign[=] list[[<ast.Name object at 0x7da204344190>]] call[name[closest_a]][name[a]] assign[=] name[best] for taget[name[b]] in starred[name[closest_b]] begin[:] if compare[call[name[len], parameter[call[name[closest_b]][name[b]]]] greater[>] constant[1]] begin[:] variable[best_similarity] assign[=] constant[0] variable[best] assign[=] list[[]] for taget[name[x]] in starred[call[name[closest_b]][name[b]]] begin[:] variable[similarity] assign[=] call[name[self].block_similarity, parameter[name[x], name[b]]] if compare[name[similarity] greater[>] name[best_similarity]] begin[:] variable[best_similarity] assign[=] name[similarity] variable[best] assign[=] list[[<ast.Name object at 0x7da207f996f0>]] call[name[closest_b]][name[b]] assign[=] name[best] variable[matches] assign[=] list[[]] for taget[name[a]] in starred[name[closest_a]] begin[:] if compare[call[name[len], parameter[call[name[closest_a]][name[a]]]] equal[==] constant[1]] begin[:] variable[match] assign[=] call[call[name[closest_a]][name[a]]][constant[0]] if <ast.BoolOp object at 0x7da207f99b70> begin[:] call[name[matches].append, parameter[tuple[[<ast.Name object at 0x7da207f9b700>, <ast.Name object at 0x7da207f99990>]]]] return[name[matches]]
keyword[def] identifier[_get_block_matches] ( identifier[self] , identifier[attributes_a] , identifier[attributes_b] , identifier[filter_set_a] = keyword[None] , identifier[filter_set_b] = keyword[None] , identifier[delta] =( literal[int] , literal[int] , literal[int] ), identifier[tiebreak_with_block_similarity] = keyword[False] ): literal[string] keyword[if] identifier[filter_set_a] keyword[is] keyword[None] : identifier[filtered_attributes_a] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attributes_a] . identifier[items] ()} keyword[else] : identifier[filtered_attributes_a] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attributes_a] . identifier[items] () keyword[if] identifier[k] keyword[in] identifier[filter_set_a] } keyword[if] identifier[filter_set_b] keyword[is] keyword[None] : identifier[filtered_attributes_b] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attributes_b] . identifier[items] ()} keyword[else] : identifier[filtered_attributes_b] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[attributes_b] . identifier[items] () keyword[if] identifier[k] keyword[in] identifier[filter_set_b] } keyword[for] identifier[k] keyword[in] identifier[filtered_attributes_a] : identifier[filtered_attributes_a] [ identifier[k] ]= identifier[tuple] (( identifier[i] + identifier[j] ) keyword[for] identifier[i] , identifier[j] keyword[in] identifier[zip] ( identifier[filtered_attributes_a] [ identifier[k] ], identifier[delta] )) keyword[for] identifier[k] keyword[in] identifier[filtered_attributes_b] : identifier[filtered_attributes_b] [ identifier[k] ]= identifier[tuple] (( identifier[i] + identifier[j] ) keyword[for] identifier[i] , identifier[j] keyword[in] identifier[zip] ( identifier[filtered_attributes_b] [ identifier[k] ], identifier[delta] )) identifier[closest_a] = identifier[_get_closest_matches] ( identifier[filtered_attributes_a] , identifier[filtered_attributes_b] ) identifier[closest_b] = identifier[_get_closest_matches] ( identifier[filtered_attributes_b] , identifier[filtered_attributes_a] ) keyword[if] identifier[tiebreak_with_block_similarity] : keyword[for] identifier[a] keyword[in] identifier[closest_a] : keyword[if] identifier[len] ( identifier[closest_a] [ identifier[a] ])> literal[int] : identifier[best_similarity] = literal[int] identifier[best] =[] keyword[for] identifier[x] keyword[in] identifier[closest_a] [ identifier[a] ]: identifier[similarity] = identifier[self] . identifier[block_similarity] ( identifier[a] , identifier[x] ) keyword[if] identifier[similarity] > identifier[best_similarity] : identifier[best_similarity] = identifier[similarity] identifier[best] =[ identifier[x] ] keyword[elif] identifier[similarity] == identifier[best_similarity] : identifier[best] . identifier[append] ( identifier[x] ) identifier[closest_a] [ identifier[a] ]= identifier[best] keyword[for] identifier[b] keyword[in] identifier[closest_b] : keyword[if] identifier[len] ( identifier[closest_b] [ identifier[b] ])> literal[int] : identifier[best_similarity] = literal[int] identifier[best] =[] keyword[for] identifier[x] keyword[in] identifier[closest_b] [ identifier[b] ]: identifier[similarity] = identifier[self] . identifier[block_similarity] ( identifier[x] , identifier[b] ) keyword[if] identifier[similarity] > identifier[best_similarity] : identifier[best_similarity] = identifier[similarity] identifier[best] =[ identifier[x] ] keyword[elif] identifier[similarity] == identifier[best_similarity] : identifier[best] . identifier[append] ( identifier[x] ) identifier[closest_b] [ identifier[b] ]= identifier[best] identifier[matches] =[] keyword[for] identifier[a] keyword[in] identifier[closest_a] : keyword[if] identifier[len] ( identifier[closest_a] [ identifier[a] ])== literal[int] : identifier[match] = identifier[closest_a] [ identifier[a] ][ literal[int] ] keyword[if] identifier[len] ( identifier[closest_b] [ identifier[match] ])== literal[int] keyword[and] identifier[closest_b] [ identifier[match] ][ literal[int] ]== identifier[a] : identifier[matches] . identifier[append] (( identifier[a] , identifier[match] )) keyword[return] identifier[matches]
def _get_block_matches(self, attributes_a, attributes_b, filter_set_a=None, filter_set_b=None, delta=(0, 0, 0), tiebreak_with_block_similarity=False): """ :param attributes_a: A dict of blocks to their attributes :param attributes_b: A dict of blocks to their attributes The following parameters are optional. :param filter_set_a: A set to limit attributes_a to the blocks in this set. :param filter_set_b: A set to limit attributes_b to the blocks in this set. :param delta: An offset to add to each vector in attributes_a. :returns: A list of tuples of matching objects. """ # get the attributes that are in the sets if filter_set_a is None: filtered_attributes_a = {k: v for (k, v) in attributes_a.items()} # depends on [control=['if'], data=[]] else: filtered_attributes_a = {k: v for (k, v) in attributes_a.items() if k in filter_set_a} if filter_set_b is None: filtered_attributes_b = {k: v for (k, v) in attributes_b.items()} # depends on [control=['if'], data=[]] else: filtered_attributes_b = {k: v for (k, v) in attributes_b.items() if k in filter_set_b} # add delta for k in filtered_attributes_a: filtered_attributes_a[k] = tuple((i + j for (i, j) in zip(filtered_attributes_a[k], delta))) # depends on [control=['for'], data=['k']] for k in filtered_attributes_b: filtered_attributes_b[k] = tuple((i + j for (i, j) in zip(filtered_attributes_b[k], delta))) # depends on [control=['for'], data=['k']] # get closest closest_a = _get_closest_matches(filtered_attributes_a, filtered_attributes_b) closest_b = _get_closest_matches(filtered_attributes_b, filtered_attributes_a) if tiebreak_with_block_similarity: # use block similarity to break ties in the first set for a in closest_a: if len(closest_a[a]) > 1: best_similarity = 0 best = [] for x in closest_a[a]: similarity = self.block_similarity(a, x) if similarity > best_similarity: best_similarity = similarity best = [x] # depends on [control=['if'], data=['similarity', 'best_similarity']] elif similarity == best_similarity: best.append(x) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] closest_a[a] = best # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] # use block similarity to break ties in the second set for b in closest_b: if len(closest_b[b]) > 1: best_similarity = 0 best = [] for x in closest_b[b]: similarity = self.block_similarity(x, b) if similarity > best_similarity: best_similarity = similarity best = [x] # depends on [control=['if'], data=['similarity', 'best_similarity']] elif similarity == best_similarity: best.append(x) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] closest_b[b] = best # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['b']] # depends on [control=['if'], data=[]] # a match (x,y) is good if x is the closest to y and y is the closest to x matches = [] for a in closest_a: if len(closest_a[a]) == 1: match = closest_a[a][0] if len(closest_b[match]) == 1 and closest_b[match][0] == a: matches.append((a, match)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] return matches
def _get_cygwin_path(self, windows_path): """ Convert windows path to cygpath """ conv_cmd = [os.path.join(self._cygwin_bin_location, "cygpath.exe"), "-u", windows_path] process = Popen(conv_cmd, stdout=PIPE, stderr=PIPE, shell=False) out, err = process.communicate() if err: print(err) raise Exception(err) return out.strip()
def function[_get_cygwin_path, parameter[self, windows_path]]: constant[ Convert windows path to cygpath ] variable[conv_cmd] assign[=] list[[<ast.Call object at 0x7da18eb57a00>, <ast.Constant object at 0x7da18eb573d0>, <ast.Name object at 0x7da18eb54c10>]] variable[process] assign[=] call[name[Popen], parameter[name[conv_cmd]]] <ast.Tuple object at 0x7da18eb572b0> assign[=] call[name[process].communicate, parameter[]] if name[err] begin[:] call[name[print], parameter[name[err]]] <ast.Raise object at 0x7da18eb54550> return[call[name[out].strip, parameter[]]]
keyword[def] identifier[_get_cygwin_path] ( identifier[self] , identifier[windows_path] ): literal[string] identifier[conv_cmd] =[ identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[_cygwin_bin_location] , literal[string] ), literal[string] , identifier[windows_path] ] identifier[process] = identifier[Popen] ( identifier[conv_cmd] , identifier[stdout] = identifier[PIPE] , identifier[stderr] = identifier[PIPE] , identifier[shell] = keyword[False] ) identifier[out] , identifier[err] = identifier[process] . identifier[communicate] () keyword[if] identifier[err] : identifier[print] ( identifier[err] ) keyword[raise] identifier[Exception] ( identifier[err] ) keyword[return] identifier[out] . identifier[strip] ()
def _get_cygwin_path(self, windows_path): """ Convert windows path to cygpath """ conv_cmd = [os.path.join(self._cygwin_bin_location, 'cygpath.exe'), '-u', windows_path] process = Popen(conv_cmd, stdout=PIPE, stderr=PIPE, shell=False) (out, err) = process.communicate() if err: print(err) raise Exception(err) # depends on [control=['if'], data=[]] return out.strip()
def remove_target(self, target_id): """remove a target, given the id""" updated_targets = [] for target in self.my_osid_object_form._my_map['targets']: if target['id'] != target_id: updated_targets.append(target) self.my_osid_object_form._my_map['targets'] = updated_targets
def function[remove_target, parameter[self, target_id]]: constant[remove a target, given the id] variable[updated_targets] assign[=] list[[]] for taget[name[target]] in starred[call[name[self].my_osid_object_form._my_map][constant[targets]]] begin[:] if compare[call[name[target]][constant[id]] not_equal[!=] name[target_id]] begin[:] call[name[updated_targets].append, parameter[name[target]]] call[name[self].my_osid_object_form._my_map][constant[targets]] assign[=] name[updated_targets]
keyword[def] identifier[remove_target] ( identifier[self] , identifier[target_id] ): literal[string] identifier[updated_targets] =[] keyword[for] identifier[target] keyword[in] identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]: keyword[if] identifier[target] [ literal[string] ]!= identifier[target_id] : identifier[updated_targets] . identifier[append] ( identifier[target] ) identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]= identifier[updated_targets]
def remove_target(self, target_id): """remove a target, given the id""" updated_targets = [] for target in self.my_osid_object_form._my_map['targets']: if target['id'] != target_id: updated_targets.append(target) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['target']] self.my_osid_object_form._my_map['targets'] = updated_targets
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the QueryResponsePayload object to a stream. Args: output_buffer (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ local_buffer = utils.BytearrayStream() if self._operations: for operation in self._operations: operation.write(local_buffer, kmip_version=kmip_version) if self._object_types: for object_type in self._object_types: object_type.write(local_buffer, kmip_version=kmip_version) if self._vendor_identification: self._vendor_identification.write( local_buffer, kmip_version=kmip_version ) if self._server_information: self._server_information.write( local_buffer, kmip_version=kmip_version ) if self._application_namespaces: for application_namespace in self._application_namespaces: application_namespace.write( local_buffer, kmip_version=kmip_version ) if kmip_version >= enums.KMIPVersion.KMIP_1_1: if self._extension_information: for extension_information in self._extension_information: extension_information.write( local_buffer, kmip_version=kmip_version ) if kmip_version >= enums.KMIPVersion.KMIP_1_2: if self._attestation_types: for attestation_type in self._attestation_types: attestation_type.write( local_buffer, kmip_version=kmip_version ) if kmip_version >= enums.KMIPVersion.KMIP_1_3: if self._rng_parameters: for rng_parameters in self._rng_parameters: rng_parameters.write( local_buffer, kmip_version=kmip_version ) if self._profile_information: for profile_information in self._profile_information: profile_information.write( local_buffer, kmip_version=kmip_version ) if self._validation_information: for validation_information in self._validation_information: validation_information.write( local_buffer, kmip_version=kmip_version ) if self._capability_information: for capability_information in self._capability_information: capability_information.write( local_buffer, kmip_version=kmip_version ) if self._client_registration_methods: for client_reg_method in self._client_registration_methods: client_reg_method.write( local_buffer, kmip_version=kmip_version ) if kmip_version >= enums.KMIPVersion.KMIP_2_0: if self._defaults_information: self._defaults_information.write( local_buffer, kmip_version=kmip_version ) if self._storage_protection_masks: for storage_protection_mask in self._storage_protection_masks: storage_protection_mask.write( local_buffer, kmip_version=kmip_version ) self.length = local_buffer.length() super(QueryResponsePayload, self).write( output_buffer, kmip_version=kmip_version ) output_buffer.write(local_buffer.buffer)
def function[write, parameter[self, output_buffer, kmip_version]]: constant[ Write the data encoding the QueryResponsePayload object to a stream. Args: output_buffer (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. ] variable[local_buffer] assign[=] call[name[utils].BytearrayStream, parameter[]] if name[self]._operations begin[:] for taget[name[operation]] in starred[name[self]._operations] begin[:] call[name[operation].write, parameter[name[local_buffer]]] if name[self]._object_types begin[:] for taget[name[object_type]] in starred[name[self]._object_types] begin[:] call[name[object_type].write, parameter[name[local_buffer]]] if name[self]._vendor_identification begin[:] call[name[self]._vendor_identification.write, parameter[name[local_buffer]]] if name[self]._server_information begin[:] call[name[self]._server_information.write, parameter[name[local_buffer]]] if name[self]._application_namespaces begin[:] for taget[name[application_namespace]] in starred[name[self]._application_namespaces] begin[:] call[name[application_namespace].write, parameter[name[local_buffer]]] if compare[name[kmip_version] greater_or_equal[>=] name[enums].KMIPVersion.KMIP_1_1] begin[:] if name[self]._extension_information begin[:] for taget[name[extension_information]] in starred[name[self]._extension_information] begin[:] call[name[extension_information].write, parameter[name[local_buffer]]] if compare[name[kmip_version] greater_or_equal[>=] name[enums].KMIPVersion.KMIP_1_2] begin[:] if name[self]._attestation_types begin[:] for taget[name[attestation_type]] in starred[name[self]._attestation_types] begin[:] call[name[attestation_type].write, parameter[name[local_buffer]]] if compare[name[kmip_version] greater_or_equal[>=] name[enums].KMIPVersion.KMIP_1_3] begin[:] if name[self]._rng_parameters begin[:] for taget[name[rng_parameters]] in starred[name[self]._rng_parameters] begin[:] call[name[rng_parameters].write, parameter[name[local_buffer]]] if name[self]._profile_information begin[:] for taget[name[profile_information]] in starred[name[self]._profile_information] begin[:] call[name[profile_information].write, parameter[name[local_buffer]]] if name[self]._validation_information begin[:] for taget[name[validation_information]] in starred[name[self]._validation_information] begin[:] call[name[validation_information].write, parameter[name[local_buffer]]] if name[self]._capability_information begin[:] for taget[name[capability_information]] in starred[name[self]._capability_information] begin[:] call[name[capability_information].write, parameter[name[local_buffer]]] if name[self]._client_registration_methods begin[:] for taget[name[client_reg_method]] in starred[name[self]._client_registration_methods] begin[:] call[name[client_reg_method].write, parameter[name[local_buffer]]] if compare[name[kmip_version] greater_or_equal[>=] name[enums].KMIPVersion.KMIP_2_0] begin[:] if name[self]._defaults_information begin[:] call[name[self]._defaults_information.write, parameter[name[local_buffer]]] if name[self]._storage_protection_masks begin[:] for taget[name[storage_protection_mask]] in starred[name[self]._storage_protection_masks] begin[:] call[name[storage_protection_mask].write, parameter[name[local_buffer]]] name[self].length assign[=] call[name[local_buffer].length, parameter[]] call[call[name[super], parameter[name[QueryResponsePayload], name[self]]].write, parameter[name[output_buffer]]] call[name[output_buffer].write, parameter[name[local_buffer].buffer]]
keyword[def] identifier[write] ( identifier[self] , identifier[output_buffer] , identifier[kmip_version] = identifier[enums] . identifier[KMIPVersion] . identifier[KMIP_1_0] ): literal[string] identifier[local_buffer] = identifier[utils] . identifier[BytearrayStream] () keyword[if] identifier[self] . identifier[_operations] : keyword[for] identifier[operation] keyword[in] identifier[self] . identifier[_operations] : identifier[operation] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[self] . identifier[_object_types] : keyword[for] identifier[object_type] keyword[in] identifier[self] . identifier[_object_types] : identifier[object_type] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[self] . identifier[_vendor_identification] : identifier[self] . identifier[_vendor_identification] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[self] . identifier[_server_information] : identifier[self] . identifier[_server_information] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[self] . identifier[_application_namespaces] : keyword[for] identifier[application_namespace] keyword[in] identifier[self] . identifier[_application_namespaces] : identifier[application_namespace] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[kmip_version] >= identifier[enums] . identifier[KMIPVersion] . identifier[KMIP_1_1] : keyword[if] identifier[self] . identifier[_extension_information] : keyword[for] identifier[extension_information] keyword[in] identifier[self] . identifier[_extension_information] : identifier[extension_information] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[kmip_version] >= identifier[enums] . identifier[KMIPVersion] . identifier[KMIP_1_2] : keyword[if] identifier[self] . identifier[_attestation_types] : keyword[for] identifier[attestation_type] keyword[in] identifier[self] . identifier[_attestation_types] : identifier[attestation_type] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[kmip_version] >= identifier[enums] . identifier[KMIPVersion] . identifier[KMIP_1_3] : keyword[if] identifier[self] . identifier[_rng_parameters] : keyword[for] identifier[rng_parameters] keyword[in] identifier[self] . identifier[_rng_parameters] : identifier[rng_parameters] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[self] . identifier[_profile_information] : keyword[for] identifier[profile_information] keyword[in] identifier[self] . identifier[_profile_information] : identifier[profile_information] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[self] . identifier[_validation_information] : keyword[for] identifier[validation_information] keyword[in] identifier[self] . identifier[_validation_information] : identifier[validation_information] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[self] . identifier[_capability_information] : keyword[for] identifier[capability_information] keyword[in] identifier[self] . identifier[_capability_information] : identifier[capability_information] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[self] . identifier[_client_registration_methods] : keyword[for] identifier[client_reg_method] keyword[in] identifier[self] . identifier[_client_registration_methods] : identifier[client_reg_method] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[kmip_version] >= identifier[enums] . identifier[KMIPVersion] . identifier[KMIP_2_0] : keyword[if] identifier[self] . identifier[_defaults_information] : identifier[self] . identifier[_defaults_information] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) keyword[if] identifier[self] . identifier[_storage_protection_masks] : keyword[for] identifier[storage_protection_mask] keyword[in] identifier[self] . identifier[_storage_protection_masks] : identifier[storage_protection_mask] . identifier[write] ( identifier[local_buffer] , identifier[kmip_version] = identifier[kmip_version] ) identifier[self] . identifier[length] = identifier[local_buffer] . identifier[length] () identifier[super] ( identifier[QueryResponsePayload] , identifier[self] ). identifier[write] ( identifier[output_buffer] , identifier[kmip_version] = identifier[kmip_version] ) identifier[output_buffer] . identifier[write] ( identifier[local_buffer] . identifier[buffer] )
def write(self, output_buffer, kmip_version=enums.KMIPVersion.KMIP_1_0): """ Write the data encoding the QueryResponsePayload object to a stream. Args: output_buffer (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional, defaults to KMIP 1.0. """ local_buffer = utils.BytearrayStream() if self._operations: for operation in self._operations: operation.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['operation']] # depends on [control=['if'], data=[]] if self._object_types: for object_type in self._object_types: object_type.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['object_type']] # depends on [control=['if'], data=[]] if self._vendor_identification: self._vendor_identification.write(local_buffer, kmip_version=kmip_version) # depends on [control=['if'], data=[]] if self._server_information: self._server_information.write(local_buffer, kmip_version=kmip_version) # depends on [control=['if'], data=[]] if self._application_namespaces: for application_namespace in self._application_namespaces: application_namespace.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['application_namespace']] # depends on [control=['if'], data=[]] if kmip_version >= enums.KMIPVersion.KMIP_1_1: if self._extension_information: for extension_information in self._extension_information: extension_information.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['extension_information']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kmip_version']] if kmip_version >= enums.KMIPVersion.KMIP_1_2: if self._attestation_types: for attestation_type in self._attestation_types: attestation_type.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['attestation_type']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kmip_version']] if kmip_version >= enums.KMIPVersion.KMIP_1_3: if self._rng_parameters: for rng_parameters in self._rng_parameters: rng_parameters.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['rng_parameters']] # depends on [control=['if'], data=[]] if self._profile_information: for profile_information in self._profile_information: profile_information.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['profile_information']] # depends on [control=['if'], data=[]] if self._validation_information: for validation_information in self._validation_information: validation_information.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['validation_information']] # depends on [control=['if'], data=[]] if self._capability_information: for capability_information in self._capability_information: capability_information.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['capability_information']] # depends on [control=['if'], data=[]] if self._client_registration_methods: for client_reg_method in self._client_registration_methods: client_reg_method.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['client_reg_method']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kmip_version']] if kmip_version >= enums.KMIPVersion.KMIP_2_0: if self._defaults_information: self._defaults_information.write(local_buffer, kmip_version=kmip_version) # depends on [control=['if'], data=[]] if self._storage_protection_masks: for storage_protection_mask in self._storage_protection_masks: storage_protection_mask.write(local_buffer, kmip_version=kmip_version) # depends on [control=['for'], data=['storage_protection_mask']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['kmip_version']] self.length = local_buffer.length() super(QueryResponsePayload, self).write(output_buffer, kmip_version=kmip_version) output_buffer.write(local_buffer.buffer)
def from_array(array): """ Deserialize a new ShippingOption from a given dictionary. :return: new ShippingOption instance. :rtype: ShippingOption """ if array is None or not array: return None # end if assert_type_or_raise(array, dict, parameter_name="array") data = {} data['id'] = u(array.get('id')) data['title'] = u(array.get('title')) data['prices'] = LabeledPrice.from_array_list(array.get('prices'), list_level=1) instance = ShippingOption(**data) instance._raw = array return instance
def function[from_array, parameter[array]]: constant[ Deserialize a new ShippingOption from a given dictionary. :return: new ShippingOption instance. :rtype: ShippingOption ] if <ast.BoolOp object at 0x7da1b0430130> begin[:] return[constant[None]] call[name[assert_type_or_raise], parameter[name[array], name[dict]]] variable[data] assign[=] dictionary[[], []] call[name[data]][constant[id]] assign[=] call[name[u], parameter[call[name[array].get, parameter[constant[id]]]]] call[name[data]][constant[title]] assign[=] call[name[u], parameter[call[name[array].get, parameter[constant[title]]]]] call[name[data]][constant[prices]] assign[=] call[name[LabeledPrice].from_array_list, parameter[call[name[array].get, parameter[constant[prices]]]]] variable[instance] assign[=] call[name[ShippingOption], parameter[]] name[instance]._raw assign[=] name[array] return[name[instance]]
keyword[def] identifier[from_array] ( identifier[array] ): literal[string] keyword[if] identifier[array] keyword[is] keyword[None] keyword[or] keyword[not] identifier[array] : keyword[return] keyword[None] identifier[assert_type_or_raise] ( identifier[array] , identifier[dict] , identifier[parameter_name] = literal[string] ) identifier[data] ={} identifier[data] [ literal[string] ]= identifier[u] ( identifier[array] . identifier[get] ( literal[string] )) identifier[data] [ literal[string] ]= identifier[u] ( identifier[array] . identifier[get] ( literal[string] )) identifier[data] [ literal[string] ]= identifier[LabeledPrice] . identifier[from_array_list] ( identifier[array] . identifier[get] ( literal[string] ), identifier[list_level] = literal[int] ) identifier[instance] = identifier[ShippingOption] (** identifier[data] ) identifier[instance] . identifier[_raw] = identifier[array] keyword[return] identifier[instance]
def from_array(array): """ Deserialize a new ShippingOption from a given dictionary. :return: new ShippingOption instance. :rtype: ShippingOption """ if array is None or not array: return None # depends on [control=['if'], data=[]] # end if assert_type_or_raise(array, dict, parameter_name='array') data = {} data['id'] = u(array.get('id')) data['title'] = u(array.get('title')) data['prices'] = LabeledPrice.from_array_list(array.get('prices'), list_level=1) instance = ShippingOption(**data) instance._raw = array return instance
def load_forcing_grid(path_runcontrol: str, grid: int)->pd.DataFrame: '''Load forcing data for a specific grid included in the index of `df_state_init </data-structure/supy-io.ipynb#df_state_init:-model-initial-states>`. Parameters ---------- path_runcontrol : str Path to SUEWS :ref:`RunControl.nml <suews:RunControl.nml>` grid : int Grid number Returns ------- df_forcing: pandas.DataFrame Forcing data. See `df_forcing_var` for details. Examples -------- >>> path_runcontrol = "~/SUEWS_sims/RunControl.nml" # a valid path to `RunControl.nml` >>> df_state_init = supy.init_supy(path_runcontrol) # get `df_state_init` >>> grid = df_state_init.index[0] # first grid number included in `df_state_init` >>> df_forcing = supy.load_forcing_grid(path_runcontrol, grid) # get df_forcing ''' try: path_runcontrol = Path(path_runcontrol).expanduser().resolve() except FileNotFoundError: print('{path} does not exists!'.format(path=path_runcontrol)) else: dict_mod_cfg = load_SUEWS_dict_ModConfig(path_runcontrol) df_state_init = init_supy(path_runcontrol) # load setting variables from dict_mod_cfg ( filecode, kdownzen, tstep_met_in, tstep_ESTM_in, multiplemetfiles, multipleestmfiles, dir_input_cfg ) = (dict_mod_cfg[x] for x in [ 'filecode', 'kdownzen', 'resolutionfilesin', 'resolutionfilesinestm', 'multiplemetfiles', 'multipleestmfiles', 'fileinputpath' ] ) tstep_mod, lat, lon, alt, timezone = df_state_init.loc[ grid, [(x, '0') for x in ['tstep', 'lat', 'lng', 'alt', 'timezone']] ].values path_site = path_runcontrol.parent path_input = path_site / dict_mod_cfg['fileinputpath'] # load raw data # met forcing df_forcing_met = load_SUEWS_Forcing_met_df_raw( path_input, filecode, grid, tstep_met_in, multiplemetfiles) # resample raw data from tstep_in to tstep_mod df_forcing_met_tstep = resample_forcing_met( df_forcing_met, tstep_met_in, tstep_mod, lat, lon, alt, timezone, kdownzen) # merge forcing datasets (met and ESTM) df_forcing_tstep = df_forcing_met_tstep.copy() # disable the AnOHM and ESTM components for now and for better performance # ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||| # TS 28 Dec 2018 # pack all records of `id` into `metforcingdata_grid` for AnOHM # df_grp = df_forcing_tstep.groupby('id') # dict_id_all = {xid: df_grp.get_group(xid) # for xid in df_forcing_tstep['id'].unique()} # id_all = df_forcing_tstep['id'].apply(lambda xid: dict_id_all[xid]) # df_forcing_tstep = df_forcing_tstep.merge( # id_all.to_frame(name='metforcingdata_grid'), # left_index=True, # right_index=True) # # add Ts forcing for ESTM # if np.asscalar(df_state_init.iloc[0]['storageheatmethod'].values) == 4: # # load ESTM forcing # df_forcing_estm = load_SUEWS_Forcing_ESTM_df_raw( # path_input, filecode, grid, tstep_ESTM_in, multipleestmfiles) # # resample raw data from tstep_in to tstep_mod # df_forcing_estm_tstep = resample_linear( # df_forcing_estm, tstep_met_in, tstep_mod) # df_forcing_tstep = df_forcing_tstep.merge( # df_forcing_estm_tstep, # left_on=['iy', 'id', 'it', 'imin'], # right_on=['iy', 'id', 'it', 'imin']) # # insert `ts5mindata_ir` into df_forcing_tstep # ts_col = df_forcing_estm.columns[4:] # df_forcing_tstep['ts5mindata_ir'] = ( # df_forcing_tstep.loc[:, ts_col].values.tolist()) # df_forcing_tstep['ts5mindata_ir'] = df_forcing_tstep[ # 'ts5mindata_ir'].map(lambda x: np.array(x, order='F')) # else: # # insert some placeholder values # df_forcing_tstep['ts5mindata_ir'] = df_forcing_tstep['Tair'] # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ # disable the AnOHM and ESTM components for now and for better performance # coerced precision here to prevent numerical errors inside Fortran df_forcing = np.around(df_forcing_tstep, decimals=10) # new columns for later use in main calculation df_forcing[['iy', 'id', 'it', 'imin']] = df_forcing[[ 'iy', 'id', 'it', 'imin']].astype(np.int64) return df_forcing
def function[load_forcing_grid, parameter[path_runcontrol, grid]]: constant[Load forcing data for a specific grid included in the index of `df_state_init </data-structure/supy-io.ipynb#df_state_init:-model-initial-states>`. Parameters ---------- path_runcontrol : str Path to SUEWS :ref:`RunControl.nml <suews:RunControl.nml>` grid : int Grid number Returns ------- df_forcing: pandas.DataFrame Forcing data. See `df_forcing_var` for details. Examples -------- >>> path_runcontrol = "~/SUEWS_sims/RunControl.nml" # a valid path to `RunControl.nml` >>> df_state_init = supy.init_supy(path_runcontrol) # get `df_state_init` >>> grid = df_state_init.index[0] # first grid number included in `df_state_init` >>> df_forcing = supy.load_forcing_grid(path_runcontrol, grid) # get df_forcing ] <ast.Try object at 0x7da1b0d0c3d0> return[name[df_forcing]]
keyword[def] identifier[load_forcing_grid] ( identifier[path_runcontrol] : identifier[str] , identifier[grid] : identifier[int] )-> identifier[pd] . identifier[DataFrame] : literal[string] keyword[try] : identifier[path_runcontrol] = identifier[Path] ( identifier[path_runcontrol] ). identifier[expanduser] (). identifier[resolve] () keyword[except] identifier[FileNotFoundError] : identifier[print] ( literal[string] . identifier[format] ( identifier[path] = identifier[path_runcontrol] )) keyword[else] : identifier[dict_mod_cfg] = identifier[load_SUEWS_dict_ModConfig] ( identifier[path_runcontrol] ) identifier[df_state_init] = identifier[init_supy] ( identifier[path_runcontrol] ) ( identifier[filecode] , identifier[kdownzen] , identifier[tstep_met_in] , identifier[tstep_ESTM_in] , identifier[multiplemetfiles] , identifier[multipleestmfiles] , identifier[dir_input_cfg] )=( identifier[dict_mod_cfg] [ identifier[x] ] keyword[for] identifier[x] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] ) identifier[tstep_mod] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[timezone] = identifier[df_state_init] . identifier[loc] [ identifier[grid] , [( identifier[x] , literal[string] ) keyword[for] identifier[x] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]] ]. identifier[values] identifier[path_site] = identifier[path_runcontrol] . identifier[parent] identifier[path_input] = identifier[path_site] / identifier[dict_mod_cfg] [ literal[string] ] identifier[df_forcing_met] = identifier[load_SUEWS_Forcing_met_df_raw] ( identifier[path_input] , identifier[filecode] , identifier[grid] , identifier[tstep_met_in] , identifier[multiplemetfiles] ) identifier[df_forcing_met_tstep] = identifier[resample_forcing_met] ( identifier[df_forcing_met] , identifier[tstep_met_in] , identifier[tstep_mod] , identifier[lat] , identifier[lon] , identifier[alt] , identifier[timezone] , identifier[kdownzen] ) identifier[df_forcing_tstep] = identifier[df_forcing_met_tstep] . identifier[copy] () identifier[df_forcing] = identifier[np] . identifier[around] ( identifier[df_forcing_tstep] , identifier[decimals] = literal[int] ) identifier[df_forcing] [[ literal[string] , literal[string] , literal[string] , literal[string] ]]= identifier[df_forcing] [[ literal[string] , literal[string] , literal[string] , literal[string] ]]. identifier[astype] ( identifier[np] . identifier[int64] ) keyword[return] identifier[df_forcing]
def load_forcing_grid(path_runcontrol: str, grid: int) -> pd.DataFrame: """Load forcing data for a specific grid included in the index of `df_state_init </data-structure/supy-io.ipynb#df_state_init:-model-initial-states>`. Parameters ---------- path_runcontrol : str Path to SUEWS :ref:`RunControl.nml <suews:RunControl.nml>` grid : int Grid number Returns ------- df_forcing: pandas.DataFrame Forcing data. See `df_forcing_var` for details. Examples -------- >>> path_runcontrol = "~/SUEWS_sims/RunControl.nml" # a valid path to `RunControl.nml` >>> df_state_init = supy.init_supy(path_runcontrol) # get `df_state_init` >>> grid = df_state_init.index[0] # first grid number included in `df_state_init` >>> df_forcing = supy.load_forcing_grid(path_runcontrol, grid) # get df_forcing """ try: path_runcontrol = Path(path_runcontrol).expanduser().resolve() # depends on [control=['try'], data=[]] except FileNotFoundError: print('{path} does not exists!'.format(path=path_runcontrol)) # depends on [control=['except'], data=[]] else: dict_mod_cfg = load_SUEWS_dict_ModConfig(path_runcontrol) df_state_init = init_supy(path_runcontrol) # load setting variables from dict_mod_cfg (filecode, kdownzen, tstep_met_in, tstep_ESTM_in, multiplemetfiles, multipleestmfiles, dir_input_cfg) = (dict_mod_cfg[x] for x in ['filecode', 'kdownzen', 'resolutionfilesin', 'resolutionfilesinestm', 'multiplemetfiles', 'multipleestmfiles', 'fileinputpath']) (tstep_mod, lat, lon, alt, timezone) = df_state_init.loc[grid, [(x, '0') for x in ['tstep', 'lat', 'lng', 'alt', 'timezone']]].values path_site = path_runcontrol.parent path_input = path_site / dict_mod_cfg['fileinputpath'] # load raw data # met forcing df_forcing_met = load_SUEWS_Forcing_met_df_raw(path_input, filecode, grid, tstep_met_in, multiplemetfiles) # resample raw data from tstep_in to tstep_mod df_forcing_met_tstep = resample_forcing_met(df_forcing_met, tstep_met_in, tstep_mod, lat, lon, alt, timezone, kdownzen) # merge forcing datasets (met and ESTM) df_forcing_tstep = df_forcing_met_tstep.copy() # disable the AnOHM and ESTM components for now and for better performance # ||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||||| # TS 28 Dec 2018 # pack all records of `id` into `metforcingdata_grid` for AnOHM # df_grp = df_forcing_tstep.groupby('id') # dict_id_all = {xid: df_grp.get_group(xid) # for xid in df_forcing_tstep['id'].unique()} # id_all = df_forcing_tstep['id'].apply(lambda xid: dict_id_all[xid]) # df_forcing_tstep = df_forcing_tstep.merge( # id_all.to_frame(name='metforcingdata_grid'), # left_index=True, # right_index=True) # # add Ts forcing for ESTM # if np.asscalar(df_state_init.iloc[0]['storageheatmethod'].values) == 4: # # load ESTM forcing # df_forcing_estm = load_SUEWS_Forcing_ESTM_df_raw( # path_input, filecode, grid, tstep_ESTM_in, multipleestmfiles) # # resample raw data from tstep_in to tstep_mod # df_forcing_estm_tstep = resample_linear( # df_forcing_estm, tstep_met_in, tstep_mod) # df_forcing_tstep = df_forcing_tstep.merge( # df_forcing_estm_tstep, # left_on=['iy', 'id', 'it', 'imin'], # right_on=['iy', 'id', 'it', 'imin']) # # insert `ts5mindata_ir` into df_forcing_tstep # ts_col = df_forcing_estm.columns[4:] # df_forcing_tstep['ts5mindata_ir'] = ( # df_forcing_tstep.loc[:, ts_col].values.tolist()) # df_forcing_tstep['ts5mindata_ir'] = df_forcing_tstep[ # 'ts5mindata_ir'].map(lambda x: np.array(x, order='F')) # else: # # insert some placeholder values # df_forcing_tstep['ts5mindata_ir'] = df_forcing_tstep['Tair'] # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ # disable the AnOHM and ESTM components for now and for better performance # coerced precision here to prevent numerical errors inside Fortran df_forcing = np.around(df_forcing_tstep, decimals=10) # new columns for later use in main calculation df_forcing[['iy', 'id', 'it', 'imin']] = df_forcing[['iy', 'id', 'it', 'imin']].astype(np.int64) return df_forcing
def item_straat_adapter(obj, request): """ Adapter for rendering an object of :class:`crabpy.gateway.crab.Straat` to json. """ return { 'id': obj.id, 'label': obj.label, 'namen': obj.namen, 'status': { 'id': obj.status.id, 'naam': obj.status.naam, 'definitie': obj.status.definitie }, 'taal': { 'id': obj.taal.id, 'naam': obj.taal.naam, 'definitie': obj.taal.definitie }, 'metadata': { 'begin_tijd': obj.metadata.begin_tijd, 'begin_datum': obj.metadata.begin_datum, 'begin_bewerking': { 'id': obj.metadata.begin_bewerking.id, 'naam': obj.metadata.begin_bewerking.naam, 'definitie': obj.metadata.begin_bewerking.definitie }, 'begin_organisatie': { 'id': obj.metadata.begin_organisatie.id, 'naam': obj.metadata.begin_organisatie.naam, 'definitie': obj.metadata.begin_organisatie.definitie } }, 'bounding_box': obj.bounding_box }
def function[item_straat_adapter, parameter[obj, request]]: constant[ Adapter for rendering an object of :class:`crabpy.gateway.crab.Straat` to json. ] return[dictionary[[<ast.Constant object at 0x7da204567e50>, <ast.Constant object at 0x7da2045647c0>, <ast.Constant object at 0x7da204567160>, <ast.Constant object at 0x7da204567100>, <ast.Constant object at 0x7da204566020>, <ast.Constant object at 0x7da204564760>, <ast.Constant object at 0x7da204564880>], [<ast.Attribute object at 0x7da204565c90>, <ast.Attribute object at 0x7da204567550>, <ast.Attribute object at 0x7da2045643d0>, <ast.Dict object at 0x7da204565a80>, <ast.Dict object at 0x7da204564ac0>, <ast.Dict object at 0x7da204566200>, <ast.Attribute object at 0x7da1b0915ab0>]]]
keyword[def] identifier[item_straat_adapter] ( identifier[obj] , identifier[request] ): literal[string] keyword[return] { literal[string] : identifier[obj] . identifier[id] , literal[string] : identifier[obj] . identifier[label] , literal[string] : identifier[obj] . identifier[namen] , literal[string] :{ literal[string] : identifier[obj] . identifier[status] . identifier[id] , literal[string] : identifier[obj] . identifier[status] . identifier[naam] , literal[string] : identifier[obj] . identifier[status] . identifier[definitie] }, literal[string] :{ literal[string] : identifier[obj] . identifier[taal] . identifier[id] , literal[string] : identifier[obj] . identifier[taal] . identifier[naam] , literal[string] : identifier[obj] . identifier[taal] . identifier[definitie] }, literal[string] :{ literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_tijd] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_datum] , literal[string] :{ literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_bewerking] . identifier[id] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_bewerking] . identifier[naam] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_bewerking] . identifier[definitie] }, literal[string] :{ literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_organisatie] . identifier[id] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_organisatie] . identifier[naam] , literal[string] : identifier[obj] . identifier[metadata] . identifier[begin_organisatie] . identifier[definitie] } }, literal[string] : identifier[obj] . identifier[bounding_box] }
def item_straat_adapter(obj, request): """ Adapter for rendering an object of :class:`crabpy.gateway.crab.Straat` to json. """ return {'id': obj.id, 'label': obj.label, 'namen': obj.namen, 'status': {'id': obj.status.id, 'naam': obj.status.naam, 'definitie': obj.status.definitie}, 'taal': {'id': obj.taal.id, 'naam': obj.taal.naam, 'definitie': obj.taal.definitie}, 'metadata': {'begin_tijd': obj.metadata.begin_tijd, 'begin_datum': obj.metadata.begin_datum, 'begin_bewerking': {'id': obj.metadata.begin_bewerking.id, 'naam': obj.metadata.begin_bewerking.naam, 'definitie': obj.metadata.begin_bewerking.definitie}, 'begin_organisatie': {'id': obj.metadata.begin_organisatie.id, 'naam': obj.metadata.begin_organisatie.naam, 'definitie': obj.metadata.begin_organisatie.definitie}}, 'bounding_box': obj.bounding_box}
def is_active(self): """The images plugin is active iff any run has at least one relevant tag.""" if self._db_connection_provider: # The plugin is active if one relevant tag can be found in the database. db = self._db_connection_provider() cursor = db.execute( ''' SELECT 1 FROM Tags WHERE Tags.plugin_name = ? LIMIT 1 ''', (metadata.PLUGIN_NAME,)) return bool(list(cursor)) if not self._multiplexer: return False return bool(self._multiplexer.PluginRunToTagToContent(metadata.PLUGIN_NAME))
def function[is_active, parameter[self]]: constant[The images plugin is active iff any run has at least one relevant tag.] if name[self]._db_connection_provider begin[:] variable[db] assign[=] call[name[self]._db_connection_provider, parameter[]] variable[cursor] assign[=] call[name[db].execute, parameter[constant[ SELECT 1 FROM Tags WHERE Tags.plugin_name = ? LIMIT 1 ], tuple[[<ast.Attribute object at 0x7da1b21cf0a0>]]]] return[call[name[bool], parameter[call[name[list], parameter[name[cursor]]]]]] if <ast.UnaryOp object at 0x7da1b21cc8e0> begin[:] return[constant[False]] return[call[name[bool], parameter[call[name[self]._multiplexer.PluginRunToTagToContent, parameter[name[metadata].PLUGIN_NAME]]]]]
keyword[def] identifier[is_active] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_db_connection_provider] : identifier[db] = identifier[self] . identifier[_db_connection_provider] () identifier[cursor] = identifier[db] . identifier[execute] ( literal[string] , ( identifier[metadata] . identifier[PLUGIN_NAME] ,)) keyword[return] identifier[bool] ( identifier[list] ( identifier[cursor] )) keyword[if] keyword[not] identifier[self] . identifier[_multiplexer] : keyword[return] keyword[False] keyword[return] identifier[bool] ( identifier[self] . identifier[_multiplexer] . identifier[PluginRunToTagToContent] ( identifier[metadata] . identifier[PLUGIN_NAME] ))
def is_active(self): """The images plugin is active iff any run has at least one relevant tag.""" if self._db_connection_provider: # The plugin is active if one relevant tag can be found in the database. db = self._db_connection_provider() cursor = db.execute('\n SELECT 1\n FROM Tags\n WHERE Tags.plugin_name = ?\n LIMIT 1\n ', (metadata.PLUGIN_NAME,)) return bool(list(cursor)) # depends on [control=['if'], data=[]] if not self._multiplexer: return False # depends on [control=['if'], data=[]] return bool(self._multiplexer.PluginRunToTagToContent(metadata.PLUGIN_NAME))
def setRulerType( self, rulerType ): """ Sets the ruler type for this ruler to the inputed type. :param rulerType | <XChartRuler.Type> """ self._rulerType = rulerType self.clear() # handle custom types if ( rulerType == XChartRuler.Type.Monthly ): self.setNotches(['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
def function[setRulerType, parameter[self, rulerType]]: constant[ Sets the ruler type for this ruler to the inputed type. :param rulerType | <XChartRuler.Type> ] name[self]._rulerType assign[=] name[rulerType] call[name[self].clear, parameter[]] if compare[name[rulerType] equal[==] name[XChartRuler].Type.Monthly] begin[:] call[name[self].setNotches, parameter[list[[<ast.Constant object at 0x7da20c795ae0>, <ast.Constant object at 0x7da20c794490>, <ast.Constant object at 0x7da20c795e70>, <ast.Constant object at 0x7da20c796ad0>, <ast.Constant object at 0x7da20c796320>, <ast.Constant object at 0x7da20c795330>, <ast.Constant object at 0x7da20c795c90>, <ast.Constant object at 0x7da20c7961d0>, <ast.Constant object at 0x7da20c795ed0>, <ast.Constant object at 0x7da20c796350>, <ast.Constant object at 0x7da20c796620>, <ast.Constant object at 0x7da20c796b00>]]]]
keyword[def] identifier[setRulerType] ( identifier[self] , identifier[rulerType] ): literal[string] identifier[self] . identifier[_rulerType] = identifier[rulerType] identifier[self] . identifier[clear] () keyword[if] ( identifier[rulerType] == identifier[XChartRuler] . identifier[Type] . identifier[Monthly] ): identifier[self] . identifier[setNotches] ([ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ])
def setRulerType(self, rulerType): """ Sets the ruler type for this ruler to the inputed type. :param rulerType | <XChartRuler.Type> """ self._rulerType = rulerType self.clear() # handle custom types if rulerType == XChartRuler.Type.Monthly: self.setNotches(['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']) # depends on [control=['if'], data=[]]
def runCPU(): """Poll CPU usage, make predictions, and plot the results. Runs forever.""" # Create the model for predicting CPU usage. model = ModelFactory.create(model_params.MODEL_PARAMS) model.enableInference({'predictedField': 'cpu'}) # The shifter will align prediction and actual values. shifter = InferenceShifter() # Keep the last WINDOW predicted and actual values for plotting. actHistory = deque([0.0] * WINDOW, maxlen=60) predHistory = deque([0.0] * WINDOW, maxlen=60) # Initialize the plot lines that we will update with each new record. actline, = plt.plot(range(WINDOW), actHistory) predline, = plt.plot(range(WINDOW), predHistory) # Set the y-axis range. actline.axes.set_ylim(0, 100) predline.axes.set_ylim(0, 100) while True: s = time.time() # Get the CPU usage. cpu = psutil.cpu_percent() # Run the input through the model and shift the resulting prediction. modelInput = {'cpu': cpu} result = shifter.shift(model.run(modelInput)) # Update the trailing predicted and actual value deques. inference = result.inferences['multiStepBestPredictions'][5] if inference is not None: actHistory.append(result.rawInput['cpu']) predHistory.append(inference) # Redraw the chart with the new data. actline.set_ydata(actHistory) # update the data predline.set_ydata(predHistory) # update the data plt.draw() plt.legend( ('actual','predicted') ) # Make sure we wait a total of 2 seconds per iteration. try: plt.pause(SECONDS_PER_STEP) except: pass
def function[runCPU, parameter[]]: constant[Poll CPU usage, make predictions, and plot the results. Runs forever.] variable[model] assign[=] call[name[ModelFactory].create, parameter[name[model_params].MODEL_PARAMS]] call[name[model].enableInference, parameter[dictionary[[<ast.Constant object at 0x7da18bc704f0>], [<ast.Constant object at 0x7da18bc72f80>]]]] variable[shifter] assign[=] call[name[InferenceShifter], parameter[]] variable[actHistory] assign[=] call[name[deque], parameter[binary_operation[list[[<ast.Constant object at 0x7da18bc71f00>]] * name[WINDOW]]]] variable[predHistory] assign[=] call[name[deque], parameter[binary_operation[list[[<ast.Constant object at 0x7da18bc73850>]] * name[WINDOW]]]] <ast.Tuple object at 0x7da18bc701f0> assign[=] call[name[plt].plot, parameter[call[name[range], parameter[name[WINDOW]]], name[actHistory]]] <ast.Tuple object at 0x7da18bc71480> assign[=] call[name[plt].plot, parameter[call[name[range], parameter[name[WINDOW]]], name[predHistory]]] call[name[actline].axes.set_ylim, parameter[constant[0], constant[100]]] call[name[predline].axes.set_ylim, parameter[constant[0], constant[100]]] while constant[True] begin[:] variable[s] assign[=] call[name[time].time, parameter[]] variable[cpu] assign[=] call[name[psutil].cpu_percent, parameter[]] variable[modelInput] assign[=] dictionary[[<ast.Constant object at 0x7da18bc723b0>], [<ast.Name object at 0x7da18bc70280>]] variable[result] assign[=] call[name[shifter].shift, parameter[call[name[model].run, parameter[name[modelInput]]]]] variable[inference] assign[=] call[call[name[result].inferences][constant[multiStepBestPredictions]]][constant[5]] if compare[name[inference] is_not constant[None]] begin[:] call[name[actHistory].append, parameter[call[name[result].rawInput][constant[cpu]]]] call[name[predHistory].append, parameter[name[inference]]] call[name[actline].set_ydata, parameter[name[actHistory]]] call[name[predline].set_ydata, parameter[name[predHistory]]] call[name[plt].draw, parameter[]] call[name[plt].legend, parameter[tuple[[<ast.Constant object at 0x7da20e9b39d0>, <ast.Constant object at 0x7da20e9b3c10>]]]] <ast.Try object at 0x7da20e9b1540>
keyword[def] identifier[runCPU] (): literal[string] identifier[model] = identifier[ModelFactory] . identifier[create] ( identifier[model_params] . identifier[MODEL_PARAMS] ) identifier[model] . identifier[enableInference] ({ literal[string] : literal[string] }) identifier[shifter] = identifier[InferenceShifter] () identifier[actHistory] = identifier[deque] ([ literal[int] ]* identifier[WINDOW] , identifier[maxlen] = literal[int] ) identifier[predHistory] = identifier[deque] ([ literal[int] ]* identifier[WINDOW] , identifier[maxlen] = literal[int] ) identifier[actline] ,= identifier[plt] . identifier[plot] ( identifier[range] ( identifier[WINDOW] ), identifier[actHistory] ) identifier[predline] ,= identifier[plt] . identifier[plot] ( identifier[range] ( identifier[WINDOW] ), identifier[predHistory] ) identifier[actline] . identifier[axes] . identifier[set_ylim] ( literal[int] , literal[int] ) identifier[predline] . identifier[axes] . identifier[set_ylim] ( literal[int] , literal[int] ) keyword[while] keyword[True] : identifier[s] = identifier[time] . identifier[time] () identifier[cpu] = identifier[psutil] . identifier[cpu_percent] () identifier[modelInput] ={ literal[string] : identifier[cpu] } identifier[result] = identifier[shifter] . identifier[shift] ( identifier[model] . identifier[run] ( identifier[modelInput] )) identifier[inference] = identifier[result] . identifier[inferences] [ literal[string] ][ literal[int] ] keyword[if] identifier[inference] keyword[is] keyword[not] keyword[None] : identifier[actHistory] . identifier[append] ( identifier[result] . identifier[rawInput] [ literal[string] ]) identifier[predHistory] . identifier[append] ( identifier[inference] ) identifier[actline] . identifier[set_ydata] ( identifier[actHistory] ) identifier[predline] . identifier[set_ydata] ( identifier[predHistory] ) identifier[plt] . identifier[draw] () identifier[plt] . identifier[legend] (( literal[string] , literal[string] )) keyword[try] : identifier[plt] . identifier[pause] ( identifier[SECONDS_PER_STEP] ) keyword[except] : keyword[pass]
def runCPU(): """Poll CPU usage, make predictions, and plot the results. Runs forever.""" # Create the model for predicting CPU usage. model = ModelFactory.create(model_params.MODEL_PARAMS) model.enableInference({'predictedField': 'cpu'}) # The shifter will align prediction and actual values. shifter = InferenceShifter() # Keep the last WINDOW predicted and actual values for plotting. actHistory = deque([0.0] * WINDOW, maxlen=60) predHistory = deque([0.0] * WINDOW, maxlen=60) # Initialize the plot lines that we will update with each new record. (actline,) = plt.plot(range(WINDOW), actHistory) (predline,) = plt.plot(range(WINDOW), predHistory) # Set the y-axis range. actline.axes.set_ylim(0, 100) predline.axes.set_ylim(0, 100) while True: s = time.time() # Get the CPU usage. cpu = psutil.cpu_percent() # Run the input through the model and shift the resulting prediction. modelInput = {'cpu': cpu} result = shifter.shift(model.run(modelInput)) # Update the trailing predicted and actual value deques. inference = result.inferences['multiStepBestPredictions'][5] if inference is not None: actHistory.append(result.rawInput['cpu']) predHistory.append(inference) # depends on [control=['if'], data=['inference']] # Redraw the chart with the new data. actline.set_ydata(actHistory) # update the data predline.set_ydata(predHistory) # update the data plt.draw() plt.legend(('actual', 'predicted')) # Make sure we wait a total of 2 seconds per iteration. try: plt.pause(SECONDS_PER_STEP) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
def show_busy(self): """Lock buttons and enable the busy cursor.""" self.progress_bar.show() self.parent.pbnNext.setEnabled(False) self.parent.pbnBack.setEnabled(False) self.parent.pbnCancel.setEnabled(False) self.parent.repaint() enable_busy_cursor() QgsApplication.processEvents()
def function[show_busy, parameter[self]]: constant[Lock buttons and enable the busy cursor.] call[name[self].progress_bar.show, parameter[]] call[name[self].parent.pbnNext.setEnabled, parameter[constant[False]]] call[name[self].parent.pbnBack.setEnabled, parameter[constant[False]]] call[name[self].parent.pbnCancel.setEnabled, parameter[constant[False]]] call[name[self].parent.repaint, parameter[]] call[name[enable_busy_cursor], parameter[]] call[name[QgsApplication].processEvents, parameter[]]
keyword[def] identifier[show_busy] ( identifier[self] ): literal[string] identifier[self] . identifier[progress_bar] . identifier[show] () identifier[self] . identifier[parent] . identifier[pbnNext] . identifier[setEnabled] ( keyword[False] ) identifier[self] . identifier[parent] . identifier[pbnBack] . identifier[setEnabled] ( keyword[False] ) identifier[self] . identifier[parent] . identifier[pbnCancel] . identifier[setEnabled] ( keyword[False] ) identifier[self] . identifier[parent] . identifier[repaint] () identifier[enable_busy_cursor] () identifier[QgsApplication] . identifier[processEvents] ()
def show_busy(self): """Lock buttons and enable the busy cursor.""" self.progress_bar.show() self.parent.pbnNext.setEnabled(False) self.parent.pbnBack.setEnabled(False) self.parent.pbnCancel.setEnabled(False) self.parent.repaint() enable_busy_cursor() QgsApplication.processEvents()
def _choose_what_to_display(self, force_refresh=False): """ Choose what combination to display on the bar. By default we try to display the active layout on the first run, else we display the last selected combination. """ for _ in range(len(self.available_combinations)): if ( self.displayed is None and self.available_combinations[0] == self.active_layout ): self.displayed = self.available_combinations[0] break else: if self.displayed == self.available_combinations[0]: break else: self.available_combinations.rotate(1) else: if force_refresh: self.displayed = self.available_combinations[0] else: self.py3.log('xrandr error="displayed combination is not available"')
def function[_choose_what_to_display, parameter[self, force_refresh]]: constant[ Choose what combination to display on the bar. By default we try to display the active layout on the first run, else we display the last selected combination. ] for taget[name[_]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].available_combinations]]]]] begin[:] if <ast.BoolOp object at 0x7da1b1d0f190> begin[:] name[self].displayed assign[=] call[name[self].available_combinations][constant[0]] break
keyword[def] identifier[_choose_what_to_display] ( identifier[self] , identifier[force_refresh] = keyword[False] ): literal[string] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[available_combinations] )): keyword[if] ( identifier[self] . identifier[displayed] keyword[is] keyword[None] keyword[and] identifier[self] . identifier[available_combinations] [ literal[int] ]== identifier[self] . identifier[active_layout] ): identifier[self] . identifier[displayed] = identifier[self] . identifier[available_combinations] [ literal[int] ] keyword[break] keyword[else] : keyword[if] identifier[self] . identifier[displayed] == identifier[self] . identifier[available_combinations] [ literal[int] ]: keyword[break] keyword[else] : identifier[self] . identifier[available_combinations] . identifier[rotate] ( literal[int] ) keyword[else] : keyword[if] identifier[force_refresh] : identifier[self] . identifier[displayed] = identifier[self] . identifier[available_combinations] [ literal[int] ] keyword[else] : identifier[self] . identifier[py3] . identifier[log] ( literal[string] )
def _choose_what_to_display(self, force_refresh=False): """ Choose what combination to display on the bar. By default we try to display the active layout on the first run, else we display the last selected combination. """ for _ in range(len(self.available_combinations)): if self.displayed is None and self.available_combinations[0] == self.active_layout: self.displayed = self.available_combinations[0] break # depends on [control=['if'], data=[]] elif self.displayed == self.available_combinations[0]: break # depends on [control=['if'], data=[]] else: self.available_combinations.rotate(1) # depends on [control=['for'], data=[]] else: if force_refresh: self.displayed = self.available_combinations[0] # depends on [control=['if'], data=[]] else: self.py3.log('xrandr error="displayed combination is not available"')
def decrement(self, delta=1): """Decrement counter value. Parameters ---------- value_change : int Amount by which to subtract from the counter """ check_call(_LIB.MXProfileAdjustCounter(self.handle, -int(delta)))
def function[decrement, parameter[self, delta]]: constant[Decrement counter value. Parameters ---------- value_change : int Amount by which to subtract from the counter ] call[name[check_call], parameter[call[name[_LIB].MXProfileAdjustCounter, parameter[name[self].handle, <ast.UnaryOp object at 0x7da1b2089960>]]]]
keyword[def] identifier[decrement] ( identifier[self] , identifier[delta] = literal[int] ): literal[string] identifier[check_call] ( identifier[_LIB] . identifier[MXProfileAdjustCounter] ( identifier[self] . identifier[handle] ,- identifier[int] ( identifier[delta] )))
def decrement(self, delta=1): """Decrement counter value. Parameters ---------- value_change : int Amount by which to subtract from the counter """ check_call(_LIB.MXProfileAdjustCounter(self.handle, -int(delta)))
def get_block_entity_data(self, pos_or_x, y=None, z=None): """ Access block entity data. Returns: BlockEntityData subclass instance or None if no block entity data is stored for that location. """ if None not in (y, z): # x y z supplied pos_or_x = pos_or_x, y, z coord_tuple = tuple(int(floor(c)) for c in pos_or_x) return self.block_entities.get(coord_tuple, None)
def function[get_block_entity_data, parameter[self, pos_or_x, y, z]]: constant[ Access block entity data. Returns: BlockEntityData subclass instance or None if no block entity data is stored for that location. ] if compare[constant[None] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Name object at 0x7da1b2852a40>, <ast.Name object at 0x7da1b2852140>]]] begin[:] variable[pos_or_x] assign[=] tuple[[<ast.Name object at 0x7da1b2852f20>, <ast.Name object at 0x7da1b2850b20>, <ast.Name object at 0x7da1b2851bd0>]] variable[coord_tuple] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b283a3b0>]] return[call[name[self].block_entities.get, parameter[name[coord_tuple], constant[None]]]]
keyword[def] identifier[get_block_entity_data] ( identifier[self] , identifier[pos_or_x] , identifier[y] = keyword[None] , identifier[z] = keyword[None] ): literal[string] keyword[if] keyword[None] keyword[not] keyword[in] ( identifier[y] , identifier[z] ): identifier[pos_or_x] = identifier[pos_or_x] , identifier[y] , identifier[z] identifier[coord_tuple] = identifier[tuple] ( identifier[int] ( identifier[floor] ( identifier[c] )) keyword[for] identifier[c] keyword[in] identifier[pos_or_x] ) keyword[return] identifier[self] . identifier[block_entities] . identifier[get] ( identifier[coord_tuple] , keyword[None] )
def get_block_entity_data(self, pos_or_x, y=None, z=None): """ Access block entity data. Returns: BlockEntityData subclass instance or None if no block entity data is stored for that location. """ if None not in (y, z): # x y z supplied pos_or_x = (pos_or_x, y, z) # depends on [control=['if'], data=[]] coord_tuple = tuple((int(floor(c)) for c in pos_or_x)) return self.block_entities.get(coord_tuple, None)
def any_in_string(l, s): """ Check if any items in a list is in a string :params l: dict :params s: string :return bool: """ return any([i in l for i in l if i in s])
def function[any_in_string, parameter[l, s]]: constant[ Check if any items in a list is in a string :params l: dict :params s: string :return bool: ] return[call[name[any], parameter[<ast.ListComp object at 0x7da1b2407e50>]]]
keyword[def] identifier[any_in_string] ( identifier[l] , identifier[s] ): literal[string] keyword[return] identifier[any] ([ identifier[i] keyword[in] identifier[l] keyword[for] identifier[i] keyword[in] identifier[l] keyword[if] identifier[i] keyword[in] identifier[s] ])
def any_in_string(l, s): """ Check if any items in a list is in a string :params l: dict :params s: string :return bool: """ return any([i in l for i in l if i in s])
def top(self): """ Constructs the top line of the element""" ret = self.top_format % self.top_connect.center( self.width, self.top_pad) if self.right_fill: ret = ret.ljust(self.right_fill, self.top_pad) if self.left_fill: ret = ret.rjust(self.left_fill, self.top_pad) ret = ret.center(self.layer_width, self.top_bck) return ret
def function[top, parameter[self]]: constant[ Constructs the top line of the element] variable[ret] assign[=] binary_operation[name[self].top_format <ast.Mod object at 0x7da2590d6920> call[name[self].top_connect.center, parameter[name[self].width, name[self].top_pad]]] if name[self].right_fill begin[:] variable[ret] assign[=] call[name[ret].ljust, parameter[name[self].right_fill, name[self].top_pad]] if name[self].left_fill begin[:] variable[ret] assign[=] call[name[ret].rjust, parameter[name[self].left_fill, name[self].top_pad]] variable[ret] assign[=] call[name[ret].center, parameter[name[self].layer_width, name[self].top_bck]] return[name[ret]]
keyword[def] identifier[top] ( identifier[self] ): literal[string] identifier[ret] = identifier[self] . identifier[top_format] % identifier[self] . identifier[top_connect] . identifier[center] ( identifier[self] . identifier[width] , identifier[self] . identifier[top_pad] ) keyword[if] identifier[self] . identifier[right_fill] : identifier[ret] = identifier[ret] . identifier[ljust] ( identifier[self] . identifier[right_fill] , identifier[self] . identifier[top_pad] ) keyword[if] identifier[self] . identifier[left_fill] : identifier[ret] = identifier[ret] . identifier[rjust] ( identifier[self] . identifier[left_fill] , identifier[self] . identifier[top_pad] ) identifier[ret] = identifier[ret] . identifier[center] ( identifier[self] . identifier[layer_width] , identifier[self] . identifier[top_bck] ) keyword[return] identifier[ret]
def top(self): """ Constructs the top line of the element""" ret = self.top_format % self.top_connect.center(self.width, self.top_pad) if self.right_fill: ret = ret.ljust(self.right_fill, self.top_pad) # depends on [control=['if'], data=[]] if self.left_fill: ret = ret.rjust(self.left_fill, self.top_pad) # depends on [control=['if'], data=[]] ret = ret.center(self.layer_width, self.top_bck) return ret
def _compute_output_layer_expected(self): """Compute output layers expected that the IF will produce. Be careful when you call this function. It's a private function, better to use the public function `output_layers_expected()`. :return: List of expected layer keys. :rtype: list """ # Actually, an IF can produce maximum 6 layers, by default. expected = [ layer_purpose_exposure_summary['key'], # 1 layer_purpose_aggregate_hazard_impacted['key'], # 2 layer_purpose_aggregation_summary['key'], # 3 layer_purpose_analysis_impacted['key'], # 4 layer_purpose_exposure_summary_table['key'], # 5 layer_purpose_profiling['key'], # 6 ] if is_raster_layer(self.exposure): if self.exposure.keywords.get('layer_mode') == 'continuous': # If the exposure is a continuous raster, we can't provide the # exposure impacted layer. expected.remove(layer_purpose_exposure_summary['key']) if not self.exposure.keywords.get('classification'): # If the exposure doesn't have a classification, such as population # census layer, we can't provide an exposure breakdown layer. expected.remove(layer_purpose_exposure_summary_table['key']) # We add any layers produced by pre-processors for preprocessor in self._preprocessors: if preprocessor['output'].get('type') == 'layer': expected.append(preprocessor['output'].get('value')['key']) return expected
def function[_compute_output_layer_expected, parameter[self]]: constant[Compute output layers expected that the IF will produce. Be careful when you call this function. It's a private function, better to use the public function `output_layers_expected()`. :return: List of expected layer keys. :rtype: list ] variable[expected] assign[=] list[[<ast.Subscript object at 0x7da1b0ca7460>, <ast.Subscript object at 0x7da1b0ca7d00>, <ast.Subscript object at 0x7da1b0ca67a0>, <ast.Subscript object at 0x7da1b0ca7550>, <ast.Subscript object at 0x7da1b0ca7940>, <ast.Subscript object at 0x7da1b0ca69e0>]] if call[name[is_raster_layer], parameter[name[self].exposure]] begin[:] if compare[call[name[self].exposure.keywords.get, parameter[constant[layer_mode]]] equal[==] constant[continuous]] begin[:] call[name[expected].remove, parameter[call[name[layer_purpose_exposure_summary]][constant[key]]]] if <ast.UnaryOp object at 0x7da1b0c366e0> begin[:] call[name[expected].remove, parameter[call[name[layer_purpose_exposure_summary_table]][constant[key]]]] for taget[name[preprocessor]] in starred[name[self]._preprocessors] begin[:] if compare[call[call[name[preprocessor]][constant[output]].get, parameter[constant[type]]] equal[==] constant[layer]] begin[:] call[name[expected].append, parameter[call[call[call[name[preprocessor]][constant[output]].get, parameter[constant[value]]]][constant[key]]]] return[name[expected]]
keyword[def] identifier[_compute_output_layer_expected] ( identifier[self] ): literal[string] identifier[expected] =[ identifier[layer_purpose_exposure_summary] [ literal[string] ], identifier[layer_purpose_aggregate_hazard_impacted] [ literal[string] ], identifier[layer_purpose_aggregation_summary] [ literal[string] ], identifier[layer_purpose_analysis_impacted] [ literal[string] ], identifier[layer_purpose_exposure_summary_table] [ literal[string] ], identifier[layer_purpose_profiling] [ literal[string] ], ] keyword[if] identifier[is_raster_layer] ( identifier[self] . identifier[exposure] ): keyword[if] identifier[self] . identifier[exposure] . identifier[keywords] . identifier[get] ( literal[string] )== literal[string] : identifier[expected] . identifier[remove] ( identifier[layer_purpose_exposure_summary] [ literal[string] ]) keyword[if] keyword[not] identifier[self] . identifier[exposure] . identifier[keywords] . identifier[get] ( literal[string] ): identifier[expected] . identifier[remove] ( identifier[layer_purpose_exposure_summary_table] [ literal[string] ]) keyword[for] identifier[preprocessor] keyword[in] identifier[self] . identifier[_preprocessors] : keyword[if] identifier[preprocessor] [ literal[string] ]. identifier[get] ( literal[string] )== literal[string] : identifier[expected] . identifier[append] ( identifier[preprocessor] [ literal[string] ]. identifier[get] ( literal[string] )[ literal[string] ]) keyword[return] identifier[expected]
def _compute_output_layer_expected(self): """Compute output layers expected that the IF will produce. Be careful when you call this function. It's a private function, better to use the public function `output_layers_expected()`. :return: List of expected layer keys. :rtype: list """ # Actually, an IF can produce maximum 6 layers, by default. # 1 # 2 # 3 # 4 # 5 # 6 expected = [layer_purpose_exposure_summary['key'], layer_purpose_aggregate_hazard_impacted['key'], layer_purpose_aggregation_summary['key'], layer_purpose_analysis_impacted['key'], layer_purpose_exposure_summary_table['key'], layer_purpose_profiling['key']] if is_raster_layer(self.exposure): if self.exposure.keywords.get('layer_mode') == 'continuous': # If the exposure is a continuous raster, we can't provide the # exposure impacted layer. expected.remove(layer_purpose_exposure_summary['key']) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if not self.exposure.keywords.get('classification'): # If the exposure doesn't have a classification, such as population # census layer, we can't provide an exposure breakdown layer. expected.remove(layer_purpose_exposure_summary_table['key']) # depends on [control=['if'], data=[]] # We add any layers produced by pre-processors for preprocessor in self._preprocessors: if preprocessor['output'].get('type') == 'layer': expected.append(preprocessor['output'].get('value')['key']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['preprocessor']] return expected
def _match_cubes(ccube_clean, ccube_dirty, bexpcube_clean, bexpcube_dirty, hpx_order): """ Match the HEALPIX scheme and order of all the input cubes return a dictionary of cubes with the same HEALPIX scheme and order """ if hpx_order == ccube_clean.hpx.order: ccube_clean_at_order = ccube_clean else: ccube_clean_at_order = ccube_clean.ud_grade(hpx_order, preserve_counts=True) if hpx_order == ccube_dirty.hpx.order: ccube_dirty_at_order = ccube_dirty else: ccube_dirty_at_order = ccube_dirty.ud_grade(hpx_order, preserve_counts=True) if hpx_order == bexpcube_clean.hpx.order: bexpcube_clean_at_order = bexpcube_clean else: bexpcube_clean_at_order = bexpcube_clean.ud_grade(hpx_order, preserve_counts=True) if hpx_order == bexpcube_dirty.hpx.order: bexpcube_dirty_at_order = bexpcube_dirty else: bexpcube_dirty_at_order = bexpcube_dirty.ud_grade(hpx_order, preserve_counts=True) if ccube_dirty_at_order.hpx.nest != ccube_clean.hpx.nest: ccube_dirty_at_order = ccube_dirty_at_order.swap_scheme() if bexpcube_clean_at_order.hpx.nest != ccube_clean.hpx.nest: bexpcube_clean_at_order = bexpcube_clean_at_order.swap_scheme() if bexpcube_dirty_at_order.hpx.nest != ccube_clean.hpx.nest: bexpcube_dirty_at_order = bexpcube_dirty_at_order.swap_scheme() ret_dict = dict(ccube_clean=ccube_clean_at_order, ccube_dirty=ccube_dirty_at_order, bexpcube_clean=bexpcube_clean_at_order, bexpcube_dirty=bexpcube_dirty_at_order) return ret_dict
def function[_match_cubes, parameter[ccube_clean, ccube_dirty, bexpcube_clean, bexpcube_dirty, hpx_order]]: constant[ Match the HEALPIX scheme and order of all the input cubes return a dictionary of cubes with the same HEALPIX scheme and order ] if compare[name[hpx_order] equal[==] name[ccube_clean].hpx.order] begin[:] variable[ccube_clean_at_order] assign[=] name[ccube_clean] if compare[name[hpx_order] equal[==] name[ccube_dirty].hpx.order] begin[:] variable[ccube_dirty_at_order] assign[=] name[ccube_dirty] if compare[name[hpx_order] equal[==] name[bexpcube_clean].hpx.order] begin[:] variable[bexpcube_clean_at_order] assign[=] name[bexpcube_clean] if compare[name[hpx_order] equal[==] name[bexpcube_dirty].hpx.order] begin[:] variable[bexpcube_dirty_at_order] assign[=] name[bexpcube_dirty] if compare[name[ccube_dirty_at_order].hpx.nest not_equal[!=] name[ccube_clean].hpx.nest] begin[:] variable[ccube_dirty_at_order] assign[=] call[name[ccube_dirty_at_order].swap_scheme, parameter[]] if compare[name[bexpcube_clean_at_order].hpx.nest not_equal[!=] name[ccube_clean].hpx.nest] begin[:] variable[bexpcube_clean_at_order] assign[=] call[name[bexpcube_clean_at_order].swap_scheme, parameter[]] if compare[name[bexpcube_dirty_at_order].hpx.nest not_equal[!=] name[ccube_clean].hpx.nest] begin[:] variable[bexpcube_dirty_at_order] assign[=] call[name[bexpcube_dirty_at_order].swap_scheme, parameter[]] variable[ret_dict] assign[=] call[name[dict], parameter[]] return[name[ret_dict]]
keyword[def] identifier[_match_cubes] ( identifier[ccube_clean] , identifier[ccube_dirty] , identifier[bexpcube_clean] , identifier[bexpcube_dirty] , identifier[hpx_order] ): literal[string] keyword[if] identifier[hpx_order] == identifier[ccube_clean] . identifier[hpx] . identifier[order] : identifier[ccube_clean_at_order] = identifier[ccube_clean] keyword[else] : identifier[ccube_clean_at_order] = identifier[ccube_clean] . identifier[ud_grade] ( identifier[hpx_order] , identifier[preserve_counts] = keyword[True] ) keyword[if] identifier[hpx_order] == identifier[ccube_dirty] . identifier[hpx] . identifier[order] : identifier[ccube_dirty_at_order] = identifier[ccube_dirty] keyword[else] : identifier[ccube_dirty_at_order] = identifier[ccube_dirty] . identifier[ud_grade] ( identifier[hpx_order] , identifier[preserve_counts] = keyword[True] ) keyword[if] identifier[hpx_order] == identifier[bexpcube_clean] . identifier[hpx] . identifier[order] : identifier[bexpcube_clean_at_order] = identifier[bexpcube_clean] keyword[else] : identifier[bexpcube_clean_at_order] = identifier[bexpcube_clean] . identifier[ud_grade] ( identifier[hpx_order] , identifier[preserve_counts] = keyword[True] ) keyword[if] identifier[hpx_order] == identifier[bexpcube_dirty] . identifier[hpx] . identifier[order] : identifier[bexpcube_dirty_at_order] = identifier[bexpcube_dirty] keyword[else] : identifier[bexpcube_dirty_at_order] = identifier[bexpcube_dirty] . identifier[ud_grade] ( identifier[hpx_order] , identifier[preserve_counts] = keyword[True] ) keyword[if] identifier[ccube_dirty_at_order] . identifier[hpx] . identifier[nest] != identifier[ccube_clean] . identifier[hpx] . identifier[nest] : identifier[ccube_dirty_at_order] = identifier[ccube_dirty_at_order] . identifier[swap_scheme] () keyword[if] identifier[bexpcube_clean_at_order] . identifier[hpx] . identifier[nest] != identifier[ccube_clean] . identifier[hpx] . identifier[nest] : identifier[bexpcube_clean_at_order] = identifier[bexpcube_clean_at_order] . identifier[swap_scheme] () keyword[if] identifier[bexpcube_dirty_at_order] . identifier[hpx] . identifier[nest] != identifier[ccube_clean] . identifier[hpx] . identifier[nest] : identifier[bexpcube_dirty_at_order] = identifier[bexpcube_dirty_at_order] . identifier[swap_scheme] () identifier[ret_dict] = identifier[dict] ( identifier[ccube_clean] = identifier[ccube_clean_at_order] , identifier[ccube_dirty] = identifier[ccube_dirty_at_order] , identifier[bexpcube_clean] = identifier[bexpcube_clean_at_order] , identifier[bexpcube_dirty] = identifier[bexpcube_dirty_at_order] ) keyword[return] identifier[ret_dict]
def _match_cubes(ccube_clean, ccube_dirty, bexpcube_clean, bexpcube_dirty, hpx_order): """ Match the HEALPIX scheme and order of all the input cubes return a dictionary of cubes with the same HEALPIX scheme and order """ if hpx_order == ccube_clean.hpx.order: ccube_clean_at_order = ccube_clean # depends on [control=['if'], data=[]] else: ccube_clean_at_order = ccube_clean.ud_grade(hpx_order, preserve_counts=True) if hpx_order == ccube_dirty.hpx.order: ccube_dirty_at_order = ccube_dirty # depends on [control=['if'], data=[]] else: ccube_dirty_at_order = ccube_dirty.ud_grade(hpx_order, preserve_counts=True) if hpx_order == bexpcube_clean.hpx.order: bexpcube_clean_at_order = bexpcube_clean # depends on [control=['if'], data=[]] else: bexpcube_clean_at_order = bexpcube_clean.ud_grade(hpx_order, preserve_counts=True) if hpx_order == bexpcube_dirty.hpx.order: bexpcube_dirty_at_order = bexpcube_dirty # depends on [control=['if'], data=[]] else: bexpcube_dirty_at_order = bexpcube_dirty.ud_grade(hpx_order, preserve_counts=True) if ccube_dirty_at_order.hpx.nest != ccube_clean.hpx.nest: ccube_dirty_at_order = ccube_dirty_at_order.swap_scheme() # depends on [control=['if'], data=[]] if bexpcube_clean_at_order.hpx.nest != ccube_clean.hpx.nest: bexpcube_clean_at_order = bexpcube_clean_at_order.swap_scheme() # depends on [control=['if'], data=[]] if bexpcube_dirty_at_order.hpx.nest != ccube_clean.hpx.nest: bexpcube_dirty_at_order = bexpcube_dirty_at_order.swap_scheme() # depends on [control=['if'], data=[]] ret_dict = dict(ccube_clean=ccube_clean_at_order, ccube_dirty=ccube_dirty_at_order, bexpcube_clean=bexpcube_clean_at_order, bexpcube_dirty=bexpcube_dirty_at_order) return ret_dict
def sample_batch(self, nlive_new=500, update_interval=None, logl_bounds=None, maxiter=None, maxcall=None, save_bounds=True): """ Generate an additional series of nested samples that will be combined with the previous set of dead points. Works by hacking the internal `sampler` object. Instantiates a generator that will be called by the user. Parameters ---------- nlive_new : int Number of new live points to be added. Default is `500`. update_interval : int or float, optional If an integer is passed, only update the bounding distribution every `update_interval`-th likelihood call. If a float is passed, update the bound after every `round(update_interval * nlive)`-th likelihood call. Larger update intervals can be more efficient when the likelihood function is quick to evaluate. If no value is provided, defaults to the value passed during initialization. logl_bounds : tuple of size (2,), optional The ln(likelihood) bounds used to bracket the run. If `None`, the default bounds span the entire range covered by the original run. maxiter : int, optional Maximum number of iterations. Iteration may stop earlier if the termination condition is reached. Default is `sys.maxsize` (no limit). maxcall : int, optional Maximum number of likelihood evaluations. Iteration may stop earlier if termination condition is reached. Default is `sys.maxsize` (no limit). save_bounds : bool, optional Whether or not to save past distributions used to bound the live points internally. Default is `True`. Returns ------- worst : int Index of the live point with the worst likelihood. This is our new dead point sample. **Negative values indicate the index of a new live point generated when initializing a new batch.** ustar : `~numpy.ndarray` with shape (npdim,) Position of the sample. vstar : `~numpy.ndarray` with shape (ndim,) Transformed position of the sample. loglstar : float Ln(likelihood) of the sample. nc : int Number of likelihood calls performed before the new live point was accepted. worst_it : int Iteration when the live (now dead) point was originally proposed. boundidx : int Index of the bound the dead point was originally drawn from. bounditer : int Index of the bound being used at the current iteration. eff : float The cumulative sampling efficiency (in percent). """ # Initialize default values. if maxcall is None: maxcall = sys.maxsize if maxiter is None: maxiter = sys.maxsize if nlive_new <= 2 * self.npdim: warnings.warn("Beware: `nlive_batch <= 2 * ndim`!") self.sampler.save_bounds = save_bounds # Initialize starting values. h = 0.0 # Information, initially *0.* logz = -1.e300 # ln(evidence), initially *0.* logvol = 0. # initially contains the whole prior (volume=1.) # Grab results from base run. base_id = np.array(self.base_id) base_u = np.array(self.base_u) base_v = np.array(self.base_v) base_logl = np.array(self.base_logl) base_n = np.array(self.base_n) base_scale = np.array(self.base_scale) nbase = len(base_n) nblive = self.nlive_init # Reset "new" results. self.new_id = [] self.new_u = [] self.new_v = [] self.new_logl = [] self.new_nc = [] self.new_it = [] self.new_n = [] self.new_boundidx = [] self.new_bounditer = [] self.new_scale = [] self.new_logl_min, self.new_logl_max = -np.inf, np.inf # Initialize ln(likelihood) bounds. if logl_bounds is None: logl_min, logl_max = -np.inf, max(base_logl[:-nblive]) else: logl_min, logl_max = logl_bounds self.new_logl_min, self.new_logl_max = logl_min, logl_max # Check whether the lower bound encompasses all previous base samples. psel = np.all(logl_min <= base_logl) vol = 1. - 1. / nblive # starting ln(prior volume) if psel: # If the lower bound encompasses all base samples, we want # to propose a new set of points from the unit cube. live_u = self.rstate.rand(nlive_new, self.npdim) if self.use_pool_ptform: live_v = np.array(list(self.M(self.prior_transform, np.array(live_u)))) else: live_v = np.array(list(map(self.prior_transform, np.array(live_u)))) if self.use_pool_logl: live_logl = np.array(list(self.M(self.loglikelihood, np.array(live_v)))) else: live_logl = np.array(list(map(self.loglikelihood, np.array(live_v)))) # Convert all `-np.inf` log-likelihoods to finite large numbers. # Necessary to keep estimators in our sampler from breaking. for i, logl in enumerate(live_logl): if not np.isfinite(logl): if np.sign(logl) < 0: live_logl[i] = -1e300 else: raise ValueError("The log-likelihood ({0}) of live " "point {1} located at u={2} v={3} " " is invalid." .format(logl, i, live_u[i], live_v[i])) live_bound = np.zeros(nlive_new, dtype='int') live_it = np.zeros(nlive_new, dtype='int') + self.it live_nc = np.ones(nlive_new, dtype='int') self.ncall += nlive_new # Return live points in generator format. for i in range(nlive_new): yield (-i - 1, live_u[i], live_v[i], live_logl[i], live_nc[i], live_it[i], 0, 0, self.eff) else: # If the lower bound doesn't encompass all base samples, we need # to "rewind" our previous base run until we arrive at the # relevant set of live points (and scale) at the bound. live_u = np.empty((nblive, self.npdim)) live_v = np.empty((nblive, base_v.shape[1])) live_logl = np.empty(nblive) live_u[base_id[-nblive:]] = base_u[-nblive:] live_v[base_id[-nblive:]] = base_v[-nblive:] live_logl[base_id[-nblive:]] = base_logl[-nblive:] for i in range(1, nbase - nblive): r = -(nblive + i) uidx = base_id[r] live_u[uidx] = base_u[r] live_v[uidx] = base_v[r] live_logl[uidx] = base_logl[r] if live_logl[uidx] <= logl_min: break live_scale = base_scale[r] # Hack the internal sampler by overwriting the live points # and scale factor. self.sampler.nlive = nblive self.sampler.live_u = np.array(live_u) self.sampler.live_v = np.array(live_v) self.sampler.live_logl = np.array(live_logl) self.sampler.scale = live_scale # Trigger an update of the internal bounding distribution based # on the "new" set of live points. vol = math.exp(- 1. * (nbase + r) / nblive) loglmin = min(live_logl) if self.sampler._beyond_unit_bound(loglmin): bound = self.sampler.update(vol / nblive) if save_bounds: self.sampler.bound.append(copy.deepcopy(bound)) self.sampler.nbound += 1 self.sampler.since_update = 0 # Sample a new batch of `nlive_new` live points using the # internal sampler given the `logl_min` constraint. live_u = np.empty((nlive_new, self.npdim)) live_v = np.empty((nlive_new, base_v.shape[1])) live_logl = np.empty(nlive_new) live_bound = np.zeros(nlive_new, dtype='int') if self.sampler._beyond_unit_bound(loglmin): live_bound += self.sampler.nbound - 1 live_it = np.empty(nlive_new, dtype='int') live_nc = np.empty(nlive_new, dtype='int') for i in range(nlive_new): (live_u[i], live_v[i], live_logl[i], live_nc[i]) = self.sampler._new_point(logl_min, math.log(vol)) live_it[i] = self.it self.ncall += live_nc[i] # Return live points in generator format. yield (-i - 1, live_u[i], live_v[i], live_logl[i], live_nc[i], live_it[i], live_bound[i], live_bound[i], self.eff) # Overwrite the previous set of live points in our internal sampler # with the new batch of points we just generated. self.sampler.nlive = nlive_new self.sampler.live_u = np.array(live_u) self.sampler.live_v = np.array(live_v) self.sampler.live_logl = np.array(live_logl) self.sampler.live_bound = np.array(live_bound) self.sampler.live_it = np.array(live_it) # Trigger an update of the internal bounding distribution (again). loglmin = min(live_logl) if self.sampler._beyond_unit_bound(loglmin): bound = self.sampler.update(vol / nlive_new) if save_bounds: self.sampler.bound.append(copy.deepcopy(bound)) self.sampler.nbound += 1 self.sampler.since_update = 0 # Copy over bound reference. self.bound = self.sampler.bound # Update `update_interval` based on our new set of live points. if update_interval is None: update_interval = self.update_interval if isinstance(update_interval, float): update_interval = int(round(self.update_interval * nlive_new)) if self.bounding == 'none': update_interval = np.inf # no need to update with no bounds self.sampler.update_interval = update_interval # Update internal ln(prior volume)-based quantities used to set things # like `pointvol` that help to prevent constructing over-constrained # bounding distributions. if self.new_logl_min == -np.inf: bound_logvol = 0. else: vol_idx = np.argmin(abs(self.saved_logl - self.new_logl_min)) bound_logvol = self.saved_logvol[vol_idx] bound_dlv = math.log((nlive_new + 1.) / nlive_new) self.sampler.saved_logvol[-1] = bound_logvol self.sampler.dlv = bound_dlv # Tell the sampler *not* to try and remove the previous addition of # live points. All the hacks above make the internal results # garbage anyways. self.sampler.added_live = False # Run the sampler internally as a generator until we hit # the lower likelihood threshold. Afterwards, we add in our remaining # live points *as if* we had terminated the run. This allows us to # sample past the original bounds "for free". for i in range(1): for it, results in enumerate(self.sampler.sample(dlogz=0., logl_max=logl_max, maxiter=maxiter-nlive_new-1, maxcall=maxcall-sum(live_nc), save_samples=False, save_bounds=save_bounds)): # Grab results. (worst, ustar, vstar, loglstar, logvol, logwt, logz, logzvar, h, nc, worst_it, boundidx, bounditer, eff, delta_logz) = results # Save results. self.new_id.append(worst) self.new_u.append(ustar) self.new_v.append(vstar) self.new_logl.append(loglstar) self.new_nc.append(nc) self.new_it.append(worst_it) self.new_n.append(nlive_new) self.new_boundidx.append(boundidx) self.new_bounditer.append(bounditer) self.new_scale.append(self.sampler.scale) # Increment relevant counters. self.ncall += nc self.eff = 100. * self.it / self.ncall self.it += 1 yield (worst, ustar, vstar, loglstar, nc, worst_it, boundidx, bounditer, self.eff) for it, results in enumerate(self.sampler.add_live_points()): # Grab results. (worst, ustar, vstar, loglstar, logvol, logwt, logz, logzvar, h, nc, worst_it, boundidx, bounditer, eff, delta_logz) = results # Save results. self.new_id.append(worst) self.new_u.append(ustar) self.new_v.append(vstar) self.new_logl.append(loglstar) self.new_nc.append(live_nc[worst]) self.new_it.append(worst_it) self.new_n.append(nlive_new - it) self.new_boundidx.append(boundidx) self.new_bounditer.append(bounditer) self.new_scale.append(self.sampler.scale) # Increment relevant counters. self.eff = 100. * self.it / self.ncall self.it += 1 yield (worst, ustar, vstar, loglstar, live_nc[worst], worst_it, boundidx, bounditer, self.eff)
def function[sample_batch, parameter[self, nlive_new, update_interval, logl_bounds, maxiter, maxcall, save_bounds]]: constant[ Generate an additional series of nested samples that will be combined with the previous set of dead points. Works by hacking the internal `sampler` object. Instantiates a generator that will be called by the user. Parameters ---------- nlive_new : int Number of new live points to be added. Default is `500`. update_interval : int or float, optional If an integer is passed, only update the bounding distribution every `update_interval`-th likelihood call. If a float is passed, update the bound after every `round(update_interval * nlive)`-th likelihood call. Larger update intervals can be more efficient when the likelihood function is quick to evaluate. If no value is provided, defaults to the value passed during initialization. logl_bounds : tuple of size (2,), optional The ln(likelihood) bounds used to bracket the run. If `None`, the default bounds span the entire range covered by the original run. maxiter : int, optional Maximum number of iterations. Iteration may stop earlier if the termination condition is reached. Default is `sys.maxsize` (no limit). maxcall : int, optional Maximum number of likelihood evaluations. Iteration may stop earlier if termination condition is reached. Default is `sys.maxsize` (no limit). save_bounds : bool, optional Whether or not to save past distributions used to bound the live points internally. Default is `True`. Returns ------- worst : int Index of the live point with the worst likelihood. This is our new dead point sample. **Negative values indicate the index of a new live point generated when initializing a new batch.** ustar : `~numpy.ndarray` with shape (npdim,) Position of the sample. vstar : `~numpy.ndarray` with shape (ndim,) Transformed position of the sample. loglstar : float Ln(likelihood) of the sample. nc : int Number of likelihood calls performed before the new live point was accepted. worst_it : int Iteration when the live (now dead) point was originally proposed. boundidx : int Index of the bound the dead point was originally drawn from. bounditer : int Index of the bound being used at the current iteration. eff : float The cumulative sampling efficiency (in percent). ] if compare[name[maxcall] is constant[None]] begin[:] variable[maxcall] assign[=] name[sys].maxsize if compare[name[maxiter] is constant[None]] begin[:] variable[maxiter] assign[=] name[sys].maxsize if compare[name[nlive_new] less_or_equal[<=] binary_operation[constant[2] * name[self].npdim]] begin[:] call[name[warnings].warn, parameter[constant[Beware: `nlive_batch <= 2 * ndim`!]]] name[self].sampler.save_bounds assign[=] name[save_bounds] variable[h] assign[=] constant[0.0] variable[logz] assign[=] <ast.UnaryOp object at 0x7da1b1e8dfc0> variable[logvol] assign[=] constant[0.0] variable[base_id] assign[=] call[name[np].array, parameter[name[self].base_id]] variable[base_u] assign[=] call[name[np].array, parameter[name[self].base_u]] variable[base_v] assign[=] call[name[np].array, parameter[name[self].base_v]] variable[base_logl] assign[=] call[name[np].array, parameter[name[self].base_logl]] variable[base_n] assign[=] call[name[np].array, parameter[name[self].base_n]] variable[base_scale] assign[=] call[name[np].array, parameter[name[self].base_scale]] variable[nbase] assign[=] call[name[len], parameter[name[base_n]]] variable[nblive] assign[=] name[self].nlive_init name[self].new_id assign[=] list[[]] name[self].new_u assign[=] list[[]] name[self].new_v assign[=] list[[]] name[self].new_logl assign[=] list[[]] name[self].new_nc assign[=] list[[]] name[self].new_it assign[=] list[[]] name[self].new_n assign[=] list[[]] name[self].new_boundidx assign[=] list[[]] name[self].new_bounditer assign[=] list[[]] name[self].new_scale assign[=] list[[]] <ast.Tuple object at 0x7da1b1e8f220> assign[=] tuple[[<ast.UnaryOp object at 0x7da1b1e8f2b0>, <ast.Attribute object at 0x7da1b1e8f3d0>]] if compare[name[logl_bounds] is constant[None]] begin[:] <ast.Tuple object at 0x7da1b1e8f520> assign[=] tuple[[<ast.UnaryOp object at 0x7da1b1e8f5e0>, <ast.Call object at 0x7da1b1e8f640>]] <ast.Tuple object at 0x7da1b1e8f8e0> assign[=] tuple[[<ast.Name object at 0x7da1b1e8f970>, <ast.Name object at 0x7da1b1e8fa00>]] variable[psel] assign[=] call[name[np].all, parameter[compare[name[logl_min] less_or_equal[<=] name[base_logl]]]] variable[vol] assign[=] binary_operation[constant[1.0] - binary_operation[constant[1.0] / name[nblive]]] if name[psel] begin[:] variable[live_u] assign[=] call[name[self].rstate.rand, parameter[name[nlive_new], name[self].npdim]] if name[self].use_pool_ptform begin[:] variable[live_v] assign[=] call[name[np].array, parameter[call[name[list], parameter[call[name[self].M, parameter[name[self].prior_transform, call[name[np].array, parameter[name[live_u]]]]]]]]] if name[self].use_pool_logl begin[:] variable[live_logl] assign[=] call[name[np].array, parameter[call[name[list], parameter[call[name[self].M, parameter[name[self].loglikelihood, call[name[np].array, parameter[name[live_v]]]]]]]]] for taget[tuple[[<ast.Name object at 0x7da1b1d46c20>, <ast.Name object at 0x7da1b1d45480>]]] in starred[call[name[enumerate], parameter[name[live_logl]]]] begin[:] if <ast.UnaryOp object at 0x7da1b1d46b90> begin[:] if compare[call[name[np].sign, parameter[name[logl]]] less[<] constant[0]] begin[:] call[name[live_logl]][name[i]] assign[=] <ast.UnaryOp object at 0x7da1b1d44be0> variable[live_bound] assign[=] call[name[np].zeros, parameter[name[nlive_new]]] variable[live_it] assign[=] binary_operation[call[name[np].zeros, parameter[name[nlive_new]]] + name[self].it] variable[live_nc] assign[=] call[name[np].ones, parameter[name[nlive_new]]] <ast.AugAssign object at 0x7da1b1d47040> for taget[name[i]] in starred[call[name[range], parameter[name[nlive_new]]]] begin[:] <ast.Yield object at 0x7da1b1d46a70> name[self].sampler.nlive assign[=] name[nlive_new] name[self].sampler.live_u assign[=] call[name[np].array, parameter[name[live_u]]] name[self].sampler.live_v assign[=] call[name[np].array, parameter[name[live_v]]] name[self].sampler.live_logl assign[=] call[name[np].array, parameter[name[live_logl]]] name[self].sampler.live_bound assign[=] call[name[np].array, parameter[name[live_bound]]] name[self].sampler.live_it assign[=] call[name[np].array, parameter[name[live_it]]] variable[loglmin] assign[=] call[name[min], parameter[name[live_logl]]] if call[name[self].sampler._beyond_unit_bound, parameter[name[loglmin]]] begin[:] variable[bound] assign[=] call[name[self].sampler.update, parameter[binary_operation[name[vol] / name[nlive_new]]]] if name[save_bounds] begin[:] call[name[self].sampler.bound.append, parameter[call[name[copy].deepcopy, parameter[name[bound]]]]] <ast.AugAssign object at 0x7da1b1eea980> name[self].sampler.since_update assign[=] constant[0] name[self].bound assign[=] name[self].sampler.bound if compare[name[update_interval] is constant[None]] begin[:] variable[update_interval] assign[=] name[self].update_interval if call[name[isinstance], parameter[name[update_interval], name[float]]] begin[:] variable[update_interval] assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[name[self].update_interval * name[nlive_new]]]]]] if compare[name[self].bounding equal[==] constant[none]] begin[:] variable[update_interval] assign[=] name[np].inf name[self].sampler.update_interval assign[=] name[update_interval] if compare[name[self].new_logl_min equal[==] <ast.UnaryOp object at 0x7da1b1ee9ed0>] begin[:] variable[bound_logvol] assign[=] constant[0.0] variable[bound_dlv] assign[=] call[name[math].log, parameter[binary_operation[binary_operation[name[nlive_new] + constant[1.0]] / name[nlive_new]]]] call[name[self].sampler.saved_logvol][<ast.UnaryOp object at 0x7da1b1ee9360>] assign[=] name[bound_logvol] name[self].sampler.dlv assign[=] name[bound_dlv] name[self].sampler.added_live assign[=] constant[False] for taget[name[i]] in starred[call[name[range], parameter[constant[1]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b1ee9720>, <ast.Name object at 0x7da1b1eead10>]]] in starred[call[name[enumerate], parameter[call[name[self].sampler.sample, parameter[]]]]] begin[:] <ast.Tuple object at 0x7da1b1d61fc0> assign[=] name[results] call[name[self].new_id.append, parameter[name[worst]]] call[name[self].new_u.append, parameter[name[ustar]]] call[name[self].new_v.append, parameter[name[vstar]]] call[name[self].new_logl.append, parameter[name[loglstar]]] call[name[self].new_nc.append, parameter[name[nc]]] call[name[self].new_it.append, parameter[name[worst_it]]] call[name[self].new_n.append, parameter[name[nlive_new]]] call[name[self].new_boundidx.append, parameter[name[boundidx]]] call[name[self].new_bounditer.append, parameter[name[bounditer]]] call[name[self].new_scale.append, parameter[name[self].sampler.scale]] <ast.AugAssign object at 0x7da1b1d60730> name[self].eff assign[=] binary_operation[binary_operation[constant[100.0] * name[self].it] / name[self].ncall] <ast.AugAssign object at 0x7da1b1d60280> <ast.Yield object at 0x7da1b1d62cb0> for taget[tuple[[<ast.Name object at 0x7da1b1d62830>, <ast.Name object at 0x7da1b1d628c0>]]] in starred[call[name[enumerate], parameter[call[name[self].sampler.add_live_points, parameter[]]]]] begin[:] <ast.Tuple object at 0x7da1b1d60760> assign[=] name[results] call[name[self].new_id.append, parameter[name[worst]]] call[name[self].new_u.append, parameter[name[ustar]]] call[name[self].new_v.append, parameter[name[vstar]]] call[name[self].new_logl.append, parameter[name[loglstar]]] call[name[self].new_nc.append, parameter[call[name[live_nc]][name[worst]]]] call[name[self].new_it.append, parameter[name[worst_it]]] call[name[self].new_n.append, parameter[binary_operation[name[nlive_new] - name[it]]]] call[name[self].new_boundidx.append, parameter[name[boundidx]]] call[name[self].new_bounditer.append, parameter[name[bounditer]]] call[name[self].new_scale.append, parameter[name[self].sampler.scale]] name[self].eff assign[=] binary_operation[binary_operation[constant[100.0] * name[self].it] / name[self].ncall] <ast.AugAssign object at 0x7da1b1d523e0> <ast.Yield object at 0x7da1b1d51c00>
keyword[def] identifier[sample_batch] ( identifier[self] , identifier[nlive_new] = literal[int] , identifier[update_interval] = keyword[None] , identifier[logl_bounds] = keyword[None] , identifier[maxiter] = keyword[None] , identifier[maxcall] = keyword[None] , identifier[save_bounds] = keyword[True] ): literal[string] keyword[if] identifier[maxcall] keyword[is] keyword[None] : identifier[maxcall] = identifier[sys] . identifier[maxsize] keyword[if] identifier[maxiter] keyword[is] keyword[None] : identifier[maxiter] = identifier[sys] . identifier[maxsize] keyword[if] identifier[nlive_new] <= literal[int] * identifier[self] . identifier[npdim] : identifier[warnings] . identifier[warn] ( literal[string] ) identifier[self] . identifier[sampler] . identifier[save_bounds] = identifier[save_bounds] identifier[h] = literal[int] identifier[logz] =- literal[int] identifier[logvol] = literal[int] identifier[base_id] = identifier[np] . identifier[array] ( identifier[self] . identifier[base_id] ) identifier[base_u] = identifier[np] . identifier[array] ( identifier[self] . identifier[base_u] ) identifier[base_v] = identifier[np] . identifier[array] ( identifier[self] . identifier[base_v] ) identifier[base_logl] = identifier[np] . identifier[array] ( identifier[self] . identifier[base_logl] ) identifier[base_n] = identifier[np] . identifier[array] ( identifier[self] . identifier[base_n] ) identifier[base_scale] = identifier[np] . identifier[array] ( identifier[self] . identifier[base_scale] ) identifier[nbase] = identifier[len] ( identifier[base_n] ) identifier[nblive] = identifier[self] . identifier[nlive_init] identifier[self] . identifier[new_id] =[] identifier[self] . identifier[new_u] =[] identifier[self] . identifier[new_v] =[] identifier[self] . identifier[new_logl] =[] identifier[self] . identifier[new_nc] =[] identifier[self] . identifier[new_it] =[] identifier[self] . identifier[new_n] =[] identifier[self] . identifier[new_boundidx] =[] identifier[self] . identifier[new_bounditer] =[] identifier[self] . identifier[new_scale] =[] identifier[self] . identifier[new_logl_min] , identifier[self] . identifier[new_logl_max] =- identifier[np] . identifier[inf] , identifier[np] . identifier[inf] keyword[if] identifier[logl_bounds] keyword[is] keyword[None] : identifier[logl_min] , identifier[logl_max] =- identifier[np] . identifier[inf] , identifier[max] ( identifier[base_logl] [:- identifier[nblive] ]) keyword[else] : identifier[logl_min] , identifier[logl_max] = identifier[logl_bounds] identifier[self] . identifier[new_logl_min] , identifier[self] . identifier[new_logl_max] = identifier[logl_min] , identifier[logl_max] identifier[psel] = identifier[np] . identifier[all] ( identifier[logl_min] <= identifier[base_logl] ) identifier[vol] = literal[int] - literal[int] / identifier[nblive] keyword[if] identifier[psel] : identifier[live_u] = identifier[self] . identifier[rstate] . identifier[rand] ( identifier[nlive_new] , identifier[self] . identifier[npdim] ) keyword[if] identifier[self] . identifier[use_pool_ptform] : identifier[live_v] = identifier[np] . identifier[array] ( identifier[list] ( identifier[self] . identifier[M] ( identifier[self] . identifier[prior_transform] , identifier[np] . identifier[array] ( identifier[live_u] )))) keyword[else] : identifier[live_v] = identifier[np] . identifier[array] ( identifier[list] ( identifier[map] ( identifier[self] . identifier[prior_transform] , identifier[np] . identifier[array] ( identifier[live_u] )))) keyword[if] identifier[self] . identifier[use_pool_logl] : identifier[live_logl] = identifier[np] . identifier[array] ( identifier[list] ( identifier[self] . identifier[M] ( identifier[self] . identifier[loglikelihood] , identifier[np] . identifier[array] ( identifier[live_v] )))) keyword[else] : identifier[live_logl] = identifier[np] . identifier[array] ( identifier[list] ( identifier[map] ( identifier[self] . identifier[loglikelihood] , identifier[np] . identifier[array] ( identifier[live_v] )))) keyword[for] identifier[i] , identifier[logl] keyword[in] identifier[enumerate] ( identifier[live_logl] ): keyword[if] keyword[not] identifier[np] . identifier[isfinite] ( identifier[logl] ): keyword[if] identifier[np] . identifier[sign] ( identifier[logl] )< literal[int] : identifier[live_logl] [ identifier[i] ]=- literal[int] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[logl] , identifier[i] , identifier[live_u] [ identifier[i] ], identifier[live_v] [ identifier[i] ])) identifier[live_bound] = identifier[np] . identifier[zeros] ( identifier[nlive_new] , identifier[dtype] = literal[string] ) identifier[live_it] = identifier[np] . identifier[zeros] ( identifier[nlive_new] , identifier[dtype] = literal[string] )+ identifier[self] . identifier[it] identifier[live_nc] = identifier[np] . identifier[ones] ( identifier[nlive_new] , identifier[dtype] = literal[string] ) identifier[self] . identifier[ncall] += identifier[nlive_new] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nlive_new] ): keyword[yield] (- identifier[i] - literal[int] , identifier[live_u] [ identifier[i] ], identifier[live_v] [ identifier[i] ], identifier[live_logl] [ identifier[i] ], identifier[live_nc] [ identifier[i] ], identifier[live_it] [ identifier[i] ], literal[int] , literal[int] , identifier[self] . identifier[eff] ) keyword[else] : identifier[live_u] = identifier[np] . identifier[empty] (( identifier[nblive] , identifier[self] . identifier[npdim] )) identifier[live_v] = identifier[np] . identifier[empty] (( identifier[nblive] , identifier[base_v] . identifier[shape] [ literal[int] ])) identifier[live_logl] = identifier[np] . identifier[empty] ( identifier[nblive] ) identifier[live_u] [ identifier[base_id] [- identifier[nblive] :]]= identifier[base_u] [- identifier[nblive] :] identifier[live_v] [ identifier[base_id] [- identifier[nblive] :]]= identifier[base_v] [- identifier[nblive] :] identifier[live_logl] [ identifier[base_id] [- identifier[nblive] :]]= identifier[base_logl] [- identifier[nblive] :] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[nbase] - identifier[nblive] ): identifier[r] =-( identifier[nblive] + identifier[i] ) identifier[uidx] = identifier[base_id] [ identifier[r] ] identifier[live_u] [ identifier[uidx] ]= identifier[base_u] [ identifier[r] ] identifier[live_v] [ identifier[uidx] ]= identifier[base_v] [ identifier[r] ] identifier[live_logl] [ identifier[uidx] ]= identifier[base_logl] [ identifier[r] ] keyword[if] identifier[live_logl] [ identifier[uidx] ]<= identifier[logl_min] : keyword[break] identifier[live_scale] = identifier[base_scale] [ identifier[r] ] identifier[self] . identifier[sampler] . identifier[nlive] = identifier[nblive] identifier[self] . identifier[sampler] . identifier[live_u] = identifier[np] . identifier[array] ( identifier[live_u] ) identifier[self] . identifier[sampler] . identifier[live_v] = identifier[np] . identifier[array] ( identifier[live_v] ) identifier[self] . identifier[sampler] . identifier[live_logl] = identifier[np] . identifier[array] ( identifier[live_logl] ) identifier[self] . identifier[sampler] . identifier[scale] = identifier[live_scale] identifier[vol] = identifier[math] . identifier[exp] (- literal[int] *( identifier[nbase] + identifier[r] )/ identifier[nblive] ) identifier[loglmin] = identifier[min] ( identifier[live_logl] ) keyword[if] identifier[self] . identifier[sampler] . identifier[_beyond_unit_bound] ( identifier[loglmin] ): identifier[bound] = identifier[self] . identifier[sampler] . identifier[update] ( identifier[vol] / identifier[nblive] ) keyword[if] identifier[save_bounds] : identifier[self] . identifier[sampler] . identifier[bound] . identifier[append] ( identifier[copy] . identifier[deepcopy] ( identifier[bound] )) identifier[self] . identifier[sampler] . identifier[nbound] += literal[int] identifier[self] . identifier[sampler] . identifier[since_update] = literal[int] identifier[live_u] = identifier[np] . identifier[empty] (( identifier[nlive_new] , identifier[self] . identifier[npdim] )) identifier[live_v] = identifier[np] . identifier[empty] (( identifier[nlive_new] , identifier[base_v] . identifier[shape] [ literal[int] ])) identifier[live_logl] = identifier[np] . identifier[empty] ( identifier[nlive_new] ) identifier[live_bound] = identifier[np] . identifier[zeros] ( identifier[nlive_new] , identifier[dtype] = literal[string] ) keyword[if] identifier[self] . identifier[sampler] . identifier[_beyond_unit_bound] ( identifier[loglmin] ): identifier[live_bound] += identifier[self] . identifier[sampler] . identifier[nbound] - literal[int] identifier[live_it] = identifier[np] . identifier[empty] ( identifier[nlive_new] , identifier[dtype] = literal[string] ) identifier[live_nc] = identifier[np] . identifier[empty] ( identifier[nlive_new] , identifier[dtype] = literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nlive_new] ): ( identifier[live_u] [ identifier[i] ], identifier[live_v] [ identifier[i] ], identifier[live_logl] [ identifier[i] ], identifier[live_nc] [ identifier[i] ])= identifier[self] . identifier[sampler] . identifier[_new_point] ( identifier[logl_min] , identifier[math] . identifier[log] ( identifier[vol] )) identifier[live_it] [ identifier[i] ]= identifier[self] . identifier[it] identifier[self] . identifier[ncall] += identifier[live_nc] [ identifier[i] ] keyword[yield] (- identifier[i] - literal[int] , identifier[live_u] [ identifier[i] ], identifier[live_v] [ identifier[i] ], identifier[live_logl] [ identifier[i] ], identifier[live_nc] [ identifier[i] ], identifier[live_it] [ identifier[i] ], identifier[live_bound] [ identifier[i] ], identifier[live_bound] [ identifier[i] ], identifier[self] . identifier[eff] ) identifier[self] . identifier[sampler] . identifier[nlive] = identifier[nlive_new] identifier[self] . identifier[sampler] . identifier[live_u] = identifier[np] . identifier[array] ( identifier[live_u] ) identifier[self] . identifier[sampler] . identifier[live_v] = identifier[np] . identifier[array] ( identifier[live_v] ) identifier[self] . identifier[sampler] . identifier[live_logl] = identifier[np] . identifier[array] ( identifier[live_logl] ) identifier[self] . identifier[sampler] . identifier[live_bound] = identifier[np] . identifier[array] ( identifier[live_bound] ) identifier[self] . identifier[sampler] . identifier[live_it] = identifier[np] . identifier[array] ( identifier[live_it] ) identifier[loglmin] = identifier[min] ( identifier[live_logl] ) keyword[if] identifier[self] . identifier[sampler] . identifier[_beyond_unit_bound] ( identifier[loglmin] ): identifier[bound] = identifier[self] . identifier[sampler] . identifier[update] ( identifier[vol] / identifier[nlive_new] ) keyword[if] identifier[save_bounds] : identifier[self] . identifier[sampler] . identifier[bound] . identifier[append] ( identifier[copy] . identifier[deepcopy] ( identifier[bound] )) identifier[self] . identifier[sampler] . identifier[nbound] += literal[int] identifier[self] . identifier[sampler] . identifier[since_update] = literal[int] identifier[self] . identifier[bound] = identifier[self] . identifier[sampler] . identifier[bound] keyword[if] identifier[update_interval] keyword[is] keyword[None] : identifier[update_interval] = identifier[self] . identifier[update_interval] keyword[if] identifier[isinstance] ( identifier[update_interval] , identifier[float] ): identifier[update_interval] = identifier[int] ( identifier[round] ( identifier[self] . identifier[update_interval] * identifier[nlive_new] )) keyword[if] identifier[self] . identifier[bounding] == literal[string] : identifier[update_interval] = identifier[np] . identifier[inf] identifier[self] . identifier[sampler] . identifier[update_interval] = identifier[update_interval] keyword[if] identifier[self] . identifier[new_logl_min] ==- identifier[np] . identifier[inf] : identifier[bound_logvol] = literal[int] keyword[else] : identifier[vol_idx] = identifier[np] . identifier[argmin] ( identifier[abs] ( identifier[self] . identifier[saved_logl] - identifier[self] . identifier[new_logl_min] )) identifier[bound_logvol] = identifier[self] . identifier[saved_logvol] [ identifier[vol_idx] ] identifier[bound_dlv] = identifier[math] . identifier[log] (( identifier[nlive_new] + literal[int] )/ identifier[nlive_new] ) identifier[self] . identifier[sampler] . identifier[saved_logvol] [- literal[int] ]= identifier[bound_logvol] identifier[self] . identifier[sampler] . identifier[dlv] = identifier[bound_dlv] identifier[self] . identifier[sampler] . identifier[added_live] = keyword[False] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): keyword[for] identifier[it] , identifier[results] keyword[in] identifier[enumerate] ( identifier[self] . identifier[sampler] . identifier[sample] ( identifier[dlogz] = literal[int] , identifier[logl_max] = identifier[logl_max] , identifier[maxiter] = identifier[maxiter] - identifier[nlive_new] - literal[int] , identifier[maxcall] = identifier[maxcall] - identifier[sum] ( identifier[live_nc] ), identifier[save_samples] = keyword[False] , identifier[save_bounds] = identifier[save_bounds] )): ( identifier[worst] , identifier[ustar] , identifier[vstar] , identifier[loglstar] , identifier[logvol] , identifier[logwt] , identifier[logz] , identifier[logzvar] , identifier[h] , identifier[nc] , identifier[worst_it] , identifier[boundidx] , identifier[bounditer] , identifier[eff] , identifier[delta_logz] )= identifier[results] identifier[self] . identifier[new_id] . identifier[append] ( identifier[worst] ) identifier[self] . identifier[new_u] . identifier[append] ( identifier[ustar] ) identifier[self] . identifier[new_v] . identifier[append] ( identifier[vstar] ) identifier[self] . identifier[new_logl] . identifier[append] ( identifier[loglstar] ) identifier[self] . identifier[new_nc] . identifier[append] ( identifier[nc] ) identifier[self] . identifier[new_it] . identifier[append] ( identifier[worst_it] ) identifier[self] . identifier[new_n] . identifier[append] ( identifier[nlive_new] ) identifier[self] . identifier[new_boundidx] . identifier[append] ( identifier[boundidx] ) identifier[self] . identifier[new_bounditer] . identifier[append] ( identifier[bounditer] ) identifier[self] . identifier[new_scale] . identifier[append] ( identifier[self] . identifier[sampler] . identifier[scale] ) identifier[self] . identifier[ncall] += identifier[nc] identifier[self] . identifier[eff] = literal[int] * identifier[self] . identifier[it] / identifier[self] . identifier[ncall] identifier[self] . identifier[it] += literal[int] keyword[yield] ( identifier[worst] , identifier[ustar] , identifier[vstar] , identifier[loglstar] , identifier[nc] , identifier[worst_it] , identifier[boundidx] , identifier[bounditer] , identifier[self] . identifier[eff] ) keyword[for] identifier[it] , identifier[results] keyword[in] identifier[enumerate] ( identifier[self] . identifier[sampler] . identifier[add_live_points] ()): ( identifier[worst] , identifier[ustar] , identifier[vstar] , identifier[loglstar] , identifier[logvol] , identifier[logwt] , identifier[logz] , identifier[logzvar] , identifier[h] , identifier[nc] , identifier[worst_it] , identifier[boundidx] , identifier[bounditer] , identifier[eff] , identifier[delta_logz] )= identifier[results] identifier[self] . identifier[new_id] . identifier[append] ( identifier[worst] ) identifier[self] . identifier[new_u] . identifier[append] ( identifier[ustar] ) identifier[self] . identifier[new_v] . identifier[append] ( identifier[vstar] ) identifier[self] . identifier[new_logl] . identifier[append] ( identifier[loglstar] ) identifier[self] . identifier[new_nc] . identifier[append] ( identifier[live_nc] [ identifier[worst] ]) identifier[self] . identifier[new_it] . identifier[append] ( identifier[worst_it] ) identifier[self] . identifier[new_n] . identifier[append] ( identifier[nlive_new] - identifier[it] ) identifier[self] . identifier[new_boundidx] . identifier[append] ( identifier[boundidx] ) identifier[self] . identifier[new_bounditer] . identifier[append] ( identifier[bounditer] ) identifier[self] . identifier[new_scale] . identifier[append] ( identifier[self] . identifier[sampler] . identifier[scale] ) identifier[self] . identifier[eff] = literal[int] * identifier[self] . identifier[it] / identifier[self] . identifier[ncall] identifier[self] . identifier[it] += literal[int] keyword[yield] ( identifier[worst] , identifier[ustar] , identifier[vstar] , identifier[loglstar] , identifier[live_nc] [ identifier[worst] ], identifier[worst_it] , identifier[boundidx] , identifier[bounditer] , identifier[self] . identifier[eff] )
def sample_batch(self, nlive_new=500, update_interval=None, logl_bounds=None, maxiter=None, maxcall=None, save_bounds=True): """ Generate an additional series of nested samples that will be combined with the previous set of dead points. Works by hacking the internal `sampler` object. Instantiates a generator that will be called by the user. Parameters ---------- nlive_new : int Number of new live points to be added. Default is `500`. update_interval : int or float, optional If an integer is passed, only update the bounding distribution every `update_interval`-th likelihood call. If a float is passed, update the bound after every `round(update_interval * nlive)`-th likelihood call. Larger update intervals can be more efficient when the likelihood function is quick to evaluate. If no value is provided, defaults to the value passed during initialization. logl_bounds : tuple of size (2,), optional The ln(likelihood) bounds used to bracket the run. If `None`, the default bounds span the entire range covered by the original run. maxiter : int, optional Maximum number of iterations. Iteration may stop earlier if the termination condition is reached. Default is `sys.maxsize` (no limit). maxcall : int, optional Maximum number of likelihood evaluations. Iteration may stop earlier if termination condition is reached. Default is `sys.maxsize` (no limit). save_bounds : bool, optional Whether or not to save past distributions used to bound the live points internally. Default is `True`. Returns ------- worst : int Index of the live point with the worst likelihood. This is our new dead point sample. **Negative values indicate the index of a new live point generated when initializing a new batch.** ustar : `~numpy.ndarray` with shape (npdim,) Position of the sample. vstar : `~numpy.ndarray` with shape (ndim,) Transformed position of the sample. loglstar : float Ln(likelihood) of the sample. nc : int Number of likelihood calls performed before the new live point was accepted. worst_it : int Iteration when the live (now dead) point was originally proposed. boundidx : int Index of the bound the dead point was originally drawn from. bounditer : int Index of the bound being used at the current iteration. eff : float The cumulative sampling efficiency (in percent). """ # Initialize default values. if maxcall is None: maxcall = sys.maxsize # depends on [control=['if'], data=['maxcall']] if maxiter is None: maxiter = sys.maxsize # depends on [control=['if'], data=['maxiter']] if nlive_new <= 2 * self.npdim: warnings.warn('Beware: `nlive_batch <= 2 * ndim`!') # depends on [control=['if'], data=[]] self.sampler.save_bounds = save_bounds # Initialize starting values. h = 0.0 # Information, initially *0.* logz = -1e+300 # ln(evidence), initially *0.* logvol = 0.0 # initially contains the whole prior (volume=1.) # Grab results from base run. base_id = np.array(self.base_id) base_u = np.array(self.base_u) base_v = np.array(self.base_v) base_logl = np.array(self.base_logl) base_n = np.array(self.base_n) base_scale = np.array(self.base_scale) nbase = len(base_n) nblive = self.nlive_init # Reset "new" results. self.new_id = [] self.new_u = [] self.new_v = [] self.new_logl = [] self.new_nc = [] self.new_it = [] self.new_n = [] self.new_boundidx = [] self.new_bounditer = [] self.new_scale = [] (self.new_logl_min, self.new_logl_max) = (-np.inf, np.inf) # Initialize ln(likelihood) bounds. if logl_bounds is None: (logl_min, logl_max) = (-np.inf, max(base_logl[:-nblive])) # depends on [control=['if'], data=[]] else: (logl_min, logl_max) = logl_bounds (self.new_logl_min, self.new_logl_max) = (logl_min, logl_max) # Check whether the lower bound encompasses all previous base samples. psel = np.all(logl_min <= base_logl) vol = 1.0 - 1.0 / nblive # starting ln(prior volume) if psel: # If the lower bound encompasses all base samples, we want # to propose a new set of points from the unit cube. live_u = self.rstate.rand(nlive_new, self.npdim) if self.use_pool_ptform: live_v = np.array(list(self.M(self.prior_transform, np.array(live_u)))) # depends on [control=['if'], data=[]] else: live_v = np.array(list(map(self.prior_transform, np.array(live_u)))) if self.use_pool_logl: live_logl = np.array(list(self.M(self.loglikelihood, np.array(live_v)))) # depends on [control=['if'], data=[]] else: live_logl = np.array(list(map(self.loglikelihood, np.array(live_v)))) # Convert all `-np.inf` log-likelihoods to finite large numbers. # Necessary to keep estimators in our sampler from breaking. for (i, logl) in enumerate(live_logl): if not np.isfinite(logl): if np.sign(logl) < 0: live_logl[i] = -1e+300 # depends on [control=['if'], data=[]] else: raise ValueError('The log-likelihood ({0}) of live point {1} located at u={2} v={3} is invalid.'.format(logl, i, live_u[i], live_v[i])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] live_bound = np.zeros(nlive_new, dtype='int') live_it = np.zeros(nlive_new, dtype='int') + self.it live_nc = np.ones(nlive_new, dtype='int') self.ncall += nlive_new # Return live points in generator format. for i in range(nlive_new): yield (-i - 1, live_u[i], live_v[i], live_logl[i], live_nc[i], live_it[i], 0, 0, self.eff) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] else: # If the lower bound doesn't encompass all base samples, we need # to "rewind" our previous base run until we arrive at the # relevant set of live points (and scale) at the bound. live_u = np.empty((nblive, self.npdim)) live_v = np.empty((nblive, base_v.shape[1])) live_logl = np.empty(nblive) live_u[base_id[-nblive:]] = base_u[-nblive:] live_v[base_id[-nblive:]] = base_v[-nblive:] live_logl[base_id[-nblive:]] = base_logl[-nblive:] for i in range(1, nbase - nblive): r = -(nblive + i) uidx = base_id[r] live_u[uidx] = base_u[r] live_v[uidx] = base_v[r] live_logl[uidx] = base_logl[r] if live_logl[uidx] <= logl_min: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] live_scale = base_scale[r] # Hack the internal sampler by overwriting the live points # and scale factor. self.sampler.nlive = nblive self.sampler.live_u = np.array(live_u) self.sampler.live_v = np.array(live_v) self.sampler.live_logl = np.array(live_logl) self.sampler.scale = live_scale # Trigger an update of the internal bounding distribution based # on the "new" set of live points. vol = math.exp(-1.0 * (nbase + r) / nblive) loglmin = min(live_logl) if self.sampler._beyond_unit_bound(loglmin): bound = self.sampler.update(vol / nblive) if save_bounds: self.sampler.bound.append(copy.deepcopy(bound)) # depends on [control=['if'], data=[]] self.sampler.nbound += 1 self.sampler.since_update = 0 # depends on [control=['if'], data=[]] # Sample a new batch of `nlive_new` live points using the # internal sampler given the `logl_min` constraint. live_u = np.empty((nlive_new, self.npdim)) live_v = np.empty((nlive_new, base_v.shape[1])) live_logl = np.empty(nlive_new) live_bound = np.zeros(nlive_new, dtype='int') if self.sampler._beyond_unit_bound(loglmin): live_bound += self.sampler.nbound - 1 # depends on [control=['if'], data=[]] live_it = np.empty(nlive_new, dtype='int') live_nc = np.empty(nlive_new, dtype='int') for i in range(nlive_new): (live_u[i], live_v[i], live_logl[i], live_nc[i]) = self.sampler._new_point(logl_min, math.log(vol)) live_it[i] = self.it self.ncall += live_nc[i] # Return live points in generator format. yield (-i - 1, live_u[i], live_v[i], live_logl[i], live_nc[i], live_it[i], live_bound[i], live_bound[i], self.eff) # depends on [control=['for'], data=['i']] # Overwrite the previous set of live points in our internal sampler # with the new batch of points we just generated. self.sampler.nlive = nlive_new self.sampler.live_u = np.array(live_u) self.sampler.live_v = np.array(live_v) self.sampler.live_logl = np.array(live_logl) self.sampler.live_bound = np.array(live_bound) self.sampler.live_it = np.array(live_it) # Trigger an update of the internal bounding distribution (again). loglmin = min(live_logl) if self.sampler._beyond_unit_bound(loglmin): bound = self.sampler.update(vol / nlive_new) if save_bounds: self.sampler.bound.append(copy.deepcopy(bound)) # depends on [control=['if'], data=[]] self.sampler.nbound += 1 self.sampler.since_update = 0 # depends on [control=['if'], data=[]] # Copy over bound reference. self.bound = self.sampler.bound # Update `update_interval` based on our new set of live points. if update_interval is None: update_interval = self.update_interval # depends on [control=['if'], data=['update_interval']] if isinstance(update_interval, float): update_interval = int(round(self.update_interval * nlive_new)) # depends on [control=['if'], data=[]] if self.bounding == 'none': update_interval = np.inf # no need to update with no bounds # depends on [control=['if'], data=[]] self.sampler.update_interval = update_interval # Update internal ln(prior volume)-based quantities used to set things # like `pointvol` that help to prevent constructing over-constrained # bounding distributions. if self.new_logl_min == -np.inf: bound_logvol = 0.0 # depends on [control=['if'], data=[]] else: vol_idx = np.argmin(abs(self.saved_logl - self.new_logl_min)) bound_logvol = self.saved_logvol[vol_idx] bound_dlv = math.log((nlive_new + 1.0) / nlive_new) self.sampler.saved_logvol[-1] = bound_logvol self.sampler.dlv = bound_dlv # Tell the sampler *not* to try and remove the previous addition of # live points. All the hacks above make the internal results # garbage anyways. self.sampler.added_live = False # Run the sampler internally as a generator until we hit # the lower likelihood threshold. Afterwards, we add in our remaining # live points *as if* we had terminated the run. This allows us to # sample past the original bounds "for free". for i in range(1): for (it, results) in enumerate(self.sampler.sample(dlogz=0.0, logl_max=logl_max, maxiter=maxiter - nlive_new - 1, maxcall=maxcall - sum(live_nc), save_samples=False, save_bounds=save_bounds)): # Grab results. (worst, ustar, vstar, loglstar, logvol, logwt, logz, logzvar, h, nc, worst_it, boundidx, bounditer, eff, delta_logz) = results # Save results. self.new_id.append(worst) self.new_u.append(ustar) self.new_v.append(vstar) self.new_logl.append(loglstar) self.new_nc.append(nc) self.new_it.append(worst_it) self.new_n.append(nlive_new) self.new_boundidx.append(boundidx) self.new_bounditer.append(bounditer) self.new_scale.append(self.sampler.scale) # Increment relevant counters. self.ncall += nc self.eff = 100.0 * self.it / self.ncall self.it += 1 yield (worst, ustar, vstar, loglstar, nc, worst_it, boundidx, bounditer, self.eff) # depends on [control=['for'], data=[]] for (it, results) in enumerate(self.sampler.add_live_points()): # Grab results. (worst, ustar, vstar, loglstar, logvol, logwt, logz, logzvar, h, nc, worst_it, boundidx, bounditer, eff, delta_logz) = results # Save results. self.new_id.append(worst) self.new_u.append(ustar) self.new_v.append(vstar) self.new_logl.append(loglstar) self.new_nc.append(live_nc[worst]) self.new_it.append(worst_it) self.new_n.append(nlive_new - it) self.new_boundidx.append(boundidx) self.new_bounditer.append(bounditer) self.new_scale.append(self.sampler.scale) # Increment relevant counters. self.eff = 100.0 * self.it / self.ncall self.it += 1 yield (worst, ustar, vstar, loglstar, live_nc[worst], worst_it, boundidx, bounditer, self.eff) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
def __wrap(self, func): '''This decorator overrides the default arguments of a function. For each keyword argument in the function, the decorator first checks if the argument has been overridden by the caller, and uses that value instead if so. If not, the decorator consults the Preset object for an override value. If both of the above cases fail, the decorator reverts to the function's native default parameter value. ''' def deffunc(*args, **kwargs): '''The decorated function''' # Get the list of function arguments if hasattr(inspect, 'signature'): # Python 3.5 function_args = inspect.signature(func).parameters else: function_args = inspect.getargspec(func).args # Construct a dict of those kwargs which appear in the function filtered_kwargs = kwargs.copy() # look at all relevant keyword arguments for this function for param in function_args: if param in kwargs: # Did the user override the default? filtered_kwargs[param] = kwargs[param] elif param in self._defaults: # Do we have a clobbering value in the default dict? filtered_kwargs[param] = self._defaults[param] # Call the function with the supplied args and the filtered kwarg dict return func(*args, **filtered_kwargs) # pylint: disable=W0142 wrapped = functools.update_wrapper(deffunc, func) # force-mangle the docstring here wrapped.__doc__ = ('WARNING: this function has been modified by the Presets ' 'package.\nDefault parameter values described in the ' 'documentation below may be inaccurate.\n\n{}'.format(wrapped.__doc__)) return wrapped
def function[__wrap, parameter[self, func]]: constant[This decorator overrides the default arguments of a function. For each keyword argument in the function, the decorator first checks if the argument has been overridden by the caller, and uses that value instead if so. If not, the decorator consults the Preset object for an override value. If both of the above cases fail, the decorator reverts to the function's native default parameter value. ] def function[deffunc, parameter[]]: constant[The decorated function] if call[name[hasattr], parameter[name[inspect], constant[signature]]] begin[:] variable[function_args] assign[=] call[name[inspect].signature, parameter[name[func]]].parameters variable[filtered_kwargs] assign[=] call[name[kwargs].copy, parameter[]] for taget[name[param]] in starred[name[function_args]] begin[:] if compare[name[param] in name[kwargs]] begin[:] call[name[filtered_kwargs]][name[param]] assign[=] call[name[kwargs]][name[param]] return[call[name[func], parameter[<ast.Starred object at 0x7da2045661a0>]]] variable[wrapped] assign[=] call[name[functools].update_wrapper, parameter[name[deffunc], name[func]]] name[wrapped].__doc__ assign[=] call[constant[WARNING: this function has been modified by the Presets package. Default parameter values described in the documentation below may be inaccurate. {}].format, parameter[name[wrapped].__doc__]] return[name[wrapped]]
keyword[def] identifier[__wrap] ( identifier[self] , identifier[func] ): literal[string] keyword[def] identifier[deffunc] (* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[hasattr] ( identifier[inspect] , literal[string] ): identifier[function_args] = identifier[inspect] . identifier[signature] ( identifier[func] ). identifier[parameters] keyword[else] : identifier[function_args] = identifier[inspect] . identifier[getargspec] ( identifier[func] ). identifier[args] identifier[filtered_kwargs] = identifier[kwargs] . identifier[copy] () keyword[for] identifier[param] keyword[in] identifier[function_args] : keyword[if] identifier[param] keyword[in] identifier[kwargs] : identifier[filtered_kwargs] [ identifier[param] ]= identifier[kwargs] [ identifier[param] ] keyword[elif] identifier[param] keyword[in] identifier[self] . identifier[_defaults] : identifier[filtered_kwargs] [ identifier[param] ]= identifier[self] . identifier[_defaults] [ identifier[param] ] keyword[return] identifier[func] (* identifier[args] ,** identifier[filtered_kwargs] ) identifier[wrapped] = identifier[functools] . identifier[update_wrapper] ( identifier[deffunc] , identifier[func] ) identifier[wrapped] . identifier[__doc__] =( literal[string] literal[string] literal[string] . identifier[format] ( identifier[wrapped] . identifier[__doc__] )) keyword[return] identifier[wrapped]
def __wrap(self, func): """This decorator overrides the default arguments of a function. For each keyword argument in the function, the decorator first checks if the argument has been overridden by the caller, and uses that value instead if so. If not, the decorator consults the Preset object for an override value. If both of the above cases fail, the decorator reverts to the function's native default parameter value. """ def deffunc(*args, **kwargs): """The decorated function""" # Get the list of function arguments if hasattr(inspect, 'signature'): # Python 3.5 function_args = inspect.signature(func).parameters # depends on [control=['if'], data=[]] else: function_args = inspect.getargspec(func).args # Construct a dict of those kwargs which appear in the function filtered_kwargs = kwargs.copy() # look at all relevant keyword arguments for this function for param in function_args: if param in kwargs: # Did the user override the default? filtered_kwargs[param] = kwargs[param] # depends on [control=['if'], data=['param', 'kwargs']] elif param in self._defaults: # Do we have a clobbering value in the default dict? filtered_kwargs[param] = self._defaults[param] # depends on [control=['if'], data=['param']] # depends on [control=['for'], data=['param']] # Call the function with the supplied args and the filtered kwarg dict return func(*args, **filtered_kwargs) # pylint: disable=W0142 wrapped = functools.update_wrapper(deffunc, func) # force-mangle the docstring here wrapped.__doc__ = 'WARNING: this function has been modified by the Presets package.\nDefault parameter values described in the documentation below may be inaccurate.\n\n{}'.format(wrapped.__doc__) return wrapped
def _parse_spectra_annotation(self, line): """Parse and store the spectral annotation details """ if re.match('^PK\$NUM_PEAK(.*)', line, re.IGNORECASE): self.start_spectra_annotation = False return saplist = line.split() sarow = ( self.current_id_spectra_annotation, float(saplist[self.spectra_annotation_indexes['m/z']]) if 'm/z' in self.spectra_annotation_indexes else None, saplist[self.spectra_annotation_indexes[ 'tentative_formula']] if 'tentative_formula' in self.spectra_annotation_indexes else None, float(saplist[self.spectra_annotation_indexes[ 'mass_error(ppm)']]) if 'mass_error(ppm)' in self.spectra_annotation_indexes else None, self.current_id_meta) self.spectra_annotation_all.append(sarow) self.current_id_spectra_annotation += 1
def function[_parse_spectra_annotation, parameter[self, line]]: constant[Parse and store the spectral annotation details ] if call[name[re].match, parameter[constant[^PK\$NUM_PEAK(.*)], name[line], name[re].IGNORECASE]] begin[:] name[self].start_spectra_annotation assign[=] constant[False] return[None] variable[saplist] assign[=] call[name[line].split, parameter[]] variable[sarow] assign[=] tuple[[<ast.Attribute object at 0x7da207f033d0>, <ast.IfExp object at 0x7da207f03700>, <ast.IfExp object at 0x7da207f03580>, <ast.IfExp object at 0x7da207f02b30>, <ast.Attribute object at 0x7da207f03340>]] call[name[self].spectra_annotation_all.append, parameter[name[sarow]]] <ast.AugAssign object at 0x7da207f018a0>
keyword[def] identifier[_parse_spectra_annotation] ( identifier[self] , identifier[line] ): literal[string] keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[line] , identifier[re] . identifier[IGNORECASE] ): identifier[self] . identifier[start_spectra_annotation] = keyword[False] keyword[return] identifier[saplist] = identifier[line] . identifier[split] () identifier[sarow] =( identifier[self] . identifier[current_id_spectra_annotation] , identifier[float] ( identifier[saplist] [ identifier[self] . identifier[spectra_annotation_indexes] [ literal[string] ]]) keyword[if] literal[string] keyword[in] identifier[self] . identifier[spectra_annotation_indexes] keyword[else] keyword[None] , identifier[saplist] [ identifier[self] . identifier[spectra_annotation_indexes] [ literal[string] ]] keyword[if] literal[string] keyword[in] identifier[self] . identifier[spectra_annotation_indexes] keyword[else] keyword[None] , identifier[float] ( identifier[saplist] [ identifier[self] . identifier[spectra_annotation_indexes] [ literal[string] ]]) keyword[if] literal[string] keyword[in] identifier[self] . identifier[spectra_annotation_indexes] keyword[else] keyword[None] , identifier[self] . identifier[current_id_meta] ) identifier[self] . identifier[spectra_annotation_all] . identifier[append] ( identifier[sarow] ) identifier[self] . identifier[current_id_spectra_annotation] += literal[int]
def _parse_spectra_annotation(self, line): """Parse and store the spectral annotation details """ if re.match('^PK\\$NUM_PEAK(.*)', line, re.IGNORECASE): self.start_spectra_annotation = False return # depends on [control=['if'], data=[]] saplist = line.split() sarow = (self.current_id_spectra_annotation, float(saplist[self.spectra_annotation_indexes['m/z']]) if 'm/z' in self.spectra_annotation_indexes else None, saplist[self.spectra_annotation_indexes['tentative_formula']] if 'tentative_formula' in self.spectra_annotation_indexes else None, float(saplist[self.spectra_annotation_indexes['mass_error(ppm)']]) if 'mass_error(ppm)' in self.spectra_annotation_indexes else None, self.current_id_meta) self.spectra_annotation_all.append(sarow) self.current_id_spectra_annotation += 1
def default_rdo_tr(mod): """ Default translation function for Fedora/RDO based systems """ pkg = mod.rsplit('-python')[0] pkg = pkg.replace('_', '-').replace('.', '-').lower() if not pkg.startswith('python-'): pkg = 'python-' + pkg py2pkg = pkg py3pkg = re.sub('python', 'python3', pkg) return (pkg, py2pkg, py3pkg)
def function[default_rdo_tr, parameter[mod]]: constant[ Default translation function for Fedora/RDO based systems ] variable[pkg] assign[=] call[call[name[mod].rsplit, parameter[constant[-python]]]][constant[0]] variable[pkg] assign[=] call[call[call[name[pkg].replace, parameter[constant[_], constant[-]]].replace, parameter[constant[.], constant[-]]].lower, parameter[]] if <ast.UnaryOp object at 0x7da20c7ca800> begin[:] variable[pkg] assign[=] binary_operation[constant[python-] + name[pkg]] variable[py2pkg] assign[=] name[pkg] variable[py3pkg] assign[=] call[name[re].sub, parameter[constant[python], constant[python3], name[pkg]]] return[tuple[[<ast.Name object at 0x7da20c7c96f0>, <ast.Name object at 0x7da20c7c87f0>, <ast.Name object at 0x7da20c7cb940>]]]
keyword[def] identifier[default_rdo_tr] ( identifier[mod] ): literal[string] identifier[pkg] = identifier[mod] . identifier[rsplit] ( literal[string] )[ literal[int] ] identifier[pkg] = identifier[pkg] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[lower] () keyword[if] keyword[not] identifier[pkg] . identifier[startswith] ( literal[string] ): identifier[pkg] = literal[string] + identifier[pkg] identifier[py2pkg] = identifier[pkg] identifier[py3pkg] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[pkg] ) keyword[return] ( identifier[pkg] , identifier[py2pkg] , identifier[py3pkg] )
def default_rdo_tr(mod): """ Default translation function for Fedora/RDO based systems """ pkg = mod.rsplit('-python')[0] pkg = pkg.replace('_', '-').replace('.', '-').lower() if not pkg.startswith('python-'): pkg = 'python-' + pkg # depends on [control=['if'], data=[]] py2pkg = pkg py3pkg = re.sub('python', 'python3', pkg) return (pkg, py2pkg, py3pkg)
def read_user_dict(var_name, default_value): """Prompt the user to provide a dictionary of data. :param str var_name: Variable as specified in the context :param default_value: Value that will be returned if no input is provided :return: A Python dictionary to use in the context. """ # Please see http://click.pocoo.org/4/api/#click.prompt if not isinstance(default_value, dict): raise TypeError default_display = 'default' user_value = click.prompt( var_name, default=default_display, type=click.STRING, value_proc=process_json, ) if user_value == default_display: # Return the given default w/o any processing return default_value return user_value
def function[read_user_dict, parameter[var_name, default_value]]: constant[Prompt the user to provide a dictionary of data. :param str var_name: Variable as specified in the context :param default_value: Value that will be returned if no input is provided :return: A Python dictionary to use in the context. ] if <ast.UnaryOp object at 0x7da1b217b1c0> begin[:] <ast.Raise object at 0x7da1b217b730> variable[default_display] assign[=] constant[default] variable[user_value] assign[=] call[name[click].prompt, parameter[name[var_name]]] if compare[name[user_value] equal[==] name[default_display]] begin[:] return[name[default_value]] return[name[user_value]]
keyword[def] identifier[read_user_dict] ( identifier[var_name] , identifier[default_value] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[default_value] , identifier[dict] ): keyword[raise] identifier[TypeError] identifier[default_display] = literal[string] identifier[user_value] = identifier[click] . identifier[prompt] ( identifier[var_name] , identifier[default] = identifier[default_display] , identifier[type] = identifier[click] . identifier[STRING] , identifier[value_proc] = identifier[process_json] , ) keyword[if] identifier[user_value] == identifier[default_display] : keyword[return] identifier[default_value] keyword[return] identifier[user_value]
def read_user_dict(var_name, default_value): """Prompt the user to provide a dictionary of data. :param str var_name: Variable as specified in the context :param default_value: Value that will be returned if no input is provided :return: A Python dictionary to use in the context. """ # Please see http://click.pocoo.org/4/api/#click.prompt if not isinstance(default_value, dict): raise TypeError # depends on [control=['if'], data=[]] default_display = 'default' user_value = click.prompt(var_name, default=default_display, type=click.STRING, value_proc=process_json) if user_value == default_display: # Return the given default w/o any processing return default_value # depends on [control=['if'], data=[]] return user_value
def _parse_tree(self, node): """ Parse a <checksum> object """ if 'filename' in node.attrib: self.filename = node.attrib['filename'] if 'type' in node.attrib: self.kind = node.attrib['type'] if 'target' in node.attrib: self.target = node.attrib['target'] self.value = node.text
def function[_parse_tree, parameter[self, node]]: constant[ Parse a <checksum> object ] if compare[constant[filename] in name[node].attrib] begin[:] name[self].filename assign[=] call[name[node].attrib][constant[filename]] if compare[constant[type] in name[node].attrib] begin[:] name[self].kind assign[=] call[name[node].attrib][constant[type]] if compare[constant[target] in name[node].attrib] begin[:] name[self].target assign[=] call[name[node].attrib][constant[target]] name[self].value assign[=] name[node].text
keyword[def] identifier[_parse_tree] ( identifier[self] , identifier[node] ): literal[string] keyword[if] literal[string] keyword[in] identifier[node] . identifier[attrib] : identifier[self] . identifier[filename] = identifier[node] . identifier[attrib] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[node] . identifier[attrib] : identifier[self] . identifier[kind] = identifier[node] . identifier[attrib] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[node] . identifier[attrib] : identifier[self] . identifier[target] = identifier[node] . identifier[attrib] [ literal[string] ] identifier[self] . identifier[value] = identifier[node] . identifier[text]
def _parse_tree(self, node): """ Parse a <checksum> object """ if 'filename' in node.attrib: self.filename = node.attrib['filename'] # depends on [control=['if'], data=[]] if 'type' in node.attrib: self.kind = node.attrib['type'] # depends on [control=['if'], data=[]] if 'target' in node.attrib: self.target = node.attrib['target'] # depends on [control=['if'], data=[]] self.value = node.text
def check_window(self, name="default", level=0, baseline=False): """ *** Automated Visual Testing with SeleniumBase *** The first time a test calls self.check_window() for a unique "name" parameter provided, it will set a visual baseline, meaning that it creates a folder, saves the URL to a file, saves the current window screenshot to a file, and creates the following three files with the listed data saved: tags_level1.txt -> HTML tags from the window tags_level2.txt -> HTML tags + attributes from the window tags_level3.txt -> HTML tags + attributes/values from the window Baseline folders are named based on the test name and the name parameter passed to self.check_window(). The same test can store multiple baseline folders. If the baseline is being set/reset, the "level" doesn't matter. After the first run of self.check_window(), it will compare the HTML tags of the latest window to the one from the initial run. Here's how the level system works: * level=0 -> DRY RUN ONLY - Will perform a comparison to the baseline, and print out any differences that are found, but won't fail the test even if differences exist. * level=1 -> HTML tags are compared to tags_level1.txt * level=2 -> HTML tags are compared to tags_level1.txt and HTML tags/attributes are compared to tags_level2.txt * level=3 -> HTML tags are compared to tags_level1.txt and HTML tags + attributes are compared to tags_level2.txt and HTML tags + attributes/values are compared to tags_level3.txt As shown, Level-3 is the most strict, Level-1 is the least strict. If the comparisons from the latest window to the existing baseline don't match, the current test will fail, except for Level-0 tests. You can reset the visual baseline on the command line by using: --visual_baseline As long as "--visual_baseline" is used on the command line while running tests, the self.check_window() method cannot fail because it will rebuild the visual baseline rather than comparing the html tags of the latest run to the existing baseline. If there are any expected layout changes to a website that you're testing, you'll need to reset the baseline to prevent unnecessary failures. self.check_window() will fail with "Page Domain Mismatch Failure" if the page domain doesn't match the domain of the baseline. If you want to use self.check_window() to compare a web page to a later version of itself from within the same test run, you can add the parameter "baseline=True" to the first time you call self.check_window() in a test to use that as the baseline. This only makes sense if you're calling self.check_window() more than once with the same name parameter in the same test. Automated Visual Testing with self.check_window() is not very effective for websites that have dynamic content that changes the layout and structure of web pages. For those, you're much better off using regular SeleniumBase functional testing. Example usage: self.check_window(name="testing", level=0) self.check_window(name="xkcd_home", level=1) self.check_window(name="github_page", level=2) self.check_window(name="wikipedia_page", level=3) """ if level == "0": level = 0 if level == "1": level = 1 if level == "2": level = 2 if level == "3": level = 3 if level != 0 and level != 1 and level != 2 and level != 3: raise Exception('Parameter "level" must be set to 0, 1, 2, or 3!') module = self.__class__.__module__ if '.' in module and len(module.split('.')[-1]) > 1: module = module.split('.')[-1] test_id = "%s.%s" % (module, self._testMethodName) if not name or len(name) < 1: name = "default" name = str(name) visual_helper.visual_baseline_folder_setup() baseline_dir = constants.VisualBaseline.STORAGE_FOLDER visual_baseline_path = baseline_dir + "/" + test_id + "/" + name page_url_file = visual_baseline_path + "/page_url.txt" screenshot_file = visual_baseline_path + "/screenshot.png" level_1_file = visual_baseline_path + "/tags_level_1.txt" level_2_file = visual_baseline_path + "/tags_level_2.txt" level_3_file = visual_baseline_path + "/tags_level_3.txt" set_baseline = False if baseline or self.visual_baseline: set_baseline = True if not os.path.exists(visual_baseline_path): set_baseline = True try: os.makedirs(visual_baseline_path) except Exception: pass # Only reachable during multi-threaded test runs if not os.path.exists(page_url_file): set_baseline = True if not os.path.exists(screenshot_file): set_baseline = True if not os.path.exists(level_1_file): set_baseline = True if not os.path.exists(level_2_file): set_baseline = True if not os.path.exists(level_3_file): set_baseline = True page_url = self.get_current_url() soup = self.get_beautiful_soup() html_tags = soup.body.find_all() level_1 = [[tag.name] for tag in html_tags] level_1 = json.loads(json.dumps(level_1)) # Tuples become lists level_2 = [[tag.name, sorted(tag.attrs.keys())] for tag in html_tags] level_2 = json.loads(json.dumps(level_2)) # Tuples become lists level_3 = [[tag.name, sorted(tag.attrs.items())] for tag in html_tags] level_3 = json.loads(json.dumps(level_3)) # Tuples become lists if set_baseline: self.save_screenshot("screenshot.png", visual_baseline_path) out_file = codecs.open(page_url_file, "w+") out_file.writelines(page_url) out_file.close() out_file = codecs.open(level_1_file, "w+") out_file.writelines(json.dumps(level_1)) out_file.close() out_file = codecs.open(level_2_file, "w+") out_file.writelines(json.dumps(level_2)) out_file.close() out_file = codecs.open(level_3_file, "w+") out_file.writelines(json.dumps(level_3)) out_file.close() if not set_baseline: f = open(page_url_file, 'r') page_url_data = f.read().strip() f.close() f = open(level_1_file, 'r') level_1_data = json.loads(f.read()) f.close() f = open(level_2_file, 'r') level_2_data = json.loads(f.read()) f.close() f = open(level_3_file, 'r') level_3_data = json.loads(f.read()) f.close() domain_fail = ( "Page Domain Mismatch Failure: " "Current Page Domain doesn't match the Page Domain of the " "Baseline! Can't compare two completely different sites! " "Run with --visual_baseline to reset the baseline!") level_1_failure = ( "\n\n*** Exception: <Level 1> Visual Diff Failure:\n" "* HTML tags don't match the baseline!") level_2_failure = ( "\n\n*** Exception: <Level 2> Visual Diff Failure:\n" "* HTML tag attributes don't match the baseline!") level_3_failure = ( "\n\n*** Exception: <Level 3> Visual Diff Failure:\n" "* HTML tag attribute values don't match the baseline!") page_domain = self.get_domain_url(page_url) page_data_domain = self.get_domain_url(page_url_data) unittest.TestCase.maxDiff = 1000 if level == 1 or level == 2 or level == 3: self.assert_equal(page_domain, page_data_domain, domain_fail) self.assert_equal(level_1, level_1_data, level_1_failure) unittest.TestCase.maxDiff = None if level == 2 or level == 3: self.assert_equal(level_2, level_2_data, level_2_failure) if level == 3: self.assert_equal(level_3, level_3_data, level_3_failure) if level == 0: try: unittest.TestCase.maxDiff = 1000 self.assert_equal( page_domain, page_data_domain, domain_fail) self.assert_equal(level_1, level_1_data, level_1_failure) unittest.TestCase.maxDiff = None self.assert_equal(level_2, level_2_data, level_2_failure) self.assert_equal(level_3, level_3_data, level_3_failure) except Exception as e: print(e)
def function[check_window, parameter[self, name, level, baseline]]: constant[ *** Automated Visual Testing with SeleniumBase *** The first time a test calls self.check_window() for a unique "name" parameter provided, it will set a visual baseline, meaning that it creates a folder, saves the URL to a file, saves the current window screenshot to a file, and creates the following three files with the listed data saved: tags_level1.txt -> HTML tags from the window tags_level2.txt -> HTML tags + attributes from the window tags_level3.txt -> HTML tags + attributes/values from the window Baseline folders are named based on the test name and the name parameter passed to self.check_window(). The same test can store multiple baseline folders. If the baseline is being set/reset, the "level" doesn't matter. After the first run of self.check_window(), it will compare the HTML tags of the latest window to the one from the initial run. Here's how the level system works: * level=0 -> DRY RUN ONLY - Will perform a comparison to the baseline, and print out any differences that are found, but won't fail the test even if differences exist. * level=1 -> HTML tags are compared to tags_level1.txt * level=2 -> HTML tags are compared to tags_level1.txt and HTML tags/attributes are compared to tags_level2.txt * level=3 -> HTML tags are compared to tags_level1.txt and HTML tags + attributes are compared to tags_level2.txt and HTML tags + attributes/values are compared to tags_level3.txt As shown, Level-3 is the most strict, Level-1 is the least strict. If the comparisons from the latest window to the existing baseline don't match, the current test will fail, except for Level-0 tests. You can reset the visual baseline on the command line by using: --visual_baseline As long as "--visual_baseline" is used on the command line while running tests, the self.check_window() method cannot fail because it will rebuild the visual baseline rather than comparing the html tags of the latest run to the existing baseline. If there are any expected layout changes to a website that you're testing, you'll need to reset the baseline to prevent unnecessary failures. self.check_window() will fail with "Page Domain Mismatch Failure" if the page domain doesn't match the domain of the baseline. If you want to use self.check_window() to compare a web page to a later version of itself from within the same test run, you can add the parameter "baseline=True" to the first time you call self.check_window() in a test to use that as the baseline. This only makes sense if you're calling self.check_window() more than once with the same name parameter in the same test. Automated Visual Testing with self.check_window() is not very effective for websites that have dynamic content that changes the layout and structure of web pages. For those, you're much better off using regular SeleniumBase functional testing. Example usage: self.check_window(name="testing", level=0) self.check_window(name="xkcd_home", level=1) self.check_window(name="github_page", level=2) self.check_window(name="wikipedia_page", level=3) ] if compare[name[level] equal[==] constant[0]] begin[:] variable[level] assign[=] constant[0] if compare[name[level] equal[==] constant[1]] begin[:] variable[level] assign[=] constant[1] if compare[name[level] equal[==] constant[2]] begin[:] variable[level] assign[=] constant[2] if compare[name[level] equal[==] constant[3]] begin[:] variable[level] assign[=] constant[3] if <ast.BoolOp object at 0x7da1b1b62ce0> begin[:] <ast.Raise object at 0x7da1b1b61db0> variable[module] assign[=] name[self].__class__.__module__ if <ast.BoolOp object at 0x7da1b1b609d0> begin[:] variable[module] assign[=] call[call[name[module].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da1b1b63be0>] variable[test_id] assign[=] binary_operation[constant[%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b62d70>, <ast.Attribute object at 0x7da1b1b632e0>]]] if <ast.BoolOp object at 0x7da1b1b60c10> begin[:] variable[name] assign[=] constant[default] variable[name] assign[=] call[name[str], parameter[name[name]]] call[name[visual_helper].visual_baseline_folder_setup, parameter[]] variable[baseline_dir] assign[=] name[constants].VisualBaseline.STORAGE_FOLDER variable[visual_baseline_path] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[baseline_dir] + constant[/]] + name[test_id]] + constant[/]] + name[name]] variable[page_url_file] assign[=] binary_operation[name[visual_baseline_path] + constant[/page_url.txt]] variable[screenshot_file] assign[=] binary_operation[name[visual_baseline_path] + constant[/screenshot.png]] variable[level_1_file] assign[=] binary_operation[name[visual_baseline_path] + constant[/tags_level_1.txt]] variable[level_2_file] assign[=] binary_operation[name[visual_baseline_path] + constant[/tags_level_2.txt]] variable[level_3_file] assign[=] binary_operation[name[visual_baseline_path] + constant[/tags_level_3.txt]] variable[set_baseline] assign[=] constant[False] if <ast.BoolOp object at 0x7da1b1b61f00> begin[:] variable[set_baseline] assign[=] constant[True] if <ast.UnaryOp object at 0x7da1b1b612a0> begin[:] variable[set_baseline] assign[=] constant[True] <ast.Try object at 0x7da1b1b60a30> if <ast.UnaryOp object at 0x7da1b1b63610> begin[:] variable[set_baseline] assign[=] constant[True] if <ast.UnaryOp object at 0x7da1b1b63550> begin[:] variable[set_baseline] assign[=] constant[True] if <ast.UnaryOp object at 0x7da1b1b609a0> begin[:] variable[set_baseline] assign[=] constant[True] if <ast.UnaryOp object at 0x7da1b1b62620> begin[:] variable[set_baseline] assign[=] constant[True] if <ast.UnaryOp object at 0x7da1b1b60430> begin[:] variable[set_baseline] assign[=] constant[True] variable[page_url] assign[=] call[name[self].get_current_url, parameter[]] variable[soup] assign[=] call[name[self].get_beautiful_soup, parameter[]] variable[html_tags] assign[=] call[name[soup].body.find_all, parameter[]] variable[level_1] assign[=] <ast.ListComp object at 0x7da1b1b164a0> variable[level_1] assign[=] call[name[json].loads, parameter[call[name[json].dumps, parameter[name[level_1]]]]] variable[level_2] assign[=] <ast.ListComp object at 0x7da1b1b16140> variable[level_2] assign[=] call[name[json].loads, parameter[call[name[json].dumps, parameter[name[level_2]]]]] variable[level_3] assign[=] <ast.ListComp object at 0x7da1b1b16fe0> variable[level_3] assign[=] call[name[json].loads, parameter[call[name[json].dumps, parameter[name[level_3]]]]] if name[set_baseline] begin[:] call[name[self].save_screenshot, parameter[constant[screenshot.png], name[visual_baseline_path]]] variable[out_file] assign[=] call[name[codecs].open, parameter[name[page_url_file], constant[w+]]] call[name[out_file].writelines, parameter[name[page_url]]] call[name[out_file].close, parameter[]] variable[out_file] assign[=] call[name[codecs].open, parameter[name[level_1_file], constant[w+]]] call[name[out_file].writelines, parameter[call[name[json].dumps, parameter[name[level_1]]]]] call[name[out_file].close, parameter[]] variable[out_file] assign[=] call[name[codecs].open, parameter[name[level_2_file], constant[w+]]] call[name[out_file].writelines, parameter[call[name[json].dumps, parameter[name[level_2]]]]] call[name[out_file].close, parameter[]] variable[out_file] assign[=] call[name[codecs].open, parameter[name[level_3_file], constant[w+]]] call[name[out_file].writelines, parameter[call[name[json].dumps, parameter[name[level_3]]]]] call[name[out_file].close, parameter[]] if <ast.UnaryOp object at 0x7da1b1b15b10> begin[:] variable[f] assign[=] call[name[open], parameter[name[page_url_file], constant[r]]] variable[page_url_data] assign[=] call[call[name[f].read, parameter[]].strip, parameter[]] call[name[f].close, parameter[]] variable[f] assign[=] call[name[open], parameter[name[level_1_file], constant[r]]] variable[level_1_data] assign[=] call[name[json].loads, parameter[call[name[f].read, parameter[]]]] call[name[f].close, parameter[]] variable[f] assign[=] call[name[open], parameter[name[level_2_file], constant[r]]] variable[level_2_data] assign[=] call[name[json].loads, parameter[call[name[f].read, parameter[]]]] call[name[f].close, parameter[]] variable[f] assign[=] call[name[open], parameter[name[level_3_file], constant[r]]] variable[level_3_data] assign[=] call[name[json].loads, parameter[call[name[f].read, parameter[]]]] call[name[f].close, parameter[]] variable[domain_fail] assign[=] constant[Page Domain Mismatch Failure: Current Page Domain doesn't match the Page Domain of the Baseline! Can't compare two completely different sites! Run with --visual_baseline to reset the baseline!] variable[level_1_failure] assign[=] constant[ *** Exception: <Level 1> Visual Diff Failure: * HTML tags don't match the baseline!] variable[level_2_failure] assign[=] constant[ *** Exception: <Level 2> Visual Diff Failure: * HTML tag attributes don't match the baseline!] variable[level_3_failure] assign[=] constant[ *** Exception: <Level 3> Visual Diff Failure: * HTML tag attribute values don't match the baseline!] variable[page_domain] assign[=] call[name[self].get_domain_url, parameter[name[page_url]]] variable[page_data_domain] assign[=] call[name[self].get_domain_url, parameter[name[page_url_data]]] name[unittest].TestCase.maxDiff assign[=] constant[1000] if <ast.BoolOp object at 0x7da1b1bc25f0> begin[:] call[name[self].assert_equal, parameter[name[page_domain], name[page_data_domain], name[domain_fail]]] call[name[self].assert_equal, parameter[name[level_1], name[level_1_data], name[level_1_failure]]] name[unittest].TestCase.maxDiff assign[=] constant[None] if <ast.BoolOp object at 0x7da1b1cd6d40> begin[:] call[name[self].assert_equal, parameter[name[level_2], name[level_2_data], name[level_2_failure]]] if compare[name[level] equal[==] constant[3]] begin[:] call[name[self].assert_equal, parameter[name[level_3], name[level_3_data], name[level_3_failure]]] if compare[name[level] equal[==] constant[0]] begin[:] <ast.Try object at 0x7da1b1cd4f70>
keyword[def] identifier[check_window] ( identifier[self] , identifier[name] = literal[string] , identifier[level] = literal[int] , identifier[baseline] = keyword[False] ): literal[string] keyword[if] identifier[level] == literal[string] : identifier[level] = literal[int] keyword[if] identifier[level] == literal[string] : identifier[level] = literal[int] keyword[if] identifier[level] == literal[string] : identifier[level] = literal[int] keyword[if] identifier[level] == literal[string] : identifier[level] = literal[int] keyword[if] identifier[level] != literal[int] keyword[and] identifier[level] != literal[int] keyword[and] identifier[level] != literal[int] keyword[and] identifier[level] != literal[int] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[module] = identifier[self] . identifier[__class__] . identifier[__module__] keyword[if] literal[string] keyword[in] identifier[module] keyword[and] identifier[len] ( identifier[module] . identifier[split] ( literal[string] )[- literal[int] ])> literal[int] : identifier[module] = identifier[module] . identifier[split] ( literal[string] )[- literal[int] ] identifier[test_id] = literal[string] %( identifier[module] , identifier[self] . identifier[_testMethodName] ) keyword[if] keyword[not] identifier[name] keyword[or] identifier[len] ( identifier[name] )< literal[int] : identifier[name] = literal[string] identifier[name] = identifier[str] ( identifier[name] ) identifier[visual_helper] . identifier[visual_baseline_folder_setup] () identifier[baseline_dir] = identifier[constants] . identifier[VisualBaseline] . identifier[STORAGE_FOLDER] identifier[visual_baseline_path] = identifier[baseline_dir] + literal[string] + identifier[test_id] + literal[string] + identifier[name] identifier[page_url_file] = identifier[visual_baseline_path] + literal[string] identifier[screenshot_file] = identifier[visual_baseline_path] + literal[string] identifier[level_1_file] = identifier[visual_baseline_path] + literal[string] identifier[level_2_file] = identifier[visual_baseline_path] + literal[string] identifier[level_3_file] = identifier[visual_baseline_path] + literal[string] identifier[set_baseline] = keyword[False] keyword[if] identifier[baseline] keyword[or] identifier[self] . identifier[visual_baseline] : identifier[set_baseline] = keyword[True] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[visual_baseline_path] ): identifier[set_baseline] = keyword[True] keyword[try] : identifier[os] . identifier[makedirs] ( identifier[visual_baseline_path] ) keyword[except] identifier[Exception] : keyword[pass] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[page_url_file] ): identifier[set_baseline] = keyword[True] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[screenshot_file] ): identifier[set_baseline] = keyword[True] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[level_1_file] ): identifier[set_baseline] = keyword[True] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[level_2_file] ): identifier[set_baseline] = keyword[True] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[level_3_file] ): identifier[set_baseline] = keyword[True] identifier[page_url] = identifier[self] . identifier[get_current_url] () identifier[soup] = identifier[self] . identifier[get_beautiful_soup] () identifier[html_tags] = identifier[soup] . identifier[body] . identifier[find_all] () identifier[level_1] =[[ identifier[tag] . identifier[name] ] keyword[for] identifier[tag] keyword[in] identifier[html_tags] ] identifier[level_1] = identifier[json] . identifier[loads] ( identifier[json] . identifier[dumps] ( identifier[level_1] )) identifier[level_2] =[[ identifier[tag] . identifier[name] , identifier[sorted] ( identifier[tag] . identifier[attrs] . identifier[keys] ())] keyword[for] identifier[tag] keyword[in] identifier[html_tags] ] identifier[level_2] = identifier[json] . identifier[loads] ( identifier[json] . identifier[dumps] ( identifier[level_2] )) identifier[level_3] =[[ identifier[tag] . identifier[name] , identifier[sorted] ( identifier[tag] . identifier[attrs] . identifier[items] ())] keyword[for] identifier[tag] keyword[in] identifier[html_tags] ] identifier[level_3] = identifier[json] . identifier[loads] ( identifier[json] . identifier[dumps] ( identifier[level_3] )) keyword[if] identifier[set_baseline] : identifier[self] . identifier[save_screenshot] ( literal[string] , identifier[visual_baseline_path] ) identifier[out_file] = identifier[codecs] . identifier[open] ( identifier[page_url_file] , literal[string] ) identifier[out_file] . identifier[writelines] ( identifier[page_url] ) identifier[out_file] . identifier[close] () identifier[out_file] = identifier[codecs] . identifier[open] ( identifier[level_1_file] , literal[string] ) identifier[out_file] . identifier[writelines] ( identifier[json] . identifier[dumps] ( identifier[level_1] )) identifier[out_file] . identifier[close] () identifier[out_file] = identifier[codecs] . identifier[open] ( identifier[level_2_file] , literal[string] ) identifier[out_file] . identifier[writelines] ( identifier[json] . identifier[dumps] ( identifier[level_2] )) identifier[out_file] . identifier[close] () identifier[out_file] = identifier[codecs] . identifier[open] ( identifier[level_3_file] , literal[string] ) identifier[out_file] . identifier[writelines] ( identifier[json] . identifier[dumps] ( identifier[level_3] )) identifier[out_file] . identifier[close] () keyword[if] keyword[not] identifier[set_baseline] : identifier[f] = identifier[open] ( identifier[page_url_file] , literal[string] ) identifier[page_url_data] = identifier[f] . identifier[read] (). identifier[strip] () identifier[f] . identifier[close] () identifier[f] = identifier[open] ( identifier[level_1_file] , literal[string] ) identifier[level_1_data] = identifier[json] . identifier[loads] ( identifier[f] . identifier[read] ()) identifier[f] . identifier[close] () identifier[f] = identifier[open] ( identifier[level_2_file] , literal[string] ) identifier[level_2_data] = identifier[json] . identifier[loads] ( identifier[f] . identifier[read] ()) identifier[f] . identifier[close] () identifier[f] = identifier[open] ( identifier[level_3_file] , literal[string] ) identifier[level_3_data] = identifier[json] . identifier[loads] ( identifier[f] . identifier[read] ()) identifier[f] . identifier[close] () identifier[domain_fail] =( literal[string] literal[string] literal[string] literal[string] ) identifier[level_1_failure] =( literal[string] literal[string] ) identifier[level_2_failure] =( literal[string] literal[string] ) identifier[level_3_failure] =( literal[string] literal[string] ) identifier[page_domain] = identifier[self] . identifier[get_domain_url] ( identifier[page_url] ) identifier[page_data_domain] = identifier[self] . identifier[get_domain_url] ( identifier[page_url_data] ) identifier[unittest] . identifier[TestCase] . identifier[maxDiff] = literal[int] keyword[if] identifier[level] == literal[int] keyword[or] identifier[level] == literal[int] keyword[or] identifier[level] == literal[int] : identifier[self] . identifier[assert_equal] ( identifier[page_domain] , identifier[page_data_domain] , identifier[domain_fail] ) identifier[self] . identifier[assert_equal] ( identifier[level_1] , identifier[level_1_data] , identifier[level_1_failure] ) identifier[unittest] . identifier[TestCase] . identifier[maxDiff] = keyword[None] keyword[if] identifier[level] == literal[int] keyword[or] identifier[level] == literal[int] : identifier[self] . identifier[assert_equal] ( identifier[level_2] , identifier[level_2_data] , identifier[level_2_failure] ) keyword[if] identifier[level] == literal[int] : identifier[self] . identifier[assert_equal] ( identifier[level_3] , identifier[level_3_data] , identifier[level_3_failure] ) keyword[if] identifier[level] == literal[int] : keyword[try] : identifier[unittest] . identifier[TestCase] . identifier[maxDiff] = literal[int] identifier[self] . identifier[assert_equal] ( identifier[page_domain] , identifier[page_data_domain] , identifier[domain_fail] ) identifier[self] . identifier[assert_equal] ( identifier[level_1] , identifier[level_1_data] , identifier[level_1_failure] ) identifier[unittest] . identifier[TestCase] . identifier[maxDiff] = keyword[None] identifier[self] . identifier[assert_equal] ( identifier[level_2] , identifier[level_2_data] , identifier[level_2_failure] ) identifier[self] . identifier[assert_equal] ( identifier[level_3] , identifier[level_3_data] , identifier[level_3_failure] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[print] ( identifier[e] )
def check_window(self, name='default', level=0, baseline=False): """ *** Automated Visual Testing with SeleniumBase *** The first time a test calls self.check_window() for a unique "name" parameter provided, it will set a visual baseline, meaning that it creates a folder, saves the URL to a file, saves the current window screenshot to a file, and creates the following three files with the listed data saved: tags_level1.txt -> HTML tags from the window tags_level2.txt -> HTML tags + attributes from the window tags_level3.txt -> HTML tags + attributes/values from the window Baseline folders are named based on the test name and the name parameter passed to self.check_window(). The same test can store multiple baseline folders. If the baseline is being set/reset, the "level" doesn't matter. After the first run of self.check_window(), it will compare the HTML tags of the latest window to the one from the initial run. Here's how the level system works: * level=0 -> DRY RUN ONLY - Will perform a comparison to the baseline, and print out any differences that are found, but won't fail the test even if differences exist. * level=1 -> HTML tags are compared to tags_level1.txt * level=2 -> HTML tags are compared to tags_level1.txt and HTML tags/attributes are compared to tags_level2.txt * level=3 -> HTML tags are compared to tags_level1.txt and HTML tags + attributes are compared to tags_level2.txt and HTML tags + attributes/values are compared to tags_level3.txt As shown, Level-3 is the most strict, Level-1 is the least strict. If the comparisons from the latest window to the existing baseline don't match, the current test will fail, except for Level-0 tests. You can reset the visual baseline on the command line by using: --visual_baseline As long as "--visual_baseline" is used on the command line while running tests, the self.check_window() method cannot fail because it will rebuild the visual baseline rather than comparing the html tags of the latest run to the existing baseline. If there are any expected layout changes to a website that you're testing, you'll need to reset the baseline to prevent unnecessary failures. self.check_window() will fail with "Page Domain Mismatch Failure" if the page domain doesn't match the domain of the baseline. If you want to use self.check_window() to compare a web page to a later version of itself from within the same test run, you can add the parameter "baseline=True" to the first time you call self.check_window() in a test to use that as the baseline. This only makes sense if you're calling self.check_window() more than once with the same name parameter in the same test. Automated Visual Testing with self.check_window() is not very effective for websites that have dynamic content that changes the layout and structure of web pages. For those, you're much better off using regular SeleniumBase functional testing. Example usage: self.check_window(name="testing", level=0) self.check_window(name="xkcd_home", level=1) self.check_window(name="github_page", level=2) self.check_window(name="wikipedia_page", level=3) """ if level == '0': level = 0 # depends on [control=['if'], data=['level']] if level == '1': level = 1 # depends on [control=['if'], data=['level']] if level == '2': level = 2 # depends on [control=['if'], data=['level']] if level == '3': level = 3 # depends on [control=['if'], data=['level']] if level != 0 and level != 1 and (level != 2) and (level != 3): raise Exception('Parameter "level" must be set to 0, 1, 2, or 3!') # depends on [control=['if'], data=[]] module = self.__class__.__module__ if '.' in module and len(module.split('.')[-1]) > 1: module = module.split('.')[-1] # depends on [control=['if'], data=[]] test_id = '%s.%s' % (module, self._testMethodName) if not name or len(name) < 1: name = 'default' # depends on [control=['if'], data=[]] name = str(name) visual_helper.visual_baseline_folder_setup() baseline_dir = constants.VisualBaseline.STORAGE_FOLDER visual_baseline_path = baseline_dir + '/' + test_id + '/' + name page_url_file = visual_baseline_path + '/page_url.txt' screenshot_file = visual_baseline_path + '/screenshot.png' level_1_file = visual_baseline_path + '/tags_level_1.txt' level_2_file = visual_baseline_path + '/tags_level_2.txt' level_3_file = visual_baseline_path + '/tags_level_3.txt' set_baseline = False if baseline or self.visual_baseline: set_baseline = True # depends on [control=['if'], data=[]] if not os.path.exists(visual_baseline_path): set_baseline = True try: os.makedirs(visual_baseline_path) # depends on [control=['try'], data=[]] except Exception: pass # Only reachable during multi-threaded test runs # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] if not os.path.exists(page_url_file): set_baseline = True # depends on [control=['if'], data=[]] if not os.path.exists(screenshot_file): set_baseline = True # depends on [control=['if'], data=[]] if not os.path.exists(level_1_file): set_baseline = True # depends on [control=['if'], data=[]] if not os.path.exists(level_2_file): set_baseline = True # depends on [control=['if'], data=[]] if not os.path.exists(level_3_file): set_baseline = True # depends on [control=['if'], data=[]] page_url = self.get_current_url() soup = self.get_beautiful_soup() html_tags = soup.body.find_all() level_1 = [[tag.name] for tag in html_tags] level_1 = json.loads(json.dumps(level_1)) # Tuples become lists level_2 = [[tag.name, sorted(tag.attrs.keys())] for tag in html_tags] level_2 = json.loads(json.dumps(level_2)) # Tuples become lists level_3 = [[tag.name, sorted(tag.attrs.items())] for tag in html_tags] level_3 = json.loads(json.dumps(level_3)) # Tuples become lists if set_baseline: self.save_screenshot('screenshot.png', visual_baseline_path) out_file = codecs.open(page_url_file, 'w+') out_file.writelines(page_url) out_file.close() out_file = codecs.open(level_1_file, 'w+') out_file.writelines(json.dumps(level_1)) out_file.close() out_file = codecs.open(level_2_file, 'w+') out_file.writelines(json.dumps(level_2)) out_file.close() out_file = codecs.open(level_3_file, 'w+') out_file.writelines(json.dumps(level_3)) out_file.close() # depends on [control=['if'], data=[]] if not set_baseline: f = open(page_url_file, 'r') page_url_data = f.read().strip() f.close() f = open(level_1_file, 'r') level_1_data = json.loads(f.read()) f.close() f = open(level_2_file, 'r') level_2_data = json.loads(f.read()) f.close() f = open(level_3_file, 'r') level_3_data = json.loads(f.read()) f.close() domain_fail = "Page Domain Mismatch Failure: Current Page Domain doesn't match the Page Domain of the Baseline! Can't compare two completely different sites! Run with --visual_baseline to reset the baseline!" level_1_failure = "\n\n*** Exception: <Level 1> Visual Diff Failure:\n* HTML tags don't match the baseline!" level_2_failure = "\n\n*** Exception: <Level 2> Visual Diff Failure:\n* HTML tag attributes don't match the baseline!" level_3_failure = "\n\n*** Exception: <Level 3> Visual Diff Failure:\n* HTML tag attribute values don't match the baseline!" page_domain = self.get_domain_url(page_url) page_data_domain = self.get_domain_url(page_url_data) unittest.TestCase.maxDiff = 1000 if level == 1 or level == 2 or level == 3: self.assert_equal(page_domain, page_data_domain, domain_fail) self.assert_equal(level_1, level_1_data, level_1_failure) # depends on [control=['if'], data=[]] unittest.TestCase.maxDiff = None if level == 2 or level == 3: self.assert_equal(level_2, level_2_data, level_2_failure) # depends on [control=['if'], data=[]] if level == 3: self.assert_equal(level_3, level_3_data, level_3_failure) # depends on [control=['if'], data=[]] if level == 0: try: unittest.TestCase.maxDiff = 1000 self.assert_equal(page_domain, page_data_domain, domain_fail) self.assert_equal(level_1, level_1_data, level_1_failure) unittest.TestCase.maxDiff = None self.assert_equal(level_2, level_2_data, level_2_failure) self.assert_equal(level_3, level_3_data, level_3_failure) # depends on [control=['try'], data=[]] except Exception as e: print(e) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def set_default_tlw(self, tlw, designer, inspector): "track default top level window for toolbox menu default action" self.designer = designer self.inspector = inspector
def function[set_default_tlw, parameter[self, tlw, designer, inspector]]: constant[track default top level window for toolbox menu default action] name[self].designer assign[=] name[designer] name[self].inspector assign[=] name[inspector]
keyword[def] identifier[set_default_tlw] ( identifier[self] , identifier[tlw] , identifier[designer] , identifier[inspector] ): literal[string] identifier[self] . identifier[designer] = identifier[designer] identifier[self] . identifier[inspector] = identifier[inspector]
def set_default_tlw(self, tlw, designer, inspector): """track default top level window for toolbox menu default action""" self.designer = designer self.inspector = inspector
def fuse(args): """ %prog fuse *.bed *.anchors Fuse gene orders based on anchors file. """ from jcvi.algorithms.graph import BiGraph p = OptionParser(fuse.__doc__) opts, args = p.parse_args(args) if len(args) < 1: sys.exit(not p.print_help()) bedfiles = [x for x in args if x.endswith(".bed")] anchorfiles = [x for x in args if x.endswith(".anchors")] # TODO: Use Markov clustering to sparsify the edges families = Grouper() for anchorfile in anchorfiles: af = AnchorFile(anchorfile) for a, b, block_id in af.iter_pairs(): families.join(a, b) allowed = set(families.keys()) logging.debug("Total families: {}, Gene members: {}" .format(len(families), len(allowed))) # TODO: Use C++ implementation of BiGraph() when available # For now just serialize this to the disk G = BiGraph() for bedfile in bedfiles: bed = Bed(bedfile, include=allowed) #add_bed_to_graph(G, bed, families) print_edges(G, bed, families)
def function[fuse, parameter[args]]: constant[ %prog fuse *.bed *.anchors Fuse gene orders based on anchors file. ] from relative_module[jcvi.algorithms.graph] import module[BiGraph] variable[p] assign[=] call[name[OptionParser], parameter[name[fuse].__doc__]] <ast.Tuple object at 0x7da2041d99c0> assign[=] call[name[p].parse_args, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] less[<] constant[1]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da2044c00a0>]] variable[bedfiles] assign[=] <ast.ListComp object at 0x7da2044c3550> variable[anchorfiles] assign[=] <ast.ListComp object at 0x7da2044c12d0> variable[families] assign[=] call[name[Grouper], parameter[]] for taget[name[anchorfile]] in starred[name[anchorfiles]] begin[:] variable[af] assign[=] call[name[AnchorFile], parameter[name[anchorfile]]] for taget[tuple[[<ast.Name object at 0x7da2044c2650>, <ast.Name object at 0x7da2044c1960>, <ast.Name object at 0x7da2044c3a60>]]] in starred[call[name[af].iter_pairs, parameter[]]] begin[:] call[name[families].join, parameter[name[a], name[b]]] variable[allowed] assign[=] call[name[set], parameter[call[name[families].keys, parameter[]]]] call[name[logging].debug, parameter[call[constant[Total families: {}, Gene members: {}].format, parameter[call[name[len], parameter[name[families]]], call[name[len], parameter[name[allowed]]]]]]] variable[G] assign[=] call[name[BiGraph], parameter[]] for taget[name[bedfile]] in starred[name[bedfiles]] begin[:] variable[bed] assign[=] call[name[Bed], parameter[name[bedfile]]] call[name[print_edges], parameter[name[G], name[bed], name[families]]]
keyword[def] identifier[fuse] ( identifier[args] ): literal[string] keyword[from] identifier[jcvi] . identifier[algorithms] . identifier[graph] keyword[import] identifier[BiGraph] identifier[p] = identifier[OptionParser] ( identifier[fuse] . identifier[__doc__] ) identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) keyword[if] identifier[len] ( identifier[args] )< literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[bedfiles] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[args] keyword[if] identifier[x] . identifier[endswith] ( literal[string] )] identifier[anchorfiles] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[args] keyword[if] identifier[x] . identifier[endswith] ( literal[string] )] identifier[families] = identifier[Grouper] () keyword[for] identifier[anchorfile] keyword[in] identifier[anchorfiles] : identifier[af] = identifier[AnchorFile] ( identifier[anchorfile] ) keyword[for] identifier[a] , identifier[b] , identifier[block_id] keyword[in] identifier[af] . identifier[iter_pairs] (): identifier[families] . identifier[join] ( identifier[a] , identifier[b] ) identifier[allowed] = identifier[set] ( identifier[families] . identifier[keys] ()) identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[len] ( identifier[families] ), identifier[len] ( identifier[allowed] ))) identifier[G] = identifier[BiGraph] () keyword[for] identifier[bedfile] keyword[in] identifier[bedfiles] : identifier[bed] = identifier[Bed] ( identifier[bedfile] , identifier[include] = identifier[allowed] ) identifier[print_edges] ( identifier[G] , identifier[bed] , identifier[families] )
def fuse(args): """ %prog fuse *.bed *.anchors Fuse gene orders based on anchors file. """ from jcvi.algorithms.graph import BiGraph p = OptionParser(fuse.__doc__) (opts, args) = p.parse_args(args) if len(args) < 1: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] bedfiles = [x for x in args if x.endswith('.bed')] anchorfiles = [x for x in args if x.endswith('.anchors')] # TODO: Use Markov clustering to sparsify the edges families = Grouper() for anchorfile in anchorfiles: af = AnchorFile(anchorfile) for (a, b, block_id) in af.iter_pairs(): families.join(a, b) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['anchorfile']] allowed = set(families.keys()) logging.debug('Total families: {}, Gene members: {}'.format(len(families), len(allowed))) # TODO: Use C++ implementation of BiGraph() when available # For now just serialize this to the disk G = BiGraph() for bedfile in bedfiles: bed = Bed(bedfile, include=allowed) #add_bed_to_graph(G, bed, families) print_edges(G, bed, families) # depends on [control=['for'], data=['bedfile']]
def _download(self, dstFile): """ Download this resource from its URL to the given file object. :type dstFile: io.BytesIO|io.FileIO """ for attempt in retry(predicate=lambda e: isinstance(e, HTTPError) and e.code == 400): with attempt: with closing(urlopen(self.url)) as content: buf = content.read() contentHash = hashlib.md5(buf) assert contentHash.hexdigest() == self.contentHash dstFile.write(buf)
def function[_download, parameter[self, dstFile]]: constant[ Download this resource from its URL to the given file object. :type dstFile: io.BytesIO|io.FileIO ] for taget[name[attempt]] in starred[call[name[retry], parameter[]]] begin[:] with name[attempt] begin[:] with call[name[closing], parameter[call[name[urlopen], parameter[name[self].url]]]] begin[:] variable[buf] assign[=] call[name[content].read, parameter[]] variable[contentHash] assign[=] call[name[hashlib].md5, parameter[name[buf]]] assert[compare[call[name[contentHash].hexdigest, parameter[]] equal[==] name[self].contentHash]] call[name[dstFile].write, parameter[name[buf]]]
keyword[def] identifier[_download] ( identifier[self] , identifier[dstFile] ): literal[string] keyword[for] identifier[attempt] keyword[in] identifier[retry] ( identifier[predicate] = keyword[lambda] identifier[e] : identifier[isinstance] ( identifier[e] , identifier[HTTPError] ) keyword[and] identifier[e] . identifier[code] == literal[int] ): keyword[with] identifier[attempt] : keyword[with] identifier[closing] ( identifier[urlopen] ( identifier[self] . identifier[url] )) keyword[as] identifier[content] : identifier[buf] = identifier[content] . identifier[read] () identifier[contentHash] = identifier[hashlib] . identifier[md5] ( identifier[buf] ) keyword[assert] identifier[contentHash] . identifier[hexdigest] ()== identifier[self] . identifier[contentHash] identifier[dstFile] . identifier[write] ( identifier[buf] )
def _download(self, dstFile): """ Download this resource from its URL to the given file object. :type dstFile: io.BytesIO|io.FileIO """ for attempt in retry(predicate=lambda e: isinstance(e, HTTPError) and e.code == 400): with attempt: with closing(urlopen(self.url)) as content: buf = content.read() # depends on [control=['with'], data=['content']] # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['attempt']] contentHash = hashlib.md5(buf) assert contentHash.hexdigest() == self.contentHash dstFile.write(buf)
def weather_at_ids(self, ids_list): """ Queries the OWM Weather API for the currently observed weathers at the specified city IDs (eg: [5128581,87182]) :param ids_list: the list of city IDs :type ids_list: list of int :returns: a list of *Observation* instances or an empty list if no weather data is available :raises: *ParseResponseException* when OWM Weather API responses' data cannot be parsed or *APICallException* when OWM Weather API can not be reached """ assert type(ids_list) is list, "'ids_list' must be a list of integers" for id in ids_list: assert type(id) is int, "'ids_list' must be a list of integers" if id < 0: raise ValueError("id values in 'ids_list' must be greater " "than 0") params = {'id': ','.join(list(map(str, ids_list))), 'lang': self._language} uri = http_client.HttpClient.to_url(GROUP_OBSERVATIONS_URL, self._API_key, self._subscription_type, self._use_ssl) _, json_data = self._wapi.cacheable_get_json(uri, params=params) return self._parsers['observation_list'].parse_JSON(json_data)
def function[weather_at_ids, parameter[self, ids_list]]: constant[ Queries the OWM Weather API for the currently observed weathers at the specified city IDs (eg: [5128581,87182]) :param ids_list: the list of city IDs :type ids_list: list of int :returns: a list of *Observation* instances or an empty list if no weather data is available :raises: *ParseResponseException* when OWM Weather API responses' data cannot be parsed or *APICallException* when OWM Weather API can not be reached ] assert[compare[call[name[type], parameter[name[ids_list]]] is name[list]]] for taget[name[id]] in starred[name[ids_list]] begin[:] assert[compare[call[name[type], parameter[name[id]]] is name[int]]] if compare[name[id] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da20c6e6f80> variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e6020>, <ast.Constant object at 0x7da20c6e7d30>], [<ast.Call object at 0x7da20c6e7b20>, <ast.Attribute object at 0x7da20c6e6440>]] variable[uri] assign[=] call[name[http_client].HttpClient.to_url, parameter[name[GROUP_OBSERVATIONS_URL], name[self]._API_key, name[self]._subscription_type, name[self]._use_ssl]] <ast.Tuple object at 0x7da20c6e6050> assign[=] call[name[self]._wapi.cacheable_get_json, parameter[name[uri]]] return[call[call[name[self]._parsers][constant[observation_list]].parse_JSON, parameter[name[json_data]]]]
keyword[def] identifier[weather_at_ids] ( identifier[self] , identifier[ids_list] ): literal[string] keyword[assert] identifier[type] ( identifier[ids_list] ) keyword[is] identifier[list] , literal[string] keyword[for] identifier[id] keyword[in] identifier[ids_list] : keyword[assert] identifier[type] ( identifier[id] ) keyword[is] identifier[int] , literal[string] keyword[if] identifier[id] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[params] ={ literal[string] : literal[string] . identifier[join] ( identifier[list] ( identifier[map] ( identifier[str] , identifier[ids_list] ))), literal[string] : identifier[self] . identifier[_language] } identifier[uri] = identifier[http_client] . identifier[HttpClient] . identifier[to_url] ( identifier[GROUP_OBSERVATIONS_URL] , identifier[self] . identifier[_API_key] , identifier[self] . identifier[_subscription_type] , identifier[self] . identifier[_use_ssl] ) identifier[_] , identifier[json_data] = identifier[self] . identifier[_wapi] . identifier[cacheable_get_json] ( identifier[uri] , identifier[params] = identifier[params] ) keyword[return] identifier[self] . identifier[_parsers] [ literal[string] ]. identifier[parse_JSON] ( identifier[json_data] )
def weather_at_ids(self, ids_list): """ Queries the OWM Weather API for the currently observed weathers at the specified city IDs (eg: [5128581,87182]) :param ids_list: the list of city IDs :type ids_list: list of int :returns: a list of *Observation* instances or an empty list if no weather data is available :raises: *ParseResponseException* when OWM Weather API responses' data cannot be parsed or *APICallException* when OWM Weather API can not be reached """ assert type(ids_list) is list, "'ids_list' must be a list of integers" for id in ids_list: assert type(id) is int, "'ids_list' must be a list of integers" if id < 0: raise ValueError("id values in 'ids_list' must be greater than 0") # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['id']] params = {'id': ','.join(list(map(str, ids_list))), 'lang': self._language} uri = http_client.HttpClient.to_url(GROUP_OBSERVATIONS_URL, self._API_key, self._subscription_type, self._use_ssl) (_, json_data) = self._wapi.cacheable_get_json(uri, params=params) return self._parsers['observation_list'].parse_JSON(json_data)
def get_input_dialog(dialog): """Similar to :meth:`get_input_peer`, but for dialogs""" try: if dialog.SUBCLASS_OF_ID == 0xa21c9795: # crc32(b'InputDialogPeer') return dialog if dialog.SUBCLASS_OF_ID == 0xc91c90b6: # crc32(b'InputPeer') return types.InputDialogPeer(dialog) except AttributeError: _raise_cast_fail(dialog, 'InputDialogPeer') try: return types.InputDialogPeer(get_input_peer(dialog)) except TypeError: pass _raise_cast_fail(dialog, 'InputDialogPeer')
def function[get_input_dialog, parameter[dialog]]: constant[Similar to :meth:`get_input_peer`, but for dialogs] <ast.Try object at 0x7da1b1f49ab0> <ast.Try object at 0x7da1b26aea10> call[name[_raise_cast_fail], parameter[name[dialog], constant[InputDialogPeer]]]
keyword[def] identifier[get_input_dialog] ( identifier[dialog] ): literal[string] keyword[try] : keyword[if] identifier[dialog] . identifier[SUBCLASS_OF_ID] == literal[int] : keyword[return] identifier[dialog] keyword[if] identifier[dialog] . identifier[SUBCLASS_OF_ID] == literal[int] : keyword[return] identifier[types] . identifier[InputDialogPeer] ( identifier[dialog] ) keyword[except] identifier[AttributeError] : identifier[_raise_cast_fail] ( identifier[dialog] , literal[string] ) keyword[try] : keyword[return] identifier[types] . identifier[InputDialogPeer] ( identifier[get_input_peer] ( identifier[dialog] )) keyword[except] identifier[TypeError] : keyword[pass] identifier[_raise_cast_fail] ( identifier[dialog] , literal[string] )
def get_input_dialog(dialog): """Similar to :meth:`get_input_peer`, but for dialogs""" try: if dialog.SUBCLASS_OF_ID == 2719782805: # crc32(b'InputDialogPeer') return dialog # depends on [control=['if'], data=[]] if dialog.SUBCLASS_OF_ID == 3374092470: # crc32(b'InputPeer') return types.InputDialogPeer(dialog) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except AttributeError: _raise_cast_fail(dialog, 'InputDialogPeer') # depends on [control=['except'], data=[]] try: return types.InputDialogPeer(get_input_peer(dialog)) # depends on [control=['try'], data=[]] except TypeError: pass # depends on [control=['except'], data=[]] _raise_cast_fail(dialog, 'InputDialogPeer')
def get_usb_controller_count_by_type(self, type_p): """Returns the number of USB controllers of the given type attached to the VM. in type_p of type :class:`USBControllerType` return controllers of type int """ if not isinstance(type_p, USBControllerType): raise TypeError("type_p can only be an instance of type USBControllerType") controllers = self._call("getUSBControllerCountByType", in_p=[type_p]) return controllers
def function[get_usb_controller_count_by_type, parameter[self, type_p]]: constant[Returns the number of USB controllers of the given type attached to the VM. in type_p of type :class:`USBControllerType` return controllers of type int ] if <ast.UnaryOp object at 0x7da20c7cae00> begin[:] <ast.Raise object at 0x7da20c7c9450> variable[controllers] assign[=] call[name[self]._call, parameter[constant[getUSBControllerCountByType]]] return[name[controllers]]
keyword[def] identifier[get_usb_controller_count_by_type] ( identifier[self] , identifier[type_p] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[type_p] , identifier[USBControllerType] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[controllers] = identifier[self] . identifier[_call] ( literal[string] , identifier[in_p] =[ identifier[type_p] ]) keyword[return] identifier[controllers]
def get_usb_controller_count_by_type(self, type_p): """Returns the number of USB controllers of the given type attached to the VM. in type_p of type :class:`USBControllerType` return controllers of type int """ if not isinstance(type_p, USBControllerType): raise TypeError('type_p can only be an instance of type USBControllerType') # depends on [control=['if'], data=[]] controllers = self._call('getUSBControllerCountByType', in_p=[type_p]) return controllers
def ConvertMessage(self, value, message): """Convert a JSON object into a message. Args: value: A JSON object. message: A WKT or regular protocol message to record the data. Raises: ParseError: In case of convert problems. """ message_descriptor = message.DESCRIPTOR full_name = message_descriptor.full_name if _IsWrapperMessage(message_descriptor): self._ConvertWrapperMessage(value, message) elif full_name in _WKTJSONMETHODS: methodcaller(_WKTJSONMETHODS[full_name][1], value, message)(self) else: self._ConvertFieldValuePair(value, message)
def function[ConvertMessage, parameter[self, value, message]]: constant[Convert a JSON object into a message. Args: value: A JSON object. message: A WKT or regular protocol message to record the data. Raises: ParseError: In case of convert problems. ] variable[message_descriptor] assign[=] name[message].DESCRIPTOR variable[full_name] assign[=] name[message_descriptor].full_name if call[name[_IsWrapperMessage], parameter[name[message_descriptor]]] begin[:] call[name[self]._ConvertWrapperMessage, parameter[name[value], name[message]]]
keyword[def] identifier[ConvertMessage] ( identifier[self] , identifier[value] , identifier[message] ): literal[string] identifier[message_descriptor] = identifier[message] . identifier[DESCRIPTOR] identifier[full_name] = identifier[message_descriptor] . identifier[full_name] keyword[if] identifier[_IsWrapperMessage] ( identifier[message_descriptor] ): identifier[self] . identifier[_ConvertWrapperMessage] ( identifier[value] , identifier[message] ) keyword[elif] identifier[full_name] keyword[in] identifier[_WKTJSONMETHODS] : identifier[methodcaller] ( identifier[_WKTJSONMETHODS] [ identifier[full_name] ][ literal[int] ], identifier[value] , identifier[message] )( identifier[self] ) keyword[else] : identifier[self] . identifier[_ConvertFieldValuePair] ( identifier[value] , identifier[message] )
def ConvertMessage(self, value, message): """Convert a JSON object into a message. Args: value: A JSON object. message: A WKT or regular protocol message to record the data. Raises: ParseError: In case of convert problems. """ message_descriptor = message.DESCRIPTOR full_name = message_descriptor.full_name if _IsWrapperMessage(message_descriptor): self._ConvertWrapperMessage(value, message) # depends on [control=['if'], data=[]] elif full_name in _WKTJSONMETHODS: methodcaller(_WKTJSONMETHODS[full_name][1], value, message)(self) # depends on [control=['if'], data=['full_name', '_WKTJSONMETHODS']] else: self._ConvertFieldValuePair(value, message)
def get_calls(self, job_name): ''' Reads file by given name and returns CallEdge array ''' config = self.file_index.get_by_name(job_name).yaml calls = self.get_calls_from_dict(config, from_name=job_name) return calls
def function[get_calls, parameter[self, job_name]]: constant[ Reads file by given name and returns CallEdge array ] variable[config] assign[=] call[name[self].file_index.get_by_name, parameter[name[job_name]]].yaml variable[calls] assign[=] call[name[self].get_calls_from_dict, parameter[name[config]]] return[name[calls]]
keyword[def] identifier[get_calls] ( identifier[self] , identifier[job_name] ): literal[string] identifier[config] = identifier[self] . identifier[file_index] . identifier[get_by_name] ( identifier[job_name] ). identifier[yaml] identifier[calls] = identifier[self] . identifier[get_calls_from_dict] ( identifier[config] , identifier[from_name] = identifier[job_name] ) keyword[return] identifier[calls]
def get_calls(self, job_name): """ Reads file by given name and returns CallEdge array """ config = self.file_index.get_by_name(job_name).yaml calls = self.get_calls_from_dict(config, from_name=job_name) return calls
def thumbnail_url(source, alias): """ Return the thumbnail url for a source file using an aliased set of thumbnail options. If no matching alias is found, returns an empty string. Example usage:: <img src="{{ person.photo|thumbnail_url:'small' }}" alt=""> """ try: thumb = get_thumbnailer(source)[alias] except Exception: return '' return thumb.url
def function[thumbnail_url, parameter[source, alias]]: constant[ Return the thumbnail url for a source file using an aliased set of thumbnail options. If no matching alias is found, returns an empty string. Example usage:: <img src="{{ person.photo|thumbnail_url:'small' }}" alt=""> ] <ast.Try object at 0x7da18f00e6e0> return[name[thumb].url]
keyword[def] identifier[thumbnail_url] ( identifier[source] , identifier[alias] ): literal[string] keyword[try] : identifier[thumb] = identifier[get_thumbnailer] ( identifier[source] )[ identifier[alias] ] keyword[except] identifier[Exception] : keyword[return] literal[string] keyword[return] identifier[thumb] . identifier[url]
def thumbnail_url(source, alias): """ Return the thumbnail url for a source file using an aliased set of thumbnail options. If no matching alias is found, returns an empty string. Example usage:: <img src="{{ person.photo|thumbnail_url:'small' }}" alt=""> """ try: thumb = get_thumbnailer(source)[alias] # depends on [control=['try'], data=[]] except Exception: return '' # depends on [control=['except'], data=[]] return thumb.url
def send(self, message, params=None): """Send service method request :param message: proto message instance (use :meth:`SteamUnifiedMessages.get`) or method name (e.g. ``Player.GetGameBadgeLevels#1``) :type message: :class:`str`, proto message instance :param params: message parameters :type params: :class:`dict` :return: ``jobid`` event identifier :rtype: :class:`str` Listen for ``jobid`` on this object to catch the response. .. note:: If you listen for ``jobid`` on the client instance you will get the encapsulated message """ if isinstance(message, str): message = self.get(message) if message not in self._data: raise ValueError("Supplied message is invalid. Use 'get' method.") if params: proto_fill_from_dict(message, params) capsule = MsgProto(EMsg.ClientServiceMethod) capsule.body.method_name = self._data[message] capsule.body.serialized_method = message.SerializeToString() return self._steam.send_job(capsule)
def function[send, parameter[self, message, params]]: constant[Send service method request :param message: proto message instance (use :meth:`SteamUnifiedMessages.get`) or method name (e.g. ``Player.GetGameBadgeLevels#1``) :type message: :class:`str`, proto message instance :param params: message parameters :type params: :class:`dict` :return: ``jobid`` event identifier :rtype: :class:`str` Listen for ``jobid`` on this object to catch the response. .. note:: If you listen for ``jobid`` on the client instance you will get the encapsulated message ] if call[name[isinstance], parameter[name[message], name[str]]] begin[:] variable[message] assign[=] call[name[self].get, parameter[name[message]]] if compare[name[message] <ast.NotIn object at 0x7da2590d7190> name[self]._data] begin[:] <ast.Raise object at 0x7da1b1d4a410> if name[params] begin[:] call[name[proto_fill_from_dict], parameter[name[message], name[params]]] variable[capsule] assign[=] call[name[MsgProto], parameter[name[EMsg].ClientServiceMethod]] name[capsule].body.method_name assign[=] call[name[self]._data][name[message]] name[capsule].body.serialized_method assign[=] call[name[message].SerializeToString, parameter[]] return[call[name[self]._steam.send_job, parameter[name[capsule]]]]
keyword[def] identifier[send] ( identifier[self] , identifier[message] , identifier[params] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[message] , identifier[str] ): identifier[message] = identifier[self] . identifier[get] ( identifier[message] ) keyword[if] identifier[message] keyword[not] keyword[in] identifier[self] . identifier[_data] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[params] : identifier[proto_fill_from_dict] ( identifier[message] , identifier[params] ) identifier[capsule] = identifier[MsgProto] ( identifier[EMsg] . identifier[ClientServiceMethod] ) identifier[capsule] . identifier[body] . identifier[method_name] = identifier[self] . identifier[_data] [ identifier[message] ] identifier[capsule] . identifier[body] . identifier[serialized_method] = identifier[message] . identifier[SerializeToString] () keyword[return] identifier[self] . identifier[_steam] . identifier[send_job] ( identifier[capsule] )
def send(self, message, params=None): """Send service method request :param message: proto message instance (use :meth:`SteamUnifiedMessages.get`) or method name (e.g. ``Player.GetGameBadgeLevels#1``) :type message: :class:`str`, proto message instance :param params: message parameters :type params: :class:`dict` :return: ``jobid`` event identifier :rtype: :class:`str` Listen for ``jobid`` on this object to catch the response. .. note:: If you listen for ``jobid`` on the client instance you will get the encapsulated message """ if isinstance(message, str): message = self.get(message) # depends on [control=['if'], data=[]] if message not in self._data: raise ValueError("Supplied message is invalid. Use 'get' method.") # depends on [control=['if'], data=[]] if params: proto_fill_from_dict(message, params) # depends on [control=['if'], data=[]] capsule = MsgProto(EMsg.ClientServiceMethod) capsule.body.method_name = self._data[message] capsule.body.serialized_method = message.SerializeToString() return self._steam.send_job(capsule)
def registerDisplay(func): """ Registers a function to the display hook queue to be called on hook. Look at the sys.displayhook documentation for more information. :param func | <callable> """ setup() ref = weakref.ref(func) if ref not in _displayhooks: _displayhooks.append(ref)
def function[registerDisplay, parameter[func]]: constant[ Registers a function to the display hook queue to be called on hook. Look at the sys.displayhook documentation for more information. :param func | <callable> ] call[name[setup], parameter[]] variable[ref] assign[=] call[name[weakref].ref, parameter[name[func]]] if compare[name[ref] <ast.NotIn object at 0x7da2590d7190> name[_displayhooks]] begin[:] call[name[_displayhooks].append, parameter[name[ref]]]
keyword[def] identifier[registerDisplay] ( identifier[func] ): literal[string] identifier[setup] () identifier[ref] = identifier[weakref] . identifier[ref] ( identifier[func] ) keyword[if] identifier[ref] keyword[not] keyword[in] identifier[_displayhooks] : identifier[_displayhooks] . identifier[append] ( identifier[ref] )
def registerDisplay(func): """ Registers a function to the display hook queue to be called on hook. Look at the sys.displayhook documentation for more information. :param func | <callable> """ setup() ref = weakref.ref(func) if ref not in _displayhooks: _displayhooks.append(ref) # depends on [control=['if'], data=['ref', '_displayhooks']]
def _init_map(self): """stub""" self.my_osid_object_form._my_map['attempts'] = \ int(self._attempts_metadata['default_object_values'][0]) self.my_osid_object_form._my_map['weight'] = \ float(self._weight_metadata['default_object_values'][0]) # self.my_osid_object_form._my_map['rerandomize'] = \ # self._rerandomize_metadata['default_object_values'][0] self.my_osid_object_form._my_map['showanswer'] = \ str(self._showanswer_metadata['default_object_values'][0]) self.my_osid_object_form._my_map['markdown'] = \ str(self._markdown_metadata['default_object_values'][0])
def function[_init_map, parameter[self]]: constant[stub] call[name[self].my_osid_object_form._my_map][constant[attempts]] assign[=] call[name[int], parameter[call[call[name[self]._attempts_metadata][constant[default_object_values]]][constant[0]]]] call[name[self].my_osid_object_form._my_map][constant[weight]] assign[=] call[name[float], parameter[call[call[name[self]._weight_metadata][constant[default_object_values]]][constant[0]]]] call[name[self].my_osid_object_form._my_map][constant[showanswer]] assign[=] call[name[str], parameter[call[call[name[self]._showanswer_metadata][constant[default_object_values]]][constant[0]]]] call[name[self].my_osid_object_form._my_map][constant[markdown]] assign[=] call[name[str], parameter[call[call[name[self]._markdown_metadata][constant[default_object_values]]][constant[0]]]]
keyword[def] identifier[_init_map] ( identifier[self] ): literal[string] identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]= identifier[int] ( identifier[self] . identifier[_attempts_metadata] [ literal[string] ][ literal[int] ]) identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]= identifier[float] ( identifier[self] . identifier[_weight_metadata] [ literal[string] ][ literal[int] ]) identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]= identifier[str] ( identifier[self] . identifier[_showanswer_metadata] [ literal[string] ][ literal[int] ]) identifier[self] . identifier[my_osid_object_form] . identifier[_my_map] [ literal[string] ]= identifier[str] ( identifier[self] . identifier[_markdown_metadata] [ literal[string] ][ literal[int] ])
def _init_map(self): """stub""" self.my_osid_object_form._my_map['attempts'] = int(self._attempts_metadata['default_object_values'][0]) self.my_osid_object_form._my_map['weight'] = float(self._weight_metadata['default_object_values'][0]) # self.my_osid_object_form._my_map['rerandomize'] = \ # self._rerandomize_metadata['default_object_values'][0] self.my_osid_object_form._my_map['showanswer'] = str(self._showanswer_metadata['default_object_values'][0]) self.my_osid_object_form._my_map['markdown'] = str(self._markdown_metadata['default_object_values'][0])
def to_dict(self): "Post as a dict, for serializing" d = self.metadata.copy() d['content'] = self.content return d
def function[to_dict, parameter[self]]: constant[Post as a dict, for serializing] variable[d] assign[=] call[name[self].metadata.copy, parameter[]] call[name[d]][constant[content]] assign[=] name[self].content return[name[d]]
keyword[def] identifier[to_dict] ( identifier[self] ): literal[string] identifier[d] = identifier[self] . identifier[metadata] . identifier[copy] () identifier[d] [ literal[string] ]= identifier[self] . identifier[content] keyword[return] identifier[d]
def to_dict(self): """Post as a dict, for serializing""" d = self.metadata.copy() d['content'] = self.content return d
def get_search_url(self): """ resolve the search url no matter if local or remote. :return: url or exception """ if self.is_remote: return self.url return reverse('search_api', args=[self.slug])
def function[get_search_url, parameter[self]]: constant[ resolve the search url no matter if local or remote. :return: url or exception ] if name[self].is_remote begin[:] return[name[self].url] return[call[name[reverse], parameter[constant[search_api]]]]
keyword[def] identifier[get_search_url] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[is_remote] : keyword[return] identifier[self] . identifier[url] keyword[return] identifier[reverse] ( literal[string] , identifier[args] =[ identifier[self] . identifier[slug] ])
def get_search_url(self): """ resolve the search url no matter if local or remote. :return: url or exception """ if self.is_remote: return self.url # depends on [control=['if'], data=[]] return reverse('search_api', args=[self.slug])
def _fetch_stock_data(self, stock_list): """因为 timekline 的返回没有带对应的股票代码,所以要手动带上""" res = super()._fetch_stock_data(stock_list) with_stock = [] for stock, resp in zip(stock_list, res): if resp is not None: with_stock.append((stock, resp)) return with_stock
def function[_fetch_stock_data, parameter[self, stock_list]]: constant[因为 timekline 的返回没有带对应的股票代码,所以要手动带上] variable[res] assign[=] call[call[name[super], parameter[]]._fetch_stock_data, parameter[name[stock_list]]] variable[with_stock] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20e957ac0>, <ast.Name object at 0x7da20e9577c0>]]] in starred[call[name[zip], parameter[name[stock_list], name[res]]]] begin[:] if compare[name[resp] is_not constant[None]] begin[:] call[name[with_stock].append, parameter[tuple[[<ast.Name object at 0x7da18f58de10>, <ast.Name object at 0x7da18f58d5a0>]]]] return[name[with_stock]]
keyword[def] identifier[_fetch_stock_data] ( identifier[self] , identifier[stock_list] ): literal[string] identifier[res] = identifier[super] (). identifier[_fetch_stock_data] ( identifier[stock_list] ) identifier[with_stock] =[] keyword[for] identifier[stock] , identifier[resp] keyword[in] identifier[zip] ( identifier[stock_list] , identifier[res] ): keyword[if] identifier[resp] keyword[is] keyword[not] keyword[None] : identifier[with_stock] . identifier[append] (( identifier[stock] , identifier[resp] )) keyword[return] identifier[with_stock]
def _fetch_stock_data(self, stock_list): """因为 timekline 的返回没有带对应的股票代码,所以要手动带上""" res = super()._fetch_stock_data(stock_list) with_stock = [] for (stock, resp) in zip(stock_list, res): if resp is not None: with_stock.append((stock, resp)) # depends on [control=['if'], data=['resp']] # depends on [control=['for'], data=[]] return with_stock
def resampled( chunksize_bytes=DEFAULT_CHUNK_SIZE, resample_to=SR44100(), store_resampled=False): """ Create a basic processing pipeline that can resample all incoming audio to a normalized sampling rate for downstream processing, and store a convenient, compressed version for playback :param chunksize_bytes: The number of bytes from the raw stream to process at once :param resample_to: The new, normalized sampling rate :return: A simple processing pipeline """ class Resampled(BaseModel): meta = JSONFeature( MetaData, store=True, encoder=AudioMetaDataEncoder) raw = ByteStreamFeature( ByteStream, chunksize=chunksize_bytes, needs=meta, store=False) ogg = OggVorbisFeature( OggVorbis, needs=raw, store=True) pcm = AudioSamplesFeature( AudioStream, needs=raw, store=False) resampled = AudioSamplesFeature( Resampler, needs=pcm, samplerate=resample_to, store=store_resampled) return Resampled
def function[resampled, parameter[chunksize_bytes, resample_to, store_resampled]]: constant[ Create a basic processing pipeline that can resample all incoming audio to a normalized sampling rate for downstream processing, and store a convenient, compressed version for playback :param chunksize_bytes: The number of bytes from the raw stream to process at once :param resample_to: The new, normalized sampling rate :return: A simple processing pipeline ] class class[Resampled, parameter[]] begin[:] variable[meta] assign[=] call[name[JSONFeature], parameter[name[MetaData]]] variable[raw] assign[=] call[name[ByteStreamFeature], parameter[name[ByteStream]]] variable[ogg] assign[=] call[name[OggVorbisFeature], parameter[name[OggVorbis]]] variable[pcm] assign[=] call[name[AudioSamplesFeature], parameter[name[AudioStream]]] variable[resampled] assign[=] call[name[AudioSamplesFeature], parameter[name[Resampler]]] return[name[Resampled]]
keyword[def] identifier[resampled] ( identifier[chunksize_bytes] = identifier[DEFAULT_CHUNK_SIZE] , identifier[resample_to] = identifier[SR44100] (), identifier[store_resampled] = keyword[False] ): literal[string] keyword[class] identifier[Resampled] ( identifier[BaseModel] ): identifier[meta] = identifier[JSONFeature] ( identifier[MetaData] , identifier[store] = keyword[True] , identifier[encoder] = identifier[AudioMetaDataEncoder] ) identifier[raw] = identifier[ByteStreamFeature] ( identifier[ByteStream] , identifier[chunksize] = identifier[chunksize_bytes] , identifier[needs] = identifier[meta] , identifier[store] = keyword[False] ) identifier[ogg] = identifier[OggVorbisFeature] ( identifier[OggVorbis] , identifier[needs] = identifier[raw] , identifier[store] = keyword[True] ) identifier[pcm] = identifier[AudioSamplesFeature] ( identifier[AudioStream] , identifier[needs] = identifier[raw] , identifier[store] = keyword[False] ) identifier[resampled] = identifier[AudioSamplesFeature] ( identifier[Resampler] , identifier[needs] = identifier[pcm] , identifier[samplerate] = identifier[resample_to] , identifier[store] = identifier[store_resampled] ) keyword[return] identifier[Resampled]
def resampled(chunksize_bytes=DEFAULT_CHUNK_SIZE, resample_to=SR44100(), store_resampled=False): """ Create a basic processing pipeline that can resample all incoming audio to a normalized sampling rate for downstream processing, and store a convenient, compressed version for playback :param chunksize_bytes: The number of bytes from the raw stream to process at once :param resample_to: The new, normalized sampling rate :return: A simple processing pipeline """ class Resampled(BaseModel): meta = JSONFeature(MetaData, store=True, encoder=AudioMetaDataEncoder) raw = ByteStreamFeature(ByteStream, chunksize=chunksize_bytes, needs=meta, store=False) ogg = OggVorbisFeature(OggVorbis, needs=raw, store=True) pcm = AudioSamplesFeature(AudioStream, needs=raw, store=False) resampled = AudioSamplesFeature(Resampler, needs=pcm, samplerate=resample_to, store=store_resampled) return Resampled
def to_json(self, filename=None, encoding="utf-8", errors="strict", multiline=False, **json_kwargs): """ Transform the BoxList object into a JSON string. :param filename: If provided will save to file :param encoding: File encoding :param errors: How to handle encoding errors :param multiline: Put each item in list onto it's own line :param json_kwargs: additional arguments to pass to json.dump(s) :return: string of JSON or return of `json.dump` """ if filename and multiline: lines = [_to_json(item, filename=False, encoding=encoding, errors=errors, **json_kwargs) for item in self] with open(filename, 'w', encoding=encoding, errors=errors) as f: f.write("\n".join(lines).decode('utf-8') if sys.version_info < (3, 0) else "\n".join(lines)) else: return _to_json(self.to_list(), filename=filename, encoding=encoding, errors=errors, **json_kwargs)
def function[to_json, parameter[self, filename, encoding, errors, multiline]]: constant[ Transform the BoxList object into a JSON string. :param filename: If provided will save to file :param encoding: File encoding :param errors: How to handle encoding errors :param multiline: Put each item in list onto it's own line :param json_kwargs: additional arguments to pass to json.dump(s) :return: string of JSON or return of `json.dump` ] if <ast.BoolOp object at 0x7da20e962e30> begin[:] variable[lines] assign[=] <ast.ListComp object at 0x7da20e961c90> with call[name[open], parameter[name[filename], constant[w]]] begin[:] call[name[f].write, parameter[<ast.IfExp object at 0x7da20e9632e0>]]
keyword[def] identifier[to_json] ( identifier[self] , identifier[filename] = keyword[None] , identifier[encoding] = literal[string] , identifier[errors] = literal[string] , identifier[multiline] = keyword[False] ,** identifier[json_kwargs] ): literal[string] keyword[if] identifier[filename] keyword[and] identifier[multiline] : identifier[lines] =[ identifier[_to_json] ( identifier[item] , identifier[filename] = keyword[False] , identifier[encoding] = identifier[encoding] , identifier[errors] = identifier[errors] ,** identifier[json_kwargs] ) keyword[for] identifier[item] keyword[in] identifier[self] ] keyword[with] identifier[open] ( identifier[filename] , literal[string] , identifier[encoding] = identifier[encoding] , identifier[errors] = identifier[errors] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( literal[string] . identifier[join] ( identifier[lines] ). identifier[decode] ( literal[string] ) keyword[if] identifier[sys] . identifier[version_info] <( literal[int] , literal[int] ) keyword[else] literal[string] . identifier[join] ( identifier[lines] )) keyword[else] : keyword[return] identifier[_to_json] ( identifier[self] . identifier[to_list] (), identifier[filename] = identifier[filename] , identifier[encoding] = identifier[encoding] , identifier[errors] = identifier[errors] ,** identifier[json_kwargs] )
def to_json(self, filename=None, encoding='utf-8', errors='strict', multiline=False, **json_kwargs): """ Transform the BoxList object into a JSON string. :param filename: If provided will save to file :param encoding: File encoding :param errors: How to handle encoding errors :param multiline: Put each item in list onto it's own line :param json_kwargs: additional arguments to pass to json.dump(s) :return: string of JSON or return of `json.dump` """ if filename and multiline: lines = [_to_json(item, filename=False, encoding=encoding, errors=errors, **json_kwargs) for item in self] with open(filename, 'w', encoding=encoding, errors=errors) as f: f.write('\n'.join(lines).decode('utf-8') if sys.version_info < (3, 0) else '\n'.join(lines)) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] else: return _to_json(self.to_list(), filename=filename, encoding=encoding, errors=errors, **json_kwargs)
def delete_core_elements_of_models(models, raise_exceptions=True, recursive=True, destroy=True, force=False): """Deletes all respective core elements for the given models Calls the :func:`delete_core_element_of_model` for all given models. :param models: A single model or a list of models of respective core element to be deleted :param bool raise_exceptions: Whether to raise exceptions or log error messages in case of an error :param bool destroy: Access the destroy flag of the core remove methods :return: The number of models that were successfully deleted """ # If only one model is given, make a list out of it if not hasattr(models, '__iter__'): models = [models] return sum(delete_core_element_of_model(model, raise_exceptions, recursive=recursive, destroy=destroy, force=force) for model in models)
def function[delete_core_elements_of_models, parameter[models, raise_exceptions, recursive, destroy, force]]: constant[Deletes all respective core elements for the given models Calls the :func:`delete_core_element_of_model` for all given models. :param models: A single model or a list of models of respective core element to be deleted :param bool raise_exceptions: Whether to raise exceptions or log error messages in case of an error :param bool destroy: Access the destroy flag of the core remove methods :return: The number of models that were successfully deleted ] if <ast.UnaryOp object at 0x7da18bc73f10> begin[:] variable[models] assign[=] list[[<ast.Name object at 0x7da18bc705e0>]] return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18bc70fd0>]]]
keyword[def] identifier[delete_core_elements_of_models] ( identifier[models] , identifier[raise_exceptions] = keyword[True] , identifier[recursive] = keyword[True] , identifier[destroy] = keyword[True] , identifier[force] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[models] , literal[string] ): identifier[models] =[ identifier[models] ] keyword[return] identifier[sum] ( identifier[delete_core_element_of_model] ( identifier[model] , identifier[raise_exceptions] , identifier[recursive] = identifier[recursive] , identifier[destroy] = identifier[destroy] , identifier[force] = identifier[force] ) keyword[for] identifier[model] keyword[in] identifier[models] )
def delete_core_elements_of_models(models, raise_exceptions=True, recursive=True, destroy=True, force=False): """Deletes all respective core elements for the given models Calls the :func:`delete_core_element_of_model` for all given models. :param models: A single model or a list of models of respective core element to be deleted :param bool raise_exceptions: Whether to raise exceptions or log error messages in case of an error :param bool destroy: Access the destroy flag of the core remove methods :return: The number of models that were successfully deleted """ # If only one model is given, make a list out of it if not hasattr(models, '__iter__'): models = [models] # depends on [control=['if'], data=[]] return sum((delete_core_element_of_model(model, raise_exceptions, recursive=recursive, destroy=destroy, force=force) for model in models))
def draw_scatter_plot(world, size, target): """ This function can be used on a generic canvas (either an image to save on disk or a canvas part of a GUI) """ #Find min and max values of humidity and temperature on land so we can #normalize temperature and humidity to the chart humid = numpy.ma.masked_array(world.layers['humidity'].data, mask=world.layers['ocean'].data) temp = numpy.ma.masked_array(world.layers['temperature'].data, mask=world.layers['ocean'].data) min_humidity = humid.min() max_humidity = humid.max() min_temperature = temp.min() max_temperature = temp.max() temperature_delta = max_temperature - min_temperature humidity_delta = max_humidity - min_humidity #set all pixels white for y in range(0, size): for x in range(0, size): target.set_pixel(x, y, (255, 255, 255, 255)) #fill in 'bad' boxes with grey h_values = ['62', '50', '37', '25', '12'] t_values = [ 0, 1, 2, 3, 5 ] for loop in range(0, 5): h_min = (size - 1) * ((world.layers['humidity'].quantiles[h_values[loop]] - min_humidity) / humidity_delta) if loop != 4: h_max = (size - 1) * ((world.layers['humidity'].quantiles[h_values[loop + 1]] - min_humidity) / humidity_delta) else: h_max = size v_max = (size - 1) * ((world.layers['temperature'].thresholds[t_values[loop]][1] - min_temperature) / temperature_delta) if h_min < 0: h_min = 0 if h_max > size: h_max = size if v_max < 0: v_max = 0 if v_max > (size - 1): v_max = size - 1 if h_max > 0 and h_min < size and v_max > 0: for y in range(int(h_min), int(h_max)): for x in range(0, int(v_max)): target.set_pixel(x, (size - 1) - y, (128, 128, 128, 255)) #draw lines based on thresholds for t in range(0, 6): v = (size - 1) * ((world.layers['temperature'].thresholds[t][1] - min_temperature) / temperature_delta) if 0 < v < size: for y in range(0, size): target.set_pixel(int(v), (size - 1) - y, (0, 0, 0, 255)) ranges = ['87', '75', '62', '50', '37', '25', '12'] for p in ranges: h = (size - 1) * ((world.layers['humidity'].quantiles[p] - min_humidity) / humidity_delta) if 0 < h < size: for x in range(0, size): target.set_pixel(x, (size - 1) - int(h), (0, 0, 0, 255)) #draw gamma curve curve_gamma = world.gamma_curve curve_bonus = world.curve_offset for x in range(0, size): y = (size - 1) * ((numpy.power((float(x) / (size - 1)), curve_gamma) * (1 - curve_bonus)) + curve_bonus) target.set_pixel(x, (size - 1) - int(y), (255, 0, 0, 255)) #examine all cells in the map and if it is land get the temperature and #humidity for the cell. for y in range(world.height): for x in range(world.width): if world.is_land((x, y)): t = world.temperature_at((x, y)) p = world.humidity_at((x, y)) #get red and blue values depending on temperature and humidity if world.is_temperature_polar((x, y)): r = 0 elif world.is_temperature_alpine((x, y)): r = 42 elif world.is_temperature_boreal((x, y)): r = 85 elif world.is_temperature_cool((x, y)): r = 128 elif world.is_temperature_warm((x, y)): r = 170 elif world.is_temperature_subtropical((x, y)): r = 213 elif world.is_temperature_tropical((x, y)): r = 255 if world.is_humidity_superarid((x, y)): b = 32 elif world.is_humidity_perarid((x, y)): b = 64 elif world.is_humidity_arid((x, y)): b = 96 elif world.is_humidity_semiarid((x, y)): b = 128 elif world.is_humidity_subhumid((x, y)): b = 160 elif world.is_humidity_humid((x, y)): b = 192 elif world.is_humidity_perhumid((x, y)): b = 224 elif world.is_humidity_superhumid((x, y)): b = 255 #calculate x and y position based on normalized temperature and humidity nx = (size - 1) * ((t - min_temperature) / temperature_delta) ny = (size - 1) * ((p - min_humidity) / humidity_delta) target.set_pixel(int(nx), (size - 1) - int(ny), (r, 128, b, 255))
def function[draw_scatter_plot, parameter[world, size, target]]: constant[ This function can be used on a generic canvas (either an image to save on disk or a canvas part of a GUI) ] variable[humid] assign[=] call[name[numpy].ma.masked_array, parameter[call[name[world].layers][constant[humidity]].data]] variable[temp] assign[=] call[name[numpy].ma.masked_array, parameter[call[name[world].layers][constant[temperature]].data]] variable[min_humidity] assign[=] call[name[humid].min, parameter[]] variable[max_humidity] assign[=] call[name[humid].max, parameter[]] variable[min_temperature] assign[=] call[name[temp].min, parameter[]] variable[max_temperature] assign[=] call[name[temp].max, parameter[]] variable[temperature_delta] assign[=] binary_operation[name[max_temperature] - name[min_temperature]] variable[humidity_delta] assign[=] binary_operation[name[max_humidity] - name[min_humidity]] for taget[name[y]] in starred[call[name[range], parameter[constant[0], name[size]]]] begin[:] for taget[name[x]] in starred[call[name[range], parameter[constant[0], name[size]]]] begin[:] call[name[target].set_pixel, parameter[name[x], name[y], tuple[[<ast.Constant object at 0x7da1b06aee90>, <ast.Constant object at 0x7da1b06aee60>, <ast.Constant object at 0x7da1b06aee30>, <ast.Constant object at 0x7da1b06aee00>]]]] variable[h_values] assign[=] list[[<ast.Constant object at 0x7da1b06aece0>, <ast.Constant object at 0x7da1b06aecb0>, <ast.Constant object at 0x7da1b06aec80>, <ast.Constant object at 0x7da1b06aec50>, <ast.Constant object at 0x7da1b06aec20>]] variable[t_values] assign[=] list[[<ast.Constant object at 0x7da1b06aeb30>, <ast.Constant object at 0x7da1b06aeb00>, <ast.Constant object at 0x7da1b06aead0>, <ast.Constant object at 0x7da1b06aeaa0>, <ast.Constant object at 0x7da1b06aea70>]] for taget[name[loop]] in starred[call[name[range], parameter[constant[0], constant[5]]]] begin[:] variable[h_min] assign[=] binary_operation[binary_operation[name[size] - constant[1]] * binary_operation[binary_operation[call[call[name[world].layers][constant[humidity]].quantiles][call[name[h_values]][name[loop]]] - name[min_humidity]] / name[humidity_delta]]] if compare[name[loop] not_equal[!=] constant[4]] begin[:] variable[h_max] assign[=] binary_operation[binary_operation[name[size] - constant[1]] * binary_operation[binary_operation[call[call[name[world].layers][constant[humidity]].quantiles][call[name[h_values]][binary_operation[name[loop] + constant[1]]]] - name[min_humidity]] / name[humidity_delta]]] variable[v_max] assign[=] binary_operation[binary_operation[name[size] - constant[1]] * binary_operation[binary_operation[call[call[call[name[world].layers][constant[temperature]].thresholds][call[name[t_values]][name[loop]]]][constant[1]] - name[min_temperature]] / name[temperature_delta]]] if compare[name[h_min] less[<] constant[0]] begin[:] variable[h_min] assign[=] constant[0] if compare[name[h_max] greater[>] name[size]] begin[:] variable[h_max] assign[=] name[size] if compare[name[v_max] less[<] constant[0]] begin[:] variable[v_max] assign[=] constant[0] if compare[name[v_max] greater[>] binary_operation[name[size] - constant[1]]] begin[:] variable[v_max] assign[=] binary_operation[name[size] - constant[1]] if <ast.BoolOp object at 0x7da1b067bac0> begin[:] for taget[name[y]] in starred[call[name[range], parameter[call[name[int], parameter[name[h_min]]], call[name[int], parameter[name[h_max]]]]]] begin[:] for taget[name[x]] in starred[call[name[range], parameter[constant[0], call[name[int], parameter[name[v_max]]]]]] begin[:] call[name[target].set_pixel, parameter[name[x], binary_operation[binary_operation[name[size] - constant[1]] - name[y]], tuple[[<ast.Constant object at 0x7da1b06791e0>, <ast.Constant object at 0x7da1b06795a0>, <ast.Constant object at 0x7da1b067b070>, <ast.Constant object at 0x7da1b06792d0>]]]] for taget[name[t]] in starred[call[name[range], parameter[constant[0], constant[6]]]] begin[:] variable[v] assign[=] binary_operation[binary_operation[name[size] - constant[1]] * binary_operation[binary_operation[call[call[call[name[world].layers][constant[temperature]].thresholds][name[t]]][constant[1]] - name[min_temperature]] / name[temperature_delta]]] if compare[constant[0] less[<] name[v]] begin[:] for taget[name[y]] in starred[call[name[range], parameter[constant[0], name[size]]]] begin[:] call[name[target].set_pixel, parameter[call[name[int], parameter[name[v]]], binary_operation[binary_operation[name[size] - constant[1]] - name[y]], tuple[[<ast.Constant object at 0x7da1b067a1d0>, <ast.Constant object at 0x7da1b067b760>, <ast.Constant object at 0x7da1b067b2e0>, <ast.Constant object at 0x7da1b0679090>]]]] variable[ranges] assign[=] list[[<ast.Constant object at 0x7da1b080ba00>, <ast.Constant object at 0x7da1b080ad40>, <ast.Constant object at 0x7da1b080ae00>, <ast.Constant object at 0x7da1b080a290>, <ast.Constant object at 0x7da1b080a5f0>, <ast.Constant object at 0x7da1b080b9a0>, <ast.Constant object at 0x7da1b080a8c0>]] for taget[name[p]] in starred[name[ranges]] begin[:] variable[h] assign[=] binary_operation[binary_operation[name[size] - constant[1]] * binary_operation[binary_operation[call[call[name[world].layers][constant[humidity]].quantiles][name[p]] - name[min_humidity]] / name[humidity_delta]]] if compare[constant[0] less[<] name[h]] begin[:] for taget[name[x]] in starred[call[name[range], parameter[constant[0], name[size]]]] begin[:] call[name[target].set_pixel, parameter[name[x], binary_operation[binary_operation[name[size] - constant[1]] - call[name[int], parameter[name[h]]]], tuple[[<ast.Constant object at 0x7da1b07b9960>, <ast.Constant object at 0x7da1b07b82e0>, <ast.Constant object at 0x7da1b07bb040>, <ast.Constant object at 0x7da1b07bb520>]]]] variable[curve_gamma] assign[=] name[world].gamma_curve variable[curve_bonus] assign[=] name[world].curve_offset for taget[name[x]] in starred[call[name[range], parameter[constant[0], name[size]]]] begin[:] variable[y] assign[=] binary_operation[binary_operation[name[size] - constant[1]] * binary_operation[binary_operation[call[name[numpy].power, parameter[binary_operation[call[name[float], parameter[name[x]]] / binary_operation[name[size] - constant[1]]], name[curve_gamma]]] * binary_operation[constant[1] - name[curve_bonus]]] + name[curve_bonus]]] call[name[target].set_pixel, parameter[name[x], binary_operation[binary_operation[name[size] - constant[1]] - call[name[int], parameter[name[y]]]], tuple[[<ast.Constant object at 0x7da1b07b98a0>, <ast.Constant object at 0x7da1b07baa70>, <ast.Constant object at 0x7da1b07ba890>, <ast.Constant object at 0x7da1b07b9a80>]]]] for taget[name[y]] in starred[call[name[range], parameter[name[world].height]]] begin[:] for taget[name[x]] in starred[call[name[range], parameter[name[world].width]]] begin[:] if call[name[world].is_land, parameter[tuple[[<ast.Name object at 0x7da1b07b97b0>, <ast.Name object at 0x7da1b07bab60>]]]] begin[:] variable[t] assign[=] call[name[world].temperature_at, parameter[tuple[[<ast.Name object at 0x7da1b07baf80>, <ast.Name object at 0x7da1b07b9930>]]]] variable[p] assign[=] call[name[world].humidity_at, parameter[tuple[[<ast.Name object at 0x7da1b07bbeb0>, <ast.Name object at 0x7da1b07b9e10>]]]] if call[name[world].is_temperature_polar, parameter[tuple[[<ast.Name object at 0x7da1b07b9d80>, <ast.Name object at 0x7da1b07ba410>]]]] begin[:] variable[r] assign[=] constant[0] if call[name[world].is_humidity_superarid, parameter[tuple[[<ast.Name object at 0x7da1b07aa470>, <ast.Name object at 0x7da1b07a9720>]]]] begin[:] variable[b] assign[=] constant[32] variable[nx] assign[=] binary_operation[binary_operation[name[size] - constant[1]] * binary_operation[binary_operation[name[t] - name[min_temperature]] / name[temperature_delta]]] variable[ny] assign[=] binary_operation[binary_operation[name[size] - constant[1]] * binary_operation[binary_operation[name[p] - name[min_humidity]] / name[humidity_delta]]] call[name[target].set_pixel, parameter[call[name[int], parameter[name[nx]]], binary_operation[binary_operation[name[size] - constant[1]] - call[name[int], parameter[name[ny]]]], tuple[[<ast.Name object at 0x7da1b07cce50>, <ast.Constant object at 0x7da1b07ce230>, <ast.Name object at 0x7da1b07cf4f0>, <ast.Constant object at 0x7da1b07cdba0>]]]]
keyword[def] identifier[draw_scatter_plot] ( identifier[world] , identifier[size] , identifier[target] ): literal[string] identifier[humid] = identifier[numpy] . identifier[ma] . identifier[masked_array] ( identifier[world] . identifier[layers] [ literal[string] ]. identifier[data] , identifier[mask] = identifier[world] . identifier[layers] [ literal[string] ]. identifier[data] ) identifier[temp] = identifier[numpy] . identifier[ma] . identifier[masked_array] ( identifier[world] . identifier[layers] [ literal[string] ]. identifier[data] , identifier[mask] = identifier[world] . identifier[layers] [ literal[string] ]. identifier[data] ) identifier[min_humidity] = identifier[humid] . identifier[min] () identifier[max_humidity] = identifier[humid] . identifier[max] () identifier[min_temperature] = identifier[temp] . identifier[min] () identifier[max_temperature] = identifier[temp] . identifier[max] () identifier[temperature_delta] = identifier[max_temperature] - identifier[min_temperature] identifier[humidity_delta] = identifier[max_humidity] - identifier[min_humidity] keyword[for] identifier[y] keyword[in] identifier[range] ( literal[int] , identifier[size] ): keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[size] ): identifier[target] . identifier[set_pixel] ( identifier[x] , identifier[y] ,( literal[int] , literal[int] , literal[int] , literal[int] )) identifier[h_values] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[t_values] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ] keyword[for] identifier[loop] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[h_min] =( identifier[size] - literal[int] )*(( identifier[world] . identifier[layers] [ literal[string] ]. identifier[quantiles] [ identifier[h_values] [ identifier[loop] ]]- identifier[min_humidity] )/ identifier[humidity_delta] ) keyword[if] identifier[loop] != literal[int] : identifier[h_max] =( identifier[size] - literal[int] )*(( identifier[world] . identifier[layers] [ literal[string] ]. identifier[quantiles] [ identifier[h_values] [ identifier[loop] + literal[int] ]]- identifier[min_humidity] )/ identifier[humidity_delta] ) keyword[else] : identifier[h_max] = identifier[size] identifier[v_max] =( identifier[size] - literal[int] )*(( identifier[world] . identifier[layers] [ literal[string] ]. identifier[thresholds] [ identifier[t_values] [ identifier[loop] ]][ literal[int] ]- identifier[min_temperature] )/ identifier[temperature_delta] ) keyword[if] identifier[h_min] < literal[int] : identifier[h_min] = literal[int] keyword[if] identifier[h_max] > identifier[size] : identifier[h_max] = identifier[size] keyword[if] identifier[v_max] < literal[int] : identifier[v_max] = literal[int] keyword[if] identifier[v_max] >( identifier[size] - literal[int] ): identifier[v_max] = identifier[size] - literal[int] keyword[if] identifier[h_max] > literal[int] keyword[and] identifier[h_min] < identifier[size] keyword[and] identifier[v_max] > literal[int] : keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[int] ( identifier[h_min] ), identifier[int] ( identifier[h_max] )): keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[int] ( identifier[v_max] )): identifier[target] . identifier[set_pixel] ( identifier[x] ,( identifier[size] - literal[int] )- identifier[y] ,( literal[int] , literal[int] , literal[int] , literal[int] )) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[v] =( identifier[size] - literal[int] )*(( identifier[world] . identifier[layers] [ literal[string] ]. identifier[thresholds] [ identifier[t] ][ literal[int] ]- identifier[min_temperature] )/ identifier[temperature_delta] ) keyword[if] literal[int] < identifier[v] < identifier[size] : keyword[for] identifier[y] keyword[in] identifier[range] ( literal[int] , identifier[size] ): identifier[target] . identifier[set_pixel] ( identifier[int] ( identifier[v] ),( identifier[size] - literal[int] )- identifier[y] ,( literal[int] , literal[int] , literal[int] , literal[int] )) identifier[ranges] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[p] keyword[in] identifier[ranges] : identifier[h] =( identifier[size] - literal[int] )*(( identifier[world] . identifier[layers] [ literal[string] ]. identifier[quantiles] [ identifier[p] ]- identifier[min_humidity] )/ identifier[humidity_delta] ) keyword[if] literal[int] < identifier[h] < identifier[size] : keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[size] ): identifier[target] . identifier[set_pixel] ( identifier[x] ,( identifier[size] - literal[int] )- identifier[int] ( identifier[h] ),( literal[int] , literal[int] , literal[int] , literal[int] )) identifier[curve_gamma] = identifier[world] . identifier[gamma_curve] identifier[curve_bonus] = identifier[world] . identifier[curve_offset] keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , identifier[size] ): identifier[y] =( identifier[size] - literal[int] )*(( identifier[numpy] . identifier[power] (( identifier[float] ( identifier[x] )/( identifier[size] - literal[int] )), identifier[curve_gamma] )*( literal[int] - identifier[curve_bonus] ))+ identifier[curve_bonus] ) identifier[target] . identifier[set_pixel] ( identifier[x] ,( identifier[size] - literal[int] )- identifier[int] ( identifier[y] ),( literal[int] , literal[int] , literal[int] , literal[int] )) keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[world] . identifier[height] ): keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[world] . identifier[width] ): keyword[if] identifier[world] . identifier[is_land] (( identifier[x] , identifier[y] )): identifier[t] = identifier[world] . identifier[temperature_at] (( identifier[x] , identifier[y] )) identifier[p] = identifier[world] . identifier[humidity_at] (( identifier[x] , identifier[y] )) keyword[if] identifier[world] . identifier[is_temperature_polar] (( identifier[x] , identifier[y] )): identifier[r] = literal[int] keyword[elif] identifier[world] . identifier[is_temperature_alpine] (( identifier[x] , identifier[y] )): identifier[r] = literal[int] keyword[elif] identifier[world] . identifier[is_temperature_boreal] (( identifier[x] , identifier[y] )): identifier[r] = literal[int] keyword[elif] identifier[world] . identifier[is_temperature_cool] (( identifier[x] , identifier[y] )): identifier[r] = literal[int] keyword[elif] identifier[world] . identifier[is_temperature_warm] (( identifier[x] , identifier[y] )): identifier[r] = literal[int] keyword[elif] identifier[world] . identifier[is_temperature_subtropical] (( identifier[x] , identifier[y] )): identifier[r] = literal[int] keyword[elif] identifier[world] . identifier[is_temperature_tropical] (( identifier[x] , identifier[y] )): identifier[r] = literal[int] keyword[if] identifier[world] . identifier[is_humidity_superarid] (( identifier[x] , identifier[y] )): identifier[b] = literal[int] keyword[elif] identifier[world] . identifier[is_humidity_perarid] (( identifier[x] , identifier[y] )): identifier[b] = literal[int] keyword[elif] identifier[world] . identifier[is_humidity_arid] (( identifier[x] , identifier[y] )): identifier[b] = literal[int] keyword[elif] identifier[world] . identifier[is_humidity_semiarid] (( identifier[x] , identifier[y] )): identifier[b] = literal[int] keyword[elif] identifier[world] . identifier[is_humidity_subhumid] (( identifier[x] , identifier[y] )): identifier[b] = literal[int] keyword[elif] identifier[world] . identifier[is_humidity_humid] (( identifier[x] , identifier[y] )): identifier[b] = literal[int] keyword[elif] identifier[world] . identifier[is_humidity_perhumid] (( identifier[x] , identifier[y] )): identifier[b] = literal[int] keyword[elif] identifier[world] . identifier[is_humidity_superhumid] (( identifier[x] , identifier[y] )): identifier[b] = literal[int] identifier[nx] =( identifier[size] - literal[int] )*(( identifier[t] - identifier[min_temperature] )/ identifier[temperature_delta] ) identifier[ny] =( identifier[size] - literal[int] )*(( identifier[p] - identifier[min_humidity] )/ identifier[humidity_delta] ) identifier[target] . identifier[set_pixel] ( identifier[int] ( identifier[nx] ),( identifier[size] - literal[int] )- identifier[int] ( identifier[ny] ),( identifier[r] , literal[int] , identifier[b] , literal[int] ))
def draw_scatter_plot(world, size, target): """ This function can be used on a generic canvas (either an image to save on disk or a canvas part of a GUI) """ #Find min and max values of humidity and temperature on land so we can #normalize temperature and humidity to the chart humid = numpy.ma.masked_array(world.layers['humidity'].data, mask=world.layers['ocean'].data) temp = numpy.ma.masked_array(world.layers['temperature'].data, mask=world.layers['ocean'].data) min_humidity = humid.min() max_humidity = humid.max() min_temperature = temp.min() max_temperature = temp.max() temperature_delta = max_temperature - min_temperature humidity_delta = max_humidity - min_humidity #set all pixels white for y in range(0, size): for x in range(0, size): target.set_pixel(x, y, (255, 255, 255, 255)) # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=['y']] #fill in 'bad' boxes with grey h_values = ['62', '50', '37', '25', '12'] t_values = [0, 1, 2, 3, 5] for loop in range(0, 5): h_min = (size - 1) * ((world.layers['humidity'].quantiles[h_values[loop]] - min_humidity) / humidity_delta) if loop != 4: h_max = (size - 1) * ((world.layers['humidity'].quantiles[h_values[loop + 1]] - min_humidity) / humidity_delta) # depends on [control=['if'], data=['loop']] else: h_max = size v_max = (size - 1) * ((world.layers['temperature'].thresholds[t_values[loop]][1] - min_temperature) / temperature_delta) if h_min < 0: h_min = 0 # depends on [control=['if'], data=['h_min']] if h_max > size: h_max = size # depends on [control=['if'], data=['h_max', 'size']] if v_max < 0: v_max = 0 # depends on [control=['if'], data=['v_max']] if v_max > size - 1: v_max = size - 1 # depends on [control=['if'], data=['v_max']] if h_max > 0 and h_min < size and (v_max > 0): for y in range(int(h_min), int(h_max)): for x in range(0, int(v_max)): target.set_pixel(x, size - 1 - y, (128, 128, 128, 255)) # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=['y']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['loop']] #draw lines based on thresholds for t in range(0, 6): v = (size - 1) * ((world.layers['temperature'].thresholds[t][1] - min_temperature) / temperature_delta) if 0 < v < size: for y in range(0, size): target.set_pixel(int(v), size - 1 - y, (0, 0, 0, 255)) # depends on [control=['for'], data=['y']] # depends on [control=['if'], data=['v']] # depends on [control=['for'], data=['t']] ranges = ['87', '75', '62', '50', '37', '25', '12'] for p in ranges: h = (size - 1) * ((world.layers['humidity'].quantiles[p] - min_humidity) / humidity_delta) if 0 < h < size: for x in range(0, size): target.set_pixel(x, size - 1 - int(h), (0, 0, 0, 255)) # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=['h']] # depends on [control=['for'], data=['p']] #draw gamma curve curve_gamma = world.gamma_curve curve_bonus = world.curve_offset for x in range(0, size): y = (size - 1) * (numpy.power(float(x) / (size - 1), curve_gamma) * (1 - curve_bonus) + curve_bonus) target.set_pixel(x, size - 1 - int(y), (255, 0, 0, 255)) # depends on [control=['for'], data=['x']] #examine all cells in the map and if it is land get the temperature and #humidity for the cell. for y in range(world.height): for x in range(world.width): if world.is_land((x, y)): t = world.temperature_at((x, y)) p = world.humidity_at((x, y)) #get red and blue values depending on temperature and humidity if world.is_temperature_polar((x, y)): r = 0 # depends on [control=['if'], data=[]] elif world.is_temperature_alpine((x, y)): r = 42 # depends on [control=['if'], data=[]] elif world.is_temperature_boreal((x, y)): r = 85 # depends on [control=['if'], data=[]] elif world.is_temperature_cool((x, y)): r = 128 # depends on [control=['if'], data=[]] elif world.is_temperature_warm((x, y)): r = 170 # depends on [control=['if'], data=[]] elif world.is_temperature_subtropical((x, y)): r = 213 # depends on [control=['if'], data=[]] elif world.is_temperature_tropical((x, y)): r = 255 # depends on [control=['if'], data=[]] if world.is_humidity_superarid((x, y)): b = 32 # depends on [control=['if'], data=[]] elif world.is_humidity_perarid((x, y)): b = 64 # depends on [control=['if'], data=[]] elif world.is_humidity_arid((x, y)): b = 96 # depends on [control=['if'], data=[]] elif world.is_humidity_semiarid((x, y)): b = 128 # depends on [control=['if'], data=[]] elif world.is_humidity_subhumid((x, y)): b = 160 # depends on [control=['if'], data=[]] elif world.is_humidity_humid((x, y)): b = 192 # depends on [control=['if'], data=[]] elif world.is_humidity_perhumid((x, y)): b = 224 # depends on [control=['if'], data=[]] elif world.is_humidity_superhumid((x, y)): b = 255 # depends on [control=['if'], data=[]] #calculate x and y position based on normalized temperature and humidity nx = (size - 1) * ((t - min_temperature) / temperature_delta) ny = (size - 1) * ((p - min_humidity) / humidity_delta) target.set_pixel(int(nx), size - 1 - int(ny), (r, 128, b, 255)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=['y']]
def get_config(k: str) -> Any: """ Returns a configuration variable's value or None if it is unset. """ # If the config has been set explicitly, use it. if k in list(CONFIG.keys()): return CONFIG[k] # If there is a specific PULUMI_CONFIG_<k> environment variable, use it. env_key = get_config_env_key(k) if env_key in os.environ: return os.environ[env_key] # If the config hasn't been set, but there is a process-wide PULUMI_CONFIG environment variable, use it. env_dict = get_config_env() if env_dict is not None and k in list(env_dict.keys()): return env_dict[k] return None
def function[get_config, parameter[k]]: constant[ Returns a configuration variable's value or None if it is unset. ] if compare[name[k] in call[name[list], parameter[call[name[CONFIG].keys, parameter[]]]]] begin[:] return[call[name[CONFIG]][name[k]]] variable[env_key] assign[=] call[name[get_config_env_key], parameter[name[k]]] if compare[name[env_key] in name[os].environ] begin[:] return[call[name[os].environ][name[env_key]]] variable[env_dict] assign[=] call[name[get_config_env], parameter[]] if <ast.BoolOp object at 0x7da18f811e10> begin[:] return[call[name[env_dict]][name[k]]] return[constant[None]]
keyword[def] identifier[get_config] ( identifier[k] : identifier[str] )-> identifier[Any] : literal[string] keyword[if] identifier[k] keyword[in] identifier[list] ( identifier[CONFIG] . identifier[keys] ()): keyword[return] identifier[CONFIG] [ identifier[k] ] identifier[env_key] = identifier[get_config_env_key] ( identifier[k] ) keyword[if] identifier[env_key] keyword[in] identifier[os] . identifier[environ] : keyword[return] identifier[os] . identifier[environ] [ identifier[env_key] ] identifier[env_dict] = identifier[get_config_env] () keyword[if] identifier[env_dict] keyword[is] keyword[not] keyword[None] keyword[and] identifier[k] keyword[in] identifier[list] ( identifier[env_dict] . identifier[keys] ()): keyword[return] identifier[env_dict] [ identifier[k] ] keyword[return] keyword[None]
def get_config(k: str) -> Any: """ Returns a configuration variable's value or None if it is unset. """ # If the config has been set explicitly, use it. if k in list(CONFIG.keys()): return CONFIG[k] # depends on [control=['if'], data=['k']] # If there is a specific PULUMI_CONFIG_<k> environment variable, use it. env_key = get_config_env_key(k) if env_key in os.environ: return os.environ[env_key] # depends on [control=['if'], data=['env_key']] # If the config hasn't been set, but there is a process-wide PULUMI_CONFIG environment variable, use it. env_dict = get_config_env() if env_dict is not None and k in list(env_dict.keys()): return env_dict[k] # depends on [control=['if'], data=[]] return None
def shipping_query_handler(self, *custom_filters, state=None, run_task=None, **kwargs): """ Decorator for shipping query handler Example: .. code-block:: python3 @dp.shipping_query_handler(lambda shipping_query: True) async def some_shipping_query_handler(shipping_query: types.ShippingQuery) :param state: :param custom_filters: :param run_task: run callback in task (no wait results) :param kwargs: """ def decorator(callback): self.register_shipping_query_handler(callback, *custom_filters, state=state, run_task=run_task, **kwargs) return callback return decorator
def function[shipping_query_handler, parameter[self]]: constant[ Decorator for shipping query handler Example: .. code-block:: python3 @dp.shipping_query_handler(lambda shipping_query: True) async def some_shipping_query_handler(shipping_query: types.ShippingQuery) :param state: :param custom_filters: :param run_task: run callback in task (no wait results) :param kwargs: ] def function[decorator, parameter[callback]]: call[name[self].register_shipping_query_handler, parameter[name[callback], <ast.Starred object at 0x7da1b1846470>]] return[name[callback]] return[name[decorator]]
keyword[def] identifier[shipping_query_handler] ( identifier[self] ,* identifier[custom_filters] , identifier[state] = keyword[None] , identifier[run_task] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[def] identifier[decorator] ( identifier[callback] ): identifier[self] . identifier[register_shipping_query_handler] ( identifier[callback] ,* identifier[custom_filters] , identifier[state] = identifier[state] , identifier[run_task] = identifier[run_task] ,** identifier[kwargs] ) keyword[return] identifier[callback] keyword[return] identifier[decorator]
def shipping_query_handler(self, *custom_filters, state=None, run_task=None, **kwargs): """ Decorator for shipping query handler Example: .. code-block:: python3 @dp.shipping_query_handler(lambda shipping_query: True) async def some_shipping_query_handler(shipping_query: types.ShippingQuery) :param state: :param custom_filters: :param run_task: run callback in task (no wait results) :param kwargs: """ def decorator(callback): self.register_shipping_query_handler(callback, *custom_filters, state=state, run_task=run_task, **kwargs) return callback return decorator
def save(self, filename, clear_history=True, incl_uniqueid=False, compact=False): """Save the bundle to a JSON-formatted ASCII file. :parameter str filename: relative or full path to the file :parameter bool clear_history: whether to clear history log items before saving (default: True) :parameter bool incl_uniqueid: whether to including uniqueids in the file (only needed if its necessary to maintain the uniqueids when reloading) :parameter bool compact: whether to use compact file-formatting (maybe be quicker to save/load, but not as easily readable) :return: the filename """ if clear_history: # TODO: let's not actually clear history, # but rather skip the context when saving self.remove_history() # TODO: add option for clear_models, clear_feedback # NOTE: PS.save will handle os.path.expanduser return super(Bundle, self).save(filename, incl_uniqueid=incl_uniqueid, compact=compact)
def function[save, parameter[self, filename, clear_history, incl_uniqueid, compact]]: constant[Save the bundle to a JSON-formatted ASCII file. :parameter str filename: relative or full path to the file :parameter bool clear_history: whether to clear history log items before saving (default: True) :parameter bool incl_uniqueid: whether to including uniqueids in the file (only needed if its necessary to maintain the uniqueids when reloading) :parameter bool compact: whether to use compact file-formatting (maybe be quicker to save/load, but not as easily readable) :return: the filename ] if name[clear_history] begin[:] call[name[self].remove_history, parameter[]] return[call[call[name[super], parameter[name[Bundle], name[self]]].save, parameter[name[filename]]]]
keyword[def] identifier[save] ( identifier[self] , identifier[filename] , identifier[clear_history] = keyword[True] , identifier[incl_uniqueid] = keyword[False] , identifier[compact] = keyword[False] ): literal[string] keyword[if] identifier[clear_history] : identifier[self] . identifier[remove_history] () keyword[return] identifier[super] ( identifier[Bundle] , identifier[self] ). identifier[save] ( identifier[filename] , identifier[incl_uniqueid] = identifier[incl_uniqueid] , identifier[compact] = identifier[compact] )
def save(self, filename, clear_history=True, incl_uniqueid=False, compact=False): """Save the bundle to a JSON-formatted ASCII file. :parameter str filename: relative or full path to the file :parameter bool clear_history: whether to clear history log items before saving (default: True) :parameter bool incl_uniqueid: whether to including uniqueids in the file (only needed if its necessary to maintain the uniqueids when reloading) :parameter bool compact: whether to use compact file-formatting (maybe be quicker to save/load, but not as easily readable) :return: the filename """ if clear_history: # TODO: let's not actually clear history, # but rather skip the context when saving self.remove_history() # depends on [control=['if'], data=[]] # TODO: add option for clear_models, clear_feedback # NOTE: PS.save will handle os.path.expanduser return super(Bundle, self).save(filename, incl_uniqueid=incl_uniqueid, compact=compact)
def time_report_item(self, label, message=None): """ This will return a dictionary for the given message based on timestamps :param label: :param message: str of the message to find the timestamp :return: dict of times """ next_ = TIMESTAMPS_ORDER[TIMESTAMPS_ORDER.index(label) + 1] while next_ not in self._timestamps: next_ = TIMESTAMPS_ORDER[TIMESTAMPS_ORDER.index(next_) + 1] assert label in TIMESTAMPS_ORDER start = self._timestamps[label] - self._timestamps[TIMESTAMPS_ORDER[0]] end = self._timestamps[next_] - self._timestamps[TIMESTAMPS_ORDER[0]] return {'Message': message, 'Start': start, 'End': end, 'Sum': end - start, 'Count': 1}
def function[time_report_item, parameter[self, label, message]]: constant[ This will return a dictionary for the given message based on timestamps :param label: :param message: str of the message to find the timestamp :return: dict of times ] variable[next_] assign[=] call[name[TIMESTAMPS_ORDER]][binary_operation[call[name[TIMESTAMPS_ORDER].index, parameter[name[label]]] + constant[1]]] while compare[name[next_] <ast.NotIn object at 0x7da2590d7190> name[self]._timestamps] begin[:] variable[next_] assign[=] call[name[TIMESTAMPS_ORDER]][binary_operation[call[name[TIMESTAMPS_ORDER].index, parameter[name[next_]]] + constant[1]]] assert[compare[name[label] in name[TIMESTAMPS_ORDER]]] variable[start] assign[=] binary_operation[call[name[self]._timestamps][name[label]] - call[name[self]._timestamps][call[name[TIMESTAMPS_ORDER]][constant[0]]]] variable[end] assign[=] binary_operation[call[name[self]._timestamps][name[next_]] - call[name[self]._timestamps][call[name[TIMESTAMPS_ORDER]][constant[0]]]] return[dictionary[[<ast.Constant object at 0x7da1b16d5060>, <ast.Constant object at 0x7da1b16d7f70>, <ast.Constant object at 0x7da1b16d58a0>, <ast.Constant object at 0x7da1b16d7370>, <ast.Constant object at 0x7da1b16d4100>], [<ast.Name object at 0x7da1b16d6620>, <ast.Name object at 0x7da1b16d75b0>, <ast.Name object at 0x7da1b16d7e20>, <ast.BinOp object at 0x7da1b16d46a0>, <ast.Constant object at 0x7da1b16d70a0>]]]
keyword[def] identifier[time_report_item] ( identifier[self] , identifier[label] , identifier[message] = keyword[None] ): literal[string] identifier[next_] = identifier[TIMESTAMPS_ORDER] [ identifier[TIMESTAMPS_ORDER] . identifier[index] ( identifier[label] )+ literal[int] ] keyword[while] identifier[next_] keyword[not] keyword[in] identifier[self] . identifier[_timestamps] : identifier[next_] = identifier[TIMESTAMPS_ORDER] [ identifier[TIMESTAMPS_ORDER] . identifier[index] ( identifier[next_] )+ literal[int] ] keyword[assert] identifier[label] keyword[in] identifier[TIMESTAMPS_ORDER] identifier[start] = identifier[self] . identifier[_timestamps] [ identifier[label] ]- identifier[self] . identifier[_timestamps] [ identifier[TIMESTAMPS_ORDER] [ literal[int] ]] identifier[end] = identifier[self] . identifier[_timestamps] [ identifier[next_] ]- identifier[self] . identifier[_timestamps] [ identifier[TIMESTAMPS_ORDER] [ literal[int] ]] keyword[return] { literal[string] : identifier[message] , literal[string] : identifier[start] , literal[string] : identifier[end] , literal[string] : identifier[end] - identifier[start] , literal[string] : literal[int] }
def time_report_item(self, label, message=None): """ This will return a dictionary for the given message based on timestamps :param label: :param message: str of the message to find the timestamp :return: dict of times """ next_ = TIMESTAMPS_ORDER[TIMESTAMPS_ORDER.index(label) + 1] while next_ not in self._timestamps: next_ = TIMESTAMPS_ORDER[TIMESTAMPS_ORDER.index(next_) + 1] # depends on [control=['while'], data=['next_']] assert label in TIMESTAMPS_ORDER start = self._timestamps[label] - self._timestamps[TIMESTAMPS_ORDER[0]] end = self._timestamps[next_] - self._timestamps[TIMESTAMPS_ORDER[0]] return {'Message': message, 'Start': start, 'End': end, 'Sum': end - start, 'Count': 1}
def create(self, resource, keys_vals, url_prefix, auth, session, send_opts): """Create the given key-value pairs for the given resource. Will attempt to create all key-value pairs even if a failure is encountered. Args: resource (intern.resource.boss.BossResource): List keys associated with this resource. keys_vals (dictionary): The metadata to associate with the resource. url_prefix (string): Protocol + host such as https://api.theboss.io auth (string): Token to send in the request header. session (requests.Session): HTTP session to use for request. send_opts (dictionary): Additional arguments to pass to session.send(). Raises: HTTPErrorList on failure. """ success = True exc = HTTPErrorList('At least one key-value create failed.') for pair in keys_vals.items(): key = pair[0] value = pair[1] req = self.get_metadata_request( resource, 'POST', 'application/json', url_prefix, auth, key, value) prep = session.prepare_request(req) resp = session.send(prep, **send_opts) if resp.status_code == 201: continue err = ( 'Create failed for {}: {}:{}, got HTTP response: ({}) - {}' .format(resource.name, key, value, resp.status_code, resp.text)) exc.http_errors.append(HTTPError(err, request=req, response=resp)) success = False if not success: raise exc
def function[create, parameter[self, resource, keys_vals, url_prefix, auth, session, send_opts]]: constant[Create the given key-value pairs for the given resource. Will attempt to create all key-value pairs even if a failure is encountered. Args: resource (intern.resource.boss.BossResource): List keys associated with this resource. keys_vals (dictionary): The metadata to associate with the resource. url_prefix (string): Protocol + host such as https://api.theboss.io auth (string): Token to send in the request header. session (requests.Session): HTTP session to use for request. send_opts (dictionary): Additional arguments to pass to session.send(). Raises: HTTPErrorList on failure. ] variable[success] assign[=] constant[True] variable[exc] assign[=] call[name[HTTPErrorList], parameter[constant[At least one key-value create failed.]]] for taget[name[pair]] in starred[call[name[keys_vals].items, parameter[]]] begin[:] variable[key] assign[=] call[name[pair]][constant[0]] variable[value] assign[=] call[name[pair]][constant[1]] variable[req] assign[=] call[name[self].get_metadata_request, parameter[name[resource], constant[POST], constant[application/json], name[url_prefix], name[auth], name[key], name[value]]] variable[prep] assign[=] call[name[session].prepare_request, parameter[name[req]]] variable[resp] assign[=] call[name[session].send, parameter[name[prep]]] if compare[name[resp].status_code equal[==] constant[201]] begin[:] continue variable[err] assign[=] call[constant[Create failed for {}: {}:{}, got HTTP response: ({}) - {}].format, parameter[name[resource].name, name[key], name[value], name[resp].status_code, name[resp].text]] call[name[exc].http_errors.append, parameter[call[name[HTTPError], parameter[name[err]]]]] variable[success] assign[=] constant[False] if <ast.UnaryOp object at 0x7da204621690> begin[:] <ast.Raise object at 0x7da204621b10>
keyword[def] identifier[create] ( identifier[self] , identifier[resource] , identifier[keys_vals] , identifier[url_prefix] , identifier[auth] , identifier[session] , identifier[send_opts] ): literal[string] identifier[success] = keyword[True] identifier[exc] = identifier[HTTPErrorList] ( literal[string] ) keyword[for] identifier[pair] keyword[in] identifier[keys_vals] . identifier[items] (): identifier[key] = identifier[pair] [ literal[int] ] identifier[value] = identifier[pair] [ literal[int] ] identifier[req] = identifier[self] . identifier[get_metadata_request] ( identifier[resource] , literal[string] , literal[string] , identifier[url_prefix] , identifier[auth] , identifier[key] , identifier[value] ) identifier[prep] = identifier[session] . identifier[prepare_request] ( identifier[req] ) identifier[resp] = identifier[session] . identifier[send] ( identifier[prep] ,** identifier[send_opts] ) keyword[if] identifier[resp] . identifier[status_code] == literal[int] : keyword[continue] identifier[err] =( literal[string] . identifier[format] ( identifier[resource] . identifier[name] , identifier[key] , identifier[value] , identifier[resp] . identifier[status_code] , identifier[resp] . identifier[text] )) identifier[exc] . identifier[http_errors] . identifier[append] ( identifier[HTTPError] ( identifier[err] , identifier[request] = identifier[req] , identifier[response] = identifier[resp] )) identifier[success] = keyword[False] keyword[if] keyword[not] identifier[success] : keyword[raise] identifier[exc]
def create(self, resource, keys_vals, url_prefix, auth, session, send_opts): """Create the given key-value pairs for the given resource. Will attempt to create all key-value pairs even if a failure is encountered. Args: resource (intern.resource.boss.BossResource): List keys associated with this resource. keys_vals (dictionary): The metadata to associate with the resource. url_prefix (string): Protocol + host such as https://api.theboss.io auth (string): Token to send in the request header. session (requests.Session): HTTP session to use for request. send_opts (dictionary): Additional arguments to pass to session.send(). Raises: HTTPErrorList on failure. """ success = True exc = HTTPErrorList('At least one key-value create failed.') for pair in keys_vals.items(): key = pair[0] value = pair[1] req = self.get_metadata_request(resource, 'POST', 'application/json', url_prefix, auth, key, value) prep = session.prepare_request(req) resp = session.send(prep, **send_opts) if resp.status_code == 201: continue # depends on [control=['if'], data=[]] err = 'Create failed for {}: {}:{}, got HTTP response: ({}) - {}'.format(resource.name, key, value, resp.status_code, resp.text) exc.http_errors.append(HTTPError(err, request=req, response=resp)) success = False # depends on [control=['for'], data=['pair']] if not success: raise exc # depends on [control=['if'], data=[]]
def convert_mrf_to_syntax_mrf( mrf_lines, conversion_rules ): ''' Converts given lines from Filosoft's mrf format to syntactic analyzer's format, using the morph-category conversion rules from conversion_rules, and punctuation via method _convert_punctuation(); As a result of conversion, the input list mrf_lines will be modified, and also returned after a successful conversion; Morph-category conversion rules should be loaded via method load_fs_mrf_to_syntax_mrf_translation_rules( rulesFile ), usually from a file named 'tmorftrtabel.txt'; Note that the resulting list of lines likely has more lines than the original list had, because the conversion often requires that the original Filosoft's analysis is expanded into multiple analyses suitable for the syntactic analyzer; ''' i = 0 while ( i < len(mrf_lines) ): line = mrf_lines[i] if line.startswith(' '): # only consider lines of analysis # 1) Convert punctuation if _punctOrAbbrev.search(line): mrf_lines[i] = _convert_punctuation( line ) if '_Y_' not in line: i += 1 continue # 2) Convert morphological analyses that have a form specified withFormMatch = _morfWithForm.search(line) if withFormMatch: root = withFormMatch.group(1) pos = withFormMatch.group(2) formStr = withFormMatch.group(3) forms = formStr.split(',') all_new_lines = [] for form in forms: morphKey = pos+' '+form.strip() if morphKey in conversion_rules: newlines = [ ' '+root+' //'+_esc_que_mark(r)+' //' for r in conversion_rules[morphKey] ] all_new_lines.extend( newlines ) if all_new_lines: del mrf_lines[i] for newline in all_new_lines: mrf_lines.insert(i, newline) i += len(newlines) continue else: withoutFormMatch = _morfWithoutForm.search(line) if withoutFormMatch: # 3) Convert morphological analyses that have only POS specified root = withoutFormMatch.group(1) pos = withoutFormMatch.group(2) morphKey = pos all_new_lines = [] if morphKey in conversion_rules: newlines = [ ' '+root+' //'+_esc_que_mark(r)+' //' for r in conversion_rules[morphKey] ] all_new_lines.extend( newlines ) if all_new_lines: del mrf_lines[i] for newline in all_new_lines: mrf_lines.insert(i, newline) i += len(newlines) continue i += 1 return mrf_lines
def function[convert_mrf_to_syntax_mrf, parameter[mrf_lines, conversion_rules]]: constant[ Converts given lines from Filosoft's mrf format to syntactic analyzer's format, using the morph-category conversion rules from conversion_rules, and punctuation via method _convert_punctuation(); As a result of conversion, the input list mrf_lines will be modified, and also returned after a successful conversion; Morph-category conversion rules should be loaded via method load_fs_mrf_to_syntax_mrf_translation_rules( rulesFile ), usually from a file named 'tmorftrtabel.txt'; Note that the resulting list of lines likely has more lines than the original list had, because the conversion often requires that the original Filosoft's analysis is expanded into multiple analyses suitable for the syntactic analyzer; ] variable[i] assign[=] constant[0] while compare[name[i] less[<] call[name[len], parameter[name[mrf_lines]]]] begin[:] variable[line] assign[=] call[name[mrf_lines]][name[i]] if call[name[line].startswith, parameter[constant[ ]]] begin[:] if call[name[_punctOrAbbrev].search, parameter[name[line]]] begin[:] call[name[mrf_lines]][name[i]] assign[=] call[name[_convert_punctuation], parameter[name[line]]] if compare[constant[_Y_] <ast.NotIn object at 0x7da2590d7190> name[line]] begin[:] <ast.AugAssign object at 0x7da20cabeef0> continue variable[withFormMatch] assign[=] call[name[_morfWithForm].search, parameter[name[line]]] if name[withFormMatch] begin[:] variable[root] assign[=] call[name[withFormMatch].group, parameter[constant[1]]] variable[pos] assign[=] call[name[withFormMatch].group, parameter[constant[2]]] variable[formStr] assign[=] call[name[withFormMatch].group, parameter[constant[3]]] variable[forms] assign[=] call[name[formStr].split, parameter[constant[,]]] variable[all_new_lines] assign[=] list[[]] for taget[name[form]] in starred[name[forms]] begin[:] variable[morphKey] assign[=] binary_operation[binary_operation[name[pos] + constant[ ]] + call[name[form].strip, parameter[]]] if compare[name[morphKey] in name[conversion_rules]] begin[:] variable[newlines] assign[=] <ast.ListComp object at 0x7da18ede6c50> call[name[all_new_lines].extend, parameter[name[newlines]]] if name[all_new_lines] begin[:] <ast.Delete object at 0x7da18ede59c0> for taget[name[newline]] in starred[name[all_new_lines]] begin[:] call[name[mrf_lines].insert, parameter[name[i], name[newline]]] <ast.AugAssign object at 0x7da18ede4b20> continue <ast.AugAssign object at 0x7da20c6a99c0> return[name[mrf_lines]]
keyword[def] identifier[convert_mrf_to_syntax_mrf] ( identifier[mrf_lines] , identifier[conversion_rules] ): literal[string] identifier[i] = literal[int] keyword[while] ( identifier[i] < identifier[len] ( identifier[mrf_lines] )): identifier[line] = identifier[mrf_lines] [ identifier[i] ] keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[if] identifier[_punctOrAbbrev] . identifier[search] ( identifier[line] ): identifier[mrf_lines] [ identifier[i] ]= identifier[_convert_punctuation] ( identifier[line] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[line] : identifier[i] += literal[int] keyword[continue] identifier[withFormMatch] = identifier[_morfWithForm] . identifier[search] ( identifier[line] ) keyword[if] identifier[withFormMatch] : identifier[root] = identifier[withFormMatch] . identifier[group] ( literal[int] ) identifier[pos] = identifier[withFormMatch] . identifier[group] ( literal[int] ) identifier[formStr] = identifier[withFormMatch] . identifier[group] ( literal[int] ) identifier[forms] = identifier[formStr] . identifier[split] ( literal[string] ) identifier[all_new_lines] =[] keyword[for] identifier[form] keyword[in] identifier[forms] : identifier[morphKey] = identifier[pos] + literal[string] + identifier[form] . identifier[strip] () keyword[if] identifier[morphKey] keyword[in] identifier[conversion_rules] : identifier[newlines] =[ literal[string] + identifier[root] + literal[string] + identifier[_esc_que_mark] ( identifier[r] )+ literal[string] keyword[for] identifier[r] keyword[in] identifier[conversion_rules] [ identifier[morphKey] ]] identifier[all_new_lines] . identifier[extend] ( identifier[newlines] ) keyword[if] identifier[all_new_lines] : keyword[del] identifier[mrf_lines] [ identifier[i] ] keyword[for] identifier[newline] keyword[in] identifier[all_new_lines] : identifier[mrf_lines] . identifier[insert] ( identifier[i] , identifier[newline] ) identifier[i] += identifier[len] ( identifier[newlines] ) keyword[continue] keyword[else] : identifier[withoutFormMatch] = identifier[_morfWithoutForm] . identifier[search] ( identifier[line] ) keyword[if] identifier[withoutFormMatch] : identifier[root] = identifier[withoutFormMatch] . identifier[group] ( literal[int] ) identifier[pos] = identifier[withoutFormMatch] . identifier[group] ( literal[int] ) identifier[morphKey] = identifier[pos] identifier[all_new_lines] =[] keyword[if] identifier[morphKey] keyword[in] identifier[conversion_rules] : identifier[newlines] =[ literal[string] + identifier[root] + literal[string] + identifier[_esc_que_mark] ( identifier[r] )+ literal[string] keyword[for] identifier[r] keyword[in] identifier[conversion_rules] [ identifier[morphKey] ]] identifier[all_new_lines] . identifier[extend] ( identifier[newlines] ) keyword[if] identifier[all_new_lines] : keyword[del] identifier[mrf_lines] [ identifier[i] ] keyword[for] identifier[newline] keyword[in] identifier[all_new_lines] : identifier[mrf_lines] . identifier[insert] ( identifier[i] , identifier[newline] ) identifier[i] += identifier[len] ( identifier[newlines] ) keyword[continue] identifier[i] += literal[int] keyword[return] identifier[mrf_lines]
def convert_mrf_to_syntax_mrf(mrf_lines, conversion_rules): """ Converts given lines from Filosoft's mrf format to syntactic analyzer's format, using the morph-category conversion rules from conversion_rules, and punctuation via method _convert_punctuation(); As a result of conversion, the input list mrf_lines will be modified, and also returned after a successful conversion; Morph-category conversion rules should be loaded via method load_fs_mrf_to_syntax_mrf_translation_rules( rulesFile ), usually from a file named 'tmorftrtabel.txt'; Note that the resulting list of lines likely has more lines than the original list had, because the conversion often requires that the original Filosoft's analysis is expanded into multiple analyses suitable for the syntactic analyzer; """ i = 0 while i < len(mrf_lines): line = mrf_lines[i] if line.startswith(' '): # only consider lines of analysis # 1) Convert punctuation if _punctOrAbbrev.search(line): mrf_lines[i] = _convert_punctuation(line) if '_Y_' not in line: i += 1 continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # 2) Convert morphological analyses that have a form specified withFormMatch = _morfWithForm.search(line) if withFormMatch: root = withFormMatch.group(1) pos = withFormMatch.group(2) formStr = withFormMatch.group(3) forms = formStr.split(',') all_new_lines = [] for form in forms: morphKey = pos + ' ' + form.strip() if morphKey in conversion_rules: newlines = [' ' + root + ' //' + _esc_que_mark(r) + ' //' for r in conversion_rules[morphKey]] all_new_lines.extend(newlines) # depends on [control=['if'], data=['morphKey', 'conversion_rules']] # depends on [control=['for'], data=['form']] if all_new_lines: del mrf_lines[i] for newline in all_new_lines: mrf_lines.insert(i, newline) # depends on [control=['for'], data=['newline']] i += len(newlines) continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: withoutFormMatch = _morfWithoutForm.search(line) if withoutFormMatch: # 3) Convert morphological analyses that have only POS specified root = withoutFormMatch.group(1) pos = withoutFormMatch.group(2) morphKey = pos all_new_lines = [] if morphKey in conversion_rules: newlines = [' ' + root + ' //' + _esc_que_mark(r) + ' //' for r in conversion_rules[morphKey]] all_new_lines.extend(newlines) # depends on [control=['if'], data=['morphKey', 'conversion_rules']] if all_new_lines: del mrf_lines[i] for newline in all_new_lines: mrf_lines.insert(i, newline) # depends on [control=['for'], data=['newline']] i += len(newlines) continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] i += 1 # depends on [control=['while'], data=['i']] return mrf_lines
def _all_create_or_get_groups(self, key, start_hdf5_group=None): """Creates new or follows existing group nodes along a given colon separated `key`. :param key: Colon separated path along hdf5 file, e.g. `parameters.mobiles.cars`. :param start_hdf5_group: HDF5 group from where to start, leave `None` for the trajectory group. :return: Final group node, e.g. group node with name `cars`. """ if start_hdf5_group is None: newhdf5_group = self._trajectory_group else: newhdf5_group = start_hdf5_group created = False if key == '': return newhdf5_group, created split_key = key.split('.') for name in split_key: newhdf5_group, created = self._all_create_or_get_group(name, newhdf5_group) return newhdf5_group, created
def function[_all_create_or_get_groups, parameter[self, key, start_hdf5_group]]: constant[Creates new or follows existing group nodes along a given colon separated `key`. :param key: Colon separated path along hdf5 file, e.g. `parameters.mobiles.cars`. :param start_hdf5_group: HDF5 group from where to start, leave `None` for the trajectory group. :return: Final group node, e.g. group node with name `cars`. ] if compare[name[start_hdf5_group] is constant[None]] begin[:] variable[newhdf5_group] assign[=] name[self]._trajectory_group variable[created] assign[=] constant[False] if compare[name[key] equal[==] constant[]] begin[:] return[tuple[[<ast.Name object at 0x7da18f810d30>, <ast.Name object at 0x7da1b03bb5e0>]]] variable[split_key] assign[=] call[name[key].split, parameter[constant[.]]] for taget[name[name]] in starred[name[split_key]] begin[:] <ast.Tuple object at 0x7da1b03bb580> assign[=] call[name[self]._all_create_or_get_group, parameter[name[name], name[newhdf5_group]]] return[tuple[[<ast.Name object at 0x7da1b03bbb20>, <ast.Name object at 0x7da1b03ba0b0>]]]
keyword[def] identifier[_all_create_or_get_groups] ( identifier[self] , identifier[key] , identifier[start_hdf5_group] = keyword[None] ): literal[string] keyword[if] identifier[start_hdf5_group] keyword[is] keyword[None] : identifier[newhdf5_group] = identifier[self] . identifier[_trajectory_group] keyword[else] : identifier[newhdf5_group] = identifier[start_hdf5_group] identifier[created] = keyword[False] keyword[if] identifier[key] == literal[string] : keyword[return] identifier[newhdf5_group] , identifier[created] identifier[split_key] = identifier[key] . identifier[split] ( literal[string] ) keyword[for] identifier[name] keyword[in] identifier[split_key] : identifier[newhdf5_group] , identifier[created] = identifier[self] . identifier[_all_create_or_get_group] ( identifier[name] , identifier[newhdf5_group] ) keyword[return] identifier[newhdf5_group] , identifier[created]
def _all_create_or_get_groups(self, key, start_hdf5_group=None): """Creates new or follows existing group nodes along a given colon separated `key`. :param key: Colon separated path along hdf5 file, e.g. `parameters.mobiles.cars`. :param start_hdf5_group: HDF5 group from where to start, leave `None` for the trajectory group. :return: Final group node, e.g. group node with name `cars`. """ if start_hdf5_group is None: newhdf5_group = self._trajectory_group # depends on [control=['if'], data=[]] else: newhdf5_group = start_hdf5_group created = False if key == '': return (newhdf5_group, created) # depends on [control=['if'], data=[]] split_key = key.split('.') for name in split_key: (newhdf5_group, created) = self._all_create_or_get_group(name, newhdf5_group) # depends on [control=['for'], data=['name']] return (newhdf5_group, created)
def _make_win(n, mono=False): """ Generate a window for a given length. :param n: an integer for the length of the window. :param mono: True for a mono window, False for a stereo window. :return: an numpy array containing the window value. """ if mono: win = np.hanning(n) + 0.00001 else: win = np.array([np.hanning(n) + 0.00001, np.hanning(n) + 0.00001]) win = np.transpose(win) return win
def function[_make_win, parameter[n, mono]]: constant[ Generate a window for a given length. :param n: an integer for the length of the window. :param mono: True for a mono window, False for a stereo window. :return: an numpy array containing the window value. ] if name[mono] begin[:] variable[win] assign[=] binary_operation[call[name[np].hanning, parameter[name[n]]] + constant[1e-05]] variable[win] assign[=] call[name[np].transpose, parameter[name[win]]] return[name[win]]
keyword[def] identifier[_make_win] ( identifier[n] , identifier[mono] = keyword[False] ): literal[string] keyword[if] identifier[mono] : identifier[win] = identifier[np] . identifier[hanning] ( identifier[n] )+ literal[int] keyword[else] : identifier[win] = identifier[np] . identifier[array] ([ identifier[np] . identifier[hanning] ( identifier[n] )+ literal[int] , identifier[np] . identifier[hanning] ( identifier[n] )+ literal[int] ]) identifier[win] = identifier[np] . identifier[transpose] ( identifier[win] ) keyword[return] identifier[win]
def _make_win(n, mono=False): """ Generate a window for a given length. :param n: an integer for the length of the window. :param mono: True for a mono window, False for a stereo window. :return: an numpy array containing the window value. """ if mono: win = np.hanning(n) + 1e-05 # depends on [control=['if'], data=[]] else: win = np.array([np.hanning(n) + 1e-05, np.hanning(n) + 1e-05]) win = np.transpose(win) return win
def predict(self, Xstar, n=0, noise=False, return_std=True, return_cov=False, full_output=False, return_samples=False, num_samples=1, samp_kwargs={}, return_mean_func=False, use_MCMC=False, full_MC=False, rejection_func=None, ddof=1, output_transform=None, **kwargs): """Predict the mean and covariance at the inputs `Xstar`. The order of the derivative is given by `n`. The keyword `noise` sets whether or not noise is included in the prediction. Parameters ---------- Xstar : array, (`M`, `D`) `M` test input values of dimension `D`. n : array, (`M`, `D`) or scalar, non-negative int, optional Order of derivative to predict (0 is the base quantity). If `n` is scalar, the value is used for all points in `Xstar`. If non-integer values are passed, they will be silently rounded. Default is 0 (return base quantity). noise : bool, optional Whether or not noise should be included in the covariance. Default is False (no noise in covariance). return_std : bool, optional Set to True to compute and return the standard deviation for the predictions, False to skip this step. Default is True (return tuple of (`mean`, `std`)). return_cov : bool, optional Set to True to compute and return the full covariance matrix for the predictions. This overrides the `return_std` keyword. If you want both the standard deviation and covariance matrix pre-computed, use the `full_output` keyword. full_output : bool, optional Set to True to return the full outputs in a dictionary with keys: ================= =========================================================================== mean mean of GP at requested points std standard deviation of GP at requested points cov covariance matrix for values of GP at requested points samp random samples of GP at requested points (only if `return_samples` is True) mean_func mean function of GP (only if `return_mean_func` is True) cov_func covariance of mean function of GP (zero if not using MCMC) std_func standard deviation of mean function of GP (zero if not using MCMC) mean_without_func mean of GP minus mean function of GP cov_without_func covariance matrix of just the GP portion of the fit std_without_func standard deviation of just the GP portion of the fit ================= =========================================================================== return_samples : bool, optional Set to True to compute and return samples of the GP in addition to computing the mean. Only done if `full_output` is True. Default is False. num_samples : int, optional Number of samples to compute. If using MCMC this is the number of samples per MCMC sample, if using present values of hyperparameters this is the number of samples actually returned. Default is 1. samp_kwargs : dict, optional Additional keywords to pass to :py:meth:`draw_sample` if `return_samples` is True. Default is {}. return_mean_func : bool, optional Set to True to return the evaluation of the mean function in addition to computing the mean of the process itself. Only done if `full_output` is True and `self.mu` is not None. Default is False. use_MCMC : bool, optional Set to True to use :py:meth:`predict_MCMC` to evaluate the prediction marginalized over the hyperparameters. full_MC : bool, optional Set to True to compute the mean and covariance matrix using Monte Carlo sampling of the posterior. The samples will also be returned if full_output is True. The sample mean and covariance will be evaluated after filtering through `rejection_func`, so conditional means and covariances can be computed. Default is False (do not use full sampling). rejection_func : callable, optional Any samples where this function evaluates False will be rejected, where it evaluates True they will be kept. Default is None (no rejection). Only has an effect if `full_MC` is True. ddof : int, optional The degree of freedom correction to use when computing the covariance matrix when `full_MC` is True. Default is 1 (unbiased estimator). output_transform: array, (`L`, `M`), optional Matrix to use to transform the output vector of length `M` to one of length `L`. This can, for instance, be used to compute integrals. **kwargs : optional kwargs All additional kwargs are passed to :py:meth:`predict_MCMC` if `use_MCMC` is True. Returns ------- mean : array, (`M`,) Predicted GP mean. Only returned if `full_output` is False. std : array, (`M`,) Predicted standard deviation, only returned if `return_std` is True, `return_cov` is False and `full_output` is False. cov : array, (`M`, `M`) Predicted covariance matrix, only returned if `return_cov` is True and `full_output` is False. full_output : dict Dictionary with fields for mean, std, cov and possibly random samples and the mean function. Only returned if `full_output` is True. Raises ------ ValueError If `n` is not consistent with the shape of `Xstar` or is not entirely composed of non-negative integers. """ if use_MCMC: res = self.predict_MCMC( Xstar, n=n, noise=noise, return_std=return_std or full_output, return_cov=return_cov or full_output, return_samples=full_output and (return_samples or rejection_func), return_mean_func=full_output and return_mean_func, num_samples=num_samples, samp_kwargs=samp_kwargs, full_MC=full_MC, rejection_func=rejection_func, ddof=ddof, output_transform=output_transform, **kwargs ) if full_output: return res elif return_cov: return (res['mean'], res['cov']) elif return_std: return (res['mean'], res['std']) else: return res['mean'] else: # Process Xstar: Xstar = scipy.atleast_2d(scipy.asarray(Xstar, dtype=float)) # Handle 1d x case where array is passed in: if self.num_dim == 1 and Xstar.shape[0] == 1: Xstar = Xstar.T if Xstar.shape[1] != self.num_dim: raise ValueError( "Second dimension of Xstar must be equal to self.num_dim! " "Shape of Xstar given is %s, num_dim is %d." % (Xstar.shape, self.num_dim) ) # Process T: if output_transform is not None: output_transform = scipy.atleast_2d(scipy.asarray(output_transform, dtype=float)) if output_transform.ndim != 2: raise ValueError( "output_transform must have exactly 2 dimensions! Shape " "of output_transform given is %s." % (output_transform.shape,) ) if output_transform.shape[1] != Xstar.shape[0]: raise ValueError( "output_transform must have the same number of columns " "the number of rows in Xstar! Shape of output_transform " "given is %s, shape of Xstar is %s." % (output_transform.shape, Xstar.shape,) ) # Process n: try: iter(n) except TypeError: n = n * scipy.ones(Xstar.shape, dtype=int) else: n = scipy.atleast_2d(scipy.asarray(n, dtype=int)) if self.num_dim == 1 and n.shape[0] == 1: n = n.T if n.shape != Xstar.shape: raise ValueError( "When using array-like n, shape must match shape of Xstar! " "Shape of n given is %s, shape of Xstar given is %s." % (n.shape, Xstar.shape) ) if (n < 0).any(): raise ValueError("All elements of n must be non-negative integers!") self.compute_K_L_alpha_ll() Kstar = self.compute_Kij(self.X, Xstar, self.n, n) if noise: Kstar = Kstar + self.compute_Kij(self.X, Xstar, self.n, n, noise=True) if self.T is not None: Kstar = self.T.dot(Kstar) mean = Kstar.T.dot(self.alpha) if self.mu is not None: mean_func = scipy.atleast_2d(self.mu(Xstar, n)).T mean += mean_func if output_transform is not None: mean = output_transform.dot(mean) if return_mean_func and self.mu is not None: mean_func = output_transform.dot(mean_func) mean = mean.ravel() if return_mean_func and self.mu is not None: mean_func = mean_func.ravel() if return_std or return_cov or full_output or full_MC: v = scipy.linalg.solve_triangular(self.L, Kstar, lower=True) Kstarstar = self.compute_Kij(Xstar, None, n, None) if noise: Kstarstar = Kstarstar + self.compute_Kij(Xstar, None, n, None, noise=True) covariance = Kstarstar - v.T.dot(v) if output_transform is not None: covariance = output_transform.dot(covariance.dot(output_transform.T)) if return_samples or full_MC: samps = self.draw_sample( Xstar, n=n, num_samp=num_samples, mean=mean, cov=covariance, **samp_kwargs ) if rejection_func: good_samps = [] for samp in samps.T: if rejection_func(samp): good_samps.append(samp) if len(good_samps) == 0: raise ValueError("Did not get any good samples!") samps = scipy.asarray(good_samps, dtype=float).T if full_MC: mean = scipy.mean(samps, axis=1) covariance = scipy.cov(samps, rowvar=1, ddof=ddof) std = scipy.sqrt(scipy.diagonal(covariance)) if full_output: out = { 'mean': mean, 'std': std, 'cov': covariance } if return_samples or full_MC: out['samp'] = samps if return_mean_func and self.mu is not None: out['mean_func'] = mean_func out['cov_func'] = scipy.zeros( (len(mean_func), len(mean_func)), dtype=float ) out['std_func'] = scipy.zeros_like(mean_func) out['mean_without_func'] = mean - mean_func out['cov_without_func'] = covariance out['std_without_func'] = std return out else: if return_cov: return (mean, covariance) elif return_std: return (mean, std) else: return mean else: return mean
def function[predict, parameter[self, Xstar, n, noise, return_std, return_cov, full_output, return_samples, num_samples, samp_kwargs, return_mean_func, use_MCMC, full_MC, rejection_func, ddof, output_transform]]: constant[Predict the mean and covariance at the inputs `Xstar`. The order of the derivative is given by `n`. The keyword `noise` sets whether or not noise is included in the prediction. Parameters ---------- Xstar : array, (`M`, `D`) `M` test input values of dimension `D`. n : array, (`M`, `D`) or scalar, non-negative int, optional Order of derivative to predict (0 is the base quantity). If `n` is scalar, the value is used for all points in `Xstar`. If non-integer values are passed, they will be silently rounded. Default is 0 (return base quantity). noise : bool, optional Whether or not noise should be included in the covariance. Default is False (no noise in covariance). return_std : bool, optional Set to True to compute and return the standard deviation for the predictions, False to skip this step. Default is True (return tuple of (`mean`, `std`)). return_cov : bool, optional Set to True to compute and return the full covariance matrix for the predictions. This overrides the `return_std` keyword. If you want both the standard deviation and covariance matrix pre-computed, use the `full_output` keyword. full_output : bool, optional Set to True to return the full outputs in a dictionary with keys: ================= =========================================================================== mean mean of GP at requested points std standard deviation of GP at requested points cov covariance matrix for values of GP at requested points samp random samples of GP at requested points (only if `return_samples` is True) mean_func mean function of GP (only if `return_mean_func` is True) cov_func covariance of mean function of GP (zero if not using MCMC) std_func standard deviation of mean function of GP (zero if not using MCMC) mean_without_func mean of GP minus mean function of GP cov_without_func covariance matrix of just the GP portion of the fit std_without_func standard deviation of just the GP portion of the fit ================= =========================================================================== return_samples : bool, optional Set to True to compute and return samples of the GP in addition to computing the mean. Only done if `full_output` is True. Default is False. num_samples : int, optional Number of samples to compute. If using MCMC this is the number of samples per MCMC sample, if using present values of hyperparameters this is the number of samples actually returned. Default is 1. samp_kwargs : dict, optional Additional keywords to pass to :py:meth:`draw_sample` if `return_samples` is True. Default is {}. return_mean_func : bool, optional Set to True to return the evaluation of the mean function in addition to computing the mean of the process itself. Only done if `full_output` is True and `self.mu` is not None. Default is False. use_MCMC : bool, optional Set to True to use :py:meth:`predict_MCMC` to evaluate the prediction marginalized over the hyperparameters. full_MC : bool, optional Set to True to compute the mean and covariance matrix using Monte Carlo sampling of the posterior. The samples will also be returned if full_output is True. The sample mean and covariance will be evaluated after filtering through `rejection_func`, so conditional means and covariances can be computed. Default is False (do not use full sampling). rejection_func : callable, optional Any samples where this function evaluates False will be rejected, where it evaluates True they will be kept. Default is None (no rejection). Only has an effect if `full_MC` is True. ddof : int, optional The degree of freedom correction to use when computing the covariance matrix when `full_MC` is True. Default is 1 (unbiased estimator). output_transform: array, (`L`, `M`), optional Matrix to use to transform the output vector of length `M` to one of length `L`. This can, for instance, be used to compute integrals. **kwargs : optional kwargs All additional kwargs are passed to :py:meth:`predict_MCMC` if `use_MCMC` is True. Returns ------- mean : array, (`M`,) Predicted GP mean. Only returned if `full_output` is False. std : array, (`M`,) Predicted standard deviation, only returned if `return_std` is True, `return_cov` is False and `full_output` is False. cov : array, (`M`, `M`) Predicted covariance matrix, only returned if `return_cov` is True and `full_output` is False. full_output : dict Dictionary with fields for mean, std, cov and possibly random samples and the mean function. Only returned if `full_output` is True. Raises ------ ValueError If `n` is not consistent with the shape of `Xstar` or is not entirely composed of non-negative integers. ] if name[use_MCMC] begin[:] variable[res] assign[=] call[name[self].predict_MCMC, parameter[name[Xstar]]] if name[full_output] begin[:] return[name[res]]
keyword[def] identifier[predict] ( identifier[self] , identifier[Xstar] , identifier[n] = literal[int] , identifier[noise] = keyword[False] , identifier[return_std] = keyword[True] , identifier[return_cov] = keyword[False] , identifier[full_output] = keyword[False] , identifier[return_samples] = keyword[False] , identifier[num_samples] = literal[int] , identifier[samp_kwargs] ={}, identifier[return_mean_func] = keyword[False] , identifier[use_MCMC] = keyword[False] , identifier[full_MC] = keyword[False] , identifier[rejection_func] = keyword[None] , identifier[ddof] = literal[int] , identifier[output_transform] = keyword[None] , ** identifier[kwargs] ): literal[string] keyword[if] identifier[use_MCMC] : identifier[res] = identifier[self] . identifier[predict_MCMC] ( identifier[Xstar] , identifier[n] = identifier[n] , identifier[noise] = identifier[noise] , identifier[return_std] = identifier[return_std] keyword[or] identifier[full_output] , identifier[return_cov] = identifier[return_cov] keyword[or] identifier[full_output] , identifier[return_samples] = identifier[full_output] keyword[and] ( identifier[return_samples] keyword[or] identifier[rejection_func] ), identifier[return_mean_func] = identifier[full_output] keyword[and] identifier[return_mean_func] , identifier[num_samples] = identifier[num_samples] , identifier[samp_kwargs] = identifier[samp_kwargs] , identifier[full_MC] = identifier[full_MC] , identifier[rejection_func] = identifier[rejection_func] , identifier[ddof] = identifier[ddof] , identifier[output_transform] = identifier[output_transform] , ** identifier[kwargs] ) keyword[if] identifier[full_output] : keyword[return] identifier[res] keyword[elif] identifier[return_cov] : keyword[return] ( identifier[res] [ literal[string] ], identifier[res] [ literal[string] ]) keyword[elif] identifier[return_std] : keyword[return] ( identifier[res] [ literal[string] ], identifier[res] [ literal[string] ]) keyword[else] : keyword[return] identifier[res] [ literal[string] ] keyword[else] : identifier[Xstar] = identifier[scipy] . identifier[atleast_2d] ( identifier[scipy] . identifier[asarray] ( identifier[Xstar] , identifier[dtype] = identifier[float] )) keyword[if] identifier[self] . identifier[num_dim] == literal[int] keyword[and] identifier[Xstar] . identifier[shape] [ literal[int] ]== literal[int] : identifier[Xstar] = identifier[Xstar] . identifier[T] keyword[if] identifier[Xstar] . identifier[shape] [ literal[int] ]!= identifier[self] . identifier[num_dim] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] %( identifier[Xstar] . identifier[shape] , identifier[self] . identifier[num_dim] ) ) keyword[if] identifier[output_transform] keyword[is] keyword[not] keyword[None] : identifier[output_transform] = identifier[scipy] . identifier[atleast_2d] ( identifier[scipy] . identifier[asarray] ( identifier[output_transform] , identifier[dtype] = identifier[float] )) keyword[if] identifier[output_transform] . identifier[ndim] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] %( identifier[output_transform] . identifier[shape] ,) ) keyword[if] identifier[output_transform] . identifier[shape] [ literal[int] ]!= identifier[Xstar] . identifier[shape] [ literal[int] ]: keyword[raise] identifier[ValueError] ( literal[string] literal[string] literal[string] %( identifier[output_transform] . identifier[shape] , identifier[Xstar] . identifier[shape] ,) ) keyword[try] : identifier[iter] ( identifier[n] ) keyword[except] identifier[TypeError] : identifier[n] = identifier[n] * identifier[scipy] . identifier[ones] ( identifier[Xstar] . identifier[shape] , identifier[dtype] = identifier[int] ) keyword[else] : identifier[n] = identifier[scipy] . identifier[atleast_2d] ( identifier[scipy] . identifier[asarray] ( identifier[n] , identifier[dtype] = identifier[int] )) keyword[if] identifier[self] . identifier[num_dim] == literal[int] keyword[and] identifier[n] . identifier[shape] [ literal[int] ]== literal[int] : identifier[n] = identifier[n] . identifier[T] keyword[if] identifier[n] . identifier[shape] != identifier[Xstar] . identifier[shape] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] %( identifier[n] . identifier[shape] , identifier[Xstar] . identifier[shape] ) ) keyword[if] ( identifier[n] < literal[int] ). identifier[any] (): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[compute_K_L_alpha_ll] () identifier[Kstar] = identifier[self] . identifier[compute_Kij] ( identifier[self] . identifier[X] , identifier[Xstar] , identifier[self] . identifier[n] , identifier[n] ) keyword[if] identifier[noise] : identifier[Kstar] = identifier[Kstar] + identifier[self] . identifier[compute_Kij] ( identifier[self] . identifier[X] , identifier[Xstar] , identifier[self] . identifier[n] , identifier[n] , identifier[noise] = keyword[True] ) keyword[if] identifier[self] . identifier[T] keyword[is] keyword[not] keyword[None] : identifier[Kstar] = identifier[self] . identifier[T] . identifier[dot] ( identifier[Kstar] ) identifier[mean] = identifier[Kstar] . identifier[T] . identifier[dot] ( identifier[self] . identifier[alpha] ) keyword[if] identifier[self] . identifier[mu] keyword[is] keyword[not] keyword[None] : identifier[mean_func] = identifier[scipy] . identifier[atleast_2d] ( identifier[self] . identifier[mu] ( identifier[Xstar] , identifier[n] )). identifier[T] identifier[mean] += identifier[mean_func] keyword[if] identifier[output_transform] keyword[is] keyword[not] keyword[None] : identifier[mean] = identifier[output_transform] . identifier[dot] ( identifier[mean] ) keyword[if] identifier[return_mean_func] keyword[and] identifier[self] . identifier[mu] keyword[is] keyword[not] keyword[None] : identifier[mean_func] = identifier[output_transform] . identifier[dot] ( identifier[mean_func] ) identifier[mean] = identifier[mean] . identifier[ravel] () keyword[if] identifier[return_mean_func] keyword[and] identifier[self] . identifier[mu] keyword[is] keyword[not] keyword[None] : identifier[mean_func] = identifier[mean_func] . identifier[ravel] () keyword[if] identifier[return_std] keyword[or] identifier[return_cov] keyword[or] identifier[full_output] keyword[or] identifier[full_MC] : identifier[v] = identifier[scipy] . identifier[linalg] . identifier[solve_triangular] ( identifier[self] . identifier[L] , identifier[Kstar] , identifier[lower] = keyword[True] ) identifier[Kstarstar] = identifier[self] . identifier[compute_Kij] ( identifier[Xstar] , keyword[None] , identifier[n] , keyword[None] ) keyword[if] identifier[noise] : identifier[Kstarstar] = identifier[Kstarstar] + identifier[self] . identifier[compute_Kij] ( identifier[Xstar] , keyword[None] , identifier[n] , keyword[None] , identifier[noise] = keyword[True] ) identifier[covariance] = identifier[Kstarstar] - identifier[v] . identifier[T] . identifier[dot] ( identifier[v] ) keyword[if] identifier[output_transform] keyword[is] keyword[not] keyword[None] : identifier[covariance] = identifier[output_transform] . identifier[dot] ( identifier[covariance] . identifier[dot] ( identifier[output_transform] . identifier[T] )) keyword[if] identifier[return_samples] keyword[or] identifier[full_MC] : identifier[samps] = identifier[self] . identifier[draw_sample] ( identifier[Xstar] , identifier[n] = identifier[n] , identifier[num_samp] = identifier[num_samples] , identifier[mean] = identifier[mean] , identifier[cov] = identifier[covariance] ,** identifier[samp_kwargs] ) keyword[if] identifier[rejection_func] : identifier[good_samps] =[] keyword[for] identifier[samp] keyword[in] identifier[samps] . identifier[T] : keyword[if] identifier[rejection_func] ( identifier[samp] ): identifier[good_samps] . identifier[append] ( identifier[samp] ) keyword[if] identifier[len] ( identifier[good_samps] )== literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[samps] = identifier[scipy] . identifier[asarray] ( identifier[good_samps] , identifier[dtype] = identifier[float] ). identifier[T] keyword[if] identifier[full_MC] : identifier[mean] = identifier[scipy] . identifier[mean] ( identifier[samps] , identifier[axis] = literal[int] ) identifier[covariance] = identifier[scipy] . identifier[cov] ( identifier[samps] , identifier[rowvar] = literal[int] , identifier[ddof] = identifier[ddof] ) identifier[std] = identifier[scipy] . identifier[sqrt] ( identifier[scipy] . identifier[diagonal] ( identifier[covariance] )) keyword[if] identifier[full_output] : identifier[out] ={ literal[string] : identifier[mean] , literal[string] : identifier[std] , literal[string] : identifier[covariance] } keyword[if] identifier[return_samples] keyword[or] identifier[full_MC] : identifier[out] [ literal[string] ]= identifier[samps] keyword[if] identifier[return_mean_func] keyword[and] identifier[self] . identifier[mu] keyword[is] keyword[not] keyword[None] : identifier[out] [ literal[string] ]= identifier[mean_func] identifier[out] [ literal[string] ]= identifier[scipy] . identifier[zeros] ( ( identifier[len] ( identifier[mean_func] ), identifier[len] ( identifier[mean_func] )), identifier[dtype] = identifier[float] ) identifier[out] [ literal[string] ]= identifier[scipy] . identifier[zeros_like] ( identifier[mean_func] ) identifier[out] [ literal[string] ]= identifier[mean] - identifier[mean_func] identifier[out] [ literal[string] ]= identifier[covariance] identifier[out] [ literal[string] ]= identifier[std] keyword[return] identifier[out] keyword[else] : keyword[if] identifier[return_cov] : keyword[return] ( identifier[mean] , identifier[covariance] ) keyword[elif] identifier[return_std] : keyword[return] ( identifier[mean] , identifier[std] ) keyword[else] : keyword[return] identifier[mean] keyword[else] : keyword[return] identifier[mean]
def predict(self, Xstar, n=0, noise=False, return_std=True, return_cov=False, full_output=False, return_samples=False, num_samples=1, samp_kwargs={}, return_mean_func=False, use_MCMC=False, full_MC=False, rejection_func=None, ddof=1, output_transform=None, **kwargs): """Predict the mean and covariance at the inputs `Xstar`. The order of the derivative is given by `n`. The keyword `noise` sets whether or not noise is included in the prediction. Parameters ---------- Xstar : array, (`M`, `D`) `M` test input values of dimension `D`. n : array, (`M`, `D`) or scalar, non-negative int, optional Order of derivative to predict (0 is the base quantity). If `n` is scalar, the value is used for all points in `Xstar`. If non-integer values are passed, they will be silently rounded. Default is 0 (return base quantity). noise : bool, optional Whether or not noise should be included in the covariance. Default is False (no noise in covariance). return_std : bool, optional Set to True to compute and return the standard deviation for the predictions, False to skip this step. Default is True (return tuple of (`mean`, `std`)). return_cov : bool, optional Set to True to compute and return the full covariance matrix for the predictions. This overrides the `return_std` keyword. If you want both the standard deviation and covariance matrix pre-computed, use the `full_output` keyword. full_output : bool, optional Set to True to return the full outputs in a dictionary with keys: ================= =========================================================================== mean mean of GP at requested points std standard deviation of GP at requested points cov covariance matrix for values of GP at requested points samp random samples of GP at requested points (only if `return_samples` is True) mean_func mean function of GP (only if `return_mean_func` is True) cov_func covariance of mean function of GP (zero if not using MCMC) std_func standard deviation of mean function of GP (zero if not using MCMC) mean_without_func mean of GP minus mean function of GP cov_without_func covariance matrix of just the GP portion of the fit std_without_func standard deviation of just the GP portion of the fit ================= =========================================================================== return_samples : bool, optional Set to True to compute and return samples of the GP in addition to computing the mean. Only done if `full_output` is True. Default is False. num_samples : int, optional Number of samples to compute. If using MCMC this is the number of samples per MCMC sample, if using present values of hyperparameters this is the number of samples actually returned. Default is 1. samp_kwargs : dict, optional Additional keywords to pass to :py:meth:`draw_sample` if `return_samples` is True. Default is {}. return_mean_func : bool, optional Set to True to return the evaluation of the mean function in addition to computing the mean of the process itself. Only done if `full_output` is True and `self.mu` is not None. Default is False. use_MCMC : bool, optional Set to True to use :py:meth:`predict_MCMC` to evaluate the prediction marginalized over the hyperparameters. full_MC : bool, optional Set to True to compute the mean and covariance matrix using Monte Carlo sampling of the posterior. The samples will also be returned if full_output is True. The sample mean and covariance will be evaluated after filtering through `rejection_func`, so conditional means and covariances can be computed. Default is False (do not use full sampling). rejection_func : callable, optional Any samples where this function evaluates False will be rejected, where it evaluates True they will be kept. Default is None (no rejection). Only has an effect if `full_MC` is True. ddof : int, optional The degree of freedom correction to use when computing the covariance matrix when `full_MC` is True. Default is 1 (unbiased estimator). output_transform: array, (`L`, `M`), optional Matrix to use to transform the output vector of length `M` to one of length `L`. This can, for instance, be used to compute integrals. **kwargs : optional kwargs All additional kwargs are passed to :py:meth:`predict_MCMC` if `use_MCMC` is True. Returns ------- mean : array, (`M`,) Predicted GP mean. Only returned if `full_output` is False. std : array, (`M`,) Predicted standard deviation, only returned if `return_std` is True, `return_cov` is False and `full_output` is False. cov : array, (`M`, `M`) Predicted covariance matrix, only returned if `return_cov` is True and `full_output` is False. full_output : dict Dictionary with fields for mean, std, cov and possibly random samples and the mean function. Only returned if `full_output` is True. Raises ------ ValueError If `n` is not consistent with the shape of `Xstar` or is not entirely composed of non-negative integers. """ if use_MCMC: res = self.predict_MCMC(Xstar, n=n, noise=noise, return_std=return_std or full_output, return_cov=return_cov or full_output, return_samples=full_output and (return_samples or rejection_func), return_mean_func=full_output and return_mean_func, num_samples=num_samples, samp_kwargs=samp_kwargs, full_MC=full_MC, rejection_func=rejection_func, ddof=ddof, output_transform=output_transform, **kwargs) if full_output: return res # depends on [control=['if'], data=[]] elif return_cov: return (res['mean'], res['cov']) # depends on [control=['if'], data=[]] elif return_std: return (res['mean'], res['std']) # depends on [control=['if'], data=[]] else: return res['mean'] # depends on [control=['if'], data=[]] else: # Process Xstar: Xstar = scipy.atleast_2d(scipy.asarray(Xstar, dtype=float)) # Handle 1d x case where array is passed in: if self.num_dim == 1 and Xstar.shape[0] == 1: Xstar = Xstar.T # depends on [control=['if'], data=[]] if Xstar.shape[1] != self.num_dim: raise ValueError('Second dimension of Xstar must be equal to self.num_dim! Shape of Xstar given is %s, num_dim is %d.' % (Xstar.shape, self.num_dim)) # depends on [control=['if'], data=[]] # Process T: if output_transform is not None: output_transform = scipy.atleast_2d(scipy.asarray(output_transform, dtype=float)) if output_transform.ndim != 2: raise ValueError('output_transform must have exactly 2 dimensions! Shape of output_transform given is %s.' % (output_transform.shape,)) # depends on [control=['if'], data=[]] if output_transform.shape[1] != Xstar.shape[0]: raise ValueError('output_transform must have the same number of columns the number of rows in Xstar! Shape of output_transform given is %s, shape of Xstar is %s.' % (output_transform.shape, Xstar.shape)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['output_transform']] # Process n: try: iter(n) # depends on [control=['try'], data=[]] except TypeError: n = n * scipy.ones(Xstar.shape, dtype=int) # depends on [control=['except'], data=[]] else: n = scipy.atleast_2d(scipy.asarray(n, dtype=int)) if self.num_dim == 1 and n.shape[0] == 1: n = n.T # depends on [control=['if'], data=[]] if n.shape != Xstar.shape: raise ValueError('When using array-like n, shape must match shape of Xstar! Shape of n given is %s, shape of Xstar given is %s.' % (n.shape, Xstar.shape)) # depends on [control=['if'], data=[]] if (n < 0).any(): raise ValueError('All elements of n must be non-negative integers!') # depends on [control=['if'], data=[]] self.compute_K_L_alpha_ll() Kstar = self.compute_Kij(self.X, Xstar, self.n, n) if noise: Kstar = Kstar + self.compute_Kij(self.X, Xstar, self.n, n, noise=True) # depends on [control=['if'], data=[]] if self.T is not None: Kstar = self.T.dot(Kstar) # depends on [control=['if'], data=[]] mean = Kstar.T.dot(self.alpha) if self.mu is not None: mean_func = scipy.atleast_2d(self.mu(Xstar, n)).T mean += mean_func # depends on [control=['if'], data=[]] if output_transform is not None: mean = output_transform.dot(mean) if return_mean_func and self.mu is not None: mean_func = output_transform.dot(mean_func) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['output_transform']] mean = mean.ravel() if return_mean_func and self.mu is not None: mean_func = mean_func.ravel() # depends on [control=['if'], data=[]] if return_std or return_cov or full_output or full_MC: v = scipy.linalg.solve_triangular(self.L, Kstar, lower=True) Kstarstar = self.compute_Kij(Xstar, None, n, None) if noise: Kstarstar = Kstarstar + self.compute_Kij(Xstar, None, n, None, noise=True) # depends on [control=['if'], data=[]] covariance = Kstarstar - v.T.dot(v) if output_transform is not None: covariance = output_transform.dot(covariance.dot(output_transform.T)) # depends on [control=['if'], data=['output_transform']] if return_samples or full_MC: samps = self.draw_sample(Xstar, n=n, num_samp=num_samples, mean=mean, cov=covariance, **samp_kwargs) if rejection_func: good_samps = [] for samp in samps.T: if rejection_func(samp): good_samps.append(samp) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['samp']] if len(good_samps) == 0: raise ValueError('Did not get any good samples!') # depends on [control=['if'], data=[]] samps = scipy.asarray(good_samps, dtype=float).T # depends on [control=['if'], data=[]] if full_MC: mean = scipy.mean(samps, axis=1) covariance = scipy.cov(samps, rowvar=1, ddof=ddof) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] std = scipy.sqrt(scipy.diagonal(covariance)) if full_output: out = {'mean': mean, 'std': std, 'cov': covariance} if return_samples or full_MC: out['samp'] = samps # depends on [control=['if'], data=[]] if return_mean_func and self.mu is not None: out['mean_func'] = mean_func out['cov_func'] = scipy.zeros((len(mean_func), len(mean_func)), dtype=float) out['std_func'] = scipy.zeros_like(mean_func) out['mean_without_func'] = mean - mean_func out['cov_without_func'] = covariance out['std_without_func'] = std # depends on [control=['if'], data=[]] return out # depends on [control=['if'], data=[]] elif return_cov: return (mean, covariance) # depends on [control=['if'], data=[]] elif return_std: return (mean, std) # depends on [control=['if'], data=[]] else: return mean # depends on [control=['if'], data=[]] else: return mean
def proto_02_01_MT70(abf=exampleABF): """repeated membrane tests.""" standard_overlayWithAverage(abf) swhlab.memtest.memtest(abf) swhlab.memtest.checkSweep(abf) swhlab.plot.save(abf,tag='check',resize=False)
def function[proto_02_01_MT70, parameter[abf]]: constant[repeated membrane tests.] call[name[standard_overlayWithAverage], parameter[name[abf]]] call[name[swhlab].memtest.memtest, parameter[name[abf]]] call[name[swhlab].memtest.checkSweep, parameter[name[abf]]] call[name[swhlab].plot.save, parameter[name[abf]]]
keyword[def] identifier[proto_02_01_MT70] ( identifier[abf] = identifier[exampleABF] ): literal[string] identifier[standard_overlayWithAverage] ( identifier[abf] ) identifier[swhlab] . identifier[memtest] . identifier[memtest] ( identifier[abf] ) identifier[swhlab] . identifier[memtest] . identifier[checkSweep] ( identifier[abf] ) identifier[swhlab] . identifier[plot] . identifier[save] ( identifier[abf] , identifier[tag] = literal[string] , identifier[resize] = keyword[False] )
def proto_02_01_MT70(abf=exampleABF): """repeated membrane tests.""" standard_overlayWithAverage(abf) swhlab.memtest.memtest(abf) swhlab.memtest.checkSweep(abf) swhlab.plot.save(abf, tag='check', resize=False)
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True): """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d Resolves a dotted attribute name to an object. Raises an AttributeError if any attribute in the chain starts with a '_'. If the optional allow_dotted_names argument is false, dots are not supported and this function operates similar to getattr(obj, attr). """ if allow_dotted_names: attrs = attr.split('.') else: attrs = [attr] for i in attrs: if i.startswith('_'): raise AttributeError( 'attempt to access private attribute "%s"' % i ) else: obj = getattr(obj,i) return obj
def function[resolve_dotted_attribute, parameter[obj, attr, allow_dotted_names]]: constant[resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d Resolves a dotted attribute name to an object. Raises an AttributeError if any attribute in the chain starts with a '_'. If the optional allow_dotted_names argument is false, dots are not supported and this function operates similar to getattr(obj, attr). ] if name[allow_dotted_names] begin[:] variable[attrs] assign[=] call[name[attr].split, parameter[constant[.]]] for taget[name[i]] in starred[name[attrs]] begin[:] if call[name[i].startswith, parameter[constant[_]]] begin[:] <ast.Raise object at 0x7da18dc078e0> return[name[obj]]
keyword[def] identifier[resolve_dotted_attribute] ( identifier[obj] , identifier[attr] , identifier[allow_dotted_names] = keyword[True] ): literal[string] keyword[if] identifier[allow_dotted_names] : identifier[attrs] = identifier[attr] . identifier[split] ( literal[string] ) keyword[else] : identifier[attrs] =[ identifier[attr] ] keyword[for] identifier[i] keyword[in] identifier[attrs] : keyword[if] identifier[i] . identifier[startswith] ( literal[string] ): keyword[raise] identifier[AttributeError] ( literal[string] % identifier[i] ) keyword[else] : identifier[obj] = identifier[getattr] ( identifier[obj] , identifier[i] ) keyword[return] identifier[obj]
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True): """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d Resolves a dotted attribute name to an object. Raises an AttributeError if any attribute in the chain starts with a '_'. If the optional allow_dotted_names argument is false, dots are not supported and this function operates similar to getattr(obj, attr). """ if allow_dotted_names: attrs = attr.split('.') # depends on [control=['if'], data=[]] else: attrs = [attr] for i in attrs: if i.startswith('_'): raise AttributeError('attempt to access private attribute "%s"' % i) # depends on [control=['if'], data=[]] else: obj = getattr(obj, i) # depends on [control=['for'], data=['i']] return obj
def get_analog(self, component_info=None, data=None, component_position=None): """Get analog data.""" components = [] append_components = components.append for _ in range(component_info.device_count): component_position, device = QRTPacket._get_exact( RTAnalogDevice, data, component_position ) if device.sample_count > 0: component_position, sample_number = QRTPacket._get_exact( RTSampleNumber, data, component_position ) RTAnalogChannel.format = struct.Struct( RTAnalogChannel.format_str % device.sample_count ) for _ in range(device.channel_count): component_position, channel = QRTPacket._get_tuple( RTAnalogChannel, data, component_position ) append_components((device, sample_number, channel)) return components
def function[get_analog, parameter[self, component_info, data, component_position]]: constant[Get analog data.] variable[components] assign[=] list[[]] variable[append_components] assign[=] name[components].append for taget[name[_]] in starred[call[name[range], parameter[name[component_info].device_count]]] begin[:] <ast.Tuple object at 0x7da1afe504f0> assign[=] call[name[QRTPacket]._get_exact, parameter[name[RTAnalogDevice], name[data], name[component_position]]] if compare[name[device].sample_count greater[>] constant[0]] begin[:] <ast.Tuple object at 0x7da1afe52230> assign[=] call[name[QRTPacket]._get_exact, parameter[name[RTSampleNumber], name[data], name[component_position]]] name[RTAnalogChannel].format assign[=] call[name[struct].Struct, parameter[binary_operation[name[RTAnalogChannel].format_str <ast.Mod object at 0x7da2590d6920> name[device].sample_count]]] for taget[name[_]] in starred[call[name[range], parameter[name[device].channel_count]]] begin[:] <ast.Tuple object at 0x7da1afe531f0> assign[=] call[name[QRTPacket]._get_tuple, parameter[name[RTAnalogChannel], name[data], name[component_position]]] call[name[append_components], parameter[tuple[[<ast.Name object at 0x7da1afe53790>, <ast.Name object at 0x7da1afe522c0>, <ast.Name object at 0x7da1afe53ca0>]]]] return[name[components]]
keyword[def] identifier[get_analog] ( identifier[self] , identifier[component_info] = keyword[None] , identifier[data] = keyword[None] , identifier[component_position] = keyword[None] ): literal[string] identifier[components] =[] identifier[append_components] = identifier[components] . identifier[append] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[component_info] . identifier[device_count] ): identifier[component_position] , identifier[device] = identifier[QRTPacket] . identifier[_get_exact] ( identifier[RTAnalogDevice] , identifier[data] , identifier[component_position] ) keyword[if] identifier[device] . identifier[sample_count] > literal[int] : identifier[component_position] , identifier[sample_number] = identifier[QRTPacket] . identifier[_get_exact] ( identifier[RTSampleNumber] , identifier[data] , identifier[component_position] ) identifier[RTAnalogChannel] . identifier[format] = identifier[struct] . identifier[Struct] ( identifier[RTAnalogChannel] . identifier[format_str] % identifier[device] . identifier[sample_count] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[device] . identifier[channel_count] ): identifier[component_position] , identifier[channel] = identifier[QRTPacket] . identifier[_get_tuple] ( identifier[RTAnalogChannel] , identifier[data] , identifier[component_position] ) identifier[append_components] (( identifier[device] , identifier[sample_number] , identifier[channel] )) keyword[return] identifier[components]
def get_analog(self, component_info=None, data=None, component_position=None): """Get analog data.""" components = [] append_components = components.append for _ in range(component_info.device_count): (component_position, device) = QRTPacket._get_exact(RTAnalogDevice, data, component_position) if device.sample_count > 0: (component_position, sample_number) = QRTPacket._get_exact(RTSampleNumber, data, component_position) RTAnalogChannel.format = struct.Struct(RTAnalogChannel.format_str % device.sample_count) for _ in range(device.channel_count): (component_position, channel) = QRTPacket._get_tuple(RTAnalogChannel, data, component_position) append_components((device, sample_number, channel)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_']] return components
def execute(self, sensor_graph, scope_stack): """Execute this statement on the sensor_graph given the current scope tree. This adds a single config variable assignment to the current sensor graph Args: sensor_graph (SensorGraph): The sensor graph that we are building or modifying scope_stack (list(Scope)): A stack of nested scopes that may influence how this statement allocates clocks or other stream resources. """ parent = scope_stack[-1] try: slot = parent.resolve_identifier('current_slot', SlotIdentifier) except UnresolvedIdentifierError: raise SensorGraphSemanticError("set config statement used outside of config block") if self.explicit_type is None or not isinstance(self.identifier, int): raise SensorGraphSemanticError("Config variable type definitions are not yet supported") if isinstance(self.value, (bytes, bytearray)) and not self.explicit_type == 'binary': raise SensorGraphSemanticError("You must pass the binary variable type when using encoded binary data") if not isinstance(self.value, (bytes, bytearray)) and self.explicit_type == 'binary': raise SensorGraphSemanticError("You must pass an encoded binary value with binary type config variables") sensor_graph.add_config(slot, self.identifier, self.explicit_type, self.value)
def function[execute, parameter[self, sensor_graph, scope_stack]]: constant[Execute this statement on the sensor_graph given the current scope tree. This adds a single config variable assignment to the current sensor graph Args: sensor_graph (SensorGraph): The sensor graph that we are building or modifying scope_stack (list(Scope)): A stack of nested scopes that may influence how this statement allocates clocks or other stream resources. ] variable[parent] assign[=] call[name[scope_stack]][<ast.UnaryOp object at 0x7da18f721db0>] <ast.Try object at 0x7da18f7214b0> if <ast.BoolOp object at 0x7da18f721f60> begin[:] <ast.Raise object at 0x7da18f722140> if <ast.BoolOp object at 0x7da18f721d20> begin[:] <ast.Raise object at 0x7da18f723ee0> if <ast.BoolOp object at 0x7da18f722aa0> begin[:] <ast.Raise object at 0x7da18f722290> call[name[sensor_graph].add_config, parameter[name[slot], name[self].identifier, name[self].explicit_type, name[self].value]]
keyword[def] identifier[execute] ( identifier[self] , identifier[sensor_graph] , identifier[scope_stack] ): literal[string] identifier[parent] = identifier[scope_stack] [- literal[int] ] keyword[try] : identifier[slot] = identifier[parent] . identifier[resolve_identifier] ( literal[string] , identifier[SlotIdentifier] ) keyword[except] identifier[UnresolvedIdentifierError] : keyword[raise] identifier[SensorGraphSemanticError] ( literal[string] ) keyword[if] identifier[self] . identifier[explicit_type] keyword[is] keyword[None] keyword[or] keyword[not] identifier[isinstance] ( identifier[self] . identifier[identifier] , identifier[int] ): keyword[raise] identifier[SensorGraphSemanticError] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[self] . identifier[value] ,( identifier[bytes] , identifier[bytearray] )) keyword[and] keyword[not] identifier[self] . identifier[explicit_type] == literal[string] : keyword[raise] identifier[SensorGraphSemanticError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[value] ,( identifier[bytes] , identifier[bytearray] )) keyword[and] identifier[self] . identifier[explicit_type] == literal[string] : keyword[raise] identifier[SensorGraphSemanticError] ( literal[string] ) identifier[sensor_graph] . identifier[add_config] ( identifier[slot] , identifier[self] . identifier[identifier] , identifier[self] . identifier[explicit_type] , identifier[self] . identifier[value] )
def execute(self, sensor_graph, scope_stack): """Execute this statement on the sensor_graph given the current scope tree. This adds a single config variable assignment to the current sensor graph Args: sensor_graph (SensorGraph): The sensor graph that we are building or modifying scope_stack (list(Scope)): A stack of nested scopes that may influence how this statement allocates clocks or other stream resources. """ parent = scope_stack[-1] try: slot = parent.resolve_identifier('current_slot', SlotIdentifier) # depends on [control=['try'], data=[]] except UnresolvedIdentifierError: raise SensorGraphSemanticError('set config statement used outside of config block') # depends on [control=['except'], data=[]] if self.explicit_type is None or not isinstance(self.identifier, int): raise SensorGraphSemanticError('Config variable type definitions are not yet supported') # depends on [control=['if'], data=[]] if isinstance(self.value, (bytes, bytearray)) and (not self.explicit_type == 'binary'): raise SensorGraphSemanticError('You must pass the binary variable type when using encoded binary data') # depends on [control=['if'], data=[]] if not isinstance(self.value, (bytes, bytearray)) and self.explicit_type == 'binary': raise SensorGraphSemanticError('You must pass an encoded binary value with binary type config variables') # depends on [control=['if'], data=[]] sensor_graph.add_config(slot, self.identifier, self.explicit_type, self.value)
def _rebalance_partition_replicas( self, partition, max_movement_count=None, max_movement_size=None, ): """Rebalance replication groups for given partition.""" # Separate replication-groups into under and over replicated total = partition.replication_factor over_replicated_rgs, under_replicated_rgs = separate_groups( list(self.cluster_topology.rgs.values()), lambda g: g.count_replica(partition), total, ) # Move replicas from over-replicated to under-replicated groups movement_count = 0 movement_size = 0 while ( under_replicated_rgs and over_replicated_rgs ) and ( max_movement_size is None or movement_size + partition.size <= max_movement_size ) and ( max_movement_count is None or movement_count < max_movement_count ): # Decide source and destination group rg_source = self._elect_source_replication_group( over_replicated_rgs, partition, ) rg_destination = self._elect_dest_replication_group( rg_source.count_replica(partition), under_replicated_rgs, partition, ) if rg_source and rg_destination: # Actual movement of partition self.log.debug( 'Moving partition {p_name} from replication-group ' '{rg_source} to replication-group {rg_dest}'.format( p_name=partition.name, rg_source=rg_source.id, rg_dest=rg_destination.id, ), ) rg_source.move_partition(rg_destination, partition) movement_count += 1 movement_size += partition.size else: # Groups balanced or cannot be balanced further break # Re-compute under and over-replicated replication-groups over_replicated_rgs, under_replicated_rgs = separate_groups( list(self.cluster_topology.rgs.values()), lambda g: g.count_replica(partition), total, ) return movement_count, movement_size
def function[_rebalance_partition_replicas, parameter[self, partition, max_movement_count, max_movement_size]]: constant[Rebalance replication groups for given partition.] variable[total] assign[=] name[partition].replication_factor <ast.Tuple object at 0x7da1b08409a0> assign[=] call[name[separate_groups], parameter[call[name[list], parameter[call[name[self].cluster_topology.rgs.values, parameter[]]]], <ast.Lambda object at 0x7da1b0841420>, name[total]]] variable[movement_count] assign[=] constant[0] variable[movement_size] assign[=] constant[0] while <ast.BoolOp object at 0x7da1b0840460> begin[:] variable[rg_source] assign[=] call[name[self]._elect_source_replication_group, parameter[name[over_replicated_rgs], name[partition]]] variable[rg_destination] assign[=] call[name[self]._elect_dest_replication_group, parameter[call[name[rg_source].count_replica, parameter[name[partition]]], name[under_replicated_rgs], name[partition]]] if <ast.BoolOp object at 0x7da1b0841690> begin[:] call[name[self].log.debug, parameter[call[constant[Moving partition {p_name} from replication-group {rg_source} to replication-group {rg_dest}].format, parameter[]]]] call[name[rg_source].move_partition, parameter[name[rg_destination], name[partition]]] <ast.AugAssign object at 0x7da1b078a260> <ast.AugAssign object at 0x7da1b078a350> <ast.Tuple object at 0x7da1b0789a20> assign[=] call[name[separate_groups], parameter[call[name[list], parameter[call[name[self].cluster_topology.rgs.values, parameter[]]]], <ast.Lambda object at 0x7da1b07b3ee0>, name[total]]] return[tuple[[<ast.Name object at 0x7da1b07b1ae0>, <ast.Name object at 0x7da1b07b39a0>]]]
keyword[def] identifier[_rebalance_partition_replicas] ( identifier[self] , identifier[partition] , identifier[max_movement_count] = keyword[None] , identifier[max_movement_size] = keyword[None] , ): literal[string] identifier[total] = identifier[partition] . identifier[replication_factor] identifier[over_replicated_rgs] , identifier[under_replicated_rgs] = identifier[separate_groups] ( identifier[list] ( identifier[self] . identifier[cluster_topology] . identifier[rgs] . identifier[values] ()), keyword[lambda] identifier[g] : identifier[g] . identifier[count_replica] ( identifier[partition] ), identifier[total] , ) identifier[movement_count] = literal[int] identifier[movement_size] = literal[int] keyword[while] ( identifier[under_replicated_rgs] keyword[and] identifier[over_replicated_rgs] ) keyword[and] ( identifier[max_movement_size] keyword[is] keyword[None] keyword[or] identifier[movement_size] + identifier[partition] . identifier[size] <= identifier[max_movement_size] ) keyword[and] ( identifier[max_movement_count] keyword[is] keyword[None] keyword[or] identifier[movement_count] < identifier[max_movement_count] ): identifier[rg_source] = identifier[self] . identifier[_elect_source_replication_group] ( identifier[over_replicated_rgs] , identifier[partition] , ) identifier[rg_destination] = identifier[self] . identifier[_elect_dest_replication_group] ( identifier[rg_source] . identifier[count_replica] ( identifier[partition] ), identifier[under_replicated_rgs] , identifier[partition] , ) keyword[if] identifier[rg_source] keyword[and] identifier[rg_destination] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] literal[string] . identifier[format] ( identifier[p_name] = identifier[partition] . identifier[name] , identifier[rg_source] = identifier[rg_source] . identifier[id] , identifier[rg_dest] = identifier[rg_destination] . identifier[id] , ), ) identifier[rg_source] . identifier[move_partition] ( identifier[rg_destination] , identifier[partition] ) identifier[movement_count] += literal[int] identifier[movement_size] += identifier[partition] . identifier[size] keyword[else] : keyword[break] identifier[over_replicated_rgs] , identifier[under_replicated_rgs] = identifier[separate_groups] ( identifier[list] ( identifier[self] . identifier[cluster_topology] . identifier[rgs] . identifier[values] ()), keyword[lambda] identifier[g] : identifier[g] . identifier[count_replica] ( identifier[partition] ), identifier[total] , ) keyword[return] identifier[movement_count] , identifier[movement_size]
def _rebalance_partition_replicas(self, partition, max_movement_count=None, max_movement_size=None): """Rebalance replication groups for given partition.""" # Separate replication-groups into under and over replicated total = partition.replication_factor (over_replicated_rgs, under_replicated_rgs) = separate_groups(list(self.cluster_topology.rgs.values()), lambda g: g.count_replica(partition), total) # Move replicas from over-replicated to under-replicated groups movement_count = 0 movement_size = 0 while (under_replicated_rgs and over_replicated_rgs) and (max_movement_size is None or movement_size + partition.size <= max_movement_size) and (max_movement_count is None or movement_count < max_movement_count): # Decide source and destination group rg_source = self._elect_source_replication_group(over_replicated_rgs, partition) rg_destination = self._elect_dest_replication_group(rg_source.count_replica(partition), under_replicated_rgs, partition) if rg_source and rg_destination: # Actual movement of partition self.log.debug('Moving partition {p_name} from replication-group {rg_source} to replication-group {rg_dest}'.format(p_name=partition.name, rg_source=rg_source.id, rg_dest=rg_destination.id)) rg_source.move_partition(rg_destination, partition) movement_count += 1 movement_size += partition.size # depends on [control=['if'], data=[]] else: # Groups balanced or cannot be balanced further break # Re-compute under and over-replicated replication-groups (over_replicated_rgs, under_replicated_rgs) = separate_groups(list(self.cluster_topology.rgs.values()), lambda g: g.count_replica(partition), total) # depends on [control=['while'], data=[]] return (movement_count, movement_size)
def show_tip(self, point, tip, wrapped_tiplines): """ Attempts to show the specified tip at the current cursor location. """ # Don't attempt to show it if it's already visible and the text # to be displayed is the same as the one displayed before. if self.isVisible(): if self.tip == tip: return True else: self.hide() # Attempt to find the cursor position at which to show the call tip. text_edit = self._text_edit cursor = text_edit.textCursor() search_pos = cursor.position() - 1 self._start_position, _ = self._find_parenthesis(search_pos, forward=False) if self._start_position == -1: return False if self.hide_timer_on: self._hide_timer.stop() # Logic to decide how much time to show the calltip depending # on the amount of text present if len(wrapped_tiplines) == 1: args = wrapped_tiplines[0].split('(')[1] nargs = len(args.split(',')) if nargs == 1: hide_time = 1400 elif nargs == 2: hide_time = 1600 else: hide_time = 1800 elif len(wrapped_tiplines) == 2: args1 = wrapped_tiplines[1].strip() nargs1 = len(args1.split(',')) if nargs1 == 1: hide_time = 2500 else: hide_time = 2800 else: hide_time = 3500 self._hide_timer.start(hide_time, self) # Set the text and resize the widget accordingly. self.tip = tip self.setText(tip) self.resize(self.sizeHint()) # Locate and show the widget. Place the tip below the current line # unless it would be off the screen. In that case, decide the best # location based trying to minimize the area that goes off-screen. padding = 3 # Distance in pixels between cursor bounds and tip box. cursor_rect = text_edit.cursorRect(cursor) screen_rect = self.app.desktop().screenGeometry(text_edit) point.setY(point.y() + padding) tip_height = self.size().height() tip_width = self.size().width() vertical = 'bottom' horizontal = 'Right' if point.y() + tip_height > screen_rect.height() + screen_rect.y(): point_ = text_edit.mapToGlobal(cursor_rect.topRight()) # If tip is still off screen, check if point is in top or bottom # half of screen. if point_.y() - tip_height < padding: # If point is in upper half of screen, show tip below it. # otherwise above it. if 2*point.y() < screen_rect.height(): vertical = 'bottom' else: vertical = 'top' else: vertical = 'top' if point.x() + tip_width > screen_rect.width() + screen_rect.x(): point_ = text_edit.mapToGlobal(cursor_rect.topRight()) # If tip is still off-screen, check if point is in the right or # left half of the screen. if point_.x() - tip_width < padding: if 2*point.x() < screen_rect.width(): horizontal = 'Right' else: horizontal = 'Left' else: horizontal = 'Left' pos = getattr(cursor_rect, '%s%s' %(vertical, horizontal)) adjusted_point = text_edit.mapToGlobal(pos()) if vertical == 'top': point.setY(adjusted_point.y() - tip_height - padding) if horizontal == 'Left': point.setX(adjusted_point.x() - tip_width - padding) self.move(point) self.show() return True
def function[show_tip, parameter[self, point, tip, wrapped_tiplines]]: constant[ Attempts to show the specified tip at the current cursor location. ] if call[name[self].isVisible, parameter[]] begin[:] if compare[name[self].tip equal[==] name[tip]] begin[:] return[constant[True]] variable[text_edit] assign[=] name[self]._text_edit variable[cursor] assign[=] call[name[text_edit].textCursor, parameter[]] variable[search_pos] assign[=] binary_operation[call[name[cursor].position, parameter[]] - constant[1]] <ast.Tuple object at 0x7da20c76ca90> assign[=] call[name[self]._find_parenthesis, parameter[name[search_pos]]] if compare[name[self]._start_position equal[==] <ast.UnaryOp object at 0x7da20c76e140>] begin[:] return[constant[False]] if name[self].hide_timer_on begin[:] call[name[self]._hide_timer.stop, parameter[]] if compare[call[name[len], parameter[name[wrapped_tiplines]]] equal[==] constant[1]] begin[:] variable[args] assign[=] call[call[call[name[wrapped_tiplines]][constant[0]].split, parameter[constant[(]]]][constant[1]] variable[nargs] assign[=] call[name[len], parameter[call[name[args].split, parameter[constant[,]]]]] if compare[name[nargs] equal[==] constant[1]] begin[:] variable[hide_time] assign[=] constant[1400] call[name[self]._hide_timer.start, parameter[name[hide_time], name[self]]] name[self].tip assign[=] name[tip] call[name[self].setText, parameter[name[tip]]] call[name[self].resize, parameter[call[name[self].sizeHint, parameter[]]]] variable[padding] assign[=] constant[3] variable[cursor_rect] assign[=] call[name[text_edit].cursorRect, parameter[name[cursor]]] variable[screen_rect] assign[=] call[call[name[self].app.desktop, parameter[]].screenGeometry, parameter[name[text_edit]]] call[name[point].setY, parameter[binary_operation[call[name[point].y, parameter[]] + name[padding]]]] variable[tip_height] assign[=] call[call[name[self].size, parameter[]].height, parameter[]] variable[tip_width] assign[=] call[call[name[self].size, parameter[]].width, parameter[]] variable[vertical] assign[=] constant[bottom] variable[horizontal] assign[=] constant[Right] if compare[binary_operation[call[name[point].y, parameter[]] + name[tip_height]] greater[>] binary_operation[call[name[screen_rect].height, parameter[]] + call[name[screen_rect].y, parameter[]]]] begin[:] variable[point_] assign[=] call[name[text_edit].mapToGlobal, parameter[call[name[cursor_rect].topRight, parameter[]]]] if compare[binary_operation[call[name[point_].y, parameter[]] - name[tip_height]] less[<] name[padding]] begin[:] if compare[binary_operation[constant[2] * call[name[point].y, parameter[]]] less[<] call[name[screen_rect].height, parameter[]]] begin[:] variable[vertical] assign[=] constant[bottom] if compare[binary_operation[call[name[point].x, parameter[]] + name[tip_width]] greater[>] binary_operation[call[name[screen_rect].width, parameter[]] + call[name[screen_rect].x, parameter[]]]] begin[:] variable[point_] assign[=] call[name[text_edit].mapToGlobal, parameter[call[name[cursor_rect].topRight, parameter[]]]] if compare[binary_operation[call[name[point_].x, parameter[]] - name[tip_width]] less[<] name[padding]] begin[:] if compare[binary_operation[constant[2] * call[name[point].x, parameter[]]] less[<] call[name[screen_rect].width, parameter[]]] begin[:] variable[horizontal] assign[=] constant[Right] variable[pos] assign[=] call[name[getattr], parameter[name[cursor_rect], binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18eb57400>, <ast.Name object at 0x7da18eb54490>]]]]] variable[adjusted_point] assign[=] call[name[text_edit].mapToGlobal, parameter[call[name[pos], parameter[]]]] if compare[name[vertical] equal[==] constant[top]] begin[:] call[name[point].setY, parameter[binary_operation[binary_operation[call[name[adjusted_point].y, parameter[]] - name[tip_height]] - name[padding]]]] if compare[name[horizontal] equal[==] constant[Left]] begin[:] call[name[point].setX, parameter[binary_operation[binary_operation[call[name[adjusted_point].x, parameter[]] - name[tip_width]] - name[padding]]]] call[name[self].move, parameter[name[point]]] call[name[self].show, parameter[]] return[constant[True]]
keyword[def] identifier[show_tip] ( identifier[self] , identifier[point] , identifier[tip] , identifier[wrapped_tiplines] ): literal[string] keyword[if] identifier[self] . identifier[isVisible] (): keyword[if] identifier[self] . identifier[tip] == identifier[tip] : keyword[return] keyword[True] keyword[else] : identifier[self] . identifier[hide] () identifier[text_edit] = identifier[self] . identifier[_text_edit] identifier[cursor] = identifier[text_edit] . identifier[textCursor] () identifier[search_pos] = identifier[cursor] . identifier[position] ()- literal[int] identifier[self] . identifier[_start_position] , identifier[_] = identifier[self] . identifier[_find_parenthesis] ( identifier[search_pos] , identifier[forward] = keyword[False] ) keyword[if] identifier[self] . identifier[_start_position] ==- literal[int] : keyword[return] keyword[False] keyword[if] identifier[self] . identifier[hide_timer_on] : identifier[self] . identifier[_hide_timer] . identifier[stop] () keyword[if] identifier[len] ( identifier[wrapped_tiplines] )== literal[int] : identifier[args] = identifier[wrapped_tiplines] [ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ] identifier[nargs] = identifier[len] ( identifier[args] . identifier[split] ( literal[string] )) keyword[if] identifier[nargs] == literal[int] : identifier[hide_time] = literal[int] keyword[elif] identifier[nargs] == literal[int] : identifier[hide_time] = literal[int] keyword[else] : identifier[hide_time] = literal[int] keyword[elif] identifier[len] ( identifier[wrapped_tiplines] )== literal[int] : identifier[args1] = identifier[wrapped_tiplines] [ literal[int] ]. identifier[strip] () identifier[nargs1] = identifier[len] ( identifier[args1] . identifier[split] ( literal[string] )) keyword[if] identifier[nargs1] == literal[int] : identifier[hide_time] = literal[int] keyword[else] : identifier[hide_time] = literal[int] keyword[else] : identifier[hide_time] = literal[int] identifier[self] . identifier[_hide_timer] . identifier[start] ( identifier[hide_time] , identifier[self] ) identifier[self] . identifier[tip] = identifier[tip] identifier[self] . identifier[setText] ( identifier[tip] ) identifier[self] . identifier[resize] ( identifier[self] . identifier[sizeHint] ()) identifier[padding] = literal[int] identifier[cursor_rect] = identifier[text_edit] . identifier[cursorRect] ( identifier[cursor] ) identifier[screen_rect] = identifier[self] . identifier[app] . identifier[desktop] (). identifier[screenGeometry] ( identifier[text_edit] ) identifier[point] . identifier[setY] ( identifier[point] . identifier[y] ()+ identifier[padding] ) identifier[tip_height] = identifier[self] . identifier[size] (). identifier[height] () identifier[tip_width] = identifier[self] . identifier[size] (). identifier[width] () identifier[vertical] = literal[string] identifier[horizontal] = literal[string] keyword[if] identifier[point] . identifier[y] ()+ identifier[tip_height] > identifier[screen_rect] . identifier[height] ()+ identifier[screen_rect] . identifier[y] (): identifier[point_] = identifier[text_edit] . identifier[mapToGlobal] ( identifier[cursor_rect] . identifier[topRight] ()) keyword[if] identifier[point_] . identifier[y] ()- identifier[tip_height] < identifier[padding] : keyword[if] literal[int] * identifier[point] . identifier[y] ()< identifier[screen_rect] . identifier[height] (): identifier[vertical] = literal[string] keyword[else] : identifier[vertical] = literal[string] keyword[else] : identifier[vertical] = literal[string] keyword[if] identifier[point] . identifier[x] ()+ identifier[tip_width] > identifier[screen_rect] . identifier[width] ()+ identifier[screen_rect] . identifier[x] (): identifier[point_] = identifier[text_edit] . identifier[mapToGlobal] ( identifier[cursor_rect] . identifier[topRight] ()) keyword[if] identifier[point_] . identifier[x] ()- identifier[tip_width] < identifier[padding] : keyword[if] literal[int] * identifier[point] . identifier[x] ()< identifier[screen_rect] . identifier[width] (): identifier[horizontal] = literal[string] keyword[else] : identifier[horizontal] = literal[string] keyword[else] : identifier[horizontal] = literal[string] identifier[pos] = identifier[getattr] ( identifier[cursor_rect] , literal[string] %( identifier[vertical] , identifier[horizontal] )) identifier[adjusted_point] = identifier[text_edit] . identifier[mapToGlobal] ( identifier[pos] ()) keyword[if] identifier[vertical] == literal[string] : identifier[point] . identifier[setY] ( identifier[adjusted_point] . identifier[y] ()- identifier[tip_height] - identifier[padding] ) keyword[if] identifier[horizontal] == literal[string] : identifier[point] . identifier[setX] ( identifier[adjusted_point] . identifier[x] ()- identifier[tip_width] - identifier[padding] ) identifier[self] . identifier[move] ( identifier[point] ) identifier[self] . identifier[show] () keyword[return] keyword[True]
def show_tip(self, point, tip, wrapped_tiplines): """ Attempts to show the specified tip at the current cursor location. """ # Don't attempt to show it if it's already visible and the text # to be displayed is the same as the one displayed before. if self.isVisible(): if self.tip == tip: return True # depends on [control=['if'], data=[]] else: self.hide() # depends on [control=['if'], data=[]] # Attempt to find the cursor position at which to show the call tip. text_edit = self._text_edit cursor = text_edit.textCursor() search_pos = cursor.position() - 1 (self._start_position, _) = self._find_parenthesis(search_pos, forward=False) if self._start_position == -1: return False # depends on [control=['if'], data=[]] if self.hide_timer_on: self._hide_timer.stop() # Logic to decide how much time to show the calltip depending # on the amount of text present if len(wrapped_tiplines) == 1: args = wrapped_tiplines[0].split('(')[1] nargs = len(args.split(',')) if nargs == 1: hide_time = 1400 # depends on [control=['if'], data=[]] elif nargs == 2: hide_time = 1600 # depends on [control=['if'], data=[]] else: hide_time = 1800 # depends on [control=['if'], data=[]] elif len(wrapped_tiplines) == 2: args1 = wrapped_tiplines[1].strip() nargs1 = len(args1.split(',')) if nargs1 == 1: hide_time = 2500 # depends on [control=['if'], data=[]] else: hide_time = 2800 # depends on [control=['if'], data=[]] else: hide_time = 3500 self._hide_timer.start(hide_time, self) # depends on [control=['if'], data=[]] # Set the text and resize the widget accordingly. self.tip = tip self.setText(tip) self.resize(self.sizeHint()) # Locate and show the widget. Place the tip below the current line # unless it would be off the screen. In that case, decide the best # location based trying to minimize the area that goes off-screen. padding = 3 # Distance in pixels between cursor bounds and tip box. cursor_rect = text_edit.cursorRect(cursor) screen_rect = self.app.desktop().screenGeometry(text_edit) point.setY(point.y() + padding) tip_height = self.size().height() tip_width = self.size().width() vertical = 'bottom' horizontal = 'Right' if point.y() + tip_height > screen_rect.height() + screen_rect.y(): point_ = text_edit.mapToGlobal(cursor_rect.topRight()) # If tip is still off screen, check if point is in top or bottom # half of screen. if point_.y() - tip_height < padding: # If point is in upper half of screen, show tip below it. # otherwise above it. if 2 * point.y() < screen_rect.height(): vertical = 'bottom' # depends on [control=['if'], data=[]] else: vertical = 'top' # depends on [control=['if'], data=[]] else: vertical = 'top' # depends on [control=['if'], data=[]] if point.x() + tip_width > screen_rect.width() + screen_rect.x(): point_ = text_edit.mapToGlobal(cursor_rect.topRight()) # If tip is still off-screen, check if point is in the right or # left half of the screen. if point_.x() - tip_width < padding: if 2 * point.x() < screen_rect.width(): horizontal = 'Right' # depends on [control=['if'], data=[]] else: horizontal = 'Left' # depends on [control=['if'], data=[]] else: horizontal = 'Left' # depends on [control=['if'], data=[]] pos = getattr(cursor_rect, '%s%s' % (vertical, horizontal)) adjusted_point = text_edit.mapToGlobal(pos()) if vertical == 'top': point.setY(adjusted_point.y() - tip_height - padding) # depends on [control=['if'], data=[]] if horizontal == 'Left': point.setX(adjusted_point.x() - tip_width - padding) # depends on [control=['if'], data=[]] self.move(point) self.show() return True
def _mk_index(self): """create the index for this set""" index = defaultdict(list) for line_no, line in enumerate(self._stripped_lines): if line: index[line].append(line_no) return index
def function[_mk_index, parameter[self]]: constant[create the index for this set] variable[index] assign[=] call[name[defaultdict], parameter[name[list]]] for taget[tuple[[<ast.Name object at 0x7da1b024d990>, <ast.Name object at 0x7da1b024d960>]]] in starred[call[name[enumerate], parameter[name[self]._stripped_lines]]] begin[:] if name[line] begin[:] call[call[name[index]][name[line]].append, parameter[name[line_no]]] return[name[index]]
keyword[def] identifier[_mk_index] ( identifier[self] ): literal[string] identifier[index] = identifier[defaultdict] ( identifier[list] ) keyword[for] identifier[line_no] , identifier[line] keyword[in] identifier[enumerate] ( identifier[self] . identifier[_stripped_lines] ): keyword[if] identifier[line] : identifier[index] [ identifier[line] ]. identifier[append] ( identifier[line_no] ) keyword[return] identifier[index]
def _mk_index(self): """create the index for this set""" index = defaultdict(list) for (line_no, line) in enumerate(self._stripped_lines): if line: index[line].append(line_no) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return index
def load_limits(self, config=None): """Load the stats limits (except the one in the exclude list).""" # For each plugins, call the load_limits method for p in self._plugins: self._plugins[p].load_limits(config)
def function[load_limits, parameter[self, config]]: constant[Load the stats limits (except the one in the exclude list).] for taget[name[p]] in starred[name[self]._plugins] begin[:] call[call[name[self]._plugins][name[p]].load_limits, parameter[name[config]]]
keyword[def] identifier[load_limits] ( identifier[self] , identifier[config] = keyword[None] ): literal[string] keyword[for] identifier[p] keyword[in] identifier[self] . identifier[_plugins] : identifier[self] . identifier[_plugins] [ identifier[p] ]. identifier[load_limits] ( identifier[config] )
def load_limits(self, config=None): """Load the stats limits (except the one in the exclude list).""" # For each plugins, call the load_limits method for p in self._plugins: self._plugins[p].load_limits(config) # depends on [control=['for'], data=['p']]
def write(self, oprot): ''' Write this object to the given output protocol and return self. :type oprot: thryft.protocol._output_protocol._OutputProtocol :rtype: pastpy.gen.database.impl.dbf.objects_dbf_record.ObjectsDbfRecord ''' oprot.write_struct_begin('ObjectsDbfRecord') if self.accessno is not None: oprot.write_field_begin(name='accessno', type=11, id=None) oprot.write_string(self.accessno) oprot.write_field_end() if self.accessory is not None: oprot.write_field_begin(name='accessory', type=11, id=None) oprot.write_string(self.accessory) oprot.write_field_end() if self.acqvalue is not None: oprot.write_field_begin(name='acqvalue', type=11, id=None) oprot.write_decimal(self.acqvalue) oprot.write_field_end() if self.age is not None: oprot.write_field_begin(name='age', type=11, id=None) oprot.write_string(self.age) oprot.write_field_end() if self.appnotes is not None: oprot.write_field_begin(name='appnotes', type=11, id=None) oprot.write_string(self.appnotes) oprot.write_field_end() if self.appraisor is not None: oprot.write_field_begin(name='appraisor', type=11, id=None) oprot.write_string(self.appraisor) oprot.write_field_end() if self.assemzone is not None: oprot.write_field_begin(name='assemzone', type=11, id=None) oprot.write_string(self.assemzone) oprot.write_field_end() if self.bagno is not None: oprot.write_field_begin(name='bagno', type=11, id=None) oprot.write_string(self.bagno) oprot.write_field_end() if self.boxno is not None: oprot.write_field_begin(name='boxno', type=11, id=None) oprot.write_string(self.boxno) oprot.write_field_end() if self.caption is not None: oprot.write_field_begin(name='caption', type=11, id=None) oprot.write_string(self.caption) oprot.write_field_end() if self.cat is not None: oprot.write_field_begin(name='cat', type=11, id=None) oprot.write_string(self.cat) oprot.write_field_end() if self.catby is not None: oprot.write_field_begin(name='catby', type=11, id=None) oprot.write_string(self.catby) oprot.write_field_end() if self.catdate is not None: oprot.write_field_begin(name='catdate', type=10, id=None) oprot.write_date(self.catdate) oprot.write_field_end() if self.cattype is not None: oprot.write_field_begin(name='cattype', type=11, id=None) oprot.write_string(self.cattype) oprot.write_field_end() if self.chemcomp is not None: oprot.write_field_begin(name='chemcomp', type=11, id=None) oprot.write_string(self.chemcomp) oprot.write_field_end() if self.circum is not None: oprot.write_field_begin(name='circum', type=11, id=None) oprot.write_decimal(self.circum) oprot.write_field_end() if self.circumft is not None: oprot.write_field_begin(name='circumft', type=11, id=None) oprot.write_decimal(self.circumft) oprot.write_field_end() if self.circumin is not None: oprot.write_field_begin(name='circumin', type=11, id=None) oprot.write_decimal(self.circumin) oprot.write_field_end() if self.classes is not None: oprot.write_field_begin(name='classes', type=11, id=None) oprot.write_string(self.classes) oprot.write_field_end() if self.colldate is not None: oprot.write_field_begin(name='colldate', type=10, id=None) oprot.write_date(self.colldate) oprot.write_field_end() if self.collection is not None: oprot.write_field_begin(name='collection', type=11, id=None) oprot.write_string(self.collection) oprot.write_field_end() if self.collector is not None: oprot.write_field_begin(name='collector', type=11, id=None) oprot.write_string(self.collector) oprot.write_field_end() if self.conddate is not None: oprot.write_field_begin(name='conddate', type=10, id=None) oprot.write_date(self.conddate) oprot.write_field_end() if self.condexam is not None: oprot.write_field_begin(name='condexam', type=11, id=None) oprot.write_string(self.condexam) oprot.write_field_end() if self.condition is not None: oprot.write_field_begin(name='condition', type=11, id=None) oprot.write_string(self.condition) oprot.write_field_end() if self.condnotes is not None: oprot.write_field_begin(name='condnotes', type=11, id=None) oprot.write_string(self.condnotes) oprot.write_field_end() if self.count is not None: oprot.write_field_begin(name='count', type=11, id=None) oprot.write_string(self.count) oprot.write_field_end() if self.creator is not None: oprot.write_field_begin(name='creator', type=11, id=None) oprot.write_string(self.creator) oprot.write_field_end() if self.creator2 is not None: oprot.write_field_begin(name='creator2', type=11, id=None) oprot.write_string(self.creator2) oprot.write_field_end() if self.creator3 is not None: oprot.write_field_begin(name='creator3', type=11, id=None) oprot.write_string(self.creator3) oprot.write_field_end() if self.credit is not None: oprot.write_field_begin(name='credit', type=11, id=None) oprot.write_string(self.credit) oprot.write_field_end() if self.crystal is not None: oprot.write_field_begin(name='crystal', type=11, id=None) oprot.write_string(self.crystal) oprot.write_field_end() if self.culture is not None: oprot.write_field_begin(name='culture', type=11, id=None) oprot.write_string(self.culture) oprot.write_field_end() if self.curvalmax is not None: oprot.write_field_begin(name='curvalmax', type=11, id=None) oprot.write_decimal(self.curvalmax) oprot.write_field_end() if self.curvalue is not None: oprot.write_field_begin(name='curvalue', type=11, id=None) oprot.write_decimal(self.curvalue) oprot.write_field_end() if self.dataset is not None: oprot.write_field_begin(name='dataset', type=11, id=None) oprot.write_string(self.dataset) oprot.write_field_end() if self.date is not None: oprot.write_field_begin(name='date', type=11, id=None) oprot.write_string(self.date) oprot.write_field_end() if self.datingmeth is not None: oprot.write_field_begin(name='datingmeth', type=11, id=None) oprot.write_string(self.datingmeth) oprot.write_field_end() if self.datum is not None: oprot.write_field_begin(name='datum', type=11, id=None) oprot.write_string(self.datum) oprot.write_field_end() if self.depth is not None: oprot.write_field_begin(name='depth', type=11, id=None) oprot.write_decimal(self.depth) oprot.write_field_end() if self.depthft is not None: oprot.write_field_begin(name='depthft', type=11, id=None) oprot.write_decimal(self.depthft) oprot.write_field_end() if self.depthin is not None: oprot.write_field_begin(name='depthin', type=11, id=None) oprot.write_decimal(self.depthin) oprot.write_field_end() if self.descrip is not None: oprot.write_field_begin(name='descrip', type=11, id=None) oprot.write_string(self.descrip) oprot.write_field_end() if self.diameter is not None: oprot.write_field_begin(name='diameter', type=11, id=None) oprot.write_decimal(self.diameter) oprot.write_field_end() if self.diameterft is not None: oprot.write_field_begin(name='diameterft', type=11, id=None) oprot.write_decimal(self.diameterft) oprot.write_field_end() if self.diameterin is not None: oprot.write_field_begin(name='diameterin', type=11, id=None) oprot.write_decimal(self.diameterin) oprot.write_field_end() if self.dimnotes is not None: oprot.write_field_begin(name='dimnotes', type=11, id=None) oprot.write_string(self.dimnotes) oprot.write_field_end() if self.dimtype is not None: oprot.write_field_begin(name='dimtype', type=8, id=None) oprot.write_i32(self.dimtype) oprot.write_field_end() if self.dispvalue is not None: oprot.write_field_begin(name='dispvalue', type=11, id=None) oprot.write_string(self.dispvalue) oprot.write_field_end() if self.earlydate is not None: oprot.write_field_begin(name='earlydate', type=8, id=None) oprot.write_i32(self.earlydate) oprot.write_field_end() if self.elements is not None: oprot.write_field_begin(name='elements', type=11, id=None) oprot.write_string(self.elements) oprot.write_field_end() if self.epoch is not None: oprot.write_field_begin(name='epoch', type=11, id=None) oprot.write_string(self.epoch) oprot.write_field_end() if self.era is not None: oprot.write_field_begin(name='era', type=11, id=None) oprot.write_string(self.era) oprot.write_field_end() if self.event is not None: oprot.write_field_begin(name='event', type=11, id=None) oprot.write_string(self.event) oprot.write_field_end() if self.ew is not None: oprot.write_field_begin(name='ew', type=11, id=None) oprot.write_string(self.ew) oprot.write_field_end() if self.excavadate is not None: oprot.write_field_begin(name='excavadate', type=10, id=None) oprot.write_date(self.excavadate) oprot.write_field_end() if self.excavateby is not None: oprot.write_field_begin(name='excavateby', type=11, id=None) oprot.write_string(self.excavateby) oprot.write_field_end() if self.exhibitid is not None: oprot.write_field_begin(name='exhibitid', type=11, id=None) oprot.write_string(self.exhibitid) oprot.write_field_end() if self.exhibitno is not None: oprot.write_field_begin(name='exhibitno', type=8, id=None) oprot.write_i32(self.exhibitno) oprot.write_field_end() if self.exhlabel1 is not None: oprot.write_field_begin(name='exhlabel1', type=11, id=None) oprot.write_string(self.exhlabel1) oprot.write_field_end() if self.exhlabel2 is not None: oprot.write_field_begin(name='exhlabel2', type=11, id=None) oprot.write_string(self.exhlabel2) oprot.write_field_end() if self.exhlabel3 is not None: oprot.write_field_begin(name='exhlabel3', type=11, id=None) oprot.write_string(self.exhlabel3) oprot.write_field_end() if self.exhlabel4 is not None: oprot.write_field_begin(name='exhlabel4', type=11, id=None) oprot.write_string(self.exhlabel4) oprot.write_field_end() if self.exhstart is not None: oprot.write_field_begin(name='exhstart', type=10, id=None) oprot.write_date(self.exhstart) oprot.write_field_end() if self.family is not None: oprot.write_field_begin(name='family', type=11, id=None) oprot.write_string(self.family) oprot.write_field_end() if self.feature is not None: oprot.write_field_begin(name='feature', type=11, id=None) oprot.write_string(self.feature) oprot.write_field_end() if self.flagdate is not None: oprot.write_field_begin(name='flagdate', type=10, id=None) oprot.write_date_time(self.flagdate) oprot.write_field_end() if self.flagnotes is not None: oprot.write_field_begin(name='flagnotes', type=11, id=None) oprot.write_string(self.flagnotes) oprot.write_field_end() if self.flagreason is not None: oprot.write_field_begin(name='flagreason', type=11, id=None) oprot.write_string(self.flagreason) oprot.write_field_end() if self.formation is not None: oprot.write_field_begin(name='formation', type=11, id=None) oprot.write_string(self.formation) oprot.write_field_end() if self.fossils is not None: oprot.write_field_begin(name='fossils', type=11, id=None) oprot.write_string(self.fossils) oprot.write_field_end() if self.found is not None: oprot.write_field_begin(name='found', type=11, id=None) oprot.write_string(self.found) oprot.write_field_end() if self.fracture is not None: oprot.write_field_begin(name='fracture', type=11, id=None) oprot.write_string(self.fracture) oprot.write_field_end() if self.frame is not None: oprot.write_field_begin(name='frame', type=11, id=None) oprot.write_string(self.frame) oprot.write_field_end() if self.framesize is not None: oprot.write_field_begin(name='framesize', type=11, id=None) oprot.write_string(self.framesize) oprot.write_field_end() if self.genus is not None: oprot.write_field_begin(name='genus', type=11, id=None) oprot.write_string(self.genus) oprot.write_field_end() if self.gparent is not None: oprot.write_field_begin(name='gparent', type=11, id=None) oprot.write_string(self.gparent) oprot.write_field_end() if self.grainsize is not None: oprot.write_field_begin(name='grainsize', type=11, id=None) oprot.write_string(self.grainsize) oprot.write_field_end() if self.habitat is not None: oprot.write_field_begin(name='habitat', type=11, id=None) oprot.write_string(self.habitat) oprot.write_field_end() if self.hardness is not None: oprot.write_field_begin(name='hardness', type=11, id=None) oprot.write_string(self.hardness) oprot.write_field_end() if self.height is not None: oprot.write_field_begin(name='height', type=11, id=None) oprot.write_decimal(self.height) oprot.write_field_end() if self.heightft is not None: oprot.write_field_begin(name='heightft', type=11, id=None) oprot.write_decimal(self.heightft) oprot.write_field_end() if self.heightin is not None: oprot.write_field_begin(name='heightin', type=11, id=None) oprot.write_decimal(self.heightin) oprot.write_field_end() if self.homeloc is not None: oprot.write_field_begin(name='homeloc', type=11, id=None) oprot.write_string(self.homeloc) oprot.write_field_end() if self.idby is not None: oprot.write_field_begin(name='idby', type=11, id=None) oprot.write_string(self.idby) oprot.write_field_end() if self.iddate is not None: oprot.write_field_begin(name='iddate', type=10, id=None) oprot.write_date(self.iddate) oprot.write_field_end() if self.imagefile is not None: oprot.write_field_begin(name='imagefile', type=11, id=None) oprot.write_string(self.imagefile) oprot.write_field_end() if self.imageno is not None: oprot.write_field_begin(name='imageno', type=8, id=None) oprot.write_i32(self.imageno) oprot.write_field_end() if self.imagesize is not None: oprot.write_field_begin(name='imagesize', type=11, id=None) oprot.write_string(self.imagesize) oprot.write_field_end() if self.inscomp is not None: oprot.write_field_begin(name='inscomp', type=11, id=None) oprot.write_string(self.inscomp) oprot.write_field_end() if self.inscrlang is not None: oprot.write_field_begin(name='inscrlang', type=11, id=None) oprot.write_string(self.inscrlang) oprot.write_field_end() if self.inscrpos is not None: oprot.write_field_begin(name='inscrpos', type=11, id=None) oprot.write_string(self.inscrpos) oprot.write_field_end() if self.inscrtech is not None: oprot.write_field_begin(name='inscrtech', type=11, id=None) oprot.write_string(self.inscrtech) oprot.write_field_end() if self.inscrtext is not None: oprot.write_field_begin(name='inscrtext', type=11, id=None) oprot.write_string(self.inscrtext) oprot.write_field_end() if self.inscrtrans is not None: oprot.write_field_begin(name='inscrtrans', type=11, id=None) oprot.write_string(self.inscrtrans) oprot.write_field_end() if self.inscrtype is not None: oprot.write_field_begin(name='inscrtype', type=11, id=None) oprot.write_string(self.inscrtype) oprot.write_field_end() if self.insdate is not None: oprot.write_field_begin(name='insdate', type=10, id=None) oprot.write_date(self.insdate) oprot.write_field_end() if self.insphone is not None: oprot.write_field_begin(name='insphone', type=11, id=None) oprot.write_string(self.insphone) oprot.write_field_end() if self.inspremium is not None: oprot.write_field_begin(name='inspremium', type=11, id=None) oprot.write_string(self.inspremium) oprot.write_field_end() if self.insrep is not None: oprot.write_field_begin(name='insrep', type=11, id=None) oprot.write_string(self.insrep) oprot.write_field_end() if self.insvalue is not None: oprot.write_field_begin(name='insvalue', type=11, id=None) oprot.write_decimal(self.insvalue) oprot.write_field_end() if self.invnby is not None: oprot.write_field_begin(name='invnby', type=11, id=None) oprot.write_string(self.invnby) oprot.write_field_end() if self.invndate is not None: oprot.write_field_begin(name='invndate', type=10, id=None) oprot.write_date(self.invndate) oprot.write_field_end() if self.kingdom is not None: oprot.write_field_begin(name='kingdom', type=11, id=None) oprot.write_string(self.kingdom) oprot.write_field_end() if self.latdeg is not None: oprot.write_field_begin(name='latdeg', type=11, id=None) oprot.write_decimal(self.latdeg) oprot.write_field_end() if self.latedate is not None: oprot.write_field_begin(name='latedate', type=8, id=None) oprot.write_i32(self.latedate) oprot.write_field_end() if self.legal is not None: oprot.write_field_begin(name='legal', type=11, id=None) oprot.write_string(self.legal) oprot.write_field_end() if self.length is not None: oprot.write_field_begin(name='length', type=11, id=None) oprot.write_decimal(self.length) oprot.write_field_end() if self.lengthft is not None: oprot.write_field_begin(name='lengthft', type=11, id=None) oprot.write_decimal(self.lengthft) oprot.write_field_end() if self.lengthin is not None: oprot.write_field_begin(name='lengthin', type=11, id=None) oprot.write_decimal(self.lengthin) oprot.write_field_end() if self.level is not None: oprot.write_field_begin(name='level', type=11, id=None) oprot.write_string(self.level) oprot.write_field_end() if self.lithofacie is not None: oprot.write_field_begin(name='lithofacie', type=11, id=None) oprot.write_string(self.lithofacie) oprot.write_field_end() if self.loancond is not None: oprot.write_field_begin(name='loancond', type=11, id=None) oprot.write_string(self.loancond) oprot.write_field_end() if self.loandue is not None: oprot.write_field_begin(name='loandue', type=10, id=None) oprot.write_date(self.loandue) oprot.write_field_end() if self.loanid is not None: oprot.write_field_begin(name='loanid', type=11, id=None) oprot.write_string(self.loanid) oprot.write_field_end() if self.loaninno is not None: oprot.write_field_begin(name='loaninno', type=11, id=None) oprot.write_string(self.loaninno) oprot.write_field_end() if self.loanno is not None: oprot.write_field_begin(name='loanno', type=8, id=None) oprot.write_i32(self.loanno) oprot.write_field_end() if self.loanrenew is not None: oprot.write_field_begin(name='loanrenew', type=10, id=None) oprot.write_date(self.loanrenew) oprot.write_field_end() if self.locfield1 is not None: oprot.write_field_begin(name='locfield1', type=11, id=None) oprot.write_string(self.locfield1) oprot.write_field_end() if self.locfield2 is not None: oprot.write_field_begin(name='locfield2', type=11, id=None) oprot.write_string(self.locfield2) oprot.write_field_end() if self.locfield3 is not None: oprot.write_field_begin(name='locfield3', type=11, id=None) oprot.write_string(self.locfield3) oprot.write_field_end() if self.locfield4 is not None: oprot.write_field_begin(name='locfield4', type=11, id=None) oprot.write_string(self.locfield4) oprot.write_field_end() if self.locfield5 is not None: oprot.write_field_begin(name='locfield5', type=11, id=None) oprot.write_string(self.locfield5) oprot.write_field_end() if self.locfield6 is not None: oprot.write_field_begin(name='locfield6', type=11, id=None) oprot.write_string(self.locfield6) oprot.write_field_end() if self.longdeg is not None: oprot.write_field_begin(name='longdeg', type=11, id=None) oprot.write_decimal(self.longdeg) oprot.write_field_end() if self.luster is not None: oprot.write_field_begin(name='luster', type=11, id=None) oprot.write_string(self.luster) oprot.write_field_end() if self.made is not None: oprot.write_field_begin(name='made', type=11, id=None) oprot.write_string(self.made) oprot.write_field_end() if self.maintcycle is not None: oprot.write_field_begin(name='maintcycle', type=11, id=None) oprot.write_string(self.maintcycle) oprot.write_field_end() if self.maintdate is not None: oprot.write_field_begin(name='maintdate', type=10, id=None) oprot.write_date(self.maintdate) oprot.write_field_end() if self.maintnote is not None: oprot.write_field_begin(name='maintnote', type=11, id=None) oprot.write_string(self.maintnote) oprot.write_field_end() if self.material is not None: oprot.write_field_begin(name='material', type=11, id=None) oprot.write_string(self.material) oprot.write_field_end() if self.medium is not None: oprot.write_field_begin(name='medium', type=11, id=None) oprot.write_string(self.medium) oprot.write_field_end() if self.member is not None: oprot.write_field_begin(name='member', type=11, id=None) oprot.write_string(self.member) oprot.write_field_end() if self.mmark is not None: oprot.write_field_begin(name='mmark', type=11, id=None) oprot.write_string(self.mmark) oprot.write_field_end() if self.nhclass is not None: oprot.write_field_begin(name='nhclass', type=11, id=None) oprot.write_string(self.nhclass) oprot.write_field_end() if self.nhorder is not None: oprot.write_field_begin(name='nhorder', type=11, id=None) oprot.write_string(self.nhorder) oprot.write_field_end() if self.notes is not None: oprot.write_field_begin(name='notes', type=11, id=None) oprot.write_string(self.notes) oprot.write_field_end() if self.ns is not None: oprot.write_field_begin(name='ns', type=11, id=None) oprot.write_string(self.ns) oprot.write_field_end() if self.objectid is not None: oprot.write_field_begin(name='objectid', type=11, id=None) oprot.write_string(self.objectid) oprot.write_field_end() if self.objname is not None: oprot.write_field_begin(name='objname', type=11, id=None) oprot.write_string(self.objname) oprot.write_field_end() if self.objname2 is not None: oprot.write_field_begin(name='objname2', type=11, id=None) oprot.write_string(self.objname2) oprot.write_field_end() if self.objname3 is not None: oprot.write_field_begin(name='objname3', type=11, id=None) oprot.write_string(self.objname3) oprot.write_field_end() if self.objnames is not None: oprot.write_field_begin(name='objnames', type=11, id=None) oprot.write_string(self.objnames) oprot.write_field_end() if self.occurrence is not None: oprot.write_field_begin(name='occurrence', type=11, id=None) oprot.write_string(self.occurrence) oprot.write_field_end() if self.oldno is not None: oprot.write_field_begin(name='oldno', type=11, id=None) oprot.write_string(self.oldno) oprot.write_field_end() if self.origin is not None: oprot.write_field_begin(name='origin', type=11, id=None) oprot.write_string(self.origin) oprot.write_field_end() if self.othername is not None: oprot.write_field_begin(name='othername', type=11, id=None) oprot.write_string(self.othername) oprot.write_field_end() if self.otherno is not None: oprot.write_field_begin(name='otherno', type=11, id=None) oprot.write_string(self.otherno) oprot.write_field_end() if self.outdate is not None: oprot.write_field_begin(name='outdate', type=10, id=None) oprot.write_date(self.outdate) oprot.write_field_end() if self.owned is not None: oprot.write_field_begin(name='owned', type=11, id=None) oprot.write_string(self.owned) oprot.write_field_end() if self.parent is not None: oprot.write_field_begin(name='parent', type=11, id=None) oprot.write_string(self.parent) oprot.write_field_end() if self.people is not None: oprot.write_field_begin(name='people', type=11, id=None) oprot.write_string(self.people) oprot.write_field_end() if self.period is not None: oprot.write_field_begin(name='period', type=11, id=None) oprot.write_string(self.period) oprot.write_field_end() if self.phylum is not None: oprot.write_field_begin(name='phylum', type=11, id=None) oprot.write_string(self.phylum) oprot.write_field_end() if self.policyno is not None: oprot.write_field_begin(name='policyno', type=11, id=None) oprot.write_string(self.policyno) oprot.write_field_end() if self.ppid is not None: oprot.write_field_begin(name='ppid', type=11, id=None) oprot.write_string(self.ppid) oprot.write_field_end() if self.preparator is not None: oprot.write_field_begin(name='preparator', type=11, id=None) oprot.write_string(self.preparator) oprot.write_field_end() if self.prepdate is not None: oprot.write_field_begin(name='prepdate', type=10, id=None) oprot.write_date(self.prepdate) oprot.write_field_end() if self.preserve is not None: oprot.write_field_begin(name='preserve', type=11, id=None) oprot.write_string(self.preserve) oprot.write_field_end() if self.pressure is not None: oprot.write_field_begin(name='pressure', type=11, id=None) oprot.write_string(self.pressure) oprot.write_field_end() if self.provenance is not None: oprot.write_field_begin(name='provenance', type=11, id=None) oprot.write_string(self.provenance) oprot.write_field_end() if self.pubnotes is not None: oprot.write_field_begin(name='pubnotes', type=11, id=None) oprot.write_string(self.pubnotes) oprot.write_field_end() if self.qrurl is not None: oprot.write_field_begin(name='qrurl', type=11, id=None) oprot.write_string(self.qrurl) oprot.write_field_end() if self.recas is not None: oprot.write_field_begin(name='recas', type=11, id=None) oprot.write_string(self.recas) oprot.write_field_end() if self.recdate is not None: oprot.write_field_begin(name='recdate', type=11, id=None) oprot.write_string(self.recdate) oprot.write_field_end() if self.recfrom is not None: oprot.write_field_begin(name='recfrom', type=11, id=None) oprot.write_string(self.recfrom) oprot.write_field_end() if self.relation is not None: oprot.write_field_begin(name='relation', type=11, id=None) oprot.write_string(self.relation) oprot.write_field_end() if self.relnotes is not None: oprot.write_field_begin(name='relnotes', type=11, id=None) oprot.write_string(self.relnotes) oprot.write_field_end() if self.renewuntil is not None: oprot.write_field_begin(name='renewuntil', type=10, id=None) oprot.write_date(self.renewuntil) oprot.write_field_end() if self.repatby is not None: oprot.write_field_begin(name='repatby', type=11, id=None) oprot.write_string(self.repatby) oprot.write_field_end() if self.repatclaim is not None: oprot.write_field_begin(name='repatclaim', type=11, id=None) oprot.write_string(self.repatclaim) oprot.write_field_end() if self.repatdate is not None: oprot.write_field_begin(name='repatdate', type=10, id=None) oprot.write_date(self.repatdate) oprot.write_field_end() if self.repatdisp is not None: oprot.write_field_begin(name='repatdisp', type=11, id=None) oprot.write_string(self.repatdisp) oprot.write_field_end() if self.repathand is not None: oprot.write_field_begin(name='repathand', type=11, id=None) oprot.write_string(self.repathand) oprot.write_field_end() if self.repatnotes is not None: oprot.write_field_begin(name='repatnotes', type=11, id=None) oprot.write_string(self.repatnotes) oprot.write_field_end() if self.repatnotic is not None: oprot.write_field_begin(name='repatnotic', type=10, id=None) oprot.write_date(self.repatnotic) oprot.write_field_end() if self.repattype is not None: oprot.write_field_begin(name='repattype', type=11, id=None) oprot.write_string(self.repattype) oprot.write_field_end() if self.rockclass is not None: oprot.write_field_begin(name='rockclass', type=11, id=None) oprot.write_string(self.rockclass) oprot.write_field_end() if self.rockcolor is not None: oprot.write_field_begin(name='rockcolor', type=11, id=None) oprot.write_string(self.rockcolor) oprot.write_field_end() if self.rockorigin is not None: oprot.write_field_begin(name='rockorigin', type=11, id=None) oprot.write_string(self.rockorigin) oprot.write_field_end() if self.rocktype is not None: oprot.write_field_begin(name='rocktype', type=11, id=None) oprot.write_string(self.rocktype) oprot.write_field_end() if self.role is not None: oprot.write_field_begin(name='role', type=11, id=None) oprot.write_string(self.role) oprot.write_field_end() if self.role2 is not None: oprot.write_field_begin(name='role2', type=11, id=None) oprot.write_string(self.role2) oprot.write_field_end() if self.role3 is not None: oprot.write_field_begin(name='role3', type=11, id=None) oprot.write_string(self.role3) oprot.write_field_end() if self.school is not None: oprot.write_field_begin(name='school', type=11, id=None) oprot.write_string(self.school) oprot.write_field_end() if self.sex is not None: oprot.write_field_begin(name='sex', type=11, id=None) oprot.write_string(self.sex) oprot.write_field_end() if self.sgflag is not None: oprot.write_field_begin(name='sgflag', type=11, id=None) oprot.write_string(self.sgflag) oprot.write_field_end() if self.signedname is not None: oprot.write_field_begin(name='signedname', type=11, id=None) oprot.write_string(self.signedname) oprot.write_field_end() if self.signloc is not None: oprot.write_field_begin(name='signloc', type=11, id=None) oprot.write_string(self.signloc) oprot.write_field_end() if self.site is not None: oprot.write_field_begin(name='site', type=11, id=None) oprot.write_string(self.site) oprot.write_field_end() if self.siteno is not None: oprot.write_field_begin(name='siteno', type=11, id=None) oprot.write_string(self.siteno) oprot.write_field_end() if self.specgrav is not None: oprot.write_field_begin(name='specgrav', type=11, id=None) oprot.write_string(self.specgrav) oprot.write_field_end() if self.species is not None: oprot.write_field_begin(name='species', type=11, id=None) oprot.write_string(self.species) oprot.write_field_end() if self.sprocess is not None: oprot.write_field_begin(name='sprocess', type=11, id=None) oprot.write_string(self.sprocess) oprot.write_field_end() if self.stage is not None: oprot.write_field_begin(name='stage', type=11, id=None) oprot.write_string(self.stage) oprot.write_field_end() if self.status is not None: oprot.write_field_begin(name='status', type=11, id=None) oprot.write_string(self.status) oprot.write_field_end() if self.statusby is not None: oprot.write_field_begin(name='statusby', type=11, id=None) oprot.write_string(self.statusby) oprot.write_field_end() if self.statusdate is not None: oprot.write_field_begin(name='statusdate', type=10, id=None) oprot.write_date(self.statusdate) oprot.write_field_end() if self.sterms is not None: oprot.write_field_begin(name='sterms', type=11, id=None) oprot.write_string(self.sterms) oprot.write_field_end() if self.stratum is not None: oprot.write_field_begin(name='stratum', type=11, id=None) oprot.write_string(self.stratum) oprot.write_field_end() if self.streak is not None: oprot.write_field_begin(name='streak', type=11, id=None) oprot.write_string(self.streak) oprot.write_field_end() if self.subfamily is not None: oprot.write_field_begin(name='subfamily', type=11, id=None) oprot.write_string(self.subfamily) oprot.write_field_end() if self.subjects is not None: oprot.write_field_begin(name='subjects', type=11, id=None) oprot.write_string(self.subjects) oprot.write_field_end() if self.subspecies is not None: oprot.write_field_begin(name='subspecies', type=11, id=None) oprot.write_string(self.subspecies) oprot.write_field_end() if self.technique is not None: oprot.write_field_begin(name='technique', type=11, id=None) oprot.write_string(self.technique) oprot.write_field_end() if self.tempauthor is not None: oprot.write_field_begin(name='tempauthor', type=11, id=None) oprot.write_string(self.tempauthor) oprot.write_field_end() if self.tempby is not None: oprot.write_field_begin(name='tempby', type=11, id=None) oprot.write_string(self.tempby) oprot.write_field_end() if self.tempdate is not None: oprot.write_field_begin(name='tempdate', type=10, id=None) oprot.write_date(self.tempdate) oprot.write_field_end() if self.temperatur is not None: oprot.write_field_begin(name='temperatur', type=11, id=None) oprot.write_string(self.temperatur) oprot.write_field_end() if self.temploc is not None: oprot.write_field_begin(name='temploc', type=11, id=None) oprot.write_string(self.temploc) oprot.write_field_end() if self.tempnotes is not None: oprot.write_field_begin(name='tempnotes', type=11, id=None) oprot.write_string(self.tempnotes) oprot.write_field_end() if self.tempreason is not None: oprot.write_field_begin(name='tempreason', type=11, id=None) oprot.write_string(self.tempreason) oprot.write_field_end() if self.tempuntil is not None: oprot.write_field_begin(name='tempuntil', type=11, id=None) oprot.write_string(self.tempuntil) oprot.write_field_end() if self.texture is not None: oprot.write_field_begin(name='texture', type=11, id=None) oprot.write_string(self.texture) oprot.write_field_end() if self.title is not None: oprot.write_field_begin(name='title', type=11, id=None) oprot.write_string(self.title) oprot.write_field_end() if self.tlocfield1 is not None: oprot.write_field_begin(name='tlocfield1', type=11, id=None) oprot.write_string(self.tlocfield1) oprot.write_field_end() if self.tlocfield2 is not None: oprot.write_field_begin(name='tlocfield2', type=11, id=None) oprot.write_string(self.tlocfield2) oprot.write_field_end() if self.tlocfield3 is not None: oprot.write_field_begin(name='tlocfield3', type=11, id=None) oprot.write_string(self.tlocfield3) oprot.write_field_end() if self.tlocfield4 is not None: oprot.write_field_begin(name='tlocfield4', type=11, id=None) oprot.write_string(self.tlocfield4) oprot.write_field_end() if self.tlocfield5 is not None: oprot.write_field_begin(name='tlocfield5', type=11, id=None) oprot.write_string(self.tlocfield5) oprot.write_field_end() if self.tlocfield6 is not None: oprot.write_field_begin(name='tlocfield6', type=11, id=None) oprot.write_string(self.tlocfield6) oprot.write_field_end() if self.udf1 is not None: oprot.write_field_begin(name='udf1', type=11, id=None) oprot.write_string(self.udf1) oprot.write_field_end() if self.udf10 is not None: oprot.write_field_begin(name='udf10', type=11, id=None) oprot.write_string(self.udf10) oprot.write_field_end() if self.udf11 is not None: oprot.write_field_begin(name='udf11', type=11, id=None) oprot.write_string(self.udf11) oprot.write_field_end() if self.udf12 is not None: oprot.write_field_begin(name='udf12', type=11, id=None) oprot.write_string(self.udf12) oprot.write_field_end() if self.udf13 is not None: oprot.write_field_begin(name='udf13', type=8, id=None) oprot.write_i32(self.udf13) oprot.write_field_end() if self.udf14 is not None: oprot.write_field_begin(name='udf14', type=11, id=None) oprot.write_decimal(self.udf14) oprot.write_field_end() if self.udf15 is not None: oprot.write_field_begin(name='udf15', type=11, id=None) oprot.write_decimal(self.udf15) oprot.write_field_end() if self.udf16 is not None: oprot.write_field_begin(name='udf16', type=11, id=None) oprot.write_decimal(self.udf16) oprot.write_field_end() if self.udf17 is not None: oprot.write_field_begin(name='udf17', type=11, id=None) oprot.write_decimal(self.udf17) oprot.write_field_end() if self.udf18 is not None: oprot.write_field_begin(name='udf18', type=10, id=None) oprot.write_date(self.udf18) oprot.write_field_end() if self.udf19 is not None: oprot.write_field_begin(name='udf19', type=10, id=None) oprot.write_date(self.udf19) oprot.write_field_end() if self.udf2 is not None: oprot.write_field_begin(name='udf2', type=11, id=None) oprot.write_string(self.udf2) oprot.write_field_end() if self.udf20 is not None: oprot.write_field_begin(name='udf20', type=10, id=None) oprot.write_date(self.udf20) oprot.write_field_end() if self.udf21 is not None: oprot.write_field_begin(name='udf21', type=11, id=None) oprot.write_string(self.udf21) oprot.write_field_end() if self.udf22 is not None: oprot.write_field_begin(name='udf22', type=11, id=None) oprot.write_string(self.udf22) oprot.write_field_end() if self.udf3 is not None: oprot.write_field_begin(name='udf3', type=11, id=None) oprot.write_string(self.udf3) oprot.write_field_end() if self.udf4 is not None: oprot.write_field_begin(name='udf4', type=11, id=None) oprot.write_string(self.udf4) oprot.write_field_end() if self.udf5 is not None: oprot.write_field_begin(name='udf5', type=11, id=None) oprot.write_string(self.udf5) oprot.write_field_end() if self.udf6 is not None: oprot.write_field_begin(name='udf6', type=11, id=None) oprot.write_string(self.udf6) oprot.write_field_end() if self.udf7 is not None: oprot.write_field_begin(name='udf7', type=11, id=None) oprot.write_string(self.udf7) oprot.write_field_end() if self.udf8 is not None: oprot.write_field_begin(name='udf8', type=11, id=None) oprot.write_string(self.udf8) oprot.write_field_end() if self.udf9 is not None: oprot.write_field_begin(name='udf9', type=11, id=None) oprot.write_string(self.udf9) oprot.write_field_end() if self.unit is not None: oprot.write_field_begin(name='unit', type=11, id=None) oprot.write_string(self.unit) oprot.write_field_end() if self.updated is not None: oprot.write_field_begin(name='updated', type=10, id=None) oprot.write_date_time(self.updated) oprot.write_field_end() if self.updatedby is not None: oprot.write_field_begin(name='updatedby', type=11, id=None) oprot.write_string(self.updatedby) oprot.write_field_end() if self.used is not None: oprot.write_field_begin(name='used', type=11, id=None) oprot.write_string(self.used) oprot.write_field_end() if self.valuedate is not None: oprot.write_field_begin(name='valuedate', type=10, id=None) oprot.write_date(self.valuedate) oprot.write_field_end() if self.varieties is not None: oprot.write_field_begin(name='varieties', type=11, id=None) oprot.write_string(self.varieties) oprot.write_field_end() if self.vexhtml is not None: oprot.write_field_begin(name='vexhtml', type=11, id=None) oprot.write_string(self.vexhtml) oprot.write_field_end() if self.vexlabel1 is not None: oprot.write_field_begin(name='vexlabel1', type=11, id=None) oprot.write_string(self.vexlabel1) oprot.write_field_end() if self.vexlabel2 is not None: oprot.write_field_begin(name='vexlabel2', type=11, id=None) oprot.write_string(self.vexlabel2) oprot.write_field_end() if self.vexlabel3 is not None: oprot.write_field_begin(name='vexlabel3', type=11, id=None) oprot.write_string(self.vexlabel3) oprot.write_field_end() if self.vexlabel4 is not None: oprot.write_field_begin(name='vexlabel4', type=11, id=None) oprot.write_string(self.vexlabel4) oprot.write_field_end() if self.webinclude is not None: oprot.write_field_begin(name='webinclude', type=2, id=None) oprot.write_bool(self.webinclude) oprot.write_field_end() if self.weight is not None: oprot.write_field_begin(name='weight', type=11, id=None) oprot.write_decimal(self.weight) oprot.write_field_end() if self.weightin is not None: oprot.write_field_begin(name='weightin', type=11, id=None) oprot.write_decimal(self.weightin) oprot.write_field_end() if self.weightlb is not None: oprot.write_field_begin(name='weightlb', type=11, id=None) oprot.write_decimal(self.weightlb) oprot.write_field_end() if self.width is not None: oprot.write_field_begin(name='width', type=11, id=None) oprot.write_decimal(self.width) oprot.write_field_end() if self.widthft is not None: oprot.write_field_begin(name='widthft', type=11, id=None) oprot.write_decimal(self.widthft) oprot.write_field_end() if self.widthin is not None: oprot.write_field_begin(name='widthin', type=11, id=None) oprot.write_decimal(self.widthin) oprot.write_field_end() if self.xcord is not None: oprot.write_field_begin(name='xcord', type=11, id=None) oprot.write_decimal(self.xcord) oprot.write_field_end() if self.ycord is not None: oprot.write_field_begin(name='ycord', type=11, id=None) oprot.write_decimal(self.ycord) oprot.write_field_end() if self.zcord is not None: oprot.write_field_begin(name='zcord', type=11, id=None) oprot.write_decimal(self.zcord) oprot.write_field_end() if self.zsorter is not None: oprot.write_field_begin(name='zsorter', type=11, id=None) oprot.write_string(self.zsorter) oprot.write_field_end() if self.zsorterx is not None: oprot.write_field_begin(name='zsorterx', type=11, id=None) oprot.write_string(self.zsorterx) oprot.write_field_end() oprot.write_field_stop() oprot.write_struct_end() return self
def function[write, parameter[self, oprot]]: constant[ Write this object to the given output protocol and return self. :type oprot: thryft.protocol._output_protocol._OutputProtocol :rtype: pastpy.gen.database.impl.dbf.objects_dbf_record.ObjectsDbfRecord ] call[name[oprot].write_struct_begin, parameter[constant[ObjectsDbfRecord]]] if compare[name[self].accessno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].accessno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].accessory is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].accessory]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].acqvalue is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].acqvalue]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].age is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].age]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].appnotes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].appnotes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].appraisor is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].appraisor]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].assemzone is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].assemzone]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].bagno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].bagno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].boxno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].boxno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].caption is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].caption]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].cat is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].cat]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].catby is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].catby]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].catdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].catdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].cattype is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].cattype]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].chemcomp is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].chemcomp]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].circum is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].circum]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].circumft is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].circumft]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].circumin is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].circumin]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].classes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].classes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].colldate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].colldate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].collection is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].collection]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].collector is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].collector]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].conddate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].conddate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].condexam is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].condexam]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].condition is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].condition]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].condnotes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].condnotes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].count is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].count]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].creator is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].creator]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].creator2 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].creator2]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].creator3 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].creator3]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].credit is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].credit]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].crystal is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].crystal]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].culture is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].culture]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].curvalmax is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].curvalmax]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].curvalue is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].curvalue]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].dataset is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].dataset]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].date is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].date]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].datingmeth is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].datingmeth]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].datum is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].datum]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].depth is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].depth]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].depthft is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].depthft]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].depthin is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].depthin]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].descrip is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].descrip]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].diameter is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].diameter]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].diameterft is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].diameterft]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].diameterin is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].diameterin]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].dimnotes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].dimnotes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].dimtype is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_i32, parameter[name[self].dimtype]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].dispvalue is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].dispvalue]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].earlydate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_i32, parameter[name[self].earlydate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].elements is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].elements]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].epoch is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].epoch]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].era is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].era]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].event is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].event]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].ew is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].ew]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].excavadate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].excavadate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].excavateby is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].excavateby]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].exhibitid is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].exhibitid]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].exhibitno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_i32, parameter[name[self].exhibitno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].exhlabel1 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].exhlabel1]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].exhlabel2 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].exhlabel2]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].exhlabel3 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].exhlabel3]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].exhlabel4 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].exhlabel4]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].exhstart is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].exhstart]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].family is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].family]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].feature is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].feature]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].flagdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date_time, parameter[name[self].flagdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].flagnotes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].flagnotes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].flagreason is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].flagreason]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].formation is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].formation]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].fossils is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].fossils]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].found is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].found]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].fracture is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].fracture]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].frame is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].frame]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].framesize is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].framesize]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].genus is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].genus]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].gparent is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].gparent]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].grainsize is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].grainsize]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].habitat is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].habitat]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].hardness is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].hardness]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].height is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].height]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].heightft is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].heightft]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].heightin is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].heightin]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].homeloc is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].homeloc]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].idby is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].idby]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].iddate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].iddate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].imagefile is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].imagefile]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].imageno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_i32, parameter[name[self].imageno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].imagesize is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].imagesize]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].inscomp is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].inscomp]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].inscrlang is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].inscrlang]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].inscrpos is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].inscrpos]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].inscrtech is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].inscrtech]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].inscrtext is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].inscrtext]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].inscrtrans is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].inscrtrans]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].inscrtype is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].inscrtype]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].insdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].insdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].insphone is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].insphone]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].inspremium is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].inspremium]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].insrep is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].insrep]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].insvalue is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].insvalue]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].invnby is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].invnby]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].invndate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].invndate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].kingdom is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].kingdom]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].latdeg is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].latdeg]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].latedate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_i32, parameter[name[self].latedate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].legal is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].legal]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].length is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].length]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].lengthft is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].lengthft]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].lengthin is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].lengthin]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].level is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].level]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].lithofacie is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].lithofacie]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].loancond is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].loancond]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].loandue is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].loandue]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].loanid is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].loanid]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].loaninno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].loaninno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].loanno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_i32, parameter[name[self].loanno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].loanrenew is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].loanrenew]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].locfield1 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].locfield1]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].locfield2 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].locfield2]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].locfield3 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].locfield3]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].locfield4 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].locfield4]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].locfield5 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].locfield5]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].locfield6 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].locfield6]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].longdeg is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].longdeg]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].luster is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].luster]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].made is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].made]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].maintcycle is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].maintcycle]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].maintdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].maintdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].maintnote is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].maintnote]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].material is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].material]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].medium is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].medium]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].member is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].member]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].mmark is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].mmark]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].nhclass is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].nhclass]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].nhorder is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].nhorder]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].notes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].notes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].ns is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].ns]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].objectid is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].objectid]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].objname is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].objname]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].objname2 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].objname2]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].objname3 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].objname3]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].objnames is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].objnames]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].occurrence is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].occurrence]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].oldno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].oldno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].origin is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].origin]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].othername is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].othername]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].otherno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].otherno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].outdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].outdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].owned is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].owned]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].parent is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].parent]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].people is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].people]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].period is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].period]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].phylum is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].phylum]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].policyno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].policyno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].ppid is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].ppid]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].preparator is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].preparator]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].prepdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].prepdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].preserve is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].preserve]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].pressure is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].pressure]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].provenance is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].provenance]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].pubnotes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].pubnotes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].qrurl is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].qrurl]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].recas is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].recas]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].recdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].recdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].recfrom is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].recfrom]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].relation is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].relation]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].relnotes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].relnotes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].renewuntil is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].renewuntil]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].repatby is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].repatby]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].repatclaim is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].repatclaim]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].repatdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].repatdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].repatdisp is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].repatdisp]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].repathand is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].repathand]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].repatnotes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].repatnotes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].repatnotic is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].repatnotic]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].repattype is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].repattype]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].rockclass is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].rockclass]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].rockcolor is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].rockcolor]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].rockorigin is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].rockorigin]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].rocktype is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].rocktype]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].role is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].role]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].role2 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].role2]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].role3 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].role3]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].school is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].school]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].sex is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].sex]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].sgflag is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].sgflag]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].signedname is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].signedname]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].signloc is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].signloc]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].site is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].site]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].siteno is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].siteno]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].specgrav is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].specgrav]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].species is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].species]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].sprocess is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].sprocess]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].stage is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].stage]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].status is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].status]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].statusby is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].statusby]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].statusdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].statusdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].sterms is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].sterms]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].stratum is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].stratum]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].streak is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].streak]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].subfamily is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].subfamily]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].subjects is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].subjects]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].subspecies is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].subspecies]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].technique is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].technique]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tempauthor is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tempauthor]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tempby is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tempby]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tempdate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].tempdate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].temperatur is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].temperatur]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].temploc is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].temploc]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tempnotes is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tempnotes]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tempreason is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tempreason]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tempuntil is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tempuntil]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].texture is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].texture]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].title is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].title]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tlocfield1 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tlocfield1]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tlocfield2 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tlocfield2]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tlocfield3 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tlocfield3]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tlocfield4 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tlocfield4]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tlocfield5 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tlocfield5]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].tlocfield6 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].tlocfield6]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf1 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf1]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf10 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf10]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf11 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf11]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf12 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf12]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf13 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_i32, parameter[name[self].udf13]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf14 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].udf14]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf15 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].udf15]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf16 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].udf16]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf17 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].udf17]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf18 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].udf18]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf19 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].udf19]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf2 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf2]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf20 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].udf20]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf21 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf21]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf22 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf22]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf3 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf3]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf4 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf4]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf5 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf5]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf6 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf6]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf7 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf7]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf8 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf8]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].udf9 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].udf9]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].unit is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].unit]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].updated is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date_time, parameter[name[self].updated]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].updatedby is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].updatedby]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].used is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].used]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].valuedate is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_date, parameter[name[self].valuedate]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].varieties is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].varieties]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].vexhtml is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].vexhtml]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].vexlabel1 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].vexlabel1]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].vexlabel2 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].vexlabel2]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].vexlabel3 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].vexlabel3]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].vexlabel4 is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].vexlabel4]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].webinclude is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_bool, parameter[name[self].webinclude]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].weight is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].weight]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].weightin is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].weightin]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].weightlb is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].weightlb]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].width is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].width]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].widthft is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].widthft]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].widthin is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].widthin]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].xcord is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].xcord]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].ycord is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].ycord]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].zcord is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_decimal, parameter[name[self].zcord]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].zsorter is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].zsorter]] call[name[oprot].write_field_end, parameter[]] if compare[name[self].zsorterx is_not constant[None]] begin[:] call[name[oprot].write_field_begin, parameter[]] call[name[oprot].write_string, parameter[name[self].zsorterx]] call[name[oprot].write_field_end, parameter[]] call[name[oprot].write_field_stop, parameter[]] call[name[oprot].write_struct_end, parameter[]] return[name[self]]
keyword[def] identifier[write] ( identifier[self] , identifier[oprot] ): literal[string] identifier[oprot] . identifier[write_struct_begin] ( literal[string] ) keyword[if] identifier[self] . identifier[accessno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[accessno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[accessory] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[accessory] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[acqvalue] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[acqvalue] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[age] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[age] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[appnotes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[appnotes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[appraisor] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[appraisor] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[assemzone] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[assemzone] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[bagno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[bagno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[boxno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[boxno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[caption] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[caption] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[cat] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[cat] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[catby] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[catby] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[catdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[catdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[cattype] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[cattype] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[chemcomp] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[chemcomp] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[circum] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[circum] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[circumft] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[circumft] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[circumin] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[circumin] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[classes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[classes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[colldate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[colldate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[collection] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[collection] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[collector] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[collector] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[conddate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[conddate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[condexam] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[condexam] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[condition] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[condition] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[condnotes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[condnotes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[count] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[count] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[creator] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[creator] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[creator2] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[creator2] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[creator3] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[creator3] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[credit] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[credit] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[crystal] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[crystal] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[culture] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[culture] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[curvalmax] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[curvalmax] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[curvalue] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[curvalue] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[dataset] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[dataset] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[date] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[date] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[datingmeth] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[datingmeth] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[datum] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[datum] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[depth] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[depth] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[depthft] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[depthft] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[depthin] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[depthin] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[descrip] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[descrip] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[diameter] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[diameter] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[diameterft] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[diameterft] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[diameterin] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[diameterin] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[dimnotes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[dimnotes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[dimtype] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_i32] ( identifier[self] . identifier[dimtype] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[dispvalue] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[dispvalue] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[earlydate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_i32] ( identifier[self] . identifier[earlydate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[elements] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[elements] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[epoch] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[epoch] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[era] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[era] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[event] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[event] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[ew] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[ew] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[excavadate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[excavadate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[excavateby] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[excavateby] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[exhibitid] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[exhibitid] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[exhibitno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_i32] ( identifier[self] . identifier[exhibitno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[exhlabel1] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[exhlabel1] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[exhlabel2] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[exhlabel2] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[exhlabel3] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[exhlabel3] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[exhlabel4] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[exhlabel4] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[exhstart] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[exhstart] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[family] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[family] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[feature] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[feature] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[flagdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date_time] ( identifier[self] . identifier[flagdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[flagnotes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[flagnotes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[flagreason] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[flagreason] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[formation] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[formation] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[fossils] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[fossils] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[found] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[found] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[fracture] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[fracture] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[frame] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[frame] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[framesize] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[framesize] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[genus] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[genus] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[gparent] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[gparent] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[grainsize] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[grainsize] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[habitat] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[habitat] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[hardness] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[hardness] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[height] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[height] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[heightft] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[heightft] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[heightin] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[heightin] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[homeloc] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[homeloc] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[idby] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[idby] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[iddate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[iddate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[imagefile] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[imagefile] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[imageno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_i32] ( identifier[self] . identifier[imageno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[imagesize] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[imagesize] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[inscomp] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[inscomp] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[inscrlang] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[inscrlang] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[inscrpos] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[inscrpos] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[inscrtech] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[inscrtech] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[inscrtext] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[inscrtext] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[inscrtrans] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[inscrtrans] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[inscrtype] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[inscrtype] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[insdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[insdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[insphone] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[insphone] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[inspremium] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[inspremium] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[insrep] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[insrep] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[insvalue] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[insvalue] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[invnby] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[invnby] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[invndate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[invndate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[kingdom] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[kingdom] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[latdeg] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[latdeg] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[latedate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_i32] ( identifier[self] . identifier[latedate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[legal] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[legal] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[length] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[length] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[lengthft] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[lengthft] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[lengthin] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[lengthin] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[level] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[level] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[lithofacie] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[lithofacie] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[loancond] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[loancond] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[loandue] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[loandue] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[loanid] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[loanid] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[loaninno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[loaninno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[loanno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_i32] ( identifier[self] . identifier[loanno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[loanrenew] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[loanrenew] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[locfield1] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[locfield1] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[locfield2] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[locfield2] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[locfield3] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[locfield3] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[locfield4] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[locfield4] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[locfield5] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[locfield5] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[locfield6] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[locfield6] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[longdeg] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[longdeg] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[luster] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[luster] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[made] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[made] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[maintcycle] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[maintcycle] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[maintdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[maintdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[maintnote] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[maintnote] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[material] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[material] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[medium] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[medium] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[member] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[member] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[mmark] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[mmark] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[nhclass] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[nhclass] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[nhorder] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[nhorder] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[notes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[notes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[ns] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[ns] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[objectid] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[objectid] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[objname] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[objname] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[objname2] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[objname2] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[objname3] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[objname3] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[objnames] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[objnames] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[occurrence] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[occurrence] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[oldno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[oldno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[origin] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[origin] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[othername] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[othername] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[otherno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[otherno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[outdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[outdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[owned] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[owned] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[parent] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[parent] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[people] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[people] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[period] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[period] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[phylum] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[phylum] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[policyno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[policyno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[ppid] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[ppid] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[preparator] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[preparator] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[prepdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[prepdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[preserve] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[preserve] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[pressure] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[pressure] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[provenance] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[provenance] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[pubnotes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[pubnotes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[qrurl] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[qrurl] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[recas] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[recas] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[recdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[recdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[recfrom] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[recfrom] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[relation] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[relation] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[relnotes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[relnotes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[renewuntil] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[renewuntil] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[repatby] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[repatby] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[repatclaim] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[repatclaim] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[repatdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[repatdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[repatdisp] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[repatdisp] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[repathand] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[repathand] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[repatnotes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[repatnotes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[repatnotic] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[repatnotic] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[repattype] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[repattype] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[rockclass] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[rockclass] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[rockcolor] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[rockcolor] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[rockorigin] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[rockorigin] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[rocktype] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[rocktype] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[role] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[role] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[role2] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[role2] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[role3] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[role3] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[school] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[school] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[sex] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[sex] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[sgflag] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[sgflag] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[signedname] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[signedname] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[signloc] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[signloc] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[site] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[site] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[siteno] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[siteno] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[specgrav] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[specgrav] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[species] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[species] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[sprocess] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[sprocess] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[stage] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[stage] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[status] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[status] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[statusby] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[statusby] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[statusdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[statusdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[sterms] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[sterms] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[stratum] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[stratum] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[streak] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[streak] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[subfamily] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[subfamily] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[subjects] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[subjects] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[subspecies] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[subspecies] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[technique] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[technique] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tempauthor] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tempauthor] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tempby] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tempby] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tempdate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[tempdate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[temperatur] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[temperatur] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[temploc] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[temploc] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tempnotes] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tempnotes] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tempreason] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tempreason] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tempuntil] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tempuntil] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[texture] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[texture] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[title] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[title] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tlocfield1] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tlocfield1] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tlocfield2] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tlocfield2] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tlocfield3] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tlocfield3] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tlocfield4] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tlocfield4] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tlocfield5] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tlocfield5] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[tlocfield6] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[tlocfield6] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf1] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf1] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf10] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf10] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf11] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf11] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf12] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf12] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf13] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_i32] ( identifier[self] . identifier[udf13] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf14] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[udf14] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf15] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[udf15] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf16] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[udf16] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf17] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[udf17] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf18] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[udf18] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf19] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[udf19] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf2] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf2] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf20] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[udf20] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf21] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf21] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf22] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf22] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf3] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf3] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf4] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf4] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf5] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf5] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf6] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf6] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf7] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf7] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf8] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf8] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[udf9] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[udf9] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[unit] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[unit] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[updated] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date_time] ( identifier[self] . identifier[updated] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[updatedby] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[updatedby] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[used] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[used] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[valuedate] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_date] ( identifier[self] . identifier[valuedate] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[varieties] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[varieties] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[vexhtml] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[vexhtml] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[vexlabel1] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[vexlabel1] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[vexlabel2] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[vexlabel2] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[vexlabel3] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[vexlabel3] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[vexlabel4] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[vexlabel4] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[webinclude] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_bool] ( identifier[self] . identifier[webinclude] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[weight] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[weight] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[weightin] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[weightin] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[weightlb] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[weightlb] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[width] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[width] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[widthft] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[widthft] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[widthin] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[widthin] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[xcord] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[xcord] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[ycord] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[ycord] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[zcord] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_decimal] ( identifier[self] . identifier[zcord] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[zsorter] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[zsorter] ) identifier[oprot] . identifier[write_field_end] () keyword[if] identifier[self] . identifier[zsorterx] keyword[is] keyword[not] keyword[None] : identifier[oprot] . identifier[write_field_begin] ( identifier[name] = literal[string] , identifier[type] = literal[int] , identifier[id] = keyword[None] ) identifier[oprot] . identifier[write_string] ( identifier[self] . identifier[zsorterx] ) identifier[oprot] . identifier[write_field_end] () identifier[oprot] . identifier[write_field_stop] () identifier[oprot] . identifier[write_struct_end] () keyword[return] identifier[self]
def write(self, oprot): """ Write this object to the given output protocol and return self. :type oprot: thryft.protocol._output_protocol._OutputProtocol :rtype: pastpy.gen.database.impl.dbf.objects_dbf_record.ObjectsDbfRecord """ oprot.write_struct_begin('ObjectsDbfRecord') if self.accessno is not None: oprot.write_field_begin(name='accessno', type=11, id=None) oprot.write_string(self.accessno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.accessory is not None: oprot.write_field_begin(name='accessory', type=11, id=None) oprot.write_string(self.accessory) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.acqvalue is not None: oprot.write_field_begin(name='acqvalue', type=11, id=None) oprot.write_decimal(self.acqvalue) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.age is not None: oprot.write_field_begin(name='age', type=11, id=None) oprot.write_string(self.age) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.appnotes is not None: oprot.write_field_begin(name='appnotes', type=11, id=None) oprot.write_string(self.appnotes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.appraisor is not None: oprot.write_field_begin(name='appraisor', type=11, id=None) oprot.write_string(self.appraisor) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.assemzone is not None: oprot.write_field_begin(name='assemzone', type=11, id=None) oprot.write_string(self.assemzone) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.bagno is not None: oprot.write_field_begin(name='bagno', type=11, id=None) oprot.write_string(self.bagno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.boxno is not None: oprot.write_field_begin(name='boxno', type=11, id=None) oprot.write_string(self.boxno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.caption is not None: oprot.write_field_begin(name='caption', type=11, id=None) oprot.write_string(self.caption) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.cat is not None: oprot.write_field_begin(name='cat', type=11, id=None) oprot.write_string(self.cat) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.catby is not None: oprot.write_field_begin(name='catby', type=11, id=None) oprot.write_string(self.catby) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.catdate is not None: oprot.write_field_begin(name='catdate', type=10, id=None) oprot.write_date(self.catdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.cattype is not None: oprot.write_field_begin(name='cattype', type=11, id=None) oprot.write_string(self.cattype) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.chemcomp is not None: oprot.write_field_begin(name='chemcomp', type=11, id=None) oprot.write_string(self.chemcomp) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.circum is not None: oprot.write_field_begin(name='circum', type=11, id=None) oprot.write_decimal(self.circum) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.circumft is not None: oprot.write_field_begin(name='circumft', type=11, id=None) oprot.write_decimal(self.circumft) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.circumin is not None: oprot.write_field_begin(name='circumin', type=11, id=None) oprot.write_decimal(self.circumin) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.classes is not None: oprot.write_field_begin(name='classes', type=11, id=None) oprot.write_string(self.classes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.colldate is not None: oprot.write_field_begin(name='colldate', type=10, id=None) oprot.write_date(self.colldate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.collection is not None: oprot.write_field_begin(name='collection', type=11, id=None) oprot.write_string(self.collection) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.collector is not None: oprot.write_field_begin(name='collector', type=11, id=None) oprot.write_string(self.collector) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.conddate is not None: oprot.write_field_begin(name='conddate', type=10, id=None) oprot.write_date(self.conddate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.condexam is not None: oprot.write_field_begin(name='condexam', type=11, id=None) oprot.write_string(self.condexam) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.condition is not None: oprot.write_field_begin(name='condition', type=11, id=None) oprot.write_string(self.condition) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.condnotes is not None: oprot.write_field_begin(name='condnotes', type=11, id=None) oprot.write_string(self.condnotes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.count is not None: oprot.write_field_begin(name='count', type=11, id=None) oprot.write_string(self.count) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.creator is not None: oprot.write_field_begin(name='creator', type=11, id=None) oprot.write_string(self.creator) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.creator2 is not None: oprot.write_field_begin(name='creator2', type=11, id=None) oprot.write_string(self.creator2) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.creator3 is not None: oprot.write_field_begin(name='creator3', type=11, id=None) oprot.write_string(self.creator3) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.credit is not None: oprot.write_field_begin(name='credit', type=11, id=None) oprot.write_string(self.credit) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.crystal is not None: oprot.write_field_begin(name='crystal', type=11, id=None) oprot.write_string(self.crystal) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.culture is not None: oprot.write_field_begin(name='culture', type=11, id=None) oprot.write_string(self.culture) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.curvalmax is not None: oprot.write_field_begin(name='curvalmax', type=11, id=None) oprot.write_decimal(self.curvalmax) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.curvalue is not None: oprot.write_field_begin(name='curvalue', type=11, id=None) oprot.write_decimal(self.curvalue) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.dataset is not None: oprot.write_field_begin(name='dataset', type=11, id=None) oprot.write_string(self.dataset) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.date is not None: oprot.write_field_begin(name='date', type=11, id=None) oprot.write_string(self.date) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.datingmeth is not None: oprot.write_field_begin(name='datingmeth', type=11, id=None) oprot.write_string(self.datingmeth) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.datum is not None: oprot.write_field_begin(name='datum', type=11, id=None) oprot.write_string(self.datum) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.depth is not None: oprot.write_field_begin(name='depth', type=11, id=None) oprot.write_decimal(self.depth) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.depthft is not None: oprot.write_field_begin(name='depthft', type=11, id=None) oprot.write_decimal(self.depthft) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.depthin is not None: oprot.write_field_begin(name='depthin', type=11, id=None) oprot.write_decimal(self.depthin) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.descrip is not None: oprot.write_field_begin(name='descrip', type=11, id=None) oprot.write_string(self.descrip) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.diameter is not None: oprot.write_field_begin(name='diameter', type=11, id=None) oprot.write_decimal(self.diameter) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.diameterft is not None: oprot.write_field_begin(name='diameterft', type=11, id=None) oprot.write_decimal(self.diameterft) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.diameterin is not None: oprot.write_field_begin(name='diameterin', type=11, id=None) oprot.write_decimal(self.diameterin) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.dimnotes is not None: oprot.write_field_begin(name='dimnotes', type=11, id=None) oprot.write_string(self.dimnotes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.dimtype is not None: oprot.write_field_begin(name='dimtype', type=8, id=None) oprot.write_i32(self.dimtype) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.dispvalue is not None: oprot.write_field_begin(name='dispvalue', type=11, id=None) oprot.write_string(self.dispvalue) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.earlydate is not None: oprot.write_field_begin(name='earlydate', type=8, id=None) oprot.write_i32(self.earlydate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.elements is not None: oprot.write_field_begin(name='elements', type=11, id=None) oprot.write_string(self.elements) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.epoch is not None: oprot.write_field_begin(name='epoch', type=11, id=None) oprot.write_string(self.epoch) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.era is not None: oprot.write_field_begin(name='era', type=11, id=None) oprot.write_string(self.era) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.event is not None: oprot.write_field_begin(name='event', type=11, id=None) oprot.write_string(self.event) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.ew is not None: oprot.write_field_begin(name='ew', type=11, id=None) oprot.write_string(self.ew) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.excavadate is not None: oprot.write_field_begin(name='excavadate', type=10, id=None) oprot.write_date(self.excavadate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.excavateby is not None: oprot.write_field_begin(name='excavateby', type=11, id=None) oprot.write_string(self.excavateby) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.exhibitid is not None: oprot.write_field_begin(name='exhibitid', type=11, id=None) oprot.write_string(self.exhibitid) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.exhibitno is not None: oprot.write_field_begin(name='exhibitno', type=8, id=None) oprot.write_i32(self.exhibitno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.exhlabel1 is not None: oprot.write_field_begin(name='exhlabel1', type=11, id=None) oprot.write_string(self.exhlabel1) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.exhlabel2 is not None: oprot.write_field_begin(name='exhlabel2', type=11, id=None) oprot.write_string(self.exhlabel2) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.exhlabel3 is not None: oprot.write_field_begin(name='exhlabel3', type=11, id=None) oprot.write_string(self.exhlabel3) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.exhlabel4 is not None: oprot.write_field_begin(name='exhlabel4', type=11, id=None) oprot.write_string(self.exhlabel4) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.exhstart is not None: oprot.write_field_begin(name='exhstart', type=10, id=None) oprot.write_date(self.exhstart) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.family is not None: oprot.write_field_begin(name='family', type=11, id=None) oprot.write_string(self.family) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.feature is not None: oprot.write_field_begin(name='feature', type=11, id=None) oprot.write_string(self.feature) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.flagdate is not None: oprot.write_field_begin(name='flagdate', type=10, id=None) oprot.write_date_time(self.flagdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.flagnotes is not None: oprot.write_field_begin(name='flagnotes', type=11, id=None) oprot.write_string(self.flagnotes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.flagreason is not None: oprot.write_field_begin(name='flagreason', type=11, id=None) oprot.write_string(self.flagreason) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.formation is not None: oprot.write_field_begin(name='formation', type=11, id=None) oprot.write_string(self.formation) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.fossils is not None: oprot.write_field_begin(name='fossils', type=11, id=None) oprot.write_string(self.fossils) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.found is not None: oprot.write_field_begin(name='found', type=11, id=None) oprot.write_string(self.found) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.fracture is not None: oprot.write_field_begin(name='fracture', type=11, id=None) oprot.write_string(self.fracture) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.frame is not None: oprot.write_field_begin(name='frame', type=11, id=None) oprot.write_string(self.frame) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.framesize is not None: oprot.write_field_begin(name='framesize', type=11, id=None) oprot.write_string(self.framesize) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.genus is not None: oprot.write_field_begin(name='genus', type=11, id=None) oprot.write_string(self.genus) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.gparent is not None: oprot.write_field_begin(name='gparent', type=11, id=None) oprot.write_string(self.gparent) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.grainsize is not None: oprot.write_field_begin(name='grainsize', type=11, id=None) oprot.write_string(self.grainsize) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.habitat is not None: oprot.write_field_begin(name='habitat', type=11, id=None) oprot.write_string(self.habitat) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.hardness is not None: oprot.write_field_begin(name='hardness', type=11, id=None) oprot.write_string(self.hardness) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.height is not None: oprot.write_field_begin(name='height', type=11, id=None) oprot.write_decimal(self.height) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.heightft is not None: oprot.write_field_begin(name='heightft', type=11, id=None) oprot.write_decimal(self.heightft) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.heightin is not None: oprot.write_field_begin(name='heightin', type=11, id=None) oprot.write_decimal(self.heightin) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.homeloc is not None: oprot.write_field_begin(name='homeloc', type=11, id=None) oprot.write_string(self.homeloc) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.idby is not None: oprot.write_field_begin(name='idby', type=11, id=None) oprot.write_string(self.idby) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.iddate is not None: oprot.write_field_begin(name='iddate', type=10, id=None) oprot.write_date(self.iddate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.imagefile is not None: oprot.write_field_begin(name='imagefile', type=11, id=None) oprot.write_string(self.imagefile) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.imageno is not None: oprot.write_field_begin(name='imageno', type=8, id=None) oprot.write_i32(self.imageno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.imagesize is not None: oprot.write_field_begin(name='imagesize', type=11, id=None) oprot.write_string(self.imagesize) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.inscomp is not None: oprot.write_field_begin(name='inscomp', type=11, id=None) oprot.write_string(self.inscomp) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.inscrlang is not None: oprot.write_field_begin(name='inscrlang', type=11, id=None) oprot.write_string(self.inscrlang) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.inscrpos is not None: oprot.write_field_begin(name='inscrpos', type=11, id=None) oprot.write_string(self.inscrpos) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.inscrtech is not None: oprot.write_field_begin(name='inscrtech', type=11, id=None) oprot.write_string(self.inscrtech) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.inscrtext is not None: oprot.write_field_begin(name='inscrtext', type=11, id=None) oprot.write_string(self.inscrtext) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.inscrtrans is not None: oprot.write_field_begin(name='inscrtrans', type=11, id=None) oprot.write_string(self.inscrtrans) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.inscrtype is not None: oprot.write_field_begin(name='inscrtype', type=11, id=None) oprot.write_string(self.inscrtype) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.insdate is not None: oprot.write_field_begin(name='insdate', type=10, id=None) oprot.write_date(self.insdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.insphone is not None: oprot.write_field_begin(name='insphone', type=11, id=None) oprot.write_string(self.insphone) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.inspremium is not None: oprot.write_field_begin(name='inspremium', type=11, id=None) oprot.write_string(self.inspremium) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.insrep is not None: oprot.write_field_begin(name='insrep', type=11, id=None) oprot.write_string(self.insrep) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.insvalue is not None: oprot.write_field_begin(name='insvalue', type=11, id=None) oprot.write_decimal(self.insvalue) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.invnby is not None: oprot.write_field_begin(name='invnby', type=11, id=None) oprot.write_string(self.invnby) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.invndate is not None: oprot.write_field_begin(name='invndate', type=10, id=None) oprot.write_date(self.invndate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.kingdom is not None: oprot.write_field_begin(name='kingdom', type=11, id=None) oprot.write_string(self.kingdom) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.latdeg is not None: oprot.write_field_begin(name='latdeg', type=11, id=None) oprot.write_decimal(self.latdeg) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.latedate is not None: oprot.write_field_begin(name='latedate', type=8, id=None) oprot.write_i32(self.latedate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.legal is not None: oprot.write_field_begin(name='legal', type=11, id=None) oprot.write_string(self.legal) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.length is not None: oprot.write_field_begin(name='length', type=11, id=None) oprot.write_decimal(self.length) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.lengthft is not None: oprot.write_field_begin(name='lengthft', type=11, id=None) oprot.write_decimal(self.lengthft) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.lengthin is not None: oprot.write_field_begin(name='lengthin', type=11, id=None) oprot.write_decimal(self.lengthin) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.level is not None: oprot.write_field_begin(name='level', type=11, id=None) oprot.write_string(self.level) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.lithofacie is not None: oprot.write_field_begin(name='lithofacie', type=11, id=None) oprot.write_string(self.lithofacie) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.loancond is not None: oprot.write_field_begin(name='loancond', type=11, id=None) oprot.write_string(self.loancond) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.loandue is not None: oprot.write_field_begin(name='loandue', type=10, id=None) oprot.write_date(self.loandue) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.loanid is not None: oprot.write_field_begin(name='loanid', type=11, id=None) oprot.write_string(self.loanid) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.loaninno is not None: oprot.write_field_begin(name='loaninno', type=11, id=None) oprot.write_string(self.loaninno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.loanno is not None: oprot.write_field_begin(name='loanno', type=8, id=None) oprot.write_i32(self.loanno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.loanrenew is not None: oprot.write_field_begin(name='loanrenew', type=10, id=None) oprot.write_date(self.loanrenew) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.locfield1 is not None: oprot.write_field_begin(name='locfield1', type=11, id=None) oprot.write_string(self.locfield1) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.locfield2 is not None: oprot.write_field_begin(name='locfield2', type=11, id=None) oprot.write_string(self.locfield2) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.locfield3 is not None: oprot.write_field_begin(name='locfield3', type=11, id=None) oprot.write_string(self.locfield3) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.locfield4 is not None: oprot.write_field_begin(name='locfield4', type=11, id=None) oprot.write_string(self.locfield4) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.locfield5 is not None: oprot.write_field_begin(name='locfield5', type=11, id=None) oprot.write_string(self.locfield5) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.locfield6 is not None: oprot.write_field_begin(name='locfield6', type=11, id=None) oprot.write_string(self.locfield6) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.longdeg is not None: oprot.write_field_begin(name='longdeg', type=11, id=None) oprot.write_decimal(self.longdeg) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.luster is not None: oprot.write_field_begin(name='luster', type=11, id=None) oprot.write_string(self.luster) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.made is not None: oprot.write_field_begin(name='made', type=11, id=None) oprot.write_string(self.made) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.maintcycle is not None: oprot.write_field_begin(name='maintcycle', type=11, id=None) oprot.write_string(self.maintcycle) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.maintdate is not None: oprot.write_field_begin(name='maintdate', type=10, id=None) oprot.write_date(self.maintdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.maintnote is not None: oprot.write_field_begin(name='maintnote', type=11, id=None) oprot.write_string(self.maintnote) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.material is not None: oprot.write_field_begin(name='material', type=11, id=None) oprot.write_string(self.material) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.medium is not None: oprot.write_field_begin(name='medium', type=11, id=None) oprot.write_string(self.medium) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.member is not None: oprot.write_field_begin(name='member', type=11, id=None) oprot.write_string(self.member) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.mmark is not None: oprot.write_field_begin(name='mmark', type=11, id=None) oprot.write_string(self.mmark) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.nhclass is not None: oprot.write_field_begin(name='nhclass', type=11, id=None) oprot.write_string(self.nhclass) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.nhorder is not None: oprot.write_field_begin(name='nhorder', type=11, id=None) oprot.write_string(self.nhorder) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.notes is not None: oprot.write_field_begin(name='notes', type=11, id=None) oprot.write_string(self.notes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.ns is not None: oprot.write_field_begin(name='ns', type=11, id=None) oprot.write_string(self.ns) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.objectid is not None: oprot.write_field_begin(name='objectid', type=11, id=None) oprot.write_string(self.objectid) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.objname is not None: oprot.write_field_begin(name='objname', type=11, id=None) oprot.write_string(self.objname) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.objname2 is not None: oprot.write_field_begin(name='objname2', type=11, id=None) oprot.write_string(self.objname2) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.objname3 is not None: oprot.write_field_begin(name='objname3', type=11, id=None) oprot.write_string(self.objname3) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.objnames is not None: oprot.write_field_begin(name='objnames', type=11, id=None) oprot.write_string(self.objnames) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.occurrence is not None: oprot.write_field_begin(name='occurrence', type=11, id=None) oprot.write_string(self.occurrence) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.oldno is not None: oprot.write_field_begin(name='oldno', type=11, id=None) oprot.write_string(self.oldno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.origin is not None: oprot.write_field_begin(name='origin', type=11, id=None) oprot.write_string(self.origin) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.othername is not None: oprot.write_field_begin(name='othername', type=11, id=None) oprot.write_string(self.othername) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.otherno is not None: oprot.write_field_begin(name='otherno', type=11, id=None) oprot.write_string(self.otherno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.outdate is not None: oprot.write_field_begin(name='outdate', type=10, id=None) oprot.write_date(self.outdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.owned is not None: oprot.write_field_begin(name='owned', type=11, id=None) oprot.write_string(self.owned) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.parent is not None: oprot.write_field_begin(name='parent', type=11, id=None) oprot.write_string(self.parent) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.people is not None: oprot.write_field_begin(name='people', type=11, id=None) oprot.write_string(self.people) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.period is not None: oprot.write_field_begin(name='period', type=11, id=None) oprot.write_string(self.period) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.phylum is not None: oprot.write_field_begin(name='phylum', type=11, id=None) oprot.write_string(self.phylum) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.policyno is not None: oprot.write_field_begin(name='policyno', type=11, id=None) oprot.write_string(self.policyno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.ppid is not None: oprot.write_field_begin(name='ppid', type=11, id=None) oprot.write_string(self.ppid) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.preparator is not None: oprot.write_field_begin(name='preparator', type=11, id=None) oprot.write_string(self.preparator) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.prepdate is not None: oprot.write_field_begin(name='prepdate', type=10, id=None) oprot.write_date(self.prepdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.preserve is not None: oprot.write_field_begin(name='preserve', type=11, id=None) oprot.write_string(self.preserve) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.pressure is not None: oprot.write_field_begin(name='pressure', type=11, id=None) oprot.write_string(self.pressure) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.provenance is not None: oprot.write_field_begin(name='provenance', type=11, id=None) oprot.write_string(self.provenance) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.pubnotes is not None: oprot.write_field_begin(name='pubnotes', type=11, id=None) oprot.write_string(self.pubnotes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.qrurl is not None: oprot.write_field_begin(name='qrurl', type=11, id=None) oprot.write_string(self.qrurl) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.recas is not None: oprot.write_field_begin(name='recas', type=11, id=None) oprot.write_string(self.recas) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.recdate is not None: oprot.write_field_begin(name='recdate', type=11, id=None) oprot.write_string(self.recdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.recfrom is not None: oprot.write_field_begin(name='recfrom', type=11, id=None) oprot.write_string(self.recfrom) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.relation is not None: oprot.write_field_begin(name='relation', type=11, id=None) oprot.write_string(self.relation) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.relnotes is not None: oprot.write_field_begin(name='relnotes', type=11, id=None) oprot.write_string(self.relnotes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.renewuntil is not None: oprot.write_field_begin(name='renewuntil', type=10, id=None) oprot.write_date(self.renewuntil) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.repatby is not None: oprot.write_field_begin(name='repatby', type=11, id=None) oprot.write_string(self.repatby) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.repatclaim is not None: oprot.write_field_begin(name='repatclaim', type=11, id=None) oprot.write_string(self.repatclaim) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.repatdate is not None: oprot.write_field_begin(name='repatdate', type=10, id=None) oprot.write_date(self.repatdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.repatdisp is not None: oprot.write_field_begin(name='repatdisp', type=11, id=None) oprot.write_string(self.repatdisp) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.repathand is not None: oprot.write_field_begin(name='repathand', type=11, id=None) oprot.write_string(self.repathand) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.repatnotes is not None: oprot.write_field_begin(name='repatnotes', type=11, id=None) oprot.write_string(self.repatnotes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.repatnotic is not None: oprot.write_field_begin(name='repatnotic', type=10, id=None) oprot.write_date(self.repatnotic) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.repattype is not None: oprot.write_field_begin(name='repattype', type=11, id=None) oprot.write_string(self.repattype) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.rockclass is not None: oprot.write_field_begin(name='rockclass', type=11, id=None) oprot.write_string(self.rockclass) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.rockcolor is not None: oprot.write_field_begin(name='rockcolor', type=11, id=None) oprot.write_string(self.rockcolor) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.rockorigin is not None: oprot.write_field_begin(name='rockorigin', type=11, id=None) oprot.write_string(self.rockorigin) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.rocktype is not None: oprot.write_field_begin(name='rocktype', type=11, id=None) oprot.write_string(self.rocktype) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.role is not None: oprot.write_field_begin(name='role', type=11, id=None) oprot.write_string(self.role) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.role2 is not None: oprot.write_field_begin(name='role2', type=11, id=None) oprot.write_string(self.role2) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.role3 is not None: oprot.write_field_begin(name='role3', type=11, id=None) oprot.write_string(self.role3) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.school is not None: oprot.write_field_begin(name='school', type=11, id=None) oprot.write_string(self.school) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.sex is not None: oprot.write_field_begin(name='sex', type=11, id=None) oprot.write_string(self.sex) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.sgflag is not None: oprot.write_field_begin(name='sgflag', type=11, id=None) oprot.write_string(self.sgflag) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.signedname is not None: oprot.write_field_begin(name='signedname', type=11, id=None) oprot.write_string(self.signedname) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.signloc is not None: oprot.write_field_begin(name='signloc', type=11, id=None) oprot.write_string(self.signloc) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.site is not None: oprot.write_field_begin(name='site', type=11, id=None) oprot.write_string(self.site) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.siteno is not None: oprot.write_field_begin(name='siteno', type=11, id=None) oprot.write_string(self.siteno) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.specgrav is not None: oprot.write_field_begin(name='specgrav', type=11, id=None) oprot.write_string(self.specgrav) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.species is not None: oprot.write_field_begin(name='species', type=11, id=None) oprot.write_string(self.species) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.sprocess is not None: oprot.write_field_begin(name='sprocess', type=11, id=None) oprot.write_string(self.sprocess) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.stage is not None: oprot.write_field_begin(name='stage', type=11, id=None) oprot.write_string(self.stage) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.status is not None: oprot.write_field_begin(name='status', type=11, id=None) oprot.write_string(self.status) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.statusby is not None: oprot.write_field_begin(name='statusby', type=11, id=None) oprot.write_string(self.statusby) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.statusdate is not None: oprot.write_field_begin(name='statusdate', type=10, id=None) oprot.write_date(self.statusdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.sterms is not None: oprot.write_field_begin(name='sterms', type=11, id=None) oprot.write_string(self.sterms) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.stratum is not None: oprot.write_field_begin(name='stratum', type=11, id=None) oprot.write_string(self.stratum) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.streak is not None: oprot.write_field_begin(name='streak', type=11, id=None) oprot.write_string(self.streak) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.subfamily is not None: oprot.write_field_begin(name='subfamily', type=11, id=None) oprot.write_string(self.subfamily) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.subjects is not None: oprot.write_field_begin(name='subjects', type=11, id=None) oprot.write_string(self.subjects) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.subspecies is not None: oprot.write_field_begin(name='subspecies', type=11, id=None) oprot.write_string(self.subspecies) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.technique is not None: oprot.write_field_begin(name='technique', type=11, id=None) oprot.write_string(self.technique) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tempauthor is not None: oprot.write_field_begin(name='tempauthor', type=11, id=None) oprot.write_string(self.tempauthor) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tempby is not None: oprot.write_field_begin(name='tempby', type=11, id=None) oprot.write_string(self.tempby) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tempdate is not None: oprot.write_field_begin(name='tempdate', type=10, id=None) oprot.write_date(self.tempdate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.temperatur is not None: oprot.write_field_begin(name='temperatur', type=11, id=None) oprot.write_string(self.temperatur) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.temploc is not None: oprot.write_field_begin(name='temploc', type=11, id=None) oprot.write_string(self.temploc) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tempnotes is not None: oprot.write_field_begin(name='tempnotes', type=11, id=None) oprot.write_string(self.tempnotes) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tempreason is not None: oprot.write_field_begin(name='tempreason', type=11, id=None) oprot.write_string(self.tempreason) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tempuntil is not None: oprot.write_field_begin(name='tempuntil', type=11, id=None) oprot.write_string(self.tempuntil) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.texture is not None: oprot.write_field_begin(name='texture', type=11, id=None) oprot.write_string(self.texture) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.title is not None: oprot.write_field_begin(name='title', type=11, id=None) oprot.write_string(self.title) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tlocfield1 is not None: oprot.write_field_begin(name='tlocfield1', type=11, id=None) oprot.write_string(self.tlocfield1) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tlocfield2 is not None: oprot.write_field_begin(name='tlocfield2', type=11, id=None) oprot.write_string(self.tlocfield2) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tlocfield3 is not None: oprot.write_field_begin(name='tlocfield3', type=11, id=None) oprot.write_string(self.tlocfield3) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tlocfield4 is not None: oprot.write_field_begin(name='tlocfield4', type=11, id=None) oprot.write_string(self.tlocfield4) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tlocfield5 is not None: oprot.write_field_begin(name='tlocfield5', type=11, id=None) oprot.write_string(self.tlocfield5) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.tlocfield6 is not None: oprot.write_field_begin(name='tlocfield6', type=11, id=None) oprot.write_string(self.tlocfield6) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf1 is not None: oprot.write_field_begin(name='udf1', type=11, id=None) oprot.write_string(self.udf1) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf10 is not None: oprot.write_field_begin(name='udf10', type=11, id=None) oprot.write_string(self.udf10) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf11 is not None: oprot.write_field_begin(name='udf11', type=11, id=None) oprot.write_string(self.udf11) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf12 is not None: oprot.write_field_begin(name='udf12', type=11, id=None) oprot.write_string(self.udf12) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf13 is not None: oprot.write_field_begin(name='udf13', type=8, id=None) oprot.write_i32(self.udf13) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf14 is not None: oprot.write_field_begin(name='udf14', type=11, id=None) oprot.write_decimal(self.udf14) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf15 is not None: oprot.write_field_begin(name='udf15', type=11, id=None) oprot.write_decimal(self.udf15) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf16 is not None: oprot.write_field_begin(name='udf16', type=11, id=None) oprot.write_decimal(self.udf16) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf17 is not None: oprot.write_field_begin(name='udf17', type=11, id=None) oprot.write_decimal(self.udf17) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf18 is not None: oprot.write_field_begin(name='udf18', type=10, id=None) oprot.write_date(self.udf18) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf19 is not None: oprot.write_field_begin(name='udf19', type=10, id=None) oprot.write_date(self.udf19) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf2 is not None: oprot.write_field_begin(name='udf2', type=11, id=None) oprot.write_string(self.udf2) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf20 is not None: oprot.write_field_begin(name='udf20', type=10, id=None) oprot.write_date(self.udf20) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf21 is not None: oprot.write_field_begin(name='udf21', type=11, id=None) oprot.write_string(self.udf21) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf22 is not None: oprot.write_field_begin(name='udf22', type=11, id=None) oprot.write_string(self.udf22) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf3 is not None: oprot.write_field_begin(name='udf3', type=11, id=None) oprot.write_string(self.udf3) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf4 is not None: oprot.write_field_begin(name='udf4', type=11, id=None) oprot.write_string(self.udf4) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf5 is not None: oprot.write_field_begin(name='udf5', type=11, id=None) oprot.write_string(self.udf5) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf6 is not None: oprot.write_field_begin(name='udf6', type=11, id=None) oprot.write_string(self.udf6) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf7 is not None: oprot.write_field_begin(name='udf7', type=11, id=None) oprot.write_string(self.udf7) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf8 is not None: oprot.write_field_begin(name='udf8', type=11, id=None) oprot.write_string(self.udf8) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.udf9 is not None: oprot.write_field_begin(name='udf9', type=11, id=None) oprot.write_string(self.udf9) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.unit is not None: oprot.write_field_begin(name='unit', type=11, id=None) oprot.write_string(self.unit) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.updated is not None: oprot.write_field_begin(name='updated', type=10, id=None) oprot.write_date_time(self.updated) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.updatedby is not None: oprot.write_field_begin(name='updatedby', type=11, id=None) oprot.write_string(self.updatedby) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.used is not None: oprot.write_field_begin(name='used', type=11, id=None) oprot.write_string(self.used) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.valuedate is not None: oprot.write_field_begin(name='valuedate', type=10, id=None) oprot.write_date(self.valuedate) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.varieties is not None: oprot.write_field_begin(name='varieties', type=11, id=None) oprot.write_string(self.varieties) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.vexhtml is not None: oprot.write_field_begin(name='vexhtml', type=11, id=None) oprot.write_string(self.vexhtml) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.vexlabel1 is not None: oprot.write_field_begin(name='vexlabel1', type=11, id=None) oprot.write_string(self.vexlabel1) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.vexlabel2 is not None: oprot.write_field_begin(name='vexlabel2', type=11, id=None) oprot.write_string(self.vexlabel2) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.vexlabel3 is not None: oprot.write_field_begin(name='vexlabel3', type=11, id=None) oprot.write_string(self.vexlabel3) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.vexlabel4 is not None: oprot.write_field_begin(name='vexlabel4', type=11, id=None) oprot.write_string(self.vexlabel4) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.webinclude is not None: oprot.write_field_begin(name='webinclude', type=2, id=None) oprot.write_bool(self.webinclude) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.weight is not None: oprot.write_field_begin(name='weight', type=11, id=None) oprot.write_decimal(self.weight) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.weightin is not None: oprot.write_field_begin(name='weightin', type=11, id=None) oprot.write_decimal(self.weightin) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.weightlb is not None: oprot.write_field_begin(name='weightlb', type=11, id=None) oprot.write_decimal(self.weightlb) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.width is not None: oprot.write_field_begin(name='width', type=11, id=None) oprot.write_decimal(self.width) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.widthft is not None: oprot.write_field_begin(name='widthft', type=11, id=None) oprot.write_decimal(self.widthft) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.widthin is not None: oprot.write_field_begin(name='widthin', type=11, id=None) oprot.write_decimal(self.widthin) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.xcord is not None: oprot.write_field_begin(name='xcord', type=11, id=None) oprot.write_decimal(self.xcord) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.ycord is not None: oprot.write_field_begin(name='ycord', type=11, id=None) oprot.write_decimal(self.ycord) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.zcord is not None: oprot.write_field_begin(name='zcord', type=11, id=None) oprot.write_decimal(self.zcord) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.zsorter is not None: oprot.write_field_begin(name='zsorter', type=11, id=None) oprot.write_string(self.zsorter) oprot.write_field_end() # depends on [control=['if'], data=[]] if self.zsorterx is not None: oprot.write_field_begin(name='zsorterx', type=11, id=None) oprot.write_string(self.zsorterx) oprot.write_field_end() # depends on [control=['if'], data=[]] oprot.write_field_stop() oprot.write_struct_end() return self
def inspect_service(self, service, insert_defaults=None): """ Return information about a service. Args: service (str): Service name or ID. insert_defaults (boolean): If true, default values will be merged into the service inspect output. Returns: (dict): A dictionary of the server-side representation of the service, including all relevant properties. Raises: :py:class:`docker.errors.APIError` If the server returns an error. """ url = self._url('/services/{0}', service) params = {} if insert_defaults is not None: if utils.version_lt(self._version, '1.29'): raise errors.InvalidVersion( 'insert_defaults is not supported in API version < 1.29' ) params['insertDefaults'] = insert_defaults return self._result(self._get(url, params=params), True)
def function[inspect_service, parameter[self, service, insert_defaults]]: constant[ Return information about a service. Args: service (str): Service name or ID. insert_defaults (boolean): If true, default values will be merged into the service inspect output. Returns: (dict): A dictionary of the server-side representation of the service, including all relevant properties. Raises: :py:class:`docker.errors.APIError` If the server returns an error. ] variable[url] assign[=] call[name[self]._url, parameter[constant[/services/{0}], name[service]]] variable[params] assign[=] dictionary[[], []] if compare[name[insert_defaults] is_not constant[None]] begin[:] if call[name[utils].version_lt, parameter[name[self]._version, constant[1.29]]] begin[:] <ast.Raise object at 0x7da1b1f952a0> call[name[params]][constant[insertDefaults]] assign[=] name[insert_defaults] return[call[name[self]._result, parameter[call[name[self]._get, parameter[name[url]]], constant[True]]]]
keyword[def] identifier[inspect_service] ( identifier[self] , identifier[service] , identifier[insert_defaults] = keyword[None] ): literal[string] identifier[url] = identifier[self] . identifier[_url] ( literal[string] , identifier[service] ) identifier[params] ={} keyword[if] identifier[insert_defaults] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[utils] . identifier[version_lt] ( identifier[self] . identifier[_version] , literal[string] ): keyword[raise] identifier[errors] . identifier[InvalidVersion] ( literal[string] ) identifier[params] [ literal[string] ]= identifier[insert_defaults] keyword[return] identifier[self] . identifier[_result] ( identifier[self] . identifier[_get] ( identifier[url] , identifier[params] = identifier[params] ), keyword[True] )
def inspect_service(self, service, insert_defaults=None): """ Return information about a service. Args: service (str): Service name or ID. insert_defaults (boolean): If true, default values will be merged into the service inspect output. Returns: (dict): A dictionary of the server-side representation of the service, including all relevant properties. Raises: :py:class:`docker.errors.APIError` If the server returns an error. """ url = self._url('/services/{0}', service) params = {} if insert_defaults is not None: if utils.version_lt(self._version, '1.29'): raise errors.InvalidVersion('insert_defaults is not supported in API version < 1.29') # depends on [control=['if'], data=[]] params['insertDefaults'] = insert_defaults # depends on [control=['if'], data=['insert_defaults']] return self._result(self._get(url, params=params), True)
def calc_Z(A, x): """ calculate the Z matrix (flows) from A and x Parameters ---------- A : pandas.DataFrame or numpy.array Symmetric input output table (coefficients) x : pandas.DataFrame or numpy.array Industry output column vector Returns ------- pandas.DataFrame or numpy.array Symmetric input output table (flows) Z The type is determined by the type of A. If DataFrame index/columns as A """ if (type(x) is pd.DataFrame) or (type(x) is pd.Series): x = x.values x = x.reshape((1, -1)) # use numpy broadcasting - much faster # (but has to ensure that x is a row vector) # old mathematical form: # return A.dot(np.diagflat(x)) if type(A) is pd.DataFrame: return pd.DataFrame(A.values * x, index=A.index, columns=A.columns) else: return A*x
def function[calc_Z, parameter[A, x]]: constant[ calculate the Z matrix (flows) from A and x Parameters ---------- A : pandas.DataFrame or numpy.array Symmetric input output table (coefficients) x : pandas.DataFrame or numpy.array Industry output column vector Returns ------- pandas.DataFrame or numpy.array Symmetric input output table (flows) Z The type is determined by the type of A. If DataFrame index/columns as A ] if <ast.BoolOp object at 0x7da1b0492b90> begin[:] variable[x] assign[=] name[x].values variable[x] assign[=] call[name[x].reshape, parameter[tuple[[<ast.Constant object at 0x7da1b04932b0>, <ast.UnaryOp object at 0x7da1b04932e0>]]]] if compare[call[name[type], parameter[name[A]]] is name[pd].DataFrame] begin[:] return[call[name[pd].DataFrame, parameter[binary_operation[name[A].values * name[x]]]]]
keyword[def] identifier[calc_Z] ( identifier[A] , identifier[x] ): literal[string] keyword[if] ( identifier[type] ( identifier[x] ) keyword[is] identifier[pd] . identifier[DataFrame] ) keyword[or] ( identifier[type] ( identifier[x] ) keyword[is] identifier[pd] . identifier[Series] ): identifier[x] = identifier[x] . identifier[values] identifier[x] = identifier[x] . identifier[reshape] (( literal[int] ,- literal[int] )) keyword[if] identifier[type] ( identifier[A] ) keyword[is] identifier[pd] . identifier[DataFrame] : keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[A] . identifier[values] * identifier[x] , identifier[index] = identifier[A] . identifier[index] , identifier[columns] = identifier[A] . identifier[columns] ) keyword[else] : keyword[return] identifier[A] * identifier[x]
def calc_Z(A, x): """ calculate the Z matrix (flows) from A and x Parameters ---------- A : pandas.DataFrame or numpy.array Symmetric input output table (coefficients) x : pandas.DataFrame or numpy.array Industry output column vector Returns ------- pandas.DataFrame or numpy.array Symmetric input output table (flows) Z The type is determined by the type of A. If DataFrame index/columns as A """ if type(x) is pd.DataFrame or type(x) is pd.Series: x = x.values # depends on [control=['if'], data=[]] x = x.reshape((1, -1)) # use numpy broadcasting - much faster # (but has to ensure that x is a row vector) # old mathematical form: # return A.dot(np.diagflat(x)) if type(A) is pd.DataFrame: return pd.DataFrame(A.values * x, index=A.index, columns=A.columns) # depends on [control=['if'], data=[]] else: return A * x
def update(self, friendly_name=values.unset, status=values.unset): """ Update the AccountInstance :param unicode friendly_name: FriendlyName to update :param AccountInstance.Status status: Status to update the Account with :returns: Updated AccountInstance :rtype: twilio.rest.api.v2010.account.AccountInstance """ return self._proxy.update(friendly_name=friendly_name, status=status, )
def function[update, parameter[self, friendly_name, status]]: constant[ Update the AccountInstance :param unicode friendly_name: FriendlyName to update :param AccountInstance.Status status: Status to update the Account with :returns: Updated AccountInstance :rtype: twilio.rest.api.v2010.account.AccountInstance ] return[call[name[self]._proxy.update, parameter[]]]
keyword[def] identifier[update] ( identifier[self] , identifier[friendly_name] = identifier[values] . identifier[unset] , identifier[status] = identifier[values] . identifier[unset] ): literal[string] keyword[return] identifier[self] . identifier[_proxy] . identifier[update] ( identifier[friendly_name] = identifier[friendly_name] , identifier[status] = identifier[status] ,)
def update(self, friendly_name=values.unset, status=values.unset): """ Update the AccountInstance :param unicode friendly_name: FriendlyName to update :param AccountInstance.Status status: Status to update the Account with :returns: Updated AccountInstance :rtype: twilio.rest.api.v2010.account.AccountInstance """ return self._proxy.update(friendly_name=friendly_name, status=status)
def get_or_create_organization(self, id=None, name=None): """ Gets existing or creates new organization :rtype: Organization """ if id: return self.get_organization(id) else: assert name try: return self.get_organization(name=name) except exceptions.NotFoundError: return self.create_organization(name)
def function[get_or_create_organization, parameter[self, id, name]]: constant[ Gets existing or creates new organization :rtype: Organization ] if name[id] begin[:] return[call[name[self].get_organization, parameter[name[id]]]]
keyword[def] identifier[get_or_create_organization] ( identifier[self] , identifier[id] = keyword[None] , identifier[name] = keyword[None] ): literal[string] keyword[if] identifier[id] : keyword[return] identifier[self] . identifier[get_organization] ( identifier[id] ) keyword[else] : keyword[assert] identifier[name] keyword[try] : keyword[return] identifier[self] . identifier[get_organization] ( identifier[name] = identifier[name] ) keyword[except] identifier[exceptions] . identifier[NotFoundError] : keyword[return] identifier[self] . identifier[create_organization] ( identifier[name] )
def get_or_create_organization(self, id=None, name=None): """ Gets existing or creates new organization :rtype: Organization """ if id: return self.get_organization(id) # depends on [control=['if'], data=[]] else: assert name try: return self.get_organization(name=name) # depends on [control=['try'], data=[]] except exceptions.NotFoundError: return self.create_organization(name) # depends on [control=['except'], data=[]]
def color_map_data(self, data: numpy.ndarray) -> None: """Set the data and mark the canvas item for updating. Data should be an ndarray of shape (256, 3) with type uint8 """ self.__color_map_data = data self.update()
def function[color_map_data, parameter[self, data]]: constant[Set the data and mark the canvas item for updating. Data should be an ndarray of shape (256, 3) with type uint8 ] name[self].__color_map_data assign[=] name[data] call[name[self].update, parameter[]]
keyword[def] identifier[color_map_data] ( identifier[self] , identifier[data] : identifier[numpy] . identifier[ndarray] )-> keyword[None] : literal[string] identifier[self] . identifier[__color_map_data] = identifier[data] identifier[self] . identifier[update] ()
def color_map_data(self, data: numpy.ndarray) -> None: """Set the data and mark the canvas item for updating. Data should be an ndarray of shape (256, 3) with type uint8 """ self.__color_map_data = data self.update()
def get_transcript_ids_for_ensembl_gene_ids(self, gene_ids, hgnc_symbols): """ fetch the ensembl transcript IDs for a given ensembl gene ID. Args: gene_ids: list of Ensembl gene IDs for the gene hgnc_symbols: list of possible HGNC symbols for gene """ chroms = {"1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", \ "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", \ "X", "Y"} headers = {"content-type": "application/json"} transcript_ids = [] for gene_id in gene_ids: self.attempt = 0 ext = "/overlap/id/{}?feature=transcript".format(gene_id) r = self.ensembl_request(ext, headers) for item in json.loads(r): # ignore non-coding transcripts if item["biotype"] not in ["protein_coding", "polymorphic_pseudogene"]: continue # ignore transcripts not on the standard chromosomes # (non-default chroms fail to map the known de novo variants # to the gene location if item["Parent"] != gene_id or item["seq_region_name"] not in \ chroms or \ all([symbol not in item["external_name"] for symbol in hgnc_symbols]): continue transcript_ids.append(item["id"]) return transcript_ids
def function[get_transcript_ids_for_ensembl_gene_ids, parameter[self, gene_ids, hgnc_symbols]]: constant[ fetch the ensembl transcript IDs for a given ensembl gene ID. Args: gene_ids: list of Ensembl gene IDs for the gene hgnc_symbols: list of possible HGNC symbols for gene ] variable[chroms] assign[=] <ast.Set object at 0x7da18ede53f0> variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18ede6110>], [<ast.Constant object at 0x7da18ede4b80>]] variable[transcript_ids] assign[=] list[[]] for taget[name[gene_id]] in starred[name[gene_ids]] begin[:] name[self].attempt assign[=] constant[0] variable[ext] assign[=] call[constant[/overlap/id/{}?feature=transcript].format, parameter[name[gene_id]]] variable[r] assign[=] call[name[self].ensembl_request, parameter[name[ext], name[headers]]] for taget[name[item]] in starred[call[name[json].loads, parameter[name[r]]]] begin[:] if compare[call[name[item]][constant[biotype]] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da18ede5e40>, <ast.Constant object at 0x7da18ede6f50>]]] begin[:] continue if <ast.BoolOp object at 0x7da18ede67a0> begin[:] continue call[name[transcript_ids].append, parameter[call[name[item]][constant[id]]]] return[name[transcript_ids]]
keyword[def] identifier[get_transcript_ids_for_ensembl_gene_ids] ( identifier[self] , identifier[gene_ids] , identifier[hgnc_symbols] ): literal[string] identifier[chroms] ={ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] } identifier[headers] ={ literal[string] : literal[string] } identifier[transcript_ids] =[] keyword[for] identifier[gene_id] keyword[in] identifier[gene_ids] : identifier[self] . identifier[attempt] = literal[int] identifier[ext] = literal[string] . identifier[format] ( identifier[gene_id] ) identifier[r] = identifier[self] . identifier[ensembl_request] ( identifier[ext] , identifier[headers] ) keyword[for] identifier[item] keyword[in] identifier[json] . identifier[loads] ( identifier[r] ): keyword[if] identifier[item] [ literal[string] ] keyword[not] keyword[in] [ literal[string] , literal[string] ]: keyword[continue] keyword[if] identifier[item] [ literal[string] ]!= identifier[gene_id] keyword[or] identifier[item] [ literal[string] ] keyword[not] keyword[in] identifier[chroms] keyword[or] identifier[all] ([ identifier[symbol] keyword[not] keyword[in] identifier[item] [ literal[string] ] keyword[for] identifier[symbol] keyword[in] identifier[hgnc_symbols] ]): keyword[continue] identifier[transcript_ids] . identifier[append] ( identifier[item] [ literal[string] ]) keyword[return] identifier[transcript_ids]
def get_transcript_ids_for_ensembl_gene_ids(self, gene_ids, hgnc_symbols): """ fetch the ensembl transcript IDs for a given ensembl gene ID. Args: gene_ids: list of Ensembl gene IDs for the gene hgnc_symbols: list of possible HGNC symbols for gene """ chroms = {'1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '20', '21', '22', 'X', 'Y'} headers = {'content-type': 'application/json'} transcript_ids = [] for gene_id in gene_ids: self.attempt = 0 ext = '/overlap/id/{}?feature=transcript'.format(gene_id) r = self.ensembl_request(ext, headers) for item in json.loads(r): # ignore non-coding transcripts if item['biotype'] not in ['protein_coding', 'polymorphic_pseudogene']: continue # depends on [control=['if'], data=[]] # ignore transcripts not on the standard chromosomes # (non-default chroms fail to map the known de novo variants # to the gene location if item['Parent'] != gene_id or item['seq_region_name'] not in chroms or all([symbol not in item['external_name'] for symbol in hgnc_symbols]): continue # depends on [control=['if'], data=[]] transcript_ids.append(item['id']) # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['gene_id']] return transcript_ids
def rna_transcript_expression_dict_from_args(args): """ Returns a dictionary mapping Ensembl transcript IDs to FPKM expression values or None if neither Cufflinks tracking file nor StringTie GTF file were specified. """ if args.rna_transcript_fpkm_tracking_file: return load_cufflinks_fpkm_dict(args.rna_transcript_fpkm_tracking_file) elif args.rna_transcript_fpkm_gtf_file: return load_transcript_fpkm_dict_from_gtf( args.rna_transcript_fpkm_gtf_file) else: return None
def function[rna_transcript_expression_dict_from_args, parameter[args]]: constant[ Returns a dictionary mapping Ensembl transcript IDs to FPKM expression values or None if neither Cufflinks tracking file nor StringTie GTF file were specified. ] if name[args].rna_transcript_fpkm_tracking_file begin[:] return[call[name[load_cufflinks_fpkm_dict], parameter[name[args].rna_transcript_fpkm_tracking_file]]]
keyword[def] identifier[rna_transcript_expression_dict_from_args] ( identifier[args] ): literal[string] keyword[if] identifier[args] . identifier[rna_transcript_fpkm_tracking_file] : keyword[return] identifier[load_cufflinks_fpkm_dict] ( identifier[args] . identifier[rna_transcript_fpkm_tracking_file] ) keyword[elif] identifier[args] . identifier[rna_transcript_fpkm_gtf_file] : keyword[return] identifier[load_transcript_fpkm_dict_from_gtf] ( identifier[args] . identifier[rna_transcript_fpkm_gtf_file] ) keyword[else] : keyword[return] keyword[None]
def rna_transcript_expression_dict_from_args(args): """ Returns a dictionary mapping Ensembl transcript IDs to FPKM expression values or None if neither Cufflinks tracking file nor StringTie GTF file were specified. """ if args.rna_transcript_fpkm_tracking_file: return load_cufflinks_fpkm_dict(args.rna_transcript_fpkm_tracking_file) # depends on [control=['if'], data=[]] elif args.rna_transcript_fpkm_gtf_file: return load_transcript_fpkm_dict_from_gtf(args.rna_transcript_fpkm_gtf_file) # depends on [control=['if'], data=[]] else: return None
def _wrap_value_with_context(self, value: dict or str, field_name: str, start: int=0, end: int=0) -> Extraction: """Wraps the final result""" return Extraction(value, self.name, start_token=start, end_token=end, tag=field_name)
def function[_wrap_value_with_context, parameter[self, value, field_name, start, end]]: constant[Wraps the final result] return[call[name[Extraction], parameter[name[value], name[self].name]]]
keyword[def] identifier[_wrap_value_with_context] ( identifier[self] , identifier[value] : identifier[dict] keyword[or] identifier[str] , identifier[field_name] : identifier[str] , identifier[start] : identifier[int] = literal[int] , identifier[end] : identifier[int] = literal[int] )-> identifier[Extraction] : literal[string] keyword[return] identifier[Extraction] ( identifier[value] , identifier[self] . identifier[name] , identifier[start_token] = identifier[start] , identifier[end_token] = identifier[end] , identifier[tag] = identifier[field_name] )
def _wrap_value_with_context(self, value: dict or str, field_name: str, start: int=0, end: int=0) -> Extraction: """Wraps the final result""" return Extraction(value, self.name, start_token=start, end_token=end, tag=field_name)
def lookup(values, name=None): """ Creates the grammar for a Lookup (L) field, accepting only values from a list. Like in the Alphanumeric field, the result will be stripped of all heading and trailing whitespaces. :param values: values allowed :param name: name for the field :return: grammar for the lookup field """ if name is None: name = 'Lookup Field' if values is None: raise ValueError('The values can no be None') # TODO: This should not be needed, it is just a patch. Fix this. try: v = values.asList() values = v except AttributeError: values = values # Only the specified values are allowed lookup_field = pp.oneOf(values) lookup_field.setName(name) lookup_field.setParseAction(lambda s: s[0].strip()) lookup_field.leaveWhitespace() return lookup_field
def function[lookup, parameter[values, name]]: constant[ Creates the grammar for a Lookup (L) field, accepting only values from a list. Like in the Alphanumeric field, the result will be stripped of all heading and trailing whitespaces. :param values: values allowed :param name: name for the field :return: grammar for the lookup field ] if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] constant[Lookup Field] if compare[name[values] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1990610> <ast.Try object at 0x7da1b1990a00> variable[lookup_field] assign[=] call[name[pp].oneOf, parameter[name[values]]] call[name[lookup_field].setName, parameter[name[name]]] call[name[lookup_field].setParseAction, parameter[<ast.Lambda object at 0x7da1b1993100>]] call[name[lookup_field].leaveWhitespace, parameter[]] return[name[lookup_field]]
keyword[def] identifier[lookup] ( identifier[values] , identifier[name] = keyword[None] ): literal[string] keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = literal[string] keyword[if] identifier[values] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[try] : identifier[v] = identifier[values] . identifier[asList] () identifier[values] = identifier[v] keyword[except] identifier[AttributeError] : identifier[values] = identifier[values] identifier[lookup_field] = identifier[pp] . identifier[oneOf] ( identifier[values] ) identifier[lookup_field] . identifier[setName] ( identifier[name] ) identifier[lookup_field] . identifier[setParseAction] ( keyword[lambda] identifier[s] : identifier[s] [ literal[int] ]. identifier[strip] ()) identifier[lookup_field] . identifier[leaveWhitespace] () keyword[return] identifier[lookup_field]
def lookup(values, name=None): """ Creates the grammar for a Lookup (L) field, accepting only values from a list. Like in the Alphanumeric field, the result will be stripped of all heading and trailing whitespaces. :param values: values allowed :param name: name for the field :return: grammar for the lookup field """ if name is None: name = 'Lookup Field' # depends on [control=['if'], data=['name']] if values is None: raise ValueError('The values can no be None') # depends on [control=['if'], data=[]] # TODO: This should not be needed, it is just a patch. Fix this. try: v = values.asList() values = v # depends on [control=['try'], data=[]] except AttributeError: values = values # depends on [control=['except'], data=[]] # Only the specified values are allowed lookup_field = pp.oneOf(values) lookup_field.setName(name) lookup_field.setParseAction(lambda s: s[0].strip()) lookup_field.leaveWhitespace() return lookup_field
def to_dict(self): """Returns parameters to replicate the distribution.""" result = { 'type': get_qualified_name(self), 'fitted': self.fitted, 'constant_value': self.constant_value } if not self.fitted: return result result.update(self._fit_params()) return result
def function[to_dict, parameter[self]]: constant[Returns parameters to replicate the distribution.] variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da20c6a9a80>, <ast.Constant object at 0x7da20c6aae60>, <ast.Constant object at 0x7da20c6abca0>], [<ast.Call object at 0x7da20c6aab90>, <ast.Attribute object at 0x7da1b1eac670>, <ast.Attribute object at 0x7da1b1ead720>]] if <ast.UnaryOp object at 0x7da1b1ead960> begin[:] return[name[result]] call[name[result].update, parameter[call[name[self]._fit_params, parameter[]]]] return[name[result]]
keyword[def] identifier[to_dict] ( identifier[self] ): literal[string] identifier[result] ={ literal[string] : identifier[get_qualified_name] ( identifier[self] ), literal[string] : identifier[self] . identifier[fitted] , literal[string] : identifier[self] . identifier[constant_value] } keyword[if] keyword[not] identifier[self] . identifier[fitted] : keyword[return] identifier[result] identifier[result] . identifier[update] ( identifier[self] . identifier[_fit_params] ()) keyword[return] identifier[result]
def to_dict(self): """Returns parameters to replicate the distribution.""" result = {'type': get_qualified_name(self), 'fitted': self.fitted, 'constant_value': self.constant_value} if not self.fitted: return result # depends on [control=['if'], data=[]] result.update(self._fit_params()) return result
def sysinfo(self): """Return a list of (key, value) pairs showing internal information.""" import coverage as covmod import platform, re try: implementation = platform.python_implementation() except AttributeError: implementation = "unknown" info = [ ('version', covmod.__version__), ('coverage', covmod.__file__), ('cover_dir', self.cover_dir), ('pylib_dirs', self.pylib_dirs), ('tracer', self.collector.tracer_name()), ('config_files', self.config.attempted_config_files), ('configs_read', self.config.config_files), ('data_path', self.data.filename), ('python', sys.version.replace('\n', '')), ('platform', platform.platform()), ('implementation', implementation), ('executable', sys.executable), ('cwd', os.getcwd()), ('path', sys.path), ('environment', sorted([ ("%s = %s" % (k, v)) for k, v in iitems(os.environ) if re.search(r"^COV|^PY", k) ])), ('command_line', " ".join(getattr(sys, 'argv', ['???']))), ] if self.source_match: info.append(('source_match', self.source_match.info())) if self.include_match: info.append(('include_match', self.include_match.info())) if self.omit_match: info.append(('omit_match', self.omit_match.info())) if self.cover_match: info.append(('cover_match', self.cover_match.info())) if self.pylib_match: info.append(('pylib_match', self.pylib_match.info())) return info
def function[sysinfo, parameter[self]]: constant[Return a list of (key, value) pairs showing internal information.] import module[coverage] as alias[covmod] import module[platform], module[re] <ast.Try object at 0x7da207f98910> variable[info] assign[=] list[[<ast.Tuple object at 0x7da207f9a170>, <ast.Tuple object at 0x7da207f987f0>, <ast.Tuple object at 0x7da207f999c0>, <ast.Tuple object at 0x7da207f98220>, <ast.Tuple object at 0x7da207f98e80>, <ast.Tuple object at 0x7da207f9a050>, <ast.Tuple object at 0x7da207f9b6d0>, <ast.Tuple object at 0x7da207f98cd0>, <ast.Tuple object at 0x7da207f9bdc0>, <ast.Tuple object at 0x7da207f995a0>, <ast.Tuple object at 0x7da207f989a0>, <ast.Tuple object at 0x7da207f9add0>, <ast.Tuple object at 0x7da207f9a530>, <ast.Tuple object at 0x7da207f9ba00>, <ast.Tuple object at 0x7da207f9af50>, <ast.Tuple object at 0x7da18f723280>]] if name[self].source_match begin[:] call[name[info].append, parameter[tuple[[<ast.Constant object at 0x7da18f720370>, <ast.Call object at 0x7da18f7222f0>]]]] if name[self].include_match begin[:] call[name[info].append, parameter[tuple[[<ast.Constant object at 0x7da18f7200d0>, <ast.Call object at 0x7da18f720820>]]]] if name[self].omit_match begin[:] call[name[info].append, parameter[tuple[[<ast.Constant object at 0x7da18c4cd270>, <ast.Call object at 0x7da18c4cf820>]]]] if name[self].cover_match begin[:] call[name[info].append, parameter[tuple[[<ast.Constant object at 0x7da18c4ce2c0>, <ast.Call object at 0x7da18c4cc0a0>]]]] if name[self].pylib_match begin[:] call[name[info].append, parameter[tuple[[<ast.Constant object at 0x7da18c4cc6a0>, <ast.Call object at 0x7da18c4cc610>]]]] return[name[info]]
keyword[def] identifier[sysinfo] ( identifier[self] ): literal[string] keyword[import] identifier[coverage] keyword[as] identifier[covmod] keyword[import] identifier[platform] , identifier[re] keyword[try] : identifier[implementation] = identifier[platform] . identifier[python_implementation] () keyword[except] identifier[AttributeError] : identifier[implementation] = literal[string] identifier[info] =[ ( literal[string] , identifier[covmod] . identifier[__version__] ), ( literal[string] , identifier[covmod] . identifier[__file__] ), ( literal[string] , identifier[self] . identifier[cover_dir] ), ( literal[string] , identifier[self] . identifier[pylib_dirs] ), ( literal[string] , identifier[self] . identifier[collector] . identifier[tracer_name] ()), ( literal[string] , identifier[self] . identifier[config] . identifier[attempted_config_files] ), ( literal[string] , identifier[self] . identifier[config] . identifier[config_files] ), ( literal[string] , identifier[self] . identifier[data] . identifier[filename] ), ( literal[string] , identifier[sys] . identifier[version] . identifier[replace] ( literal[string] , literal[string] )), ( literal[string] , identifier[platform] . identifier[platform] ()), ( literal[string] , identifier[implementation] ), ( literal[string] , identifier[sys] . identifier[executable] ), ( literal[string] , identifier[os] . identifier[getcwd] ()), ( literal[string] , identifier[sys] . identifier[path] ), ( literal[string] , identifier[sorted] ([ ( literal[string] %( identifier[k] , identifier[v] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iitems] ( identifier[os] . identifier[environ] ) keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[k] ) ])), ( literal[string] , literal[string] . identifier[join] ( identifier[getattr] ( identifier[sys] , literal[string] ,[ literal[string] ]))), ] keyword[if] identifier[self] . identifier[source_match] : identifier[info] . identifier[append] (( literal[string] , identifier[self] . identifier[source_match] . identifier[info] ())) keyword[if] identifier[self] . identifier[include_match] : identifier[info] . identifier[append] (( literal[string] , identifier[self] . identifier[include_match] . identifier[info] ())) keyword[if] identifier[self] . identifier[omit_match] : identifier[info] . identifier[append] (( literal[string] , identifier[self] . identifier[omit_match] . identifier[info] ())) keyword[if] identifier[self] . identifier[cover_match] : identifier[info] . identifier[append] (( literal[string] , identifier[self] . identifier[cover_match] . identifier[info] ())) keyword[if] identifier[self] . identifier[pylib_match] : identifier[info] . identifier[append] (( literal[string] , identifier[self] . identifier[pylib_match] . identifier[info] ())) keyword[return] identifier[info]
def sysinfo(self): """Return a list of (key, value) pairs showing internal information.""" import coverage as covmod import platform, re try: implementation = platform.python_implementation() # depends on [control=['try'], data=[]] except AttributeError: implementation = 'unknown' # depends on [control=['except'], data=[]] info = [('version', covmod.__version__), ('coverage', covmod.__file__), ('cover_dir', self.cover_dir), ('pylib_dirs', self.pylib_dirs), ('tracer', self.collector.tracer_name()), ('config_files', self.config.attempted_config_files), ('configs_read', self.config.config_files), ('data_path', self.data.filename), ('python', sys.version.replace('\n', '')), ('platform', platform.platform()), ('implementation', implementation), ('executable', sys.executable), ('cwd', os.getcwd()), ('path', sys.path), ('environment', sorted(['%s = %s' % (k, v) for (k, v) in iitems(os.environ) if re.search('^COV|^PY', k)])), ('command_line', ' '.join(getattr(sys, 'argv', ['???'])))] if self.source_match: info.append(('source_match', self.source_match.info())) # depends on [control=['if'], data=[]] if self.include_match: info.append(('include_match', self.include_match.info())) # depends on [control=['if'], data=[]] if self.omit_match: info.append(('omit_match', self.omit_match.info())) # depends on [control=['if'], data=[]] if self.cover_match: info.append(('cover_match', self.cover_match.info())) # depends on [control=['if'], data=[]] if self.pylib_match: info.append(('pylib_match', self.pylib_match.info())) # depends on [control=['if'], data=[]] return info
def is_stop_here(self, frame, event, arg): """ Does the magic to determine if we stop here and run a command processor or not. If so, return True and set self.stop_reason; if not, return False. Determining factors can be whether a breakpoint was encountered, whether we are stepping, next'ing, finish'ing, and, if so, whether there is an ignore counter. """ # Add an generic event filter here? # FIXME TODO: Check for # - thread switching (under set option) # Check for "next" and "finish" stopping via stop_level # Do we want a different line and if so, # do we have one? lineno = frame.f_lineno filename = frame.f_code.co_filename if self.different_line and event == 'line': if self.last_lineno == lineno and self.last_filename == filename: return False pass self.last_lineno = lineno self.last_filename = filename if self.stop_level is not None: if frame != self.last_frame: # Recompute stack_depth self.last_level = Mstack.count_frames(frame) self.last_frame = frame pass if self.last_level > self.stop_level: return False elif self.last_level == self.stop_level and \ self.stop_on_finish and event in ['return', 'c_return']: self.stop_level = None self.stop_reason = "in return for 'finish' command" return True pass # Check for stepping if self._is_step_next_stop(event): self.stop_reason = 'at a stepping statement' return True return False
def function[is_stop_here, parameter[self, frame, event, arg]]: constant[ Does the magic to determine if we stop here and run a command processor or not. If so, return True and set self.stop_reason; if not, return False. Determining factors can be whether a breakpoint was encountered, whether we are stepping, next'ing, finish'ing, and, if so, whether there is an ignore counter. ] variable[lineno] assign[=] name[frame].f_lineno variable[filename] assign[=] name[frame].f_code.co_filename if <ast.BoolOp object at 0x7da1b032fd60> begin[:] if <ast.BoolOp object at 0x7da1b032dcc0> begin[:] return[constant[False]] pass name[self].last_lineno assign[=] name[lineno] name[self].last_filename assign[=] name[filename] if compare[name[self].stop_level is_not constant[None]] begin[:] if compare[name[frame] not_equal[!=] name[self].last_frame] begin[:] name[self].last_level assign[=] call[name[Mstack].count_frames, parameter[name[frame]]] name[self].last_frame assign[=] name[frame] pass if compare[name[self].last_level greater[>] name[self].stop_level] begin[:] return[constant[False]] pass if call[name[self]._is_step_next_stop, parameter[name[event]]] begin[:] name[self].stop_reason assign[=] constant[at a stepping statement] return[constant[True]] return[constant[False]]
keyword[def] identifier[is_stop_here] ( identifier[self] , identifier[frame] , identifier[event] , identifier[arg] ): literal[string] identifier[lineno] = identifier[frame] . identifier[f_lineno] identifier[filename] = identifier[frame] . identifier[f_code] . identifier[co_filename] keyword[if] identifier[self] . identifier[different_line] keyword[and] identifier[event] == literal[string] : keyword[if] identifier[self] . identifier[last_lineno] == identifier[lineno] keyword[and] identifier[self] . identifier[last_filename] == identifier[filename] : keyword[return] keyword[False] keyword[pass] identifier[self] . identifier[last_lineno] = identifier[lineno] identifier[self] . identifier[last_filename] = identifier[filename] keyword[if] identifier[self] . identifier[stop_level] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[frame] != identifier[self] . identifier[last_frame] : identifier[self] . identifier[last_level] = identifier[Mstack] . identifier[count_frames] ( identifier[frame] ) identifier[self] . identifier[last_frame] = identifier[frame] keyword[pass] keyword[if] identifier[self] . identifier[last_level] > identifier[self] . identifier[stop_level] : keyword[return] keyword[False] keyword[elif] identifier[self] . identifier[last_level] == identifier[self] . identifier[stop_level] keyword[and] identifier[self] . identifier[stop_on_finish] keyword[and] identifier[event] keyword[in] [ literal[string] , literal[string] ]: identifier[self] . identifier[stop_level] = keyword[None] identifier[self] . identifier[stop_reason] = literal[string] keyword[return] keyword[True] keyword[pass] keyword[if] identifier[self] . identifier[_is_step_next_stop] ( identifier[event] ): identifier[self] . identifier[stop_reason] = literal[string] keyword[return] keyword[True] keyword[return] keyword[False]
def is_stop_here(self, frame, event, arg): """ Does the magic to determine if we stop here and run a command processor or not. If so, return True and set self.stop_reason; if not, return False. Determining factors can be whether a breakpoint was encountered, whether we are stepping, next'ing, finish'ing, and, if so, whether there is an ignore counter. """ # Add an generic event filter here? # FIXME TODO: Check for # - thread switching (under set option) # Check for "next" and "finish" stopping via stop_level # Do we want a different line and if so, # do we have one? lineno = frame.f_lineno filename = frame.f_code.co_filename if self.different_line and event == 'line': if self.last_lineno == lineno and self.last_filename == filename: return False # depends on [control=['if'], data=[]] pass # depends on [control=['if'], data=[]] self.last_lineno = lineno self.last_filename = filename if self.stop_level is not None: if frame != self.last_frame: # Recompute stack_depth self.last_level = Mstack.count_frames(frame) self.last_frame = frame pass # depends on [control=['if'], data=['frame']] if self.last_level > self.stop_level: return False # depends on [control=['if'], data=[]] elif self.last_level == self.stop_level and self.stop_on_finish and (event in ['return', 'c_return']): self.stop_level = None self.stop_reason = "in return for 'finish' command" return True # depends on [control=['if'], data=[]] pass # depends on [control=['if'], data=[]] # Check for stepping if self._is_step_next_stop(event): self.stop_reason = 'at a stepping statement' return True # depends on [control=['if'], data=[]] return False
def _handle_blacklisted_tag(self): """Handle the body of an HTML tag that is parser-blacklisted.""" strip = lambda text: text.rstrip().lower() while True: this, next = self._read(), self._read(1) if this is self.END: self._fail_route() elif this == "<" and next == "/": self._head += 3 if self._read() != ">" or (strip(self._read(-1)) != strip(self._stack[1].text)): self._head -= 1 self._emit_text("</") continue self._emit(tokens.TagOpenClose()) self._emit_text(self._read(-1)) self._emit(tokens.TagCloseClose()) return self._pop() elif this == "&": self._parse_entity() else: self._emit_text(this) self._head += 1
def function[_handle_blacklisted_tag, parameter[self]]: constant[Handle the body of an HTML tag that is parser-blacklisted.] variable[strip] assign[=] <ast.Lambda object at 0x7da2047e9ba0> while constant[True] begin[:] <ast.Tuple object at 0x7da2047e9a20> assign[=] tuple[[<ast.Call object at 0x7da2047e82e0>, <ast.Call object at 0x7da2047ea3e0>]] if compare[name[this] is name[self].END] begin[:] call[name[self]._fail_route, parameter[]] <ast.AugAssign object at 0x7da18c4cf700>
keyword[def] identifier[_handle_blacklisted_tag] ( identifier[self] ): literal[string] identifier[strip] = keyword[lambda] identifier[text] : identifier[text] . identifier[rstrip] (). identifier[lower] () keyword[while] keyword[True] : identifier[this] , identifier[next] = identifier[self] . identifier[_read] (), identifier[self] . identifier[_read] ( literal[int] ) keyword[if] identifier[this] keyword[is] identifier[self] . identifier[END] : identifier[self] . identifier[_fail_route] () keyword[elif] identifier[this] == literal[string] keyword[and] identifier[next] == literal[string] : identifier[self] . identifier[_head] += literal[int] keyword[if] identifier[self] . identifier[_read] ()!= literal[string] keyword[or] ( identifier[strip] ( identifier[self] . identifier[_read] (- literal[int] ))!= identifier[strip] ( identifier[self] . identifier[_stack] [ literal[int] ]. identifier[text] )): identifier[self] . identifier[_head] -= literal[int] identifier[self] . identifier[_emit_text] ( literal[string] ) keyword[continue] identifier[self] . identifier[_emit] ( identifier[tokens] . identifier[TagOpenClose] ()) identifier[self] . identifier[_emit_text] ( identifier[self] . identifier[_read] (- literal[int] )) identifier[self] . identifier[_emit] ( identifier[tokens] . identifier[TagCloseClose] ()) keyword[return] identifier[self] . identifier[_pop] () keyword[elif] identifier[this] == literal[string] : identifier[self] . identifier[_parse_entity] () keyword[else] : identifier[self] . identifier[_emit_text] ( identifier[this] ) identifier[self] . identifier[_head] += literal[int]
def _handle_blacklisted_tag(self): """Handle the body of an HTML tag that is parser-blacklisted.""" strip = lambda text: text.rstrip().lower() while True: (this, next) = (self._read(), self._read(1)) if this is self.END: self._fail_route() # depends on [control=['if'], data=[]] elif this == '<' and next == '/': self._head += 3 if self._read() != '>' or strip(self._read(-1)) != strip(self._stack[1].text): self._head -= 1 self._emit_text('</') continue # depends on [control=['if'], data=[]] self._emit(tokens.TagOpenClose()) self._emit_text(self._read(-1)) self._emit(tokens.TagCloseClose()) return self._pop() # depends on [control=['if'], data=[]] elif this == '&': self._parse_entity() # depends on [control=['if'], data=[]] else: self._emit_text(this) self._head += 1 # depends on [control=['while'], data=[]]
def collision_warning(self, item): """ Given a string, print a warning if this could collide with a Zappa core package module. Use for app functions and events. """ namespace_collisions = [ "zappa.", "wsgi.", "middleware.", "handler.", "util.", "letsencrypt.", "cli." ] for namespace_collision in namespace_collisions: if item.startswith(namespace_collision): click.echo(click.style("Warning!", fg="red", bold=True) + " You may have a namespace collision between " + click.style(item, bold=True) + " and " + click.style(namespace_collision, bold=True) + "! You may want to rename that file.")
def function[collision_warning, parameter[self, item]]: constant[ Given a string, print a warning if this could collide with a Zappa core package module. Use for app functions and events. ] variable[namespace_collisions] assign[=] list[[<ast.Constant object at 0x7da1b1fe5060>, <ast.Constant object at 0x7da1b1fe5cf0>, <ast.Constant object at 0x7da1b1fe5810>, <ast.Constant object at 0x7da1b1fe6920>, <ast.Constant object at 0x7da1b1fe5720>, <ast.Constant object at 0x7da1b1fe6140>, <ast.Constant object at 0x7da1b1fe5bd0>]] for taget[name[namespace_collision]] in starred[name[namespace_collisions]] begin[:] if call[name[item].startswith, parameter[name[namespace_collision]]] begin[:] call[name[click].echo, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[call[name[click].style, parameter[constant[Warning!]]] + constant[ You may have a namespace collision between ]] + call[name[click].style, parameter[name[item]]]] + constant[ and ]] + call[name[click].style, parameter[name[namespace_collision]]]] + constant[! You may want to rename that file.]]]]
keyword[def] identifier[collision_warning] ( identifier[self] , identifier[item] ): literal[string] identifier[namespace_collisions] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[namespace_collision] keyword[in] identifier[namespace_collisions] : keyword[if] identifier[item] . identifier[startswith] ( identifier[namespace_collision] ): identifier[click] . identifier[echo] ( identifier[click] . identifier[style] ( literal[string] , identifier[fg] = literal[string] , identifier[bold] = keyword[True] )+ literal[string] + identifier[click] . identifier[style] ( identifier[item] , identifier[bold] = keyword[True] )+ literal[string] + identifier[click] . identifier[style] ( identifier[namespace_collision] , identifier[bold] = keyword[True] )+ literal[string] )
def collision_warning(self, item): """ Given a string, print a warning if this could collide with a Zappa core package module. Use for app functions and events. """ namespace_collisions = ['zappa.', 'wsgi.', 'middleware.', 'handler.', 'util.', 'letsencrypt.', 'cli.'] for namespace_collision in namespace_collisions: if item.startswith(namespace_collision): click.echo(click.style('Warning!', fg='red', bold=True) + ' You may have a namespace collision between ' + click.style(item, bold=True) + ' and ' + click.style(namespace_collision, bold=True) + '! You may want to rename that file.') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['namespace_collision']]
def _all_feature_values( self, column, feature, distinct=True, contig=None, strand=None): """ Cached lookup of all values for a particular feature property from the database, caches repeated queries in memory and stores them as a CSV. Parameters ---------- column : str Name of property (e.g. exon_id) feature : str Type of entry (e.g. exon) distinct : bool, optional Keep only unique values contig : str, optional Restrict query to particular contig strand : str, optional Restrict results to "+" or "-" strands Returns a list constructed from query results. """ return self.db.query_feature_values( column=column, feature=feature, distinct=distinct, contig=contig, strand=strand)
def function[_all_feature_values, parameter[self, column, feature, distinct, contig, strand]]: constant[ Cached lookup of all values for a particular feature property from the database, caches repeated queries in memory and stores them as a CSV. Parameters ---------- column : str Name of property (e.g. exon_id) feature : str Type of entry (e.g. exon) distinct : bool, optional Keep only unique values contig : str, optional Restrict query to particular contig strand : str, optional Restrict results to "+" or "-" strands Returns a list constructed from query results. ] return[call[name[self].db.query_feature_values, parameter[]]]
keyword[def] identifier[_all_feature_values] ( identifier[self] , identifier[column] , identifier[feature] , identifier[distinct] = keyword[True] , identifier[contig] = keyword[None] , identifier[strand] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[db] . identifier[query_feature_values] ( identifier[column] = identifier[column] , identifier[feature] = identifier[feature] , identifier[distinct] = identifier[distinct] , identifier[contig] = identifier[contig] , identifier[strand] = identifier[strand] )
def _all_feature_values(self, column, feature, distinct=True, contig=None, strand=None): """ Cached lookup of all values for a particular feature property from the database, caches repeated queries in memory and stores them as a CSV. Parameters ---------- column : str Name of property (e.g. exon_id) feature : str Type of entry (e.g. exon) distinct : bool, optional Keep only unique values contig : str, optional Restrict query to particular contig strand : str, optional Restrict results to "+" or "-" strands Returns a list constructed from query results. """ return self.db.query_feature_values(column=column, feature=feature, distinct=distinct, contig=contig, strand=strand)
def start(self): """Starts the execution of the root state. """ # load default input data for the state self._root_state.input_data = self._root_state.get_default_input_values_for_state(self._root_state) self._root_state.output_data = self._root_state.create_output_dictionary_for_state(self._root_state) new_execution_history = self._add_new_execution_history() new_execution_history.push_state_machine_start_history_item(self, run_id_generator()) self._root_state.start(new_execution_history)
def function[start, parameter[self]]: constant[Starts the execution of the root state. ] name[self]._root_state.input_data assign[=] call[name[self]._root_state.get_default_input_values_for_state, parameter[name[self]._root_state]] name[self]._root_state.output_data assign[=] call[name[self]._root_state.create_output_dictionary_for_state, parameter[name[self]._root_state]] variable[new_execution_history] assign[=] call[name[self]._add_new_execution_history, parameter[]] call[name[new_execution_history].push_state_machine_start_history_item, parameter[name[self], call[name[run_id_generator], parameter[]]]] call[name[self]._root_state.start, parameter[name[new_execution_history]]]
keyword[def] identifier[start] ( identifier[self] ): literal[string] identifier[self] . identifier[_root_state] . identifier[input_data] = identifier[self] . identifier[_root_state] . identifier[get_default_input_values_for_state] ( identifier[self] . identifier[_root_state] ) identifier[self] . identifier[_root_state] . identifier[output_data] = identifier[self] . identifier[_root_state] . identifier[create_output_dictionary_for_state] ( identifier[self] . identifier[_root_state] ) identifier[new_execution_history] = identifier[self] . identifier[_add_new_execution_history] () identifier[new_execution_history] . identifier[push_state_machine_start_history_item] ( identifier[self] , identifier[run_id_generator] ()) identifier[self] . identifier[_root_state] . identifier[start] ( identifier[new_execution_history] )
def start(self): """Starts the execution of the root state. """ # load default input data for the state self._root_state.input_data = self._root_state.get_default_input_values_for_state(self._root_state) self._root_state.output_data = self._root_state.create_output_dictionary_for_state(self._root_state) new_execution_history = self._add_new_execution_history() new_execution_history.push_state_machine_start_history_item(self, run_id_generator()) self._root_state.start(new_execution_history)
async def stop(self): """ Stop the transport. """ if self._association_state != self.State.CLOSED: await self._abort() self.__transport._unregister_data_receiver(self) self._set_state(self.State.CLOSED)
<ast.AsyncFunctionDef object at 0x7da2054a62c0>
keyword[async] keyword[def] identifier[stop] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_association_state] != identifier[self] . identifier[State] . identifier[CLOSED] : keyword[await] identifier[self] . identifier[_abort] () identifier[self] . identifier[__transport] . identifier[_unregister_data_receiver] ( identifier[self] ) identifier[self] . identifier[_set_state] ( identifier[self] . identifier[State] . identifier[CLOSED] )
async def stop(self): """ Stop the transport. """ if self._association_state != self.State.CLOSED: await self._abort() # depends on [control=['if'], data=[]] self.__transport._unregister_data_receiver(self) self._set_state(self.State.CLOSED)
def unpackcFunc(self): ''' "Unpacks" the consumption functions into their own field for easier access. After the model has been solved, the consumption functions reside in the attribute cFunc of each element of ConsumerType.solution. This method creates a (time varying) attribute cFunc that contains a list of consumption functions. Parameters ---------- none Returns ------- none ''' self.cFunc = [] for solution_t in self.solution: self.cFunc.append(solution_t.cFunc) self.addToTimeVary('cFunc')
def function[unpackcFunc, parameter[self]]: constant[ "Unpacks" the consumption functions into their own field for easier access. After the model has been solved, the consumption functions reside in the attribute cFunc of each element of ConsumerType.solution. This method creates a (time varying) attribute cFunc that contains a list of consumption functions. Parameters ---------- none Returns ------- none ] name[self].cFunc assign[=] list[[]] for taget[name[solution_t]] in starred[name[self].solution] begin[:] call[name[self].cFunc.append, parameter[name[solution_t].cFunc]] call[name[self].addToTimeVary, parameter[constant[cFunc]]]
keyword[def] identifier[unpackcFunc] ( identifier[self] ): literal[string] identifier[self] . identifier[cFunc] =[] keyword[for] identifier[solution_t] keyword[in] identifier[self] . identifier[solution] : identifier[self] . identifier[cFunc] . identifier[append] ( identifier[solution_t] . identifier[cFunc] ) identifier[self] . identifier[addToTimeVary] ( literal[string] )
def unpackcFunc(self): """ "Unpacks" the consumption functions into their own field for easier access. After the model has been solved, the consumption functions reside in the attribute cFunc of each element of ConsumerType.solution. This method creates a (time varying) attribute cFunc that contains a list of consumption functions. Parameters ---------- none Returns ------- none """ self.cFunc = [] for solution_t in self.solution: self.cFunc.append(solution_t.cFunc) # depends on [control=['for'], data=['solution_t']] self.addToTimeVary('cFunc')
def _encode_message(cls, message): """ Encode a single message. The magic number of a message is a format version number. The only supported magic number right now is zero Format ====== Message => Crc MagicByte Attributes Key Value Crc => int32 MagicByte => int8 Attributes => int8 Key => bytes Value => bytes """ if message.magic == 0: msg = b''.join([ struct.pack('>BB', message.magic, message.attributes), write_int_string(message.key), write_int_string(message.value) ]) crc = crc32(msg) msg = struct.pack('>I%ds' % len(msg), crc, msg) else: raise ProtocolError("Unexpected magic number: %d" % message.magic) return msg
def function[_encode_message, parameter[cls, message]]: constant[ Encode a single message. The magic number of a message is a format version number. The only supported magic number right now is zero Format ====== Message => Crc MagicByte Attributes Key Value Crc => int32 MagicByte => int8 Attributes => int8 Key => bytes Value => bytes ] if compare[name[message].magic equal[==] constant[0]] begin[:] variable[msg] assign[=] call[constant[b''].join, parameter[list[[<ast.Call object at 0x7da1b199a230>, <ast.Call object at 0x7da1b199b490>, <ast.Call object at 0x7da1b199a530>]]]] variable[crc] assign[=] call[name[crc32], parameter[name[msg]]] variable[msg] assign[=] call[name[struct].pack, parameter[binary_operation[constant[>I%ds] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[msg]]]], name[crc], name[msg]]] return[name[msg]]
keyword[def] identifier[_encode_message] ( identifier[cls] , identifier[message] ): literal[string] keyword[if] identifier[message] . identifier[magic] == literal[int] : identifier[msg] = literal[string] . identifier[join] ([ identifier[struct] . identifier[pack] ( literal[string] , identifier[message] . identifier[magic] , identifier[message] . identifier[attributes] ), identifier[write_int_string] ( identifier[message] . identifier[key] ), identifier[write_int_string] ( identifier[message] . identifier[value] ) ]) identifier[crc] = identifier[crc32] ( identifier[msg] ) identifier[msg] = identifier[struct] . identifier[pack] ( literal[string] % identifier[len] ( identifier[msg] ), identifier[crc] , identifier[msg] ) keyword[else] : keyword[raise] identifier[ProtocolError] ( literal[string] % identifier[message] . identifier[magic] ) keyword[return] identifier[msg]
def _encode_message(cls, message): """ Encode a single message. The magic number of a message is a format version number. The only supported magic number right now is zero Format ====== Message => Crc MagicByte Attributes Key Value Crc => int32 MagicByte => int8 Attributes => int8 Key => bytes Value => bytes """ if message.magic == 0: msg = b''.join([struct.pack('>BB', message.magic, message.attributes), write_int_string(message.key), write_int_string(message.value)]) crc = crc32(msg) msg = struct.pack('>I%ds' % len(msg), crc, msg) # depends on [control=['if'], data=[]] else: raise ProtocolError('Unexpected magic number: %d' % message.magic) return msg
def add_section(self, section): """Add a new Section object to the config. Should be a subclass of _AbstractSection.""" if not issubclass(section.__class__, _AbstractSection): raise TypeError("argument should be a subclass of Section") self.sections[section.get_key_name()] = section return section
def function[add_section, parameter[self, section]]: constant[Add a new Section object to the config. Should be a subclass of _AbstractSection.] if <ast.UnaryOp object at 0x7da1b15e6410> begin[:] <ast.Raise object at 0x7da1b15e7820> call[name[self].sections][call[name[section].get_key_name, parameter[]]] assign[=] name[section] return[name[section]]
keyword[def] identifier[add_section] ( identifier[self] , identifier[section] ): literal[string] keyword[if] keyword[not] identifier[issubclass] ( identifier[section] . identifier[__class__] , identifier[_AbstractSection] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[self] . identifier[sections] [ identifier[section] . identifier[get_key_name] ()]= identifier[section] keyword[return] identifier[section]
def add_section(self, section): """Add a new Section object to the config. Should be a subclass of _AbstractSection.""" if not issubclass(section.__class__, _AbstractSection): raise TypeError('argument should be a subclass of Section') # depends on [control=['if'], data=[]] self.sections[section.get_key_name()] = section return section
def predict(self, nSteps): """ This function gives the future predictions for <nSteps> timesteps starting from the current TP state. The TP is returned to its original state at the end before returning. 1) We save the TP state. 2) Loop for nSteps a) Turn-on with lateral support from the current active cells b) Set the predicted cells as the next step's active cells. This step in learn and infer methods use input here to correct the predictions. We don't use any input here. 3) Revert back the TP state to the time before prediction Parameters: -------------------------------------------- nSteps: The number of future time steps to be predicted retval: all the future predictions - a numpy array of type "float32" and shape (nSteps, numberOfCols). The ith row gives the tp prediction for each column at a future timestep (t+i+1). """ # Save the TP dynamic state, we will use to revert back in the end pristineTPDynamicState = self._getTPDynamicState() assert (nSteps>0) # multiStepColumnPredictions holds all the future prediction. multiStepColumnPredictions = numpy.zeros((nSteps, self.numberOfCols), dtype="float32") # This is a (nSteps-1)+half loop. Phase 2 in both learn and infer methods # already predicts for timestep (t+1). We use that prediction for free and save # the half-a-loop of work. step = 0 while True: # We get the prediction for the columns in the next time step from # the topDownCompute method. It internally uses confidences. multiStepColumnPredictions[step,:] = self.topDownCompute() # Cleanest way in python to handle one and half loops if step == nSteps-1: break step += 1 # Copy t-1 into t self.activeState['t-1'][:,:] = self.activeState['t'][:,:] self.predictedState['t-1'][:,:] = self.predictedState['t'][:,:] self.confidence['t-1'][:,:] = self.confidence['t'][:,:] # Predicted state at "t-1" becomes the active state at "t" self.activeState['t'][:,:] = self.predictedState['t-1'][:,:] # Predicted state and confidence are set in phase2. self.predictedState['t'].fill(0) self.confidence['t'].fill(0.0) self.computePhase2(doLearn=False) # Revert the dynamic state to the saved state self._setTPDynamicState(pristineTPDynamicState) return multiStepColumnPredictions
def function[predict, parameter[self, nSteps]]: constant[ This function gives the future predictions for <nSteps> timesteps starting from the current TP state. The TP is returned to its original state at the end before returning. 1) We save the TP state. 2) Loop for nSteps a) Turn-on with lateral support from the current active cells b) Set the predicted cells as the next step's active cells. This step in learn and infer methods use input here to correct the predictions. We don't use any input here. 3) Revert back the TP state to the time before prediction Parameters: -------------------------------------------- nSteps: The number of future time steps to be predicted retval: all the future predictions - a numpy array of type "float32" and shape (nSteps, numberOfCols). The ith row gives the tp prediction for each column at a future timestep (t+i+1). ] variable[pristineTPDynamicState] assign[=] call[name[self]._getTPDynamicState, parameter[]] assert[compare[name[nSteps] greater[>] constant[0]]] variable[multiStepColumnPredictions] assign[=] call[name[numpy].zeros, parameter[tuple[[<ast.Name object at 0x7da1b086e7d0>, <ast.Attribute object at 0x7da1b086e410>]]]] variable[step] assign[=] constant[0] while constant[True] begin[:] call[name[multiStepColumnPredictions]][tuple[[<ast.Name object at 0x7da1b086d330>, <ast.Slice object at 0x7da1b086dfc0>]]] assign[=] call[name[self].topDownCompute, parameter[]] if compare[name[step] equal[==] binary_operation[name[nSteps] - constant[1]]] begin[:] break <ast.AugAssign object at 0x7da1b0866ce0> call[call[name[self].activeState][constant[t-1]]][tuple[[<ast.Slice object at 0x7da1b0866c50>, <ast.Slice object at 0x7da1b0865cf0>]]] assign[=] call[call[name[self].activeState][constant[t]]][tuple[[<ast.Slice object at 0x7da1b0866da0>, <ast.Slice object at 0x7da1b0865b10>]]] call[call[name[self].predictedState][constant[t-1]]][tuple[[<ast.Slice object at 0x7da1b0865ba0>, <ast.Slice object at 0x7da1b0865810>]]] assign[=] call[call[name[self].predictedState][constant[t]]][tuple[[<ast.Slice object at 0x7da1b0832b30>, <ast.Slice object at 0x7da1b0833880>]]] call[call[name[self].confidence][constant[t-1]]][tuple[[<ast.Slice object at 0x7da1b08bf9d0>, <ast.Slice object at 0x7da1b08bdf60>]]] assign[=] call[call[name[self].confidence][constant[t]]][tuple[[<ast.Slice object at 0x7da1b08bc790>, <ast.Slice object at 0x7da1b08bf0a0>]]] call[call[name[self].activeState][constant[t]]][tuple[[<ast.Slice object at 0x7da1b08befe0>, <ast.Slice object at 0x7da1b08bca30>]]] assign[=] call[call[name[self].predictedState][constant[t-1]]][tuple[[<ast.Slice object at 0x7da1b08bc940>, <ast.Slice object at 0x7da1b08bf940>]]] call[call[name[self].predictedState][constant[t]].fill, parameter[constant[0]]] call[call[name[self].confidence][constant[t]].fill, parameter[constant[0.0]]] call[name[self].computePhase2, parameter[]] call[name[self]._setTPDynamicState, parameter[name[pristineTPDynamicState]]] return[name[multiStepColumnPredictions]]
keyword[def] identifier[predict] ( identifier[self] , identifier[nSteps] ): literal[string] identifier[pristineTPDynamicState] = identifier[self] . identifier[_getTPDynamicState] () keyword[assert] ( identifier[nSteps] > literal[int] ) identifier[multiStepColumnPredictions] = identifier[numpy] . identifier[zeros] (( identifier[nSteps] , identifier[self] . identifier[numberOfCols] ), identifier[dtype] = literal[string] ) identifier[step] = literal[int] keyword[while] keyword[True] : identifier[multiStepColumnPredictions] [ identifier[step] ,:]= identifier[self] . identifier[topDownCompute] () keyword[if] identifier[step] == identifier[nSteps] - literal[int] : keyword[break] identifier[step] += literal[int] identifier[self] . identifier[activeState] [ literal[string] ][:,:]= identifier[self] . identifier[activeState] [ literal[string] ][:,:] identifier[self] . identifier[predictedState] [ literal[string] ][:,:]= identifier[self] . identifier[predictedState] [ literal[string] ][:,:] identifier[self] . identifier[confidence] [ literal[string] ][:,:]= identifier[self] . identifier[confidence] [ literal[string] ][:,:] identifier[self] . identifier[activeState] [ literal[string] ][:,:]= identifier[self] . identifier[predictedState] [ literal[string] ][:,:] identifier[self] . identifier[predictedState] [ literal[string] ]. identifier[fill] ( literal[int] ) identifier[self] . identifier[confidence] [ literal[string] ]. identifier[fill] ( literal[int] ) identifier[self] . identifier[computePhase2] ( identifier[doLearn] = keyword[False] ) identifier[self] . identifier[_setTPDynamicState] ( identifier[pristineTPDynamicState] ) keyword[return] identifier[multiStepColumnPredictions]
def predict(self, nSteps): """ This function gives the future predictions for <nSteps> timesteps starting from the current TP state. The TP is returned to its original state at the end before returning. 1) We save the TP state. 2) Loop for nSteps a) Turn-on with lateral support from the current active cells b) Set the predicted cells as the next step's active cells. This step in learn and infer methods use input here to correct the predictions. We don't use any input here. 3) Revert back the TP state to the time before prediction Parameters: -------------------------------------------- nSteps: The number of future time steps to be predicted retval: all the future predictions - a numpy array of type "float32" and shape (nSteps, numberOfCols). The ith row gives the tp prediction for each column at a future timestep (t+i+1). """ # Save the TP dynamic state, we will use to revert back in the end pristineTPDynamicState = self._getTPDynamicState() assert nSteps > 0 # multiStepColumnPredictions holds all the future prediction. multiStepColumnPredictions = numpy.zeros((nSteps, self.numberOfCols), dtype='float32') # This is a (nSteps-1)+half loop. Phase 2 in both learn and infer methods # already predicts for timestep (t+1). We use that prediction for free and save # the half-a-loop of work. step = 0 while True: # We get the prediction for the columns in the next time step from # the topDownCompute method. It internally uses confidences. multiStepColumnPredictions[step, :] = self.topDownCompute() # Cleanest way in python to handle one and half loops if step == nSteps - 1: break # depends on [control=['if'], data=[]] step += 1 # Copy t-1 into t self.activeState['t-1'][:, :] = self.activeState['t'][:, :] self.predictedState['t-1'][:, :] = self.predictedState['t'][:, :] self.confidence['t-1'][:, :] = self.confidence['t'][:, :] # Predicted state at "t-1" becomes the active state at "t" self.activeState['t'][:, :] = self.predictedState['t-1'][:, :] # Predicted state and confidence are set in phase2. self.predictedState['t'].fill(0) self.confidence['t'].fill(0.0) self.computePhase2(doLearn=False) # depends on [control=['while'], data=[]] # Revert the dynamic state to the saved state self._setTPDynamicState(pristineTPDynamicState) return multiStepColumnPredictions
def build_from_issue_comment(gh_token, body): """Create a WebhookMetadata from a comment added to an issue. """ if body["action"] in ["created", "edited"]: github_con = Github(gh_token) repo = github_con.get_repo(body['repository']['full_name']) issue = repo.get_issue(body['issue']['number']) text = body['comment']['body'] try: comment = issue.get_comment(body['comment']['id']) except UnknownObjectException: # If the comment has already disapeared, skip the command return None return WebhookMetadata(repo, issue, text, comment) return None
def function[build_from_issue_comment, parameter[gh_token, body]]: constant[Create a WebhookMetadata from a comment added to an issue. ] if compare[call[name[body]][constant[action]] in list[[<ast.Constant object at 0x7da1b25eac20>, <ast.Constant object at 0x7da1b25e97e0>]]] begin[:] variable[github_con] assign[=] call[name[Github], parameter[name[gh_token]]] variable[repo] assign[=] call[name[github_con].get_repo, parameter[call[call[name[body]][constant[repository]]][constant[full_name]]]] variable[issue] assign[=] call[name[repo].get_issue, parameter[call[call[name[body]][constant[issue]]][constant[number]]]] variable[text] assign[=] call[call[name[body]][constant[comment]]][constant[body]] <ast.Try object at 0x7da1b25eadd0> return[call[name[WebhookMetadata], parameter[name[repo], name[issue], name[text], name[comment]]]] return[constant[None]]
keyword[def] identifier[build_from_issue_comment] ( identifier[gh_token] , identifier[body] ): literal[string] keyword[if] identifier[body] [ literal[string] ] keyword[in] [ literal[string] , literal[string] ]: identifier[github_con] = identifier[Github] ( identifier[gh_token] ) identifier[repo] = identifier[github_con] . identifier[get_repo] ( identifier[body] [ literal[string] ][ literal[string] ]) identifier[issue] = identifier[repo] . identifier[get_issue] ( identifier[body] [ literal[string] ][ literal[string] ]) identifier[text] = identifier[body] [ literal[string] ][ literal[string] ] keyword[try] : identifier[comment] = identifier[issue] . identifier[get_comment] ( identifier[body] [ literal[string] ][ literal[string] ]) keyword[except] identifier[UnknownObjectException] : keyword[return] keyword[None] keyword[return] identifier[WebhookMetadata] ( identifier[repo] , identifier[issue] , identifier[text] , identifier[comment] ) keyword[return] keyword[None]
def build_from_issue_comment(gh_token, body): """Create a WebhookMetadata from a comment added to an issue. """ if body['action'] in ['created', 'edited']: github_con = Github(gh_token) repo = github_con.get_repo(body['repository']['full_name']) issue = repo.get_issue(body['issue']['number']) text = body['comment']['body'] try: comment = issue.get_comment(body['comment']['id']) # depends on [control=['try'], data=[]] except UnknownObjectException: # If the comment has already disapeared, skip the command return None # depends on [control=['except'], data=[]] return WebhookMetadata(repo, issue, text, comment) # depends on [control=['if'], data=[]] return None
def append_state(self, s, is_widened_state=False): """ Appended a new state to this VFGNode. :param s: The new state to append :param is_widened_state: Whether it is a widened state or not. """ if not is_widened_state: self.all_states.append(s) self.state = s else: self.widened_state = s
def function[append_state, parameter[self, s, is_widened_state]]: constant[ Appended a new state to this VFGNode. :param s: The new state to append :param is_widened_state: Whether it is a widened state or not. ] if <ast.UnaryOp object at 0x7da20c6aa860> begin[:] call[name[self].all_states.append, parameter[name[s]]] name[self].state assign[=] name[s]
keyword[def] identifier[append_state] ( identifier[self] , identifier[s] , identifier[is_widened_state] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[is_widened_state] : identifier[self] . identifier[all_states] . identifier[append] ( identifier[s] ) identifier[self] . identifier[state] = identifier[s] keyword[else] : identifier[self] . identifier[widened_state] = identifier[s]
def append_state(self, s, is_widened_state=False): """ Appended a new state to this VFGNode. :param s: The new state to append :param is_widened_state: Whether it is a widened state or not. """ if not is_widened_state: self.all_states.append(s) self.state = s # depends on [control=['if'], data=[]] else: self.widened_state = s
def append_function(self, function): """Append the function to the list of functions to be called. If the function is already a callable, use it. If it's a type str try to interpret it as [module]:?<callable>, load the module if there is one and retrieve the callable. Argument: function (str or callable): function to call on input. """ if not hasattr(function, '__call__'): function = get_function(function) if not hasattr(function, '__call__'): raise ValueError("function is expected to be callable") self._functions.append(function) log.debug("registered %s", function.__name__)
def function[append_function, parameter[self, function]]: constant[Append the function to the list of functions to be called. If the function is already a callable, use it. If it's a type str try to interpret it as [module]:?<callable>, load the module if there is one and retrieve the callable. Argument: function (str or callable): function to call on input. ] if <ast.UnaryOp object at 0x7da1b0efd4b0> begin[:] variable[function] assign[=] call[name[get_function], parameter[name[function]]] if <ast.UnaryOp object at 0x7da1b0effe20> begin[:] <ast.Raise object at 0x7da1b0efd570> call[name[self]._functions.append, parameter[name[function]]] call[name[log].debug, parameter[constant[registered %s], name[function].__name__]]
keyword[def] identifier[append_function] ( identifier[self] , identifier[function] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[function] , literal[string] ): identifier[function] = identifier[get_function] ( identifier[function] ) keyword[if] keyword[not] identifier[hasattr] ( identifier[function] , literal[string] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[_functions] . identifier[append] ( identifier[function] ) identifier[log] . identifier[debug] ( literal[string] , identifier[function] . identifier[__name__] )
def append_function(self, function): """Append the function to the list of functions to be called. If the function is already a callable, use it. If it's a type str try to interpret it as [module]:?<callable>, load the module if there is one and retrieve the callable. Argument: function (str or callable): function to call on input. """ if not hasattr(function, '__call__'): function = get_function(function) if not hasattr(function, '__call__'): raise ValueError('function is expected to be callable') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self._functions.append(function) log.debug('registered %s', function.__name__)
def isiterable(obj, reject_string=True): """convenience tool to detect if something is iterable. in python3, strings count as iterables to we have the option to exclude them Parameters: ----------- obj : object to analyse reject_string : bool, whether to ignore strings Returns: -------- bool, if the object is itereable. """ iterable = hasattr(obj, '__len__') if reject_string: iterable = iterable and not isinstance(obj, str) return iterable
def function[isiterable, parameter[obj, reject_string]]: constant[convenience tool to detect if something is iterable. in python3, strings count as iterables to we have the option to exclude them Parameters: ----------- obj : object to analyse reject_string : bool, whether to ignore strings Returns: -------- bool, if the object is itereable. ] variable[iterable] assign[=] call[name[hasattr], parameter[name[obj], constant[__len__]]] if name[reject_string] begin[:] variable[iterable] assign[=] <ast.BoolOp object at 0x7da20e956b90> return[name[iterable]]
keyword[def] identifier[isiterable] ( identifier[obj] , identifier[reject_string] = keyword[True] ): literal[string] identifier[iterable] = identifier[hasattr] ( identifier[obj] , literal[string] ) keyword[if] identifier[reject_string] : identifier[iterable] = identifier[iterable] keyword[and] keyword[not] identifier[isinstance] ( identifier[obj] , identifier[str] ) keyword[return] identifier[iterable]
def isiterable(obj, reject_string=True): """convenience tool to detect if something is iterable. in python3, strings count as iterables to we have the option to exclude them Parameters: ----------- obj : object to analyse reject_string : bool, whether to ignore strings Returns: -------- bool, if the object is itereable. """ iterable = hasattr(obj, '__len__') if reject_string: iterable = iterable and (not isinstance(obj, str)) # depends on [control=['if'], data=[]] return iterable
def show(closeToo=False): """alternative to pylab.show() that updates IPython window.""" IPython.display.display(pylab.gcf()) if closeToo: pylab.close('all')
def function[show, parameter[closeToo]]: constant[alternative to pylab.show() that updates IPython window.] call[name[IPython].display.display, parameter[call[name[pylab].gcf, parameter[]]]] if name[closeToo] begin[:] call[name[pylab].close, parameter[constant[all]]]
keyword[def] identifier[show] ( identifier[closeToo] = keyword[False] ): literal[string] identifier[IPython] . identifier[display] . identifier[display] ( identifier[pylab] . identifier[gcf] ()) keyword[if] identifier[closeToo] : identifier[pylab] . identifier[close] ( literal[string] )
def show(closeToo=False): """alternative to pylab.show() that updates IPython window.""" IPython.display.display(pylab.gcf()) if closeToo: pylab.close('all') # depends on [control=['if'], data=[]]
def frequency(self): """ How often the recurrence repeats. ("YEARLY", "MONTHLY", "WEEKLY", "DAILY") """ freqOptions = ("YEARLY", "MONTHLY", "WEEKLY", "DAILY") if self.rule._freq < len(freqOptions): return freqOptions[self.rule._freq] else: return "unsupported_frequency_{}".format(self.rule._freq)
def function[frequency, parameter[self]]: constant[ How often the recurrence repeats. ("YEARLY", "MONTHLY", "WEEKLY", "DAILY") ] variable[freqOptions] assign[=] tuple[[<ast.Constant object at 0x7da204621120>, <ast.Constant object at 0x7da2046223e0>, <ast.Constant object at 0x7da204623cd0>, <ast.Constant object at 0x7da204623b50>]] if compare[name[self].rule._freq less[<] call[name[len], parameter[name[freqOptions]]]] begin[:] return[call[name[freqOptions]][name[self].rule._freq]]
keyword[def] identifier[frequency] ( identifier[self] ): literal[string] identifier[freqOptions] =( literal[string] , literal[string] , literal[string] , literal[string] ) keyword[if] identifier[self] . identifier[rule] . identifier[_freq] < identifier[len] ( identifier[freqOptions] ): keyword[return] identifier[freqOptions] [ identifier[self] . identifier[rule] . identifier[_freq] ] keyword[else] : keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[rule] . identifier[_freq] )
def frequency(self): """ How often the recurrence repeats. ("YEARLY", "MONTHLY", "WEEKLY", "DAILY") """ freqOptions = ('YEARLY', 'MONTHLY', 'WEEKLY', 'DAILY') if self.rule._freq < len(freqOptions): return freqOptions[self.rule._freq] # depends on [control=['if'], data=[]] else: return 'unsupported_frequency_{}'.format(self.rule._freq)
def stop(self, timeout=5): """Stop notifying Listeners when new :class:`~can.Message` objects arrive and call :meth:`~can.Listener.stop` on each Listener. :param float timeout: Max time in seconds to wait for receive threads to finish. Should be longer than timeout given at instantiation. """ self._running = False end_time = time.time() + timeout for reader in self._readers: if isinstance(reader, threading.Thread): now = time.time() if now < end_time: reader.join(end_time - now) else: # reader is a file descriptor self._loop.remove_reader(reader) for listener in self.listeners: if hasattr(listener, 'stop'): listener.stop()
def function[stop, parameter[self, timeout]]: constant[Stop notifying Listeners when new :class:`~can.Message` objects arrive and call :meth:`~can.Listener.stop` on each Listener. :param float timeout: Max time in seconds to wait for receive threads to finish. Should be longer than timeout given at instantiation. ] name[self]._running assign[=] constant[False] variable[end_time] assign[=] binary_operation[call[name[time].time, parameter[]] + name[timeout]] for taget[name[reader]] in starred[name[self]._readers] begin[:] if call[name[isinstance], parameter[name[reader], name[threading].Thread]] begin[:] variable[now] assign[=] call[name[time].time, parameter[]] if compare[name[now] less[<] name[end_time]] begin[:] call[name[reader].join, parameter[binary_operation[name[end_time] - name[now]]]] for taget[name[listener]] in starred[name[self].listeners] begin[:] if call[name[hasattr], parameter[name[listener], constant[stop]]] begin[:] call[name[listener].stop, parameter[]]
keyword[def] identifier[stop] ( identifier[self] , identifier[timeout] = literal[int] ): literal[string] identifier[self] . identifier[_running] = keyword[False] identifier[end_time] = identifier[time] . identifier[time] ()+ identifier[timeout] keyword[for] identifier[reader] keyword[in] identifier[self] . identifier[_readers] : keyword[if] identifier[isinstance] ( identifier[reader] , identifier[threading] . identifier[Thread] ): identifier[now] = identifier[time] . identifier[time] () keyword[if] identifier[now] < identifier[end_time] : identifier[reader] . identifier[join] ( identifier[end_time] - identifier[now] ) keyword[else] : identifier[self] . identifier[_loop] . identifier[remove_reader] ( identifier[reader] ) keyword[for] identifier[listener] keyword[in] identifier[self] . identifier[listeners] : keyword[if] identifier[hasattr] ( identifier[listener] , literal[string] ): identifier[listener] . identifier[stop] ()
def stop(self, timeout=5): """Stop notifying Listeners when new :class:`~can.Message` objects arrive and call :meth:`~can.Listener.stop` on each Listener. :param float timeout: Max time in seconds to wait for receive threads to finish. Should be longer than timeout given at instantiation. """ self._running = False end_time = time.time() + timeout for reader in self._readers: if isinstance(reader, threading.Thread): now = time.time() if now < end_time: reader.join(end_time - now) # depends on [control=['if'], data=['now', 'end_time']] # depends on [control=['if'], data=[]] else: # reader is a file descriptor self._loop.remove_reader(reader) # depends on [control=['for'], data=['reader']] for listener in self.listeners: if hasattr(listener, 'stop'): listener.stop() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['listener']]
def insert(self, dte, values): '''insert *values* at date *dte*.''' if len(values): dte = self.dateconvert(dte) if not self: self._date = np.array([dte]) self._data = np.array([values]) else: # search for the date index = self._skl.rank(dte) if index < 0: # date not available N = len(self._data) index = -1-index self._date.resize((N+1,)) self._data.resize((N+1, self.count())) if index < N: self._date[index+1:] = self._date[index:-1] self._data[index+1:] = self._data[index:-1] self._date[index] = dte self._data[index] = values self._skl.insert(dte)
def function[insert, parameter[self, dte, values]]: constant[insert *values* at date *dte*.] if call[name[len], parameter[name[values]]] begin[:] variable[dte] assign[=] call[name[self].dateconvert, parameter[name[dte]]] if <ast.UnaryOp object at 0x7da1b0fbbfd0> begin[:] name[self]._date assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b0fba4d0>]]]] name[self]._data assign[=] call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b0fbaef0>]]]] call[name[self]._skl.insert, parameter[name[dte]]]
keyword[def] identifier[insert] ( identifier[self] , identifier[dte] , identifier[values] ): literal[string] keyword[if] identifier[len] ( identifier[values] ): identifier[dte] = identifier[self] . identifier[dateconvert] ( identifier[dte] ) keyword[if] keyword[not] identifier[self] : identifier[self] . identifier[_date] = identifier[np] . identifier[array] ([ identifier[dte] ]) identifier[self] . identifier[_data] = identifier[np] . identifier[array] ([ identifier[values] ]) keyword[else] : identifier[index] = identifier[self] . identifier[_skl] . identifier[rank] ( identifier[dte] ) keyword[if] identifier[index] < literal[int] : identifier[N] = identifier[len] ( identifier[self] . identifier[_data] ) identifier[index] =- literal[int] - identifier[index] identifier[self] . identifier[_date] . identifier[resize] (( identifier[N] + literal[int] ,)) identifier[self] . identifier[_data] . identifier[resize] (( identifier[N] + literal[int] , identifier[self] . identifier[count] ())) keyword[if] identifier[index] < identifier[N] : identifier[self] . identifier[_date] [ identifier[index] + literal[int] :]= identifier[self] . identifier[_date] [ identifier[index] :- literal[int] ] identifier[self] . identifier[_data] [ identifier[index] + literal[int] :]= identifier[self] . identifier[_data] [ identifier[index] :- literal[int] ] identifier[self] . identifier[_date] [ identifier[index] ]= identifier[dte] identifier[self] . identifier[_data] [ identifier[index] ]= identifier[values] identifier[self] . identifier[_skl] . identifier[insert] ( identifier[dte] )
def insert(self, dte, values): """insert *values* at date *dte*.""" if len(values): dte = self.dateconvert(dte) if not self: self._date = np.array([dte]) self._data = np.array([values]) # depends on [control=['if'], data=[]] else: # search for the date index = self._skl.rank(dte) if index < 0: # date not available N = len(self._data) index = -1 - index self._date.resize((N + 1,)) self._data.resize((N + 1, self.count())) if index < N: self._date[index + 1:] = self._date[index:-1] self._data[index + 1:] = self._data[index:-1] # depends on [control=['if'], data=['index']] # depends on [control=['if'], data=['index']] self._date[index] = dte self._data[index] = values self._skl.insert(dte) # depends on [control=['if'], data=[]]
async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore """Send the request using this HTTP sender. """ requests_kwargs = self._configure_send(request, **kwargs) return await super(AsyncRequestsHTTPSender, self).send(request, **requests_kwargs)
<ast.AsyncFunctionDef object at 0x7da18eb55a20>
keyword[async] keyword[def] identifier[send] ( identifier[self] , identifier[request] : identifier[ClientRequest] ,** identifier[kwargs] : identifier[Any] )-> identifier[AsyncClientResponse] : literal[string] identifier[requests_kwargs] = identifier[self] . identifier[_configure_send] ( identifier[request] ,** identifier[kwargs] ) keyword[return] keyword[await] identifier[super] ( identifier[AsyncRequestsHTTPSender] , identifier[self] ). identifier[send] ( identifier[request] ,** identifier[requests_kwargs] )
async def send(self, request: ClientRequest, **kwargs: Any) -> AsyncClientResponse: # type: ignore 'Send the request using this HTTP sender.\n ' requests_kwargs = self._configure_send(request, **kwargs) return await super(AsyncRequestsHTTPSender, self).send(request, **requests_kwargs)
def __read_lipd_contents(): """ Use the file metadata to read in the LiPD file contents as a dataset library :return dict: Metadata """ global files, settings _d = {} try: if len(files[".lpd"]) == 1: _d = lipd_read(files[".lpd"][0]["full_path"]) if settings["verbose"]: print("Finished read: 1 record") else: for file in files[".lpd"]: _d[file["filename_no_ext"]] = lipd_read(file["full_path"]) if settings["verbose"]: print("Finished read: {} records".format(len(_d))) except Exception as e: print("Error: read_lipd_contents: {}".format(e)) return _d
def function[__read_lipd_contents, parameter[]]: constant[ Use the file metadata to read in the LiPD file contents as a dataset library :return dict: Metadata ] <ast.Global object at 0x7da2044c20b0> variable[_d] assign[=] dictionary[[], []] <ast.Try object at 0x7da2044c2a70> return[name[_d]]
keyword[def] identifier[__read_lipd_contents] (): literal[string] keyword[global] identifier[files] , identifier[settings] identifier[_d] ={} keyword[try] : keyword[if] identifier[len] ( identifier[files] [ literal[string] ])== literal[int] : identifier[_d] = identifier[lipd_read] ( identifier[files] [ literal[string] ][ literal[int] ][ literal[string] ]) keyword[if] identifier[settings] [ literal[string] ]: identifier[print] ( literal[string] ) keyword[else] : keyword[for] identifier[file] keyword[in] identifier[files] [ literal[string] ]: identifier[_d] [ identifier[file] [ literal[string] ]]= identifier[lipd_read] ( identifier[file] [ literal[string] ]) keyword[if] identifier[settings] [ literal[string] ]: identifier[print] ( literal[string] . identifier[format] ( identifier[len] ( identifier[_d] ))) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[print] ( literal[string] . identifier[format] ( identifier[e] )) keyword[return] identifier[_d]
def __read_lipd_contents(): """ Use the file metadata to read in the LiPD file contents as a dataset library :return dict: Metadata """ global files, settings _d = {} try: if len(files['.lpd']) == 1: _d = lipd_read(files['.lpd'][0]['full_path']) if settings['verbose']: print('Finished read: 1 record') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: for file in files['.lpd']: _d[file['filename_no_ext']] = lipd_read(file['full_path']) # depends on [control=['for'], data=['file']] if settings['verbose']: print('Finished read: {} records'.format(len(_d))) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception as e: print('Error: read_lipd_contents: {}'.format(e)) # depends on [control=['except'], data=['e']] return _d
def instantiate_objects(self, node): """ Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the special "ENGINE" key which indicates that a class of that type should be instantiated and passed all key-value pairs found in the sibling "OPTIONS" dictionary as keyword arguments. For example:: tree = { 'a': { 'b': { 'first_obj': { 'ENGINE': 'mypackage.mymodule.Clazz', 'OPTIONS': { 'size': 10, 'foo': 'bar' } } }, 'c': [ { 'ENGINE': 'mypackage.mymodule.Clazz2', 'OPTIONS': { 'more_objects': { 'd': {'ENGINE': 'mypackage.foo.Bar'} } } } ] } } root = self.instantiate_objects(tree) That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to constructors were saved as attributes of the same name): assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'> assert root['a']['b']['first_obj'].size == 10 assert root['a']['b']['first_obj'].foo == 'bar' assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'> assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'> """ result = node if isinstance(node, dict): if 'ENGINE' in node: result = self.instantiate_from_dict(node) else: result = {} for key, value in six.iteritems(node): result[key] = self.instantiate_objects(value) elif isinstance(node, list): result = [] for child in node: result.append(self.instantiate_objects(child)) return result
def function[instantiate_objects, parameter[self, node]]: constant[ Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the special "ENGINE" key which indicates that a class of that type should be instantiated and passed all key-value pairs found in the sibling "OPTIONS" dictionary as keyword arguments. For example:: tree = { 'a': { 'b': { 'first_obj': { 'ENGINE': 'mypackage.mymodule.Clazz', 'OPTIONS': { 'size': 10, 'foo': 'bar' } } }, 'c': [ { 'ENGINE': 'mypackage.mymodule.Clazz2', 'OPTIONS': { 'more_objects': { 'd': {'ENGINE': 'mypackage.foo.Bar'} } } } ] } } root = self.instantiate_objects(tree) That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to constructors were saved as attributes of the same name): assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'> assert root['a']['b']['first_obj'].size == 10 assert root['a']['b']['first_obj'].foo == 'bar' assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'> assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'> ] variable[result] assign[=] name[node] if call[name[isinstance], parameter[name[node], name[dict]]] begin[:] if compare[constant[ENGINE] in name[node]] begin[:] variable[result] assign[=] call[name[self].instantiate_from_dict, parameter[name[node]]] return[name[result]]
keyword[def] identifier[instantiate_objects] ( identifier[self] , identifier[node] ): literal[string] identifier[result] = identifier[node] keyword[if] identifier[isinstance] ( identifier[node] , identifier[dict] ): keyword[if] literal[string] keyword[in] identifier[node] : identifier[result] = identifier[self] . identifier[instantiate_from_dict] ( identifier[node] ) keyword[else] : identifier[result] ={} keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[node] ): identifier[result] [ identifier[key] ]= identifier[self] . identifier[instantiate_objects] ( identifier[value] ) keyword[elif] identifier[isinstance] ( identifier[node] , identifier[list] ): identifier[result] =[] keyword[for] identifier[child] keyword[in] identifier[node] : identifier[result] . identifier[append] ( identifier[self] . identifier[instantiate_objects] ( identifier[child] )) keyword[return] identifier[result]
def instantiate_objects(self, node): """ Recursively traverse a structure to identify dictionaries that represent objects that need to be instantiated Traverse all values of all dictionaries and all elements of all lists to identify dictionaries that contain the special "ENGINE" key which indicates that a class of that type should be instantiated and passed all key-value pairs found in the sibling "OPTIONS" dictionary as keyword arguments. For example:: tree = { 'a': { 'b': { 'first_obj': { 'ENGINE': 'mypackage.mymodule.Clazz', 'OPTIONS': { 'size': 10, 'foo': 'bar' } } }, 'c': [ { 'ENGINE': 'mypackage.mymodule.Clazz2', 'OPTIONS': { 'more_objects': { 'd': {'ENGINE': 'mypackage.foo.Bar'} } } } ] } } root = self.instantiate_objects(tree) That structure of dicts, lists, and strings will end up with (this example assumes that all keyword arguments to constructors were saved as attributes of the same name): assert type(root['a']['b']['first_obj']) == <type 'mypackage.mymodule.Clazz'> assert root['a']['b']['first_obj'].size == 10 assert root['a']['b']['first_obj'].foo == 'bar' assert type(root['a']['c'][0]) == <type 'mypackage.mymodule.Clazz2'> assert type(root['a']['c'][0].more_objects['d']) == <type 'mypackage.foo.Bar'> """ result = node if isinstance(node, dict): if 'ENGINE' in node: result = self.instantiate_from_dict(node) # depends on [control=['if'], data=['node']] else: result = {} for (key, value) in six.iteritems(node): result[key] = self.instantiate_objects(value) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(node, list): result = [] for child in node: result.append(self.instantiate_objects(child)) # depends on [control=['for'], data=['child']] # depends on [control=['if'], data=[]] return result