code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def combine(cls, date, time):
    "Construct a datetime from a given date and a given time."
    # Validate both arguments up front; the date check runs (and may raise)
    # before the time check, matching the documented argument order.
    checks = (
        (date, _date_class, "date argument must be a date instance"),
        (time, _time_class, "time argument must be a time instance"),
    )
    for value, expected_type, message in checks:
        if not isinstance(value, expected_type):
            raise TypeError(message)
    # Merge the calendar fields of `date` with the clock fields (and tzinfo)
    # of `time` into a single datetime built via the given class.
    return cls(date.year, date.month, date.day,
               time.hour, time.minute, time.second,
               time.microsecond, time.tzinfo)
def function[combine, parameter[cls, date, time]]: constant[Construct a datetime from a given date and a given time.] if <ast.UnaryOp object at 0x7da18c4cfaf0> begin[:] <ast.Raise object at 0x7da18c4ce680> if <ast.UnaryOp object at 0x7da18c4cde10> begin[:] <ast.Raise object at 0x7da18c4cd270> return[call[name[cls], parameter[name[date].year, name[date].month, name[date].day, name[time].hour, name[time].minute, name[time].second, name[time].microsecond, name[time].tzinfo]]]
keyword[def] identifier[combine] ( identifier[cls] , identifier[date] , identifier[time] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[date] , identifier[_date_class] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[time] , identifier[_time_class] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[cls] ( identifier[date] . identifier[year] , identifier[date] . identifier[month] , identifier[date] . identifier[day] , identifier[time] . identifier[hour] , identifier[time] . identifier[minute] , identifier[time] . identifier[second] , identifier[time] . identifier[microsecond] , identifier[time] . identifier[tzinfo] )
def combine(cls, date, time): """Construct a datetime from a given date and a given time.""" if not isinstance(date, _date_class): raise TypeError('date argument must be a date instance') # depends on [control=['if'], data=[]] if not isinstance(time, _time_class): raise TypeError('time argument must be a time instance') # depends on [control=['if'], data=[]] return cls(date.year, date.month, date.day, time.hour, time.minute, time.second, time.microsecond, time.tzinfo)
def use_region(self, offset=0, size=0, flags=0):
    """Assure we point to a window which allows access to the given offset into the file

    :param offset: absolute offset in bytes into the file
    :param size: amount of bytes to map. If 0, all available bytes will be mapped
    :param flags: additional flags to be given to os.open in case a file handle is initially opened
        for mapping. Has no effect if a region can actually be reused.
    :return: this instance - it should be queried for whether it points to a valid memory region.
        This is not the case if the mapping failed because we reached the end of the file

    **Note:** The size actually mapped may be smaller than the given size. If that is the case,
    either the file has reached its end, or the map was created between two existing regions"""
    need_region = True
    man = self._manager
    fsize = self._rlist.file_size()
    size = min(size or fsize, man.window_size() or fsize)    # clamp size to window size
    # Reuse the currently held region if it already covers the requested
    # offset; otherwise release it so a fresh one can be obtained below.
    if self._region is not None:
        if self._region.includes_ofs(offset):
            need_region = False
        else:
            self.unuse_region()
        # END handle existing region
    # END check existing region

    # offset too large ?
    if offset >= fsize:
        # Cannot map past the end of the file - return without a valid region.
        return self
    # END handle offset

    if need_region:
        self._region = man._obtain_region(self._rlist, offset, size, flags, False)
        self._region.increment_client_count()
    # END need region handling

    # Translate the absolute file offset into an offset relative to the
    # region's base, and clamp our window size to what the region covers.
    self._ofs = offset - self._region._b
    self._size = min(size, self._region.ofs_end() - offset)
    return self
def function[use_region, parameter[self, offset, size, flags]]: constant[Assure we point to a window which allows access to the given offset into the file :param offset: absolute offset in bytes into the file :param size: amount of bytes to map. If 0, all available bytes will be mapped :param flags: additional flags to be given to os.open in case a file handle is initially opened for mapping. Has no effect if a region can actually be reused. :return: this instance - it should be queried for whether it points to a valid memory region. This is not the case if the mapping failed because we reached the end of the file **Note:**: The size actually mapped may be smaller than the given size. If that is the case, either the file has reached its end, or the map was created between two existing regions] variable[need_region] assign[=] constant[True] variable[man] assign[=] name[self]._manager variable[fsize] assign[=] call[name[self]._rlist.file_size, parameter[]] variable[size] assign[=] call[name[min], parameter[<ast.BoolOp object at 0x7da1b049b4c0>, <ast.BoolOp object at 0x7da1b04998d0>]] if compare[name[self]._region is_not constant[None]] begin[:] if call[name[self]._region.includes_ofs, parameter[name[offset]]] begin[:] variable[need_region] assign[=] constant[False] if compare[name[offset] greater_or_equal[>=] name[fsize]] begin[:] return[name[self]] if name[need_region] begin[:] name[self]._region assign[=] call[name[man]._obtain_region, parameter[name[self]._rlist, name[offset], name[size], name[flags], constant[False]]] call[name[self]._region.increment_client_count, parameter[]] name[self]._ofs assign[=] binary_operation[name[offset] - name[self]._region._b] name[self]._size assign[=] call[name[min], parameter[name[size], binary_operation[call[name[self]._region.ofs_end, parameter[]] - name[offset]]]] return[name[self]]
keyword[def] identifier[use_region] ( identifier[self] , identifier[offset] = literal[int] , identifier[size] = literal[int] , identifier[flags] = literal[int] ): literal[string] identifier[need_region] = keyword[True] identifier[man] = identifier[self] . identifier[_manager] identifier[fsize] = identifier[self] . identifier[_rlist] . identifier[file_size] () identifier[size] = identifier[min] ( identifier[size] keyword[or] identifier[fsize] , identifier[man] . identifier[window_size] () keyword[or] identifier[fsize] ) keyword[if] identifier[self] . identifier[_region] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[self] . identifier[_region] . identifier[includes_ofs] ( identifier[offset] ): identifier[need_region] = keyword[False] keyword[else] : identifier[self] . identifier[unuse_region] () keyword[if] identifier[offset] >= identifier[fsize] : keyword[return] identifier[self] keyword[if] identifier[need_region] : identifier[self] . identifier[_region] = identifier[man] . identifier[_obtain_region] ( identifier[self] . identifier[_rlist] , identifier[offset] , identifier[size] , identifier[flags] , keyword[False] ) identifier[self] . identifier[_region] . identifier[increment_client_count] () identifier[self] . identifier[_ofs] = identifier[offset] - identifier[self] . identifier[_region] . identifier[_b] identifier[self] . identifier[_size] = identifier[min] ( identifier[size] , identifier[self] . identifier[_region] . identifier[ofs_end] ()- identifier[offset] ) keyword[return] identifier[self]
def use_region(self, offset=0, size=0, flags=0): """Assure we point to a window which allows access to the given offset into the file :param offset: absolute offset in bytes into the file :param size: amount of bytes to map. If 0, all available bytes will be mapped :param flags: additional flags to be given to os.open in case a file handle is initially opened for mapping. Has no effect if a region can actually be reused. :return: this instance - it should be queried for whether it points to a valid memory region. This is not the case if the mapping failed because we reached the end of the file **Note:**: The size actually mapped may be smaller than the given size. If that is the case, either the file has reached its end, or the map was created between two existing regions""" need_region = True man = self._manager fsize = self._rlist.file_size() size = min(size or fsize, man.window_size() or fsize) # clamp size to window size if self._region is not None: if self._region.includes_ofs(offset): need_region = False # depends on [control=['if'], data=[]] else: self.unuse_region() # depends on [control=['if'], data=[]] # END handle existing region # END check existing region # offset too large ? if offset >= fsize: return self # depends on [control=['if'], data=[]] # END handle offset if need_region: self._region = man._obtain_region(self._rlist, offset, size, flags, False) self._region.increment_client_count() # depends on [control=['if'], data=[]] # END need region handling self._ofs = offset - self._region._b self._size = min(size, self._region.ofs_end() - offset) return self
def make_index(css_class, entities):
    """
    Generate the HTML index (a short description and a link to the full
    documentation) for a list of FunctionDocs or ClassDocs.
    """
    def make_entry(entity):
        # One definition-list entry: linked name plus its first sentence.
        template = ('<dt><a href = "%(url)s">%(name)s</a></dt>\n'
                    + '<dd>%(doc)s</dd>')
        return template % {
            'name': entity.name,
            'url': entity.url,
            'doc': first_sentence(entity.doc)
        }

    entries = []
    for entity in entities:
        entries.append(make_entry(entity))
    entry_text = '\n'.join(entries)
    # An empty entity list yields no <dl> wrapper at all.
    if not entry_text:
        return ''
    return '<dl class = "%s">\n%s\n</dl>' % (css_class, entry_text)
def function[make_index, parameter[css_class, entities]]: constant[ Generate the HTML index (a short description and a link to the full documentation) for a list of FunctionDocs or ClassDocs. ] def function[make_entry, parameter[entity]]: return[binary_operation[binary_operation[constant[<dt><a href = "%(url)s">%(name)s</a></dt> ] + constant[<dd>%(doc)s</dd>]] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da207f00640>, <ast.Constant object at 0x7da207f00be0>, <ast.Constant object at 0x7da207f02ad0>], [<ast.Attribute object at 0x7da207f027a0>, <ast.Attribute object at 0x7da207f024d0>, <ast.Call object at 0x7da207f00820>]]]] variable[entry_text] assign[=] call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da207f030d0>]] if name[entry_text] begin[:] return[binary_operation[constant[<dl class = "%s"> %s </dl>] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f00970>, <ast.Name object at 0x7da207f000a0>]]]]
keyword[def] identifier[make_index] ( identifier[css_class] , identifier[entities] ): literal[string] keyword[def] identifier[make_entry] ( identifier[entity] ): keyword[return] ( literal[string] + literal[string] )%{ literal[string] : identifier[entity] . identifier[name] , literal[string] : identifier[entity] . identifier[url] , literal[string] : identifier[first_sentence] ( identifier[entity] . identifier[doc] ) } identifier[entry_text] = literal[string] . identifier[join] ( identifier[make_entry] ( identifier[val] ) keyword[for] identifier[val] keyword[in] identifier[entities] ) keyword[if] identifier[entry_text] : keyword[return] literal[string] %( identifier[css_class] , identifier[entry_text] ) keyword[else] : keyword[return] literal[string]
def make_index(css_class, entities): """ Generate the HTML index (a short description and a link to the full documentation) for a list of FunctionDocs or ClassDocs. """ def make_entry(entity): return ('<dt><a href = "%(url)s">%(name)s</a></dt>\n' + '<dd>%(doc)s</dd>') % {'name': entity.name, 'url': entity.url, 'doc': first_sentence(entity.doc)} entry_text = '\n'.join((make_entry(val) for val in entities)) if entry_text: return '<dl class = "%s">\n%s\n</dl>' % (css_class, entry_text) # depends on [control=['if'], data=[]] else: return ''
def write_type_dumps(self, operations, preserve_order, output_dir):
    """
    Splits the list of SQL operations by type and dumps these to separate files
    """
    # Bucket every operation under its SQL type.
    grouped = {SqlType.INDEX: [], SqlType.FUNCTION: [], SqlType.TRIGGER: []}
    for op in operations:
        grouped[op.sql_type].append(op)

    # Unless the caller wants the original ordering, sort each bucket
    # alphabetically by object name.
    if not preserve_order:
        grouped = {sql_type: sorted(ops, key=lambda o: o.obj_name)
                   for sql_type, ops in grouped.items()}

    # Write one dump file per non-empty bucket.
    dump_targets = (
        ('indexes', SqlType.INDEX),
        ('functions', SqlType.FUNCTION),
        ('triggers', SqlType.TRIGGER),
    )
    for label, sql_type in dump_targets:
        if grouped[sql_type]:
            self.write_dump(label, grouped[sql_type], output_dir)
def function[write_type_dumps, parameter[self, operations, preserve_order, output_dir]]: constant[ Splits the list of SQL operations by type and dumps these to separate files ] variable[by_type] assign[=] dictionary[[<ast.Attribute object at 0x7da1b0f11c60>, <ast.Attribute object at 0x7da1b0f118a0>, <ast.Attribute object at 0x7da1b0f111b0>], [<ast.List object at 0x7da1b0f109d0>, <ast.List object at 0x7da1b0f134c0>, <ast.List object at 0x7da1b0f10ac0>]] for taget[name[operation]] in starred[name[operations]] begin[:] call[call[name[by_type]][name[operation].sql_type].append, parameter[name[operation]]] if <ast.UnaryOp object at 0x7da1b0f110f0> begin[:] for taget[tuple[[<ast.Name object at 0x7da1b0f13eb0>, <ast.Name object at 0x7da1b0f13040>]]] in starred[call[name[by_type].items, parameter[]]] begin[:] call[name[by_type]][name[obj_type]] assign[=] call[name[sorted], parameter[name[ops]]] if call[name[by_type]][name[SqlType].INDEX] begin[:] call[name[self].write_dump, parameter[constant[indexes], call[name[by_type]][name[SqlType].INDEX], name[output_dir]]] if call[name[by_type]][name[SqlType].FUNCTION] begin[:] call[name[self].write_dump, parameter[constant[functions], call[name[by_type]][name[SqlType].FUNCTION], name[output_dir]]] if call[name[by_type]][name[SqlType].TRIGGER] begin[:] call[name[self].write_dump, parameter[constant[triggers], call[name[by_type]][name[SqlType].TRIGGER], name[output_dir]]]
keyword[def] identifier[write_type_dumps] ( identifier[self] , identifier[operations] , identifier[preserve_order] , identifier[output_dir] ): literal[string] identifier[by_type] ={ identifier[SqlType] . identifier[INDEX] :[], identifier[SqlType] . identifier[FUNCTION] :[], identifier[SqlType] . identifier[TRIGGER] :[]} keyword[for] identifier[operation] keyword[in] identifier[operations] : identifier[by_type] [ identifier[operation] . identifier[sql_type] ]. identifier[append] ( identifier[operation] ) keyword[if] keyword[not] identifier[preserve_order] : keyword[for] identifier[obj_type] , identifier[ops] keyword[in] identifier[by_type] . identifier[items] (): identifier[by_type] [ identifier[obj_type] ]= identifier[sorted] ( identifier[ops] , identifier[key] = keyword[lambda] identifier[o] : identifier[o] . identifier[obj_name] ) keyword[if] identifier[by_type] [ identifier[SqlType] . identifier[INDEX] ]: identifier[self] . identifier[write_dump] ( literal[string] , identifier[by_type] [ identifier[SqlType] . identifier[INDEX] ], identifier[output_dir] ) keyword[if] identifier[by_type] [ identifier[SqlType] . identifier[FUNCTION] ]: identifier[self] . identifier[write_dump] ( literal[string] , identifier[by_type] [ identifier[SqlType] . identifier[FUNCTION] ], identifier[output_dir] ) keyword[if] identifier[by_type] [ identifier[SqlType] . identifier[TRIGGER] ]: identifier[self] . identifier[write_dump] ( literal[string] , identifier[by_type] [ identifier[SqlType] . identifier[TRIGGER] ], identifier[output_dir] )
def write_type_dumps(self, operations, preserve_order, output_dir): """ Splits the list of SQL operations by type and dumps these to separate files """ by_type = {SqlType.INDEX: [], SqlType.FUNCTION: [], SqlType.TRIGGER: []} for operation in operations: by_type[operation.sql_type].append(operation) # depends on [control=['for'], data=['operation']] # optionally sort each operation list by the object name if not preserve_order: for (obj_type, ops) in by_type.items(): by_type[obj_type] = sorted(ops, key=lambda o: o.obj_name) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] if by_type[SqlType.INDEX]: self.write_dump('indexes', by_type[SqlType.INDEX], output_dir) # depends on [control=['if'], data=[]] if by_type[SqlType.FUNCTION]: self.write_dump('functions', by_type[SqlType.FUNCTION], output_dir) # depends on [control=['if'], data=[]] if by_type[SqlType.TRIGGER]: self.write_dump('triggers', by_type[SqlType.TRIGGER], output_dir) # depends on [control=['if'], data=[]]
def native(self):
    """
    The native Python datatype representation of this value

    :return:
        A unicode string or None
    """
    if self.contents is None:
        return None
    if self._native is None:
        raw = self.__bytes__()
        total = len(raw)
        cidr_int = None
        # 16/4 bytes is a bare address; 32/8 bytes appends a netmask whose
        # integer value follows the address bytes.
        if total in (32, 16):
            value = inet_ntop(socket.AF_INET6, raw[0:16])
            if total > 16:
                cidr_int = int_from_bytes(raw[16:])
        elif total in (8, 4):
            value = inet_ntop(socket.AF_INET, raw[0:4])
            if total > 4:
                cidr_int = int_from_bytes(raw[4:])
        if cidr_int is not None:
            # The CIDR prefix length is the number of leading one-bits in
            # the netmask: drop trailing zeros from the binary form.
            mask_bits = '{0:b}'.format(cidr_int)
            prefix_len = len(mask_bits.rstrip('0'))
            value = value + '/' + str_cls(prefix_len)
        self._native = value
    return self._native
def function[native, parameter[self]]: constant[ The native Python datatype representation of this value :return: A unicode string or None ] if compare[name[self].contents is constant[None]] begin[:] return[constant[None]] if compare[name[self]._native is constant[None]] begin[:] variable[byte_string] assign[=] call[name[self].__bytes__, parameter[]] variable[byte_len] assign[=] call[name[len], parameter[name[byte_string]]] variable[cidr_int] assign[=] constant[None] if compare[name[byte_len] in call[name[set], parameter[list[[<ast.Constant object at 0x7da2047ebbb0>, <ast.Constant object at 0x7da2047eacb0>]]]]] begin[:] variable[value] assign[=] call[name[inet_ntop], parameter[name[socket].AF_INET6, call[name[byte_string]][<ast.Slice object at 0x7da18bcc91b0>]]] if compare[name[byte_len] greater[>] constant[16]] begin[:] variable[cidr_int] assign[=] call[name[int_from_bytes], parameter[call[name[byte_string]][<ast.Slice object at 0x7da18bcc8ca0>]]] if compare[name[cidr_int] is_not constant[None]] begin[:] variable[cidr_bits] assign[=] call[constant[{0:b}].format, parameter[name[cidr_int]]] variable[cidr] assign[=] call[name[len], parameter[call[name[cidr_bits].rstrip, parameter[constant[0]]]]] variable[value] assign[=] binary_operation[binary_operation[name[value] + constant[/]] + call[name[str_cls], parameter[name[cidr]]]] name[self]._native assign[=] name[value] return[name[self]._native]
keyword[def] identifier[native] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[contents] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] identifier[self] . identifier[_native] keyword[is] keyword[None] : identifier[byte_string] = identifier[self] . identifier[__bytes__] () identifier[byte_len] = identifier[len] ( identifier[byte_string] ) identifier[cidr_int] = keyword[None] keyword[if] identifier[byte_len] keyword[in] identifier[set] ([ literal[int] , literal[int] ]): identifier[value] = identifier[inet_ntop] ( identifier[socket] . identifier[AF_INET6] , identifier[byte_string] [ literal[int] : literal[int] ]) keyword[if] identifier[byte_len] > literal[int] : identifier[cidr_int] = identifier[int_from_bytes] ( identifier[byte_string] [ literal[int] :]) keyword[elif] identifier[byte_len] keyword[in] identifier[set] ([ literal[int] , literal[int] ]): identifier[value] = identifier[inet_ntop] ( identifier[socket] . identifier[AF_INET] , identifier[byte_string] [ literal[int] : literal[int] ]) keyword[if] identifier[byte_len] > literal[int] : identifier[cidr_int] = identifier[int_from_bytes] ( identifier[byte_string] [ literal[int] :]) keyword[if] identifier[cidr_int] keyword[is] keyword[not] keyword[None] : identifier[cidr_bits] = literal[string] . identifier[format] ( identifier[cidr_int] ) identifier[cidr] = identifier[len] ( identifier[cidr_bits] . identifier[rstrip] ( literal[string] )) identifier[value] = identifier[value] + literal[string] + identifier[str_cls] ( identifier[cidr] ) identifier[self] . identifier[_native] = identifier[value] keyword[return] identifier[self] . identifier[_native]
def native(self): """ The native Python datatype representation of this value :return: A unicode string or None """ if self.contents is None: return None # depends on [control=['if'], data=[]] if self._native is None: byte_string = self.__bytes__() byte_len = len(byte_string) cidr_int = None if byte_len in set([32, 16]): value = inet_ntop(socket.AF_INET6, byte_string[0:16]) if byte_len > 16: cidr_int = int_from_bytes(byte_string[16:]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['byte_len']] elif byte_len in set([8, 4]): value = inet_ntop(socket.AF_INET, byte_string[0:4]) if byte_len > 4: cidr_int = int_from_bytes(byte_string[4:]) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['byte_len']] if cidr_int is not None: cidr_bits = '{0:b}'.format(cidr_int) cidr = len(cidr_bits.rstrip('0')) value = value + '/' + str_cls(cidr) # depends on [control=['if'], data=['cidr_int']] self._native = value # depends on [control=['if'], data=[]] return self._native
def list_math_division(a, b):
    """!
    @brief Division of two lists.
    @details Each element from list 'a' is divided by the element from list 'b'
              at the same index.

    @param[in] a (list): List of elements that supports mathematic division.
    @param[in] b (list): List of elements that supports mathematic division.

    @return (list) Result of division of two lists.

    """
    # enumerate replaces the non-idiomatic range(len(a)) index loop while
    # preserving the original behavior, including the IndexError raised
    # when 'b' is shorter than 'a'.
    return [value / b[i] for i, value in enumerate(a)]
def function[list_math_division, parameter[a, b]]: constant[! @brief Division of two lists. @details Each element from list 'a' is divided by element from list 'b' accordingly. @param[in] a (list): List of elements that supports mathematic division. @param[in] b (list): List of elements that supports mathematic division. @return (list) Result of division of two lists. ] return[<ast.ListComp object at 0x7da20e956200>]
keyword[def] identifier[list_math_division] ( identifier[a] , identifier[b] ): literal[string] keyword[return] [ identifier[a] [ identifier[i] ]/ identifier[b] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[a] ))];
def list_math_division(a, b): """! @brief Division of two lists. @details Each element from list 'a' is divided by element from list 'b' accordingly. @param[in] a (list): List of elements that supports mathematic division. @param[in] b (list): List of elements that supports mathematic division. @return (list) Result of division of two lists. """ return [a[i] / b[i] for i in range(len(a))]
def do_help(self, *cmd_names: str) -> None:
    """Show help for command name

    Any number of command names may be given to help, and the long help
    text for all of them will be shown.
    """
    def _render(cmd: str, template: str) -> None:
        # Resolve the command method; unknown names get an error message
        # written to the output stream instead of raising.
        try:
            func = getattr(self, cmd)
        except AttributeError:
            self._sout.write('No such command: {}\n'.format(cmd))
            return
        doc = func.__doc__ if func.__doc__ else ''
        summary = doc.split('\n', maxsplit=1)[0]
        arg_list = ' '.join(p for p in inspect.signature(func).parameters)
        rendered = template.format(
            cmd_name=cmd[len(self._cmd_prefix):],
            arg_list=arg_list,
            cmd_arg_sep=' ' if arg_list else '',
            doc=doc,
            doc_firstline=summary,
        )
        self._sout.write(rendered + '\n')

    if cmd_names:
        # Long-form help for each explicitly requested command.
        for name in cmd_names:
            _render(self._cmd_prefix + name, self.help_template)
    else:
        # No names given: list every command with its short help line.
        listing = sorted(
            c.method_name for c in self._filter_cmds(with_alts=False)
        )
        self._sout.write('Available Commands are:\n\n')
        for name in listing:
            _render(name, self.help_short_template)
def function[do_help, parameter[self]]: constant[Show help for command name Any number of command names may be given to help, and the long help text for all of them will be shown. ] def function[_h, parameter[cmd, template]]: <ast.Try object at 0x7da1b23472e0> if <ast.UnaryOp object at 0x7da20c6c4970> begin[:] variable[cmds] assign[=] call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da20c6c5c60>]] call[name[self]._sout.write, parameter[constant[Available Commands are: ]]] for taget[name[cmd]] in starred[name[cmds]] begin[:] call[name[_h], parameter[name[cmd], name[self].help_short_template]]
keyword[def] identifier[do_help] ( identifier[self] ,* identifier[cmd_names] : identifier[str] )-> keyword[None] : literal[string] keyword[def] identifier[_h] ( identifier[cmd] : identifier[str] , identifier[template] : identifier[str] )-> keyword[None] : keyword[try] : identifier[func] = identifier[getattr] ( identifier[self] , identifier[cmd] ) keyword[except] identifier[AttributeError] : identifier[self] . identifier[_sout] . identifier[write] ( literal[string] . identifier[format] ( identifier[cmd] )) keyword[else] : identifier[doc] = identifier[func] . identifier[__doc__] keyword[if] identifier[func] . identifier[__doc__] keyword[else] literal[string] identifier[doc_firstline] = identifier[doc] . identifier[split] ( literal[string] , identifier[maxsplit] = literal[int] )[ literal[int] ] identifier[arg_list] = literal[string] . identifier[join] ( identifier[p] keyword[for] identifier[p] keyword[in] identifier[inspect] . identifier[signature] ( identifier[func] ). identifier[parameters] ) identifier[self] . identifier[_sout] . identifier[write] ( identifier[template] . identifier[format] ( identifier[cmd_name] = identifier[cmd] [ identifier[len] ( identifier[self] . identifier[_cmd_prefix] ):], identifier[arg_list] = identifier[arg_list] , identifier[cmd_arg_sep] = literal[string] keyword[if] identifier[arg_list] keyword[else] literal[string] , identifier[doc] = identifier[doc] , identifier[doc_firstline] = identifier[doc_firstline] )+ literal[string] ) keyword[if] keyword[not] identifier[cmd_names] : identifier[cmds] = identifier[sorted] ( identifier[c] . identifier[method_name] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[_filter_cmds] ( identifier[with_alts] = keyword[False] ) ) identifier[self] . identifier[_sout] . identifier[write] ( literal[string] ) keyword[for] identifier[cmd] keyword[in] identifier[cmds] : identifier[_h] ( identifier[cmd] , identifier[self] . 
identifier[help_short_template] ) keyword[else] : keyword[for] identifier[cmd] keyword[in] identifier[cmd_names] : identifier[_h] ( identifier[self] . identifier[_cmd_prefix] + identifier[cmd] , identifier[self] . identifier[help_template] )
def do_help(self, *cmd_names: str) -> None: """Show help for command name Any number of command names may be given to help, and the long help text for all of them will be shown. """ def _h(cmd: str, template: str) -> None: try: func = getattr(self, cmd) # depends on [control=['try'], data=[]] except AttributeError: self._sout.write('No such command: {}\n'.format(cmd)) # depends on [control=['except'], data=[]] else: doc = func.__doc__ if func.__doc__ else '' doc_firstline = doc.split('\n', maxsplit=1)[0] arg_list = ' '.join((p for p in inspect.signature(func).parameters)) self._sout.write(template.format(cmd_name=cmd[len(self._cmd_prefix):], arg_list=arg_list, cmd_arg_sep=' ' if arg_list else '', doc=doc, doc_firstline=doc_firstline) + '\n') if not cmd_names: cmds = sorted((c.method_name for c in self._filter_cmds(with_alts=False))) self._sout.write('Available Commands are:\n\n') for cmd in cmds: _h(cmd, self.help_short_template) # depends on [control=['for'], data=['cmd']] # depends on [control=['if'], data=[]] else: for cmd in cmd_names: _h(self._cmd_prefix + cmd, self.help_template) # depends on [control=['for'], data=['cmd']]
def _set_ndb_cache_policy():
    """Tell NDB to never cache anything in memcache or in-process.

    This ensures that entities fetched from Datastore input_readers via NDB
    will not bloat up the request memory size and Datastore Puts will avoid
    doing calls to memcache. Without this you get soft memory limit exits,
    which hurts overall throughput.
    """
    context = ndb.get_context()
    # One shared "never cache" predicate for both policies.
    never_cache = lambda key: False
    context.set_cache_policy(never_cache)
    context.set_memcache_policy(never_cache)
def function[_set_ndb_cache_policy, parameter[]]: constant[Tell NDB to never cache anything in memcache or in-process. This ensures that entities fetched from Datastore input_readers via NDB will not bloat up the request memory size and Datastore Puts will avoid doing calls to memcache. Without this you get soft memory limit exits, which hurts overall throughput. ] variable[ndb_ctx] assign[=] call[name[ndb].get_context, parameter[]] call[name[ndb_ctx].set_cache_policy, parameter[<ast.Lambda object at 0x7da20c990250>]] call[name[ndb_ctx].set_memcache_policy, parameter[<ast.Lambda object at 0x7da20c990610>]]
keyword[def] identifier[_set_ndb_cache_policy] (): literal[string] identifier[ndb_ctx] = identifier[ndb] . identifier[get_context] () identifier[ndb_ctx] . identifier[set_cache_policy] ( keyword[lambda] identifier[key] : keyword[False] ) identifier[ndb_ctx] . identifier[set_memcache_policy] ( keyword[lambda] identifier[key] : keyword[False] )
def _set_ndb_cache_policy(): """Tell NDB to never cache anything in memcache or in-process. This ensures that entities fetched from Datastore input_readers via NDB will not bloat up the request memory size and Datastore Puts will avoid doing calls to memcache. Without this you get soft memory limit exits, which hurts overall throughput. """ ndb_ctx = ndb.get_context() ndb_ctx.set_cache_policy(lambda key: False) ndb_ctx.set_memcache_policy(lambda key: False)
def get_undecorated_callback(self):
    """ Return the callback. If the callback is a decorated function, try to
        recover the original function. """
    func = self.callback
    # Pick the right attribute names for the running Python version.
    if py3k:
        method_attr, closure_attr = '__func__', '__closure__'
    else:
        method_attr, closure_attr = 'im_func', 'func_closure'
    # Unwrap bound methods to the underlying function, if applicable.
    func = getattr(func, method_attr, func)
    # Peel decorator layers by following closure cells until a function
    # without a (non-empty) closure remains.
    while getattr(func, closure_attr, None):
        cells = getattr(func, closure_attr)
        func = cells[0].cell_contents
        # in case of decorators with multiple arguments
        if not isinstance(func, FunctionType):
            # pick first FunctionType instance from multiple arguments
            candidates = [c.cell_contents for c in cells
                          if isinstance(c.cell_contents, FunctionType)]
            func = candidates[0]
    return func
def function[get_undecorated_callback, parameter[self]]: constant[ Return the callback. If the callback is a decorated function, try to recover the original function. ] variable[func] assign[=] name[self].callback variable[func] assign[=] call[name[getattr], parameter[name[func], <ast.IfExp object at 0x7da20c7c8b80>, name[func]]] variable[closure_attr] assign[=] <ast.IfExp object at 0x7da20c7ca410> while <ast.BoolOp object at 0x7da20c7ca020> begin[:] variable[attributes] assign[=] call[name[getattr], parameter[name[func], name[closure_attr]]] variable[func] assign[=] call[name[attributes]][constant[0]].cell_contents if <ast.UnaryOp object at 0x7da20e9623e0> begin[:] variable[func] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da20e960bb0>, call[name[map], parameter[<ast.Lambda object at 0x7da20e9605e0>, name[attributes]]]]] variable[func] assign[=] call[call[name[list], parameter[name[func]]]][constant[0]] return[name[func]]
keyword[def] identifier[get_undecorated_callback] ( identifier[self] ): literal[string] identifier[func] = identifier[self] . identifier[callback] identifier[func] = identifier[getattr] ( identifier[func] , literal[string] keyword[if] identifier[py3k] keyword[else] literal[string] , identifier[func] ) identifier[closure_attr] = literal[string] keyword[if] identifier[py3k] keyword[else] literal[string] keyword[while] identifier[hasattr] ( identifier[func] , identifier[closure_attr] ) keyword[and] identifier[getattr] ( identifier[func] , identifier[closure_attr] ): identifier[attributes] = identifier[getattr] ( identifier[func] , identifier[closure_attr] ) identifier[func] = identifier[attributes] [ literal[int] ]. identifier[cell_contents] keyword[if] keyword[not] identifier[isinstance] ( identifier[func] , identifier[FunctionType] ): identifier[func] = identifier[filter] ( keyword[lambda] identifier[x] : identifier[isinstance] ( identifier[x] , identifier[FunctionType] ), identifier[map] ( keyword[lambda] identifier[x] : identifier[x] . identifier[cell_contents] , identifier[attributes] )) identifier[func] = identifier[list] ( identifier[func] )[ literal[int] ] keyword[return] identifier[func]
def get_undecorated_callback(self): """ Return the callback. If the callback is a decorated function, try to recover the original function. """ func = self.callback func = getattr(func, '__func__' if py3k else 'im_func', func) closure_attr = '__closure__' if py3k else 'func_closure' while hasattr(func, closure_attr) and getattr(func, closure_attr): attributes = getattr(func, closure_attr) func = attributes[0].cell_contents # in case of decorators with multiple arguments if not isinstance(func, FunctionType): # pick first FunctionType instance from multiple arguments func = filter(lambda x: isinstance(x, FunctionType), map(lambda x: x.cell_contents, attributes)) func = list(func)[0] # py3 support # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return func
def get_json(url, **kwargs): """ ASSUME RESPONSE IN IN JSON """ response = get(url, **kwargs) try: c = response.all_content return json2value(utf82unicode(c)) except Exception as e: if mo_math.round(response.status_code, decimal=-2) in [400, 500]: Log.error(u"Bad GET response: {{code}}", code=response.status_code) else: Log.error(u"Good GET requests, but bad JSON", cause=e)
def function[get_json, parameter[url]]: constant[ ASSUME RESPONSE IN IN JSON ] variable[response] assign[=] call[name[get], parameter[name[url]]] <ast.Try object at 0x7da18f00fc10>
keyword[def] identifier[get_json] ( identifier[url] ,** identifier[kwargs] ): literal[string] identifier[response] = identifier[get] ( identifier[url] ,** identifier[kwargs] ) keyword[try] : identifier[c] = identifier[response] . identifier[all_content] keyword[return] identifier[json2value] ( identifier[utf82unicode] ( identifier[c] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[if] identifier[mo_math] . identifier[round] ( identifier[response] . identifier[status_code] , identifier[decimal] =- literal[int] ) keyword[in] [ literal[int] , literal[int] ]: identifier[Log] . identifier[error] ( literal[string] , identifier[code] = identifier[response] . identifier[status_code] ) keyword[else] : identifier[Log] . identifier[error] ( literal[string] , identifier[cause] = identifier[e] )
def get_json(url, **kwargs): """ ASSUME RESPONSE IN IN JSON """ response = get(url, **kwargs) try: c = response.all_content return json2value(utf82unicode(c)) # depends on [control=['try'], data=[]] except Exception as e: if mo_math.round(response.status_code, decimal=-2) in [400, 500]: Log.error(u'Bad GET response: {{code}}', code=response.status_code) # depends on [control=['if'], data=[]] else: Log.error(u'Good GET requests, but bad JSON', cause=e) # depends on [control=['except'], data=['e']]
def _create_element_list_(self): """ Extract an alphabetically sorted list of elements from the compounds of the material. :returns: An alphabetically sorted list of elements. """ element_set = stoich.elements(self.compounds) return sorted(list(element_set))
def function[_create_element_list_, parameter[self]]: constant[ Extract an alphabetically sorted list of elements from the compounds of the material. :returns: An alphabetically sorted list of elements. ] variable[element_set] assign[=] call[name[stoich].elements, parameter[name[self].compounds]] return[call[name[sorted], parameter[call[name[list], parameter[name[element_set]]]]]]
keyword[def] identifier[_create_element_list_] ( identifier[self] ): literal[string] identifier[element_set] = identifier[stoich] . identifier[elements] ( identifier[self] . identifier[compounds] ) keyword[return] identifier[sorted] ( identifier[list] ( identifier[element_set] ))
def _create_element_list_(self): """ Extract an alphabetically sorted list of elements from the compounds of the material. :returns: An alphabetically sorted list of elements. """ element_set = stoich.elements(self.compounds) return sorted(list(element_set))
def get_equiv_inter(self, curie): """ get equivelant classes where curie is in an intersection """ start = self.qname(self.expand(curie)) # in case something is misaligned qstring = """ SELECT DISTINCT ?match WHERE { ?match owl:equivalentClass/owl:intersectionOf/rdf:rest*/rdf:first %s . }""" % start return [_ for (_,) in self.g.query(qstring)]
def function[get_equiv_inter, parameter[self, curie]]: constant[ get equivelant classes where curie is in an intersection ] variable[start] assign[=] call[name[self].qname, parameter[call[name[self].expand, parameter[name[curie]]]]] variable[qstring] assign[=] binary_operation[constant[ SELECT DISTINCT ?match WHERE { ?match owl:equivalentClass/owl:intersectionOf/rdf:rest*/rdf:first %s . }] <ast.Mod object at 0x7da2590d6920> name[start]] return[<ast.ListComp object at 0x7da1b1a203a0>]
keyword[def] identifier[get_equiv_inter] ( identifier[self] , identifier[curie] ): literal[string] identifier[start] = identifier[self] . identifier[qname] ( identifier[self] . identifier[expand] ( identifier[curie] )) identifier[qstring] = literal[string] % identifier[start] keyword[return] [ identifier[_] keyword[for] ( identifier[_] ,) keyword[in] identifier[self] . identifier[g] . identifier[query] ( identifier[qstring] )]
def get_equiv_inter(self, curie): """ get equivelant classes where curie is in an intersection """ start = self.qname(self.expand(curie)) # in case something is misaligned qstring = '\n SELECT DISTINCT ?match WHERE {\n ?match owl:equivalentClass/owl:intersectionOf/rdf:rest*/rdf:first %s .\n }' % start return [_ for (_,) in self.g.query(qstring)]
def time_methods(obj, methods, prefix=None): """ Patch obj so calls to given methods are timed >>> class C(object): ... def m1(self): ... return 'ok' ... ... def m2(self, arg): ... return arg ... >>> c = C() >>> time_methods(c, ['m1', 'm2']) >>> c.m1() 'ok' >>> c.m2('ok') 'ok' >>> c = C() >>> time_methods(c, ['m1'], 'mymetrics') """ if prefix: prefix = prefix + '.' else: prefix = '' for method in methods: current_method = getattr(obj, method) new_method = timed(prefix)(current_method) setattr(obj, method, new_method)
def function[time_methods, parameter[obj, methods, prefix]]: constant[ Patch obj so calls to given methods are timed >>> class C(object): ... def m1(self): ... return 'ok' ... ... def m2(self, arg): ... return arg ... >>> c = C() >>> time_methods(c, ['m1', 'm2']) >>> c.m1() 'ok' >>> c.m2('ok') 'ok' >>> c = C() >>> time_methods(c, ['m1'], 'mymetrics') ] if name[prefix] begin[:] variable[prefix] assign[=] binary_operation[name[prefix] + constant[.]] for taget[name[method]] in starred[name[methods]] begin[:] variable[current_method] assign[=] call[name[getattr], parameter[name[obj], name[method]]] variable[new_method] assign[=] call[call[name[timed], parameter[name[prefix]]], parameter[name[current_method]]] call[name[setattr], parameter[name[obj], name[method], name[new_method]]]
keyword[def] identifier[time_methods] ( identifier[obj] , identifier[methods] , identifier[prefix] = keyword[None] ): literal[string] keyword[if] identifier[prefix] : identifier[prefix] = identifier[prefix] + literal[string] keyword[else] : identifier[prefix] = literal[string] keyword[for] identifier[method] keyword[in] identifier[methods] : identifier[current_method] = identifier[getattr] ( identifier[obj] , identifier[method] ) identifier[new_method] = identifier[timed] ( identifier[prefix] )( identifier[current_method] ) identifier[setattr] ( identifier[obj] , identifier[method] , identifier[new_method] )
def time_methods(obj, methods, prefix=None): """ Patch obj so calls to given methods are timed >>> class C(object): ... def m1(self): ... return 'ok' ... ... def m2(self, arg): ... return arg ... >>> c = C() >>> time_methods(c, ['m1', 'm2']) >>> c.m1() 'ok' >>> c.m2('ok') 'ok' >>> c = C() >>> time_methods(c, ['m1'], 'mymetrics') """ if prefix: prefix = prefix + '.' # depends on [control=['if'], data=[]] else: prefix = '' for method in methods: current_method = getattr(obj, method) new_method = timed(prefix)(current_method) setattr(obj, method, new_method) # depends on [control=['for'], data=['method']]
def get_tissue_specificities(cls, entry): """ get list of :class:`pyuniprot.manager.models.TissueSpecificity` object from XML node entry :param entry: XML node entry :return: models.TissueSpecificity object """ tissue_specificities = [] query = "./comment[@type='tissue specificity']/text" for ts in entry.iterfind(query): tissue_specificities.append(models.TissueSpecificity(comment=ts.text)) return tissue_specificities
def function[get_tissue_specificities, parameter[cls, entry]]: constant[ get list of :class:`pyuniprot.manager.models.TissueSpecificity` object from XML node entry :param entry: XML node entry :return: models.TissueSpecificity object ] variable[tissue_specificities] assign[=] list[[]] variable[query] assign[=] constant[./comment[@type='tissue specificity']/text] for taget[name[ts]] in starred[call[name[entry].iterfind, parameter[name[query]]]] begin[:] call[name[tissue_specificities].append, parameter[call[name[models].TissueSpecificity, parameter[]]]] return[name[tissue_specificities]]
keyword[def] identifier[get_tissue_specificities] ( identifier[cls] , identifier[entry] ): literal[string] identifier[tissue_specificities] =[] identifier[query] = literal[string] keyword[for] identifier[ts] keyword[in] identifier[entry] . identifier[iterfind] ( identifier[query] ): identifier[tissue_specificities] . identifier[append] ( identifier[models] . identifier[TissueSpecificity] ( identifier[comment] = identifier[ts] . identifier[text] )) keyword[return] identifier[tissue_specificities]
def get_tissue_specificities(cls, entry): """ get list of :class:`pyuniprot.manager.models.TissueSpecificity` object from XML node entry :param entry: XML node entry :return: models.TissueSpecificity object """ tissue_specificities = [] query = "./comment[@type='tissue specificity']/text" for ts in entry.iterfind(query): tissue_specificities.append(models.TissueSpecificity(comment=ts.text)) # depends on [control=['for'], data=['ts']] return tissue_specificities
def init_banner(self): """optionally display the banner""" if self.display_banner and self.interact: self.shell.show_banner() # Make sure there is a space below the banner. if self.log_level <= logging.INFO: print
def function[init_banner, parameter[self]]: constant[optionally display the banner] if <ast.BoolOp object at 0x7da18bc71750> begin[:] call[name[self].shell.show_banner, parameter[]] if compare[name[self].log_level less_or_equal[<=] name[logging].INFO] begin[:] name[print]
keyword[def] identifier[init_banner] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[display_banner] keyword[and] identifier[self] . identifier[interact] : identifier[self] . identifier[shell] . identifier[show_banner] () keyword[if] identifier[self] . identifier[log_level] <= identifier[logging] . identifier[INFO] : identifier[print]
def init_banner(self): """optionally display the banner""" if self.display_banner and self.interact: self.shell.show_banner() # depends on [control=['if'], data=[]] # Make sure there is a space below the banner. if self.log_level <= logging.INFO: print # depends on [control=['if'], data=[]]
def get_overlay_spec(o, k, v): """ Gets the type.group.label + key spec from an Element in an Overlay. """ k = wrap_tuple(k) return ((type(v).__name__, v.group, v.label) + k if len(o.kdims) else (type(v).__name__,) + k)
def function[get_overlay_spec, parameter[o, k, v]]: constant[ Gets the type.group.label + key spec from an Element in an Overlay. ] variable[k] assign[=] call[name[wrap_tuple], parameter[name[k]]] return[<ast.IfExp object at 0x7da20c9933a0>]
keyword[def] identifier[get_overlay_spec] ( identifier[o] , identifier[k] , identifier[v] ): literal[string] identifier[k] = identifier[wrap_tuple] ( identifier[k] ) keyword[return] (( identifier[type] ( identifier[v] ). identifier[__name__] , identifier[v] . identifier[group] , identifier[v] . identifier[label] )+ identifier[k] keyword[if] identifier[len] ( identifier[o] . identifier[kdims] ) keyword[else] ( identifier[type] ( identifier[v] ). identifier[__name__] ,)+ identifier[k] )
def get_overlay_spec(o, k, v): """ Gets the type.group.label + key spec from an Element in an Overlay. """ k = wrap_tuple(k) return (type(v).__name__, v.group, v.label) + k if len(o.kdims) else (type(v).__name__,) + k
def _set_initial_wcxf(self, wc, get_smpar=True): """Load the initial values for Wilson coefficients from a wcxf.WC instance. Parameters: - `get_smpar`: boolean, optional, defaults to True. If True, an attempt is made to determine the SM parameters from the requirement of reproducing the correct SM masses and mixings at the electroweak scale. As approximations are involved, the result might or might not be reliable, depending on the size of the Wilson coefficients affecting the SM masses and mixings. If False, Standard Model parameters have to be provided separately and are assumed to be in the weak basis used for the Warsaw basis as defined in WCxf, i.e. in the basis where the down-type and charged lepton mass matrices are diagonal. """ if wc.eft != 'SMEFT': raise ValueError("Wilson coefficients use wrong EFT.") if wc.basis != 'Warsaw': raise ValueError("Wilson coefficients use wrong basis.") self.scale_in = wc.scale C = wilson.util.smeftutil.wcxf2arrays_symmetrized(wc.dict) # fill in zeros for missing WCs for k, s in smeftutil.C_keys_shape.items(): if k not in C and k not in smeftutil.SM_keys: if s == 1: C[k] = 0 else: C[k] = np.zeros(s) if self.C_in is None: self.C_in = C else: self.C_in.update(C) if get_smpar: self.C_in.update(self._get_sm_scale_in())
def function[_set_initial_wcxf, parameter[self, wc, get_smpar]]: constant[Load the initial values for Wilson coefficients from a wcxf.WC instance. Parameters: - `get_smpar`: boolean, optional, defaults to True. If True, an attempt is made to determine the SM parameters from the requirement of reproducing the correct SM masses and mixings at the electroweak scale. As approximations are involved, the result might or might not be reliable, depending on the size of the Wilson coefficients affecting the SM masses and mixings. If False, Standard Model parameters have to be provided separately and are assumed to be in the weak basis used for the Warsaw basis as defined in WCxf, i.e. in the basis where the down-type and charged lepton mass matrices are diagonal. ] if compare[name[wc].eft not_equal[!=] constant[SMEFT]] begin[:] <ast.Raise object at 0x7da1b1909270> if compare[name[wc].basis not_equal[!=] constant[Warsaw]] begin[:] <ast.Raise object at 0x7da1b1908310> name[self].scale_in assign[=] name[wc].scale variable[C] assign[=] call[name[wilson].util.smeftutil.wcxf2arrays_symmetrized, parameter[name[wc].dict]] for taget[tuple[[<ast.Name object at 0x7da1b1909c90>, <ast.Name object at 0x7da1b1909db0>]]] in starred[call[name[smeftutil].C_keys_shape.items, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b1908c10> begin[:] if compare[name[s] equal[==] constant[1]] begin[:] call[name[C]][name[k]] assign[=] constant[0] if compare[name[self].C_in is constant[None]] begin[:] name[self].C_in assign[=] name[C] if name[get_smpar] begin[:] call[name[self].C_in.update, parameter[call[name[self]._get_sm_scale_in, parameter[]]]]
keyword[def] identifier[_set_initial_wcxf] ( identifier[self] , identifier[wc] , identifier[get_smpar] = keyword[True] ): literal[string] keyword[if] identifier[wc] . identifier[eft] != literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[wc] . identifier[basis] != literal[string] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[scale_in] = identifier[wc] . identifier[scale] identifier[C] = identifier[wilson] . identifier[util] . identifier[smeftutil] . identifier[wcxf2arrays_symmetrized] ( identifier[wc] . identifier[dict] ) keyword[for] identifier[k] , identifier[s] keyword[in] identifier[smeftutil] . identifier[C_keys_shape] . identifier[items] (): keyword[if] identifier[k] keyword[not] keyword[in] identifier[C] keyword[and] identifier[k] keyword[not] keyword[in] identifier[smeftutil] . identifier[SM_keys] : keyword[if] identifier[s] == literal[int] : identifier[C] [ identifier[k] ]= literal[int] keyword[else] : identifier[C] [ identifier[k] ]= identifier[np] . identifier[zeros] ( identifier[s] ) keyword[if] identifier[self] . identifier[C_in] keyword[is] keyword[None] : identifier[self] . identifier[C_in] = identifier[C] keyword[else] : identifier[self] . identifier[C_in] . identifier[update] ( identifier[C] ) keyword[if] identifier[get_smpar] : identifier[self] . identifier[C_in] . identifier[update] ( identifier[self] . identifier[_get_sm_scale_in] ())
def _set_initial_wcxf(self, wc, get_smpar=True): """Load the initial values for Wilson coefficients from a wcxf.WC instance. Parameters: - `get_smpar`: boolean, optional, defaults to True. If True, an attempt is made to determine the SM parameters from the requirement of reproducing the correct SM masses and mixings at the electroweak scale. As approximations are involved, the result might or might not be reliable, depending on the size of the Wilson coefficients affecting the SM masses and mixings. If False, Standard Model parameters have to be provided separately and are assumed to be in the weak basis used for the Warsaw basis as defined in WCxf, i.e. in the basis where the down-type and charged lepton mass matrices are diagonal. """ if wc.eft != 'SMEFT': raise ValueError('Wilson coefficients use wrong EFT.') # depends on [control=['if'], data=[]] if wc.basis != 'Warsaw': raise ValueError('Wilson coefficients use wrong basis.') # depends on [control=['if'], data=[]] self.scale_in = wc.scale C = wilson.util.smeftutil.wcxf2arrays_symmetrized(wc.dict) # fill in zeros for missing WCs for (k, s) in smeftutil.C_keys_shape.items(): if k not in C and k not in smeftutil.SM_keys: if s == 1: C[k] = 0 # depends on [control=['if'], data=[]] else: C[k] = np.zeros(s) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if self.C_in is None: self.C_in = C # depends on [control=['if'], data=[]] else: self.C_in.update(C) if get_smpar: self.C_in.update(self._get_sm_scale_in()) # depends on [control=['if'], data=[]]
def reload(self, client=None): """API call: reload the config via a ``GET`` request. This method will reload the newest data for the config. See https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs/get :type client: :class:`google.cloud.runtimeconfig.client.Client` :param client: (Optional) The client to use. If not passed, falls back to the client stored on the current config. """ client = self._require_client(client) # We assume the config exists. If it doesn't it will raise a NotFound # exception. resp = client._connection.api_request(method="GET", path=self.path) self._set_properties(api_response=resp)
def function[reload, parameter[self, client]]: constant[API call: reload the config via a ``GET`` request. This method will reload the newest data for the config. See https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs/get :type client: :class:`google.cloud.runtimeconfig.client.Client` :param client: (Optional) The client to use. If not passed, falls back to the client stored on the current config. ] variable[client] assign[=] call[name[self]._require_client, parameter[name[client]]] variable[resp] assign[=] call[name[client]._connection.api_request, parameter[]] call[name[self]._set_properties, parameter[]]
keyword[def] identifier[reload] ( identifier[self] , identifier[client] = keyword[None] ): literal[string] identifier[client] = identifier[self] . identifier[_require_client] ( identifier[client] ) identifier[resp] = identifier[client] . identifier[_connection] . identifier[api_request] ( identifier[method] = literal[string] , identifier[path] = identifier[self] . identifier[path] ) identifier[self] . identifier[_set_properties] ( identifier[api_response] = identifier[resp] )
def reload(self, client=None): """API call: reload the config via a ``GET`` request. This method will reload the newest data for the config. See https://cloud.google.com/deployment-manager/runtime-configurator/reference/rest/v1beta1/projects.configs/get :type client: :class:`google.cloud.runtimeconfig.client.Client` :param client: (Optional) The client to use. If not passed, falls back to the client stored on the current config. """ client = self._require_client(client) # We assume the config exists. If it doesn't it will raise a NotFound # exception. resp = client._connection.api_request(method='GET', path=self.path) self._set_properties(api_response=resp)
def get_deserializer(serializer_format): """ Get the deserializer for a specific format """ if serializer_format == Format.JSON: return _deserialize_json if serializer_format == Format.PICKLE: return _deserialize_pickle
def function[get_deserializer, parameter[serializer_format]]: constant[ Get the deserializer for a specific format ] if compare[name[serializer_format] equal[==] name[Format].JSON] begin[:] return[name[_deserialize_json]] if compare[name[serializer_format] equal[==] name[Format].PICKLE] begin[:] return[name[_deserialize_pickle]]
keyword[def] identifier[get_deserializer] ( identifier[serializer_format] ): literal[string] keyword[if] identifier[serializer_format] == identifier[Format] . identifier[JSON] : keyword[return] identifier[_deserialize_json] keyword[if] identifier[serializer_format] == identifier[Format] . identifier[PICKLE] : keyword[return] identifier[_deserialize_pickle]
def get_deserializer(serializer_format): """ Get the deserializer for a specific format """ if serializer_format == Format.JSON: return _deserialize_json # depends on [control=['if'], data=[]] if serializer_format == Format.PICKLE: return _deserialize_pickle # depends on [control=['if'], data=[]]
def fetch_raw_data(sql, connection, geometry): """ Fetch the coastdat2 from the database, adapt it to the local time zone and create a time index. """ tmp_dc = {} weather_df = pd.DataFrame( connection.execute(sql).fetchall(), columns=[ 'gid', 'geom_point', 'geom_polygon', 'data_id', 'time_series', 'dat_id', 'type_id', 'type', 'height', 'year', 'leap_year']).drop( 'dat_id', 1) # Get the timezone of the geometry tz = tools.tz_from_geom(connection, geometry) for ix in weather_df.index: # Convert the point of the weather location to a shapely object weather_df.loc[ix, 'geom_point'] = wkt_loads( weather_df['geom_point'][ix]) # Roll the dataset forward according to the timezone, because the # dataset is based on utc (Berlin +1, Kiev +2, London +0) utc = timezone('utc') offset = int(utc.localize(datetime(2002, 1, 1)).astimezone( timezone(tz)).strftime("%z")[:-2]) # Get the year and the length of the data array db_year = weather_df.loc[ix, 'year'] db_len = len(weather_df['time_series'][ix]) # Set absolute time index for the data sets to avoid errors. tmp_dc[ix] = pd.Series( np.roll(np.array(weather_df['time_series'][ix]), offset), index=pd.date_range(pd.datetime(db_year, 1, 1, 0), periods=db_len, freq='H', tz=tz)) weather_df['time_series'] = pd.Series(tmp_dc) return weather_df
def function[fetch_raw_data, parameter[sql, connection, geometry]]: constant[ Fetch the coastdat2 from the database, adapt it to the local time zone and create a time index. ] variable[tmp_dc] assign[=] dictionary[[], []] variable[weather_df] assign[=] call[call[name[pd].DataFrame, parameter[call[call[name[connection].execute, parameter[name[sql]]].fetchall, parameter[]]]].drop, parameter[constant[dat_id], constant[1]]] variable[tz] assign[=] call[name[tools].tz_from_geom, parameter[name[connection], name[geometry]]] for taget[name[ix]] in starred[name[weather_df].index] begin[:] call[name[weather_df].loc][tuple[[<ast.Name object at 0x7da18c4cdc30>, <ast.Constant object at 0x7da18c4cc6a0>]]] assign[=] call[name[wkt_loads], parameter[call[call[name[weather_df]][constant[geom_point]]][name[ix]]]] variable[utc] assign[=] call[name[timezone], parameter[constant[utc]]] variable[offset] assign[=] call[name[int], parameter[call[call[call[call[name[utc].localize, parameter[call[name[datetime], parameter[constant[2002], constant[1], constant[1]]]]].astimezone, parameter[call[name[timezone], parameter[name[tz]]]]].strftime, parameter[constant[%z]]]][<ast.Slice object at 0x7da18bccb430>]]] variable[db_year] assign[=] call[name[weather_df].loc][tuple[[<ast.Name object at 0x7da18bcc81f0>, <ast.Constant object at 0x7da18bcca470>]]] variable[db_len] assign[=] call[name[len], parameter[call[call[name[weather_df]][constant[time_series]]][name[ix]]]] call[name[tmp_dc]][name[ix]] assign[=] call[name[pd].Series, parameter[call[name[np].roll, parameter[call[name[np].array, parameter[call[call[name[weather_df]][constant[time_series]]][name[ix]]]], name[offset]]]]] call[name[weather_df]][constant[time_series]] assign[=] call[name[pd].Series, parameter[name[tmp_dc]]] return[name[weather_df]]
keyword[def] identifier[fetch_raw_data] ( identifier[sql] , identifier[connection] , identifier[geometry] ): literal[string] identifier[tmp_dc] ={} identifier[weather_df] = identifier[pd] . identifier[DataFrame] ( identifier[connection] . identifier[execute] ( identifier[sql] ). identifier[fetchall] (), identifier[columns] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]). identifier[drop] ( literal[string] , literal[int] ) identifier[tz] = identifier[tools] . identifier[tz_from_geom] ( identifier[connection] , identifier[geometry] ) keyword[for] identifier[ix] keyword[in] identifier[weather_df] . identifier[index] : identifier[weather_df] . identifier[loc] [ identifier[ix] , literal[string] ]= identifier[wkt_loads] ( identifier[weather_df] [ literal[string] ][ identifier[ix] ]) identifier[utc] = identifier[timezone] ( literal[string] ) identifier[offset] = identifier[int] ( identifier[utc] . identifier[localize] ( identifier[datetime] ( literal[int] , literal[int] , literal[int] )). identifier[astimezone] ( identifier[timezone] ( identifier[tz] )). identifier[strftime] ( literal[string] )[:- literal[int] ]) identifier[db_year] = identifier[weather_df] . identifier[loc] [ identifier[ix] , literal[string] ] identifier[db_len] = identifier[len] ( identifier[weather_df] [ literal[string] ][ identifier[ix] ]) identifier[tmp_dc] [ identifier[ix] ]= identifier[pd] . identifier[Series] ( identifier[np] . identifier[roll] ( identifier[np] . identifier[array] ( identifier[weather_df] [ literal[string] ][ identifier[ix] ]), identifier[offset] ), identifier[index] = identifier[pd] . identifier[date_range] ( identifier[pd] . 
identifier[datetime] ( identifier[db_year] , literal[int] , literal[int] , literal[int] ), identifier[periods] = identifier[db_len] , identifier[freq] = literal[string] , identifier[tz] = identifier[tz] )) identifier[weather_df] [ literal[string] ]= identifier[pd] . identifier[Series] ( identifier[tmp_dc] ) keyword[return] identifier[weather_df]
def fetch_raw_data(sql, connection, geometry): """ Fetch the coastdat2 from the database, adapt it to the local time zone and create a time index. """ tmp_dc = {} weather_df = pd.DataFrame(connection.execute(sql).fetchall(), columns=['gid', 'geom_point', 'geom_polygon', 'data_id', 'time_series', 'dat_id', 'type_id', 'type', 'height', 'year', 'leap_year']).drop('dat_id', 1) # Get the timezone of the geometry tz = tools.tz_from_geom(connection, geometry) for ix in weather_df.index: # Convert the point of the weather location to a shapely object weather_df.loc[ix, 'geom_point'] = wkt_loads(weather_df['geom_point'][ix]) # Roll the dataset forward according to the timezone, because the # dataset is based on utc (Berlin +1, Kiev +2, London +0) utc = timezone('utc') offset = int(utc.localize(datetime(2002, 1, 1)).astimezone(timezone(tz)).strftime('%z')[:-2]) # Get the year and the length of the data array db_year = weather_df.loc[ix, 'year'] db_len = len(weather_df['time_series'][ix]) # Set absolute time index for the data sets to avoid errors. tmp_dc[ix] = pd.Series(np.roll(np.array(weather_df['time_series'][ix]), offset), index=pd.date_range(pd.datetime(db_year, 1, 1, 0), periods=db_len, freq='H', tz=tz)) # depends on [control=['for'], data=['ix']] weather_df['time_series'] = pd.Series(tmp_dc) return weather_df
def _get_bank_redis_key(bank): ''' Return the Redis key for the bank given the name. ''' opts = _get_redis_keys_opts() return '{prefix}{separator}{bank}'.format( prefix=opts['bank_prefix'], separator=opts['separator'], bank=bank )
def function[_get_bank_redis_key, parameter[bank]]: constant[ Return the Redis key for the bank given the name. ] variable[opts] assign[=] call[name[_get_redis_keys_opts], parameter[]] return[call[constant[{prefix}{separator}{bank}].format, parameter[]]]
keyword[def] identifier[_get_bank_redis_key] ( identifier[bank] ): literal[string] identifier[opts] = identifier[_get_redis_keys_opts] () keyword[return] literal[string] . identifier[format] ( identifier[prefix] = identifier[opts] [ literal[string] ], identifier[separator] = identifier[opts] [ literal[string] ], identifier[bank] = identifier[bank] )
def _get_bank_redis_key(bank): """ Return the Redis key for the bank given the name. """ opts = _get_redis_keys_opts() return '{prefix}{separator}{bank}'.format(prefix=opts['bank_prefix'], separator=opts['separator'], bank=bank)
def _unique_class(self, cls): """ internal method to check if any of the plugins are instances of a given cls """ return not any(isinstance(obj, cls) for obj in self.plugins)
def function[_unique_class, parameter[self, cls]]: constant[ internal method to check if any of the plugins are instances of a given cls ] return[<ast.UnaryOp object at 0x7da207f01e40>]
keyword[def] identifier[_unique_class] ( identifier[self] , identifier[cls] ): literal[string] keyword[return] keyword[not] identifier[any] ( identifier[isinstance] ( identifier[obj] , identifier[cls] ) keyword[for] identifier[obj] keyword[in] identifier[self] . identifier[plugins] )
def _unique_class(self, cls): """ internal method to check if any of the plugins are instances of a given cls """ return not any((isinstance(obj, cls) for obj in self.plugins))
def read_csv(text, sep="\t"): """Create a DataFrame from CSV text""" import pandas as pd # no top level load to make a faster import of db return pd.read_csv(StringIO(text), sep="\t")
def function[read_csv, parameter[text, sep]]: constant[Create a DataFrame from CSV text] import module[pandas] as alias[pd] return[call[name[pd].read_csv, parameter[call[name[StringIO], parameter[name[text]]]]]]
keyword[def] identifier[read_csv] ( identifier[text] , identifier[sep] = literal[string] ): literal[string] keyword[import] identifier[pandas] keyword[as] identifier[pd] keyword[return] identifier[pd] . identifier[read_csv] ( identifier[StringIO] ( identifier[text] ), identifier[sep] = literal[string] )
def read_csv(text, sep='\t'): """Create a DataFrame from CSV text""" import pandas as pd # no top level load to make a faster import of db return pd.read_csv(StringIO(text), sep='\t')
def bfs(start): """ Breadth first search Yields all nodes found from the starting point :param start: start node """ to_visit = [start] visited = {start} while to_visit: node = to_visit.pop(0) yield node if node.exception_analysis: for _, _, exception in node.exception_analysis.exceptions: if exception not in visited: to_visit.append(exception) visited.add(exception) for _, _, child in node.childs: if child not in visited: to_visit.append(child) visited.add(child)
def function[bfs, parameter[start]]: constant[ Breadth first search Yields all nodes found from the starting point :param start: start node ] variable[to_visit] assign[=] list[[<ast.Name object at 0x7da20e749ab0>]] variable[visited] assign[=] <ast.Set object at 0x7da20e74bf10> while name[to_visit] begin[:] variable[node] assign[=] call[name[to_visit].pop, parameter[constant[0]]] <ast.Yield object at 0x7da20e748580> if name[node].exception_analysis begin[:] for taget[tuple[[<ast.Name object at 0x7da20e74a6e0>, <ast.Name object at 0x7da20e74b670>, <ast.Name object at 0x7da20e74b6a0>]]] in starred[name[node].exception_analysis.exceptions] begin[:] if compare[name[exception] <ast.NotIn object at 0x7da2590d7190> name[visited]] begin[:] call[name[to_visit].append, parameter[name[exception]]] call[name[visited].add, parameter[name[exception]]] for taget[tuple[[<ast.Name object at 0x7da20e74b3d0>, <ast.Name object at 0x7da20e74a8f0>, <ast.Name object at 0x7da20e74b790>]]] in starred[name[node].childs] begin[:] if compare[name[child] <ast.NotIn object at 0x7da2590d7190> name[visited]] begin[:] call[name[to_visit].append, parameter[name[child]]] call[name[visited].add, parameter[name[child]]]
keyword[def] identifier[bfs] ( identifier[start] ): literal[string] identifier[to_visit] =[ identifier[start] ] identifier[visited] ={ identifier[start] } keyword[while] identifier[to_visit] : identifier[node] = identifier[to_visit] . identifier[pop] ( literal[int] ) keyword[yield] identifier[node] keyword[if] identifier[node] . identifier[exception_analysis] : keyword[for] identifier[_] , identifier[_] , identifier[exception] keyword[in] identifier[node] . identifier[exception_analysis] . identifier[exceptions] : keyword[if] identifier[exception] keyword[not] keyword[in] identifier[visited] : identifier[to_visit] . identifier[append] ( identifier[exception] ) identifier[visited] . identifier[add] ( identifier[exception] ) keyword[for] identifier[_] , identifier[_] , identifier[child] keyword[in] identifier[node] . identifier[childs] : keyword[if] identifier[child] keyword[not] keyword[in] identifier[visited] : identifier[to_visit] . identifier[append] ( identifier[child] ) identifier[visited] . identifier[add] ( identifier[child] )
def bfs(start): """ Breadth first search Yields all nodes found from the starting point :param start: start node """ to_visit = [start] visited = {start} while to_visit: node = to_visit.pop(0) yield node if node.exception_analysis: for (_, _, exception) in node.exception_analysis.exceptions: if exception not in visited: to_visit.append(exception) visited.add(exception) # depends on [control=['if'], data=['exception', 'visited']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] for (_, _, child) in node.childs: if child not in visited: to_visit.append(child) visited.add(child) # depends on [control=['if'], data=['child', 'visited']] # depends on [control=['for'], data=[]] # depends on [control=['while'], data=[]]
def create_widget(self): """ Create the underlying widget. """ d = self.declaration if d.orientation == 'vertical': self.widget = ScrollView(self.get_context(), None, d.style) else: self.widget = HorizontalScrollView(self.get_context(), None, d.style)
def function[create_widget, parameter[self]]: constant[ Create the underlying widget. ] variable[d] assign[=] name[self].declaration if compare[name[d].orientation equal[==] constant[vertical]] begin[:] name[self].widget assign[=] call[name[ScrollView], parameter[call[name[self].get_context, parameter[]], constant[None], name[d].style]]
keyword[def] identifier[create_widget] ( identifier[self] ): literal[string] identifier[d] = identifier[self] . identifier[declaration] keyword[if] identifier[d] . identifier[orientation] == literal[string] : identifier[self] . identifier[widget] = identifier[ScrollView] ( identifier[self] . identifier[get_context] (), keyword[None] , identifier[d] . identifier[style] ) keyword[else] : identifier[self] . identifier[widget] = identifier[HorizontalScrollView] ( identifier[self] . identifier[get_context] (), keyword[None] , identifier[d] . identifier[style] )
def create_widget(self): """ Create the underlying widget. """ d = self.declaration if d.orientation == 'vertical': self.widget = ScrollView(self.get_context(), None, d.style) # depends on [control=['if'], data=[]] else: self.widget = HorizontalScrollView(self.get_context(), None, d.style)
def force_unicode(s, encoding='utf-8', errors='strict'): """ Similar to smart_text, except that lazy instances are resolved to strings, rather than kept as lazy objects. """ # Handle the common case first, saves 30-40% when s is an instance of # six.text_type. This function gets called often in that setting. if isinstance(s, six.text_type): return s if not isinstance(s, six.string_types): if six.PY3: if isinstance(s, bytes): s = six.text_type(s, encoding, errors) else: s = six.text_type(s) else: s = six.text_type(bytes(s), encoding, errors) else: # Note: We use .decode() here, instead of six.text_type(s, # encoding, errors), so that if s is a SafeBytes, it ends up being # a SafeText at the end. s = s.decode(encoding, errors) return s
def function[force_unicode, parameter[s, encoding, errors]]: constant[ Similar to smart_text, except that lazy instances are resolved to strings, rather than kept as lazy objects. ] if call[name[isinstance], parameter[name[s], name[six].text_type]] begin[:] return[name[s]] if <ast.UnaryOp object at 0x7da1b1876020> begin[:] if name[six].PY3 begin[:] if call[name[isinstance], parameter[name[s], name[bytes]]] begin[:] variable[s] assign[=] call[name[six].text_type, parameter[name[s], name[encoding], name[errors]]] return[name[s]]
keyword[def] identifier[force_unicode] ( identifier[s] , identifier[encoding] = literal[string] , identifier[errors] = literal[string] ): literal[string] keyword[if] identifier[isinstance] ( identifier[s] , identifier[six] . identifier[text_type] ): keyword[return] identifier[s] keyword[if] keyword[not] identifier[isinstance] ( identifier[s] , identifier[six] . identifier[string_types] ): keyword[if] identifier[six] . identifier[PY3] : keyword[if] identifier[isinstance] ( identifier[s] , identifier[bytes] ): identifier[s] = identifier[six] . identifier[text_type] ( identifier[s] , identifier[encoding] , identifier[errors] ) keyword[else] : identifier[s] = identifier[six] . identifier[text_type] ( identifier[s] ) keyword[else] : identifier[s] = identifier[six] . identifier[text_type] ( identifier[bytes] ( identifier[s] ), identifier[encoding] , identifier[errors] ) keyword[else] : identifier[s] = identifier[s] . identifier[decode] ( identifier[encoding] , identifier[errors] ) keyword[return] identifier[s]
def force_unicode(s, encoding='utf-8', errors='strict'): """ Similar to smart_text, except that lazy instances are resolved to strings, rather than kept as lazy objects. """ # Handle the common case first, saves 30-40% when s is an instance of # six.text_type. This function gets called often in that setting. if isinstance(s, six.text_type): return s # depends on [control=['if'], data=[]] if not isinstance(s, six.string_types): if six.PY3: if isinstance(s, bytes): s = six.text_type(s, encoding, errors) # depends on [control=['if'], data=[]] else: s = six.text_type(s) # depends on [control=['if'], data=[]] else: s = six.text_type(bytes(s), encoding, errors) # depends on [control=['if'], data=[]] else: # Note: We use .decode() here, instead of six.text_type(s, # encoding, errors), so that if s is a SafeBytes, it ends up being # a SafeText at the end. s = s.decode(encoding, errors) return s
def hide_routemap_holder_route_map_content_set_extcommunity_soo_ASN_NN_soo(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") hide_routemap_holder = ET.SubElement(config, "hide-routemap-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy") route_map = ET.SubElement(hide_routemap_holder, "route-map") name_key = ET.SubElement(route_map, "name") name_key.text = kwargs.pop('name') action_rm_key = ET.SubElement(route_map, "action-rm") action_rm_key.text = kwargs.pop('action_rm') instance_key = ET.SubElement(route_map, "instance") instance_key.text = kwargs.pop('instance') content = ET.SubElement(route_map, "content") set = ET.SubElement(content, "set") extcommunity = ET.SubElement(set, "extcommunity") soo = ET.SubElement(extcommunity, "soo") ASN_NN_soo = ET.SubElement(soo, "ASN-NN-soo") ASN_NN_soo.text = kwargs.pop('ASN_NN_soo') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[hide_routemap_holder_route_map_content_set_extcommunity_soo_ASN_NN_soo, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[hide_routemap_holder] assign[=] call[name[ET].SubElement, parameter[name[config], constant[hide-routemap-holder]]] variable[route_map] assign[=] call[name[ET].SubElement, parameter[name[hide_routemap_holder], constant[route-map]]] variable[name_key] assign[=] call[name[ET].SubElement, parameter[name[route_map], constant[name]]] name[name_key].text assign[=] call[name[kwargs].pop, parameter[constant[name]]] variable[action_rm_key] assign[=] call[name[ET].SubElement, parameter[name[route_map], constant[action-rm]]] name[action_rm_key].text assign[=] call[name[kwargs].pop, parameter[constant[action_rm]]] variable[instance_key] assign[=] call[name[ET].SubElement, parameter[name[route_map], constant[instance]]] name[instance_key].text assign[=] call[name[kwargs].pop, parameter[constant[instance]]] variable[content] assign[=] call[name[ET].SubElement, parameter[name[route_map], constant[content]]] variable[set] assign[=] call[name[ET].SubElement, parameter[name[content], constant[set]]] variable[extcommunity] assign[=] call[name[ET].SubElement, parameter[name[set], constant[extcommunity]]] variable[soo] assign[=] call[name[ET].SubElement, parameter[name[extcommunity], constant[soo]]] variable[ASN_NN_soo] assign[=] call[name[ET].SubElement, parameter[name[soo], constant[ASN-NN-soo]]] name[ASN_NN_soo].text assign[=] call[name[kwargs].pop, parameter[constant[ASN_NN_soo]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[hide_routemap_holder_route_map_content_set_extcommunity_soo_ASN_NN_soo] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[hide_routemap_holder] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[route_map] = identifier[ET] . identifier[SubElement] ( identifier[hide_routemap_holder] , literal[string] ) identifier[name_key] = identifier[ET] . identifier[SubElement] ( identifier[route_map] , literal[string] ) identifier[name_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[action_rm_key] = identifier[ET] . identifier[SubElement] ( identifier[route_map] , literal[string] ) identifier[action_rm_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[instance_key] = identifier[ET] . identifier[SubElement] ( identifier[route_map] , literal[string] ) identifier[instance_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[content] = identifier[ET] . identifier[SubElement] ( identifier[route_map] , literal[string] ) identifier[set] = identifier[ET] . identifier[SubElement] ( identifier[content] , literal[string] ) identifier[extcommunity] = identifier[ET] . identifier[SubElement] ( identifier[set] , literal[string] ) identifier[soo] = identifier[ET] . identifier[SubElement] ( identifier[extcommunity] , literal[string] ) identifier[ASN_NN_soo] = identifier[ET] . identifier[SubElement] ( identifier[soo] , literal[string] ) identifier[ASN_NN_soo] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def hide_routemap_holder_route_map_content_set_extcommunity_soo_ASN_NN_soo(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') hide_routemap_holder = ET.SubElement(config, 'hide-routemap-holder', xmlns='urn:brocade.com:mgmt:brocade-ip-policy') route_map = ET.SubElement(hide_routemap_holder, 'route-map') name_key = ET.SubElement(route_map, 'name') name_key.text = kwargs.pop('name') action_rm_key = ET.SubElement(route_map, 'action-rm') action_rm_key.text = kwargs.pop('action_rm') instance_key = ET.SubElement(route_map, 'instance') instance_key.text = kwargs.pop('instance') content = ET.SubElement(route_map, 'content') set = ET.SubElement(content, 'set') extcommunity = ET.SubElement(set, 'extcommunity') soo = ET.SubElement(extcommunity, 'soo') ASN_NN_soo = ET.SubElement(soo, 'ASN-NN-soo') ASN_NN_soo.text = kwargs.pop('ASN_NN_soo') callback = kwargs.pop('callback', self._callback) return callback(config)
def create(self, customer_name, street, city, region, postal_code, iso_country, friendly_name=values.unset, emergency_enabled=values.unset, auto_correct_address=values.unset): """ Create a new AddressInstance :param unicode customer_name: The name to associate with the new address :param unicode street: The number and street address of the new address :param unicode city: The city of the new address :param unicode region: The state or region of the new address :param unicode postal_code: The postal code of the new address :param unicode iso_country: The ISO country code of the new address :param unicode friendly_name: A string to describe the new resource :param bool emergency_enabled: Whether to enable emergency calling on the new address :param bool auto_correct_address: Whether we should automatically correct the address :returns: Newly created AddressInstance :rtype: twilio.rest.api.v2010.account.address.AddressInstance """ data = values.of({ 'CustomerName': customer_name, 'Street': street, 'City': city, 'Region': region, 'PostalCode': postal_code, 'IsoCountry': iso_country, 'FriendlyName': friendly_name, 'EmergencyEnabled': emergency_enabled, 'AutoCorrectAddress': auto_correct_address, }) payload = self._version.create( 'POST', self._uri, data=data, ) return AddressInstance(self._version, payload, account_sid=self._solution['account_sid'], )
def function[create, parameter[self, customer_name, street, city, region, postal_code, iso_country, friendly_name, emergency_enabled, auto_correct_address]]: constant[ Create a new AddressInstance :param unicode customer_name: The name to associate with the new address :param unicode street: The number and street address of the new address :param unicode city: The city of the new address :param unicode region: The state or region of the new address :param unicode postal_code: The postal code of the new address :param unicode iso_country: The ISO country code of the new address :param unicode friendly_name: A string to describe the new resource :param bool emergency_enabled: Whether to enable emergency calling on the new address :param bool auto_correct_address: Whether we should automatically correct the address :returns: Newly created AddressInstance :rtype: twilio.rest.api.v2010.account.address.AddressInstance ] variable[data] assign[=] call[name[values].of, parameter[dictionary[[<ast.Constant object at 0x7da2054a5c60>, <ast.Constant object at 0x7da2054a7eb0>, <ast.Constant object at 0x7da2054a4b20>, <ast.Constant object at 0x7da2054a6a10>, <ast.Constant object at 0x7da2054a6740>, <ast.Constant object at 0x7da2054a6320>, <ast.Constant object at 0x7da2054a4370>, <ast.Constant object at 0x7da2054a5780>, <ast.Constant object at 0x7da2054a5540>], [<ast.Name object at 0x7da2054a5810>, <ast.Name object at 0x7da2054a5210>, <ast.Name object at 0x7da2054a7b50>, <ast.Name object at 0x7da2054a6d10>, <ast.Name object at 0x7da2054a7010>, <ast.Name object at 0x7da2054a6ef0>, <ast.Name object at 0x7da2054a6d40>, <ast.Name object at 0x7da2054a5ea0>, <ast.Name object at 0x7da2054a7970>]]]] variable[payload] assign[=] call[name[self]._version.create, parameter[constant[POST], name[self]._uri]] return[call[name[AddressInstance], parameter[name[self]._version, name[payload]]]]
keyword[def] identifier[create] ( identifier[self] , identifier[customer_name] , identifier[street] , identifier[city] , identifier[region] , identifier[postal_code] , identifier[iso_country] , identifier[friendly_name] = identifier[values] . identifier[unset] , identifier[emergency_enabled] = identifier[values] . identifier[unset] , identifier[auto_correct_address] = identifier[values] . identifier[unset] ): literal[string] identifier[data] = identifier[values] . identifier[of] ({ literal[string] : identifier[customer_name] , literal[string] : identifier[street] , literal[string] : identifier[city] , literal[string] : identifier[region] , literal[string] : identifier[postal_code] , literal[string] : identifier[iso_country] , literal[string] : identifier[friendly_name] , literal[string] : identifier[emergency_enabled] , literal[string] : identifier[auto_correct_address] , }) identifier[payload] = identifier[self] . identifier[_version] . identifier[create] ( literal[string] , identifier[self] . identifier[_uri] , identifier[data] = identifier[data] , ) keyword[return] identifier[AddressInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[account_sid] = identifier[self] . identifier[_solution] [ literal[string] ],)
def create(self, customer_name, street, city, region, postal_code, iso_country, friendly_name=values.unset, emergency_enabled=values.unset, auto_correct_address=values.unset): """ Create a new AddressInstance :param unicode customer_name: The name to associate with the new address :param unicode street: The number and street address of the new address :param unicode city: The city of the new address :param unicode region: The state or region of the new address :param unicode postal_code: The postal code of the new address :param unicode iso_country: The ISO country code of the new address :param unicode friendly_name: A string to describe the new resource :param bool emergency_enabled: Whether to enable emergency calling on the new address :param bool auto_correct_address: Whether we should automatically correct the address :returns: Newly created AddressInstance :rtype: twilio.rest.api.v2010.account.address.AddressInstance """ data = values.of({'CustomerName': customer_name, 'Street': street, 'City': city, 'Region': region, 'PostalCode': postal_code, 'IsoCountry': iso_country, 'FriendlyName': friendly_name, 'EmergencyEnabled': emergency_enabled, 'AutoCorrectAddress': auto_correct_address}) payload = self._version.create('POST', self._uri, data=data) return AddressInstance(self._version, payload, account_sid=self._solution['account_sid'])
def shared_variantcall(call_fn, name, align_bams, ref_file, items, assoc_files, region=None, out_file=None): """Provide base functionality for prepping and indexing for variant calling. """ config = items[0]["config"] if out_file is None: if vcfutils.is_paired_analysis(align_bams, items): out_file = "%s-paired-variants.vcf.gz" % config["metdata"]["batch"] else: out_file = "%s-variants.vcf.gz" % os.path.splitext(align_bams[0])[0] if not file_exists(out_file): logger.debug("Genotyping with {name}: {region} {fname}".format( name=name, region=region, fname=os.path.basename(align_bams[0]))) variant_regions = bedutils.population_variant_regions(items, merged=True) target_regions = subset_variant_regions(variant_regions, region, out_file, items=items) if (variant_regions is not None and isinstance(target_regions, six.string_types) and not os.path.isfile(target_regions)): vcfutils.write_empty_vcf(out_file, config) else: with file_transaction(config, out_file) as tx_out_file: call_fn(align_bams, ref_file, items, target_regions, tx_out_file) if out_file.endswith(".gz"): out_file = vcfutils.bgzip_and_index(out_file, config) return out_file
def function[shared_variantcall, parameter[call_fn, name, align_bams, ref_file, items, assoc_files, region, out_file]]: constant[Provide base functionality for prepping and indexing for variant calling. ] variable[config] assign[=] call[call[name[items]][constant[0]]][constant[config]] if compare[name[out_file] is constant[None]] begin[:] if call[name[vcfutils].is_paired_analysis, parameter[name[align_bams], name[items]]] begin[:] variable[out_file] assign[=] binary_operation[constant[%s-paired-variants.vcf.gz] <ast.Mod object at 0x7da2590d6920> call[call[name[config]][constant[metdata]]][constant[batch]]] if <ast.UnaryOp object at 0x7da18f09fd00> begin[:] call[name[logger].debug, parameter[call[constant[Genotyping with {name}: {region} {fname}].format, parameter[]]]] variable[variant_regions] assign[=] call[name[bedutils].population_variant_regions, parameter[name[items]]] variable[target_regions] assign[=] call[name[subset_variant_regions], parameter[name[variant_regions], name[region], name[out_file]]] if <ast.BoolOp object at 0x7da20c76fb50> begin[:] call[name[vcfutils].write_empty_vcf, parameter[name[out_file], name[config]]] if call[name[out_file].endswith, parameter[constant[.gz]]] begin[:] variable[out_file] assign[=] call[name[vcfutils].bgzip_and_index, parameter[name[out_file], name[config]]] return[name[out_file]]
keyword[def] identifier[shared_variantcall] ( identifier[call_fn] , identifier[name] , identifier[align_bams] , identifier[ref_file] , identifier[items] , identifier[assoc_files] , identifier[region] = keyword[None] , identifier[out_file] = keyword[None] ): literal[string] identifier[config] = identifier[items] [ literal[int] ][ literal[string] ] keyword[if] identifier[out_file] keyword[is] keyword[None] : keyword[if] identifier[vcfutils] . identifier[is_paired_analysis] ( identifier[align_bams] , identifier[items] ): identifier[out_file] = literal[string] % identifier[config] [ literal[string] ][ literal[string] ] keyword[else] : identifier[out_file] = literal[string] % identifier[os] . identifier[path] . identifier[splitext] ( identifier[align_bams] [ literal[int] ])[ literal[int] ] keyword[if] keyword[not] identifier[file_exists] ( identifier[out_file] ): identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[name] = identifier[name] , identifier[region] = identifier[region] , identifier[fname] = identifier[os] . identifier[path] . identifier[basename] ( identifier[align_bams] [ literal[int] ]))) identifier[variant_regions] = identifier[bedutils] . identifier[population_variant_regions] ( identifier[items] , identifier[merged] = keyword[True] ) identifier[target_regions] = identifier[subset_variant_regions] ( identifier[variant_regions] , identifier[region] , identifier[out_file] , identifier[items] = identifier[items] ) keyword[if] ( identifier[variant_regions] keyword[is] keyword[not] keyword[None] keyword[and] identifier[isinstance] ( identifier[target_regions] , identifier[six] . identifier[string_types] ) keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[target_regions] )): identifier[vcfutils] . 
identifier[write_empty_vcf] ( identifier[out_file] , identifier[config] ) keyword[else] : keyword[with] identifier[file_transaction] ( identifier[config] , identifier[out_file] ) keyword[as] identifier[tx_out_file] : identifier[call_fn] ( identifier[align_bams] , identifier[ref_file] , identifier[items] , identifier[target_regions] , identifier[tx_out_file] ) keyword[if] identifier[out_file] . identifier[endswith] ( literal[string] ): identifier[out_file] = identifier[vcfutils] . identifier[bgzip_and_index] ( identifier[out_file] , identifier[config] ) keyword[return] identifier[out_file]
def shared_variantcall(call_fn, name, align_bams, ref_file, items, assoc_files, region=None, out_file=None): """Provide base functionality for prepping and indexing for variant calling. """ config = items[0]['config'] if out_file is None: if vcfutils.is_paired_analysis(align_bams, items): out_file = '%s-paired-variants.vcf.gz' % config['metdata']['batch'] # depends on [control=['if'], data=[]] else: out_file = '%s-variants.vcf.gz' % os.path.splitext(align_bams[0])[0] # depends on [control=['if'], data=['out_file']] if not file_exists(out_file): logger.debug('Genotyping with {name}: {region} {fname}'.format(name=name, region=region, fname=os.path.basename(align_bams[0]))) variant_regions = bedutils.population_variant_regions(items, merged=True) target_regions = subset_variant_regions(variant_regions, region, out_file, items=items) if variant_regions is not None and isinstance(target_regions, six.string_types) and (not os.path.isfile(target_regions)): vcfutils.write_empty_vcf(out_file, config) # depends on [control=['if'], data=[]] else: with file_transaction(config, out_file) as tx_out_file: call_fn(align_bams, ref_file, items, target_regions, tx_out_file) # depends on [control=['with'], data=['tx_out_file']] # depends on [control=['if'], data=[]] if out_file.endswith('.gz'): out_file = vcfutils.bgzip_and_index(out_file, config) # depends on [control=['if'], data=[]] return out_file
def copy(self, deep=True): """ Make a copy of this SparseDataFrame """ result = super().copy(deep=deep) result._default_fill_value = self._default_fill_value result._default_kind = self._default_kind return result
def function[copy, parameter[self, deep]]: constant[ Make a copy of this SparseDataFrame ] variable[result] assign[=] call[call[name[super], parameter[]].copy, parameter[]] name[result]._default_fill_value assign[=] name[self]._default_fill_value name[result]._default_kind assign[=] name[self]._default_kind return[name[result]]
keyword[def] identifier[copy] ( identifier[self] , identifier[deep] = keyword[True] ): literal[string] identifier[result] = identifier[super] (). identifier[copy] ( identifier[deep] = identifier[deep] ) identifier[result] . identifier[_default_fill_value] = identifier[self] . identifier[_default_fill_value] identifier[result] . identifier[_default_kind] = identifier[self] . identifier[_default_kind] keyword[return] identifier[result]
def copy(self, deep=True): """ Make a copy of this SparseDataFrame """ result = super().copy(deep=deep) result._default_fill_value = self._default_fill_value result._default_kind = self._default_kind return result
def _read_mode_qsopt(self, size, kind): """Read Quick-Start Response option. Positional arguments: * size - int, length of option * kind - int, 27 (Quick-Start Response) Returns: * dict -- extracted Quick-Start Response (QS) option Structure of TCP QSopt [RFC 4782]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Kind | Length=8 | Resv. | Rate | TTL Diff | | | | |Request| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 tcp.qs.kind Kind (27) 1 8 tcp.qs.length Length (8) 2 16 - Reserved (must be zero) 2 20 tcp.qs.req_rate Request Rate 3 24 tcp.qs.ttl_diff TTL Difference 4 32 tcp.qs.nounce QS Nounce 7 62 - Reserved (must be zero) """ rvrr = self._read_binary(1) ttld = self._read_unpack(1) noun = self._read_fileng(4) data = dict( kind=kind, length=size, req_rate=int(rvrr[4:], base=2), ttl_diff=ttld, nounce=noun[:-2], ) return data
def function[_read_mode_qsopt, parameter[self, size, kind]]: constant[Read Quick-Start Response option. Positional arguments: * size - int, length of option * kind - int, 27 (Quick-Start Response) Returns: * dict -- extracted Quick-Start Response (QS) option Structure of TCP QSopt [RFC 4782]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Kind | Length=8 | Resv. | Rate | TTL Diff | | | | |Request| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 tcp.qs.kind Kind (27) 1 8 tcp.qs.length Length (8) 2 16 - Reserved (must be zero) 2 20 tcp.qs.req_rate Request Rate 3 24 tcp.qs.ttl_diff TTL Difference 4 32 tcp.qs.nounce QS Nounce 7 62 - Reserved (must be zero) ] variable[rvrr] assign[=] call[name[self]._read_binary, parameter[constant[1]]] variable[ttld] assign[=] call[name[self]._read_unpack, parameter[constant[1]]] variable[noun] assign[=] call[name[self]._read_fileng, parameter[constant[4]]] variable[data] assign[=] call[name[dict], parameter[]] return[name[data]]
keyword[def] identifier[_read_mode_qsopt] ( identifier[self] , identifier[size] , identifier[kind] ): literal[string] identifier[rvrr] = identifier[self] . identifier[_read_binary] ( literal[int] ) identifier[ttld] = identifier[self] . identifier[_read_unpack] ( literal[int] ) identifier[noun] = identifier[self] . identifier[_read_fileng] ( literal[int] ) identifier[data] = identifier[dict] ( identifier[kind] = identifier[kind] , identifier[length] = identifier[size] , identifier[req_rate] = identifier[int] ( identifier[rvrr] [ literal[int] :], identifier[base] = literal[int] ), identifier[ttl_diff] = identifier[ttld] , identifier[nounce] = identifier[noun] [:- literal[int] ], ) keyword[return] identifier[data]
def _read_mode_qsopt(self, size, kind): """Read Quick-Start Response option. Positional arguments: * size - int, length of option * kind - int, 27 (Quick-Start Response) Returns: * dict -- extracted Quick-Start Response (QS) option Structure of TCP QSopt [RFC 4782]: 0 1 2 3 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | Kind | Length=8 | Resv. | Rate | TTL Diff | | | | |Request| | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ | QS Nonce | R | +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ Octets Bits Name Description 0 0 tcp.qs.kind Kind (27) 1 8 tcp.qs.length Length (8) 2 16 - Reserved (must be zero) 2 20 tcp.qs.req_rate Request Rate 3 24 tcp.qs.ttl_diff TTL Difference 4 32 tcp.qs.nounce QS Nounce 7 62 - Reserved (must be zero) """ rvrr = self._read_binary(1) ttld = self._read_unpack(1) noun = self._read_fileng(4) data = dict(kind=kind, length=size, req_rate=int(rvrr[4:], base=2), ttl_diff=ttld, nounce=noun[:-2]) return data
def save(self, content): """ Save any given content to the instance file. :param content: (str or bytes) :return: (None) """ # backup existing file if needed if os.path.exists(self.file_path) and not self.assume_yes: message = "Overwrite existing {}? (y/n) " if not confirm(message.format(self.filename)): self.backup() # write file self.output("Saving " + self.filename) with open(self.file_path, "wb") as handler: if not isinstance(content, bytes): content = bytes(content, "utf-8") handler.write(content) self.yeah("Done!")
def function[save, parameter[self, content]]: constant[ Save any given content to the instance file. :param content: (str or bytes) :return: (None) ] if <ast.BoolOp object at 0x7da20c7c95d0> begin[:] variable[message] assign[=] constant[Overwrite existing {}? (y/n) ] if <ast.UnaryOp object at 0x7da20c7c8730> begin[:] call[name[self].backup, parameter[]] call[name[self].output, parameter[binary_operation[constant[Saving ] + name[self].filename]]] with call[name[open], parameter[name[self].file_path, constant[wb]]] begin[:] if <ast.UnaryOp object at 0x7da20c7cb700> begin[:] variable[content] assign[=] call[name[bytes], parameter[name[content], constant[utf-8]]] call[name[handler].write, parameter[name[content]]] call[name[self].yeah, parameter[constant[Done!]]]
keyword[def] identifier[save] ( identifier[self] , identifier[content] ): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[file_path] ) keyword[and] keyword[not] identifier[self] . identifier[assume_yes] : identifier[message] = literal[string] keyword[if] keyword[not] identifier[confirm] ( identifier[message] . identifier[format] ( identifier[self] . identifier[filename] )): identifier[self] . identifier[backup] () identifier[self] . identifier[output] ( literal[string] + identifier[self] . identifier[filename] ) keyword[with] identifier[open] ( identifier[self] . identifier[file_path] , literal[string] ) keyword[as] identifier[handler] : keyword[if] keyword[not] identifier[isinstance] ( identifier[content] , identifier[bytes] ): identifier[content] = identifier[bytes] ( identifier[content] , literal[string] ) identifier[handler] . identifier[write] ( identifier[content] ) identifier[self] . identifier[yeah] ( literal[string] )
def save(self, content): """ Save any given content to the instance file. :param content: (str or bytes) :return: (None) """ # backup existing file if needed if os.path.exists(self.file_path) and (not self.assume_yes): message = 'Overwrite existing {}? (y/n) ' if not confirm(message.format(self.filename)): self.backup() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # write file self.output('Saving ' + self.filename) with open(self.file_path, 'wb') as handler: if not isinstance(content, bytes): content = bytes(content, 'utf-8') # depends on [control=['if'], data=[]] handler.write(content) # depends on [control=['with'], data=['handler']] self.yeah('Done!')
def download_datapackage(self, dataset_key, dest_dir):
    """Download and unzip a dataset's datapackage

    :param dataset_key: Dataset identifier, in the form of owner/id
    :type dataset_key: str
    :param dest_dir: Directory under which datapackage should be saved
    :type dest_dir: str or path
    :returns: Location of the datapackage descriptor (datapackage.json)
        in the local filesystem
    :rtype: path
    :raises RestApiException: If a server error occurs

    Examples
    --------
    >>> import datadotworld as dw
    >>> api_client = dw.api_client()
    >>> datapackage_descriptor = api_client.download_datapackage(
    ...     'jonloyens/an-intro-to-dataworld-dataset',
    ...     '/tmp/test')  # doctest: +SKIP
    >>> datapackage_descriptor  # doctest: +SKIP
    '/tmp/test/datapackage.json'
    """
    if path.isdir(dest_dir):
        raise ValueError('dest_dir must be a new directory, '
                         'but {} already exists'.format(dest_dir))

    owner_id, dataset_id = parse_dataset_key(dataset_key)
    url = "{0}://{1}/datapackage/{2}/{3}".format(
        self._protocol, self._download_host, owner_id, dataset_id)
    headers = {
        'User-Agent': _user_agent(),
        'Authorization': 'Bearer {0}'.format(self._config.auth_token)
    }
    try:
        response = requests.get(url, headers=headers, stream=True)
        response.raise_for_status()
    except requests.RequestException as e:
        raise RestApiError(cause=e)

    # Stage the zip under a unique temp directory, then stream it to disk
    unzip_dir = path.join(self._config.tmp_dir, str(uuid.uuid4()))
    os.makedirs(unzip_dir)
    zip_file = path.join(unzip_dir, 'dataset.zip')
    with open(zip_file, 'wb') as f:
        for data in response.iter_content(chunk_size=4096):
            f.write(data)

    # Fix: close the archive deterministically -- a lingering open handle
    # on dataset.zip can make the shutil.move/rmtree below fail (notably
    # on Windows, where open files cannot be moved or deleted).
    with zipfile.ZipFile(zip_file) as zip_obj:
        zip_obj.extractall(path=unzip_dir)

    # Find where datapackage.json is within expanded files
    unzipped_descriptor = glob.glob(
        '{}/**/datapackage.json'.format(unzip_dir))
    if not unzipped_descriptor:
        raise RuntimeError(
            'Zip file did not contain a datapackage manifest.')

    unzipped_dir = path.dirname(unzipped_descriptor[0])
    shutil.move(unzipped_dir, dest_dir)
    shutil.rmtree(unzip_dir, ignore_errors=True)

    return path.join(dest_dir, 'datapackage.json')
def function[download_datapackage, parameter[self, dataset_key, dest_dir]]: constant[Download and unzip a dataset's datapackage :param dataset_key: Dataset identifier, in the form of owner/id :type dataset_key: str :param dest_dir: Directory under which datapackage should be saved :type dest_dir: str or path :returns: Location of the datapackage descriptor (datapackage.json) in the local filesystem :rtype: path :raises RestApiException: If a server error occurs Examples -------- >>> import datadotworld as dw >>> api_client = dw.api_client() >>> datapackage_descriptor = api_client.download_datapackage( ... 'jonloyens/an-intro-to-dataworld-dataset', ... '/tmp/test') # doctest: +SKIP >>> datapackage_descriptor # doctest: +SKIP '/tmp/test/datapackage.json' ] if call[name[path].isdir, parameter[name[dest_dir]]] begin[:] <ast.Raise object at 0x7da18dc9a290> <ast.Tuple object at 0x7da18dc9afb0> assign[=] call[name[parse_dataset_key], parameter[name[dataset_key]]] variable[url] assign[=] call[constant[{0}://{1}/datapackage/{2}/{3}].format, parameter[name[self]._protocol, name[self]._download_host, name[owner_id], name[dataset_id]]] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b04a6bc0>, <ast.Constant object at 0x7da1b04a5e40>], [<ast.Call object at 0x7da1b04a4790>, <ast.Call object at 0x7da1b04a5b10>]] <ast.Try object at 0x7da1b04a47f0> variable[unzip_dir] assign[=] call[name[path].join, parameter[name[self]._config.tmp_dir, call[name[str], parameter[call[name[uuid].uuid4, parameter[]]]]]] call[name[os].makedirs, parameter[name[unzip_dir]]] variable[zip_file] assign[=] call[name[path].join, parameter[name[unzip_dir], constant[dataset.zip]]] with call[name[open], parameter[name[zip_file], constant[wb]]] begin[:] for taget[name[data]] in starred[call[name[response].iter_content, parameter[]]] begin[:] call[name[f].write, parameter[name[data]]] variable[zip_obj] assign[=] call[name[zipfile].ZipFile, parameter[name[zip_file]]] 
call[name[zip_obj].extractall, parameter[]] variable[unzipped_descriptor] assign[=] call[name[glob].glob, parameter[call[constant[{}/**/datapackage.json].format, parameter[name[unzip_dir]]]]] if <ast.UnaryOp object at 0x7da1b040cc40> begin[:] <ast.Raise object at 0x7da1b040f3d0> variable[unzipped_dir] assign[=] call[name[path].dirname, parameter[call[name[unzipped_descriptor]][constant[0]]]] call[name[shutil].move, parameter[name[unzipped_dir], name[dest_dir]]] call[name[shutil].rmtree, parameter[name[unzip_dir]]] return[call[name[path].join, parameter[name[dest_dir], constant[datapackage.json]]]]
keyword[def] identifier[download_datapackage] ( identifier[self] , identifier[dataset_key] , identifier[dest_dir] ): literal[string] keyword[if] identifier[path] . identifier[isdir] ( identifier[dest_dir] ): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[dest_dir] )) identifier[owner_id] , identifier[dataset_id] = identifier[parse_dataset_key] ( identifier[dataset_key] ) identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[_protocol] , identifier[self] . identifier[_download_host] , identifier[owner_id] , identifier[dataset_id] ) identifier[headers] ={ literal[string] : identifier[_user_agent] (), literal[string] : literal[string] . identifier[format] ( identifier[self] . identifier[_config] . identifier[auth_token] ) } keyword[try] : identifier[response] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[headers] , identifier[stream] = keyword[True] ) identifier[response] . identifier[raise_for_status] () keyword[except] identifier[requests] . identifier[RequestException] keyword[as] identifier[e] : keyword[raise] identifier[RestApiError] ( identifier[cause] = identifier[e] ) identifier[unzip_dir] = identifier[path] . identifier[join] ( identifier[self] . identifier[_config] . identifier[tmp_dir] , identifier[str] ( identifier[uuid] . identifier[uuid4] ())) identifier[os] . identifier[makedirs] ( identifier[unzip_dir] ) identifier[zip_file] = identifier[path] . identifier[join] ( identifier[unzip_dir] , literal[string] ) keyword[with] identifier[open] ( identifier[zip_file] , literal[string] ) keyword[as] identifier[f] : keyword[for] identifier[data] keyword[in] identifier[response] . identifier[iter_content] ( identifier[chunk_size] = literal[int] ): identifier[f] . identifier[write] ( identifier[data] ) identifier[zip_obj] = identifier[zipfile] . identifier[ZipFile] ( identifier[zip_file] ) identifier[zip_obj] . 
identifier[extractall] ( identifier[path] = identifier[unzip_dir] ) identifier[unzipped_descriptor] = identifier[glob] . identifier[glob] ( literal[string] . identifier[format] ( identifier[unzip_dir] )) keyword[if] keyword[not] identifier[unzipped_descriptor] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[unzipped_dir] = identifier[path] . identifier[dirname] ( identifier[unzipped_descriptor] [ literal[int] ]) identifier[shutil] . identifier[move] ( identifier[unzipped_dir] , identifier[dest_dir] ) identifier[shutil] . identifier[rmtree] ( identifier[unzip_dir] , identifier[ignore_errors] = keyword[True] ) keyword[return] identifier[path] . identifier[join] ( identifier[dest_dir] , literal[string] )
def download_datapackage(self, dataset_key, dest_dir): """Download and unzip a dataset's datapackage :param dataset_key: Dataset identifier, in the form of owner/id :type dataset_key: str :param dest_dir: Directory under which datapackage should be saved :type dest_dir: str or path :returns: Location of the datapackage descriptor (datapackage.json) in the local filesystem :rtype: path :raises RestApiException: If a server error occurs Examples -------- >>> import datadotworld as dw >>> api_client = dw.api_client() >>> datapackage_descriptor = api_client.download_datapackage( ... 'jonloyens/an-intro-to-dataworld-dataset', ... '/tmp/test') # doctest: +SKIP >>> datapackage_descriptor # doctest: +SKIP '/tmp/test/datapackage.json' """ if path.isdir(dest_dir): raise ValueError('dest_dir must be a new directory, but {} already exists'.format(dest_dir)) # depends on [control=['if'], data=[]] (owner_id, dataset_id) = parse_dataset_key(dataset_key) url = '{0}://{1}/datapackage/{2}/{3}'.format(self._protocol, self._download_host, owner_id, dataset_id) headers = {'User-Agent': _user_agent(), 'Authorization': 'Bearer {0}'.format(self._config.auth_token)} try: response = requests.get(url, headers=headers, stream=True) response.raise_for_status() # depends on [control=['try'], data=[]] except requests.RequestException as e: raise RestApiError(cause=e) # depends on [control=['except'], data=['e']] unzip_dir = path.join(self._config.tmp_dir, str(uuid.uuid4())) os.makedirs(unzip_dir) zip_file = path.join(unzip_dir, 'dataset.zip') with open(zip_file, 'wb') as f: for data in response.iter_content(chunk_size=4096): f.write(data) # depends on [control=['for'], data=['data']] # depends on [control=['with'], data=['f']] zip_obj = zipfile.ZipFile(zip_file) zip_obj.extractall(path=unzip_dir) # Find where datapackage.json is within expanded files unzipped_descriptor = glob.glob('{}/**/datapackage.json'.format(unzip_dir)) if not unzipped_descriptor: raise RuntimeError('Zip file did not 
contain a datapackage manifest.') # depends on [control=['if'], data=[]] unzipped_dir = path.dirname(unzipped_descriptor[0]) shutil.move(unzipped_dir, dest_dir) shutil.rmtree(unzip_dir, ignore_errors=True) return path.join(dest_dir, 'datapackage.json')
def dataset_exists(client, dataset_reference):
    """Return if a dataset exists.

    Args:
        client (google.cloud.bigquery.client.Client):
            A client to connect to the BigQuery API.
        dataset_reference (google.cloud.bigquery.dataset.DatasetReference):
            A reference to the dataset to look for.

    Returns:
        bool: ``True`` if the dataset exists, ``False`` otherwise.
    """
    from google.cloud.exceptions import NotFound

    # EAFP: attempt the lookup and translate NotFound into False.
    try:
        client.get_dataset(dataset_reference)
    except NotFound:
        return False
    else:
        return True
def function[dataset_exists, parameter[client, dataset_reference]]: constant[Return if a dataset exists. Args: client (google.cloud.bigquery.client.Client): A client to connect to the BigQuery API. dataset_reference (google.cloud.bigquery.dataset.DatasetReference): A reference to the dataset to look for. Returns: bool: ``True`` if the dataset exists, ``False`` otherwise. ] from relative_module[google.cloud.exceptions] import module[NotFound] <ast.Try object at 0x7da20c6abbb0>
keyword[def] identifier[dataset_exists] ( identifier[client] , identifier[dataset_reference] ): literal[string] keyword[from] identifier[google] . identifier[cloud] . identifier[exceptions] keyword[import] identifier[NotFound] keyword[try] : identifier[client] . identifier[get_dataset] ( identifier[dataset_reference] ) keyword[return] keyword[True] keyword[except] identifier[NotFound] : keyword[return] keyword[False]
def dataset_exists(client, dataset_reference): """Return if a dataset exists. Args: client (google.cloud.bigquery.client.Client): A client to connect to the BigQuery API. dataset_reference (google.cloud.bigquery.dataset.DatasetReference): A reference to the dataset to look for. Returns: bool: ``True`` if the dataset exists, ``False`` otherwise. """ from google.cloud.exceptions import NotFound try: client.get_dataset(dataset_reference) return True # depends on [control=['try'], data=[]] except NotFound: return False # depends on [control=['except'], data=[]]
def chunks(iterator, size):
    """Split an iterator into chunks with `size` elements each.

    Warning:
        ``iterator`` must be an actual iterator; passing a concrete
        sequence will get you repeating elements.  So
        ``chunks(iter(range(1000)), 10)`` is fine, but
        ``chunks(range(1000), 10)`` is not.

    Example:
        # size == 2
        >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 2)
        >>> list(x)
        [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]]

        # size == 3
        >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3)
        >>> list(x)
        [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]]
    """
    for head in iterator:
        # the for-loop consumed one element; pull up to size-1 more
        chunk = [head]
        chunk.extend(islice(iterator, size - 1))
        yield chunk
def function[chunks, parameter[iterator, size]]: constant[Split an iterator into chunks with `size` elements each. Warning: ``size`` must be an actual iterator, if you pass this a concrete sequence will get you repeating elements. So ``chunks(iter(range(1000)), 10)`` is fine, but ``chunks(range(1000), 10)`` is not. Example: # size == 2 >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 2) >>> list(x) [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]] # size == 3 >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3) >>> list(x) [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]] ] for taget[name[item]] in starred[name[iterator]] begin[:] <ast.Yield object at 0x7da20e9b3070>
keyword[def] identifier[chunks] ( identifier[iterator] , identifier[size] ): literal[string] keyword[for] identifier[item] keyword[in] identifier[iterator] : keyword[yield] [ identifier[item] ]+ identifier[list] ( identifier[islice] ( identifier[iterator] , identifier[size] - literal[int] ))
def chunks(iterator, size): """Split an iterator into chunks with `size` elements each. Warning: ``size`` must be an actual iterator, if you pass this a concrete sequence will get you repeating elements. So ``chunks(iter(range(1000)), 10)`` is fine, but ``chunks(range(1000), 10)`` is not. Example: # size == 2 >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 2) >>> list(x) [[0, 1], [2, 3], [4, 5], [6, 7], [8, 9], [10]] # size == 3 >>> x = chunks(iter([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]), 3) >>> list(x) [[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10]] """ for item in iterator: yield ([item] + list(islice(iterator, size - 1))) # depends on [control=['for'], data=['item']]
def install(cls, type_, name, src, *args, **kwargs):
    """Install an add-on to this account.

    If no explicit ``data`` payload is supplied via ``kwargs``, one is
    built from the ``type_``, ``name`` and ``src`` arguments.
    """
    payload = kwargs.pop('data', None)
    if payload is None:
        addon_spec = {
            'type': type_,
            'name': name,
            'src': src,
        }
        payload = {'addon': addon_spec}
    cls.create(*args, data=payload, **kwargs)
def function[install, parameter[cls, type_, name, src]]: constant[Install an add-on to this account.] variable[data] assign[=] call[name[kwargs].pop, parameter[constant[data], constant[None]]] if compare[name[data] is constant[None]] begin[:] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b06fd990>], [<ast.Dict object at 0x7da1b06fc790>]] call[name[cls].create, parameter[<ast.Starred object at 0x7da1b06fca00>]]
keyword[def] identifier[install] ( identifier[cls] , identifier[type_] , identifier[name] , identifier[src] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[data] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[if] identifier[data] keyword[is] keyword[None] : identifier[data] ={ literal[string] :{ literal[string] : identifier[type_] , literal[string] : identifier[name] , literal[string] : identifier[src] , } } identifier[cls] . identifier[create] ( identifier[data] = identifier[data] ,* identifier[args] ,** identifier[kwargs] )
def install(cls, type_, name, src, *args, **kwargs): """Install an add-on to this account.""" data = kwargs.pop('data', None) if data is None: data = {'addon': {'type': type_, 'name': name, 'src': src}} # depends on [control=['if'], data=['data']] cls.create(*args, data=data, **kwargs)
def halfmax_points(self):
    """Get the bandpass' half-maximum wavelengths.

    These can be used to compute a representative bandwidth, or for
    display purposes. Unlike calc_halfmax_points(), this function will
    use a cached value if available.
    """
    # fast path: return the registry-cached value when present
    cached = self.registry._halfmaxes.get((self.telescope, self.band))
    if cached is not None:
        return cached

    # slow path: compute, then populate the cache for next time
    points = self.calc_halfmax_points()
    self.registry.register_halfmaxes(self.telescope, self.band,
                                     points[0], points[1])
    return points
def function[halfmax_points, parameter[self]]: constant[Get the bandpass' half-maximum wavelengths. These can be used to compute a representative bandwidth, or for display purposes. Unlike calc_halfmax_points(), this function will use a cached value if available. ] variable[t] assign[=] call[name[self].registry._halfmaxes.get, parameter[tuple[[<ast.Attribute object at 0x7da1b27672e0>, <ast.Attribute object at 0x7da1b2767940>]]]] if compare[name[t] is_not constant[None]] begin[:] return[name[t]] variable[t] assign[=] call[name[self].calc_halfmax_points, parameter[]] call[name[self].registry.register_halfmaxes, parameter[name[self].telescope, name[self].band, call[name[t]][constant[0]], call[name[t]][constant[1]]]] return[name[t]]
keyword[def] identifier[halfmax_points] ( identifier[self] ): literal[string] identifier[t] = identifier[self] . identifier[registry] . identifier[_halfmaxes] . identifier[get] (( identifier[self] . identifier[telescope] , identifier[self] . identifier[band] )) keyword[if] identifier[t] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[t] identifier[t] = identifier[self] . identifier[calc_halfmax_points] () identifier[self] . identifier[registry] . identifier[register_halfmaxes] ( identifier[self] . identifier[telescope] , identifier[self] . identifier[band] , identifier[t] [ literal[int] ], identifier[t] [ literal[int] ]) keyword[return] identifier[t]
def halfmax_points(self): """Get the bandpass' half-maximum wavelengths. These can be used to compute a representative bandwidth, or for display purposes. Unlike calc_halfmax_points(), this function will use a cached value if available. """ t = self.registry._halfmaxes.get((self.telescope, self.band)) if t is not None: return t # depends on [control=['if'], data=['t']] t = self.calc_halfmax_points() self.registry.register_halfmaxes(self.telescope, self.band, t[0], t[1]) return t
def track_enrollment(pathway, user_id, course_run_id, url_path=None):
    """
    Emit a track event for enterprise course enrollment.
    """
    event_properties = {
        'pathway': pathway,
        'url_path': url_path,
        'course_run_id': course_run_id,
    }
    track_event(user_id, 'edx.bi.user.enterprise.onboarding', event_properties)
def function[track_enrollment, parameter[pathway, user_id, course_run_id, url_path]]: constant[ Emit a track event for enterprise course enrollment. ] call[name[track_event], parameter[name[user_id], constant[edx.bi.user.enterprise.onboarding], dictionary[[<ast.Constant object at 0x7da1b013e560>, <ast.Constant object at 0x7da1b013c5b0>, <ast.Constant object at 0x7da1b013e110>], [<ast.Name object at 0x7da1b013fb50>, <ast.Name object at 0x7da1b013ded0>, <ast.Name object at 0x7da1b013c490>]]]]
keyword[def] identifier[track_enrollment] ( identifier[pathway] , identifier[user_id] , identifier[course_run_id] , identifier[url_path] = keyword[None] ): literal[string] identifier[track_event] ( identifier[user_id] , literal[string] ,{ literal[string] : identifier[pathway] , literal[string] : identifier[url_path] , literal[string] : identifier[course_run_id] , })
def track_enrollment(pathway, user_id, course_run_id, url_path=None): """ Emit a track event for enterprise course enrollment. """ track_event(user_id, 'edx.bi.user.enterprise.onboarding', {'pathway': pathway, 'url_path': url_path, 'course_run_id': course_run_id})
def writexml(self, filename_or_file=None, indent="    ", newl=os.linesep,
                 encoding="UTF-8"):
    """
    Write the manifest as XML to a file or file object.

    :param filename_or_file: target path, or a writable binary file
        object; falls back to ``self.filename`` when falsy
    :param indent: indentation string forwarded to ``toxml``
    :param newl: line separator forwarded to ``toxml``
    :param encoding: XML encoding forwarded to ``toxml``
    """
    if not filename_or_file:
        filename_or_file = self.filename
    # NOTE(review): ``unicode`` exists only on Python 2; on Python 3 this
    # isinstance check raises NameError -- confirm the intended runtime.
    if isinstance(filename_or_file, (str, unicode)):
        filename_or_file = open(filename_or_file, "wb")
    xmlstr = self.toxml(indent, newl, encoding)
    filename_or_file.write(xmlstr)
    # NOTE(review): closes the handle even when the caller passed in a
    # file object of their own.
    filename_or_file.close()
def function[writexml, parameter[self, filename_or_file, indent, newl, encoding]]: constant[ Write the manifest as XML to a file or file object ] if <ast.UnaryOp object at 0x7da1b0c53970> begin[:] variable[filename_or_file] assign[=] name[self].filename if call[name[isinstance], parameter[name[filename_or_file], tuple[[<ast.Name object at 0x7da1b0c53d60>, <ast.Name object at 0x7da1b0c51db0>]]]] begin[:] variable[filename_or_file] assign[=] call[name[open], parameter[name[filename_or_file], constant[wb]]] variable[xmlstr] assign[=] call[name[self].toxml, parameter[name[indent], name[newl], name[encoding]]] call[name[filename_or_file].write, parameter[name[xmlstr]]] call[name[filename_or_file].close, parameter[]]
keyword[def] identifier[writexml] ( identifier[self] , identifier[filename_or_file] = keyword[None] , identifier[indent] = literal[string] , identifier[newl] = identifier[os] . identifier[linesep] , identifier[encoding] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[filename_or_file] : identifier[filename_or_file] = identifier[self] . identifier[filename] keyword[if] identifier[isinstance] ( identifier[filename_or_file] ,( identifier[str] , identifier[unicode] )): identifier[filename_or_file] = identifier[open] ( identifier[filename_or_file] , literal[string] ) identifier[xmlstr] = identifier[self] . identifier[toxml] ( identifier[indent] , identifier[newl] , identifier[encoding] ) identifier[filename_or_file] . identifier[write] ( identifier[xmlstr] ) identifier[filename_or_file] . identifier[close] ()
def writexml(self, filename_or_file=None, indent=' ', newl=os.linesep, encoding='UTF-8'): """ Write the manifest as XML to a file or file object """ if not filename_or_file: filename_or_file = self.filename # depends on [control=['if'], data=[]] if isinstance(filename_or_file, (str, unicode)): filename_or_file = open(filename_or_file, 'wb') # depends on [control=['if'], data=[]] xmlstr = self.toxml(indent, newl, encoding) filename_or_file.write(xmlstr) filename_or_file.close()
def extract_values(field_list=None, filter_arg_dict=None, out_stream=None):
    """Get list of dicts where each dict holds values from one SciObj.

    Args:
      field_list: list of str
        List of field names for which to return values. Must be strings
        from FIELD_NAME_TO_generate_dict.keys(). If None, return all
        fields.

      filter_arg_dict: dict
        Dict of arguments to pass to ``ScienceObject.objects.filter()``.

      out_stream:
        Optional stream; when given, results are written to it instead
        of being returned as a list.

    Returns:
      list of dict: The keys in the returned dict correspond to the
      field names in ``field_list``.

    Raises:
      raise d1_common.types.exceptions.InvalidRequest() if ``field_list``
      contains any invalid field names. A list of the invalid fields is
      included in the exception.
    """
    lookup_dict, generate_dict = _split_field_list(field_list)
    query, annotate_key_list = _create_query(filter_arg_dict, generate_dict)

    # Field lookup strings plus any keys added by query annotation.
    lookup_list = [entry["lookup_str"] for entry in lookup_dict.values()]
    lookup_list += annotate_key_list

    if out_stream is not None:
        return _write_stream(
            query, lookup_list, lookup_dict, generate_dict, out_stream
        )
    return _create_sciobj_list(query, lookup_list, lookup_dict, generate_dict)
def function[extract_values, parameter[field_list, filter_arg_dict, out_stream]]: constant[Get list of dicts where each dict holds values from one SciObj. Args: field_list: list of str List of field names for which to return values. Must be strings from FIELD_NAME_TO_generate_dict.keys(). If None, return all fields. filter_arg_dict: dict Dict of arguments to pass to ``ScienceObject.objects.filter()``. Returns: list of dict: The keys in the returned dict correspond to the field names in ``field_list``. Raises: raise d1_common.types.exceptions.InvalidRequest() if ``field_list`` contains any invalid field names. A list of the invalid fields is included in the exception. ] <ast.Tuple object at 0x7da1b19089d0> assign[=] call[name[_split_field_list], parameter[name[field_list]]] <ast.Tuple object at 0x7da1b1908af0> assign[=] call[name[_create_query], parameter[name[filter_arg_dict], name[generate_dict]]] variable[lookup_list] assign[=] binary_operation[<ast.ListComp object at 0x7da1b1908bb0> + name[annotate_key_list]] if compare[name[out_stream] is constant[None]] begin[:] return[call[name[_create_sciobj_list], parameter[name[query], name[lookup_list], name[lookup_dict], name[generate_dict]]]]
keyword[def] identifier[extract_values] ( identifier[field_list] = keyword[None] , identifier[filter_arg_dict] = keyword[None] , identifier[out_stream] = keyword[None] ): literal[string] identifier[lookup_dict] , identifier[generate_dict] = identifier[_split_field_list] ( identifier[field_list] ) identifier[query] , identifier[annotate_key_list] = identifier[_create_query] ( identifier[filter_arg_dict] , identifier[generate_dict] ) identifier[lookup_list] =[ identifier[v] [ literal[string] ] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[lookup_dict] . identifier[items] ()]+ identifier[annotate_key_list] keyword[if] identifier[out_stream] keyword[is] keyword[None] : keyword[return] identifier[_create_sciobj_list] ( identifier[query] , identifier[lookup_list] , identifier[lookup_dict] , identifier[generate_dict] ) keyword[else] : keyword[return] identifier[_write_stream] ( identifier[query] , identifier[lookup_list] , identifier[lookup_dict] , identifier[generate_dict] , identifier[out_stream] )
def extract_values(field_list=None, filter_arg_dict=None, out_stream=None): """Get list of dicts where each dict holds values from one SciObj. Args: field_list: list of str List of field names for which to return values. Must be strings from FIELD_NAME_TO_generate_dict.keys(). If None, return all fields. filter_arg_dict: dict Dict of arguments to pass to ``ScienceObject.objects.filter()``. Returns: list of dict: The keys in the returned dict correspond to the field names in ``field_list``. Raises: raise d1_common.types.exceptions.InvalidRequest() if ``field_list`` contains any invalid field names. A list of the invalid fields is included in the exception. """ (lookup_dict, generate_dict) = _split_field_list(field_list) (query, annotate_key_list) = _create_query(filter_arg_dict, generate_dict) lookup_list = [v['lookup_str'] for (k, v) in lookup_dict.items()] + annotate_key_list if out_stream is None: return _create_sciobj_list(query, lookup_list, lookup_dict, generate_dict) # depends on [control=['if'], data=[]] else: return _write_stream(query, lookup_list, lookup_dict, generate_dict, out_stream)
def save_configuration_dict(h5_file, configuation_name, configuration, **kwargs):
    '''Stores any configuration dictionary to HDF5 file.

    Parameters
    ----------
    h5_file : string, file
        Filename of the HDF5 configuration file or file object.
    configuation_name : str
        Configuration name. Will be used for table name.
    configuration : dict
        Configuration dictionary.
    '''
    def save_conf():
        # Overwrite semantics: drop any existing table of the same name.
        try:
            h5_file.remove_node(h5_file.root.configuration, name=configuation_name)
        except tb.NodeError:
            pass
        # Create /configuration, or reuse it if it already exists.
        try:
            configuration_group = h5_file.create_group(h5_file.root, "configuration")
        except tb.NodeError:
            configuration_group = h5_file.root.configuration

        # One name/value row per configuration entry; values are stringified.
        scan_param_table = h5_file.create_table(configuration_group, name=configuation_name, description=NameValue, title=configuation_name)

        row_scan_param = scan_param_table.row
        # NOTE(review): dict.iteritems is Python 2 only -- this raises
        # AttributeError on Python 3; confirm the supported runtime.
        for key, value in dict.iteritems(configuration):
            row_scan_param['name'] = key
            row_scan_param['value'] = str(value)
            row_scan_param.append()
        scan_param_table.flush()

    # Accept either an already-open PyTables file or a path (".h5" is
    # appended if the extension differs).
    if isinstance(h5_file, tb.file.File):
        save_conf()
    else:
        if os.path.splitext(h5_file)[1].strip().lower() != ".h5":
            h5_file = os.path.splitext(h5_file)[0] + ".h5"
        # Rebinds h5_file; save_conf closes over the open file object.
        with tb.open_file(h5_file, mode="a", title='', **kwargs) as h5_file:
            save_conf()
def function[save_configuration_dict, parameter[h5_file, configuation_name, configuration]]: constant[Stores any configuration dictionary to HDF5 file. Parameters ---------- h5_file : string, file Filename of the HDF5 configuration file or file object. configuation_name : str Configuration name. Will be used for table name. configuration : dict Configuration dictionary. ] def function[save_conf, parameter[]]: <ast.Try object at 0x7da1b11c48b0> <ast.Try object at 0x7da1b11c4fa0> variable[scan_param_table] assign[=] call[name[h5_file].create_table, parameter[name[configuration_group]]] variable[row_scan_param] assign[=] name[scan_param_table].row for taget[tuple[[<ast.Name object at 0x7da1b11c53f0>, <ast.Name object at 0x7da1b11c5330>]]] in starred[call[name[dict].iteritems, parameter[name[configuration]]]] begin[:] call[name[row_scan_param]][constant[name]] assign[=] name[key] call[name[row_scan_param]][constant[value]] assign[=] call[name[str], parameter[name[value]]] call[name[row_scan_param].append, parameter[]] call[name[scan_param_table].flush, parameter[]] if call[name[isinstance], parameter[name[h5_file], name[tb].file.File]] begin[:] call[name[save_conf], parameter[]]
keyword[def] identifier[save_configuration_dict] ( identifier[h5_file] , identifier[configuation_name] , identifier[configuration] ,** identifier[kwargs] ): literal[string] keyword[def] identifier[save_conf] (): keyword[try] : identifier[h5_file] . identifier[remove_node] ( identifier[h5_file] . identifier[root] . identifier[configuration] , identifier[name] = identifier[configuation_name] ) keyword[except] identifier[tb] . identifier[NodeError] : keyword[pass] keyword[try] : identifier[configuration_group] = identifier[h5_file] . identifier[create_group] ( identifier[h5_file] . identifier[root] , literal[string] ) keyword[except] identifier[tb] . identifier[NodeError] : identifier[configuration_group] = identifier[h5_file] . identifier[root] . identifier[configuration] identifier[scan_param_table] = identifier[h5_file] . identifier[create_table] ( identifier[configuration_group] , identifier[name] = identifier[configuation_name] , identifier[description] = identifier[NameValue] , identifier[title] = identifier[configuation_name] ) identifier[row_scan_param] = identifier[scan_param_table] . identifier[row] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[dict] . identifier[iteritems] ( identifier[configuration] ): identifier[row_scan_param] [ literal[string] ]= identifier[key] identifier[row_scan_param] [ literal[string] ]= identifier[str] ( identifier[value] ) identifier[row_scan_param] . identifier[append] () identifier[scan_param_table] . identifier[flush] () keyword[if] identifier[isinstance] ( identifier[h5_file] , identifier[tb] . identifier[file] . identifier[File] ): identifier[save_conf] () keyword[else] : keyword[if] identifier[os] . identifier[path] . identifier[splitext] ( identifier[h5_file] )[ literal[int] ]. identifier[strip] (). identifier[lower] ()!= literal[string] : identifier[h5_file] = identifier[os] . identifier[path] . 
identifier[splitext] ( identifier[h5_file] )[ literal[int] ]+ literal[string] keyword[with] identifier[tb] . identifier[open_file] ( identifier[h5_file] , identifier[mode] = literal[string] , identifier[title] = literal[string] ,** identifier[kwargs] ) keyword[as] identifier[h5_file] : identifier[save_conf] ()
def save_configuration_dict(h5_file, configuation_name, configuration, **kwargs): """Stores any configuration dictionary to HDF5 file. Parameters ---------- h5_file : string, file Filename of the HDF5 configuration file or file object. configuation_name : str Configuration name. Will be used for table name. configuration : dict Configuration dictionary. """ def save_conf(): try: h5_file.remove_node(h5_file.root.configuration, name=configuation_name) # depends on [control=['try'], data=[]] except tb.NodeError: pass # depends on [control=['except'], data=[]] try: configuration_group = h5_file.create_group(h5_file.root, 'configuration') # depends on [control=['try'], data=[]] except tb.NodeError: configuration_group = h5_file.root.configuration # depends on [control=['except'], data=[]] scan_param_table = h5_file.create_table(configuration_group, name=configuation_name, description=NameValue, title=configuation_name) row_scan_param = scan_param_table.row for (key, value) in dict.iteritems(configuration): row_scan_param['name'] = key row_scan_param['value'] = str(value) row_scan_param.append() # depends on [control=['for'], data=[]] scan_param_table.flush() if isinstance(h5_file, tb.file.File): save_conf() # depends on [control=['if'], data=[]] else: if os.path.splitext(h5_file)[1].strip().lower() != '.h5': h5_file = os.path.splitext(h5_file)[0] + '.h5' # depends on [control=['if'], data=[]] with tb.open_file(h5_file, mode='a', title='', **kwargs) as h5_file: save_conf() # depends on [control=['with'], data=[]]
def set_default(cls, name):
    """Make *name* the application-wide default depot.

    Raises:
        RuntimeError: if no depot has been configured under ``name``.
    """
    if name in cls._depots:
        cls._default_depot = name
    else:
        raise RuntimeError('%s depot has not been configured' % (name,))
def function[set_default, parameter[cls, name]]: constant[Replaces the current application default depot] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[cls]._depots] begin[:] <ast.Raise object at 0x7da20c7c8730> name[cls]._default_depot assign[=] name[name]
keyword[def] identifier[set_default] ( identifier[cls] , identifier[name] ): literal[string] keyword[if] identifier[name] keyword[not] keyword[in] identifier[cls] . identifier[_depots] : keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[name] ,)) identifier[cls] . identifier[_default_depot] = identifier[name]
def set_default(cls, name): """Replaces the current application default depot""" if name not in cls._depots: raise RuntimeError('%s depot has not been configured' % (name,)) # depends on [control=['if'], data=['name']] cls._default_depot = name
def is_job_complete(job_id, conn=None):
    """Check whether a Brain.Jobs entry has reached a completed status.

    Completed statuses are defined in statics as Done|Stopped|Error.

    :param job_id: <str> id for the job
    :param conn: (optional)<connection> to run on
    :return: <dict> the job document when complete, False otherwise
    """
    job = RBJ.get(job_id).run(conn)
    if not job:
        return False
    return job if job.get(STATUS_FIELD) in COMPLETED else False
def function[is_job_complete, parameter[job_id, conn]]: constant[ is_job_done function checks to if Brain.Jobs Status is Completed Completed is defined in statics as Done|Stopped|Error :param job_id: <str> id for the job :param conn: (optional)<connection> to run on :return: <dict> if job is done <false> if ] variable[result] assign[=] constant[False] variable[job] assign[=] call[call[name[RBJ].get, parameter[name[job_id]]].run, parameter[name[conn]]] if <ast.BoolOp object at 0x7da1b157a7a0> begin[:] variable[result] assign[=] name[job] return[name[result]]
keyword[def] identifier[is_job_complete] ( identifier[job_id] , identifier[conn] = keyword[None] ): literal[string] identifier[result] = keyword[False] identifier[job] = identifier[RBJ] . identifier[get] ( identifier[job_id] ). identifier[run] ( identifier[conn] ) keyword[if] identifier[job] keyword[and] identifier[job] . identifier[get] ( identifier[STATUS_FIELD] ) keyword[in] identifier[COMPLETED] : identifier[result] = identifier[job] keyword[return] identifier[result]
def is_job_complete(job_id, conn=None): """ is_job_done function checks to if Brain.Jobs Status is Completed Completed is defined in statics as Done|Stopped|Error :param job_id: <str> id for the job :param conn: (optional)<connection> to run on :return: <dict> if job is done <false> if """ result = False job = RBJ.get(job_id).run(conn) if job and job.get(STATUS_FIELD) in COMPLETED: result = job # depends on [control=['if'], data=[]] return result
def get_url(self, obj):
    """
    Return the URL at which the detail page should appear.

    Raises ``ImproperlyConfigured`` when the object does not define a
    ``get_absolute_url`` method, or that method returns a falsy URL.
    """
    getter = getattr(obj, 'get_absolute_url', None)
    if getter is None or not getter():
        raise ImproperlyConfigured(
            "No URL configured. You must either set a ``get_absolute_url`` "
            "method on the %s model or override the %s view's ``get_url`` "
            "method" % (obj.__class__.__name__, self.__class__.__name__))
    return getter()
def function[get_url, parameter[self, obj]]: constant[ The URL at which the detail page should appear. ] if <ast.BoolOp object at 0x7da204566da0> begin[:] <ast.Raise object at 0x7da20e9b09a0> return[call[name[obj].get_absolute_url, parameter[]]]
keyword[def] identifier[get_url] ( identifier[self] , identifier[obj] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[obj] , literal[string] ) keyword[or] keyword[not] identifier[obj] . identifier[get_absolute_url] (): keyword[raise] identifier[ImproperlyConfigured] ( literal[string] %( identifier[obj] . identifier[__class__] . identifier[__name__] , identifier[self] . identifier[__class__] . identifier[__name__] )) keyword[return] identifier[obj] . identifier[get_absolute_url] ()
def get_url(self, obj): """ The URL at which the detail page should appear. """ if not hasattr(obj, 'get_absolute_url') or not obj.get_absolute_url(): raise ImproperlyConfigured("No URL configured. You must either set a ``get_absolute_url`` method on the %s model or override the %s view's ``get_url`` method" % (obj.__class__.__name__, self.__class__.__name__)) # depends on [control=['if'], data=[]] return obj.get_absolute_url()
def norm(self, coords: Vector3Like, frac_coords: bool = True) -> float:
    """
    Return the norm of the given vector(s).

    Args:
        coords: Array-like object with the coordinates.
        frac_coords: Boolean stating whether the vector corresponds to
            fractional or cartesian coordinates.

    Returns:
        one-dimensional `numpy` array.
    """
    squared_norm = self.dot(coords, coords, frac_coords=frac_coords)
    return np.sqrt(squared_norm)
def function[norm, parameter[self, coords, frac_coords]]: constant[ Compute the norm of vector(s). Args: coords: Array-like object with the coordinates. frac_coords: Boolean stating whether the vector corresponds to fractional or cartesian coordinates. Returns: one-dimensional `numpy` array. ] return[call[name[np].sqrt, parameter[call[name[self].dot, parameter[name[coords], name[coords]]]]]]
keyword[def] identifier[norm] ( identifier[self] , identifier[coords] : identifier[Vector3Like] , identifier[frac_coords] : identifier[bool] = keyword[True] )-> identifier[float] : literal[string] keyword[return] identifier[np] . identifier[sqrt] ( identifier[self] . identifier[dot] ( identifier[coords] , identifier[coords] , identifier[frac_coords] = identifier[frac_coords] ))
def norm(self, coords: Vector3Like, frac_coords: bool=True) -> float: """ Compute the norm of vector(s). Args: coords: Array-like object with the coordinates. frac_coords: Boolean stating whether the vector corresponds to fractional or cartesian coordinates. Returns: one-dimensional `numpy` array. """ return np.sqrt(self.dot(coords, coords, frac_coords=frac_coords))
def get_yml_content(file_path):
    '''Load yaml file content.

    Parameters
    ----------
    file_path : str
        Path of the YAML file to read.

    Returns
    -------
    The parsed YAML document. On a YAML syntax error or any other failure
    the error is printed via ``print_error`` and the process exits with
    status 1.
    '''
    try:
        with open(file_path, 'r') as file:
            # NOTE(review): yaml.Loader can construct arbitrary Python
            # objects; prefer yaml.SafeLoader if this file may come from
            # an untrusted source -- confirm before changing behavior.
            return yaml.load(file, Loader=yaml.Loader)
    except yaml.scanner.ScannerError:
        # Malformed YAML: report a concise message and abort.
        print_error('yaml file format error!')
        exit(1)
    except Exception as exception:
        # Any other failure (e.g. missing file): report it and abort.
        print_error(exception)
        exit(1)
def function[get_yml_content, parameter[file_path]]: constant[Load yaml file content] <ast.Try object at 0x7da2041db100>
keyword[def] identifier[get_yml_content] ( identifier[file_path] ): literal[string] keyword[try] : keyword[with] identifier[open] ( identifier[file_path] , literal[string] ) keyword[as] identifier[file] : keyword[return] identifier[yaml] . identifier[load] ( identifier[file] , identifier[Loader] = identifier[yaml] . identifier[Loader] ) keyword[except] identifier[yaml] . identifier[scanner] . identifier[ScannerError] keyword[as] identifier[err] : identifier[print_error] ( literal[string] ) identifier[exit] ( literal[int] ) keyword[except] identifier[Exception] keyword[as] identifier[exception] : identifier[print_error] ( identifier[exception] ) identifier[exit] ( literal[int] )
def get_yml_content(file_path): """Load yaml file content""" try: with open(file_path, 'r') as file: return yaml.load(file, Loader=yaml.Loader) # depends on [control=['with'], data=['file']] # depends on [control=['try'], data=[]] except yaml.scanner.ScannerError as err: print_error('yaml file format error!') exit(1) # depends on [control=['except'], data=[]] except Exception as exception: print_error(exception) exit(1) # depends on [control=['except'], data=['exception']]
def get_additional_rewards(api):
    """Return the list of non-user rewards (potion, armoire, gear)."""
    content = get_content(api)
    tasks = [content['potion'], content['armoire']]
    tasks += api.user.inventory.buy.get()
    # Mirror the content key into the id/alias fields used elsewhere.
    for task in tasks:
        key = task['key']
        task['id'] = key
        task['alias'] = key
    return tasks
def function[get_additional_rewards, parameter[api]]: constant[returns list of non-user rewards (potion, armoire, gear)] variable[c] assign[=] call[name[get_content], parameter[name[api]]] variable[tasks] assign[=] <ast.ListComp object at 0x7da1b1077580> call[name[tasks].extend, parameter[call[name[api].user.inventory.buy.get, parameter[]]]] for taget[name[task]] in starred[name[tasks]] begin[:] call[name[task]][constant[id]] assign[=] call[name[task]][constant[key]] return[name[tasks]]
keyword[def] identifier[get_additional_rewards] ( identifier[api] ): literal[string] identifier[c] = identifier[get_content] ( identifier[api] ) identifier[tasks] =[ identifier[c] [ identifier[i] ] keyword[for] identifier[i] keyword[in] [ literal[string] , literal[string] ]] identifier[tasks] . identifier[extend] ( identifier[api] . identifier[user] . identifier[inventory] . identifier[buy] . identifier[get] ()) keyword[for] identifier[task] keyword[in] identifier[tasks] : identifier[task] [ literal[string] ]= identifier[task] [ literal[string] ]= identifier[task] [ literal[string] ] keyword[return] identifier[tasks]
def get_additional_rewards(api): """returns list of non-user rewards (potion, armoire, gear)""" c = get_content(api) tasks = [c[i] for i in ['potion', 'armoire']] tasks.extend(api.user.inventory.buy.get()) for task in tasks: task['id'] = task['alias'] = task['key'] # depends on [control=['for'], data=['task']] return tasks
def easeInOutElastic(n, amplitude=1, period=0.5):
    """An elastic tween that wobbles toward the midpoint.

    Args:
        n (float): The time progress, starting at 0.0 and ending at 1.0.

    Returns:
        (float) The line progress, starting at 0.0 and ending at 1.0.
        Suitable for passing to getPointOnLine().
    """
    _checkRange(n)
    # Run the ease-in curve over the first half, the ease-out curve
    # (shifted up by 0.5) over the second half.
    scaled = n * 2
    if scaled < 1:
        return easeInElastic(scaled, amplitude=amplitude, period=period) / 2
    return 0.5 + easeOutElastic(scaled - 1, amplitude=amplitude, period=period) / 2
def function[easeInOutElastic, parameter[n, amplitude, period]]: constant[An elastic tween function wobbles towards the midpoint. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine(). ] call[name[_checkRange], parameter[name[n]]] <ast.AugAssign object at 0x7da18f09d9c0> if compare[name[n] less[<] constant[1]] begin[:] return[binary_operation[call[name[easeInElastic], parameter[name[n]]] / constant[2]]]
keyword[def] identifier[easeInOutElastic] ( identifier[n] , identifier[amplitude] = literal[int] , identifier[period] = literal[int] ): literal[string] identifier[_checkRange] ( identifier[n] ) identifier[n] *= literal[int] keyword[if] identifier[n] < literal[int] : keyword[return] identifier[easeInElastic] ( identifier[n] , identifier[amplitude] = identifier[amplitude] , identifier[period] = identifier[period] )/ literal[int] keyword[else] : keyword[return] identifier[easeOutElastic] ( identifier[n] - literal[int] , identifier[amplitude] = identifier[amplitude] , identifier[period] = identifier[period] )/ literal[int] + literal[int]
def easeInOutElastic(n, amplitude=1, period=0.5): """An elastic tween function wobbles towards the midpoint. Args: n (float): The time progress, starting at 0.0 and ending at 1.0. Returns: (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine(). """ _checkRange(n) n *= 2 if n < 1: return easeInElastic(n, amplitude=amplitude, period=period) / 2 # depends on [control=['if'], data=['n']] else: return easeOutElastic(n - 1, amplitude=amplitude, period=period) / 2 + 0.5
def ensure_object_is_ndarray(item, title):
    """
    Ensures that a given mapping matrix is a dense numpy array. Raises a
    helpful TypeError if otherwise.

    Parameters
    ----------
    item : object
        The object whose type is being checked.
    title : str
        Name of ``item``, interpolated into the error message.

    Returns
    -------
    None
    """
    # `assert` would be stripped under `python -O`; validate explicitly so
    # the check always runs.
    if not isinstance(title, str):
        raise TypeError("title must be a str. {} passed instead."
                        .format(type(title)))
    if not isinstance(item, np.ndarray):
        msg = "{} must be a np.ndarray. {} passed instead."
        raise TypeError(msg.format(title, type(item)))
    return None
def function[ensure_object_is_ndarray, parameter[item, title]]: constant[ Ensures that a given mapping matrix is a dense numpy array. Raises a helpful TypeError if otherwise. ] assert[call[name[isinstance], parameter[name[title], name[str]]]] if <ast.UnaryOp object at 0x7da1b138f820> begin[:] variable[msg] assign[=] constant[{} must be a np.ndarray. {} passed instead.] <ast.Raise object at 0x7da1b138f400> return[constant[None]]
keyword[def] identifier[ensure_object_is_ndarray] ( identifier[item] , identifier[title] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[title] , identifier[str] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[item] , identifier[np] . identifier[ndarray] ): identifier[msg] = literal[string] keyword[raise] identifier[TypeError] ( identifier[msg] . identifier[format] ( identifier[title] , identifier[type] ( identifier[item] ))) keyword[return] keyword[None]
def ensure_object_is_ndarray(item, title): """ Ensures that a given mapping matrix is a dense numpy array. Raises a helpful TypeError if otherwise. """ assert isinstance(title, str) if not isinstance(item, np.ndarray): msg = '{} must be a np.ndarray. {} passed instead.' raise TypeError(msg.format(title, type(item))) # depends on [control=['if'], data=[]] return None
def send_discover(self):
    """Send the client's DISCOVER packet and arm the resend timeout."""
    assert self.client
    assert self.current_state in (STATE_INIT, STATE_SELECTING)
    sendp(self.client.gen_discover())
    # FIXME:20 check that this is correct,: all or only discover?
    if self.discover_attempts < MAX_ATTEMPTS_DISCOVER:
        self.discover_attempts += 1
    self.set_timeout(self.current_state,
                     self.timeout_selecting,
                     gen_timeout_resend(self.discover_attempts))
def function[send_discover, parameter[self]]: constant[Send discover.] assert[name[self].client] assert[<ast.BoolOp object at 0x7da1b03b8ee0>] variable[pkt] assign[=] call[name[self].client.gen_discover, parameter[]] call[name[sendp], parameter[name[pkt]]] if compare[name[self].discover_attempts less[<] name[MAX_ATTEMPTS_DISCOVER]] begin[:] <ast.AugAssign object at 0x7da1b03b8eb0> variable[timeout] assign[=] call[name[gen_timeout_resend], parameter[name[self].discover_attempts]] call[name[self].set_timeout, parameter[name[self].current_state, name[self].timeout_selecting, name[timeout]]]
keyword[def] identifier[send_discover] ( identifier[self] ): literal[string] keyword[assert] identifier[self] . identifier[client] keyword[assert] identifier[self] . identifier[current_state] == identifier[STATE_INIT] keyword[or] identifier[self] . identifier[current_state] == identifier[STATE_SELECTING] identifier[pkt] = identifier[self] . identifier[client] . identifier[gen_discover] () identifier[sendp] ( identifier[pkt] ) keyword[if] identifier[self] . identifier[discover_attempts] < identifier[MAX_ATTEMPTS_DISCOVER] : identifier[self] . identifier[discover_attempts] += literal[int] identifier[timeout] = identifier[gen_timeout_resend] ( identifier[self] . identifier[discover_attempts] ) identifier[self] . identifier[set_timeout] ( identifier[self] . identifier[current_state] , identifier[self] . identifier[timeout_selecting] , identifier[timeout] )
def send_discover(self): """Send discover.""" assert self.client assert self.current_state == STATE_INIT or self.current_state == STATE_SELECTING pkt = self.client.gen_discover() sendp(pkt) # FIXME:20 check that this is correct,: all or only discover? if self.discover_attempts < MAX_ATTEMPTS_DISCOVER: self.discover_attempts += 1 # depends on [control=['if'], data=[]] timeout = gen_timeout_resend(self.discover_attempts) self.set_timeout(self.current_state, self.timeout_selecting, timeout)
def make_src_pkg(dest_dir, spec, sources, env=None, saltenv='base', runas='root'):
    '''
    Create a platform specific source package from the given platform spec/control file
    and sources

    CLI Example:

    **Debian**

    .. code-block:: bash

        salt '*' pkgbuild.make_src_pkg /var/www/html/
                https://raw.githubusercontent.com/saltstack/libnacl/master/pkg/deb/python-libnacl.control.tar.xz
                https://pypi.python.org/packages/source/l/libnacl/libnacl-1.3.5.tar.gz

    This example command should build the libnacl SOURCE package and place it in
    /var/www/html/ on the minion

    dest_dir
        Absolute path for directory to write source package

    spec
        Absolute path to spec file or equivalent

    sources
        Absolute path to source files to build source package from

    env : None
        A list or dictionary of environment variables to be set prior to execution.
        Example:

        .. code-block:: yaml

            - env:
                - DEB_BUILD_OPTIONS: 'nocheck'

        .. warning::

            The above illustrates a common PyYAML pitfall, that **yes**,
            **no**, **on**, **off**, **true**, and **false** are all loaded as
            boolean ``True`` and ``False`` values, and must be enclosed in
            quotes to be used as strings. More info on this (and other) PyYAML
            idiosyncrasies can be found :ref:`here <yaml-idiosyncrasies>`.

    saltenv: base
        Salt environment variables

    runas : root
        .. versionadded:: fluorine

        User to create the files and directories

        .. note::

            Ensure the user has correct permissions to any files and
            directories which are to be utilized.
    '''
    # Set up the pbuilder environment for the build user (helper defined
    # elsewhere in this module).
    _create_pbuilders(env, runas)
    # Temporary working tree where sources are staged and the package is built.
    tree_base = _mk_tree()
    ret = []
    if not os.path.isdir(dest_dir):
        os.makedirs(dest_dir)
    # ensure directories are writable
    root_user = 'root'
    retrc = 0
    # chown the working tree to the build user; abort on failure.
    cmd = "chown {0}:{0} {1}".format(runas, tree_base)
    retrc = __salt__['cmd.retcode'](cmd, runas='root')
    if retrc != 0:
        raise SaltInvocationError(
            "make_src_pkg ensuring tree_base \'{0}\' ownership failed with return error \'{1}\', "
            "check logs for further details".format(
                tree_base,
                retrc)
        )
    # chown the destination directory as well, so results can be copied out.
    cmd = "chown {0}:{0} {1}".format(runas, dest_dir)
    retrc = __salt__['cmd.retcode'](cmd, runas=root_user)
    if retrc != 0:
        raise SaltInvocationError(
            "make_src_pkg ensuring dest_dir \'{0}\' ownership failed with return error \'{1}\', "
            "check logs for further details".format(
                dest_dir,
                retrc)
        )
    # Fetch the spec/control file into the working tree (helper defined
    # elsewhere; `saltenv` selects the fileserver environment).
    spec_pathfile = _get_spec(tree_base, spec, saltenv)
    # build salt equivalents from scratch
    if isinstance(sources, six.string_types):
        sources = sources.split(',')
    for src in sources:
        _get_src(tree_base, src, saltenv)
    # .dsc then assumes sources already build
    if spec_pathfile.endswith('.dsc'):
        # Sources are prebuilt: just copy everything staged so far to dest_dir.
        for efile in os.listdir(tree_base):
            full = os.path.join(tree_base, efile)
            trgt = os.path.join(dest_dir, efile)
            shutil.copy(full, trgt)
            ret.append(trgt)

        return ret
    # obtain name of 'python setup.py sdist' generated tarball, extract the version
    # and manipulate the name for debian use (convert minix and add '+ds')
    salttarball = None
    for afile in os.listdir(tree_base):
        if afile.startswith('salt-') and afile.endswith('.tar.gz'):
            salttarball = afile
            break
    else:
        # No sdist tarball was found; nothing to build, return empty result.
        return ret
    # "salt-<version>" -> "salt_<version>+ds" (Debian source-name convention:
    # last '-' becomes '_', '+ds' marks a repackaged upstream tarball).
    frontname = salttarball.split('.tar.gz')
    salttar_name = frontname[0]
    k = salttar_name.rfind('-')
    debname = salttar_name[:k] + '_' + salttar_name[k+1:]
    debname += '+ds'
    debname_orig = debname + '.orig.tar.gz'
    abspath_debname = os.path.join(tree_base, debname)
    # Unpack the sdist, rename its directory to the Debian name, and re-pack
    # it as the .orig tarball; OR together the return codes and check once at
    # the end so every step runs.
    cmd = 'tar -xvzf {0}'.format(salttarball)
    retrc = __salt__['cmd.retcode'](cmd, cwd=tree_base, runas=root_user)
    cmd = 'mv {0} {1}'.format(salttar_name, debname)
    retrc |= __salt__['cmd.retcode'](cmd, cwd=tree_base, runas=root_user)
    cmd = 'tar -cvzf {0} {1}'.format(os.path.join(tree_base, debname_orig), debname)
    retrc |= __salt__['cmd.retcode'](cmd, cwd=tree_base, runas=root_user)
    cmd = 'rm -f {0}'.format(salttarball)
    retrc |= __salt__['cmd.retcode'](cmd, cwd=tree_base, runas=root_user, env=env)
    # Overlay the debian/ control files from the spec tarball (.tar.xz, hence
    # -J) into the unpacked source tree, then remove the spec archive itself.
    cmd = 'cp {0} {1}'.format(spec_pathfile, abspath_debname)
    retrc |= __salt__['cmd.retcode'](cmd, cwd=abspath_debname, runas=root_user)
    cmd = 'tar -xvJf {0}'.format(spec_pathfile)
    retrc |= __salt__['cmd.retcode'](cmd, cwd=abspath_debname, runas=root_user, env=env)
    cmd = 'rm -f {0}'.format(os.path.basename(spec_pathfile))
    retrc |= __salt__['cmd.retcode'](cmd, cwd=abspath_debname, runas=root_user)
    # Build the unsigned source package (-S source only, -uc/-us unsigned,
    # -sa include original source).
    cmd = 'debuild -S -uc -us -sa'
    retrc |= __salt__['cmd.retcode'](cmd, cwd=abspath_debname, runas=root_user, python_shell=True, env=env)
    # Clean up the unpacked source tree; debuild left its outputs in tree_base.
    cmd = 'rm -fR {0}'.format(abspath_debname)
    retrc |= __salt__['cmd.retcode'](cmd, runas=root_user)
    if retrc != 0:
        raise SaltInvocationError(
            'Make source package for destination directory {0}, spec {1}, sources {2}, failed '
            'with return error {3}, check logs for further details'.format(
                dest_dir,
                spec,
                sources,
                retrc)
        )
    # Copy all build products except the .build log to dest_dir and return
    # the list of copied paths.
    for dfile in os.listdir(tree_base):
        if not dfile.endswith('.build'):
            full = os.path.join(tree_base, dfile)
            trgt = os.path.join(dest_dir, dfile)
            shutil.copy(full, trgt)
            ret.append(trgt)

    return ret
def function[make_src_pkg, parameter[dest_dir, spec, sources, env, saltenv, runas]]: constant[ Create a platform specific source package from the given platform spec/control file and sources CLI Example: **Debian** .. code-block:: bash salt '*' pkgbuild.make_src_pkg /var/www/html/ https://raw.githubusercontent.com/saltstack/libnacl/master/pkg/deb/python-libnacl.control.tar.xz https://pypi.python.org/packages/source/l/libnacl/libnacl-1.3.5.tar.gz This example command should build the libnacl SOURCE package and place it in /var/www/html/ on the minion dest_dir Absolute path for directory to write source package spec Absolute path to spec file or equivalent sources Absolute path to source files to build source package from env : None A list or dictionary of environment variables to be set prior to execution. Example: .. code-block:: yaml - env: - DEB_BUILD_OPTIONS: 'nocheck' .. warning:: The above illustrates a common PyYAML pitfall, that **yes**, **no**, **on**, **off**, **true**, and **false** are all loaded as boolean ``True`` and ``False`` values, and must be enclosed in quotes to be used as strings. More info on this (and other) PyYAML idiosyncrasies can be found :ref:`here <yaml-idiosyncrasies>`. saltenv: base Salt environment variables runas : root .. versionadded:: fluorine User to create the files and directories .. note:: Ensure the user has correct permissions to any files and directories which are to be utilized. 
] call[name[_create_pbuilders], parameter[name[env], name[runas]]] variable[tree_base] assign[=] call[name[_mk_tree], parameter[]] variable[ret] assign[=] list[[]] if <ast.UnaryOp object at 0x7da1b1c22e90> begin[:] call[name[os].makedirs, parameter[name[dest_dir]]] variable[root_user] assign[=] constant[root] variable[retrc] assign[=] constant[0] variable[cmd] assign[=] call[constant[chown {0}:{0} {1}].format, parameter[name[runas], name[tree_base]]] variable[retrc] assign[=] call[call[name[__salt__]][constant[cmd.retcode]], parameter[name[cmd]]] if compare[name[retrc] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b1c206d0> variable[cmd] assign[=] call[constant[chown {0}:{0} {1}].format, parameter[name[runas], name[dest_dir]]] variable[retrc] assign[=] call[call[name[__salt__]][constant[cmd.retcode]], parameter[name[cmd]]] if compare[name[retrc] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b1c222f0> variable[spec_pathfile] assign[=] call[name[_get_spec], parameter[name[tree_base], name[spec], name[saltenv]]] if call[name[isinstance], parameter[name[sources], name[six].string_types]] begin[:] variable[sources] assign[=] call[name[sources].split, parameter[constant[,]]] for taget[name[src]] in starred[name[sources]] begin[:] call[name[_get_src], parameter[name[tree_base], name[src], name[saltenv]]] if call[name[spec_pathfile].endswith, parameter[constant[.dsc]]] begin[:] for taget[name[efile]] in starred[call[name[os].listdir, parameter[name[tree_base]]]] begin[:] variable[full] assign[=] call[name[os].path.join, parameter[name[tree_base], name[efile]]] variable[trgt] assign[=] call[name[os].path.join, parameter[name[dest_dir], name[efile]]] call[name[shutil].copy, parameter[name[full], name[trgt]]] call[name[ret].append, parameter[name[trgt]]] return[name[ret]] variable[salttarball] assign[=] constant[None] for taget[name[afile]] in starred[call[name[os].listdir, parameter[name[tree_base]]]] begin[:] if <ast.BoolOp object at 
0x7da1b2007970> begin[:] variable[salttarball] assign[=] name[afile] break variable[frontname] assign[=] call[name[salttarball].split, parameter[constant[.tar.gz]]] variable[salttar_name] assign[=] call[name[frontname]][constant[0]] variable[k] assign[=] call[name[salttar_name].rfind, parameter[constant[-]]] variable[debname] assign[=] binary_operation[binary_operation[call[name[salttar_name]][<ast.Slice object at 0x7da1b20044f0>] + constant[_]] + call[name[salttar_name]][<ast.Slice object at 0x7da1b2004580>]] <ast.AugAssign object at 0x7da1b2004460> variable[debname_orig] assign[=] binary_operation[name[debname] + constant[.orig.tar.gz]] variable[abspath_debname] assign[=] call[name[os].path.join, parameter[name[tree_base], name[debname]]] variable[cmd] assign[=] call[constant[tar -xvzf {0}].format, parameter[name[salttarball]]] variable[retrc] assign[=] call[call[name[__salt__]][constant[cmd.retcode]], parameter[name[cmd]]] variable[cmd] assign[=] call[constant[mv {0} {1}].format, parameter[name[salttar_name], name[debname]]] <ast.AugAssign object at 0x7da1b2006770> variable[cmd] assign[=] call[constant[tar -cvzf {0} {1}].format, parameter[call[name[os].path.join, parameter[name[tree_base], name[debname_orig]]], name[debname]]] <ast.AugAssign object at 0x7da1b20073a0> variable[cmd] assign[=] call[constant[rm -f {0}].format, parameter[name[salttarball]]] <ast.AugAssign object at 0x7da1b2006650> variable[cmd] assign[=] call[constant[cp {0} {1}].format, parameter[name[spec_pathfile], name[abspath_debname]]] <ast.AugAssign object at 0x7da1b2005db0> variable[cmd] assign[=] call[constant[tar -xvJf {0}].format, parameter[name[spec_pathfile]]] <ast.AugAssign object at 0x7da1b2006890> variable[cmd] assign[=] call[constant[rm -f {0}].format, parameter[call[name[os].path.basename, parameter[name[spec_pathfile]]]]] <ast.AugAssign object at 0x7da1b2006680> variable[cmd] assign[=] constant[debuild -S -uc -us -sa] <ast.AugAssign object at 0x7da1b2005990> variable[cmd] assign[=] 
call[constant[rm -fR {0}].format, parameter[name[abspath_debname]]] <ast.AugAssign object at 0x7da1b2005750> if compare[name[retrc] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b2005f60> for taget[name[dfile]] in starred[call[name[os].listdir, parameter[name[tree_base]]]] begin[:] if <ast.UnaryOp object at 0x7da1b2004d90> begin[:] variable[full] assign[=] call[name[os].path.join, parameter[name[tree_base], name[dfile]]] variable[trgt] assign[=] call[name[os].path.join, parameter[name[dest_dir], name[dfile]]] call[name[shutil].copy, parameter[name[full], name[trgt]]] call[name[ret].append, parameter[name[trgt]]] return[name[ret]]
keyword[def] identifier[make_src_pkg] ( identifier[dest_dir] , identifier[spec] , identifier[sources] , identifier[env] = keyword[None] , identifier[saltenv] = literal[string] , identifier[runas] = literal[string] ): literal[string] identifier[_create_pbuilders] ( identifier[env] , identifier[runas] ) identifier[tree_base] = identifier[_mk_tree] () identifier[ret] =[] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dest_dir] ): identifier[os] . identifier[makedirs] ( identifier[dest_dir] ) identifier[root_user] = literal[string] identifier[retrc] = literal[int] identifier[cmd] = literal[string] . identifier[format] ( identifier[runas] , identifier[tree_base] ) identifier[retrc] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[runas] = literal[string] ) keyword[if] identifier[retrc] != literal[int] : keyword[raise] identifier[SaltInvocationError] ( literal[string] literal[string] . identifier[format] ( identifier[tree_base] , identifier[retrc] ) ) identifier[cmd] = literal[string] . identifier[format] ( identifier[runas] , identifier[dest_dir] ) identifier[retrc] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[runas] = identifier[root_user] ) keyword[if] identifier[retrc] != literal[int] : keyword[raise] identifier[SaltInvocationError] ( literal[string] literal[string] . identifier[format] ( identifier[dest_dir] , identifier[retrc] ) ) identifier[spec_pathfile] = identifier[_get_spec] ( identifier[tree_base] , identifier[spec] , identifier[saltenv] ) keyword[if] identifier[isinstance] ( identifier[sources] , identifier[six] . identifier[string_types] ): identifier[sources] = identifier[sources] . identifier[split] ( literal[string] ) keyword[for] identifier[src] keyword[in] identifier[sources] : identifier[_get_src] ( identifier[tree_base] , identifier[src] , identifier[saltenv] ) keyword[if] identifier[spec_pathfile] . 
identifier[endswith] ( literal[string] ): keyword[for] identifier[efile] keyword[in] identifier[os] . identifier[listdir] ( identifier[tree_base] ): identifier[full] = identifier[os] . identifier[path] . identifier[join] ( identifier[tree_base] , identifier[efile] ) identifier[trgt] = identifier[os] . identifier[path] . identifier[join] ( identifier[dest_dir] , identifier[efile] ) identifier[shutil] . identifier[copy] ( identifier[full] , identifier[trgt] ) identifier[ret] . identifier[append] ( identifier[trgt] ) keyword[return] identifier[ret] identifier[salttarball] = keyword[None] keyword[for] identifier[afile] keyword[in] identifier[os] . identifier[listdir] ( identifier[tree_base] ): keyword[if] identifier[afile] . identifier[startswith] ( literal[string] ) keyword[and] identifier[afile] . identifier[endswith] ( literal[string] ): identifier[salttarball] = identifier[afile] keyword[break] keyword[else] : keyword[return] identifier[ret] identifier[frontname] = identifier[salttarball] . identifier[split] ( literal[string] ) identifier[salttar_name] = identifier[frontname] [ literal[int] ] identifier[k] = identifier[salttar_name] . identifier[rfind] ( literal[string] ) identifier[debname] = identifier[salttar_name] [: identifier[k] ]+ literal[string] + identifier[salttar_name] [ identifier[k] + literal[int] :] identifier[debname] += literal[string] identifier[debname_orig] = identifier[debname] + literal[string] identifier[abspath_debname] = identifier[os] . identifier[path] . identifier[join] ( identifier[tree_base] , identifier[debname] ) identifier[cmd] = literal[string] . identifier[format] ( identifier[salttarball] ) identifier[retrc] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[cwd] = identifier[tree_base] , identifier[runas] = identifier[root_user] ) identifier[cmd] = literal[string] . 
identifier[format] ( identifier[salttar_name] , identifier[debname] ) identifier[retrc] |= identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[cwd] = identifier[tree_base] , identifier[runas] = identifier[root_user] ) identifier[cmd] = literal[string] . identifier[format] ( identifier[os] . identifier[path] . identifier[join] ( identifier[tree_base] , identifier[debname_orig] ), identifier[debname] ) identifier[retrc] |= identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[cwd] = identifier[tree_base] , identifier[runas] = identifier[root_user] ) identifier[cmd] = literal[string] . identifier[format] ( identifier[salttarball] ) identifier[retrc] |= identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[cwd] = identifier[tree_base] , identifier[runas] = identifier[root_user] , identifier[env] = identifier[env] ) identifier[cmd] = literal[string] . identifier[format] ( identifier[spec_pathfile] , identifier[abspath_debname] ) identifier[retrc] |= identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[cwd] = identifier[abspath_debname] , identifier[runas] = identifier[root_user] ) identifier[cmd] = literal[string] . identifier[format] ( identifier[spec_pathfile] ) identifier[retrc] |= identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[cwd] = identifier[abspath_debname] , identifier[runas] = identifier[root_user] , identifier[env] = identifier[env] ) identifier[cmd] = literal[string] . identifier[format] ( identifier[os] . identifier[path] . 
identifier[basename] ( identifier[spec_pathfile] )) identifier[retrc] |= identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[cwd] = identifier[abspath_debname] , identifier[runas] = identifier[root_user] ) identifier[cmd] = literal[string] identifier[retrc] |= identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[cwd] = identifier[abspath_debname] , identifier[runas] = identifier[root_user] , identifier[python_shell] = keyword[True] , identifier[env] = identifier[env] ) identifier[cmd] = literal[string] . identifier[format] ( identifier[abspath_debname] ) identifier[retrc] |= identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[runas] = identifier[root_user] ) keyword[if] identifier[retrc] != literal[int] : keyword[raise] identifier[SaltInvocationError] ( literal[string] literal[string] . identifier[format] ( identifier[dest_dir] , identifier[spec] , identifier[sources] , identifier[retrc] ) ) keyword[for] identifier[dfile] keyword[in] identifier[os] . identifier[listdir] ( identifier[tree_base] ): keyword[if] keyword[not] identifier[dfile] . identifier[endswith] ( literal[string] ): identifier[full] = identifier[os] . identifier[path] . identifier[join] ( identifier[tree_base] , identifier[dfile] ) identifier[trgt] = identifier[os] . identifier[path] . identifier[join] ( identifier[dest_dir] , identifier[dfile] ) identifier[shutil] . identifier[copy] ( identifier[full] , identifier[trgt] ) identifier[ret] . identifier[append] ( identifier[trgt] ) keyword[return] identifier[ret]
def make_src_pkg(dest_dir, spec, sources, env=None, saltenv='base', runas='root'): """ Create a platform specific source package from the given platform spec/control file and sources CLI Example: **Debian** .. code-block:: bash salt '*' pkgbuild.make_src_pkg /var/www/html/ https://raw.githubusercontent.com/saltstack/libnacl/master/pkg/deb/python-libnacl.control.tar.xz https://pypi.python.org/packages/source/l/libnacl/libnacl-1.3.5.tar.gz This example command should build the libnacl SOURCE package and place it in /var/www/html/ on the minion dest_dir Absolute path for directory to write source package spec Absolute path to spec file or equivalent sources Absolute path to source files to build source package from env : None A list or dictionary of environment variables to be set prior to execution. Example: .. code-block:: yaml - env: - DEB_BUILD_OPTIONS: 'nocheck' .. warning:: The above illustrates a common PyYAML pitfall, that **yes**, **no**, **on**, **off**, **true**, and **false** are all loaded as boolean ``True`` and ``False`` values, and must be enclosed in quotes to be used as strings. More info on this (and other) PyYAML idiosyncrasies can be found :ref:`here <yaml-idiosyncrasies>`. saltenv: base Salt environment variables runas : root .. versionadded:: fluorine User to create the files and directories .. note:: Ensure the user has correct permissions to any files and directories which are to be utilized. 
""" _create_pbuilders(env, runas) tree_base = _mk_tree() ret = [] if not os.path.isdir(dest_dir): os.makedirs(dest_dir) # depends on [control=['if'], data=[]] # ensure directories are writable root_user = 'root' retrc = 0 cmd = 'chown {0}:{0} {1}'.format(runas, tree_base) retrc = __salt__['cmd.retcode'](cmd, runas='root') if retrc != 0: raise SaltInvocationError("make_src_pkg ensuring tree_base '{0}' ownership failed with return error '{1}', check logs for further details".format(tree_base, retrc)) # depends on [control=['if'], data=['retrc']] cmd = 'chown {0}:{0} {1}'.format(runas, dest_dir) retrc = __salt__['cmd.retcode'](cmd, runas=root_user) if retrc != 0: raise SaltInvocationError("make_src_pkg ensuring dest_dir '{0}' ownership failed with return error '{1}', check logs for further details".format(dest_dir, retrc)) # depends on [control=['if'], data=['retrc']] spec_pathfile = _get_spec(tree_base, spec, saltenv) # build salt equivalents from scratch if isinstance(sources, six.string_types): sources = sources.split(',') # depends on [control=['if'], data=[]] for src in sources: _get_src(tree_base, src, saltenv) # depends on [control=['for'], data=['src']] # .dsc then assumes sources already build if spec_pathfile.endswith('.dsc'): for efile in os.listdir(tree_base): full = os.path.join(tree_base, efile) trgt = os.path.join(dest_dir, efile) shutil.copy(full, trgt) ret.append(trgt) # depends on [control=['for'], data=['efile']] return ret # depends on [control=['if'], data=[]] # obtain name of 'python setup.py sdist' generated tarball, extract the version # and manipulate the name for debian use (convert minix and add '+ds') salttarball = None for afile in os.listdir(tree_base): if afile.startswith('salt-') and afile.endswith('.tar.gz'): salttarball = afile break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['afile']] else: return ret frontname = salttarball.split('.tar.gz') salttar_name = frontname[0] k = salttar_name.rfind('-') 
debname = salttar_name[:k] + '_' + salttar_name[k + 1:] debname += '+ds' debname_orig = debname + '.orig.tar.gz' abspath_debname = os.path.join(tree_base, debname) cmd = 'tar -xvzf {0}'.format(salttarball) retrc = __salt__['cmd.retcode'](cmd, cwd=tree_base, runas=root_user) cmd = 'mv {0} {1}'.format(salttar_name, debname) retrc |= __salt__['cmd.retcode'](cmd, cwd=tree_base, runas=root_user) cmd = 'tar -cvzf {0} {1}'.format(os.path.join(tree_base, debname_orig), debname) retrc |= __salt__['cmd.retcode'](cmd, cwd=tree_base, runas=root_user) cmd = 'rm -f {0}'.format(salttarball) retrc |= __salt__['cmd.retcode'](cmd, cwd=tree_base, runas=root_user, env=env) cmd = 'cp {0} {1}'.format(spec_pathfile, abspath_debname) retrc |= __salt__['cmd.retcode'](cmd, cwd=abspath_debname, runas=root_user) cmd = 'tar -xvJf {0}'.format(spec_pathfile) retrc |= __salt__['cmd.retcode'](cmd, cwd=abspath_debname, runas=root_user, env=env) cmd = 'rm -f {0}'.format(os.path.basename(spec_pathfile)) retrc |= __salt__['cmd.retcode'](cmd, cwd=abspath_debname, runas=root_user) cmd = 'debuild -S -uc -us -sa' retrc |= __salt__['cmd.retcode'](cmd, cwd=abspath_debname, runas=root_user, python_shell=True, env=env) cmd = 'rm -fR {0}'.format(abspath_debname) retrc |= __salt__['cmd.retcode'](cmd, runas=root_user) if retrc != 0: raise SaltInvocationError('Make source package for destination directory {0}, spec {1}, sources {2}, failed with return error {3}, check logs for further details'.format(dest_dir, spec, sources, retrc)) # depends on [control=['if'], data=['retrc']] for dfile in os.listdir(tree_base): if not dfile.endswith('.build'): full = os.path.join(tree_base, dfile) trgt = os.path.join(dest_dir, dfile) shutil.copy(full, trgt) ret.append(trgt) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dfile']] return ret
def find_uncommitted_filefields(sender, instance, **kwargs):
    """
    A pre_save signal handler which attaches an attribute to the model
    instance containing all uncommitted ``FileField``s, which can then be
    used by the :func:`signal_committed_filefields` post_save handler.
    """
    # Collected names are shared with the post_save handler via the instance.
    pending = []
    instance._uncommitted_filefields = pending

    candidates = sender._meta.fields
    update_fields = kwargs.get('update_fields', None)
    if update_fields:
        # A partial save only commits the listed fields.
        candidates = set(update_fields).intersection(candidates)

    for model_field in candidates:
        if not isinstance(model_field, FileField):
            continue
        value = getattr(instance, model_field.name)
        if value and not value._committed:
            pending.append(model_field.name)
def function[find_uncommitted_filefields, parameter[sender, instance]]: constant[ A pre_save signal handler which attaches an attribute to the model instance containing all uncommitted ``FileField``s, which can then be used by the :func:`signal_committed_filefields` post_save handler. ] variable[uncommitted] assign[=] list[[]] variable[fields] assign[=] name[sender]._meta.fields if call[name[kwargs].get, parameter[constant[update_fields], constant[None]]] begin[:] variable[update_fields] assign[=] call[name[set], parameter[call[name[kwargs]][constant[update_fields]]]] variable[fields] assign[=] call[name[update_fields].intersection, parameter[name[fields]]] for taget[name[field]] in starred[name[fields]] begin[:] if call[name[isinstance], parameter[name[field], name[FileField]]] begin[:] variable[fieldfile] assign[=] call[name[getattr], parameter[name[instance], name[field].name]] if <ast.BoolOp object at 0x7da18f812800> begin[:] call[name[uncommitted].append, parameter[name[field].name]]
keyword[def] identifier[find_uncommitted_filefields] ( identifier[sender] , identifier[instance] ,** identifier[kwargs] ): literal[string] identifier[uncommitted] = identifier[instance] . identifier[_uncommitted_filefields] =[] identifier[fields] = identifier[sender] . identifier[_meta] . identifier[fields] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] , keyword[None] ): identifier[update_fields] = identifier[set] ( identifier[kwargs] [ literal[string] ]) identifier[fields] = identifier[update_fields] . identifier[intersection] ( identifier[fields] ) keyword[for] identifier[field] keyword[in] identifier[fields] : keyword[if] identifier[isinstance] ( identifier[field] , identifier[FileField] ): identifier[fieldfile] = identifier[getattr] ( identifier[instance] , identifier[field] . identifier[name] ) keyword[if] identifier[fieldfile] keyword[and] keyword[not] identifier[fieldfile] . identifier[_committed] : identifier[uncommitted] . identifier[append] ( identifier[field] . identifier[name] )
def find_uncommitted_filefields(sender, instance, **kwargs): """ A pre_save signal handler which attaches an attribute to the model instance containing all uncommitted ``FileField``s, which can then be used by the :func:`signal_committed_filefields` post_save handler. """ uncommitted = instance._uncommitted_filefields = [] fields = sender._meta.fields if kwargs.get('update_fields', None): update_fields = set(kwargs['update_fields']) fields = update_fields.intersection(fields) # depends on [control=['if'], data=[]] for field in fields: if isinstance(field, FileField): fieldfile = getattr(instance, field.name) if fieldfile and (not fieldfile._committed): uncommitted.append(field.name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
def search_pages(self, page_ids=None, begin=0, count=10):
    """
    Query the list of shake-around pages.

    Details: http://mp.weixin.qq.com/wiki/5/6626199ea8757c752046d8e46cf13251.html

    :param page_ids: id(s) of specific pages to look up; when omitted the
                     full listing is paginated via ``begin``/``count``
    :param begin: start index into the page listing
    :param count: number of pages to query
    :return: page query result information
    """
    if page_ids:
        # Type 1: look up specific pages; a single id is wrapped in a list.
        ids = page_ids if isinstance(page_ids, (tuple, list)) else [page_ids]
        payload = {
            'type': 1,
            'page_ids': ids
        }
    else:
        # Type 2: paginated listing of all pages.
        payload = {
            'type': 2,
            'begin': begin,
            'count': count
        }
    return self._post(
        'shakearound/page/search',
        data=payload,
        result_processor=lambda res: res['data']
    )
def function[search_pages, parameter[self, page_ids, begin, count]]: constant[ 查询页面列表 详情请参考 http://mp.weixin.qq.com/wiki/5/6626199ea8757c752046d8e46cf13251.html :param page_ids: 指定页面的id列表 :param begin: 页面列表的起始索引值 :param count: 待查询的页面个数 :return: 页面查询结果信息 ] if <ast.UnaryOp object at 0x7da1b1f4b430> begin[:] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f49360>, <ast.Constant object at 0x7da1b1f48700>, <ast.Constant object at 0x7da1b1f48550>], [<ast.Constant object at 0x7da1b1f49c90>, <ast.Name object at 0x7da1b1f49690>, <ast.Name object at 0x7da1b1f49bd0>]] variable[res] assign[=] call[name[self]._post, parameter[constant[shakearound/page/search]]] return[name[res]]
keyword[def] identifier[search_pages] ( identifier[self] , identifier[page_ids] = keyword[None] , identifier[begin] = literal[int] , identifier[count] = literal[int] ): literal[string] keyword[if] keyword[not] identifier[page_ids] : identifier[data] ={ literal[string] : literal[int] , literal[string] : identifier[begin] , literal[string] : identifier[count] } keyword[else] : keyword[if] keyword[not] identifier[isinstance] ( identifier[page_ids] ,( identifier[tuple] , identifier[list] )): identifier[page_ids] =[ identifier[page_ids] ] identifier[data] ={ literal[string] : literal[int] , literal[string] : identifier[page_ids] } identifier[res] = identifier[self] . identifier[_post] ( literal[string] , identifier[data] = identifier[data] , identifier[result_processor] = keyword[lambda] identifier[x] : identifier[x] [ literal[string] ] ) keyword[return] identifier[res]
def search_pages(self, page_ids=None, begin=0, count=10): """ 查询页面列表 详情请参考 http://mp.weixin.qq.com/wiki/5/6626199ea8757c752046d8e46cf13251.html :param page_ids: 指定页面的id列表 :param begin: 页面列表的起始索引值 :param count: 待查询的页面个数 :return: 页面查询结果信息 """ if not page_ids: data = {'type': 2, 'begin': begin, 'count': count} # depends on [control=['if'], data=[]] else: if not isinstance(page_ids, (tuple, list)): page_ids = [page_ids] # depends on [control=['if'], data=[]] data = {'type': 1, 'page_ids': page_ids} res = self._post('shakearound/page/search', data=data, result_processor=lambda x: x['data']) return res
def delete_lifecycle(self, policy=None, params=None):
    """
    Remove an index lifecycle policy.

    `<https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-delete-lifecycle.html>`_

    :arg policy: The name of the index lifecycle policy
    """
    endpoint = _make_path("_ilm", "policy", policy)
    return self.transport.perform_request("DELETE", endpoint, params=params)
def function[delete_lifecycle, parameter[self, policy, params]]: constant[ `<https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-delete-lifecycle.html>`_ :arg policy: The name of the index lifecycle policy ] return[call[name[self].transport.perform_request, parameter[constant[DELETE], call[name[_make_path], parameter[constant[_ilm], constant[policy], name[policy]]]]]]
keyword[def] identifier[delete_lifecycle] ( identifier[self] , identifier[policy] = keyword[None] , identifier[params] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[transport] . identifier[perform_request] ( literal[string] , identifier[_make_path] ( literal[string] , literal[string] , identifier[policy] ), identifier[params] = identifier[params] )
def delete_lifecycle(self, policy=None, params=None): """ `<https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-delete-lifecycle.html>`_ :arg policy: The name of the index lifecycle policy """ return self.transport.perform_request('DELETE', _make_path('_ilm', 'policy', policy), params=params)
def wait_until_page_contains_elements(self, timeout, *locators):
    """This is a copy of `Wait Until Page Contains Element` but it allows
    multiple arguments in order to wait for more than one element.

    | *Argument* | *Description* | *Example* |
    | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s |
    | *locators | Selenium 2 element locator(s) | id=MyId |"""
    # Delegate to the generic retry loop, polling all locators at once.
    wait_callable = self._wait_for_elements
    self._wait_until_no_error(timeout, wait_callable, locators)
def function[wait_until_page_contains_elements, parameter[self, timeout]]: constant[This is a copy of `Wait Until Page Contains Element` but it allows multiple arguments in order to wait for more than one element. | *Argument* | *Description* | *Example* | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s | | *locators | Selenium 2 element locator(s) | id=MyId |] call[name[self]._wait_until_no_error, parameter[name[timeout], name[self]._wait_for_elements, name[locators]]]
keyword[def] identifier[wait_until_page_contains_elements] ( identifier[self] , identifier[timeout] ,* identifier[locators] ): literal[string] identifier[self] . identifier[_wait_until_no_error] ( identifier[timeout] , identifier[self] . identifier[_wait_for_elements] , identifier[locators] )
def wait_until_page_contains_elements(self, timeout, *locators): """This is a copy of `Wait Until Page Contains Element` but it allows multiple arguments in order to wait for more than one element. | *Argument* | *Description* | *Example* | | timeout | maximum time to wait, if set to ${None} it will use Selenium's default timeout | 5s | | *locators | Selenium 2 element locator(s) | id=MyId |""" self._wait_until_no_error(timeout, self._wait_for_elements, locators)
def dump_table(data: List[dict], fieldnames: Sequence[str]) -> str:
    """Render ``data`` as a Markdown-style pipe table.

    :param data: rows; each dict is looked up with every name in ``fieldnames``
        (a missing key renders as the string ``None``)
    :param fieldnames: ordered column names, used for the header row and for
        the per-row lookups
    :return: Table string: header line, dash separator line, then one line
        per row of ``data``
    """
    # Minimum column width of 3 keeps the '---' separator row well formed.
    def min3(num: int) -> int:
        return 3 if num < 4 else num

    # Display width per column: the widest cell value or the header itself,
    # whichever is larger.  NOTE(review): string_width presumably counts
    # double-width (east-asian) characters as 2 columns -- confirm against
    # its definition.
    width_by_col: Dict[str, int] = {
        f: min3(max([string_width(str(d.get(f))) for d in data] + [string_width(f)]))
        for f in fieldnames
    }

    def fill_spaces(word: str, width: int, center=False):
        """Pad ``word`` to ``width`` plus one space of margin each side, e.g. aaa, 4 => ' aaa '."""
        to_fills: int = width - string_width(word)
        # When centering, the padding is split; any odd leftover space goes
        # to the right-hand side (floor left, ceil right).
        return f" {' ' * floor(to_fills / 2)}{word}{' ' * ceil(to_fills / 2)} " if center \
            else f" {word}{' ' * to_fills} "

    def to_record(r: dict) -> str:
        # One '|'-delimited table row, cells in fieldnames order.
        return f"|{'|'.join([fill_spaces(str(r.get(f)), width_by_col.get(f)) for f in fieldnames])}|"

    # Header (centered), separator dashes sized to each column, data rows.
    return f"""
|{'|'.join([fill_spaces(x, width_by_col.get(x), center=True) for x in fieldnames])}|
|{'|'.join([fill_spaces(width_by_col.get(x) * "-", width_by_col.get(x)) for x in fieldnames])}|
{os.linesep.join([to_record(x) for x in data])}
""".lstrip()
def function[dump_table, parameter[data, fieldnames]]: constant[ :param data: :param fieldnames: :return: Table string ] def function[min3, parameter[num]]: return[<ast.IfExp object at 0x7da20c993640>] <ast.AnnAssign object at 0x7da20c990880> def function[fill_spaces, parameter[word, width, center]]: constant[ aaa, 4 => ' aaa ' ] <ast.AnnAssign object at 0x7da20e9555d0> return[<ast.IfExp object at 0x7da20e954fa0>] def function[to_record, parameter[r]]: return[<ast.JoinedStr object at 0x7da18c4cf100>] return[call[<ast.JoinedStr object at 0x7da18c4cc130>.lstrip, parameter[]]]
keyword[def] identifier[dump_table] ( identifier[data] : identifier[List] [ identifier[dict] ], identifier[fieldnames] : identifier[Sequence] [ identifier[str] ])-> identifier[str] : literal[string] keyword[def] identifier[min3] ( identifier[num] : identifier[int] )-> identifier[int] : keyword[return] literal[int] keyword[if] identifier[num] < literal[int] keyword[else] identifier[num] identifier[width_by_col] : identifier[Dict] [ identifier[str] , identifier[int] ]={ identifier[f] : identifier[min3] ( identifier[max] ([ identifier[string_width] ( identifier[str] ( identifier[d] . identifier[get] ( identifier[f] ))) keyword[for] identifier[d] keyword[in] identifier[data] ]+[ identifier[string_width] ( identifier[f] )])) keyword[for] identifier[f] keyword[in] identifier[fieldnames] } keyword[def] identifier[fill_spaces] ( identifier[word] : identifier[str] , identifier[width] : identifier[int] , identifier[center] = keyword[False] ): literal[string] identifier[to_fills] : identifier[int] = identifier[width] - identifier[string_width] ( identifier[word] ) keyword[return] literal[string] keyword[if] identifier[center] keyword[else] literal[string] keyword[def] identifier[to_record] ( identifier[r] : identifier[dict] )-> identifier[str] : keyword[return] literal[string] keyword[return] literal[string] . identifier[lstrip] ()
def dump_table(data: List[dict], fieldnames: Sequence[str]) -> str: """ :param data: :param fieldnames: :return: Table string """ def min3(num: int) -> int: return 3 if num < 4 else num width_by_col: Dict[str, int] = {f: min3(max([string_width(str(d.get(f))) for d in data] + [string_width(f)])) for f in fieldnames} def fill_spaces(word: str, width: int, center=False): """ aaa, 4 => ' aaa ' """ to_fills: int = width - string_width(word) return f" {' ' * floor(to_fills / 2)}{word}{' ' * ceil(to_fills / 2)} " if center else f" {word}{' ' * to_fills} " def to_record(r: dict) -> str: return f"|{'|'.join([fill_spaces(str(r.get(f)), width_by_col.get(f)) for f in fieldnames])}|" return f"\n|{'|'.join([fill_spaces(x, width_by_col.get(x), center=True) for x in fieldnames])}|\n|{'|'.join([fill_spaces(width_by_col.get(x) * '-', width_by_col.get(x)) for x in fieldnames])}|\n{os.linesep.join([to_record(x) for x in data])}\n".lstrip()
def functions(self):
    """
    Returns all documented module level functions in the module
    sorted alphabetically as a list of `pydoc.Function`.
    """
    return sorted(obj for obj in self.doc.values()
                  if isinstance(obj, Function) and self._docfilter(obj))
def function[functions, parameter[self]]: constant[ Returns all documented module level functions in the module sorted alphabetically as a list of `pydoc.Function`. ] variable[p] assign[=] <ast.Lambda object at 0x7da20cabef50> return[call[name[sorted], parameter[call[name[filter], parameter[name[p], call[name[self].doc.values, parameter[]]]]]]]
keyword[def] identifier[functions] ( identifier[self] ): literal[string] identifier[p] = keyword[lambda] identifier[o] : identifier[isinstance] ( identifier[o] , identifier[Function] ) keyword[and] identifier[self] . identifier[_docfilter] ( identifier[o] ) keyword[return] identifier[sorted] ( identifier[filter] ( identifier[p] , identifier[self] . identifier[doc] . identifier[values] ()))
def functions(self): """ Returns all documented module level functions in the module sorted alphabetically as a list of `pydoc.Function`. """ p = lambda o: isinstance(o, Function) and self._docfilter(o) return sorted(filter(p, self.doc.values()))
def FindProxies():
  """Tries to find proxies by interrogating all the user's settings.

  This function is a modified urillib.getproxies_registry() from the
  standard library. We just store the proxy value in the environment for
  urllib to find it.

  TODO(user): Iterate through all the possible values if one proxy fails,
  in case more than one proxy is specified in different users profiles.

  Returns:
    A list of proxies.
  """
  found = []
  for index in range(0, 100):
    # Walk every loaded user hive; EnumKey raises OSError past the last SID.
    try:
      sid = winreg.EnumKey(winreg.HKEY_USERS, index)
    except OSError:
      break

    try:
      settings_key = winreg.OpenKey(
          winreg.HKEY_USERS,
          sid + "\\Software\\Microsoft\\Windows"
          "\\CurrentVersion\\Internet Settings")
      enabled = winreg.QueryValueEx(settings_key, "ProxyEnable")[0]
      if enabled:
        # Returned as Unicode but problems if not converted to ASCII
        server = str(winreg.QueryValueEx(settings_key, "ProxyServer")[0])
        if "=" not in server:
          # Use one setting for all protocols
          if server[:5] == "http:":
            found.append(server)
          else:
            found.append("http://%s" % server)
        else:
          # Per-protocol settings
          for entry in server.split(";"):
            protocol, address = entry.split("=", 1)
            # See if address has a type:// prefix
            if not re.match("^([^/:]+)://", address):
              address = "%s://%s" % (protocol, address)
            found.append(address)
      settings_key.Close()
    except (OSError, ValueError, TypeError):
      # Skip users without proxy settings (or with malformed values).
      continue

  logging.debug("Found proxy servers: %s", found)
  return found
def function[FindProxies, parameter[]]: constant[Tries to find proxies by interrogating all the user's settings. This function is a modified urillib.getproxies_registry() from the standard library. We just store the proxy value in the environment for urllib to find it. TODO(user): Iterate through all the possible values if one proxy fails, in case more than one proxy is specified in different users profiles. Returns: A list of proxies. ] variable[proxies] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[100]]]] begin[:] <ast.Try object at 0x7da20cabc7c0> <ast.Try object at 0x7da1b1b06350> call[name[logging].debug, parameter[constant[Found proxy servers: %s], name[proxies]]] return[name[proxies]]
keyword[def] identifier[FindProxies] (): literal[string] identifier[proxies] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ): keyword[try] : identifier[sid] = identifier[winreg] . identifier[EnumKey] ( identifier[winreg] . identifier[HKEY_USERS] , identifier[i] ) keyword[except] identifier[OSError] : keyword[break] keyword[try] : identifier[subkey] =( identifier[sid] + literal[string] literal[string] ) identifier[internet_settings] = identifier[winreg] . identifier[OpenKey] ( identifier[winreg] . identifier[HKEY_USERS] , identifier[subkey] ) identifier[proxy_enable] = identifier[winreg] . identifier[QueryValueEx] ( identifier[internet_settings] , literal[string] )[ literal[int] ] keyword[if] identifier[proxy_enable] : identifier[proxy_server] = identifier[str] ( identifier[winreg] . identifier[QueryValueEx] ( identifier[internet_settings] , literal[string] )[ literal[int] ]) keyword[if] literal[string] keyword[in] identifier[proxy_server] : keyword[for] identifier[p] keyword[in] identifier[proxy_server] . identifier[split] ( literal[string] ): identifier[protocol] , identifier[address] = identifier[p] . identifier[split] ( literal[string] , literal[int] ) keyword[if] keyword[not] identifier[re] . identifier[match] ( literal[string] , identifier[address] ): identifier[address] = literal[string] %( identifier[protocol] , identifier[address] ) identifier[proxies] . identifier[append] ( identifier[address] ) keyword[else] : keyword[if] identifier[proxy_server] [: literal[int] ]== literal[string] : identifier[proxies] . identifier[append] ( identifier[proxy_server] ) keyword[else] : identifier[proxies] . identifier[append] ( literal[string] % identifier[proxy_server] ) identifier[internet_settings] . identifier[Close] () keyword[except] ( identifier[OSError] , identifier[ValueError] , identifier[TypeError] ): keyword[continue] identifier[logging] . 
identifier[debug] ( literal[string] , identifier[proxies] ) keyword[return] identifier[proxies]
def FindProxies(): """Tries to find proxies by interrogating all the user's settings. This function is a modified urillib.getproxies_registry() from the standard library. We just store the proxy value in the environment for urllib to find it. TODO(user): Iterate through all the possible values if one proxy fails, in case more than one proxy is specified in different users profiles. Returns: A list of proxies. """ proxies = [] for i in range(0, 100): try: sid = winreg.EnumKey(winreg.HKEY_USERS, i) # depends on [control=['try'], data=[]] except OSError: break # depends on [control=['except'], data=[]] try: subkey = sid + '\\Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings' internet_settings = winreg.OpenKey(winreg.HKEY_USERS, subkey) proxy_enable = winreg.QueryValueEx(internet_settings, 'ProxyEnable')[0] if proxy_enable: # Returned as Unicode but problems if not converted to ASCII proxy_server = str(winreg.QueryValueEx(internet_settings, 'ProxyServer')[0]) if '=' in proxy_server: # Per-protocol settings for p in proxy_server.split(';'): (protocol, address) = p.split('=', 1) # See if address has a type:// prefix if not re.match('^([^/:]+)://', address): address = '%s://%s' % (protocol, address) # depends on [control=['if'], data=[]] proxies.append(address) # depends on [control=['for'], data=['p']] # depends on [control=['if'], data=['proxy_server']] # Use one setting for all protocols elif proxy_server[:5] == 'http:': proxies.append(proxy_server) # depends on [control=['if'], data=[]] else: proxies.append('http://%s' % proxy_server) # depends on [control=['if'], data=[]] internet_settings.Close() # depends on [control=['try'], data=[]] except (OSError, ValueError, TypeError): continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']] logging.debug('Found proxy servers: %s', proxies) return proxies
def explode_dn(dn, notypes=0, flags=0):
    """
    explode_dn(dn [, notypes=0]) -> list

    This function takes a DN and breaks it up into its component parts.
    The notypes parameter is used to specify that only the component's
    attribute values be returned and not the attribute types.
    """
    if not dn:
        return []
    exploded = []
    for rdn in str2dn(dn, flags):
        # Multi-valued RDNs join their attribute/value pairs with '+'.
        parts = []
        for atype, avalue, dummy in rdn:
            value = escape_dn_chars(avalue or '')
            parts.append(value if notypes else '='.join((atype, value)))
        exploded.append('+'.join(parts))
    return exploded
def function[explode_dn, parameter[dn, notypes, flags]]: constant[ explode_dn(dn [, notypes=0]) -> list This function takes a DN and breaks it up into its component parts. The notypes parameter is used to specify that only the component's attribute values be returned and not the attribute types. ] if <ast.UnaryOp object at 0x7da1b1122ad0> begin[:] return[list[[]]] variable[dn_decomp] assign[=] call[name[str2dn], parameter[name[dn], name[flags]]] variable[rdn_list] assign[=] list[[]] for taget[name[rdn]] in starred[name[dn_decomp]] begin[:] if name[notypes] begin[:] call[name[rdn_list].append, parameter[call[constant[+].join, parameter[<ast.ListComp object at 0x7da1b1120400>]]]] return[name[rdn_list]]
keyword[def] identifier[explode_dn] ( identifier[dn] , identifier[notypes] = literal[int] , identifier[flags] = literal[int] ): literal[string] keyword[if] keyword[not] identifier[dn] : keyword[return] [] identifier[dn_decomp] = identifier[str2dn] ( identifier[dn] , identifier[flags] ) identifier[rdn_list] =[] keyword[for] identifier[rdn] keyword[in] identifier[dn_decomp] : keyword[if] identifier[notypes] : identifier[rdn_list] . identifier[append] ( literal[string] . identifier[join] ([ identifier[escape_dn_chars] ( identifier[avalue] keyword[or] literal[string] ) keyword[for] identifier[atype] , identifier[avalue] , identifier[dummy] keyword[in] identifier[rdn] ])) keyword[else] : identifier[rdn_list] . identifier[append] ( literal[string] . identifier[join] ([ literal[string] . identifier[join] (( identifier[atype] , identifier[escape_dn_chars] ( identifier[avalue] keyword[or] literal[string] ))) keyword[for] identifier[atype] , identifier[avalue] , identifier[dummy] keyword[in] identifier[rdn] ])) keyword[return] identifier[rdn_list]
def explode_dn(dn, notypes=0, flags=0): """ explode_dn(dn [, notypes=0]) -> list This function takes a DN and breaks it up into its component parts. The notypes parameter is used to specify that only the component's attribute values be returned and not the attribute types. """ if not dn: return [] # depends on [control=['if'], data=[]] dn_decomp = str2dn(dn, flags) rdn_list = [] for rdn in dn_decomp: if notypes: rdn_list.append('+'.join([escape_dn_chars(avalue or '') for (atype, avalue, dummy) in rdn])) # depends on [control=['if'], data=[]] else: rdn_list.append('+'.join(['='.join((atype, escape_dn_chars(avalue or ''))) for (atype, avalue, dummy) in rdn])) # depends on [control=['for'], data=['rdn']] return rdn_list
def _set_missing_to_none(self, currency):
    """Fill missing rates of a currency with the closest available ones."""
    rates = self._rates[currency]
    first_date, last_date = self.bounds[currency]
    # Mark every uncovered day explicitly so later lookups see None rather
    # than KeyError; existing rates are left untouched.
    for day in list_dates_between(first_date, last_date):
        rates.setdefault(day, None)

    if self.verbose:
        missing = sum(1 for rate in itervalues(rates) if rate is None)
        if missing:
            print('{0}: {1} missing rates from {2} to {3} ({4} days)'.format(
                currency, missing, first_date, last_date,
                1 + (last_date - first_date).days))
def function[_set_missing_to_none, parameter[self, currency]]: constant[Fill missing rates of a currency with the closest available ones.] variable[rates] assign[=] call[name[self]._rates][name[currency]] <ast.Tuple object at 0x7da1b03baa10> assign[=] call[name[self].bounds][name[currency]] for taget[name[date]] in starred[call[name[list_dates_between], parameter[name[first_date], name[last_date]]]] begin[:] if compare[name[date] <ast.NotIn object at 0x7da2590d7190> name[rates]] begin[:] call[name[rates]][name[date]] assign[=] constant[None] if name[self].verbose begin[:] variable[missing] assign[=] call[name[len], parameter[<ast.ListComp object at 0x7da1b038b7f0>]] if name[missing] begin[:] call[name[print], parameter[call[constant[{0}: {1} missing rates from {2} to {3} ({4} days)].format, parameter[name[currency], name[missing], name[first_date], name[last_date], binary_operation[constant[1] + binary_operation[name[last_date] - name[first_date]].days]]]]]
keyword[def] identifier[_set_missing_to_none] ( identifier[self] , identifier[currency] ): literal[string] identifier[rates] = identifier[self] . identifier[_rates] [ identifier[currency] ] identifier[first_date] , identifier[last_date] = identifier[self] . identifier[bounds] [ identifier[currency] ] keyword[for] identifier[date] keyword[in] identifier[list_dates_between] ( identifier[first_date] , identifier[last_date] ): keyword[if] identifier[date] keyword[not] keyword[in] identifier[rates] : identifier[rates] [ identifier[date] ]= keyword[None] keyword[if] identifier[self] . identifier[verbose] : identifier[missing] = identifier[len] ([ identifier[r] keyword[for] identifier[r] keyword[in] identifier[itervalues] ( identifier[rates] ) keyword[if] identifier[r] keyword[is] keyword[None] ]) keyword[if] identifier[missing] : identifier[print] ( literal[string] . identifier[format] ( identifier[currency] , identifier[missing] , identifier[first_date] , identifier[last_date] , literal[int] +( identifier[last_date] - identifier[first_date] ). identifier[days] ))
def _set_missing_to_none(self, currency): """Fill missing rates of a currency with the closest available ones.""" rates = self._rates[currency] (first_date, last_date) = self.bounds[currency] for date in list_dates_between(first_date, last_date): if date not in rates: rates[date] = None # depends on [control=['if'], data=['date', 'rates']] # depends on [control=['for'], data=['date']] if self.verbose: missing = len([r for r in itervalues(rates) if r is None]) if missing: print('{0}: {1} missing rates from {2} to {3} ({4} days)'.format(currency, missing, first_date, last_date, 1 + (last_date - first_date).days)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def prt_outfiles_grouped(self, outfiles): """Write to outfiles.""" for outfile in outfiles: if outfile.endswith(".xlsx"): self.wr_xlsx(outfile) elif outfile.endswith(".txt"): self.wr_txt(outfile) else: self.wr_tsv(outfile)
def function[prt_outfiles_grouped, parameter[self, outfiles]]: constant[Write to outfiles.] for taget[name[outfile]] in starred[name[outfiles]] begin[:] if call[name[outfile].endswith, parameter[constant[.xlsx]]] begin[:] call[name[self].wr_xlsx, parameter[name[outfile]]]
keyword[def] identifier[prt_outfiles_grouped] ( identifier[self] , identifier[outfiles] ): literal[string] keyword[for] identifier[outfile] keyword[in] identifier[outfiles] : keyword[if] identifier[outfile] . identifier[endswith] ( literal[string] ): identifier[self] . identifier[wr_xlsx] ( identifier[outfile] ) keyword[elif] identifier[outfile] . identifier[endswith] ( literal[string] ): identifier[self] . identifier[wr_txt] ( identifier[outfile] ) keyword[else] : identifier[self] . identifier[wr_tsv] ( identifier[outfile] )
def prt_outfiles_grouped(self, outfiles): """Write to outfiles.""" for outfile in outfiles: if outfile.endswith('.xlsx'): self.wr_xlsx(outfile) # depends on [control=['if'], data=[]] elif outfile.endswith('.txt'): self.wr_txt(outfile) # depends on [control=['if'], data=[]] else: self.wr_tsv(outfile) # depends on [control=['for'], data=['outfile']]
def _get_alternates(self): """The list of alternates for this repo from which objects can be retrieved :return: list of strings being pathnames of alternates""" alternates_path = osp.join(self.git_dir, 'objects', 'info', 'alternates') if osp.exists(alternates_path): with open(alternates_path, 'rb') as f: alts = f.read().decode(defenc) return alts.strip().splitlines() else: return []
def function[_get_alternates, parameter[self]]: constant[The list of alternates for this repo from which objects can be retrieved :return: list of strings being pathnames of alternates] variable[alternates_path] assign[=] call[name[osp].join, parameter[name[self].git_dir, constant[objects], constant[info], constant[alternates]]] if call[name[osp].exists, parameter[name[alternates_path]]] begin[:] with call[name[open], parameter[name[alternates_path], constant[rb]]] begin[:] variable[alts] assign[=] call[call[name[f].read, parameter[]].decode, parameter[name[defenc]]] return[call[call[name[alts].strip, parameter[]].splitlines, parameter[]]]
keyword[def] identifier[_get_alternates] ( identifier[self] ): literal[string] identifier[alternates_path] = identifier[osp] . identifier[join] ( identifier[self] . identifier[git_dir] , literal[string] , literal[string] , literal[string] ) keyword[if] identifier[osp] . identifier[exists] ( identifier[alternates_path] ): keyword[with] identifier[open] ( identifier[alternates_path] , literal[string] ) keyword[as] identifier[f] : identifier[alts] = identifier[f] . identifier[read] (). identifier[decode] ( identifier[defenc] ) keyword[return] identifier[alts] . identifier[strip] (). identifier[splitlines] () keyword[else] : keyword[return] []
def _get_alternates(self): """The list of alternates for this repo from which objects can be retrieved :return: list of strings being pathnames of alternates""" alternates_path = osp.join(self.git_dir, 'objects', 'info', 'alternates') if osp.exists(alternates_path): with open(alternates_path, 'rb') as f: alts = f.read().decode(defenc) # depends on [control=['with'], data=['f']] return alts.strip().splitlines() # depends on [control=['if'], data=[]] else: return []
def calculate_sleep_time(attempt, delay_factor=5.0, randomization_factor=.5, max_delay=120): """Calculate the sleep time between retries, in seconds. Based off of `taskcluster.utils.calculateSleepTime`, but with kwargs instead of constant `delay_factor`/`randomization_factor`/`max_delay`. The taskcluster function generally slept for less than a second, which didn't always get past server issues. Args: attempt (int): the retry attempt number delay_factor (float, optional): a multiplier for the delay time. Defaults to 5. randomization_factor (float, optional): a randomization multiplier for the delay time. Defaults to .5. max_delay (float, optional): the max delay to sleep. Defaults to 120 (seconds). Returns: float: the time to sleep, in seconds. """ if attempt <= 0: return 0 # We subtract one to get exponents: 1, 2, 3, 4, 5, .. delay = float(2 ** (attempt - 1)) * float(delay_factor) # Apply randomization factor. Only increase the delay here. delay = delay * (randomization_factor * random.random() + 1) # Always limit with a maximum delay return min(delay, max_delay)
def function[calculate_sleep_time, parameter[attempt, delay_factor, randomization_factor, max_delay]]: constant[Calculate the sleep time between retries, in seconds. Based off of `taskcluster.utils.calculateSleepTime`, but with kwargs instead of constant `delay_factor`/`randomization_factor`/`max_delay`. The taskcluster function generally slept for less than a second, which didn't always get past server issues. Args: attempt (int): the retry attempt number delay_factor (float, optional): a multiplier for the delay time. Defaults to 5. randomization_factor (float, optional): a randomization multiplier for the delay time. Defaults to .5. max_delay (float, optional): the max delay to sleep. Defaults to 120 (seconds). Returns: float: the time to sleep, in seconds. ] if compare[name[attempt] less_or_equal[<=] constant[0]] begin[:] return[constant[0]] variable[delay] assign[=] binary_operation[call[name[float], parameter[binary_operation[constant[2] ** binary_operation[name[attempt] - constant[1]]]]] * call[name[float], parameter[name[delay_factor]]]] variable[delay] assign[=] binary_operation[name[delay] * binary_operation[binary_operation[name[randomization_factor] * call[name[random].random, parameter[]]] + constant[1]]] return[call[name[min], parameter[name[delay], name[max_delay]]]]
keyword[def] identifier[calculate_sleep_time] ( identifier[attempt] , identifier[delay_factor] = literal[int] , identifier[randomization_factor] = literal[int] , identifier[max_delay] = literal[int] ): literal[string] keyword[if] identifier[attempt] <= literal[int] : keyword[return] literal[int] identifier[delay] = identifier[float] ( literal[int] **( identifier[attempt] - literal[int] ))* identifier[float] ( identifier[delay_factor] ) identifier[delay] = identifier[delay] *( identifier[randomization_factor] * identifier[random] . identifier[random] ()+ literal[int] ) keyword[return] identifier[min] ( identifier[delay] , identifier[max_delay] )
def calculate_sleep_time(attempt, delay_factor=5.0, randomization_factor=0.5, max_delay=120): """Calculate the sleep time between retries, in seconds. Based off of `taskcluster.utils.calculateSleepTime`, but with kwargs instead of constant `delay_factor`/`randomization_factor`/`max_delay`. The taskcluster function generally slept for less than a second, which didn't always get past server issues. Args: attempt (int): the retry attempt number delay_factor (float, optional): a multiplier for the delay time. Defaults to 5. randomization_factor (float, optional): a randomization multiplier for the delay time. Defaults to .5. max_delay (float, optional): the max delay to sleep. Defaults to 120 (seconds). Returns: float: the time to sleep, in seconds. """ if attempt <= 0: return 0 # depends on [control=['if'], data=[]] # We subtract one to get exponents: 1, 2, 3, 4, 5, .. delay = float(2 ** (attempt - 1)) * float(delay_factor) # Apply randomization factor. Only increase the delay here. delay = delay * (randomization_factor * random.random() + 1) # Always limit with a maximum delay return min(delay, max_delay)
def date_range(cls,start_time,end_time,freq): ''' Returns a new SArray that represents a fixed frequency datetime index. Parameters ---------- start_time : datetime.datetime Left bound for generating dates. end_time : datetime.datetime Right bound for generating dates. freq : datetime.timedelta Fixed frequency between two consecutive data points. Returns ------- out : SArray Examples -------- >>> import datetime as dt >>> start = dt.datetime(2013, 5, 7, 10, 4, 10) >>> end = dt.datetime(2013, 5, 10, 10, 4, 10) >>> sa = tc.SArray.date_range(start,end,dt.timedelta(1)) >>> print sa dtype: datetime Rows: 4 [datetime.datetime(2013, 5, 7, 10, 4, 10), datetime.datetime(2013, 5, 8, 10, 4, 10), datetime.datetime(2013, 5, 9, 10, 4, 10), datetime.datetime(2013, 5, 10, 10, 4, 10)] ''' if not isinstance(start_time,datetime.datetime): raise TypeError("The ``start_time`` argument must be from type datetime.datetime.") if not isinstance(end_time,datetime.datetime): raise TypeError("The ``end_time`` argument must be from type datetime.datetime.") if not isinstance(freq,datetime.timedelta): raise TypeError("The ``freq`` argument must be from type datetime.timedelta.") from .. import extensions return extensions.date_range(start_time,end_time,freq.total_seconds())
def function[date_range, parameter[cls, start_time, end_time, freq]]: constant[ Returns a new SArray that represents a fixed frequency datetime index. Parameters ---------- start_time : datetime.datetime Left bound for generating dates. end_time : datetime.datetime Right bound for generating dates. freq : datetime.timedelta Fixed frequency between two consecutive data points. Returns ------- out : SArray Examples -------- >>> import datetime as dt >>> start = dt.datetime(2013, 5, 7, 10, 4, 10) >>> end = dt.datetime(2013, 5, 10, 10, 4, 10) >>> sa = tc.SArray.date_range(start,end,dt.timedelta(1)) >>> print sa dtype: datetime Rows: 4 [datetime.datetime(2013, 5, 7, 10, 4, 10), datetime.datetime(2013, 5, 8, 10, 4, 10), datetime.datetime(2013, 5, 9, 10, 4, 10), datetime.datetime(2013, 5, 10, 10, 4, 10)] ] if <ast.UnaryOp object at 0x7da1b1f0a980> begin[:] <ast.Raise object at 0x7da1b1f09a20> if <ast.UnaryOp object at 0x7da1b1f0afb0> begin[:] <ast.Raise object at 0x7da1b1f09990> if <ast.UnaryOp object at 0x7da1b1f08d90> begin[:] <ast.Raise object at 0x7da1b1f0b0d0> from relative_module[None] import module[extensions] return[call[name[extensions].date_range, parameter[name[start_time], name[end_time], call[name[freq].total_seconds, parameter[]]]]]
keyword[def] identifier[date_range] ( identifier[cls] , identifier[start_time] , identifier[end_time] , identifier[freq] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[start_time] , identifier[datetime] . identifier[datetime] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[end_time] , identifier[datetime] . identifier[datetime] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[freq] , identifier[datetime] . identifier[timedelta] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[from] .. keyword[import] identifier[extensions] keyword[return] identifier[extensions] . identifier[date_range] ( identifier[start_time] , identifier[end_time] , identifier[freq] . identifier[total_seconds] ())
def date_range(cls, start_time, end_time, freq): """ Returns a new SArray that represents a fixed frequency datetime index. Parameters ---------- start_time : datetime.datetime Left bound for generating dates. end_time : datetime.datetime Right bound for generating dates. freq : datetime.timedelta Fixed frequency between two consecutive data points. Returns ------- out : SArray Examples -------- >>> import datetime as dt >>> start = dt.datetime(2013, 5, 7, 10, 4, 10) >>> end = dt.datetime(2013, 5, 10, 10, 4, 10) >>> sa = tc.SArray.date_range(start,end,dt.timedelta(1)) >>> print sa dtype: datetime Rows: 4 [datetime.datetime(2013, 5, 7, 10, 4, 10), datetime.datetime(2013, 5, 8, 10, 4, 10), datetime.datetime(2013, 5, 9, 10, 4, 10), datetime.datetime(2013, 5, 10, 10, 4, 10)] """ if not isinstance(start_time, datetime.datetime): raise TypeError('The ``start_time`` argument must be from type datetime.datetime.') # depends on [control=['if'], data=[]] if not isinstance(end_time, datetime.datetime): raise TypeError('The ``end_time`` argument must be from type datetime.datetime.') # depends on [control=['if'], data=[]] if not isinstance(freq, datetime.timedelta): raise TypeError('The ``freq`` argument must be from type datetime.timedelta.') # depends on [control=['if'], data=[]] from .. import extensions return extensions.date_range(start_time, end_time, freq.total_seconds())
def wait_started(name, path=None, timeout=300): ''' Check that the system has fully inited This is actually very important for systemD based containers see https://github.com/saltstack/salt/issues/23847 path path to the container parent default: /var/lib/lxc (system default) .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt myminion lxc.wait_started ubuntu ''' if not exists(name, path=path): raise CommandExecutionError( 'Container {0} does does exists'.format(name)) if not state(name, path=path) == 'running': raise CommandExecutionError( 'Container {0} is not running'.format(name)) ret = False if running_systemd(name, path=path): test_started = test_sd_started_state logger = log.error else: test_started = test_bare_started_state logger = log.debug now = time.time() expire = now + timeout now = time.time() started = test_started(name, path=path) while time.time() < expire and not started: time.sleep(0.3) started = test_started(name, path=path) if started is None: logger( 'Assuming %s is started, although we failed to detect that' ' is fully started correctly', name) ret = True else: ret = started return ret
def function[wait_started, parameter[name, path, timeout]]: constant[ Check that the system has fully inited This is actually very important for systemD based containers see https://github.com/saltstack/salt/issues/23847 path path to the container parent default: /var/lib/lxc (system default) .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt myminion lxc.wait_started ubuntu ] if <ast.UnaryOp object at 0x7da1b21be0b0> begin[:] <ast.Raise object at 0x7da1b21bc640> if <ast.UnaryOp object at 0x7da1b21bd690> begin[:] <ast.Raise object at 0x7da1b21bdcf0> variable[ret] assign[=] constant[False] if call[name[running_systemd], parameter[name[name]]] begin[:] variable[test_started] assign[=] name[test_sd_started_state] variable[logger] assign[=] name[log].error variable[now] assign[=] call[name[time].time, parameter[]] variable[expire] assign[=] binary_operation[name[now] + name[timeout]] variable[now] assign[=] call[name[time].time, parameter[]] variable[started] assign[=] call[name[test_started], parameter[name[name]]] while <ast.BoolOp object at 0x7da1b21bcd00> begin[:] call[name[time].sleep, parameter[constant[0.3]]] variable[started] assign[=] call[name[test_started], parameter[name[name]]] if compare[name[started] is constant[None]] begin[:] call[name[logger], parameter[constant[Assuming %s is started, although we failed to detect that is fully started correctly], name[name]]] variable[ret] assign[=] constant[True] return[name[ret]]
keyword[def] identifier[wait_started] ( identifier[name] , identifier[path] = keyword[None] , identifier[timeout] = literal[int] ): literal[string] keyword[if] keyword[not] identifier[exists] ( identifier[name] , identifier[path] = identifier[path] ): keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[name] )) keyword[if] keyword[not] identifier[state] ( identifier[name] , identifier[path] = identifier[path] )== literal[string] : keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[name] )) identifier[ret] = keyword[False] keyword[if] identifier[running_systemd] ( identifier[name] , identifier[path] = identifier[path] ): identifier[test_started] = identifier[test_sd_started_state] identifier[logger] = identifier[log] . identifier[error] keyword[else] : identifier[test_started] = identifier[test_bare_started_state] identifier[logger] = identifier[log] . identifier[debug] identifier[now] = identifier[time] . identifier[time] () identifier[expire] = identifier[now] + identifier[timeout] identifier[now] = identifier[time] . identifier[time] () identifier[started] = identifier[test_started] ( identifier[name] , identifier[path] = identifier[path] ) keyword[while] identifier[time] . identifier[time] ()< identifier[expire] keyword[and] keyword[not] identifier[started] : identifier[time] . identifier[sleep] ( literal[int] ) identifier[started] = identifier[test_started] ( identifier[name] , identifier[path] = identifier[path] ) keyword[if] identifier[started] keyword[is] keyword[None] : identifier[logger] ( literal[string] literal[string] , identifier[name] ) identifier[ret] = keyword[True] keyword[else] : identifier[ret] = identifier[started] keyword[return] identifier[ret]
def wait_started(name, path=None, timeout=300): """ Check that the system has fully inited This is actually very important for systemD based containers see https://github.com/saltstack/salt/issues/23847 path path to the container parent default: /var/lib/lxc (system default) .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt myminion lxc.wait_started ubuntu """ if not exists(name, path=path): raise CommandExecutionError('Container {0} does does exists'.format(name)) # depends on [control=['if'], data=[]] if not state(name, path=path) == 'running': raise CommandExecutionError('Container {0} is not running'.format(name)) # depends on [control=['if'], data=[]] ret = False if running_systemd(name, path=path): test_started = test_sd_started_state logger = log.error # depends on [control=['if'], data=[]] else: test_started = test_bare_started_state logger = log.debug now = time.time() expire = now + timeout now = time.time() started = test_started(name, path=path) while time.time() < expire and (not started): time.sleep(0.3) started = test_started(name, path=path) # depends on [control=['while'], data=[]] if started is None: logger('Assuming %s is started, although we failed to detect that is fully started correctly', name) ret = True # depends on [control=['if'], data=[]] else: ret = started return ret
def get_walks_exhaustive(graph, node, length): """Gets all walks under a given length starting at a given node :param networkx.Graph graph: A graph :param node: Starting node :param int length: The length of walks to get :return: A list of paths :rtype: list[tuple] """ if 0 == length: return (node,), return tuple( (node, key) + path for neighbor in graph.edge[node] for path in get_walks_exhaustive(graph, neighbor, length - 1) if node not in path for key in graph.edge[node][neighbor] )
def function[get_walks_exhaustive, parameter[graph, node, length]]: constant[Gets all walks under a given length starting at a given node :param networkx.Graph graph: A graph :param node: Starting node :param int length: The length of walks to get :return: A list of paths :rtype: list[tuple] ] if compare[constant[0] equal[==] name[length]] begin[:] return[tuple[[<ast.Tuple object at 0x7da1b00f60b0>]]] return[call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b00f71c0>]]]
keyword[def] identifier[get_walks_exhaustive] ( identifier[graph] , identifier[node] , identifier[length] ): literal[string] keyword[if] literal[int] == identifier[length] : keyword[return] ( identifier[node] ,), keyword[return] identifier[tuple] ( ( identifier[node] , identifier[key] )+ identifier[path] keyword[for] identifier[neighbor] keyword[in] identifier[graph] . identifier[edge] [ identifier[node] ] keyword[for] identifier[path] keyword[in] identifier[get_walks_exhaustive] ( identifier[graph] , identifier[neighbor] , identifier[length] - literal[int] ) keyword[if] identifier[node] keyword[not] keyword[in] identifier[path] keyword[for] identifier[key] keyword[in] identifier[graph] . identifier[edge] [ identifier[node] ][ identifier[neighbor] ] )
def get_walks_exhaustive(graph, node, length): """Gets all walks under a given length starting at a given node :param networkx.Graph graph: A graph :param node: Starting node :param int length: The length of walks to get :return: A list of paths :rtype: list[tuple] """ if 0 == length: return ((node,),) # depends on [control=['if'], data=[]] return tuple(((node, key) + path for neighbor in graph.edge[node] for path in get_walks_exhaustive(graph, neighbor, length - 1) if node not in path for key in graph.edge[node][neighbor]))
def GetAnalyzersInformation(cls): """Retrieves the analyzers information. Returns: list[tuple]: containing: str: analyzer name. str: analyzer description. """ analyzer_information = [] for _, analyzer_class in cls.GetAnalyzers(): description = getattr(analyzer_class, 'DESCRIPTION', '') analyzer_information.append((analyzer_class.NAME, description)) return analyzer_information
def function[GetAnalyzersInformation, parameter[cls]]: constant[Retrieves the analyzers information. Returns: list[tuple]: containing: str: analyzer name. str: analyzer description. ] variable[analyzer_information] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da2047eb040>, <ast.Name object at 0x7da2047eb3a0>]]] in starred[call[name[cls].GetAnalyzers, parameter[]]] begin[:] variable[description] assign[=] call[name[getattr], parameter[name[analyzer_class], constant[DESCRIPTION], constant[]]] call[name[analyzer_information].append, parameter[tuple[[<ast.Attribute object at 0x7da2047eac20>, <ast.Name object at 0x7da2047eb970>]]]] return[name[analyzer_information]]
keyword[def] identifier[GetAnalyzersInformation] ( identifier[cls] ): literal[string] identifier[analyzer_information] =[] keyword[for] identifier[_] , identifier[analyzer_class] keyword[in] identifier[cls] . identifier[GetAnalyzers] (): identifier[description] = identifier[getattr] ( identifier[analyzer_class] , literal[string] , literal[string] ) identifier[analyzer_information] . identifier[append] (( identifier[analyzer_class] . identifier[NAME] , identifier[description] )) keyword[return] identifier[analyzer_information]
def GetAnalyzersInformation(cls): """Retrieves the analyzers information. Returns: list[tuple]: containing: str: analyzer name. str: analyzer description. """ analyzer_information = [] for (_, analyzer_class) in cls.GetAnalyzers(): description = getattr(analyzer_class, 'DESCRIPTION', '') analyzer_information.append((analyzer_class.NAME, description)) # depends on [control=['for'], data=[]] return analyzer_information
def saturation(self, value): """Volume of water to volume of voids""" value = clean_float(value) if value is None: return try: unit_moisture_weight = self.unit_moist_weight - self.unit_dry_weight unit_moisture_volume = unit_moisture_weight / self._pw saturation = unit_moisture_volume / self._calc_unit_void_volume() if saturation is not None and not ct.isclose(saturation, value, rel_tol=self._tolerance): raise ModelError("New saturation (%.3f) is inconsistent " "with calculated value (%.3f)" % (value, saturation)) except TypeError: pass old_value = self.saturation self._saturation = value try: self.recompute_all_weights_and_void() self._add_to_stack("saturation", value) except ModelError as e: self._saturation = old_value raise ModelError(e)
def function[saturation, parameter[self, value]]: constant[Volume of water to volume of voids] variable[value] assign[=] call[name[clean_float], parameter[name[value]]] if compare[name[value] is constant[None]] begin[:] return[None] <ast.Try object at 0x7da1b18498d0> variable[old_value] assign[=] name[self].saturation name[self]._saturation assign[=] name[value] <ast.Try object at 0x7da18bc72140>
keyword[def] identifier[saturation] ( identifier[self] , identifier[value] ): literal[string] identifier[value] = identifier[clean_float] ( identifier[value] ) keyword[if] identifier[value] keyword[is] keyword[None] : keyword[return] keyword[try] : identifier[unit_moisture_weight] = identifier[self] . identifier[unit_moist_weight] - identifier[self] . identifier[unit_dry_weight] identifier[unit_moisture_volume] = identifier[unit_moisture_weight] / identifier[self] . identifier[_pw] identifier[saturation] = identifier[unit_moisture_volume] / identifier[self] . identifier[_calc_unit_void_volume] () keyword[if] identifier[saturation] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[ct] . identifier[isclose] ( identifier[saturation] , identifier[value] , identifier[rel_tol] = identifier[self] . identifier[_tolerance] ): keyword[raise] identifier[ModelError] ( literal[string] literal[string] %( identifier[value] , identifier[saturation] )) keyword[except] identifier[TypeError] : keyword[pass] identifier[old_value] = identifier[self] . identifier[saturation] identifier[self] . identifier[_saturation] = identifier[value] keyword[try] : identifier[self] . identifier[recompute_all_weights_and_void] () identifier[self] . identifier[_add_to_stack] ( literal[string] , identifier[value] ) keyword[except] identifier[ModelError] keyword[as] identifier[e] : identifier[self] . identifier[_saturation] = identifier[old_value] keyword[raise] identifier[ModelError] ( identifier[e] )
def saturation(self, value): """Volume of water to volume of voids""" value = clean_float(value) if value is None: return # depends on [control=['if'], data=[]] try: unit_moisture_weight = self.unit_moist_weight - self.unit_dry_weight unit_moisture_volume = unit_moisture_weight / self._pw saturation = unit_moisture_volume / self._calc_unit_void_volume() if saturation is not None and (not ct.isclose(saturation, value, rel_tol=self._tolerance)): raise ModelError('New saturation (%.3f) is inconsistent with calculated value (%.3f)' % (value, saturation)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except TypeError: pass # depends on [control=['except'], data=[]] old_value = self.saturation self._saturation = value try: self.recompute_all_weights_and_void() self._add_to_stack('saturation', value) # depends on [control=['try'], data=[]] except ModelError as e: self._saturation = old_value raise ModelError(e) # depends on [control=['except'], data=['e']]
def process_docstring(app, what, name, obj, options, lines): """ Process the docstring for a given python object. Note that the list 'lines' is changed in this function. Sphinx uses the altered content of the list. """ result = [re.sub(r'U\{([^}]*)\}', r'\1', re.sub(r'(L|C)\{([^}]*)\}', r':py:obj:`\2`', re.sub(r'@(' + '|'.join(FIELDS) + r')', r':\1', l))) for l in lines] lines[:] = result[:]
def function[process_docstring, parameter[app, what, name, obj, options, lines]]: constant[ Process the docstring for a given python object. Note that the list 'lines' is changed in this function. Sphinx uses the altered content of the list. ] variable[result] assign[=] <ast.ListComp object at 0x7da18c4cec50> call[name[lines]][<ast.Slice object at 0x7da18f58d930>] assign[=] call[name[result]][<ast.Slice object at 0x7da18f58ff10>]
keyword[def] identifier[process_docstring] ( identifier[app] , identifier[what] , identifier[name] , identifier[obj] , identifier[options] , identifier[lines] ): literal[string] identifier[result] =[ identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[re] . identifier[sub] ( literal[string] + literal[string] . identifier[join] ( identifier[FIELDS] )+ literal[string] , literal[string] , identifier[l] ))) keyword[for] identifier[l] keyword[in] identifier[lines] ] identifier[lines] [:]= identifier[result] [:]
def process_docstring(app, what, name, obj, options, lines): """ Process the docstring for a given python object. Note that the list 'lines' is changed in this function. Sphinx uses the altered content of the list. """ result = [re.sub('U\\{([^}]*)\\}', '\\1', re.sub('(L|C)\\{([^}]*)\\}', ':py:obj:`\\2`', re.sub('@(' + '|'.join(FIELDS) + ')', ':\\1', l))) for l in lines] lines[:] = result[:]
def read(self): """ (coroutine) Read a single message from the pipe. (Return as text.) """ if self.done_f.done(): raise BrokenPipeError try: result = yield From(read_message_from_pipe(self.pipe_instance.pipe_handle)) raise Return(result) except BrokenPipeError: self.done_f.set_result(None) raise
def function[read, parameter[self]]: constant[ (coroutine) Read a single message from the pipe. (Return as text.) ] if call[name[self].done_f.done, parameter[]] begin[:] <ast.Raise object at 0x7da204960610> <ast.Try object at 0x7da2049638b0>
keyword[def] identifier[read] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[done_f] . identifier[done] (): keyword[raise] identifier[BrokenPipeError] keyword[try] : identifier[result] = keyword[yield] identifier[From] ( identifier[read_message_from_pipe] ( identifier[self] . identifier[pipe_instance] . identifier[pipe_handle] )) keyword[raise] identifier[Return] ( identifier[result] ) keyword[except] identifier[BrokenPipeError] : identifier[self] . identifier[done_f] . identifier[set_result] ( keyword[None] ) keyword[raise]
def read(self): """ (coroutine) Read a single message from the pipe. (Return as text.) """ if self.done_f.done(): raise BrokenPipeError # depends on [control=['if'], data=[]] try: result = (yield From(read_message_from_pipe(self.pipe_instance.pipe_handle))) raise Return(result) # depends on [control=['try'], data=[]] except BrokenPipeError: self.done_f.set_result(None) raise # depends on [control=['except'], data=[]]
def _draw_trees(trees, nrow=1, ncol=1, rmargin=.3, iopts=None, outdir=".", shfile=None, **kwargs): """ Draw one or multiple trees on one plot. """ from jcvi.graphics.tree import draw_tree if shfile: SHs = DictFile(shfile, delimiter="\t") ntrees = len(trees) n = nrow * ncol for x in xrange(int(ceil(float(ntrees)/n))): fig = plt.figure(1, (iopts.w, iopts.h)) if iopts \ else plt.figure(1, (5, 5)) root = fig.add_axes([0, 0, 1, 1]) xiv = 1. / ncol yiv = 1. / nrow xstart = list(np.arange(0, 1, xiv)) * nrow ystart = list(chain(*zip(*[list(np.arange(0, 1, yiv))[::-1]] * ncol))) for i in xrange(n*x, n*(x+1)): if i == ntrees: break ax = fig.add_axes([xstart[i%n], ystart[i%n], xiv, yiv]) f = trees.keys()[i] tree = trees[f] try: SH = SHs[f] except: SH = None draw_tree(ax, tree, rmargin=rmargin, reroot=False, \ supportcolor="r", SH=SH, **kwargs) root.set_xlim(0, 1) root.set_ylim(0, 1) root.set_axis_off() format = iopts.format if iopts else "pdf" dpi = iopts.dpi if iopts else 300 if n == 1: image_name = f.rsplit(".", 1)[0] + "." + format else: image_name = "trees{0}.{1}".format(x, format) image_name = op.join(outdir, image_name) savefig(image_name, dpi=dpi, iopts=iopts) plt.clf()
def function[_draw_trees, parameter[trees, nrow, ncol, rmargin, iopts, outdir, shfile]]: constant[ Draw one or multiple trees on one plot. ] from relative_module[jcvi.graphics.tree] import module[draw_tree] if name[shfile] begin[:] variable[SHs] assign[=] call[name[DictFile], parameter[name[shfile]]] variable[ntrees] assign[=] call[name[len], parameter[name[trees]]] variable[n] assign[=] binary_operation[name[nrow] * name[ncol]] for taget[name[x]] in starred[call[name[xrange], parameter[call[name[int], parameter[call[name[ceil], parameter[binary_operation[call[name[float], parameter[name[ntrees]]] / name[n]]]]]]]]] begin[:] variable[fig] assign[=] <ast.IfExp object at 0x7da207f00970> variable[root] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da207f017e0>, <ast.Constant object at 0x7da207f01450>, <ast.Constant object at 0x7da207f00e80>, <ast.Constant object at 0x7da207f02d40>]]]] variable[xiv] assign[=] binary_operation[constant[1.0] / name[ncol]] variable[yiv] assign[=] binary_operation[constant[1.0] / name[nrow]] variable[xstart] assign[=] binary_operation[call[name[list], parameter[call[name[np].arange, parameter[constant[0], constant[1], name[xiv]]]]] * name[nrow]] variable[ystart] assign[=] call[name[list], parameter[call[name[chain], parameter[<ast.Starred object at 0x7da207f03a90>]]]] for taget[name[i]] in starred[call[name[xrange], parameter[binary_operation[name[n] * name[x]], binary_operation[name[n] * binary_operation[name[x] + constant[1]]]]]] begin[:] if compare[name[i] equal[==] name[ntrees]] begin[:] break variable[ax] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Subscript object at 0x7da18f00caf0>, <ast.Subscript object at 0x7da18f00e7d0>, <ast.Name object at 0x7da18f00d0f0>, <ast.Name object at 0x7da18f00e440>]]]] variable[f] assign[=] call[call[name[trees].keys, parameter[]]][name[i]] variable[tree] assign[=] call[name[trees]][name[f]] <ast.Try object at 0x7da18f00d720> call[name[draw_tree], 
parameter[name[ax], name[tree]]] call[name[root].set_xlim, parameter[constant[0], constant[1]]] call[name[root].set_ylim, parameter[constant[0], constant[1]]] call[name[root].set_axis_off, parameter[]] variable[format] assign[=] <ast.IfExp object at 0x7da18f00e080> variable[dpi] assign[=] <ast.IfExp object at 0x7da18f00c520> if compare[name[n] equal[==] constant[1]] begin[:] variable[image_name] assign[=] binary_operation[binary_operation[call[call[name[f].rsplit, parameter[constant[.], constant[1]]]][constant[0]] + constant[.]] + name[format]] variable[image_name] assign[=] call[name[op].join, parameter[name[outdir], name[image_name]]] call[name[savefig], parameter[name[image_name]]] call[name[plt].clf, parameter[]]
keyword[def] identifier[_draw_trees] ( identifier[trees] , identifier[nrow] = literal[int] , identifier[ncol] = literal[int] , identifier[rmargin] = literal[int] , identifier[iopts] = keyword[None] , identifier[outdir] = literal[string] , identifier[shfile] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[jcvi] . identifier[graphics] . identifier[tree] keyword[import] identifier[draw_tree] keyword[if] identifier[shfile] : identifier[SHs] = identifier[DictFile] ( identifier[shfile] , identifier[delimiter] = literal[string] ) identifier[ntrees] = identifier[len] ( identifier[trees] ) identifier[n] = identifier[nrow] * identifier[ncol] keyword[for] identifier[x] keyword[in] identifier[xrange] ( identifier[int] ( identifier[ceil] ( identifier[float] ( identifier[ntrees] )/ identifier[n] ))): identifier[fig] = identifier[plt] . identifier[figure] ( literal[int] ,( identifier[iopts] . identifier[w] , identifier[iopts] . identifier[h] )) keyword[if] identifier[iopts] keyword[else] identifier[plt] . identifier[figure] ( literal[int] ,( literal[int] , literal[int] )) identifier[root] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[xiv] = literal[int] / identifier[ncol] identifier[yiv] = literal[int] / identifier[nrow] identifier[xstart] = identifier[list] ( identifier[np] . identifier[arange] ( literal[int] , literal[int] , identifier[xiv] ))* identifier[nrow] identifier[ystart] = identifier[list] ( identifier[chain] (* identifier[zip] (*[ identifier[list] ( identifier[np] . identifier[arange] ( literal[int] , literal[int] , identifier[yiv] ))[::- literal[int] ]]* identifier[ncol] ))) keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[n] * identifier[x] , identifier[n] *( identifier[x] + literal[int] )): keyword[if] identifier[i] == identifier[ntrees] : keyword[break] identifier[ax] = identifier[fig] . 
identifier[add_axes] ([ identifier[xstart] [ identifier[i] % identifier[n] ], identifier[ystart] [ identifier[i] % identifier[n] ], identifier[xiv] , identifier[yiv] ]) identifier[f] = identifier[trees] . identifier[keys] ()[ identifier[i] ] identifier[tree] = identifier[trees] [ identifier[f] ] keyword[try] : identifier[SH] = identifier[SHs] [ identifier[f] ] keyword[except] : identifier[SH] = keyword[None] identifier[draw_tree] ( identifier[ax] , identifier[tree] , identifier[rmargin] = identifier[rmargin] , identifier[reroot] = keyword[False] , identifier[supportcolor] = literal[string] , identifier[SH] = identifier[SH] ,** identifier[kwargs] ) identifier[root] . identifier[set_xlim] ( literal[int] , literal[int] ) identifier[root] . identifier[set_ylim] ( literal[int] , literal[int] ) identifier[root] . identifier[set_axis_off] () identifier[format] = identifier[iopts] . identifier[format] keyword[if] identifier[iopts] keyword[else] literal[string] identifier[dpi] = identifier[iopts] . identifier[dpi] keyword[if] identifier[iopts] keyword[else] literal[int] keyword[if] identifier[n] == literal[int] : identifier[image_name] = identifier[f] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ]+ literal[string] + identifier[format] keyword[else] : identifier[image_name] = literal[string] . identifier[format] ( identifier[x] , identifier[format] ) identifier[image_name] = identifier[op] . identifier[join] ( identifier[outdir] , identifier[image_name] ) identifier[savefig] ( identifier[image_name] , identifier[dpi] = identifier[dpi] , identifier[iopts] = identifier[iopts] ) identifier[plt] . identifier[clf] ()
def _draw_trees(trees, nrow=1, ncol=1, rmargin=0.3, iopts=None, outdir='.', shfile=None, **kwargs): """ Draw one or multiple trees on one plot. """ from jcvi.graphics.tree import draw_tree if shfile: SHs = DictFile(shfile, delimiter='\t') # depends on [control=['if'], data=[]] ntrees = len(trees) n = nrow * ncol for x in xrange(int(ceil(float(ntrees) / n))): fig = plt.figure(1, (iopts.w, iopts.h)) if iopts else plt.figure(1, (5, 5)) root = fig.add_axes([0, 0, 1, 1]) xiv = 1.0 / ncol yiv = 1.0 / nrow xstart = list(np.arange(0, 1, xiv)) * nrow ystart = list(chain(*zip(*[list(np.arange(0, 1, yiv))[::-1]] * ncol))) for i in xrange(n * x, n * (x + 1)): if i == ntrees: break # depends on [control=['if'], data=[]] ax = fig.add_axes([xstart[i % n], ystart[i % n], xiv, yiv]) f = trees.keys()[i] tree = trees[f] try: SH = SHs[f] # depends on [control=['try'], data=[]] except: SH = None # depends on [control=['except'], data=[]] draw_tree(ax, tree, rmargin=rmargin, reroot=False, supportcolor='r', SH=SH, **kwargs) # depends on [control=['for'], data=['i']] root.set_xlim(0, 1) root.set_ylim(0, 1) root.set_axis_off() format = iopts.format if iopts else 'pdf' dpi = iopts.dpi if iopts else 300 if n == 1: image_name = f.rsplit('.', 1)[0] + '.' + format # depends on [control=['if'], data=[]] else: image_name = 'trees{0}.{1}'.format(x, format) image_name = op.join(outdir, image_name) savefig(image_name, dpi=dpi, iopts=iopts) plt.clf() # depends on [control=['for'], data=['x']]
def insert_index(self, table, name, url, std=""):
    """Callback that records index entries for a title.

    The raw title is split into individual names via parse_title(), and
    each name is inserted as its own row of the given table.
    """
    sql = 'INSERT INTO "%s" (name, url, std) VALUES (?, ?, ?)' % table
    for entry in self.parse_title(name):
        self.db_cursor.execute(sql, (entry, url, std))
def function[insert_index, parameter[self, table, name, url, std]]: constant[callback to insert index] variable[names] assign[=] call[name[self].parse_title, parameter[name[name]]] for taget[name[n]] in starred[name[names]] begin[:] call[name[self].db_cursor.execute, parameter[binary_operation[constant[INSERT INTO "%s" (name, url, std) VALUES (?, ?, ?)] <ast.Mod object at 0x7da2590d6920> name[table]], tuple[[<ast.Name object at 0x7da18f58ce20>, <ast.Name object at 0x7da18f58f4c0>, <ast.Name object at 0x7da18f58cc40>]]]]
keyword[def] identifier[insert_index] ( identifier[self] , identifier[table] , identifier[name] , identifier[url] , identifier[std] = literal[string] ): literal[string] identifier[names] = identifier[self] . identifier[parse_title] ( identifier[name] ); keyword[for] identifier[n] keyword[in] identifier[names] : identifier[self] . identifier[db_cursor] . identifier[execute] ( literal[string] % identifier[table] ,( identifier[n] , identifier[url] , identifier[std] ))
def insert_index(self, table, name, url, std=''): """callback to insert index""" names = self.parse_title(name) for n in names: self.db_cursor.execute('INSERT INTO "%s" (name, url, std) VALUES (?, ?, ?)' % table, (n, url, std)) # depends on [control=['for'], data=['n']]
def stopwatch_now():
    """Return a timestamp suitable for measuring elapsed intervals.

    Uses the monotonic clock where available (immune to system clock
    adjustments going backwards); Python 2 lacks time.monotonic(), so
    wall-clock time is the fallback there.
    """
    return time.time() if six.PY2 else time.monotonic()
def function[stopwatch_now, parameter[]]: constant[Get a timevalue for interval comparisons When possible it is a monotonic clock to prevent backwards time issues. ] if name[six].PY2 begin[:] variable[now] assign[=] call[name[time].time, parameter[]] return[name[now]]
keyword[def] identifier[stopwatch_now] (): literal[string] keyword[if] identifier[six] . identifier[PY2] : identifier[now] = identifier[time] . identifier[time] () keyword[else] : identifier[now] = identifier[time] . identifier[monotonic] () keyword[return] identifier[now]
def stopwatch_now(): """Get a timevalue for interval comparisons When possible it is a monotonic clock to prevent backwards time issues. """ if six.PY2: now = time.time() # depends on [control=['if'], data=[]] else: now = time.monotonic() return now
def set_model(self, mdl):
    """
    Setup the image model formation equation and corresponding objects
    into their various objects.

    `mdl` is a `peri.models.Model` object
    """
    self.mdl = mdl
    # Validate that our components satisfy the model before exposing them.
    self.mdl.check_inputs(self.comps)
    # Mirror each component as an attribute keyed by its category,
    # e.g. a component of category 'obj' becomes self._comp_obj.
    for component in self.comps:
        setattr(self, '_comp_' + component.category, component)
def function[set_model, parameter[self, mdl]]: constant[ Setup the image model formation equation and corresponding objects into their various objects. `mdl` is a `peri.models.Model` object ] name[self].mdl assign[=] name[mdl] call[name[self].mdl.check_inputs, parameter[name[self].comps]] for taget[name[c]] in starred[name[self].comps] begin[:] call[name[setattr], parameter[name[self], binary_operation[constant[_comp_] + name[c].category], name[c]]]
keyword[def] identifier[set_model] ( identifier[self] , identifier[mdl] ): literal[string] identifier[self] . identifier[mdl] = identifier[mdl] identifier[self] . identifier[mdl] . identifier[check_inputs] ( identifier[self] . identifier[comps] ) keyword[for] identifier[c] keyword[in] identifier[self] . identifier[comps] : identifier[setattr] ( identifier[self] , literal[string] + identifier[c] . identifier[category] , identifier[c] )
def set_model(self, mdl): """ Setup the image model formation equation and corresponding objects into their various objects. `mdl` is a `peri.models.Model` object """ self.mdl = mdl self.mdl.check_inputs(self.comps) for c in self.comps: setattr(self, '_comp_' + c.category, c) # depends on [control=['for'], data=['c']]
def dimension(self, length, draught, beam, speed,
              slenderness_coefficient, prismatic_coefficient):
    """
    Record the principal dimensions of a ship and derive its
    displacement and wetted surface area.

    :param length: metres length of the vehicle
    :param draught: metres draught of the vehicle
    :param beam: metres beam of the vehicle
    :param speed: m/s speed of the vehicle
    :param slenderness_coefficient: Slenderness coefficient
        dimensionless :math:`L/(∇^{1/3})` where L is length of ship,
        ∇ is displacement
    :param prismatic_coefficient: Prismatic coefficient dimensionless
        :math:`∇/(L\\cdot A_m)` where L is length of ship, ∇ is
        displacement, Am is midsection area of the ship
    """
    self.length = length
    self.draught = draught
    self.beam = beam
    self.speed = speed
    self.slenderness_coefficient = slenderness_coefficient
    self.prismatic_coefficient = prismatic_coefficient
    # Displacement follows directly from the slenderness definition:
    # Cs = L / ∇^(1/3)  =>  ∇ = (L / Cs)^3
    displacement = (length / slenderness_coefficient) ** 3
    self.displacement = displacement
    # Wetted-surface estimate; presumably a Mumford-style approximation
    # with a 1.025 correction factor — TODO confirm the source formula.
    self.surface_area = 1.025 * (1.7 * length * draught
                                 + displacement / draught)
def function[dimension, parameter[self, length, draught, beam, speed, slenderness_coefficient, prismatic_coefficient]]: constant[ Assign values for the main dimension of a ship. :param length: metres length of the vehicle :param draught: metres draught of the vehicle :param beam: metres beam of the vehicle :param speed: m/s speed of the vehicle :param slenderness_coefficient: Slenderness coefficient dimensionless :math:`L/(∇^{1/3})` where L is length of ship, ∇ is displacement :param prismatic_coefficient: Prismatic coefficient dimensionless :math:`∇/(L\cdot A_m)` where L is length of ship, ∇ is displacement Am is midsection area of the ship ] name[self].length assign[=] name[length] name[self].draught assign[=] name[draught] name[self].beam assign[=] name[beam] name[self].speed assign[=] name[speed] name[self].slenderness_coefficient assign[=] name[slenderness_coefficient] name[self].prismatic_coefficient assign[=] name[prismatic_coefficient] name[self].displacement assign[=] binary_operation[binary_operation[name[self].length / name[self].slenderness_coefficient] ** constant[3]] name[self].surface_area assign[=] binary_operation[constant[1.025] * binary_operation[binary_operation[binary_operation[constant[1.7] * name[self].length] * name[self].draught] + binary_operation[name[self].displacement / name[self].draught]]]
keyword[def] identifier[dimension] ( identifier[self] , identifier[length] , identifier[draught] , identifier[beam] , identifier[speed] , identifier[slenderness_coefficient] , identifier[prismatic_coefficient] ): literal[string] identifier[self] . identifier[length] = identifier[length] identifier[self] . identifier[draught] = identifier[draught] identifier[self] . identifier[beam] = identifier[beam] identifier[self] . identifier[speed] = identifier[speed] identifier[self] . identifier[slenderness_coefficient] = identifier[slenderness_coefficient] identifier[self] . identifier[prismatic_coefficient] = identifier[prismatic_coefficient] identifier[self] . identifier[displacement] =( identifier[self] . identifier[length] / identifier[self] . identifier[slenderness_coefficient] )** literal[int] identifier[self] . identifier[surface_area] = literal[int] *( literal[int] * identifier[self] . identifier[length] * identifier[self] . identifier[draught] + identifier[self] . identifier[displacement] / identifier[self] . identifier[draught] )
def dimension(self, length, draught, beam, speed, slenderness_coefficient, prismatic_coefficient): """ Assign values for the main dimension of a ship. :param length: metres length of the vehicle :param draught: metres draught of the vehicle :param beam: metres beam of the vehicle :param speed: m/s speed of the vehicle :param slenderness_coefficient: Slenderness coefficient dimensionless :math:`L/(∇^{1/3})` where L is length of ship, ∇ is displacement :param prismatic_coefficient: Prismatic coefficient dimensionless :math:`∇/(L\\cdot A_m)` where L is length of ship, ∇ is displacement Am is midsection area of the ship """ self.length = length self.draught = draught self.beam = beam self.speed = speed self.slenderness_coefficient = slenderness_coefficient self.prismatic_coefficient = prismatic_coefficient self.displacement = (self.length / self.slenderness_coefficient) ** 3 self.surface_area = 1.025 * (1.7 * self.length * self.draught + self.displacement / self.draught)
def _imm_new(cls):
    '''
    Allocate a new instance of the immutable class `cls` and prepare it for
    initialization: install any declared parameter defaults, clear computed
    values, and flag the instance as being in its init phase (during which
    the usual immutability enforcement is relaxed).

    All immutable new classes use a hack to make sure the post-init cleanup occurs.
    '''
    imm = object.__new__(cls)
    # Note that right now imm has a normal setattr method;
    # Give any parameter that has one a default value.
    # NOTE(review): `params` appears to map parameter-name -> tuple whose
    # first element, when truthy, wraps the default value — confirm against
    # where _pimms_immutable_data_ is built (elsewhere in this module).
    params = cls._pimms_immutable_data_['params']
    for (p,dat) in six.iteritems(params):
        dat = dat[0]
        if dat: object.__setattr__(imm, p, dat[0])
    # Clear any values; they are not allowed yet
    _imm_clear(imm)
    # Note that we are initializing... the flag is written straight into
    # __dict__ to bypass the class's (immutable) attribute machinery.
    dd = object.__getattribute__(imm, '__dict__')
    dd['_pimms_immutable_is_init'] = True
    # That should do it!
    return imm
def function[_imm_new, parameter[cls]]: constant[ All immutable new classes use a hack to make sure the post-init cleanup occurs. ] variable[imm] assign[=] call[name[object].__new__, parameter[name[cls]]] variable[params] assign[=] call[name[cls]._pimms_immutable_data_][constant[params]] for taget[tuple[[<ast.Name object at 0x7da1b2057e50>, <ast.Name object at 0x7da1b2056650>]]] in starred[call[name[six].iteritems, parameter[name[params]]]] begin[:] variable[dat] assign[=] call[name[dat]][constant[0]] if name[dat] begin[:] call[name[object].__setattr__, parameter[name[imm], name[p], call[name[dat]][constant[0]]]] call[name[_imm_clear], parameter[name[imm]]] variable[dd] assign[=] call[name[object].__getattribute__, parameter[name[imm], constant[__dict__]]] call[name[dd]][constant[_pimms_immutable_is_init]] assign[=] constant[True] return[name[imm]]
keyword[def] identifier[_imm_new] ( identifier[cls] ): literal[string] identifier[imm] = identifier[object] . identifier[__new__] ( identifier[cls] ) identifier[params] = identifier[cls] . identifier[_pimms_immutable_data_] [ literal[string] ] keyword[for] ( identifier[p] , identifier[dat] ) keyword[in] identifier[six] . identifier[iteritems] ( identifier[params] ): identifier[dat] = identifier[dat] [ literal[int] ] keyword[if] identifier[dat] : identifier[object] . identifier[__setattr__] ( identifier[imm] , identifier[p] , identifier[dat] [ literal[int] ]) identifier[_imm_clear] ( identifier[imm] ) identifier[dd] = identifier[object] . identifier[__getattribute__] ( identifier[imm] , literal[string] ) identifier[dd] [ literal[string] ]= keyword[True] keyword[return] identifier[imm]
def _imm_new(cls): """ All immutable new classes use a hack to make sure the post-init cleanup occurs. """ imm = object.__new__(cls) # Note that right now imm has a normal setattr method; # Give any parameter that has one a default value params = cls._pimms_immutable_data_['params'] for (p, dat) in six.iteritems(params): dat = dat[0] if dat: object.__setattr__(imm, p, dat[0]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # Clear any values; they are not allowed yet _imm_clear(imm) # Note that we are initializing... dd = object.__getattribute__(imm, '__dict__') dd['_pimms_immutable_is_init'] = True # That should do it! return imm
def network_delete(auth=None, **kwargs):
    '''
    Delete a network

    name_or_id
        Name or ID of the network being deleted

    CLI Example:

    .. code-block:: bash

        salt '*' neutronng.network_delete name_or_id=network1
        salt '*' neutronng.network_delete name_or_id=1dcac318a83b4610b7a7f7ba01465548

    '''
    conn = get_operator_cloud(auth)
    # Strip Salt-internal keys before handing the arguments to the SDK.
    return conn.delete_network(**_clean_kwargs(**kwargs))
def function[network_delete, parameter[auth]]: constant[ Delete a network name_or_id Name or ID of the network being deleted CLI Example: .. code-block:: bash salt '*' neutronng.network_delete name_or_id=network1 salt '*' neutronng.network_delete name_or_id=1dcac318a83b4610b7a7f7ba01465548 ] variable[cloud] assign[=] call[name[get_operator_cloud], parameter[name[auth]]] variable[kwargs] assign[=] call[name[_clean_kwargs], parameter[]] return[call[name[cloud].delete_network, parameter[]]]
keyword[def] identifier[network_delete] ( identifier[auth] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[cloud] = identifier[get_operator_cloud] ( identifier[auth] ) identifier[kwargs] = identifier[_clean_kwargs] (** identifier[kwargs] ) keyword[return] identifier[cloud] . identifier[delete_network] (** identifier[kwargs] )
def network_delete(auth=None, **kwargs): """ Delete a network name_or_id Name or ID of the network being deleted CLI Example: .. code-block:: bash salt '*' neutronng.network_delete name_or_id=network1 salt '*' neutronng.network_delete name_or_id=1dcac318a83b4610b7a7f7ba01465548 """ cloud = get_operator_cloud(auth) kwargs = _clean_kwargs(**kwargs) return cloud.delete_network(**kwargs)
def take_bug_reports(ads, test_name, begin_time, destination=None):
    """Takes bug reports on a list of android devices.

    If you want to take a bug report, call this function with a list of
    android_device objects in on_fail. But reports will be taken on all the
    devices in the list concurrently. Bug report takes a relative long
    time to take, so use this cautiously.

    Args:
        ads: A list of AndroidDevice instances.
        test_name: Name of the test method that triggered this bug report.
        begin_time: timestamp taken when the test started, can be either
            string or int.
        destination: string, path to the directory where the bugreport
            should be saved.
    """
    timestamp = mobly_logger.normalize_log_line_timestamp(str(begin_time))

    def _take_one(name, ts, device, dest):
        device.take_bug_report(name, ts, destination=dest)

    # Fan out across devices; bug reports are slow, so run them in parallel.
    utils.concurrent_exec(
        _take_one, [(test_name, timestamp, ad, destination) for ad in ads])
def function[take_bug_reports, parameter[ads, test_name, begin_time, destination]]: constant[Takes bug reports on a list of android devices. If you want to take a bug report, call this function with a list of android_device objects in on_fail. But reports will be taken on all the devices in the list concurrently. Bug report takes a relative long time to take, so use this cautiously. Args: ads: A list of AndroidDevice instances. test_name: Name of the test method that triggered this bug report. begin_time: timestamp taken when the test started, can be either string or int. destination: string, path to the directory where the bugreport should be saved. ] variable[begin_time] assign[=] call[name[mobly_logger].normalize_log_line_timestamp, parameter[call[name[str], parameter[name[begin_time]]]]] def function[take_br, parameter[test_name, begin_time, ad, destination]]: call[name[ad].take_bug_report, parameter[name[test_name], name[begin_time]]] variable[args] assign[=] <ast.ListComp object at 0x7da1b0746980> call[name[utils].concurrent_exec, parameter[name[take_br], name[args]]]
keyword[def] identifier[take_bug_reports] ( identifier[ads] , identifier[test_name] , identifier[begin_time] , identifier[destination] = keyword[None] ): literal[string] identifier[begin_time] = identifier[mobly_logger] . identifier[normalize_log_line_timestamp] ( identifier[str] ( identifier[begin_time] )) keyword[def] identifier[take_br] ( identifier[test_name] , identifier[begin_time] , identifier[ad] , identifier[destination] ): identifier[ad] . identifier[take_bug_report] ( identifier[test_name] , identifier[begin_time] , identifier[destination] = identifier[destination] ) identifier[args] =[( identifier[test_name] , identifier[begin_time] , identifier[ad] , identifier[destination] ) keyword[for] identifier[ad] keyword[in] identifier[ads] ] identifier[utils] . identifier[concurrent_exec] ( identifier[take_br] , identifier[args] )
def take_bug_reports(ads, test_name, begin_time, destination=None): """Takes bug reports on a list of android devices. If you want to take a bug report, call this function with a list of android_device objects in on_fail. But reports will be taken on all the devices in the list concurrently. Bug report takes a relative long time to take, so use this cautiously. Args: ads: A list of AndroidDevice instances. test_name: Name of the test method that triggered this bug report. begin_time: timestamp taken when the test started, can be either string or int. destination: string, path to the directory where the bugreport should be saved. """ begin_time = mobly_logger.normalize_log_line_timestamp(str(begin_time)) def take_br(test_name, begin_time, ad, destination): ad.take_bug_report(test_name, begin_time, destination=destination) args = [(test_name, begin_time, ad, destination) for ad in ads] utils.concurrent_exec(take_br, args)
def slice(l, num_slices=None, slice_length=None, runts=True, random=False):
    """
    Partition a list into evenly-sized segments.

    Args:
        l: the list to segment; shuffled IN PLACE when random=True.
        num_slices: desired number of segments, used to derive the
            segment size when slice_length is not given.
        slice_length: explicit segment size; takes precedence over
            num_slices.
        runts: if False, drop a trailing segment shorter than
            slice_length.
        random: if True, shuffle l in place before slicing.

    Returns:
        The original list when neither sizing argument is given,
        otherwise a new list of segment lists.
    """
    if random:
        # Alias the stdlib module so the import does not clobber the
        # boolean `random` parameter of the same name.
        import random as _random
        _random.shuffle(l)
    if not num_slices and not slice_length:
        return l
    if not slice_length:
        # Guard against a zero step when the list is shorter than the
        # requested number of slices (previously raised ValueError from
        # range() with step 0).
        slice_length = max(1, int(len(l) / num_slices))
    segments = [l[i:i + slice_length]
                for i in range(0, len(l), slice_length)]
    if runts:
        return segments
    return [seg for seg in segments if len(seg) == slice_length]
def function[slice, parameter[l, num_slices, slice_length, runts, random]]: constant[ Returns a new list of n evenly-sized segments of the original list ] if name[random] begin[:] import module[random] call[name[random].shuffle, parameter[name[l]]] if <ast.BoolOp object at 0x7da1b0effc10> begin[:] return[name[l]] if <ast.UnaryOp object at 0x7da1b0efff70> begin[:] variable[slice_length] assign[=] call[name[int], parameter[binary_operation[call[name[len], parameter[name[l]]] / name[num_slices]]]] variable[newlist] assign[=] <ast.ListComp object at 0x7da1b0ef02b0> if name[runts] begin[:] return[name[newlist]] return[<ast.ListComp object at 0x7da1b26ad270>]
keyword[def] identifier[slice] ( identifier[l] , identifier[num_slices] = keyword[None] , identifier[slice_length] = keyword[None] , identifier[runts] = keyword[True] , identifier[random] = keyword[False] ): literal[string] keyword[if] identifier[random] : keyword[import] identifier[random] identifier[random] . identifier[shuffle] ( identifier[l] ) keyword[if] keyword[not] identifier[num_slices] keyword[and] keyword[not] identifier[slice_length] : keyword[return] identifier[l] keyword[if] keyword[not] identifier[slice_length] : identifier[slice_length] = identifier[int] ( identifier[len] ( identifier[l] )/ identifier[num_slices] ) identifier[newlist] =[ identifier[l] [ identifier[i] : identifier[i] + identifier[slice_length] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[l] ), identifier[slice_length] )] keyword[if] identifier[runts] : keyword[return] identifier[newlist] keyword[return] [ identifier[lx] keyword[for] identifier[lx] keyword[in] identifier[newlist] keyword[if] identifier[len] ( identifier[lx] )== identifier[slice_length] ]
def slice(l, num_slices=None, slice_length=None, runts=True, random=False): """ Returns a new list of n evenly-sized segments of the original list """ if random: import random random.shuffle(l) # depends on [control=['if'], data=[]] if not num_slices and (not slice_length): return l # depends on [control=['if'], data=[]] if not slice_length: slice_length = int(len(l) / num_slices) # depends on [control=['if'], data=[]] newlist = [l[i:i + slice_length] for i in range(0, len(l), slice_length)] if runts: return newlist # depends on [control=['if'], data=[]] return [lx for lx in newlist if len(lx) == slice_length]
def setEditable(self, state):
    """
    Sets whether or not this combobox will be editable, updating its \
    line edit to an XLineEdit if necessary.

    :param state | <bool>
    """
    super(XComboBox, self).setEditable(state)
    if not state:
        return
    current = self.lineEdit()
    # Already the enhanced editor -- nothing to replace.
    if isinstance(current, XLineEdit):
        return
    # Discard any plain QLineEdit Qt installed for us.
    if current:
        current.setParent(None)
        current.deleteLater()
    replacement = XLineEdit(self)
    replacement.setHint(self.hint())
    self.setLineEdit(replacement)
def function[setEditable, parameter[self, state]]: constant[ Sets whether or not this combobox will be editable, updating its line edit to an XLineEdit if necessary. :param state | <bool> ] call[call[name[super], parameter[name[XComboBox], name[self]]].setEditable, parameter[name[state]]] if name[state] begin[:] variable[edit] assign[=] call[name[self].lineEdit, parameter[]] if <ast.BoolOp object at 0x7da18eb54d60> begin[:] return[None] variable[edit] assign[=] call[name[XLineEdit], parameter[name[self]]] call[name[edit].setHint, parameter[call[name[self].hint, parameter[]]]] call[name[self].setLineEdit, parameter[name[edit]]]
keyword[def] identifier[setEditable] ( identifier[self] , identifier[state] ): literal[string] identifier[super] ( identifier[XComboBox] , identifier[self] ). identifier[setEditable] ( identifier[state] ) keyword[if] identifier[state] : identifier[edit] = identifier[self] . identifier[lineEdit] () keyword[if] identifier[edit] keyword[and] identifier[isinstance] ( identifier[edit] , identifier[XLineEdit] ): keyword[return] keyword[elif] identifier[edit] : identifier[edit] . identifier[setParent] ( keyword[None] ) identifier[edit] . identifier[deleteLater] () identifier[edit] = identifier[XLineEdit] ( identifier[self] ) identifier[edit] . identifier[setHint] ( identifier[self] . identifier[hint] ()) identifier[self] . identifier[setLineEdit] ( identifier[edit] )
def setEditable(self, state): """ Sets whether or not this combobox will be editable, updating its line edit to an XLineEdit if necessary. :param state | <bool> """ super(XComboBox, self).setEditable(state) if state: edit = self.lineEdit() if edit and isinstance(edit, XLineEdit): return # depends on [control=['if'], data=[]] elif edit: edit.setParent(None) edit.deleteLater() # depends on [control=['if'], data=[]] edit = XLineEdit(self) edit.setHint(self.hint()) self.setLineEdit(edit) # depends on [control=['if'], data=[]]
def _comparison_functions(cls, partial=False):
    """Retrieve comparison methods to apply on version components.

    This is a private API.

    The five functions returned compare, in order: major, minor, patch,
    prerelease and build components of a version (note: returned as a
    list, despite the historical "5-tuple" wording below).

    Args:
        partial (bool): whether to provide 'partial' or 'strict' matching.
            In partial mode a component that is None on either side
            compares equal to anything (wildcard semantics); only the
            major component remains mandatory.

    Returns:
        5-tuple of cmp-like functions.
    """

    def prerelease_cmp(a, b):
        """Compare prerelease components.

        Special rule: a version without prerelease component has higher
        precedence than one with a prerelease component.
        """
        if a and b:
            return identifier_list_cmp(a, b)
        elif a:
            # Versions with prerelease field have lower precedence
            return -1
        elif b:
            return 1
        else:
            return 0

    def build_cmp(a, b):
        """Compare build metadata.

        Special rule: there is no ordering on build metadata; unequal
        build fields are simply unordered, so NotImplemented defers the
        decision to the caller.
        """
        if a == b:
            return 0
        else:
            return NotImplemented

    def make_optional(orig_cmp_fun):
        """Convert a cmp-like function to consider 'None == *'."""
        @functools.wraps(orig_cmp_fun)
        def alt_cmp_fun(a, b):
            # A missing (None) component acts as a wildcard in partial mode.
            if a is None or b is None:
                return 0
            return orig_cmp_fun(a, b)
        return alt_cmp_fun

    if partial:
        return [
            base_cmp,  # Major is still mandatory
            make_optional(base_cmp),
            make_optional(base_cmp),
            make_optional(prerelease_cmp),
            make_optional(build_cmp),
        ]
    else:
        return [
            base_cmp,
            base_cmp,
            base_cmp,
            prerelease_cmp,
            build_cmp,
        ]
def function[_comparison_functions, parameter[cls, partial]]: constant[Retrieve comparison methods to apply on version components. This is a private API. Args: partial (bool): whether to provide 'partial' or 'strict' matching. Returns: 5-tuple of cmp-like functions. ] def function[prerelease_cmp, parameter[a, b]]: constant[Compare prerelease components. Special rule: a version without prerelease component has higher precedence than one with a prerelease component. ] if <ast.BoolOp object at 0x7da1b1027070> begin[:] return[call[name[identifier_list_cmp], parameter[name[a], name[b]]]] def function[build_cmp, parameter[a, b]]: constant[Compare build metadata. Special rule: there is no ordering on build metadata. ] if compare[name[a] equal[==] name[b]] begin[:] return[constant[0]] def function[make_optional, parameter[orig_cmp_fun]]: constant[Convert a cmp-like function to consider 'None == *'.] def function[alt_cmp_fun, parameter[a, b]]: if <ast.BoolOp object at 0x7da1b1027850> begin[:] return[constant[0]] return[call[name[orig_cmp_fun], parameter[name[a], name[b]]]] return[name[alt_cmp_fun]] if name[partial] begin[:] return[list[[<ast.Name object at 0x7da1b10267a0>, <ast.Call object at 0x7da1b1026800>, <ast.Call object at 0x7da1b1024700>, <ast.Call object at 0x7da1b10258a0>, <ast.Call object at 0x7da1b10268f0>]]]
keyword[def] identifier[_comparison_functions] ( identifier[cls] , identifier[partial] = keyword[False] ): literal[string] keyword[def] identifier[prerelease_cmp] ( identifier[a] , identifier[b] ): literal[string] keyword[if] identifier[a] keyword[and] identifier[b] : keyword[return] identifier[identifier_list_cmp] ( identifier[a] , identifier[b] ) keyword[elif] identifier[a] : keyword[return] - literal[int] keyword[elif] identifier[b] : keyword[return] literal[int] keyword[else] : keyword[return] literal[int] keyword[def] identifier[build_cmp] ( identifier[a] , identifier[b] ): literal[string] keyword[if] identifier[a] == identifier[b] : keyword[return] literal[int] keyword[else] : keyword[return] identifier[NotImplemented] keyword[def] identifier[make_optional] ( identifier[orig_cmp_fun] ): literal[string] @ identifier[functools] . identifier[wraps] ( identifier[orig_cmp_fun] ) keyword[def] identifier[alt_cmp_fun] ( identifier[a] , identifier[b] ): keyword[if] identifier[a] keyword[is] keyword[None] keyword[or] identifier[b] keyword[is] keyword[None] : keyword[return] literal[int] keyword[return] identifier[orig_cmp_fun] ( identifier[a] , identifier[b] ) keyword[return] identifier[alt_cmp_fun] keyword[if] identifier[partial] : keyword[return] [ identifier[base_cmp] , identifier[make_optional] ( identifier[base_cmp] ), identifier[make_optional] ( identifier[base_cmp] ), identifier[make_optional] ( identifier[prerelease_cmp] ), identifier[make_optional] ( identifier[build_cmp] ), ] keyword[else] : keyword[return] [ identifier[base_cmp] , identifier[base_cmp] , identifier[base_cmp] , identifier[prerelease_cmp] , identifier[build_cmp] , ]
def _comparison_functions(cls, partial=False): """Retrieve comparison methods to apply on version components. This is a private API. Args: partial (bool): whether to provide 'partial' or 'strict' matching. Returns: 5-tuple of cmp-like functions. """ def prerelease_cmp(a, b): """Compare prerelease components. Special rule: a version without prerelease component has higher precedence than one with a prerelease component. """ if a and b: return identifier_list_cmp(a, b) # depends on [control=['if'], data=[]] elif a: # Versions with prerelease field have lower precedence return -1 # depends on [control=['if'], data=[]] elif b: return 1 # depends on [control=['if'], data=[]] else: return 0 def build_cmp(a, b): """Compare build metadata. Special rule: there is no ordering on build metadata. """ if a == b: return 0 # depends on [control=['if'], data=[]] else: return NotImplemented def make_optional(orig_cmp_fun): """Convert a cmp-like function to consider 'None == *'.""" @functools.wraps(orig_cmp_fun) def alt_cmp_fun(a, b): if a is None or b is None: return 0 # depends on [control=['if'], data=[]] return orig_cmp_fun(a, b) return alt_cmp_fun if partial: # Major is still mandatory return [base_cmp, make_optional(base_cmp), make_optional(base_cmp), make_optional(prerelease_cmp), make_optional(build_cmp)] # depends on [control=['if'], data=[]] else: return [base_cmp, base_cmp, base_cmp, prerelease_cmp, build_cmp]
def scan(self, paths=None, depth=2): """scan media files in all paths """ song_exts = ['mp3', 'ogg', 'wma', 'm4a'] exts = song_exts paths = paths or [Library.DEFAULT_MUSIC_FOLDER] depth = depth if depth <= 3 else 3 media_files = [] for directory in paths: logger.debug('正在扫描目录(%s)...', directory) media_files.extend(scan_directory(directory, exts, depth)) logger.info('共扫描到 %d 个音乐文件,准备将其录入本地音乐库', len(media_files)) for fpath in media_files: add_song(fpath, self._songs, self._artists, self._albums) logger.info('录入本地音乐库完毕')
def function[scan, parameter[self, paths, depth]]: constant[scan media files in all paths ] variable[song_exts] assign[=] list[[<ast.Constant object at 0x7da204565ba0>, <ast.Constant object at 0x7da204564670>, <ast.Constant object at 0x7da204565f00>, <ast.Constant object at 0x7da204564250>]] variable[exts] assign[=] name[song_exts] variable[paths] assign[=] <ast.BoolOp object at 0x7da18dc059c0> variable[depth] assign[=] <ast.IfExp object at 0x7da18dc077c0> variable[media_files] assign[=] list[[]] for taget[name[directory]] in starred[name[paths]] begin[:] call[name[logger].debug, parameter[constant[正在扫描目录(%s)...], name[directory]]] call[name[media_files].extend, parameter[call[name[scan_directory], parameter[name[directory], name[exts], name[depth]]]]] call[name[logger].info, parameter[constant[共扫描到 %d 个音乐文件,准备将其录入本地音乐库], call[name[len], parameter[name[media_files]]]]] for taget[name[fpath]] in starred[name[media_files]] begin[:] call[name[add_song], parameter[name[fpath], name[self]._songs, name[self]._artists, name[self]._albums]] call[name[logger].info, parameter[constant[录入本地音乐库完毕]]]
keyword[def] identifier[scan] ( identifier[self] , identifier[paths] = keyword[None] , identifier[depth] = literal[int] ): literal[string] identifier[song_exts] =[ literal[string] , literal[string] , literal[string] , literal[string] ] identifier[exts] = identifier[song_exts] identifier[paths] = identifier[paths] keyword[or] [ identifier[Library] . identifier[DEFAULT_MUSIC_FOLDER] ] identifier[depth] = identifier[depth] keyword[if] identifier[depth] <= literal[int] keyword[else] literal[int] identifier[media_files] =[] keyword[for] identifier[directory] keyword[in] identifier[paths] : identifier[logger] . identifier[debug] ( literal[string] , identifier[directory] ) identifier[media_files] . identifier[extend] ( identifier[scan_directory] ( identifier[directory] , identifier[exts] , identifier[depth] )) identifier[logger] . identifier[info] ( literal[string] , identifier[len] ( identifier[media_files] )) keyword[for] identifier[fpath] keyword[in] identifier[media_files] : identifier[add_song] ( identifier[fpath] , identifier[self] . identifier[_songs] , identifier[self] . identifier[_artists] , identifier[self] . identifier[_albums] ) identifier[logger] . identifier[info] ( literal[string] )
def scan(self, paths=None, depth=2): """scan media files in all paths """ song_exts = ['mp3', 'ogg', 'wma', 'm4a'] exts = song_exts paths = paths or [Library.DEFAULT_MUSIC_FOLDER] depth = depth if depth <= 3 else 3 media_files = [] for directory in paths: logger.debug('正在扫描目录(%s)...', directory) media_files.extend(scan_directory(directory, exts, depth)) # depends on [control=['for'], data=['directory']] logger.info('共扫描到 %d 个音乐文件,准备将其录入本地音乐库', len(media_files)) for fpath in media_files: add_song(fpath, self._songs, self._artists, self._albums) # depends on [control=['for'], data=['fpath']] logger.info('录入本地音乐库完毕')
def _search_generator(self, item: Any, reverse: bool = False) -> Generator[Any, None, None]: """A helper method for `self.search` that returns a generator rather than a list.""" results = 0 for _, x in self.enumerate(item, reverse=reverse): yield x results += 1 if results == 0: raise SearchError(str(item))
def function[_search_generator, parameter[self, item, reverse]]: constant[A helper method for `self.search` that returns a generator rather than a list.] variable[results] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da1b11c0bb0>, <ast.Name object at 0x7da1b11c1390>]]] in starred[call[name[self].enumerate, parameter[name[item]]]] begin[:] <ast.Yield object at 0x7da1b11c06a0> <ast.AugAssign object at 0x7da1b11c1300> if compare[name[results] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b11c21d0>
keyword[def] identifier[_search_generator] ( identifier[self] , identifier[item] : identifier[Any] , identifier[reverse] : identifier[bool] = keyword[False] )-> identifier[Generator] [ identifier[Any] , keyword[None] , keyword[None] ]: literal[string] identifier[results] = literal[int] keyword[for] identifier[_] , identifier[x] keyword[in] identifier[self] . identifier[enumerate] ( identifier[item] , identifier[reverse] = identifier[reverse] ): keyword[yield] identifier[x] identifier[results] += literal[int] keyword[if] identifier[results] == literal[int] : keyword[raise] identifier[SearchError] ( identifier[str] ( identifier[item] ))
def _search_generator(self, item: Any, reverse: bool=False) -> Generator[Any, None, None]: """A helper method for `self.search` that returns a generator rather than a list.""" results = 0 for (_, x) in self.enumerate(item, reverse=reverse): yield x results += 1 # depends on [control=['for'], data=[]] if results == 0: raise SearchError(str(item)) # depends on [control=['if'], data=[]]
def _ListActiveBreakpoints(self, service): """Single attempt query the list of active breakpoints. Must not be called before the debuggee has been registered. If the request fails, this function resets self._debuggee_id, which triggers repeated debuggee registration. Args: service: client to use for API calls Returns: (registration_required, delay) tuple """ try: response = service.debuggees().breakpoints().list( debuggeeId=self._debuggee_id, waitToken=self._wait_token, successOnTimeout=True).execute() if not response.get('waitExpired'): self._wait_token = response.get('nextWaitToken') breakpoints = response.get('breakpoints') or [] if self._breakpoints != breakpoints: self._breakpoints = breakpoints native.LogInfo( 'Breakpoints list changed, %d active, wait token: %s' % ( len(self._breakpoints), self._wait_token)) self.on_active_breakpoints_changed(copy.deepcopy(self._breakpoints)) except BaseException: native.LogInfo('Failed to query active breakpoints: ' + traceback.format_exc()) # Forget debuggee ID to trigger repeated debuggee registration. Once the # registration succeeds, the worker thread will retry this query self._debuggee_id = None return (True, self.list_backoff.Failed()) self.list_backoff.Succeeded() return (False, 0)
def function[_ListActiveBreakpoints, parameter[self, service]]: constant[Single attempt query the list of active breakpoints. Must not be called before the debuggee has been registered. If the request fails, this function resets self._debuggee_id, which triggers repeated debuggee registration. Args: service: client to use for API calls Returns: (registration_required, delay) tuple ] <ast.Try object at 0x7da204960490> call[name[self].list_backoff.Succeeded, parameter[]] return[tuple[[<ast.Constant object at 0x7da20c6aaec0>, <ast.Constant object at 0x7da20c6a8e80>]]]
keyword[def] identifier[_ListActiveBreakpoints] ( identifier[self] , identifier[service] ): literal[string] keyword[try] : identifier[response] = identifier[service] . identifier[debuggees] (). identifier[breakpoints] (). identifier[list] ( identifier[debuggeeId] = identifier[self] . identifier[_debuggee_id] , identifier[waitToken] = identifier[self] . identifier[_wait_token] , identifier[successOnTimeout] = keyword[True] ). identifier[execute] () keyword[if] keyword[not] identifier[response] . identifier[get] ( literal[string] ): identifier[self] . identifier[_wait_token] = identifier[response] . identifier[get] ( literal[string] ) identifier[breakpoints] = identifier[response] . identifier[get] ( literal[string] ) keyword[or] [] keyword[if] identifier[self] . identifier[_breakpoints] != identifier[breakpoints] : identifier[self] . identifier[_breakpoints] = identifier[breakpoints] identifier[native] . identifier[LogInfo] ( literal[string] %( identifier[len] ( identifier[self] . identifier[_breakpoints] ), identifier[self] . identifier[_wait_token] )) identifier[self] . identifier[on_active_breakpoints_changed] ( identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[_breakpoints] )) keyword[except] identifier[BaseException] : identifier[native] . identifier[LogInfo] ( literal[string] + identifier[traceback] . identifier[format_exc] ()) identifier[self] . identifier[_debuggee_id] = keyword[None] keyword[return] ( keyword[True] , identifier[self] . identifier[list_backoff] . identifier[Failed] ()) identifier[self] . identifier[list_backoff] . identifier[Succeeded] () keyword[return] ( keyword[False] , literal[int] )
def _ListActiveBreakpoints(self, service): """Single attempt query the list of active breakpoints. Must not be called before the debuggee has been registered. If the request fails, this function resets self._debuggee_id, which triggers repeated debuggee registration. Args: service: client to use for API calls Returns: (registration_required, delay) tuple """ try: response = service.debuggees().breakpoints().list(debuggeeId=self._debuggee_id, waitToken=self._wait_token, successOnTimeout=True).execute() if not response.get('waitExpired'): self._wait_token = response.get('nextWaitToken') breakpoints = response.get('breakpoints') or [] if self._breakpoints != breakpoints: self._breakpoints = breakpoints native.LogInfo('Breakpoints list changed, %d active, wait token: %s' % (len(self._breakpoints), self._wait_token)) self.on_active_breakpoints_changed(copy.deepcopy(self._breakpoints)) # depends on [control=['if'], data=['breakpoints']] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except BaseException: native.LogInfo('Failed to query active breakpoints: ' + traceback.format_exc()) # Forget debuggee ID to trigger repeated debuggee registration. Once the # registration succeeds, the worker thread will retry this query self._debuggee_id = None return (True, self.list_backoff.Failed()) # depends on [control=['except'], data=[]] self.list_backoff.Succeeded() return (False, 0)
def get_ready_user_tasks(self): """ Returns a list of User Tasks that are READY for user action """ return [t for t in self.get_tasks(Task.READY) if not self._is_engine_task(t.task_spec)]
def function[get_ready_user_tasks, parameter[self]]: constant[ Returns a list of User Tasks that are READY for user action ] return[<ast.ListComp object at 0x7da1b0121b40>]
keyword[def] identifier[get_ready_user_tasks] ( identifier[self] ): literal[string] keyword[return] [ identifier[t] keyword[for] identifier[t] keyword[in] identifier[self] . identifier[get_tasks] ( identifier[Task] . identifier[READY] ) keyword[if] keyword[not] identifier[self] . identifier[_is_engine_task] ( identifier[t] . identifier[task_spec] )]
def get_ready_user_tasks(self): """ Returns a list of User Tasks that are READY for user action """ return [t for t in self.get_tasks(Task.READY) if not self._is_engine_task(t.task_spec)]
def ep_style_string(self): """Serialize object to an EnerygPlus SizingPeriod:DesignDay. Returns: ep_string: A full string representing a SizingPeriod:DesignDay. """ # Put together the values in the order that they exist in the ddy file ep_vals = [self.name, self.sky_condition.month, self.sky_condition.day_of_month, self.day_type, self.dry_bulb_condition.dry_bulb_max, self.dry_bulb_condition.dry_bulb_range, self.dry_bulb_condition.modifier_type, self.dry_bulb_condition.modifier_schedule, self.humidity_condition.hum_type, '', self.humidity_condition.schedule, '', '', self.humidity_condition.wet_bulb_range, self.humidity_condition.barometric_pressure, self.wind_condition.wind_speed, self.wind_condition.wind_direction, self.wind_condition.rain, self.wind_condition.snow_on_ground, self.sky_condition.daylight_savings_indicator, self.sky_condition.solar_model, self.sky_condition.beam_shced, self.sky_condition.diff_sched, '', '', ''] # assign humidity values based on the type of criteria if self.humidity_condition.hum_type == 'Wetbulb' or \ self.humidity_condition.hum_type == 'Dewpoint': ep_vals[9] = self.humidity_condition.hum_value elif self.humidity_condition.hum_type == 'HumidityRatio': ep_vals[11] = self.humidity_condition.hum_value elif self.humidity_condition.hum_type == 'Enthalpy': ep_vals[12] = self.humidity_condition.hum_value # assign sky condition values based on the solar model if self.sky_condition.solar_model == 'ASHRAEClearSky': ep_vals[25] = self.sky_condition.clearness if self.sky_condition.solar_model == 'ASHRAETau': ep_vals[23] = self.sky_condition.tau_b ep_vals[24] = self.sky_condition._tau_d ep_vals.pop() # put everything together into one string comented_str = [' {},{}{}\n'.format( str(val), ' ' * (60 - len(str(val))), self.comments[i]) for i, val in enumerate(ep_vals)] comented_str[-1] = comented_str[-1].replace(',', ';') comented_str.insert(0, 'SizingPeriod:DesignDay,\n') comented_str.append('\n') return ''.join(comented_str)
def function[ep_style_string, parameter[self]]: constant[Serialize object to an EnerygPlus SizingPeriod:DesignDay. Returns: ep_string: A full string representing a SizingPeriod:DesignDay. ] variable[ep_vals] assign[=] list[[<ast.Attribute object at 0x7da1b1250df0>, <ast.Attribute object at 0x7da1b1250b20>, <ast.Attribute object at 0x7da1b1252830>, <ast.Attribute object at 0x7da1b12521a0>, <ast.Attribute object at 0x7da1b1251a80>, <ast.Attribute object at 0x7da1b1250fd0>, <ast.Attribute object at 0x7da1b1253490>, <ast.Attribute object at 0x7da1b12516c0>, <ast.Attribute object at 0x7da1b12504c0>, <ast.Constant object at 0x7da1b1253190>, <ast.Attribute object at 0x7da1b1253610>, <ast.Constant object at 0x7da1b1252350>, <ast.Constant object at 0x7da1b1251d80>, <ast.Attribute object at 0x7da1b12531c0>, <ast.Attribute object at 0x7da1b1251ae0>, <ast.Attribute object at 0x7da1b1250fa0>, <ast.Attribute object at 0x7da1b12524d0>, <ast.Attribute object at 0x7da1b12513f0>, <ast.Attribute object at 0x7da1b12527a0>, <ast.Attribute object at 0x7da1b1251f60>, <ast.Attribute object at 0x7da1b1252470>, <ast.Attribute object at 0x7da1b1252c20>, <ast.Attribute object at 0x7da1b1252ec0>, <ast.Constant object at 0x7da1b1253d60>, <ast.Constant object at 0x7da1b12438e0>, <ast.Constant object at 0x7da1b12422f0>]] if <ast.BoolOp object at 0x7da1b1243850> begin[:] call[name[ep_vals]][constant[9]] assign[=] name[self].humidity_condition.hum_value if compare[name[self].sky_condition.solar_model equal[==] constant[ASHRAEClearSky]] begin[:] call[name[ep_vals]][constant[25]] assign[=] name[self].sky_condition.clearness if compare[name[self].sky_condition.solar_model equal[==] constant[ASHRAETau]] begin[:] call[name[ep_vals]][constant[23]] assign[=] name[self].sky_condition.tau_b call[name[ep_vals]][constant[24]] assign[=] name[self].sky_condition._tau_d call[name[ep_vals].pop, parameter[]] variable[comented_str] assign[=] <ast.ListComp object at 0x7da1b1214580> 
call[name[comented_str]][<ast.UnaryOp object at 0x7da1b12501f0>] assign[=] call[call[name[comented_str]][<ast.UnaryOp object at 0x7da1b1252d40>].replace, parameter[constant[,], constant[;]]] call[name[comented_str].insert, parameter[constant[0], constant[SizingPeriod:DesignDay, ]]] call[name[comented_str].append, parameter[constant[ ]]] return[call[constant[].join, parameter[name[comented_str]]]]
keyword[def] identifier[ep_style_string] ( identifier[self] ): literal[string] identifier[ep_vals] =[ identifier[self] . identifier[name] , identifier[self] . identifier[sky_condition] . identifier[month] , identifier[self] . identifier[sky_condition] . identifier[day_of_month] , identifier[self] . identifier[day_type] , identifier[self] . identifier[dry_bulb_condition] . identifier[dry_bulb_max] , identifier[self] . identifier[dry_bulb_condition] . identifier[dry_bulb_range] , identifier[self] . identifier[dry_bulb_condition] . identifier[modifier_type] , identifier[self] . identifier[dry_bulb_condition] . identifier[modifier_schedule] , identifier[self] . identifier[humidity_condition] . identifier[hum_type] , literal[string] , identifier[self] . identifier[humidity_condition] . identifier[schedule] , literal[string] , literal[string] , identifier[self] . identifier[humidity_condition] . identifier[wet_bulb_range] , identifier[self] . identifier[humidity_condition] . identifier[barometric_pressure] , identifier[self] . identifier[wind_condition] . identifier[wind_speed] , identifier[self] . identifier[wind_condition] . identifier[wind_direction] , identifier[self] . identifier[wind_condition] . identifier[rain] , identifier[self] . identifier[wind_condition] . identifier[snow_on_ground] , identifier[self] . identifier[sky_condition] . identifier[daylight_savings_indicator] , identifier[self] . identifier[sky_condition] . identifier[solar_model] , identifier[self] . identifier[sky_condition] . identifier[beam_shced] , identifier[self] . identifier[sky_condition] . identifier[diff_sched] , literal[string] , literal[string] , literal[string] ] keyword[if] identifier[self] . identifier[humidity_condition] . identifier[hum_type] == literal[string] keyword[or] identifier[self] . identifier[humidity_condition] . identifier[hum_type] == literal[string] : identifier[ep_vals] [ literal[int] ]= identifier[self] . identifier[humidity_condition] . 
identifier[hum_value] keyword[elif] identifier[self] . identifier[humidity_condition] . identifier[hum_type] == literal[string] : identifier[ep_vals] [ literal[int] ]= identifier[self] . identifier[humidity_condition] . identifier[hum_value] keyword[elif] identifier[self] . identifier[humidity_condition] . identifier[hum_type] == literal[string] : identifier[ep_vals] [ literal[int] ]= identifier[self] . identifier[humidity_condition] . identifier[hum_value] keyword[if] identifier[self] . identifier[sky_condition] . identifier[solar_model] == literal[string] : identifier[ep_vals] [ literal[int] ]= identifier[self] . identifier[sky_condition] . identifier[clearness] keyword[if] identifier[self] . identifier[sky_condition] . identifier[solar_model] == literal[string] : identifier[ep_vals] [ literal[int] ]= identifier[self] . identifier[sky_condition] . identifier[tau_b] identifier[ep_vals] [ literal[int] ]= identifier[self] . identifier[sky_condition] . identifier[_tau_d] identifier[ep_vals] . identifier[pop] () identifier[comented_str] =[ literal[string] . identifier[format] ( identifier[str] ( identifier[val] ), literal[string] *( literal[int] - identifier[len] ( identifier[str] ( identifier[val] ))), identifier[self] . identifier[comments] [ identifier[i] ]) keyword[for] identifier[i] , identifier[val] keyword[in] identifier[enumerate] ( identifier[ep_vals] )] identifier[comented_str] [- literal[int] ]= identifier[comented_str] [- literal[int] ]. identifier[replace] ( literal[string] , literal[string] ) identifier[comented_str] . identifier[insert] ( literal[int] , literal[string] ) identifier[comented_str] . identifier[append] ( literal[string] ) keyword[return] literal[string] . identifier[join] ( identifier[comented_str] )
def ep_style_string(self): """Serialize object to an EnerygPlus SizingPeriod:DesignDay. Returns: ep_string: A full string representing a SizingPeriod:DesignDay. """ # Put together the values in the order that they exist in the ddy file ep_vals = [self.name, self.sky_condition.month, self.sky_condition.day_of_month, self.day_type, self.dry_bulb_condition.dry_bulb_max, self.dry_bulb_condition.dry_bulb_range, self.dry_bulb_condition.modifier_type, self.dry_bulb_condition.modifier_schedule, self.humidity_condition.hum_type, '', self.humidity_condition.schedule, '', '', self.humidity_condition.wet_bulb_range, self.humidity_condition.barometric_pressure, self.wind_condition.wind_speed, self.wind_condition.wind_direction, self.wind_condition.rain, self.wind_condition.snow_on_ground, self.sky_condition.daylight_savings_indicator, self.sky_condition.solar_model, self.sky_condition.beam_shced, self.sky_condition.diff_sched, '', '', ''] # assign humidity values based on the type of criteria if self.humidity_condition.hum_type == 'Wetbulb' or self.humidity_condition.hum_type == 'Dewpoint': ep_vals[9] = self.humidity_condition.hum_value # depends on [control=['if'], data=[]] elif self.humidity_condition.hum_type == 'HumidityRatio': ep_vals[11] = self.humidity_condition.hum_value # depends on [control=['if'], data=[]] elif self.humidity_condition.hum_type == 'Enthalpy': ep_vals[12] = self.humidity_condition.hum_value # depends on [control=['if'], data=[]] # assign sky condition values based on the solar model if self.sky_condition.solar_model == 'ASHRAEClearSky': ep_vals[25] = self.sky_condition.clearness # depends on [control=['if'], data=[]] if self.sky_condition.solar_model == 'ASHRAETau': ep_vals[23] = self.sky_condition.tau_b ep_vals[24] = self.sky_condition._tau_d ep_vals.pop() # depends on [control=['if'], data=[]] # put everything together into one string comented_str = [' {},{}{}\n'.format(str(val), ' ' * (60 - len(str(val))), self.comments[i]) for (i, val) in 
enumerate(ep_vals)] comented_str[-1] = comented_str[-1].replace(',', ';') comented_str.insert(0, 'SizingPeriod:DesignDay,\n') comented_str.append('\n') return ''.join(comented_str)
def get_route(self): """ Creates a session to find the URL for the loci and schemes """ # Create a new session session = OAuth1Session(self.consumer_key, self.consumer_secret, access_token=self.session_token, access_token_secret=self.session_secret) # Use the test URL in the GET request r = session.get(self.test_rest_url) if r.status_code == 200 or r.status_code == 201: if re.search('json', r.headers['content-type'], flags=0): decoded = r.json() else: decoded = r.text # Extract the URLs from the returned data self.loci = decoded['loci'] self.profile = decoded['schemes']
def function[get_route, parameter[self]]: constant[ Creates a session to find the URL for the loci and schemes ] variable[session] assign[=] call[name[OAuth1Session], parameter[name[self].consumer_key, name[self].consumer_secret]] variable[r] assign[=] call[name[session].get, parameter[name[self].test_rest_url]] if <ast.BoolOp object at 0x7da1b1fba8c0> begin[:] if call[name[re].search, parameter[constant[json], call[name[r].headers][constant[content-type]]]] begin[:] variable[decoded] assign[=] call[name[r].json, parameter[]] name[self].loci assign[=] call[name[decoded]][constant[loci]] name[self].profile assign[=] call[name[decoded]][constant[schemes]]
keyword[def] identifier[get_route] ( identifier[self] ): literal[string] identifier[session] = identifier[OAuth1Session] ( identifier[self] . identifier[consumer_key] , identifier[self] . identifier[consumer_secret] , identifier[access_token] = identifier[self] . identifier[session_token] , identifier[access_token_secret] = identifier[self] . identifier[session_secret] ) identifier[r] = identifier[session] . identifier[get] ( identifier[self] . identifier[test_rest_url] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] keyword[or] identifier[r] . identifier[status_code] == literal[int] : keyword[if] identifier[re] . identifier[search] ( literal[string] , identifier[r] . identifier[headers] [ literal[string] ], identifier[flags] = literal[int] ): identifier[decoded] = identifier[r] . identifier[json] () keyword[else] : identifier[decoded] = identifier[r] . identifier[text] identifier[self] . identifier[loci] = identifier[decoded] [ literal[string] ] identifier[self] . identifier[profile] = identifier[decoded] [ literal[string] ]
def get_route(self): """ Creates a session to find the URL for the loci and schemes """ # Create a new session session = OAuth1Session(self.consumer_key, self.consumer_secret, access_token=self.session_token, access_token_secret=self.session_secret) # Use the test URL in the GET request r = session.get(self.test_rest_url) if r.status_code == 200 or r.status_code == 201: if re.search('json', r.headers['content-type'], flags=0): decoded = r.json() # depends on [control=['if'], data=[]] else: decoded = r.text # Extract the URLs from the returned data self.loci = decoded['loci'] self.profile = decoded['schemes'] # depends on [control=['if'], data=[]]
def create(pattern, casesensitive=True): """Factory for :class:`Matcher` instances; returns a :class:`Matcher` suitable for matching the supplied pattern""" casesensitive = determine_casesensitive(casesensitive) if "?" in pattern or "*" in pattern: return FNMatcher(pattern, casesensitive) else: return ConstantMatcher(pattern, casesensitive)
def function[create, parameter[pattern, casesensitive]]: constant[Factory for :class:`Matcher` instances; returns a :class:`Matcher` suitable for matching the supplied pattern] variable[casesensitive] assign[=] call[name[determine_casesensitive], parameter[name[casesensitive]]] if <ast.BoolOp object at 0x7da1b0ae2260> begin[:] return[call[name[FNMatcher], parameter[name[pattern], name[casesensitive]]]]
keyword[def] identifier[create] ( identifier[pattern] , identifier[casesensitive] = keyword[True] ): literal[string] identifier[casesensitive] = identifier[determine_casesensitive] ( identifier[casesensitive] ) keyword[if] literal[string] keyword[in] identifier[pattern] keyword[or] literal[string] keyword[in] identifier[pattern] : keyword[return] identifier[FNMatcher] ( identifier[pattern] , identifier[casesensitive] ) keyword[else] : keyword[return] identifier[ConstantMatcher] ( identifier[pattern] , identifier[casesensitive] )
def create(pattern, casesensitive=True): """Factory for :class:`Matcher` instances; returns a :class:`Matcher` suitable for matching the supplied pattern""" casesensitive = determine_casesensitive(casesensitive) if '?' in pattern or '*' in pattern: return FNMatcher(pattern, casesensitive) # depends on [control=['if'], data=[]] else: return ConstantMatcher(pattern, casesensitive)
def readTupleQuotes(self, symbol, start, end): ''' read quotes as tuple ''' if end is None: end=sys.maxint session=self.getReadSession()() try: rows=session.query(Quote).filter(and_(Quote.symbol == symbol, Quote.time >= int(start), Quote.time < int(end))) finally: self.getReadSession().remove() return rows
def function[readTupleQuotes, parameter[self, symbol, start, end]]: constant[ read quotes as tuple ] if compare[name[end] is constant[None]] begin[:] variable[end] assign[=] name[sys].maxint variable[session] assign[=] call[call[name[self].getReadSession, parameter[]], parameter[]] <ast.Try object at 0x7da1b0a4f2b0> return[name[rows]]
keyword[def] identifier[readTupleQuotes] ( identifier[self] , identifier[symbol] , identifier[start] , identifier[end] ): literal[string] keyword[if] identifier[end] keyword[is] keyword[None] : identifier[end] = identifier[sys] . identifier[maxint] identifier[session] = identifier[self] . identifier[getReadSession] ()() keyword[try] : identifier[rows] = identifier[session] . identifier[query] ( identifier[Quote] ). identifier[filter] ( identifier[and_] ( identifier[Quote] . identifier[symbol] == identifier[symbol] , identifier[Quote] . identifier[time] >= identifier[int] ( identifier[start] ), identifier[Quote] . identifier[time] < identifier[int] ( identifier[end] ))) keyword[finally] : identifier[self] . identifier[getReadSession] (). identifier[remove] () keyword[return] identifier[rows]
def readTupleQuotes(self, symbol, start, end): """ read quotes as tuple """ if end is None: end = sys.maxint # depends on [control=['if'], data=['end']] session = self.getReadSession()() try: rows = session.query(Quote).filter(and_(Quote.symbol == symbol, Quote.time >= int(start), Quote.time < int(end))) # depends on [control=['try'], data=[]] finally: self.getReadSession().remove() return rows
def set_global_log_level(level):
    """
    Set the global log level on all loggers instantiated by txaio.

    The level is applied to every already-created logger and remembered in
    the module-global ``_log_level`` so later loggers can pick it up.
    """
    global _log_level
    for existing_logger in _loggers:
        existing_logger._set_log_level(level)
    _log_level = level
def function[set_global_log_level, parameter[level]]: constant[ Set the global log level on all loggers instantiated by txaio. ] for taget[name[logger]] in starred[name[_loggers]] begin[:] call[name[logger]._set_log_level, parameter[name[level]]] <ast.Global object at 0x7da20e9545e0> variable[_log_level] assign[=] name[level]
keyword[def] identifier[set_global_log_level] ( identifier[level] ): literal[string] keyword[for] identifier[logger] keyword[in] identifier[_loggers] : identifier[logger] . identifier[_set_log_level] ( identifier[level] ) keyword[global] identifier[_log_level] identifier[_log_level] = identifier[level]
def set_global_log_level(level): """ Set the global log level on all loggers instantiated by txaio. """ for logger in _loggers: logger._set_log_level(level) # depends on [control=['for'], data=['logger']] global _log_level _log_level = level
def make_dict(obj):
    """Recursively convert a non-builtin object into a plain dictionary.

    Builtins and ``OrderedDict`` instances are returned unchanged.  For any
    other object, each non-dunder, non-callable attribute is converted
    recursively; list attributes are converted element by element.
    """
    # Recursion base case: nothing to unpack for builtins / OrderedDicts.
    if is_builtin(obj) or isinstance(obj, OrderedDict):
        return obj

    result = {}
    for name in dir(obj):
        if name.startswith('__'):
            continue
        value = getattr(obj, name)
        if callable(value):
            continue
        if isinstance(value, list):
            result[name] = [make_dict(element) for element in value]
        else:
            result[name] = make_dict(value)
    # All done
    return result
def function[make_dict, parameter[obj]]: constant[This method creates a dictionary out of a non-builtin object] if <ast.BoolOp object at 0x7da1b1a3d600> begin[:] return[name[obj]] variable[output_dict] assign[=] dictionary[[], []] for taget[name[key]] in starred[call[name[dir], parameter[name[obj]]]] begin[:] if <ast.BoolOp object at 0x7da1b1950c40> begin[:] variable[attr] assign[=] call[name[getattr], parameter[name[obj], name[key]]] if call[name[isinstance], parameter[name[attr], name[list]]] begin[:] call[name[output_dict]][name[key]] assign[=] list[[]] for taget[name[item]] in starred[name[attr]] begin[:] call[call[name[output_dict]][name[key]].append, parameter[call[name[make_dict], parameter[name[item]]]]] return[name[output_dict]]
keyword[def] identifier[make_dict] ( identifier[obj] ): literal[string] keyword[if] identifier[is_builtin] ( identifier[obj] ) keyword[or] identifier[isinstance] ( identifier[obj] , identifier[OrderedDict] ): keyword[return] identifier[obj] identifier[output_dict] ={} keyword[for] identifier[key] keyword[in] identifier[dir] ( identifier[obj] ): keyword[if] keyword[not] identifier[key] . identifier[startswith] ( literal[string] ) keyword[and] keyword[not] identifier[callable] ( identifier[getattr] ( identifier[obj] , identifier[key] )): identifier[attr] = identifier[getattr] ( identifier[obj] , identifier[key] ) keyword[if] identifier[isinstance] ( identifier[attr] , identifier[list] ): identifier[output_dict] [ identifier[key] ]=[] keyword[for] identifier[item] keyword[in] identifier[attr] : identifier[output_dict] [ identifier[key] ]. identifier[append] ( identifier[make_dict] ( identifier[item] )) keyword[else] : identifier[output_dict] [ identifier[key] ]= identifier[make_dict] ( identifier[attr] ) keyword[return] identifier[output_dict]
def make_dict(obj): """This method creates a dictionary out of a non-builtin object""" # Recursion base case if is_builtin(obj) or isinstance(obj, OrderedDict): return obj # depends on [control=['if'], data=[]] output_dict = {} for key in dir(obj): if not key.startswith('__') and (not callable(getattr(obj, key))): attr = getattr(obj, key) if isinstance(attr, list): output_dict[key] = [] for item in attr: output_dict[key].append(make_dict(item)) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=[]] else: output_dict[key] = make_dict(attr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # All done return output_dict
def validate_args(args):
    """Basic option validation.

    Prints an explanatory error message and returns ``False`` when the
    options are inconsistent or malformed; returns ``True`` otherwise.

    :param args: the command line options
    :type args: map
    """
    example_hint = "Example format: '2015-11-26 11:00:00'"

    has_minutes = bool(args.minutes)
    has_start = bool(args.start_time)
    has_end = bool(args.end_time)

    # Exactly one of --minutes / --start-time must be given.
    if not has_minutes and not has_start:
        print("Error: missing --minutes or --start-time")
        return False
    if has_minutes and has_start:
        print("Error: --minutes shouldn't be specified if --start-time is used")
        return False
    # --end-time only makes sense together with --start-time.
    if has_end and not has_start:
        print("Error: --end-time can't be used without --start-time")
        return False
    if has_minutes and args.minutes <= 0:
        print("Error: --minutes must be > 0")
        return False
    # Both timestamps must match the expected format.
    if has_start and not TIME_FORMAT_REGEX.match(args.start_time):
        print("Error: --start-time format is not valid")
        print(example_hint)
        return False
    if has_end and not TIME_FORMAT_REGEX.match(args.end_time):
        print("Error: --end-time format is not valid")
        print(example_hint)
        return False
    if args.batch_size <= 0:
        print("Error: --batch-size must be > 0")
        return False
    return True
def function[validate_args, parameter[args]]: constant[Basic option validation. Returns False if the options are not valid, True otherwise. :param args: the command line options :type args: map :param brokers_num: the number of brokers ] if <ast.BoolOp object at 0x7da1b07b03a0> begin[:] call[name[print], parameter[constant[Error: missing --minutes or --start-time]]] return[constant[False]] if <ast.BoolOp object at 0x7da1b07b01c0> begin[:] call[name[print], parameter[constant[Error: --minutes shouldn't be specified if --start-time is used]]] return[constant[False]] if <ast.BoolOp object at 0x7da1b07b2ad0> begin[:] call[name[print], parameter[constant[Error: --end-time can't be used without --start-time]]] return[constant[False]] if <ast.BoolOp object at 0x7da1b07b3130> begin[:] call[name[print], parameter[constant[Error: --minutes must be > 0]]] return[constant[False]] if <ast.BoolOp object at 0x7da1b07b3d60> begin[:] call[name[print], parameter[constant[Error: --start-time format is not valid]]] call[name[print], parameter[constant[Example format: '2015-11-26 11:00:00']]] return[constant[False]] if <ast.BoolOp object at 0x7da1b07b2cb0> begin[:] call[name[print], parameter[constant[Error: --end-time format is not valid]]] call[name[print], parameter[constant[Example format: '2015-11-26 11:00:00']]] return[constant[False]] if compare[name[args].batch_size less_or_equal[<=] constant[0]] begin[:] call[name[print], parameter[constant[Error: --batch-size must be > 0]]] return[constant[False]] return[constant[True]]
keyword[def] identifier[validate_args] ( identifier[args] ): literal[string] keyword[if] keyword[not] identifier[args] . identifier[minutes] keyword[and] keyword[not] identifier[args] . identifier[start_time] : identifier[print] ( literal[string] ) keyword[return] keyword[False] keyword[if] identifier[args] . identifier[minutes] keyword[and] identifier[args] . identifier[start_time] : identifier[print] ( literal[string] ) keyword[return] keyword[False] keyword[if] identifier[args] . identifier[end_time] keyword[and] keyword[not] identifier[args] . identifier[start_time] : identifier[print] ( literal[string] ) keyword[return] keyword[False] keyword[if] identifier[args] . identifier[minutes] keyword[and] identifier[args] . identifier[minutes] <= literal[int] : identifier[print] ( literal[string] ) keyword[return] keyword[False] keyword[if] identifier[args] . identifier[start_time] keyword[and] keyword[not] identifier[TIME_FORMAT_REGEX] . identifier[match] ( identifier[args] . identifier[start_time] ): identifier[print] ( literal[string] ) identifier[print] ( literal[string] ) keyword[return] keyword[False] keyword[if] identifier[args] . identifier[end_time] keyword[and] keyword[not] identifier[TIME_FORMAT_REGEX] . identifier[match] ( identifier[args] . identifier[end_time] ): identifier[print] ( literal[string] ) identifier[print] ( literal[string] ) keyword[return] keyword[False] keyword[if] identifier[args] . identifier[batch_size] <= literal[int] : identifier[print] ( literal[string] ) keyword[return] keyword[False] keyword[return] keyword[True]
def validate_args(args): """Basic option validation. Returns False if the options are not valid, True otherwise. :param args: the command line options :type args: map :param brokers_num: the number of brokers """ if not args.minutes and (not args.start_time): print('Error: missing --minutes or --start-time') return False # depends on [control=['if'], data=[]] if args.minutes and args.start_time: print("Error: --minutes shouldn't be specified if --start-time is used") return False # depends on [control=['if'], data=[]] if args.end_time and (not args.start_time): print("Error: --end-time can't be used without --start-time") return False # depends on [control=['if'], data=[]] if args.minutes and args.minutes <= 0: print('Error: --minutes must be > 0') return False # depends on [control=['if'], data=[]] if args.start_time and (not TIME_FORMAT_REGEX.match(args.start_time)): print('Error: --start-time format is not valid') print("Example format: '2015-11-26 11:00:00'") return False # depends on [control=['if'], data=[]] if args.end_time and (not TIME_FORMAT_REGEX.match(args.end_time)): print('Error: --end-time format is not valid') print("Example format: '2015-11-26 11:00:00'") return False # depends on [control=['if'], data=[]] if args.batch_size <= 0: print('Error: --batch-size must be > 0') return False # depends on [control=['if'], data=[]] return True
def _size(self):
    """
    :return: how many bits is this slice selecting
    """
    assert isinstance(self, Value)
    # val holds the (high, low) bounds of the slice.
    high, low = self.val[0], self.val[1]
    return int(high) - int(low)
def function[_size, parameter[self]]: constant[ :return: how many bits is this slice selecting ] assert[call[name[isinstance], parameter[name[self], name[Value]]]] return[binary_operation[call[name[int], parameter[call[name[self].val][constant[0]]]] - call[name[int], parameter[call[name[self].val][constant[1]]]]]]
keyword[def] identifier[_size] ( identifier[self] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[self] , identifier[Value] ) keyword[return] identifier[int] ( identifier[self] . identifier[val] [ literal[int] ])- identifier[int] ( identifier[self] . identifier[val] [ literal[int] ])
def _size(self): """ :return: how many bits is this slice selecting """ assert isinstance(self, Value) return int(self.val[0]) - int(self.val[1])
def _on_shortcut_changed(self, renderer, path, new_shortcuts):
    """Callback handling a change of a shortcut

    Parses the edited cell text as a Python literal list of strings,
    stores it under the edited action in the preliminary "SHORTCUTS"
    config value, and re-selects the edited row in the tree view.

    :param Gtk.CellRenderer renderer: Cell renderer showing the shortcut
    :param path: Path of shortcuts within the list store
    :param str new_shortcuts: New shortcuts
    """
    # Action identifier stored in the edited row of the list store.
    action = self.shortcut_list_store[int(path)][self.KEY_STORAGE_ID]
    # Previous shortcuts for this action; used as fallback on bad input.
    old_shortcuts = self.gui_config_model.get_current_config_value("SHORTCUTS", use_preliminary=True)[action]
    from ast import literal_eval
    try:
        # The cell text is expected to be a Python literal, e.g. "['<Control>s']".
        new_shortcuts = literal_eval(new_shortcuts)
        # NOTE(review): this uses `and`, so a non-list value whose elements
        # are all strings slips through -- likely meant `or`; confirm.
        if not isinstance(new_shortcuts, list) and \
                not all([isinstance(shortcut, string_types) for shortcut in new_shortcuts]):
            raise ValueError()
    except (ValueError, SyntaxError):
        # Invalid input: warn and keep the previous shortcuts.
        logger.warning("Shortcuts must be a list of strings")
        new_shortcuts = old_shortcuts
    # Replace only this action's entry in the (preliminary) shortcut mapping.
    shortcuts = self.gui_config_model.get_current_config_value("SHORTCUTS", use_preliminary=True, default={})
    shortcuts[action] = new_shortcuts
    self.gui_config_model.set_preliminary_config_value("SHORTCUTS", shortcuts)
    # Restore the selection on the row that was just edited.
    self._select_row_by_column_value(self.view['shortcut_tree_view'], self.shortcut_list_store,
                                     self.KEY_STORAGE_ID, action)
def function[_on_shortcut_changed, parameter[self, renderer, path, new_shortcuts]]: constant[Callback handling a change of a shortcut :param Gtk.CellRenderer renderer: Cell renderer showing the shortcut :param path: Path of shortcuts within the list store :param str new_shortcuts: New shortcuts ] variable[action] assign[=] call[call[name[self].shortcut_list_store][call[name[int], parameter[name[path]]]]][name[self].KEY_STORAGE_ID] variable[old_shortcuts] assign[=] call[call[name[self].gui_config_model.get_current_config_value, parameter[constant[SHORTCUTS]]]][name[action]] from relative_module[ast] import module[literal_eval] <ast.Try object at 0x7da20c76f460> variable[shortcuts] assign[=] call[name[self].gui_config_model.get_current_config_value, parameter[constant[SHORTCUTS]]] call[name[shortcuts]][name[action]] assign[=] name[new_shortcuts] call[name[self].gui_config_model.set_preliminary_config_value, parameter[constant[SHORTCUTS], name[shortcuts]]] call[name[self]._select_row_by_column_value, parameter[call[name[self].view][constant[shortcut_tree_view]], name[self].shortcut_list_store, name[self].KEY_STORAGE_ID, name[action]]]
keyword[def] identifier[_on_shortcut_changed] ( identifier[self] , identifier[renderer] , identifier[path] , identifier[new_shortcuts] ): literal[string] identifier[action] = identifier[self] . identifier[shortcut_list_store] [ identifier[int] ( identifier[path] )][ identifier[self] . identifier[KEY_STORAGE_ID] ] identifier[old_shortcuts] = identifier[self] . identifier[gui_config_model] . identifier[get_current_config_value] ( literal[string] , identifier[use_preliminary] = keyword[True] )[ identifier[action] ] keyword[from] identifier[ast] keyword[import] identifier[literal_eval] keyword[try] : identifier[new_shortcuts] = identifier[literal_eval] ( identifier[new_shortcuts] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[new_shortcuts] , identifier[list] ) keyword[and] keyword[not] identifier[all] ([ identifier[isinstance] ( identifier[shortcut] , identifier[string_types] ) keyword[for] identifier[shortcut] keyword[in] identifier[new_shortcuts] ]): keyword[raise] identifier[ValueError] () keyword[except] ( identifier[ValueError] , identifier[SyntaxError] ): identifier[logger] . identifier[warning] ( literal[string] ) identifier[new_shortcuts] = identifier[old_shortcuts] identifier[shortcuts] = identifier[self] . identifier[gui_config_model] . identifier[get_current_config_value] ( literal[string] , identifier[use_preliminary] = keyword[True] , identifier[default] ={}) identifier[shortcuts] [ identifier[action] ]= identifier[new_shortcuts] identifier[self] . identifier[gui_config_model] . identifier[set_preliminary_config_value] ( literal[string] , identifier[shortcuts] ) identifier[self] . identifier[_select_row_by_column_value] ( identifier[self] . identifier[view] [ literal[string] ], identifier[self] . identifier[shortcut_list_store] , identifier[self] . identifier[KEY_STORAGE_ID] , identifier[action] )
def _on_shortcut_changed(self, renderer, path, new_shortcuts): """Callback handling a change of a shortcut :param Gtk.CellRenderer renderer: Cell renderer showing the shortcut :param path: Path of shortcuts within the list store :param str new_shortcuts: New shortcuts """ action = self.shortcut_list_store[int(path)][self.KEY_STORAGE_ID] old_shortcuts = self.gui_config_model.get_current_config_value('SHORTCUTS', use_preliminary=True)[action] from ast import literal_eval try: new_shortcuts = literal_eval(new_shortcuts) if not isinstance(new_shortcuts, list) and (not all([isinstance(shortcut, string_types) for shortcut in new_shortcuts])): raise ValueError() # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except (ValueError, SyntaxError): logger.warning('Shortcuts must be a list of strings') new_shortcuts = old_shortcuts # depends on [control=['except'], data=[]] shortcuts = self.gui_config_model.get_current_config_value('SHORTCUTS', use_preliminary=True, default={}) shortcuts[action] = new_shortcuts self.gui_config_model.set_preliminary_config_value('SHORTCUTS', shortcuts) self._select_row_by_column_value(self.view['shortcut_tree_view'], self.shortcut_list_store, self.KEY_STORAGE_ID, action)
def _integrate_segmentation_masks(segmasks):
    """
    `segmasks` should be in sorted order of [coarsest, ..., finest].
    Integrates the given list of segmentation masks together to form one
    segmentation mask by having each segment subsume ones that exist in the
    finer masks.
    """
    if len(segmasks) == 1:
        # NOTE(review): this returns the *list* unchanged, while the
        # multi-mask path below returns a single ndarray -- callers
        # apparently handle both; confirm before unifying to segmasks[0].
        return segmasks
    assert len(segmasks) > 0, "Passed in empty list of segmentation masks"
    # Start from a copy of the coarsest mask; it will absorb finer segments.
    coarse_mask = np.copy(segmasks[0])
    # Segment ids present in the coarse mask (0 is background).
    mask_ids = [id for id in np.unique(coarse_mask) if id != 0]
    for id in mask_ids:
        for mask in segmasks[1:]:
            # Non-background segment ids of the finer mask.
            finer_ids = [i for i in np.unique(mask) if i != 0]
            for finer_id in finer_ids:
                # Let coarse segment `id` subsume finer segment `finer_id`
                # wherever the two overlap.
                _update_segmentation_mask_if_overlap(coarse_mask, mask, id, finer_id)
    # Lastly, merge all adjacent blocks, but just kidding, since this algorithm is waaaay to slow
    #_merge_adjacent_segments(coarse_mask)
    return coarse_mask
def function[_integrate_segmentation_masks, parameter[segmasks]]: constant[ `segmasks` should be in sorted order of [coarsest, ..., finest]. Integrates the given list of segmentation masks together to form one segmentation mask by having each segment subsume ones that exist in the finer masks. ] if compare[call[name[len], parameter[name[segmasks]]] equal[==] constant[1]] begin[:] return[name[segmasks]] assert[compare[call[name[len], parameter[name[segmasks]]] greater[>] constant[0]]] variable[coarse_mask] assign[=] call[name[np].copy, parameter[call[name[segmasks]][constant[0]]]] variable[mask_ids] assign[=] <ast.ListComp object at 0x7da1b03e1d50> for taget[name[id]] in starred[name[mask_ids]] begin[:] for taget[name[mask]] in starred[call[name[segmasks]][<ast.Slice object at 0x7da1b03e39d0>]] begin[:] variable[finer_ids] assign[=] <ast.ListComp object at 0x7da1b03e1300> for taget[name[finer_id]] in starred[name[finer_ids]] begin[:] call[name[_update_segmentation_mask_if_overlap], parameter[name[coarse_mask], name[mask], name[id], name[finer_id]]] return[name[coarse_mask]]
keyword[def] identifier[_integrate_segmentation_masks] ( identifier[segmasks] ): literal[string] keyword[if] identifier[len] ( identifier[segmasks] )== literal[int] : keyword[return] identifier[segmasks] keyword[assert] identifier[len] ( identifier[segmasks] )> literal[int] , literal[string] identifier[coarse_mask] = identifier[np] . identifier[copy] ( identifier[segmasks] [ literal[int] ]) identifier[mask_ids] =[ identifier[id] keyword[for] identifier[id] keyword[in] identifier[np] . identifier[unique] ( identifier[coarse_mask] ) keyword[if] identifier[id] != literal[int] ] keyword[for] identifier[id] keyword[in] identifier[mask_ids] : keyword[for] identifier[mask] keyword[in] identifier[segmasks] [ literal[int] :]: identifier[finer_ids] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[np] . identifier[unique] ( identifier[mask] ) keyword[if] identifier[i] != literal[int] ] keyword[for] identifier[finer_id] keyword[in] identifier[finer_ids] : identifier[_update_segmentation_mask_if_overlap] ( identifier[coarse_mask] , identifier[mask] , identifier[id] , identifier[finer_id] ) keyword[return] identifier[coarse_mask]
def _integrate_segmentation_masks(segmasks): """ `segmasks` should be in sorted order of [coarsest, ..., finest]. Integrates the given list of segmentation masks together to form one segmentation mask by having each segment subsume ones that exist in the finer masks. """ if len(segmasks) == 1: return segmasks # depends on [control=['if'], data=[]] assert len(segmasks) > 0, 'Passed in empty list of segmentation masks' coarse_mask = np.copy(segmasks[0]) mask_ids = [id for id in np.unique(coarse_mask) if id != 0] for id in mask_ids: for mask in segmasks[1:]: finer_ids = [i for i in np.unique(mask) if i != 0] for finer_id in finer_ids: _update_segmentation_mask_if_overlap(coarse_mask, mask, id, finer_id) # depends on [control=['for'], data=['finer_id']] # depends on [control=['for'], data=['mask']] # depends on [control=['for'], data=['id']] # Lastly, merge all adjacent blocks, but just kidding, since this algorithm is waaaay to slow #_merge_adjacent_segments(coarse_mask) return coarse_mask
def cross_product(x1, y1, z1, x2, y2, z2):
    """
    Cross product of two vectors, v1 x v2

    Parameters
    ----------
    x1 : float or array-like
        X component of vector 1
    y1 : float or array-like
        Y component of vector 1
    z1 : float or array-like
        Z component of vector 1
    x2 : float or array-like
        X component of vector 2
    y2 : float or array-like
        Y component of vector 2
    z2 : float or array-like
        Z component of vector 2

    Returns
    -------
    x, y, z
        Unit vector x,y,z components
    """
    # Standard determinant expansion of the cross product.
    cx = y1 * z2 - z1 * y2
    cy = z1 * x2 - x1 * z2
    cz = x1 * y2 - y1 * x2
    return cx, cy, cz
def function[cross_product, parameter[x1, y1, z1, x2, y2, z2]]: constant[ Cross product of two vectors, v1 x v2 Parameters ---------- x1 : float or array-like X component of vector 1 y1 : float or array-like Y component of vector 1 z1 : float or array-like Z component of vector 1 x2 : float or array-like X component of vector 2 y2 : float or array-like Y component of vector 2 z2 : float or array-like Z component of vector 2 Returns ------- x, y, z Unit vector x,y,z components ] variable[x] assign[=] binary_operation[binary_operation[name[y1] * name[z2]] - binary_operation[name[y2] * name[z1]]] variable[y] assign[=] binary_operation[binary_operation[name[z1] * name[x2]] - binary_operation[name[x1] * name[z2]]] variable[z] assign[=] binary_operation[binary_operation[name[x1] * name[y2]] - binary_operation[name[y1] * name[x2]]] return[tuple[[<ast.Name object at 0x7da1b0a30c40>, <ast.Name object at 0x7da1b0a30cd0>, <ast.Name object at 0x7da1b0a33d60>]]]
keyword[def] identifier[cross_product] ( identifier[x1] , identifier[y1] , identifier[z1] , identifier[x2] , identifier[y2] , identifier[z2] ): literal[string] identifier[x] = identifier[y1] * identifier[z2] - identifier[y2] * identifier[z1] identifier[y] = identifier[z1] * identifier[x2] - identifier[x1] * identifier[z2] identifier[z] = identifier[x1] * identifier[y2] - identifier[y1] * identifier[x2] keyword[return] identifier[x] , identifier[y] , identifier[z]
def cross_product(x1, y1, z1, x2, y2, z2): """ Cross product of two vectors, v1 x v2 Parameters ---------- x1 : float or array-like X component of vector 1 y1 : float or array-like Y component of vector 1 z1 : float or array-like Z component of vector 1 x2 : float or array-like X component of vector 2 y2 : float or array-like Y component of vector 2 z2 : float or array-like Z component of vector 2 Returns ------- x, y, z Unit vector x,y,z components """ x = y1 * z2 - y2 * z1 y = z1 * x2 - x1 * z2 z = x1 * y2 - y1 * x2 return (x, y, z)
def _find_file(self, path, silent=False):
    """
    Return the full path of the first README file found at the specified
    path, trying each default filename in order.  When nothing is found:
    return the default filename joined with the path if ``silent`` is true,
    otherwise raise ``ReadmeNotFoundError``.
    """
    for candidate in DEFAULT_FILENAMES:
        candidate_path = os.path.join(path, candidate) if path else candidate
        if os.path.exists(candidate_path):
            return candidate_path

    # Nothing matched: fall back silently or complain.
    if silent:
        return os.path.join(path, DEFAULT_FILENAME)
    raise ReadmeNotFoundError(path)
def function[_find_file, parameter[self, path, silent]]: constant[ Gets the full path and extension, or None if a README file could not be found at the specified path. ] for taget[name[filename]] in starred[name[DEFAULT_FILENAMES]] begin[:] variable[full_path] assign[=] <ast.IfExp object at 0x7da1b1d23100> if call[name[os].path.exists, parameter[name[full_path]]] begin[:] return[name[full_path]] if name[silent] begin[:] return[call[name[os].path.join, parameter[name[path], name[DEFAULT_FILENAME]]]] <ast.Raise object at 0x7da1b1d05120>
keyword[def] identifier[_find_file] ( identifier[self] , identifier[path] , identifier[silent] = keyword[False] ): literal[string] keyword[for] identifier[filename] keyword[in] identifier[DEFAULT_FILENAMES] : identifier[full_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[filename] ) keyword[if] identifier[path] keyword[else] identifier[filename] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[full_path] ): keyword[return] identifier[full_path] keyword[if] identifier[silent] : keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[DEFAULT_FILENAME] ) keyword[raise] identifier[ReadmeNotFoundError] ( identifier[path] )
def _find_file(self, path, silent=False): """ Gets the full path and extension, or None if a README file could not be found at the specified path. """ for filename in DEFAULT_FILENAMES: full_path = os.path.join(path, filename) if path else filename if os.path.exists(full_path): return full_path # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] # Return default filename if silent if silent: return os.path.join(path, DEFAULT_FILENAME) # depends on [control=['if'], data=[]] raise ReadmeNotFoundError(path)
def _build_native_function_call(fn):
    """
    If fn can be interpreted and handled as a native function: i.e. fn is one
    of the extensions, or fn is a simple lambda closure using one of the
    extensions.

       fn = tc.extensions.add
       fn = lambda x: tc.extensions.add(5)

    Then, this returns a closure object, which describes the function call
    which can then be passed to C++.

    Each argument slot is encoded as a 2-element list: ``[0, i]`` means
    "take the caller's i-th input argument", ``[1, value]`` means "use this
    captured value".  ``[-1, name]`` is a temporary placeholder.

    Returns a _Closure object on success, raises an exception on failure.
    """
    # See if fn is the native function itself
    native_function_name = _get_toolkit_function_name_from_function(fn)
    if native_function_name != "":
        # yup! generate an "identity" argument list: every argument is
        # forwarded positionally from the caller.
        argnames = _get_argument_list_from_toolkit_function_name(native_function_name)
        arglist = [[0, i] for i in range(len(argnames))]
        return _Closure(native_function_name, arglist)

    # ok. its not a native function; see if it is a simple lambda whose body
    # calls a native function.
    from .util.lambda_closure_capture import translate
    from .util.lambda_closure_capture import Parameter

    capture = translate(fn)

    # Try to pick up the lambda's target function from the caller's globals.
    function = _descend_namespace(capture.caller_globals, capture.closure_fn_name)
    native_function_name = _get_toolkit_function_name_from_function(function)
    if native_function_name == "":
        raise RuntimeError("Lambda does not contain a native function")

    argnames = _get_argument_list_from_toolkit_function_name(native_function_name)

    # ok. build up the argument list. this is mildly annoying due to the mix
    # of positional and named arguments.
    # make an argument list with a placeholder for everything first
    arglist = [[-1, i] for i in argnames]
    # loop through the positional arguments
    for i in range(len(capture.positional_args)):
        arg = capture.positional_args[i]
        if type(arg) is Parameter:
            # This is a lambda argument: arg.name is the actual string of the
            # argument; here we need its index among the lambda's inputs.
            arglist[i] = [0, capture.input_arg_names.index(arg.name)]
        else:
            # this is a captured value
            arglist[i] = [1, arg]

    # now. the named arguments are somewhat annoying: they map to a slot by
    # the target function's parameter name rather than by position.
    for i in capture.named_args:
        arg = capture.named_args[i]
        if type(arg) is Parameter:
            arglist[argnames.index(i)] = [0, capture.input_arg_names.index(arg.name)]
        else:
            # this is a captured value
            arglist[argnames.index(i)] = [1, arg]

    # done. Make sure all arguments are filled (no placeholder left behind).
    for i in arglist:
        if i[0] == -1:
            raise RuntimeError("Incomplete function specification")

    # attempt to recursively break down any other functions among the
    # captured values; failures are deliberately ignored (best effort), the
    # value is then passed through unchanged.
    import inspect
    for i in range(len(arglist)):
        if arglist[i][0] == 1 and inspect.isfunction(arglist[i][1]):
            try:
                arglist[i][1] = _build_native_function_call(arglist[i][1])
            except:
                pass
    return _Closure(native_function_name, arglist)
def function[_build_native_function_call, parameter[fn]]: constant[ If fn can be interpreted and handled as a native function: i.e. fn is one of the extensions, or fn is a simple lambda closure using one of the extensions. fn = tc.extensions.add fn = lambda x: tc.extensions.add(5) Then, this returns a closure object, which describes the function call which can then be passed to C++. Returns a _Closure object on success, raises an exception on failure. ] variable[native_function_name] assign[=] call[name[_get_toolkit_function_name_from_function], parameter[name[fn]]] if compare[name[native_function_name] not_equal[!=] constant[]] begin[:] variable[argnames] assign[=] call[name[_get_argument_list_from_toolkit_function_name], parameter[name[native_function_name]]] variable[arglist] assign[=] <ast.ListComp object at 0x7da1b21efdf0> return[call[name[_Closure], parameter[name[native_function_name], name[arglist]]]] from relative_module[util.lambda_closure_capture] import module[translate] from relative_module[util.lambda_closure_capture] import module[Parameter] variable[capture] assign[=] call[name[translate], parameter[name[fn]]] variable[function] assign[=] call[name[_descend_namespace], parameter[name[capture].caller_globals, name[capture].closure_fn_name]] variable[native_function_name] assign[=] call[name[_get_toolkit_function_name_from_function], parameter[name[function]]] if compare[name[native_function_name] equal[==] constant[]] begin[:] <ast.Raise object at 0x7da1b21ec070> variable[argnames] assign[=] call[name[_get_argument_list_from_toolkit_function_name], parameter[name[native_function_name]]] variable[arglist] assign[=] <ast.ListComp object at 0x7da1b21efbe0> for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[capture].positional_args]]]]] begin[:] variable[arg] assign[=] call[name[capture].positional_args][name[i]] if compare[call[name[type], parameter[name[arg]]] is name[Parameter]] begin[:] 
call[name[arglist]][name[i]] assign[=] list[[<ast.Constant object at 0x7da1b21eea10>, <ast.Call object at 0x7da1b21ee980>]] for taget[name[i]] in starred[name[capture].named_args] begin[:] variable[arg] assign[=] call[name[capture].named_args][name[i]] if compare[call[name[type], parameter[name[arg]]] is name[Parameter]] begin[:] call[name[arglist]][call[name[argnames].index, parameter[name[i]]]] assign[=] list[[<ast.Constant object at 0x7da1b21ecfd0>, <ast.Call object at 0x7da1b21edae0>]] for taget[name[i]] in starred[name[arglist]] begin[:] if compare[call[name[i]][constant[0]] equal[==] <ast.UnaryOp object at 0x7da1b21eda50>] begin[:] <ast.Raise object at 0x7da1b21ef070> import module[inspect] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[arglist]]]]]] begin[:] if <ast.BoolOp object at 0x7da1b1f60820> begin[:] <ast.Try object at 0x7da1b1f63190> return[call[name[_Closure], parameter[name[native_function_name], name[arglist]]]]
keyword[def] identifier[_build_native_function_call] ( identifier[fn] ): literal[string] identifier[native_function_name] = identifier[_get_toolkit_function_name_from_function] ( identifier[fn] ) keyword[if] identifier[native_function_name] != literal[string] : identifier[argnames] = identifier[_get_argument_list_from_toolkit_function_name] ( identifier[native_function_name] ) identifier[arglist] =[[ literal[int] , identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[argnames] ))] keyword[return] identifier[_Closure] ( identifier[native_function_name] , identifier[arglist] ) keyword[from] . identifier[util] . identifier[lambda_closure_capture] keyword[import] identifier[translate] keyword[from] . identifier[util] . identifier[lambda_closure_capture] keyword[import] identifier[Parameter] identifier[capture] = identifier[translate] ( identifier[fn] ) identifier[function] = identifier[_descend_namespace] ( identifier[capture] . identifier[caller_globals] , identifier[capture] . identifier[closure_fn_name] ) identifier[native_function_name] = identifier[_get_toolkit_function_name_from_function] ( identifier[function] ) keyword[if] identifier[native_function_name] == literal[string] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[argnames] = identifier[_get_argument_list_from_toolkit_function_name] ( identifier[native_function_name] ) identifier[arglist] =[[- literal[int] , identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[argnames] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[capture] . identifier[positional_args] )): identifier[arg] = identifier[capture] . identifier[positional_args] [ identifier[i] ] keyword[if] identifier[type] ( identifier[arg] ) keyword[is] identifier[Parameter] : identifier[arglist] [ identifier[i] ]=[ literal[int] , identifier[capture] . identifier[input_arg_names] . identifier[index] ( identifier[arg] . 
identifier[name] )] keyword[else] : identifier[arglist] [ identifier[i] ]=[ literal[int] , identifier[arg] ] keyword[for] identifier[i] keyword[in] identifier[capture] . identifier[named_args] : identifier[arg] = identifier[capture] . identifier[named_args] [ identifier[i] ] keyword[if] identifier[type] ( identifier[arg] ) keyword[is] identifier[Parameter] : identifier[arglist] [ identifier[argnames] . identifier[index] ( identifier[i] )]=[ literal[int] , identifier[capture] . identifier[input_arg_names] . identifier[index] ( identifier[arg] . identifier[name] )] keyword[else] : identifier[arglist] [ identifier[argnames] . identifier[index] ( identifier[i] )]=[ literal[int] , identifier[arg] ] keyword[for] identifier[i] keyword[in] identifier[arglist] : keyword[if] identifier[i] [ literal[int] ]==- literal[int] : keyword[raise] identifier[RuntimeError] ( literal[string] ) keyword[import] identifier[inspect] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[arglist] )): keyword[if] identifier[arglist] [ identifier[i] ][ literal[int] ]== literal[int] keyword[and] identifier[inspect] . identifier[isfunction] ( identifier[arglist] [ identifier[i] ][ literal[int] ]): keyword[try] : identifier[arglist] [ identifier[i] ][ literal[int] ]= identifier[_build_native_function_call] ( identifier[arglist] [ identifier[i] ][ literal[int] ]) keyword[except] : keyword[pass] keyword[return] identifier[_Closure] ( identifier[native_function_name] , identifier[arglist] )
def _build_native_function_call(fn): """ If fn can be interpreted and handled as a native function: i.e. fn is one of the extensions, or fn is a simple lambda closure using one of the extensions. fn = tc.extensions.add fn = lambda x: tc.extensions.add(5) Then, this returns a closure object, which describes the function call which can then be passed to C++. Returns a _Closure object on success, raises an exception on failure. """ # See if fn is the native function itself native_function_name = _get_toolkit_function_name_from_function(fn) if native_function_name != '': # yup! # generate an "identity" argument list argnames = _get_argument_list_from_toolkit_function_name(native_function_name) arglist = [[0, i] for i in range(len(argnames))] return _Closure(native_function_name, arglist) # depends on [control=['if'], data=['native_function_name']] # ok. its not a native function from .util.lambda_closure_capture import translate from .util.lambda_closure_capture import Parameter # Lets see if it is a simple lambda capture = translate(fn) # ok. build up the closure arguments # Try to pick up the lambda function = _descend_namespace(capture.caller_globals, capture.closure_fn_name) native_function_name = _get_toolkit_function_name_from_function(function) if native_function_name == '': raise RuntimeError('Lambda does not contain a native function') # depends on [control=['if'], data=[]] argnames = _get_argument_list_from_toolkit_function_name(native_function_name) # ok. build up the argument list. 
this is mildly annoying due to the mix of # positional and named arguments # make an argument list with a placeholder for everything first arglist = [[-1, i] for i in argnames] # loop through the positional arguments for i in range(len(capture.positional_args)): arg = capture.positional_args[i] if type(arg) is Parameter: # This is a lambda argument # arg.name is the actual string of the argument # here we need the index arglist[i] = [0, capture.input_arg_names.index(arg.name)] # depends on [control=['if'], data=[]] else: # this is a captured value arglist[i] = [1, arg] # depends on [control=['for'], data=['i']] # now. the named arguments are somewhat annoying for i in capture.named_args: arg = capture.named_args[i] if type(arg) is Parameter: # This is a lambda argument # arg.name is the actual string of the argument # here we need the index arglist[argnames.index(i)] = [0, capture.input_arg_names.index(arg.name)] # depends on [control=['if'], data=[]] else: # this is a captured value arglist[argnames.index(i)] = [1, arg] # depends on [control=['for'], data=['i']] # done. Make sure all arguments are filled for i in arglist: if i[0] == -1: raise RuntimeError('Incomplete function specification') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # attempt to recursively break down any other functions import inspect for i in range(len(arglist)): if arglist[i][0] == 1 and inspect.isfunction(arglist[i][1]): try: arglist[i][1] = _build_native_function_call(arglist[i][1]) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] return _Closure(native_function_name, arglist)
def responses_callback(request): """Responses Request Handler. Converts a call intercepted by Responses to the Stack-In-A-Box infrastructure :param request: request object :returns: tuple - (int, dict, string) containing: int - the HTTP response status code dict - the headers for the HTTP response string - HTTP string response """ method = request.method headers = CaseInsensitiveDict() request_headers = CaseInsensitiveDict() request_headers.update(request.headers) request.headers = request_headers uri = request.url return StackInABox.call_into(method, request, uri, headers)
def function[responses_callback, parameter[request]]: constant[Responses Request Handler. Converts a call intercepted by Responses to the Stack-In-A-Box infrastructure :param request: request object :returns: tuple - (int, dict, string) containing: int - the HTTP response status code dict - the headers for the HTTP response string - HTTP string response ] variable[method] assign[=] name[request].method variable[headers] assign[=] call[name[CaseInsensitiveDict], parameter[]] variable[request_headers] assign[=] call[name[CaseInsensitiveDict], parameter[]] call[name[request_headers].update, parameter[name[request].headers]] name[request].headers assign[=] name[request_headers] variable[uri] assign[=] name[request].url return[call[name[StackInABox].call_into, parameter[name[method], name[request], name[uri], name[headers]]]]
keyword[def] identifier[responses_callback] ( identifier[request] ): literal[string] identifier[method] = identifier[request] . identifier[method] identifier[headers] = identifier[CaseInsensitiveDict] () identifier[request_headers] = identifier[CaseInsensitiveDict] () identifier[request_headers] . identifier[update] ( identifier[request] . identifier[headers] ) identifier[request] . identifier[headers] = identifier[request_headers] identifier[uri] = identifier[request] . identifier[url] keyword[return] identifier[StackInABox] . identifier[call_into] ( identifier[method] , identifier[request] , identifier[uri] , identifier[headers] )
def responses_callback(request): """Responses Request Handler. Converts a call intercepted by Responses to the Stack-In-A-Box infrastructure :param request: request object :returns: tuple - (int, dict, string) containing: int - the HTTP response status code dict - the headers for the HTTP response string - HTTP string response """ method = request.method headers = CaseInsensitiveDict() request_headers = CaseInsensitiveDict() request_headers.update(request.headers) request.headers = request_headers uri = request.url return StackInABox.call_into(method, request, uri, headers)
def parse_json(s, **kwargs): """Parse a string into a (nbformat, dict) tuple.""" d = json.loads(s, **kwargs) nbf = d.get('nbformat', 1) nbm = d.get('nbformat_minor', 0) return nbf, nbm, d
def function[parse_json, parameter[s]]: constant[Parse a string into a (nbformat, dict) tuple.] variable[d] assign[=] call[name[json].loads, parameter[name[s]]] variable[nbf] assign[=] call[name[d].get, parameter[constant[nbformat], constant[1]]] variable[nbm] assign[=] call[name[d].get, parameter[constant[nbformat_minor], constant[0]]] return[tuple[[<ast.Name object at 0x7da18ede42b0>, <ast.Name object at 0x7da18ede6320>, <ast.Name object at 0x7da18ede7400>]]]
keyword[def] identifier[parse_json] ( identifier[s] ,** identifier[kwargs] ): literal[string] identifier[d] = identifier[json] . identifier[loads] ( identifier[s] ,** identifier[kwargs] ) identifier[nbf] = identifier[d] . identifier[get] ( literal[string] , literal[int] ) identifier[nbm] = identifier[d] . identifier[get] ( literal[string] , literal[int] ) keyword[return] identifier[nbf] , identifier[nbm] , identifier[d]
def parse_json(s, **kwargs): """Parse a string into a (nbformat, dict) tuple.""" d = json.loads(s, **kwargs) nbf = d.get('nbformat', 1) nbm = d.get('nbformat_minor', 0) return (nbf, nbm, d)