code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def set_property(self, key, value):
    """Create or overwrite a named property on the document.

    Callers should go through this method rather than mutating
    ``properties`` directly.  Reserved attribute names (``"_links"``,
    ``"_embedded"``) are silently ignored: the call is a no-op for them.
    Any other ``key`` is stored in the underlying mapping, replacing a
    previous value if one exists.
    """
    # Only non-reserved keys make it into the backing store.
    if key not in self.RESERVED_ATTRIBUTE_NAMES:
        self.o[key] = value
def function[set_property, parameter[self, key, value]]: constant[Set a property on the document. Calling code should use this method to add and modify properties on the document instead of modifying ``properties`` directly. If ``key`` is ``"_links"`` or ``"_embedded"`` this method will silently fail. If there is no property with the name in ``key``, a new property is created with the name from ``key`` and the value from ``value``. If the document already has a property with that name, it's value is replaced with the value in ``value``. ] if compare[name[key] in name[self].RESERVED_ATTRIBUTE_NAMES] begin[:] return[None] call[name[self].o][name[key]] assign[=] name[value]
keyword[def] identifier[set_property] ( identifier[self] , identifier[key] , identifier[value] ): literal[string] keyword[if] identifier[key] keyword[in] identifier[self] . identifier[RESERVED_ATTRIBUTE_NAMES] : keyword[return] identifier[self] . identifier[o] [ identifier[key] ]= identifier[value]
def set_property(self, key, value): """Set a property on the document. Calling code should use this method to add and modify properties on the document instead of modifying ``properties`` directly. If ``key`` is ``"_links"`` or ``"_embedded"`` this method will silently fail. If there is no property with the name in ``key``, a new property is created with the name from ``key`` and the value from ``value``. If the document already has a property with that name, it's value is replaced with the value in ``value``. """ if key in self.RESERVED_ATTRIBUTE_NAMES: return # depends on [control=['if'], data=[]] self.o[key] = value
def image(image_format, doc=None):
    """Dynamically creates an image type handler for the specified image type"""
    @on_valid('image/{0}'.format(image_format))
    def image_handler(data, **kwargs):
        # File-like objects are passed through untouched.
        if hasattr(data, 'read'):
            return data
        # PIL-style images: serialize into an in-memory stream.
        if hasattr(data, 'save'):
            buffer = stream()
            save = data.save
            if introspect.takes_all_arguments(save, 'format') or introspect.takes_kwargs(save):
                save(buffer, format=image_format.upper())
            else:
                save(buffer)
            buffer.seek(0)
            return buffer
        # Renderable objects (e.g. chart libraries) produce their own bytes.
        if hasattr(data, 'render'):
            return data.render()
        # Otherwise treat the value as a path to an image on disk.
        if os.path.isfile(data):
            return open(data, 'rb')

    image_handler.__doc__ = doc or "{0} formatted image".format(image_format)
    return image_handler
def function[image, parameter[image_format, doc]]: constant[Dynamically creates an image type handler for the specified image type] def function[image_handler, parameter[data]]: if call[name[hasattr], parameter[name[data], constant[read]]] begin[:] return[name[data]] name[image_handler].__doc__ assign[=] <ast.BoolOp object at 0x7da18bccb250> return[name[image_handler]]
keyword[def] identifier[image] ( identifier[image_format] , identifier[doc] = keyword[None] ): literal[string] @ identifier[on_valid] ( literal[string] . identifier[format] ( identifier[image_format] )) keyword[def] identifier[image_handler] ( identifier[data] ,** identifier[kwargs] ): keyword[if] identifier[hasattr] ( identifier[data] , literal[string] ): keyword[return] identifier[data] keyword[elif] identifier[hasattr] ( identifier[data] , literal[string] ): identifier[output] = identifier[stream] () keyword[if] identifier[introspect] . identifier[takes_all_arguments] ( identifier[data] . identifier[save] , literal[string] ) keyword[or] identifier[introspect] . identifier[takes_kwargs] ( identifier[data] . identifier[save] ): identifier[data] . identifier[save] ( identifier[output] , identifier[format] = identifier[image_format] . identifier[upper] ()) keyword[else] : identifier[data] . identifier[save] ( identifier[output] ) identifier[output] . identifier[seek] ( literal[int] ) keyword[return] identifier[output] keyword[elif] identifier[hasattr] ( identifier[data] , literal[string] ): keyword[return] identifier[data] . identifier[render] () keyword[elif] identifier[os] . identifier[path] . identifier[isfile] ( identifier[data] ): keyword[return] identifier[open] ( identifier[data] , literal[string] ) identifier[image_handler] . identifier[__doc__] = identifier[doc] keyword[or] literal[string] . identifier[format] ( identifier[image_format] ) keyword[return] identifier[image_handler]
def image(image_format, doc=None): """Dynamically creates an image type handler for the specified image type""" @on_valid('image/{0}'.format(image_format)) def image_handler(data, **kwargs): if hasattr(data, 'read'): return data # depends on [control=['if'], data=[]] elif hasattr(data, 'save'): output = stream() if introspect.takes_all_arguments(data.save, 'format') or introspect.takes_kwargs(data.save): data.save(output, format=image_format.upper()) # depends on [control=['if'], data=[]] else: data.save(output) output.seek(0) return output # depends on [control=['if'], data=[]] elif hasattr(data, 'render'): return data.render() # depends on [control=['if'], data=[]] elif os.path.isfile(data): return open(data, 'rb') # depends on [control=['if'], data=[]] image_handler.__doc__ = doc or '{0} formatted image'.format(image_format) return image_handler
def get_chunked_content(self, chunksize=4096):
    '''Generator that returns the datastream content in chunks, so
    larger datastreams can be used without reading the entire contents
    into memory.'''
    # Ask for the raw streaming HTTP response rather than the full body.
    response = self.obj.api.getDatastreamDissemination(
        self.obj.pid, self.id, stream=True, asOfDateTime=self.as_of_date)
    # Hand each chunk straight through to the caller.
    yield from response.iter_content(chunksize)
def function[get_chunked_content, parameter[self, chunksize]]: constant[Generator that returns the datastream content in chunks, so larger datastreams can be used without reading the entire contents into memory.] variable[r] assign[=] call[name[self].obj.api.getDatastreamDissemination, parameter[name[self].obj.pid, name[self].id]] for taget[name[chunk]] in starred[call[name[r].iter_content, parameter[name[chunksize]]]] begin[:] <ast.Yield object at 0x7da1b2616e60>
keyword[def] identifier[get_chunked_content] ( identifier[self] , identifier[chunksize] = literal[int] ): literal[string] identifier[r] = identifier[self] . identifier[obj] . identifier[api] . identifier[getDatastreamDissemination] ( identifier[self] . identifier[obj] . identifier[pid] , identifier[self] . identifier[id] , identifier[stream] = keyword[True] , identifier[asOfDateTime] = identifier[self] . identifier[as_of_date] ) keyword[for] identifier[chunk] keyword[in] identifier[r] . identifier[iter_content] ( identifier[chunksize] ): keyword[yield] identifier[chunk]
def get_chunked_content(self, chunksize=4096): """Generator that returns the datastream content in chunks, so larger datastreams can be used without reading the entire contents into memory.""" # get the datastream dissemination, but return the actual http response r = self.obj.api.getDatastreamDissemination(self.obj.pid, self.id, stream=True, asOfDateTime=self.as_of_date) # read and yield the response in chunks for chunk in r.iter_content(chunksize): yield chunk # depends on [control=['for'], data=['chunk']]
def from_traceback(cls, tb):
    """ Construct a Bytecode from the given traceback """
    # Walk to the innermost frame: that is where the exception was raised.
    frame_tb = tb
    while frame_tb.tb_next is not None:
        frame_tb = frame_tb.tb_next
    return cls(frame_tb.tb_frame.f_code, current_offset=frame_tb.tb_lasti)
def function[from_traceback, parameter[cls, tb]]: constant[ Construct a Bytecode from the given traceback ] while name[tb].tb_next begin[:] variable[tb] assign[=] name[tb].tb_next return[call[name[cls], parameter[name[tb].tb_frame.f_code]]]
keyword[def] identifier[from_traceback] ( identifier[cls] , identifier[tb] ): literal[string] keyword[while] identifier[tb] . identifier[tb_next] : identifier[tb] = identifier[tb] . identifier[tb_next] keyword[return] identifier[cls] ( identifier[tb] . identifier[tb_frame] . identifier[f_code] , identifier[current_offset] = identifier[tb] . identifier[tb_lasti] )
def from_traceback(cls, tb): """ Construct a Bytecode from the given traceback """ while tb.tb_next: tb = tb.tb_next # depends on [control=['while'], data=[]] return cls(tb.tb_frame.f_code, current_offset=tb.tb_lasti)
def accpro_results(self):
    """Parse the ACCpro output file and return a dict of secondary
    structure compositions.
    """
    # ACCpro output is FASTA-formatted; reuse the generic FASTA loader.
    loader = ssbio.protein.sequence.utils.fasta.load_fasta_file_as_dict_of_seqs
    return loader(self.out_accpro)
def function[accpro_results, parameter[self]]: constant[Parse the ACCpro output file and return a dict of secondary structure compositions. ] return[call[name[ssbio].protein.sequence.utils.fasta.load_fasta_file_as_dict_of_seqs, parameter[name[self].out_accpro]]]
keyword[def] identifier[accpro_results] ( identifier[self] ): literal[string] keyword[return] identifier[ssbio] . identifier[protein] . identifier[sequence] . identifier[utils] . identifier[fasta] . identifier[load_fasta_file_as_dict_of_seqs] ( identifier[self] . identifier[out_accpro] )
def accpro_results(self): """Parse the ACCpro output file and return a dict of secondary structure compositions. """ return ssbio.protein.sequence.utils.fasta.load_fasta_file_as_dict_of_seqs(self.out_accpro)
def access_ok(self, access):
    """ Check if there is enough permissions for access """
    # Every requested permission character must be present in self.perms.
    return all(perm in self.perms for perm in access)
def function[access_ok, parameter[self, access]]: constant[ Check if there is enough permissions for access ] for taget[name[c]] in starred[name[access]] begin[:] if compare[name[c] <ast.NotIn object at 0x7da2590d7190> name[self].perms] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[access_ok] ( identifier[self] , identifier[access] ): literal[string] keyword[for] identifier[c] keyword[in] identifier[access] : keyword[if] identifier[c] keyword[not] keyword[in] identifier[self] . identifier[perms] : keyword[return] keyword[False] keyword[return] keyword[True]
def access_ok(self, access): """ Check if there is enough permissions for access """ for c in access: if c not in self.perms: return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['c']] return True
def _bar(s, align, colors, width=100, vmin=None, vmax=None): """ Draw bar chart in dataframe cells. """ # Get input value range. smin = s.min() if vmin is None else vmin if isinstance(smin, ABCSeries): smin = smin.min() smax = s.max() if vmax is None else vmax if isinstance(smax, ABCSeries): smax = smax.max() if align == 'mid': smin = min(0, smin) smax = max(0, smax) elif align == 'zero': # For "zero" mode, we want the range to be symmetrical around zero. smax = max(abs(smin), abs(smax)) smin = -smax # Transform to percent-range of linear-gradient normed = width * (s.values - smin) / (smax - smin + 1e-12) zero = -width * smin / (smax - smin + 1e-12) def css_bar(start, end, color): """ Generate CSS code to draw a bar from start to end. """ css = 'width: 10em; height: 80%;' if end > start: css += 'background: linear-gradient(90deg,' if start > 0: css += ' transparent {s:.1f}%, {c} {s:.1f}%, '.format( s=start, c=color ) css += '{c} {e:.1f}%, transparent {e:.1f}%)'.format( e=min(end, width), c=color, ) return css def css(x): if pd.isna(x): return '' # avoid deprecated indexing `colors[x > zero]` color = colors[1] if x > zero else colors[0] if align == 'left': return css_bar(0, x, color) else: return css_bar(min(x, zero), max(x, zero), color) if s.ndim == 1: return [css(x) for x in normed] else: return pd.DataFrame( [[css(x) for x in row] for row in normed], index=s.index, columns=s.columns )
def function[_bar, parameter[s, align, colors, width, vmin, vmax]]: constant[ Draw bar chart in dataframe cells. ] variable[smin] assign[=] <ast.IfExp object at 0x7da18bccb070> if call[name[isinstance], parameter[name[smin], name[ABCSeries]]] begin[:] variable[smin] assign[=] call[name[smin].min, parameter[]] variable[smax] assign[=] <ast.IfExp object at 0x7da18bccbb80> if call[name[isinstance], parameter[name[smax], name[ABCSeries]]] begin[:] variable[smax] assign[=] call[name[smax].max, parameter[]] if compare[name[align] equal[==] constant[mid]] begin[:] variable[smin] assign[=] call[name[min], parameter[constant[0], name[smin]]] variable[smax] assign[=] call[name[max], parameter[constant[0], name[smax]]] variable[normed] assign[=] binary_operation[binary_operation[name[width] * binary_operation[name[s].values - name[smin]]] / binary_operation[binary_operation[name[smax] - name[smin]] + constant[1e-12]]] variable[zero] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18bcc9630> * name[smin]] / binary_operation[binary_operation[name[smax] - name[smin]] + constant[1e-12]]] def function[css_bar, parameter[start, end, color]]: constant[ Generate CSS code to draw a bar from start to end. ] variable[css] assign[=] constant[width: 10em; height: 80%;] if compare[name[end] greater[>] name[start]] begin[:] <ast.AugAssign object at 0x7da18bccb6a0> if compare[name[start] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da18bccb100> <ast.AugAssign object at 0x7da18bcc9e10> return[name[css]] def function[css, parameter[x]]: if call[name[pd].isna, parameter[name[x]]] begin[:] return[constant[]] variable[color] assign[=] <ast.IfExp object at 0x7da18bccace0> if compare[name[align] equal[==] constant[left]] begin[:] return[call[name[css_bar], parameter[constant[0], name[x], name[color]]]] if compare[name[s].ndim equal[==] constant[1]] begin[:] return[<ast.ListComp object at 0x7da18bcc8bb0>]
keyword[def] identifier[_bar] ( identifier[s] , identifier[align] , identifier[colors] , identifier[width] = literal[int] , identifier[vmin] = keyword[None] , identifier[vmax] = keyword[None] ): literal[string] identifier[smin] = identifier[s] . identifier[min] () keyword[if] identifier[vmin] keyword[is] keyword[None] keyword[else] identifier[vmin] keyword[if] identifier[isinstance] ( identifier[smin] , identifier[ABCSeries] ): identifier[smin] = identifier[smin] . identifier[min] () identifier[smax] = identifier[s] . identifier[max] () keyword[if] identifier[vmax] keyword[is] keyword[None] keyword[else] identifier[vmax] keyword[if] identifier[isinstance] ( identifier[smax] , identifier[ABCSeries] ): identifier[smax] = identifier[smax] . identifier[max] () keyword[if] identifier[align] == literal[string] : identifier[smin] = identifier[min] ( literal[int] , identifier[smin] ) identifier[smax] = identifier[max] ( literal[int] , identifier[smax] ) keyword[elif] identifier[align] == literal[string] : identifier[smax] = identifier[max] ( identifier[abs] ( identifier[smin] ), identifier[abs] ( identifier[smax] )) identifier[smin] =- identifier[smax] identifier[normed] = identifier[width] *( identifier[s] . identifier[values] - identifier[smin] )/( identifier[smax] - identifier[smin] + literal[int] ) identifier[zero] =- identifier[width] * identifier[smin] /( identifier[smax] - identifier[smin] + literal[int] ) keyword[def] identifier[css_bar] ( identifier[start] , identifier[end] , identifier[color] ): literal[string] identifier[css] = literal[string] keyword[if] identifier[end] > identifier[start] : identifier[css] += literal[string] keyword[if] identifier[start] > literal[int] : identifier[css] += literal[string] . identifier[format] ( identifier[s] = identifier[start] , identifier[c] = identifier[color] ) identifier[css] += literal[string] . 
identifier[format] ( identifier[e] = identifier[min] ( identifier[end] , identifier[width] ), identifier[c] = identifier[color] , ) keyword[return] identifier[css] keyword[def] identifier[css] ( identifier[x] ): keyword[if] identifier[pd] . identifier[isna] ( identifier[x] ): keyword[return] literal[string] identifier[color] = identifier[colors] [ literal[int] ] keyword[if] identifier[x] > identifier[zero] keyword[else] identifier[colors] [ literal[int] ] keyword[if] identifier[align] == literal[string] : keyword[return] identifier[css_bar] ( literal[int] , identifier[x] , identifier[color] ) keyword[else] : keyword[return] identifier[css_bar] ( identifier[min] ( identifier[x] , identifier[zero] ), identifier[max] ( identifier[x] , identifier[zero] ), identifier[color] ) keyword[if] identifier[s] . identifier[ndim] == literal[int] : keyword[return] [ identifier[css] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[normed] ] keyword[else] : keyword[return] identifier[pd] . identifier[DataFrame] ( [[ identifier[css] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[row] ] keyword[for] identifier[row] keyword[in] identifier[normed] ], identifier[index] = identifier[s] . identifier[index] , identifier[columns] = identifier[s] . identifier[columns] )
def _bar(s, align, colors, width=100, vmin=None, vmax=None): """ Draw bar chart in dataframe cells. """ # Get input value range. smin = s.min() if vmin is None else vmin if isinstance(smin, ABCSeries): smin = smin.min() # depends on [control=['if'], data=[]] smax = s.max() if vmax is None else vmax if isinstance(smax, ABCSeries): smax = smax.max() # depends on [control=['if'], data=[]] if align == 'mid': smin = min(0, smin) smax = max(0, smax) # depends on [control=['if'], data=[]] elif align == 'zero': # For "zero" mode, we want the range to be symmetrical around zero. smax = max(abs(smin), abs(smax)) smin = -smax # depends on [control=['if'], data=[]] # Transform to percent-range of linear-gradient normed = width * (s.values - smin) / (smax - smin + 1e-12) zero = -width * smin / (smax - smin + 1e-12) def css_bar(start, end, color): """ Generate CSS code to draw a bar from start to end. """ css = 'width: 10em; height: 80%;' if end > start: css += 'background: linear-gradient(90deg,' if start > 0: css += ' transparent {s:.1f}%, {c} {s:.1f}%, '.format(s=start, c=color) # depends on [control=['if'], data=['start']] css += '{c} {e:.1f}%, transparent {e:.1f}%)'.format(e=min(end, width), c=color) # depends on [control=['if'], data=['end', 'start']] return css def css(x): if pd.isna(x): return '' # depends on [control=['if'], data=[]] # avoid deprecated indexing `colors[x > zero]` color = colors[1] if x > zero else colors[0] if align == 'left': return css_bar(0, x, color) # depends on [control=['if'], data=[]] else: return css_bar(min(x, zero), max(x, zero), color) if s.ndim == 1: return [css(x) for x in normed] # depends on [control=['if'], data=[]] else: return pd.DataFrame([[css(x) for x in row] for row in normed], index=s.index, columns=s.columns)
def get_resource_relationships_by_search(self, resource_relationship_query, resource_relationship_search):
    """Pass through to provider ResourceRelationshipSearchSession.get_resource_relationships_by_search"""
    # Implemented from azosid template for -
    # osid.resource.ResourceSearchSession.get_resources_by_search_template
    # Delegate only when the authorization check succeeds.
    if self._can('search'):
        return self._provider_session.get_resource_relationships_by_search(
            resource_relationship_query,
            resource_relationship_search)
    raise PermissionDenied()
def function[get_resource_relationships_by_search, parameter[self, resource_relationship_query, resource_relationship_search]]: constant[Pass through to provider ResourceRelationshipSearchSession.get_resource_relationships_by_search] if <ast.UnaryOp object at 0x7da204564250> begin[:] <ast.Raise object at 0x7da204566710> return[call[name[self]._provider_session.get_resource_relationships_by_search, parameter[name[resource_relationship_query], name[resource_relationship_search]]]]
keyword[def] identifier[get_resource_relationships_by_search] ( identifier[self] , identifier[resource_relationship_query] , identifier[resource_relationship_search] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_can] ( literal[string] ): keyword[raise] identifier[PermissionDenied] () keyword[return] identifier[self] . identifier[_provider_session] . identifier[get_resource_relationships_by_search] ( identifier[resource_relationship_query] , identifier[resource_relationship_search] )
def get_resource_relationships_by_search(self, resource_relationship_query, resource_relationship_search): """Pass through to provider ResourceRelationshipSearchSession.get_resource_relationships_by_search""" # Implemented from azosid template for - # osid.resource.ResourceSearchSession.get_resources_by_search_template if not self._can('search'): raise PermissionDenied() # depends on [control=['if'], data=[]] return self._provider_session.get_resource_relationships_by_search(resource_relationship_query, resource_relationship_search)
def add_vertex(self, x, y, z):
    """ Add a ``VEC3`` of ``floats`` to the ``vert_data`` buffer """
    # Three consecutive little-endian floats; '<3f' emits the same bytes
    # as packing x, y and z individually with '<f'.
    self.vert_data.write(struct.pack('<3f', x, y, z))
    # retain min/max values
    self.vert_min = _list3_min(self.vert_min, (x, y, z))
    self.vert_max = _list3_max(self.vert_max, (x, y, z))
def function[add_vertex, parameter[self, x, y, z]]: constant[ Add a ``VEC3`` of ``floats`` to the ``vert_data`` buffer ] call[name[self].vert_data.write, parameter[binary_operation[binary_operation[call[name[struct].pack, parameter[constant[<f], name[x]]] + call[name[struct].pack, parameter[constant[<f], name[y]]]] + call[name[struct].pack, parameter[constant[<f], name[z]]]]]] name[self].vert_min assign[=] call[name[_list3_min], parameter[name[self].vert_min, tuple[[<ast.Name object at 0x7da20c6ab1f0>, <ast.Name object at 0x7da20c6a9840>, <ast.Name object at 0x7da20c6ab340>]]]] name[self].vert_max assign[=] call[name[_list3_max], parameter[name[self].vert_max, tuple[[<ast.Name object at 0x7da204963520>, <ast.Name object at 0x7da2049639a0>, <ast.Name object at 0x7da2049600d0>]]]]
keyword[def] identifier[add_vertex] ( identifier[self] , identifier[x] , identifier[y] , identifier[z] ): literal[string] identifier[self] . identifier[vert_data] . identifier[write] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[x] )+ identifier[struct] . identifier[pack] ( literal[string] , identifier[y] )+ identifier[struct] . identifier[pack] ( literal[string] , identifier[z] ) ) identifier[self] . identifier[vert_min] = identifier[_list3_min] ( identifier[self] . identifier[vert_min] ,( identifier[x] , identifier[y] , identifier[z] )) identifier[self] . identifier[vert_max] = identifier[_list3_max] ( identifier[self] . identifier[vert_max] ,( identifier[x] , identifier[y] , identifier[z] ))
def add_vertex(self, x, y, z): """ Add a ``VEC3`` of ``floats`` to the ``vert_data`` buffer """ self.vert_data.write(struct.pack('<f', x) + struct.pack('<f', y) + struct.pack('<f', z)) # retain min/max values self.vert_min = _list3_min(self.vert_min, (x, y, z)) self.vert_max = _list3_max(self.vert_max, (x, y, z))
def mavg(data, xseq, **params):
    """
    Fit moving average

    ``data`` is expected to be a DataFrame with 'x' and 'y' columns.
    Returns a new DataFrame with the rolling mean in 'y' and, when
    ``params['se']`` is true, 'ymin'/'ymax' confidence bounds and the
    rolling standard deviation in 'se'.
    NOTE(review): ``xseq`` is accepted but never used here — presumably
    required by a common smoother signature; confirm against callers.
    """
    window = params['method_args']['window']

    # The first average comes after the full window size
    # has been swept over
    rolling = data['y'].rolling(**params['method_args'])
    y = rolling.mean()[window:]
    n = len(data)
    stderr = rolling.std()[window:]
    # Keep the x values aligned with the trimmed rolling results.
    x = data['x'][window:]
    # Rebind `data` to the result frame; the caller's frame is not mutated.
    data = pd.DataFrame({'x': x, 'y': y})
    data.reset_index(inplace=True, drop=True)

    if params['se']:
        # Degrees of freedom for the t-distribution interval.
        df = n - window  # Original - Used
        data['ymin'], data['ymax'] = tdist_ci(
            y, df, stderr, params['level'])
        data['se'] = stderr

    return data
def function[mavg, parameter[data, xseq]]: constant[ Fit moving average ] variable[window] assign[=] call[call[name[params]][constant[method_args]]][constant[window]] variable[rolling] assign[=] call[call[name[data]][constant[y]].rolling, parameter[]] variable[y] assign[=] call[call[name[rolling].mean, parameter[]]][<ast.Slice object at 0x7da18dc07070>] variable[n] assign[=] call[name[len], parameter[name[data]]] variable[stderr] assign[=] call[call[name[rolling].std, parameter[]]][<ast.Slice object at 0x7da18dc06050>] variable[x] assign[=] call[call[name[data]][constant[x]]][<ast.Slice object at 0x7da18dc07a60>] variable[data] assign[=] call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da18f722230>, <ast.Constant object at 0x7da18f721b70>], [<ast.Name object at 0x7da18f721030>, <ast.Name object at 0x7da18f720a00>]]]] call[name[data].reset_index, parameter[]] if call[name[params]][constant[se]] begin[:] variable[df] assign[=] binary_operation[name[n] - name[window]] <ast.Tuple object at 0x7da18f723970> assign[=] call[name[tdist_ci], parameter[name[y], name[df], name[stderr], call[name[params]][constant[level]]]] call[name[data]][constant[se]] assign[=] name[stderr] return[name[data]]
keyword[def] identifier[mavg] ( identifier[data] , identifier[xseq] ,** identifier[params] ): literal[string] identifier[window] = identifier[params] [ literal[string] ][ literal[string] ] identifier[rolling] = identifier[data] [ literal[string] ]. identifier[rolling] (** identifier[params] [ literal[string] ]) identifier[y] = identifier[rolling] . identifier[mean] ()[ identifier[window] :] identifier[n] = identifier[len] ( identifier[data] ) identifier[stderr] = identifier[rolling] . identifier[std] ()[ identifier[window] :] identifier[x] = identifier[data] [ literal[string] ][ identifier[window] :] identifier[data] = identifier[pd] . identifier[DataFrame] ({ literal[string] : identifier[x] , literal[string] : identifier[y] }) identifier[data] . identifier[reset_index] ( identifier[inplace] = keyword[True] , identifier[drop] = keyword[True] ) keyword[if] identifier[params] [ literal[string] ]: identifier[df] = identifier[n] - identifier[window] identifier[data] [ literal[string] ], identifier[data] [ literal[string] ]= identifier[tdist_ci] ( identifier[y] , identifier[df] , identifier[stderr] , identifier[params] [ literal[string] ]) identifier[data] [ literal[string] ]= identifier[stderr] keyword[return] identifier[data]
def mavg(data, xseq, **params): """ Fit moving average """ window = params['method_args']['window'] # The first average comes after the full window size # has been swept over rolling = data['y'].rolling(**params['method_args']) y = rolling.mean()[window:] n = len(data) stderr = rolling.std()[window:] x = data['x'][window:] data = pd.DataFrame({'x': x, 'y': y}) data.reset_index(inplace=True, drop=True) if params['se']: df = n - window # Original - Used (data['ymin'], data['ymax']) = tdist_ci(y, df, stderr, params['level']) data['se'] = stderr # depends on [control=['if'], data=[]] return data
def iuptri(items, diago=True, with_inds=False):
    """
    A generator that yields the upper triangle of the matrix (items x items)

    Args:
        items: Iterable object with elements [e0, e1, ...]
        diago: False if diagonal matrix elements should be excluded
        with_inds: If True, (i,j) (e_i, e_j) is returned else (e_i, e_j)

    >>> for (ij, mate) in iuptri([0,1], with_inds=True):
    ...     print("ij:", ij, "mate:", mate)
    ij: (0, 0) mate: (0, 0)
    ij: (0, 1) mate: (0, 1)
    ij: (1, 1) mate: (1, 1)
    """
    for i, first in enumerate(items):
        for j, second in enumerate(items):
            # Keep only the upper triangle; `diago` controls whether the
            # diagonal itself is included.
            in_triangle = (j >= i) if diago else (j > i)
            if not in_triangle:
                continue
            if with_inds:
                yield (i, j), (first, second)
            else:
                yield first, second
def function[iuptri, parameter[items, diago, with_inds]]: constant[ A generator that yields the upper triangle of the matrix (items x items) Args: items: Iterable object with elements [e0, e1, ...] diago: False if diagonal matrix elements should be excluded with_inds: If True, (i,j) (e_i, e_j) is returned else (e_i, e_j) >>> for (ij, mate) in iuptri([0,1], with_inds=True): ... print("ij:", ij, "mate:", mate) ij: (0, 0) mate: (0, 0) ij: (0, 1) mate: (0, 1) ij: (1, 1) mate: (1, 1) ] for taget[tuple[[<ast.Name object at 0x7da1b13e1ab0>, <ast.Name object at 0x7da1b13e2aa0>]]] in starred[call[name[enumerate], parameter[name[items]]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b13e22f0>, <ast.Name object at 0x7da1b13e0460>]]] in starred[call[name[enumerate], parameter[name[items]]]] begin[:] variable[do_yield] assign[=] <ast.IfExp object at 0x7da1b13e1270> if name[do_yield] begin[:] if name[with_inds] begin[:] <ast.Yield object at 0x7da1b13e3850>
keyword[def] identifier[iuptri] ( identifier[items] , identifier[diago] = keyword[True] , identifier[with_inds] = keyword[False] ): literal[string] keyword[for] ( identifier[ii] , identifier[item1] ) keyword[in] identifier[enumerate] ( identifier[items] ): keyword[for] ( identifier[jj] , identifier[item2] ) keyword[in] identifier[enumerate] ( identifier[items] ): identifier[do_yield] =( identifier[jj] >= identifier[ii] ) keyword[if] identifier[diago] keyword[else] ( identifier[jj] > identifier[ii] ) keyword[if] identifier[do_yield] : keyword[if] identifier[with_inds] : keyword[yield] ( identifier[ii] , identifier[jj] ),( identifier[item1] , identifier[item2] ) keyword[else] : keyword[yield] identifier[item1] , identifier[item2]
def iuptri(items, diago=True, with_inds=False): """ A generator that yields the upper triangle of the matrix (items x items) Args: items: Iterable object with elements [e0, e1, ...] diago: False if diagonal matrix elements should be excluded with_inds: If True, (i,j) (e_i, e_j) is returned else (e_i, e_j) >>> for (ij, mate) in iuptri([0,1], with_inds=True): ... print("ij:", ij, "mate:", mate) ij: (0, 0) mate: (0, 0) ij: (0, 1) mate: (0, 1) ij: (1, 1) mate: (1, 1) """ for (ii, item1) in enumerate(items): for (jj, item2) in enumerate(items): do_yield = jj >= ii if diago else jj > ii if do_yield: if with_inds: yield ((ii, jj), (item1, item2)) # depends on [control=['if'], data=[]] else: yield (item1, item2) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
def setup_files(class_dir, seed):
    """Returns shuffled files """
    # Seed the RNG so the shuffle order is reproducible across runs.
    random.seed(seed)
    # Sort first so the shuffle starts from a deterministic ordering
    # regardless of filesystem listing order.
    files = sorted(list_files(class_dir))
    random.shuffle(files)
    return files
def function[setup_files, parameter[class_dir, seed]]: constant[Returns shuffled files ] call[name[random].seed, parameter[name[seed]]] variable[files] assign[=] call[name[list_files], parameter[name[class_dir]]] call[name[files].sort, parameter[]] call[name[random].shuffle, parameter[name[files]]] return[name[files]]
keyword[def] identifier[setup_files] ( identifier[class_dir] , identifier[seed] ): literal[string] identifier[random] . identifier[seed] ( identifier[seed] ) identifier[files] = identifier[list_files] ( identifier[class_dir] ) identifier[files] . identifier[sort] () identifier[random] . identifier[shuffle] ( identifier[files] ) keyword[return] identifier[files]
def setup_files(class_dir, seed): """Returns shuffled files """ # make sure its reproducible random.seed(seed) files = list_files(class_dir) files.sort() random.shuffle(files) return files
def quoter(obj):
    """Return a Quoted URL.

    The quote function will return a URL encoded string. If there is an
    exception in the job which results in a "KeyError" the original
    string will be returned as it will be assumed to already be URL
    encoded.

    :param obj: ``basestring``
    :return: ``str``
    """
    # Resolve the right quote callable: Python 2 exposes urllib.quote,
    # Python 3 moved it to urllib.parse.quote.
    try:
        quote = urllib.quote
    except AttributeError:
        quote = urllib.parse.quote
    try:
        return quote(obj)
    except KeyError:
        # Assume the input was already URL encoded.
        return obj
def function[quoter, parameter[obj]]: constant[Return a Quoted URL. The quote function will return a URL encoded string. If there is an exception in the job which results in a "KeyError" the original string will be returned as it will be assumed to already be URL encoded. :param obj: ``basestring`` :return: ``str`` ] <ast.Try object at 0x7da1b27eebc0>
keyword[def] identifier[quoter] ( identifier[obj] ): literal[string] keyword[try] : keyword[try] : keyword[return] identifier[urllib] . identifier[quote] ( identifier[obj] ) keyword[except] identifier[AttributeError] : keyword[return] identifier[urllib] . identifier[parse] . identifier[quote] ( identifier[obj] ) keyword[except] identifier[KeyError] : keyword[return] identifier[obj]
def quoter(obj): """Return a Quoted URL. The quote function will return a URL encoded string. If there is an exception in the job which results in a "KeyError" the original string will be returned as it will be assumed to already be URL encoded. :param obj: ``basestring`` :return: ``str`` """ try: try: return urllib.quote(obj) # depends on [control=['try'], data=[]] except AttributeError: return urllib.parse.quote(obj) # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]] except KeyError: return obj # depends on [control=['except'], data=[]]
def signature(self, name, file_name, file_type, file_text, **kwargs): """Add Signature data to Batch object. Valid file_types: + Snort ® + Suricata + YARA + ClamAV ® + OpenIOC + CybOX ™ + Bro + Regex + SPL - Splunk ® Search Processing Language Args: name (str): The name for this Group. file_name (str): The name for the attached signature for this Group. file_type (str): The signature type for this Group. file_text (str): The signature content for this Group. date_added (str, kwargs): The date timestamp the Indicator was created. xid (str, kwargs): The external id for this Group. Returns: obj: An instance of Signature. """ group_obj = Signature(name, file_name, file_type, file_text, **kwargs) return self._group(group_obj)
def function[signature, parameter[self, name, file_name, file_type, file_text]]: constant[Add Signature data to Batch object. Valid file_types: + Snort ® + Suricata + YARA + ClamAV ® + OpenIOC + CybOX ™ + Bro + Regex + SPL - Splunk ® Search Processing Language Args: name (str): The name for this Group. file_name (str): The name for the attached signature for this Group. file_type (str): The signature type for this Group. file_text (str): The signature content for this Group. date_added (str, kwargs): The date timestamp the Indicator was created. xid (str, kwargs): The external id for this Group. Returns: obj: An instance of Signature. ] variable[group_obj] assign[=] call[name[Signature], parameter[name[name], name[file_name], name[file_type], name[file_text]]] return[call[name[self]._group, parameter[name[group_obj]]]]
keyword[def] identifier[signature] ( identifier[self] , identifier[name] , identifier[file_name] , identifier[file_type] , identifier[file_text] ,** identifier[kwargs] ): literal[string] identifier[group_obj] = identifier[Signature] ( identifier[name] , identifier[file_name] , identifier[file_type] , identifier[file_text] ,** identifier[kwargs] ) keyword[return] identifier[self] . identifier[_group] ( identifier[group_obj] )
def signature(self, name, file_name, file_type, file_text, **kwargs): """Add Signature data to Batch object. Valid file_types: + Snort ® + Suricata + YARA + ClamAV ® + OpenIOC + CybOX ™ + Bro + Regex + SPL - Splunk ® Search Processing Language Args: name (str): The name for this Group. file_name (str): The name for the attached signature for this Group. file_type (str): The signature type for this Group. file_text (str): The signature content for this Group. date_added (str, kwargs): The date timestamp the Indicator was created. xid (str, kwargs): The external id for this Group. Returns: obj: An instance of Signature. """ group_obj = Signature(name, file_name, file_type, file_text, **kwargs) return self._group(group_obj)
def get_summary_dict(self, include_msd_t=False, include_mscd_t=False): """ Provides a summary of diffusion information. Args: include_msd_t (bool): Whether to include mean square displace and time data with the data. include_msd_t (bool): Whether to include mean square charge displace and time data with the data. Returns: (dict) of diffusion and conductivity data. """ d = { "D": self.diffusivity, "D_sigma": self.diffusivity_std_dev, "D_charge": self.chg_diffusivity, "D_charge_sigma": self.chg_diffusivity_std_dev, "S": self.conductivity, "S_sigma": self.conductivity_std_dev, "S_charge": self.chg_conductivity, "D_components": self.diffusivity_components.tolist(), "S_components": self.conductivity_components.tolist(), "D_components_sigma": self.diffusivity_components_std_dev.tolist(), "S_components_sigma": self.conductivity_components_std_dev.tolist(), "specie": str(self.specie), "step_skip": self.step_skip, "time_step": self.time_step, "temperature": self.temperature, "max_framework_displacement": self.max_framework_displacement, "Haven_ratio": self.haven_ratio } if include_msd_t: d["msd"] = self.msd.tolist() d["msd_components"] = self.msd_components.tolist() d["dt"] = self.dt.tolist() if include_mscd_t: d["mscd"] = self.mscd.tolist() return d
def function[get_summary_dict, parameter[self, include_msd_t, include_mscd_t]]: constant[ Provides a summary of diffusion information. Args: include_msd_t (bool): Whether to include mean square displace and time data with the data. include_msd_t (bool): Whether to include mean square charge displace and time data with the data. Returns: (dict) of diffusion and conductivity data. ] variable[d] assign[=] dictionary[[<ast.Constant object at 0x7da1b1beb550>, <ast.Constant object at 0x7da1b1beb8b0>, <ast.Constant object at 0x7da1b1beab60>, <ast.Constant object at 0x7da1b1be8550>, <ast.Constant object at 0x7da1b1beab30>, <ast.Constant object at 0x7da1b1be8580>, <ast.Constant object at 0x7da1b1be9510>, <ast.Constant object at 0x7da1b1be95a0>, <ast.Constant object at 0x7da1b1be9540>, <ast.Constant object at 0x7da1b1be9570>, <ast.Constant object at 0x7da1b1be85b0>, <ast.Constant object at 0x7da1b1beb2e0>, <ast.Constant object at 0x7da1b1beb2b0>, <ast.Constant object at 0x7da1b1be94b0>, <ast.Constant object at 0x7da1b1be94e0>, <ast.Constant object at 0x7da1b1be9480>, <ast.Constant object at 0x7da1b1be92a0>], [<ast.Attribute object at 0x7da1b1be9390>, <ast.Attribute object at 0x7da1b1be93f0>, <ast.Attribute object at 0x7da1b1be9360>, <ast.Attribute object at 0x7da1b1be93c0>, <ast.Attribute object at 0x7da1b1be8f70>, <ast.Attribute object at 0x7da1b1be8e80>, <ast.Attribute object at 0x7da1b1be8e50>, <ast.Call object at 0x7da1b1be88b0>, <ast.Call object at 0x7da1b1be8f10>, <ast.Call object at 0x7da1b1be9960>, <ast.Call object at 0x7da1b1bebb20>, <ast.Call object at 0x7da1b1bebcd0>, <ast.Attribute object at 0x7da1b1be8b50>, <ast.Attribute object at 0x7da1b1be8dc0>, <ast.Attribute object at 0x7da1b1be8b20>, <ast.Attribute object at 0x7da1b1be8910>, <ast.Attribute object at 0x7da1b1be8a30>]] if name[include_msd_t] begin[:] call[name[d]][constant[msd]] assign[=] call[name[self].msd.tolist, parameter[]] call[name[d]][constant[msd_components]] assign[=] 
call[name[self].msd_components.tolist, parameter[]] call[name[d]][constant[dt]] assign[=] call[name[self].dt.tolist, parameter[]] if name[include_mscd_t] begin[:] call[name[d]][constant[mscd]] assign[=] call[name[self].mscd.tolist, parameter[]] return[name[d]]
keyword[def] identifier[get_summary_dict] ( identifier[self] , identifier[include_msd_t] = keyword[False] , identifier[include_mscd_t] = keyword[False] ): literal[string] identifier[d] ={ literal[string] : identifier[self] . identifier[diffusivity] , literal[string] : identifier[self] . identifier[diffusivity_std_dev] , literal[string] : identifier[self] . identifier[chg_diffusivity] , literal[string] : identifier[self] . identifier[chg_diffusivity_std_dev] , literal[string] : identifier[self] . identifier[conductivity] , literal[string] : identifier[self] . identifier[conductivity_std_dev] , literal[string] : identifier[self] . identifier[chg_conductivity] , literal[string] : identifier[self] . identifier[diffusivity_components] . identifier[tolist] (), literal[string] : identifier[self] . identifier[conductivity_components] . identifier[tolist] (), literal[string] : identifier[self] . identifier[diffusivity_components_std_dev] . identifier[tolist] (), literal[string] : identifier[self] . identifier[conductivity_components_std_dev] . identifier[tolist] (), literal[string] : identifier[str] ( identifier[self] . identifier[specie] ), literal[string] : identifier[self] . identifier[step_skip] , literal[string] : identifier[self] . identifier[time_step] , literal[string] : identifier[self] . identifier[temperature] , literal[string] : identifier[self] . identifier[max_framework_displacement] , literal[string] : identifier[self] . identifier[haven_ratio] } keyword[if] identifier[include_msd_t] : identifier[d] [ literal[string] ]= identifier[self] . identifier[msd] . identifier[tolist] () identifier[d] [ literal[string] ]= identifier[self] . identifier[msd_components] . identifier[tolist] () identifier[d] [ literal[string] ]= identifier[self] . identifier[dt] . identifier[tolist] () keyword[if] identifier[include_mscd_t] : identifier[d] [ literal[string] ]= identifier[self] . identifier[mscd] . identifier[tolist] () keyword[return] identifier[d]
def get_summary_dict(self, include_msd_t=False, include_mscd_t=False): """ Provides a summary of diffusion information. Args: include_msd_t (bool): Whether to include mean square displace and time data with the data. include_msd_t (bool): Whether to include mean square charge displace and time data with the data. Returns: (dict) of diffusion and conductivity data. """ d = {'D': self.diffusivity, 'D_sigma': self.diffusivity_std_dev, 'D_charge': self.chg_diffusivity, 'D_charge_sigma': self.chg_diffusivity_std_dev, 'S': self.conductivity, 'S_sigma': self.conductivity_std_dev, 'S_charge': self.chg_conductivity, 'D_components': self.diffusivity_components.tolist(), 'S_components': self.conductivity_components.tolist(), 'D_components_sigma': self.diffusivity_components_std_dev.tolist(), 'S_components_sigma': self.conductivity_components_std_dev.tolist(), 'specie': str(self.specie), 'step_skip': self.step_skip, 'time_step': self.time_step, 'temperature': self.temperature, 'max_framework_displacement': self.max_framework_displacement, 'Haven_ratio': self.haven_ratio} if include_msd_t: d['msd'] = self.msd.tolist() d['msd_components'] = self.msd_components.tolist() d['dt'] = self.dt.tolist() # depends on [control=['if'], data=[]] if include_mscd_t: d['mscd'] = self.mscd.tolist() # depends on [control=['if'], data=[]] return d
def start_serving_nowait(self, connection_config: ConnectionConfig, loop: Optional[asyncio.AbstractEventLoop] = None) -> None: """ Start serving this :class:`~lahja.endpoint.Endpoint` so that it can receive events. It is not guaranteed that the :class:`~lahja.endpoint.Endpoint` is fully ready after this method returns. Use :meth:`~lahja.endpoint.Endpoint.start_serving` or combine with :meth:`~lahja.endpoint.Endpoint.wait_until_serving` """ if loop is None: loop = asyncio.get_event_loop() self._name = connection_config.name self._ipc_path = connection_config.path self._create_external_api(self._ipc_path) self._loop = loop self._internal_loop_running = asyncio.Event(loop=self.event_loop) self._receiving_loop_running = asyncio.Event(loop=self.event_loop) self._internal_queue = asyncio.Queue(loop=self.event_loop) self._receiving_queue = asyncio.Queue(loop=self.event_loop) # Using `gather` (over e.g. `wait` or plain `ensure_future`) ensures that the inner futures # are automatically cancelled as soon as the parent task is cancelled asyncio.gather( asyncio.ensure_future(self._connect_receiving_queue(), loop=self.event_loop), asyncio.ensure_future(self._connect_internal_queue(), loop=self.event_loop), loop=self.event_loop ) self._running = True
def function[start_serving_nowait, parameter[self, connection_config, loop]]: constant[ Start serving this :class:`~lahja.endpoint.Endpoint` so that it can receive events. It is not guaranteed that the :class:`~lahja.endpoint.Endpoint` is fully ready after this method returns. Use :meth:`~lahja.endpoint.Endpoint.start_serving` or combine with :meth:`~lahja.endpoint.Endpoint.wait_until_serving` ] if compare[name[loop] is constant[None]] begin[:] variable[loop] assign[=] call[name[asyncio].get_event_loop, parameter[]] name[self]._name assign[=] name[connection_config].name name[self]._ipc_path assign[=] name[connection_config].path call[name[self]._create_external_api, parameter[name[self]._ipc_path]] name[self]._loop assign[=] name[loop] name[self]._internal_loop_running assign[=] call[name[asyncio].Event, parameter[]] name[self]._receiving_loop_running assign[=] call[name[asyncio].Event, parameter[]] name[self]._internal_queue assign[=] call[name[asyncio].Queue, parameter[]] name[self]._receiving_queue assign[=] call[name[asyncio].Queue, parameter[]] call[name[asyncio].gather, parameter[call[name[asyncio].ensure_future, parameter[call[name[self]._connect_receiving_queue, parameter[]]]], call[name[asyncio].ensure_future, parameter[call[name[self]._connect_internal_queue, parameter[]]]]]] name[self]._running assign[=] constant[True]
keyword[def] identifier[start_serving_nowait] ( identifier[self] , identifier[connection_config] : identifier[ConnectionConfig] , identifier[loop] : identifier[Optional] [ identifier[asyncio] . identifier[AbstractEventLoop] ]= keyword[None] )-> keyword[None] : literal[string] keyword[if] identifier[loop] keyword[is] keyword[None] : identifier[loop] = identifier[asyncio] . identifier[get_event_loop] () identifier[self] . identifier[_name] = identifier[connection_config] . identifier[name] identifier[self] . identifier[_ipc_path] = identifier[connection_config] . identifier[path] identifier[self] . identifier[_create_external_api] ( identifier[self] . identifier[_ipc_path] ) identifier[self] . identifier[_loop] = identifier[loop] identifier[self] . identifier[_internal_loop_running] = identifier[asyncio] . identifier[Event] ( identifier[loop] = identifier[self] . identifier[event_loop] ) identifier[self] . identifier[_receiving_loop_running] = identifier[asyncio] . identifier[Event] ( identifier[loop] = identifier[self] . identifier[event_loop] ) identifier[self] . identifier[_internal_queue] = identifier[asyncio] . identifier[Queue] ( identifier[loop] = identifier[self] . identifier[event_loop] ) identifier[self] . identifier[_receiving_queue] = identifier[asyncio] . identifier[Queue] ( identifier[loop] = identifier[self] . identifier[event_loop] ) identifier[asyncio] . identifier[gather] ( identifier[asyncio] . identifier[ensure_future] ( identifier[self] . identifier[_connect_receiving_queue] (), identifier[loop] = identifier[self] . identifier[event_loop] ), identifier[asyncio] . identifier[ensure_future] ( identifier[self] . identifier[_connect_internal_queue] (), identifier[loop] = identifier[self] . identifier[event_loop] ), identifier[loop] = identifier[self] . identifier[event_loop] ) identifier[self] . identifier[_running] = keyword[True]
def start_serving_nowait(self, connection_config: ConnectionConfig, loop: Optional[asyncio.AbstractEventLoop]=None) -> None: """ Start serving this :class:`~lahja.endpoint.Endpoint` so that it can receive events. It is not guaranteed that the :class:`~lahja.endpoint.Endpoint` is fully ready after this method returns. Use :meth:`~lahja.endpoint.Endpoint.start_serving` or combine with :meth:`~lahja.endpoint.Endpoint.wait_until_serving` """ if loop is None: loop = asyncio.get_event_loop() # depends on [control=['if'], data=['loop']] self._name = connection_config.name self._ipc_path = connection_config.path self._create_external_api(self._ipc_path) self._loop = loop self._internal_loop_running = asyncio.Event(loop=self.event_loop) self._receiving_loop_running = asyncio.Event(loop=self.event_loop) self._internal_queue = asyncio.Queue(loop=self.event_loop) self._receiving_queue = asyncio.Queue(loop=self.event_loop) # Using `gather` (over e.g. `wait` or plain `ensure_future`) ensures that the inner futures # are automatically cancelled as soon as the parent task is cancelled asyncio.gather(asyncio.ensure_future(self._connect_receiving_queue(), loop=self.event_loop), asyncio.ensure_future(self._connect_internal_queue(), loop=self.event_loop), loop=self.event_loop) self._running = True
def accounts(): """Load the accounts YAML file and return a dict """ import yaml for path in account_files: try: c_dir = os.path.dirname(path) if not os.path.exists(c_dir): os.makedirs(c_dir) with open(path, 'rb') as f: return yaml.load(f)['accounts'] except (OSError, IOError) as e: pass return {}
def function[accounts, parameter[]]: constant[Load the accounts YAML file and return a dict ] import module[yaml] for taget[name[path]] in starred[name[account_files]] begin[:] <ast.Try object at 0x7da18dc999f0> return[dictionary[[], []]]
keyword[def] identifier[accounts] (): literal[string] keyword[import] identifier[yaml] keyword[for] identifier[path] keyword[in] identifier[account_files] : keyword[try] : identifier[c_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[path] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[c_dir] ): identifier[os] . identifier[makedirs] ( identifier[c_dir] ) keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] : keyword[return] identifier[yaml] . identifier[load] ( identifier[f] )[ literal[string] ] keyword[except] ( identifier[OSError] , identifier[IOError] ) keyword[as] identifier[e] : keyword[pass] keyword[return] {}
def accounts(): """Load the accounts YAML file and return a dict """ import yaml for path in account_files: try: c_dir = os.path.dirname(path) if not os.path.exists(c_dir): os.makedirs(c_dir) # depends on [control=['if'], data=[]] with open(path, 'rb') as f: return yaml.load(f)['accounts'] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]] except (OSError, IOError) as e: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['path']] return {}
def log_handlers(opts): ''' Returns the custom logging handler modules :param dict opts: The Salt options dictionary ''' ret = LazyLoader( _module_dirs( opts, 'log_handlers', int_type='handlers', base_path=os.path.join(SALT_BASE_PATH, 'log'), ), opts, tag='log_handlers', ) return FilterDictWrapper(ret, '.setup_handlers')
def function[log_handlers, parameter[opts]]: constant[ Returns the custom logging handler modules :param dict opts: The Salt options dictionary ] variable[ret] assign[=] call[name[LazyLoader], parameter[call[name[_module_dirs], parameter[name[opts], constant[log_handlers]]], name[opts]]] return[call[name[FilterDictWrapper], parameter[name[ret], constant[.setup_handlers]]]]
keyword[def] identifier[log_handlers] ( identifier[opts] ): literal[string] identifier[ret] = identifier[LazyLoader] ( identifier[_module_dirs] ( identifier[opts] , literal[string] , identifier[int_type] = literal[string] , identifier[base_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[SALT_BASE_PATH] , literal[string] ), ), identifier[opts] , identifier[tag] = literal[string] , ) keyword[return] identifier[FilterDictWrapper] ( identifier[ret] , literal[string] )
def log_handlers(opts): """ Returns the custom logging handler modules :param dict opts: The Salt options dictionary """ ret = LazyLoader(_module_dirs(opts, 'log_handlers', int_type='handlers', base_path=os.path.join(SALT_BASE_PATH, 'log')), opts, tag='log_handlers') return FilterDictWrapper(ret, '.setup_handlers')
def process_bind_param(self, value, dialect): """convert value from python object to json""" if value is not None: value = simplejson.dumps(value) return value
def function[process_bind_param, parameter[self, value, dialect]]: constant[convert value from python object to json] if compare[name[value] is_not constant[None]] begin[:] variable[value] assign[=] call[name[simplejson].dumps, parameter[name[value]]] return[name[value]]
keyword[def] identifier[process_bind_param] ( identifier[self] , identifier[value] , identifier[dialect] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : identifier[value] = identifier[simplejson] . identifier[dumps] ( identifier[value] ) keyword[return] identifier[value]
def process_bind_param(self, value, dialect): """convert value from python object to json""" if value is not None: value = simplejson.dumps(value) # depends on [control=['if'], data=['value']] return value
def console_fill_char(con: tcod.console.Console, arr: Sequence[int]) -> None: """Fill the character tiles of a console with an array. `arr` is an array of integers with a length of the consoles width and height. .. deprecated:: 8.4 You should assign to :any:`tcod.console.Console.ch` instead. """ if isinstance(arr, np.ndarray): # numpy arrays, use numpy's ctypes functions np_array = np.ascontiguousarray(arr, dtype=np.intc) carr = ffi.cast("int *", np_array.ctypes.data) else: # otherwise convert using the ffi module carr = ffi.new("int[]", arr) lib.TCOD_console_fill_char(_console(con), carr)
def function[console_fill_char, parameter[con, arr]]: constant[Fill the character tiles of a console with an array. `arr` is an array of integers with a length of the consoles width and height. .. deprecated:: 8.4 You should assign to :any:`tcod.console.Console.ch` instead. ] if call[name[isinstance], parameter[name[arr], name[np].ndarray]] begin[:] variable[np_array] assign[=] call[name[np].ascontiguousarray, parameter[name[arr]]] variable[carr] assign[=] call[name[ffi].cast, parameter[constant[int *], name[np_array].ctypes.data]] call[name[lib].TCOD_console_fill_char, parameter[call[name[_console], parameter[name[con]]], name[carr]]]
keyword[def] identifier[console_fill_char] ( identifier[con] : identifier[tcod] . identifier[console] . identifier[Console] , identifier[arr] : identifier[Sequence] [ identifier[int] ])-> keyword[None] : literal[string] keyword[if] identifier[isinstance] ( identifier[arr] , identifier[np] . identifier[ndarray] ): identifier[np_array] = identifier[np] . identifier[ascontiguousarray] ( identifier[arr] , identifier[dtype] = identifier[np] . identifier[intc] ) identifier[carr] = identifier[ffi] . identifier[cast] ( literal[string] , identifier[np_array] . identifier[ctypes] . identifier[data] ) keyword[else] : identifier[carr] = identifier[ffi] . identifier[new] ( literal[string] , identifier[arr] ) identifier[lib] . identifier[TCOD_console_fill_char] ( identifier[_console] ( identifier[con] ), identifier[carr] )
def console_fill_char(con: tcod.console.Console, arr: Sequence[int]) -> None: """Fill the character tiles of a console with an array. `arr` is an array of integers with a length of the consoles width and height. .. deprecated:: 8.4 You should assign to :any:`tcod.console.Console.ch` instead. """ if isinstance(arr, np.ndarray): # numpy arrays, use numpy's ctypes functions np_array = np.ascontiguousarray(arr, dtype=np.intc) carr = ffi.cast('int *', np_array.ctypes.data) # depends on [control=['if'], data=[]] else: # otherwise convert using the ffi module carr = ffi.new('int[]', arr) lib.TCOD_console_fill_char(_console(con), carr)
def CreateFile(filename, contents, eol_style=EOL_STYLE_NATIVE, create_dir=True, encoding=None, binary=False): ''' Create a file with the given contents. :param unicode filename: Filename and path to be created. :param unicode contents: The file contents as a string. :type eol_style: EOL_STYLE_XXX constant :param eol_style: Replaces the EOL by the appropriate EOL depending on the eol_style value. Considers that all content is using only "\n" as EOL. :param bool create_dir: If True, also creates directories needed in filename's path :param unicode encoding: Target file's content encoding. Defaults to sys.getfilesystemencoding() Ignored if `binary` = True :param bool binary: If True, file is created in binary mode. In this case, `contents` must be `bytes` and not `unicode` :return unicode: Returns the name of the file created. :raises NotImplementedProtocol: If file protocol is not local or FTP :raises ValueError: If trying to mix unicode `contents` without `encoding`, or `encoding` without unicode `contents` .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information ''' # Lots of checks when writing binary files if binary: if isinstance(contents, six.text_type): raise TypeError('contents must be str (bytes) when binary=True') else: if not isinstance(contents, six.text_type): raise TypeError('contents must be unicode when binary=False') # Replaces eol on each line by the given eol_style. 
contents = _HandleContentsEol(contents, eol_style) # Encode string and pretend we are using binary to prevent 'open' from automatically # changing Eols encoding = encoding or sys.getfilesystemencoding() contents = contents.encode(encoding) binary = True # If asked, creates directory containing file if create_dir: dirname = os.path.dirname(filename) if dirname: CreateDirectory(dirname) from six.moves.urllib.parse import urlparse filename_url = urlparse(filename) # Handle local if _UrlIsLocal(filename_url): # Always writing as binary (see handling above) with open(filename, 'wb') as oss: oss.write(contents) # Handle FTP elif filename_url.scheme == 'ftp': # Always writing as binary (see handling above) from ._exceptions import NotImplementedProtocol raise NotImplementedProtocol(directory_url.scheme) else: from ._exceptions import NotImplementedProtocol raise NotImplementedProtocol(filename_url.scheme) return filename
def function[CreateFile, parameter[filename, contents, eol_style, create_dir, encoding, binary]]: constant[ Create a file with the given contents. :param unicode filename: Filename and path to be created. :param unicode contents: The file contents as a string. :type eol_style: EOL_STYLE_XXX constant :param eol_style: Replaces the EOL by the appropriate EOL depending on the eol_style value. Considers that all content is using only " " as EOL. :param bool create_dir: If True, also creates directories needed in filename's path :param unicode encoding: Target file's content encoding. Defaults to sys.getfilesystemencoding() Ignored if `binary` = True :param bool binary: If True, file is created in binary mode. In this case, `contents` must be `bytes` and not `unicode` :return unicode: Returns the name of the file created. :raises NotImplementedProtocol: If file protocol is not local or FTP :raises ValueError: If trying to mix unicode `contents` without `encoding`, or `encoding` without unicode `contents` .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information ] if name[binary] begin[:] if call[name[isinstance], parameter[name[contents], name[six].text_type]] begin[:] <ast.Raise object at 0x7da20c6c5660> if name[create_dir] begin[:] variable[dirname] assign[=] call[name[os].path.dirname, parameter[name[filename]]] if name[dirname] begin[:] call[name[CreateDirectory], parameter[name[dirname]]] from relative_module[six.moves.urllib.parse] import module[urlparse] variable[filename_url] assign[=] call[name[urlparse], parameter[name[filename]]] if call[name[_UrlIsLocal], parameter[name[filename_url]]] begin[:] with call[name[open], parameter[name[filename], constant[wb]]] begin[:] call[name[oss].write, parameter[name[contents]]] return[name[filename]]
keyword[def] identifier[CreateFile] ( identifier[filename] , identifier[contents] , identifier[eol_style] = identifier[EOL_STYLE_NATIVE] , identifier[create_dir] = keyword[True] , identifier[encoding] = keyword[None] , identifier[binary] = keyword[False] ): literal[string] keyword[if] identifier[binary] : keyword[if] identifier[isinstance] ( identifier[contents] , identifier[six] . identifier[text_type] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[else] : keyword[if] keyword[not] identifier[isinstance] ( identifier[contents] , identifier[six] . identifier[text_type] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[contents] = identifier[_HandleContentsEol] ( identifier[contents] , identifier[eol_style] ) identifier[encoding] = identifier[encoding] keyword[or] identifier[sys] . identifier[getfilesystemencoding] () identifier[contents] = identifier[contents] . identifier[encode] ( identifier[encoding] ) identifier[binary] = keyword[True] keyword[if] identifier[create_dir] : identifier[dirname] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[filename] ) keyword[if] identifier[dirname] : identifier[CreateDirectory] ( identifier[dirname] ) keyword[from] identifier[six] . identifier[moves] . identifier[urllib] . identifier[parse] keyword[import] identifier[urlparse] identifier[filename_url] = identifier[urlparse] ( identifier[filename] ) keyword[if] identifier[_UrlIsLocal] ( identifier[filename_url] ): keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[oss] : identifier[oss] . identifier[write] ( identifier[contents] ) keyword[elif] identifier[filename_url] . identifier[scheme] == literal[string] : keyword[from] . identifier[_exceptions] keyword[import] identifier[NotImplementedProtocol] keyword[raise] identifier[NotImplementedProtocol] ( identifier[directory_url] . identifier[scheme] ) keyword[else] : keyword[from] . 
identifier[_exceptions] keyword[import] identifier[NotImplementedProtocol] keyword[raise] identifier[NotImplementedProtocol] ( identifier[filename_url] . identifier[scheme] ) keyword[return] identifier[filename]
def CreateFile(filename, contents, eol_style=EOL_STYLE_NATIVE, create_dir=True, encoding=None, binary=False): """ Create a file with the given contents. :param unicode filename: Filename and path to be created. :param unicode contents: The file contents as a string. :type eol_style: EOL_STYLE_XXX constant :param eol_style: Replaces the EOL by the appropriate EOL depending on the eol_style value. Considers that all content is using only " " as EOL. :param bool create_dir: If True, also creates directories needed in filename's path :param unicode encoding: Target file's content encoding. Defaults to sys.getfilesystemencoding() Ignored if `binary` = True :param bool binary: If True, file is created in binary mode. In this case, `contents` must be `bytes` and not `unicode` :return unicode: Returns the name of the file created. :raises NotImplementedProtocol: If file protocol is not local or FTP :raises ValueError: If trying to mix unicode `contents` without `encoding`, or `encoding` without unicode `contents` .. seealso:: FTP LIMITATIONS at this module's doc for performance issues information """ # Lots of checks when writing binary files if binary: if isinstance(contents, six.text_type): raise TypeError('contents must be str (bytes) when binary=True') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: if not isinstance(contents, six.text_type): raise TypeError('contents must be unicode when binary=False') # depends on [control=['if'], data=[]] # Replaces eol on each line by the given eol_style. 
contents = _HandleContentsEol(contents, eol_style) # Encode string and pretend we are using binary to prevent 'open' from automatically # changing Eols encoding = encoding or sys.getfilesystemencoding() contents = contents.encode(encoding) binary = True # If asked, creates directory containing file if create_dir: dirname = os.path.dirname(filename) if dirname: CreateDirectory(dirname) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] from six.moves.urllib.parse import urlparse filename_url = urlparse(filename) # Handle local if _UrlIsLocal(filename_url): # Always writing as binary (see handling above) with open(filename, 'wb') as oss: oss.write(contents) # depends on [control=['with'], data=['oss']] # depends on [control=['if'], data=[]] # Handle FTP elif filename_url.scheme == 'ftp': # Always writing as binary (see handling above) from ._exceptions import NotImplementedProtocol raise NotImplementedProtocol(directory_url.scheme) # depends on [control=['if'], data=[]] else: from ._exceptions import NotImplementedProtocol raise NotImplementedProtocol(filename_url.scheme) return filename
def _convert(self, desired_type: Type[T], source_obj: S, logger: Logger, options: Dict[str, Dict[str, Any]]) -> T: """ Delegates to the user-provided method. Passes the appropriate part of the options according to the function name. :param desired_type: :param source_obj: :param logger: :param options: :return: """ try: if self.unpack_options: opts = self.get_applicable_options(options) if self.function_args is not None: return self.conversion_method(desired_type, source_obj, logger, **self.function_args, **opts) else: return self.conversion_method(desired_type, source_obj, logger, **opts) else: if self.function_args is not None: return self.conversion_method(desired_type, source_obj, logger, options, **self.function_args) else: return self.conversion_method(desired_type, source_obj, logger, options) except TypeError as e: raise CaughtTypeError.create(self.conversion_method, e)
def function[_convert, parameter[self, desired_type, source_obj, logger, options]]: constant[ Delegates to the user-provided method. Passes the appropriate part of the options according to the function name. :param desired_type: :param source_obj: :param logger: :param options: :return: ] <ast.Try object at 0x7da18f00e7d0>
keyword[def] identifier[_convert] ( identifier[self] , identifier[desired_type] : identifier[Type] [ identifier[T] ], identifier[source_obj] : identifier[S] , identifier[logger] : identifier[Logger] , identifier[options] : identifier[Dict] [ identifier[str] , identifier[Dict] [ identifier[str] , identifier[Any] ]])-> identifier[T] : literal[string] keyword[try] : keyword[if] identifier[self] . identifier[unpack_options] : identifier[opts] = identifier[self] . identifier[get_applicable_options] ( identifier[options] ) keyword[if] identifier[self] . identifier[function_args] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[conversion_method] ( identifier[desired_type] , identifier[source_obj] , identifier[logger] ,** identifier[self] . identifier[function_args] ,** identifier[opts] ) keyword[else] : keyword[return] identifier[self] . identifier[conversion_method] ( identifier[desired_type] , identifier[source_obj] , identifier[logger] ,** identifier[opts] ) keyword[else] : keyword[if] identifier[self] . identifier[function_args] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[conversion_method] ( identifier[desired_type] , identifier[source_obj] , identifier[logger] , identifier[options] ,** identifier[self] . identifier[function_args] ) keyword[else] : keyword[return] identifier[self] . identifier[conversion_method] ( identifier[desired_type] , identifier[source_obj] , identifier[logger] , identifier[options] ) keyword[except] identifier[TypeError] keyword[as] identifier[e] : keyword[raise] identifier[CaughtTypeError] . identifier[create] ( identifier[self] . identifier[conversion_method] , identifier[e] )
def _convert(self, desired_type: Type[T], source_obj: S, logger: Logger, options: Dict[str, Dict[str, Any]]) -> T: """ Delegates to the user-provided method. Passes the appropriate part of the options according to the function name. :param desired_type: :param source_obj: :param logger: :param options: :return: """ try: if self.unpack_options: opts = self.get_applicable_options(options) if self.function_args is not None: return self.conversion_method(desired_type, source_obj, logger, **self.function_args, **opts) # depends on [control=['if'], data=[]] else: return self.conversion_method(desired_type, source_obj, logger, **opts) # depends on [control=['if'], data=[]] elif self.function_args is not None: return self.conversion_method(desired_type, source_obj, logger, options, **self.function_args) # depends on [control=['if'], data=[]] else: return self.conversion_method(desired_type, source_obj, logger, options) # depends on [control=['try'], data=[]] except TypeError as e: raise CaughtTypeError.create(self.conversion_method, e) # depends on [control=['except'], data=['e']]
def generate_pymol_session(self, bonsai_pdb_content, cutting_pdb_content, bonsai_label = 'Bonsai', cutting_label = 'Cutting', pymol_executable = 'pymol', settings = None):
    ''' Generates the PyMOL session for the scaffold, model, and design structures.
        Returns this session and the script which generated it, or None when the
        PyMOL support module failed to load (pymol_load_failed is set).'''
    # Use None as the default and create a fresh dict per call: the previous
    # mutable default ({}) was mutated below ('Main'/'Loop' keys), so state
    # leaked between calls that relied on the default.
    if settings is None:
        settings = {}
    if not pymol_load_failed:
        b = BatchBuilder(pymol_executable = pymol_executable)

        # Collect residue identifiers (chain id + residue sequence, PDB
        # columns 22-27) of the cutting structure, keyed off its backbone
        # carbonyl carbon atoms.  The atom-name field is columns 13-16,
        # i.e. l[12:16]; the carbonyl C is ' C  ' (4 characters) — the old
        # 3-character literal could never match a 4-character slice.
        loop_residues = set()
        for l in cutting_pdb_content.split('\n'):
            if l.startswith('ATOM  ') and l[12:16] == ' C  ':
                loop_residues.add(l[21:27])
        loop_residues = sorted(loop_residues)

        structures_list = [
            (bonsai_label, bonsai_pdb_content, set()),
            (cutting_label, cutting_pdb_content, loop_residues),
        ]
        settings['Main'] = bonsai_label
        settings['Loop'] = cutting_label
        PSE_files = b.run(LoopRemovalBuilder, [PDBContainer.from_content_triple(structures_list)], settings = settings)

        return PSE_files[0], b.PSE_scripts[0]
def function[generate_pymol_session, parameter[self, bonsai_pdb_content, cutting_pdb_content, bonsai_label, cutting_label, pymol_executable, settings]]: constant[ Generates the PyMOL session for the scaffold, model, and design structures. Returns this session and the script which generated it.] if <ast.UnaryOp object at 0x7da1b24f0760> begin[:] variable[b] assign[=] call[name[BatchBuilder], parameter[]] variable[loop_residues] assign[=] call[name[set], parameter[]] for taget[name[l]] in starred[call[name[cutting_pdb_content].split, parameter[constant[ ]]]] begin[:] if <ast.BoolOp object at 0x7da1b228c2e0> begin[:] call[name[loop_residues].add, parameter[call[name[l]][<ast.Slice object at 0x7da1b228e860>]]] variable[loop_residues] assign[=] call[name[sorted], parameter[name[loop_residues]]] variable[structures_list] assign[=] list[[<ast.Tuple object at 0x7da1b228da80>, <ast.Tuple object at 0x7da1b228ca90>]] call[name[settings]][constant[Main]] assign[=] name[bonsai_label] call[name[settings]][constant[Loop]] assign[=] name[cutting_label] variable[PSE_files] assign[=] call[name[b].run, parameter[name[LoopRemovalBuilder], list[[<ast.Call object at 0x7da1b228c8b0>]]]] return[tuple[[<ast.Subscript object at 0x7da1b228c4c0>, <ast.Subscript object at 0x7da1b228ca60>]]]
keyword[def] identifier[generate_pymol_session] ( identifier[self] , identifier[bonsai_pdb_content] , identifier[cutting_pdb_content] , identifier[bonsai_label] = literal[string] , identifier[cutting_label] = literal[string] , identifier[pymol_executable] = literal[string] , identifier[settings] ={}): literal[string] keyword[if] keyword[not] identifier[pymol_load_failed] : identifier[b] = identifier[BatchBuilder] ( identifier[pymol_executable] = identifier[pymol_executable] ) identifier[loop_residues] = identifier[set] () keyword[for] identifier[l] keyword[in] identifier[cutting_pdb_content] . identifier[split] ( literal[string] ): keyword[if] identifier[l] . identifier[startswith] ( literal[string] ) keyword[and] identifier[l] [ literal[int] : literal[int] ]== literal[string] : identifier[loop_residues] . identifier[add] ( identifier[l] [ literal[int] : literal[int] ]) identifier[loop_residues] = identifier[sorted] ( identifier[loop_residues] ) identifier[structures_list] =[ ( identifier[bonsai_label] , identifier[bonsai_pdb_content] , identifier[set] ()), ( identifier[cutting_label] , identifier[cutting_pdb_content] , identifier[loop_residues] ), ] identifier[settings] [ literal[string] ]= identifier[bonsai_label] identifier[settings] [ literal[string] ]= identifier[cutting_label] identifier[PSE_files] = identifier[b] . identifier[run] ( identifier[LoopRemovalBuilder] ,[ identifier[PDBContainer] . identifier[from_content_triple] ( identifier[structures_list] )], identifier[settings] = identifier[settings] ) keyword[return] identifier[PSE_files] [ literal[int] ], identifier[b] . identifier[PSE_scripts] [ literal[int] ]
def generate_pymol_session(self, bonsai_pdb_content, cutting_pdb_content, bonsai_label='Bonsai', cutting_label='Cutting', pymol_executable='pymol', settings={}): """ Generates the PyMOL session for the scaffold, model, and design structures. Returns this session and the script which generated it.""" if not pymol_load_failed: b = BatchBuilder(pymol_executable=pymol_executable) loop_residues = set() for l in cutting_pdb_content.split('\n'): if l.startswith('ATOM ') and l[12:16] == ' C ': loop_residues.add(l[21:27]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['l']] loop_residues = sorted(loop_residues) structures_list = [(bonsai_label, bonsai_pdb_content, set()), (cutting_label, cutting_pdb_content, loop_residues)] settings['Main'] = bonsai_label settings['Loop'] = cutting_label PSE_files = b.run(LoopRemovalBuilder, [PDBContainer.from_content_triple(structures_list)], settings=settings) return (PSE_files[0], b.PSE_scripts[0]) # depends on [control=['if'], data=[]]
def _add_common_args(parser, is_create=True):
    """If is_create is True, protocol and action become mandatory arguments.

    CreateCommand = is_create : True
    UpdateCommand = is_create : False
    """
    # Simple string options that only carry a help text.
    plain_options = (
        ('--name', _('Name for the firewall rule.')),
        ('--description', _('Description for the firewall rule.')),
        ('--source-ip-address', _('Source IP address or subnet.')),
        ('--destination-ip-address', _('Destination IP address or subnet.')),
        ('--source-port',
         _('Source port (integer in [1, 65535] or range in a:b).')),
        ('--destination-port',
         _('Destination port (integer in [1, 65535] or range in a:b).')),
    )
    for flag, help_text in plain_options:
        parser.add_argument(flag, help=help_text)
    utils.add_boolean_argument(
        parser, '--enabled', dest='enabled',
        help=_('Whether to enable or disable this rule.'))
    # Protocol and action are required on create, optional on update.
    parser.add_argument(
        '--protocol', choices=['tcp', 'udp', 'icmp', 'any'],
        required=is_create,
        type=utils.convert_to_lowercase,
        help=_('Protocol for the firewall rule.'))
    parser.add_argument(
        '--action',
        required=is_create,
        type=utils.convert_to_lowercase,
        choices=['allow', 'deny', 'reject'],
        help=_('Action for the firewall rule.'))
def function[_add_common_args, parameter[parser, is_create]]: constant[If is_create is True, protocol and action become mandatory arguments. CreateCommand = is_create : True UpdateCommand = is_create : False ] call[name[parser].add_argument, parameter[constant[--name]]] call[name[parser].add_argument, parameter[constant[--description]]] call[name[parser].add_argument, parameter[constant[--source-ip-address]]] call[name[parser].add_argument, parameter[constant[--destination-ip-address]]] call[name[parser].add_argument, parameter[constant[--source-port]]] call[name[parser].add_argument, parameter[constant[--destination-port]]] call[name[utils].add_boolean_argument, parameter[name[parser], constant[--enabled]]] call[name[parser].add_argument, parameter[constant[--protocol]]] call[name[parser].add_argument, parameter[constant[--action]]]
keyword[def] identifier[_add_common_args] ( identifier[parser] , identifier[is_create] = keyword[True] ): literal[string] identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = identifier[_] ( literal[string] )) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = identifier[_] ( literal[string] )) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = identifier[_] ( literal[string] )) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = identifier[_] ( literal[string] )) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = identifier[_] ( literal[string] )) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[help] = identifier[_] ( literal[string] literal[string] )) identifier[utils] . identifier[add_boolean_argument] ( identifier[parser] , literal[string] , identifier[dest] = literal[string] , identifier[help] = identifier[_] ( literal[string] )) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[choices] =[ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[required] = identifier[is_create] , identifier[type] = identifier[utils] . identifier[convert_to_lowercase] , identifier[help] = identifier[_] ( literal[string] )) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[required] = identifier[is_create] , identifier[type] = identifier[utils] . identifier[convert_to_lowercase] , identifier[choices] =[ literal[string] , literal[string] , literal[string] ], identifier[help] = identifier[_] ( literal[string] ))
def _add_common_args(parser, is_create=True): """If is_create is True, protocol and action become mandatory arguments. CreateCommand = is_create : True UpdateCommand = is_create : False """ parser.add_argument('--name', help=_('Name for the firewall rule.')) parser.add_argument('--description', help=_('Description for the firewall rule.')) parser.add_argument('--source-ip-address', help=_('Source IP address or subnet.')) parser.add_argument('--destination-ip-address', help=_('Destination IP address or subnet.')) parser.add_argument('--source-port', help=_('Source port (integer in [1, 65535] or range in a:b).')) parser.add_argument('--destination-port', help=_('Destination port (integer in [1, 65535] or range in a:b).')) utils.add_boolean_argument(parser, '--enabled', dest='enabled', help=_('Whether to enable or disable this rule.')) parser.add_argument('--protocol', choices=['tcp', 'udp', 'icmp', 'any'], required=is_create, type=utils.convert_to_lowercase, help=_('Protocol for the firewall rule.')) parser.add_argument('--action', required=is_create, type=utils.convert_to_lowercase, choices=['allow', 'deny', 'reject'], help=_('Action for the firewall rule.'))
def write_properties_from_env(cls, path):
    '''
    Uses environmental variables to write a *.properties file for KCL's MultiLangDaemon
    '''
    # Assemble the full file contents first, then write them in one pass.
    output = ["# Autogenerated by kclboot v%s on %s\n\n" % (PACKAGE_VERSION, datetime.now())]
    for env_name, prop_name in ENV_TO_PROPERTY.items():
        value = os.environ.get(env_name)
        # Unset or empty environment variables are simply omitted.
        if value:
            output.append("%s=%s\n" % (prop_name, value))
    with open(path, 'w') as prop_file:
        prop_file.writelines(output)
def function[write_properties_from_env, parameter[cls, path]]: constant[ Uses environmental variables to write a *.properties file for KCL's MultiLangDaemon ] with call[name[open], parameter[name[path], constant[w]]] begin[:] call[name[f].write, parameter[binary_operation[constant[# Autogenerated by kclboot v%s on %s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b14517b0>, <ast.Call object at 0x7da1b1450910>]]]]] for taget[tuple[[<ast.Name object at 0x7da1b1450430>, <ast.Name object at 0x7da1b1450400>]]] in starred[call[name[ENV_TO_PROPERTY].items, parameter[]]] begin[:] variable[env_value] assign[=] call[name[os].environ.get, parameter[name[env_var]]] if name[env_value] begin[:] call[name[f].write, parameter[binary_operation[constant[%s=%s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1450370>, <ast.Name object at 0x7da1b14507f0>]]]]]
keyword[def] identifier[write_properties_from_env] ( identifier[cls] , identifier[path] ): literal[string] keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( literal[string] %( identifier[PACKAGE_VERSION] , identifier[datetime] . identifier[now] ())) keyword[for] identifier[env_var] , identifier[prop_var] keyword[in] identifier[ENV_TO_PROPERTY] . identifier[items] (): identifier[env_value] = identifier[os] . identifier[environ] . identifier[get] ( identifier[env_var] ) keyword[if] identifier[env_value] : identifier[f] . identifier[write] ( literal[string] %( identifier[prop_var] , identifier[env_value] ))
def write_properties_from_env(cls, path): """ Uses environmental variables to write a *.properties file for KCL's MultiLangDaemon """ with open(path, 'w') as f: f.write('# Autogenerated by kclboot v%s on %s\n\n' % (PACKAGE_VERSION, datetime.now())) for (env_var, prop_var) in ENV_TO_PROPERTY.items(): env_value = os.environ.get(env_var) if env_value: f.write('%s=%s\n' % (prop_var, env_value)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['f']]
def build_vars(path=None):
    """Build initial vars."""
    result = {
        "__name__": "__main__",
        "__package__": None,
        "reload": reload,
    }
    if path is not None:
        result["__file__"] = fixpath(path)
    # Reserve names for auto-completion purposes; applying them last matches
    # the original assignment order (reserved names overwrite earlier values
    # with None).
    result.update(dict.fromkeys(reserved_vars))
    return result
def function[build_vars, parameter[path]]: constant[Build initial vars.] variable[init_vars] assign[=] dictionary[[<ast.Constant object at 0x7da18dc07760>, <ast.Constant object at 0x7da18dc05ed0>, <ast.Constant object at 0x7da18dc06e60>], [<ast.Constant object at 0x7da18dc04790>, <ast.Constant object at 0x7da18dc07910>, <ast.Name object at 0x7da18dc06b00>]] if compare[name[path] is_not constant[None]] begin[:] call[name[init_vars]][constant[__file__]] assign[=] call[name[fixpath], parameter[name[path]]] for taget[name[var]] in starred[name[reserved_vars]] begin[:] call[name[init_vars]][name[var]] assign[=] constant[None] return[name[init_vars]]
keyword[def] identifier[build_vars] ( identifier[path] = keyword[None] ): literal[string] identifier[init_vars] ={ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : identifier[reload] , } keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] : identifier[init_vars] [ literal[string] ]= identifier[fixpath] ( identifier[path] ) keyword[for] identifier[var] keyword[in] identifier[reserved_vars] : identifier[init_vars] [ identifier[var] ]= keyword[None] keyword[return] identifier[init_vars]
def build_vars(path=None): """Build initial vars.""" init_vars = {'__name__': '__main__', '__package__': None, 'reload': reload} if path is not None: init_vars['__file__'] = fixpath(path) # depends on [control=['if'], data=['path']] # put reserved_vars in for auto-completion purposes for var in reserved_vars: init_vars[var] = None # depends on [control=['for'], data=['var']] return init_vars
def get_registry_records_by_keyword(keyword=None):
    """Get all the registry records (names and values) whose name contains the
    specified keyword or, if keyword is None, return all registry items

    :param keyword: The keyword that has to be contained in the record name
    :type keyword: str or None
    :returns: Dictionary mapping the names of the found records to its values
    """
    portal_reg = ploneapi.portal.get_tool(name="portal_registry")
    # Lowercase the keyword once, outside the loop, and collapse the two
    # duplicated branch bodies: a None keyword matches every record.
    needle = keyword.lower() if keyword is not None else None
    found_registers = {}
    for record in portal_reg.records:
        if needle is None or needle in record.lower():
            found_registers[record] = api.get_registry_record(record)
    return found_registers
def function[get_registry_records_by_keyword, parameter[keyword]]: constant[Get all the registry records (names and values) whose name contains the specified keyword or, if keyword is None, return all registry items :param keyword: The keyword that has to be contained in the record name :type keyword: str or None :returns: Dictionary mapping the names of the found records to its values ] variable[portal_reg] assign[=] call[name[ploneapi].portal.get_tool, parameter[]] variable[found_registers] assign[=] dictionary[[], []] for taget[name[record]] in starred[name[portal_reg].records] begin[:] if compare[name[keyword] is constant[None]] begin[:] call[name[found_registers]][name[record]] assign[=] call[name[api].get_registry_record, parameter[name[record]]] return[name[found_registers]]
keyword[def] identifier[get_registry_records_by_keyword] ( identifier[keyword] = keyword[None] ): literal[string] identifier[portal_reg] = identifier[ploneapi] . identifier[portal] . identifier[get_tool] ( identifier[name] = literal[string] ) identifier[found_registers] ={} keyword[for] identifier[record] keyword[in] identifier[portal_reg] . identifier[records] : keyword[if] identifier[keyword] keyword[is] keyword[None] : identifier[found_registers] [ identifier[record] ]= identifier[api] . identifier[get_registry_record] ( identifier[record] ) keyword[elif] identifier[keyword] . identifier[lower] () keyword[in] identifier[record] . identifier[lower] (): identifier[found_registers] [ identifier[record] ]= identifier[api] . identifier[get_registry_record] ( identifier[record] ) keyword[return] identifier[found_registers]
def get_registry_records_by_keyword(keyword=None): """Get all the registry records (names and values) whose name contains the specified keyword or, if keyword is None, return all registry items :param keyword: The keyword that has to be contained in the record name :type keyword: str or None :returns: Dictionary mapping the names of the found records to its values """ portal_reg = ploneapi.portal.get_tool(name='portal_registry') found_registers = {} for record in portal_reg.records: if keyword is None: found_registers[record] = api.get_registry_record(record) # depends on [control=['if'], data=[]] elif keyword.lower() in record.lower(): found_registers[record] = api.get_registry_record(record) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['record']] return found_registers
def _get_column_change_options(self, fluent):
    """
    Get the column change options.

    Starts from the base grammar's options; for enum columns the allowed
    values are additionally rendered as a "('a','b')" definition string
    under options["extra"]["definition"].
    """
    options = super(MySQLSchemaGrammar, self)._get_column_change_options(fluent)
    if fluent.type != "enum":
        return options
    quoted_values = "','".join(fluent.allowed)
    options["extra"] = {"definition": "('{}')".format(quoted_values)}
    return options
def function[_get_column_change_options, parameter[self, fluent]]: constant[ Get the column change options. ] variable[options] assign[=] call[call[name[super], parameter[name[MySQLSchemaGrammar], name[self]]]._get_column_change_options, parameter[name[fluent]]] if compare[name[fluent].type equal[==] constant[enum]] begin[:] call[name[options]][constant[extra]] assign[=] dictionary[[<ast.Constant object at 0x7da18eb54dc0>], [<ast.Call object at 0x7da18eb57e50>]] return[name[options]]
keyword[def] identifier[_get_column_change_options] ( identifier[self] , identifier[fluent] ): literal[string] identifier[options] = identifier[super] ( identifier[MySQLSchemaGrammar] , identifier[self] ). identifier[_get_column_change_options] ( identifier[fluent] ) keyword[if] identifier[fluent] . identifier[type] == literal[string] : identifier[options] [ literal[string] ]={ literal[string] : literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[fluent] . identifier[allowed] )) } keyword[return] identifier[options]
def _get_column_change_options(self, fluent): """ Get the column change options. """ options = super(MySQLSchemaGrammar, self)._get_column_change_options(fluent) if fluent.type == 'enum': options['extra'] = {'definition': "('{}')".format("','".join(fluent.allowed))} # depends on [control=['if'], data=[]] return options
def _get_colordata(bs, elements, bs_projection):
    """
    Get color data, including projected band structures

    Args:
        bs: Bandstructure object
        elements: elements (in desired order) for setting to blue, red, green
        bs_projection: None for no projection, "elements" for element projection

    Returns:
        dict mapping Spin -> np.array of per-band, per-kpoint RGB triples
    """
    contribs = {}
    # Hoist the projection-mode test out of the innermost k-point loop;
    # it is invariant for the whole call.
    elemental = bool(bs_projection) and bs_projection.lower() == "elements"
    if elemental:
        projections = bs.get_projection_on_elements()

    for spin in (Spin.up, Spin.down):
        if spin not in bs.bands:
            continue
        contribs[spin] = []
        for band_idx in range(bs.nb_bands):
            colors = []
            for k_idx in range(len(bs.kpoints)):
                if elemental:
                    c = [0, 0, 0]
                    projs = projections[spin][band_idx][k_idx]
                    # note: squared color interpolations are smoother
                    # see: https://youtu.be/LKnqECcg6Gw
                    projs = {k: v ** 2 for k, v in projs.items()}
                    total = sum(projs.values())
                    if total > 0:
                        for idx, e in enumerate(elements):
                            c[idx] = math.sqrt(projs[e] / total)

                    c = [c[1], c[2], c[0]]  # prefer blue, then red, then green
                else:
                    # black for spin up, blue for spin down
                    c = [0, 0, 0] if spin == Spin.up else [0, 0, 1]
                colors.append(c)
            contribs[spin].append(colors)
        contribs[spin] = np.array(contribs[spin])

    return contribs
def function[_get_colordata, parameter[bs, elements, bs_projection]]: constant[ Get color data, including projected band structures Args: bs: Bandstructure object elements: elements (in desired order) for setting to blue, red, green bs_projection: None for no projection, "elements" for element projection Returns: ] variable[contribs] assign[=] dictionary[[], []] if <ast.BoolOp object at 0x7da18dc06500> begin[:] variable[projections] assign[=] call[name[bs].get_projection_on_elements, parameter[]] for taget[name[spin]] in starred[tuple[[<ast.Attribute object at 0x7da18dc069e0>, <ast.Attribute object at 0x7da18dc05c00>]]] begin[:] if compare[name[spin] in name[bs].bands] begin[:] call[name[contribs]][name[spin]] assign[=] list[[]] for taget[name[band_idx]] in starred[call[name[range], parameter[name[bs].nb_bands]]] begin[:] variable[colors] assign[=] list[[]] for taget[name[k_idx]] in starred[call[name[range], parameter[call[name[len], parameter[name[bs].kpoints]]]]] begin[:] if <ast.BoolOp object at 0x7da18dc07bb0> begin[:] variable[c] assign[=] list[[<ast.Constant object at 0x7da18dc06170>, <ast.Constant object at 0x7da18dc071f0>, <ast.Constant object at 0x7da18dc05de0>]] variable[projs] assign[=] call[call[call[name[projections]][name[spin]]][name[band_idx]]][name[k_idx]] variable[projs] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da18dc04820>]] variable[total] assign[=] call[name[sum], parameter[call[name[projs].values, parameter[]]]] if compare[name[total] greater[>] constant[0]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18dc05ff0>, <ast.Name object at 0x7da18dc05a50>]]] in starred[call[name[enumerate], parameter[name[elements]]]] begin[:] call[name[c]][name[idx]] assign[=] call[name[math].sqrt, parameter[binary_operation[call[name[projs]][name[e]] / name[total]]]] variable[c] assign[=] list[[<ast.Subscript object at 0x7da18dc049a0>, <ast.Subscript object at 0x7da18dc050f0>, <ast.Subscript object at 0x7da18dc04e20>]] 
call[name[colors].append, parameter[name[c]]] call[call[name[contribs]][name[spin]].append, parameter[name[colors]]] call[name[contribs]][name[spin]] assign[=] call[name[np].array, parameter[call[name[contribs]][name[spin]]]] return[name[contribs]]
keyword[def] identifier[_get_colordata] ( identifier[bs] , identifier[elements] , identifier[bs_projection] ): literal[string] identifier[contribs] ={} keyword[if] identifier[bs_projection] keyword[and] identifier[bs_projection] . identifier[lower] ()== literal[string] : identifier[projections] = identifier[bs] . identifier[get_projection_on_elements] () keyword[for] identifier[spin] keyword[in] ( identifier[Spin] . identifier[up] , identifier[Spin] . identifier[down] ): keyword[if] identifier[spin] keyword[in] identifier[bs] . identifier[bands] : identifier[contribs] [ identifier[spin] ]=[] keyword[for] identifier[band_idx] keyword[in] identifier[range] ( identifier[bs] . identifier[nb_bands] ): identifier[colors] =[] keyword[for] identifier[k_idx] keyword[in] identifier[range] ( identifier[len] ( identifier[bs] . identifier[kpoints] )): keyword[if] identifier[bs_projection] keyword[and] identifier[bs_projection] . identifier[lower] ()== literal[string] : identifier[c] =[ literal[int] , literal[int] , literal[int] ] identifier[projs] = identifier[projections] [ identifier[spin] ][ identifier[band_idx] ][ identifier[k_idx] ] identifier[projs] = identifier[dict] ( [( identifier[k] , identifier[v] ** literal[int] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[projs] . identifier[items] ()]) identifier[total] = identifier[sum] ( identifier[projs] . identifier[values] ()) keyword[if] identifier[total] > literal[int] : keyword[for] identifier[idx] , identifier[e] keyword[in] identifier[enumerate] ( identifier[elements] ): identifier[c] [ identifier[idx] ]= identifier[math] . identifier[sqrt] ( identifier[projs] [ identifier[e] ]/ identifier[total] ) identifier[c] =[ identifier[c] [ literal[int] ], identifier[c] [ literal[int] ], identifier[c] [ literal[int] ]] keyword[else] : identifier[c] =[ literal[int] , literal[int] , literal[int] ] keyword[if] identifier[spin] == identifier[Spin] . 
identifier[up] keyword[else] [ literal[int] , literal[int] , literal[int] ] identifier[colors] . identifier[append] ( identifier[c] ) identifier[contribs] [ identifier[spin] ]. identifier[append] ( identifier[colors] ) identifier[contribs] [ identifier[spin] ]= identifier[np] . identifier[array] ( identifier[contribs] [ identifier[spin] ]) keyword[return] identifier[contribs]
def _get_colordata(bs, elements, bs_projection): """ Get color data, including projected band structures Args: bs: Bandstructure object elements: elements (in desired order) for setting to blue, red, green bs_projection: None for no projection, "elements" for element projection Returns: """ contribs = {} if bs_projection and bs_projection.lower() == 'elements': projections = bs.get_projection_on_elements() # depends on [control=['if'], data=[]] for spin in (Spin.up, Spin.down): if spin in bs.bands: contribs[spin] = [] for band_idx in range(bs.nb_bands): colors = [] for k_idx in range(len(bs.kpoints)): if bs_projection and bs_projection.lower() == 'elements': c = [0, 0, 0] projs = projections[spin][band_idx][k_idx] # note: squared color interpolations are smoother # see: https://youtu.be/LKnqECcg6Gw projs = dict([(k, v ** 2) for (k, v) in projs.items()]) total = sum(projs.values()) if total > 0: for (idx, e) in enumerate(elements): c[idx] = math.sqrt(projs[e] / total) # min is to handle round errors # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['total']] c = [c[1], c[2], c[0]] # prefer blue, then red, then green # depends on [control=['if'], data=[]] else: c = [0, 0, 0] if spin == Spin.up else [0, 0, 1] # black for spin up, blue for spin down colors.append(c) # depends on [control=['for'], data=['k_idx']] contribs[spin].append(colors) # depends on [control=['for'], data=['band_idx']] contribs[spin] = np.array(contribs[spin]) # depends on [control=['if'], data=['spin']] # depends on [control=['for'], data=['spin']] return contribs
def _get_quantiles(self, X, width, quantiles, modelmat=None, lp=None, prediction=False, xform=True, term=-1):
    """
    estimate prediction intervals for LinearGAM

    Parameters
    ----------
    X : array
        input data of shape (n_samples, m_features)
    width : float on (0, 1)
    quantiles : array-like of floats on (0, 1)
        instead of specifying the prediction width, one can specify the
        quantiles. so width=.95 is equivalent to quantiles=[.025, .975]
    modelmat : array of shape or None, default: None
    lp : array or None, default: None
    prediction : bool, default: True.
        whether to compute prediction intervals (True)
        or confidence intervals (False)
    xform : bool, default: True,
        whether to apply the inverse link function and return values
        on the scale of the distribution mean (True),
        or to keep on the linear predictor scale (False)
    term : int, default: -1

    Returns
    -------
    intervals: np.array of shape (n_samples, 2 or len(quantiles))

    Notes
    -----
    when the scale parameter is known, then we can proceed with a large
    sample approximation to the distribution of the model coefficients
    where B_hat ~ Normal(B, cov)

    when the scale parameter is unknown, then we have to account for
    the distribution of the estimated scale parameter, which is Chi-squared.
    since we scale our estimate of B_hat by the sqrt of estimated scale,
    we get a t distribution: Normal / sqrt(Chi-squared) ~ t

    see Simon Wood section 1.3.2, 1.3.3, 1.5.5, 2.1.5
    """
    # Normalize the requested quantiles: use them directly if given,
    # otherwise derive a symmetric pair from the interval width.
    if quantiles is not None:
        quantiles = np.atleast_1d(quantiles)
    else:
        alpha = (1 - width)/2.
        quantiles = [alpha, 1 - alpha]
    for quantile in quantiles:
        if (quantile >= 1) or (quantile <= 0):
            raise ValueError('quantiles must be in (0, 1), but found {}'\
                             .format(quantiles))

    if modelmat is None:
        modelmat = self._modelmat(X, term=term)
    if lp is None:
        lp = self._linear_predictor(modelmat=modelmat, term=term)

    # Restrict the coefficient covariance to the requested term's block.
    idxs = self.terms.get_coef_indices(term)
    cov = self.statistics_['cov'][idxs][:, idxs]

    # Pointwise variance of the linear predictor:
    # diag(modelmat @ cov @ modelmat.T), computed row-wise without forming
    # the full n x n matrix.
    # NOTE(review): `.A` implies modelmat is a scipy sparse matrix — confirm.
    var = (modelmat.dot(cov) * modelmat.A).sum(axis=1)
    if prediction:
        # Prediction intervals additionally include the observation noise.
        var += self.distribution.scale

    lines = []
    for quantile in quantiles:
        if self.distribution._known_scale:
            # Known scale: large-sample normal approximation (see Notes).
            q = sp.stats.norm.ppf(quantile)
        else:
            # Unknown scale: use a t distribution with df = n - edof to
            # account for the estimated scale (see Notes).
            q = sp.stats.t.ppf(quantile, df=self.statistics_['n_samples'] - self.statistics_['edof'])

        lines.append(lp + q * var**0.5)

    lines = np.vstack(lines).T
    if xform:
        # Map from the linear-predictor scale to the distribution mean.
        lines = self.link.mu(lines, self.distribution)
    return lines
def function[_get_quantiles, parameter[self, X, width, quantiles, modelmat, lp, prediction, xform, term]]: constant[ estimate prediction intervals for LinearGAM Parameters ---------- X : array input data of shape (n_samples, m_features) width : float on (0, 1) quantiles : array-like of floats on (0, 1) instead of specifying the prediciton width, one can specify the quantiles. so width=.95 is equivalent to quantiles=[.025, .975] modelmat : array of shape or None, default: None lp : array or None, default: None prediction : bool, default: True. whether to compute prediction intervals (True) or confidence intervals (False) xform : bool, default: True, whether to apply the inverse link function and return values on the scale of the distribution mean (True), or to keep on the linear predictor scale (False) term : int, default: -1 Returns ------- intervals: np.array of shape (n_samples, 2 or len(quantiles)) Notes ----- when the scale parameter is known, then we can proceed with a large sample approximation to the distribution of the model coefficients where B_hat ~ Normal(B, cov) when the scale parameter is unknown, then we have to account for the distribution of the estimated scale parameter, which is Chi-squared. 
since we scale our estimate of B_hat by the sqrt of estimated scale, we get a t distribution: Normal / sqrt(Chi-squared) ~ t see Simon Wood section 1.3.2, 1.3.3, 1.5.5, 2.1.5 ] if compare[name[quantiles] is_not constant[None]] begin[:] variable[quantiles] assign[=] call[name[np].atleast_1d, parameter[name[quantiles]]] for taget[name[quantile]] in starred[name[quantiles]] begin[:] if <ast.BoolOp object at 0x7da18f58ff70> begin[:] <ast.Raise object at 0x7da18f58f5e0> if compare[name[modelmat] is constant[None]] begin[:] variable[modelmat] assign[=] call[name[self]._modelmat, parameter[name[X]]] if compare[name[lp] is constant[None]] begin[:] variable[lp] assign[=] call[name[self]._linear_predictor, parameter[]] variable[idxs] assign[=] call[name[self].terms.get_coef_indices, parameter[name[term]]] variable[cov] assign[=] call[call[call[name[self].statistics_][constant[cov]]][name[idxs]]][tuple[[<ast.Slice object at 0x7da18f58e170>, <ast.Name object at 0x7da18f58c550>]]] variable[var] assign[=] call[binary_operation[call[name[modelmat].dot, parameter[name[cov]]] * name[modelmat].A].sum, parameter[]] if name[prediction] begin[:] <ast.AugAssign object at 0x7da18f58dea0> variable[lines] assign[=] list[[]] for taget[name[quantile]] in starred[name[quantiles]] begin[:] if name[self].distribution._known_scale begin[:] variable[q] assign[=] call[name[sp].stats.norm.ppf, parameter[name[quantile]]] call[name[lines].append, parameter[binary_operation[name[lp] + binary_operation[name[q] * binary_operation[name[var] ** constant[0.5]]]]]] variable[lines] assign[=] call[name[np].vstack, parameter[name[lines]]].T if name[xform] begin[:] variable[lines] assign[=] call[name[self].link.mu, parameter[name[lines], name[self].distribution]] return[name[lines]]
keyword[def] identifier[_get_quantiles] ( identifier[self] , identifier[X] , identifier[width] , identifier[quantiles] , identifier[modelmat] = keyword[None] , identifier[lp] = keyword[None] , identifier[prediction] = keyword[False] , identifier[xform] = keyword[True] , identifier[term] =- literal[int] ): literal[string] keyword[if] identifier[quantiles] keyword[is] keyword[not] keyword[None] : identifier[quantiles] = identifier[np] . identifier[atleast_1d] ( identifier[quantiles] ) keyword[else] : identifier[alpha] =( literal[int] - identifier[width] )/ literal[int] identifier[quantiles] =[ identifier[alpha] , literal[int] - identifier[alpha] ] keyword[for] identifier[quantile] keyword[in] identifier[quantiles] : keyword[if] ( identifier[quantile] >= literal[int] ) keyword[or] ( identifier[quantile] <= literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[quantiles] )) keyword[if] identifier[modelmat] keyword[is] keyword[None] : identifier[modelmat] = identifier[self] . identifier[_modelmat] ( identifier[X] , identifier[term] = identifier[term] ) keyword[if] identifier[lp] keyword[is] keyword[None] : identifier[lp] = identifier[self] . identifier[_linear_predictor] ( identifier[modelmat] = identifier[modelmat] , identifier[term] = identifier[term] ) identifier[idxs] = identifier[self] . identifier[terms] . identifier[get_coef_indices] ( identifier[term] ) identifier[cov] = identifier[self] . identifier[statistics_] [ literal[string] ][ identifier[idxs] ][:, identifier[idxs] ] identifier[var] =( identifier[modelmat] . identifier[dot] ( identifier[cov] )* identifier[modelmat] . identifier[A] ). identifier[sum] ( identifier[axis] = literal[int] ) keyword[if] identifier[prediction] : identifier[var] += identifier[self] . identifier[distribution] . identifier[scale] identifier[lines] =[] keyword[for] identifier[quantile] keyword[in] identifier[quantiles] : keyword[if] identifier[self] . identifier[distribution] . 
identifier[_known_scale] : identifier[q] = identifier[sp] . identifier[stats] . identifier[norm] . identifier[ppf] ( identifier[quantile] ) keyword[else] : identifier[q] = identifier[sp] . identifier[stats] . identifier[t] . identifier[ppf] ( identifier[quantile] , identifier[df] = identifier[self] . identifier[statistics_] [ literal[string] ]- identifier[self] . identifier[statistics_] [ literal[string] ]) identifier[lines] . identifier[append] ( identifier[lp] + identifier[q] * identifier[var] ** literal[int] ) identifier[lines] = identifier[np] . identifier[vstack] ( identifier[lines] ). identifier[T] keyword[if] identifier[xform] : identifier[lines] = identifier[self] . identifier[link] . identifier[mu] ( identifier[lines] , identifier[self] . identifier[distribution] ) keyword[return] identifier[lines]
def _get_quantiles(self, X, width, quantiles, modelmat=None, lp=None, prediction=False, xform=True, term=-1): """ estimate prediction intervals for LinearGAM Parameters ---------- X : array input data of shape (n_samples, m_features) width : float on (0, 1) quantiles : array-like of floats on (0, 1) instead of specifying the prediciton width, one can specify the quantiles. so width=.95 is equivalent to quantiles=[.025, .975] modelmat : array of shape or None, default: None lp : array or None, default: None prediction : bool, default: True. whether to compute prediction intervals (True) or confidence intervals (False) xform : bool, default: True, whether to apply the inverse link function and return values on the scale of the distribution mean (True), or to keep on the linear predictor scale (False) term : int, default: -1 Returns ------- intervals: np.array of shape (n_samples, 2 or len(quantiles)) Notes ----- when the scale parameter is known, then we can proceed with a large sample approximation to the distribution of the model coefficients where B_hat ~ Normal(B, cov) when the scale parameter is unknown, then we have to account for the distribution of the estimated scale parameter, which is Chi-squared. 
since we scale our estimate of B_hat by the sqrt of estimated scale, we get a t distribution: Normal / sqrt(Chi-squared) ~ t see Simon Wood section 1.3.2, 1.3.3, 1.5.5, 2.1.5 """ if quantiles is not None: quantiles = np.atleast_1d(quantiles) # depends on [control=['if'], data=['quantiles']] else: alpha = (1 - width) / 2.0 quantiles = [alpha, 1 - alpha] for quantile in quantiles: if quantile >= 1 or quantile <= 0: raise ValueError('quantiles must be in (0, 1), but found {}'.format(quantiles)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['quantile']] if modelmat is None: modelmat = self._modelmat(X, term=term) # depends on [control=['if'], data=['modelmat']] if lp is None: lp = self._linear_predictor(modelmat=modelmat, term=term) # depends on [control=['if'], data=['lp']] idxs = self.terms.get_coef_indices(term) cov = self.statistics_['cov'][idxs][:, idxs] var = (modelmat.dot(cov) * modelmat.A).sum(axis=1) if prediction: var += self.distribution.scale # depends on [control=['if'], data=[]] lines = [] for quantile in quantiles: if self.distribution._known_scale: q = sp.stats.norm.ppf(quantile) # depends on [control=['if'], data=[]] else: q = sp.stats.t.ppf(quantile, df=self.statistics_['n_samples'] - self.statistics_['edof']) lines.append(lp + q * var ** 0.5) # depends on [control=['for'], data=['quantile']] lines = np.vstack(lines).T if xform: lines = self.link.mu(lines, self.distribution) # depends on [control=['if'], data=[]] return lines
def junk_folder(self): """ Shortcut to get Junk Folder instance :rtype: mailbox.Folder """ return self.folder_constructor(parent=self, name='Junk', folder_id=OutlookWellKnowFolderNames .JUNK.value)
def function[junk_folder, parameter[self]]: constant[ Shortcut to get Junk Folder instance :rtype: mailbox.Folder ] return[call[name[self].folder_constructor, parameter[]]]
keyword[def] identifier[junk_folder] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[folder_constructor] ( identifier[parent] = identifier[self] , identifier[name] = literal[string] , identifier[folder_id] = identifier[OutlookWellKnowFolderNames] . identifier[JUNK] . identifier[value] )
def junk_folder(self): """ Shortcut to get Junk Folder instance :rtype: mailbox.Folder """ return self.folder_constructor(parent=self, name='Junk', folder_id=OutlookWellKnowFolderNames.JUNK.value)
def process_result(self, new_concept, concepts): """Save all concepts with non-zero |small_phi| to the |CauseEffectStructure|. """ if new_concept.phi > 0: # Replace the subsystem new_concept.subsystem = self.subsystem concepts.append(new_concept) return concepts
def function[process_result, parameter[self, new_concept, concepts]]: constant[Save all concepts with non-zero |small_phi| to the |CauseEffectStructure|. ] if compare[name[new_concept].phi greater[>] constant[0]] begin[:] name[new_concept].subsystem assign[=] name[self].subsystem call[name[concepts].append, parameter[name[new_concept]]] return[name[concepts]]
keyword[def] identifier[process_result] ( identifier[self] , identifier[new_concept] , identifier[concepts] ): literal[string] keyword[if] identifier[new_concept] . identifier[phi] > literal[int] : identifier[new_concept] . identifier[subsystem] = identifier[self] . identifier[subsystem] identifier[concepts] . identifier[append] ( identifier[new_concept] ) keyword[return] identifier[concepts]
def process_result(self, new_concept, concepts): """Save all concepts with non-zero |small_phi| to the |CauseEffectStructure|. """ if new_concept.phi > 0: # Replace the subsystem new_concept.subsystem = self.subsystem concepts.append(new_concept) # depends on [control=['if'], data=[]] return concepts
def update_hosted_zone_comment(Id=None, Name=None, Comment=None, PrivateZone=None, region=None, key=None, keyid=None, profile=None): ''' Update the comment on an existing Route 53 hosted zone. Id The unique Zone Identifier for the Hosted Zone. Name The domain name associated with the Hosted Zone(s). Comment Any comments you want to include about the hosted zone. PrivateZone Boolean - Set to True if changing a private hosted zone. CLI Example:: salt myminion boto3_route53.update_hosted_zone_comment Name=example.org. \ Comment="This is an example comment for an example zone" ''' if not _exactly_one((Id, Name)): raise SaltInvocationError('Exactly one of either Id or Name is required.') conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if Name: args = {'Name': Name, 'PrivateZone': PrivateZone, 'region': region, 'key': key, 'keyid': keyid, 'profile': profile} zone = find_hosted_zone(**args) if not zone: log.error("Couldn't resolve domain name %s to a hosted zone ID.", Name) return [] Id = zone[0]['HostedZone']['Id'] tries = 10 while tries: try: r = conn.update_hosted_zone_comment(Id=Id, Comment=Comment) r.pop('ResponseMetadata', None) return [r] except ClientError as e: if tries and e.response.get('Error', {}).get('Code') == 'Throttling': log.debug('Throttled by AWS API.') time.sleep(3) tries -= 1 continue log.error('Failed to update comment on hosted zone %s: %s', Name or Id, e) return []
def function[update_hosted_zone_comment, parameter[Id, Name, Comment, PrivateZone, region, key, keyid, profile]]: constant[ Update the comment on an existing Route 53 hosted zone. Id The unique Zone Identifier for the Hosted Zone. Name The domain name associated with the Hosted Zone(s). Comment Any comments you want to include about the hosted zone. PrivateZone Boolean - Set to True if changing a private hosted zone. CLI Example:: salt myminion boto3_route53.update_hosted_zone_comment Name=example.org. Comment="This is an example comment for an example zone" ] if <ast.UnaryOp object at 0x7da18ede4a90> begin[:] <ast.Raise object at 0x7da18ede7e20> variable[conn] assign[=] call[name[_get_conn], parameter[]] if name[Name] begin[:] variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da18ede4250>, <ast.Constant object at 0x7da18ede4fd0>, <ast.Constant object at 0x7da18ede5a50>, <ast.Constant object at 0x7da18ede4b50>, <ast.Constant object at 0x7da18ede6c50>, <ast.Constant object at 0x7da18ede5ff0>], [<ast.Name object at 0x7da18ede4af0>, <ast.Name object at 0x7da18ede5990>, <ast.Name object at 0x7da18ede4dc0>, <ast.Name object at 0x7da18ede4670>, <ast.Name object at 0x7da18ede68c0>, <ast.Name object at 0x7da18ede56f0>]] variable[zone] assign[=] call[name[find_hosted_zone], parameter[]] if <ast.UnaryOp object at 0x7da18ede7e50> begin[:] call[name[log].error, parameter[constant[Couldn't resolve domain name %s to a hosted zone ID.], name[Name]]] return[list[[]]] variable[Id] assign[=] call[call[call[name[zone]][constant[0]]][constant[HostedZone]]][constant[Id]] variable[tries] assign[=] constant[10] while name[tries] begin[:] <ast.Try object at 0x7da18ede6fb0> return[list[[]]]
keyword[def] identifier[update_hosted_zone_comment] ( identifier[Id] = keyword[None] , identifier[Name] = keyword[None] , identifier[Comment] = keyword[None] , identifier[PrivateZone] = keyword[None] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[_exactly_one] (( identifier[Id] , identifier[Name] )): keyword[raise] identifier[SaltInvocationError] ( literal[string] ) identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[if] identifier[Name] : identifier[args] ={ literal[string] : identifier[Name] , literal[string] : identifier[PrivateZone] , literal[string] : identifier[region] , literal[string] : identifier[key] , literal[string] : identifier[keyid] , literal[string] : identifier[profile] } identifier[zone] = identifier[find_hosted_zone] (** identifier[args] ) keyword[if] keyword[not] identifier[zone] : identifier[log] . identifier[error] ( literal[string] , identifier[Name] ) keyword[return] [] identifier[Id] = identifier[zone] [ literal[int] ][ literal[string] ][ literal[string] ] identifier[tries] = literal[int] keyword[while] identifier[tries] : keyword[try] : identifier[r] = identifier[conn] . identifier[update_hosted_zone_comment] ( identifier[Id] = identifier[Id] , identifier[Comment] = identifier[Comment] ) identifier[r] . identifier[pop] ( literal[string] , keyword[None] ) keyword[return] [ identifier[r] ] keyword[except] identifier[ClientError] keyword[as] identifier[e] : keyword[if] identifier[tries] keyword[and] identifier[e] . identifier[response] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] )== literal[string] : identifier[log] . identifier[debug] ( literal[string] ) identifier[time] . 
identifier[sleep] ( literal[int] ) identifier[tries] -= literal[int] keyword[continue] identifier[log] . identifier[error] ( literal[string] , identifier[Name] keyword[or] identifier[Id] , identifier[e] ) keyword[return] []
def update_hosted_zone_comment(Id=None, Name=None, Comment=None, PrivateZone=None, region=None, key=None, keyid=None, profile=None): """ Update the comment on an existing Route 53 hosted zone. Id The unique Zone Identifier for the Hosted Zone. Name The domain name associated with the Hosted Zone(s). Comment Any comments you want to include about the hosted zone. PrivateZone Boolean - Set to True if changing a private hosted zone. CLI Example:: salt myminion boto3_route53.update_hosted_zone_comment Name=example.org. Comment="This is an example comment for an example zone" """ if not _exactly_one((Id, Name)): raise SaltInvocationError('Exactly one of either Id or Name is required.') # depends on [control=['if'], data=[]] conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if Name: args = {'Name': Name, 'PrivateZone': PrivateZone, 'region': region, 'key': key, 'keyid': keyid, 'profile': profile} zone = find_hosted_zone(**args) if not zone: log.error("Couldn't resolve domain name %s to a hosted zone ID.", Name) return [] # depends on [control=['if'], data=[]] Id = zone[0]['HostedZone']['Id'] # depends on [control=['if'], data=[]] tries = 10 while tries: try: r = conn.update_hosted_zone_comment(Id=Id, Comment=Comment) r.pop('ResponseMetadata', None) return [r] # depends on [control=['try'], data=[]] except ClientError as e: if tries and e.response.get('Error', {}).get('Code') == 'Throttling': log.debug('Throttled by AWS API.') time.sleep(3) tries -= 1 continue # depends on [control=['if'], data=[]] log.error('Failed to update comment on hosted zone %s: %s', Name or Id, e) # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]] return []
def update_account_data(self) -> None: """Get basic information for the account.""" response = get( _url( "/accounts/{0}/identifiers".format(self._account_uid), self._sandbox ), headers=self._auth_headers ) response.raise_for_status() response = response.json() self.account_identifier = response.get('accountIdentifier') self.bank_identifier = response.get('bankIdentifier') self.iban = response.get('iban') self.bic = response.get('bic')
def function[update_account_data, parameter[self]]: constant[Get basic information for the account.] variable[response] assign[=] call[name[get], parameter[call[name[_url], parameter[call[constant[/accounts/{0}/identifiers].format, parameter[name[self]._account_uid]], name[self]._sandbox]]]] call[name[response].raise_for_status, parameter[]] variable[response] assign[=] call[name[response].json, parameter[]] name[self].account_identifier assign[=] call[name[response].get, parameter[constant[accountIdentifier]]] name[self].bank_identifier assign[=] call[name[response].get, parameter[constant[bankIdentifier]]] name[self].iban assign[=] call[name[response].get, parameter[constant[iban]]] name[self].bic assign[=] call[name[response].get, parameter[constant[bic]]]
keyword[def] identifier[update_account_data] ( identifier[self] )-> keyword[None] : literal[string] identifier[response] = identifier[get] ( identifier[_url] ( literal[string] . identifier[format] ( identifier[self] . identifier[_account_uid] ), identifier[self] . identifier[_sandbox] ), identifier[headers] = identifier[self] . identifier[_auth_headers] ) identifier[response] . identifier[raise_for_status] () identifier[response] = identifier[response] . identifier[json] () identifier[self] . identifier[account_identifier] = identifier[response] . identifier[get] ( literal[string] ) identifier[self] . identifier[bank_identifier] = identifier[response] . identifier[get] ( literal[string] ) identifier[self] . identifier[iban] = identifier[response] . identifier[get] ( literal[string] ) identifier[self] . identifier[bic] = identifier[response] . identifier[get] ( literal[string] )
def update_account_data(self) -> None: """Get basic information for the account.""" response = get(_url('/accounts/{0}/identifiers'.format(self._account_uid), self._sandbox), headers=self._auth_headers) response.raise_for_status() response = response.json() self.account_identifier = response.get('accountIdentifier') self.bank_identifier = response.get('bankIdentifier') self.iban = response.get('iban') self.bic = response.get('bic')
def _findSwipl(): """ This function makes a big effort to find the path to the SWI-Prolog shared library. Since this is both OS dependent and installation dependent, we may not aways succeed. If we do, we return a name/path that can be used by CDLL(). Otherwise we raise an exception. :return: Tuple. Fist element is the name or path to the library that can be used by CDLL. Second element is the path were SWI-Prolog resource file may be found (this is needed in some Linuxes) :rtype: Tuple of strings :raises ImportError: If we cannot guess the name of the library """ # Now begins the guesswork platform = sys.platform[:3] if platform == "win": # In Windows, we have the default installer # path and the registry to look (path, swiHome) = _findSwiplWin() elif platform in ("lin", "cyg"): (path, swiHome) = _findSwiplLin() elif platform == "dar": # Help with MacOS is welcome!! (path, swiHome) = _findSwiplDar() if path is None: (path, swiHome) = _findSwiplMacOSHome() else: # This should work for other UNIX (path, swiHome) = _findSwiplLin() # This is a catch all raise if path is None: raise ImportError('Could not find the SWI-Prolog library in this ' 'platform. If you are sure it is installed, please ' 'open an issue.') else: return (path, swiHome)
def function[_findSwipl, parameter[]]: constant[ This function makes a big effort to find the path to the SWI-Prolog shared library. Since this is both OS dependent and installation dependent, we may not aways succeed. If we do, we return a name/path that can be used by CDLL(). Otherwise we raise an exception. :return: Tuple. Fist element is the name or path to the library that can be used by CDLL. Second element is the path were SWI-Prolog resource file may be found (this is needed in some Linuxes) :rtype: Tuple of strings :raises ImportError: If we cannot guess the name of the library ] variable[platform] assign[=] call[name[sys].platform][<ast.Slice object at 0x7da1b16292d0>] if compare[name[platform] equal[==] constant[win]] begin[:] <ast.Tuple object at 0x7da1b1628970> assign[=] call[name[_findSwiplWin], parameter[]] if compare[name[path] is constant[None]] begin[:] <ast.Raise object at 0x7da1b162b460>
keyword[def] identifier[_findSwipl] (): literal[string] identifier[platform] = identifier[sys] . identifier[platform] [: literal[int] ] keyword[if] identifier[platform] == literal[string] : ( identifier[path] , identifier[swiHome] )= identifier[_findSwiplWin] () keyword[elif] identifier[platform] keyword[in] ( literal[string] , literal[string] ): ( identifier[path] , identifier[swiHome] )= identifier[_findSwiplLin] () keyword[elif] identifier[platform] == literal[string] : ( identifier[path] , identifier[swiHome] )= identifier[_findSwiplDar] () keyword[if] identifier[path] keyword[is] keyword[None] : ( identifier[path] , identifier[swiHome] )= identifier[_findSwiplMacOSHome] () keyword[else] : ( identifier[path] , identifier[swiHome] )= identifier[_findSwiplLin] () keyword[if] identifier[path] keyword[is] keyword[None] : keyword[raise] identifier[ImportError] ( literal[string] literal[string] literal[string] ) keyword[else] : keyword[return] ( identifier[path] , identifier[swiHome] )
def _findSwipl(): """ This function makes a big effort to find the path to the SWI-Prolog shared library. Since this is both OS dependent and installation dependent, we may not aways succeed. If we do, we return a name/path that can be used by CDLL(). Otherwise we raise an exception. :return: Tuple. Fist element is the name or path to the library that can be used by CDLL. Second element is the path were SWI-Prolog resource file may be found (this is needed in some Linuxes) :rtype: Tuple of strings :raises ImportError: If we cannot guess the name of the library """ # Now begins the guesswork platform = sys.platform[:3] if platform == 'win': # In Windows, we have the default installer # path and the registry to look (path, swiHome) = _findSwiplWin() # depends on [control=['if'], data=[]] elif platform in ('lin', 'cyg'): (path, swiHome) = _findSwiplLin() # depends on [control=['if'], data=[]] elif platform == 'dar': # Help with MacOS is welcome!! (path, swiHome) = _findSwiplDar() if path is None: (path, swiHome) = _findSwiplMacOSHome() # depends on [control=['if'], data=['path']] # depends on [control=['if'], data=[]] else: # This should work for other UNIX (path, swiHome) = _findSwiplLin() # This is a catch all raise if path is None: raise ImportError('Could not find the SWI-Prolog library in this platform. If you are sure it is installed, please open an issue.') # depends on [control=['if'], data=[]] else: return (path, swiHome)
def get_clinical_data(tcga_id): """Get clinical data for a TCGA project. Parameters ---------- tcga_id : str The TCGA project ID. Returns ------- `pandas.DataFrame` The clinical data.abs Notes ----- Clinical data is associated with individual cases (patients). These correspond to rows in the returned data frame, and are identified by 12-character TCGA barcodes. """ payload = { 'attachment': 'true', "filters": json.dumps({ "op": "and", "content": [ { "op":"in", "content":{ "field":"cases.project.program.name", "value":["TCGA"]}}, { "op": "in", "content": { "field": "project.project_id", "value": [tcga_id]}}] }), 'fields': 'case_id', 'expand': 'demographic,diagnoses,family_histories,exposures', 'format': 'JSON', 'pretty': 'true', 'size': 10000, 'filename': 'clinical.project-%s' % tcga_id, } r = requests.get('https://gdc-api.nci.nih.gov/cases', params=payload) j = json.loads(r.content.decode()) clinical = {} valid = 0 for s in j: if 'diagnoses' not in s: continue valid += 1 assert len(s['diagnoses']) == 1 diag = s['diagnoses'][0] tcga_id = diag['submitter_id'][:12] clinical[tcga_id] = diag logger.info('Found clinical data for %d cases.', valid) df = pd.DataFrame.from_dict(clinical).T df.sort_index(inplace=True) return df
def function[get_clinical_data, parameter[tcga_id]]: constant[Get clinical data for a TCGA project. Parameters ---------- tcga_id : str The TCGA project ID. Returns ------- `pandas.DataFrame` The clinical data.abs Notes ----- Clinical data is associated with individual cases (patients). These correspond to rows in the returned data frame, and are identified by 12-character TCGA barcodes. ] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da20e955120>, <ast.Constant object at 0x7da20e954b50>, <ast.Constant object at 0x7da20e954a90>, <ast.Constant object at 0x7da1b0b376d0>, <ast.Constant object at 0x7da1b0b35bd0>, <ast.Constant object at 0x7da1b0b371f0>, <ast.Constant object at 0x7da1b0b37430>, <ast.Constant object at 0x7da1b0b36b60>], [<ast.Constant object at 0x7da1b0b37370>, <ast.Call object at 0x7da1b0b37b20>, <ast.Constant object at 0x7da20c795d20>, <ast.Constant object at 0x7da20c7961a0>, <ast.Constant object at 0x7da20c796890>, <ast.Constant object at 0x7da20c794a60>, <ast.Constant object at 0x7da20c7948e0>, <ast.BinOp object at 0x7da20c796200>]] variable[r] assign[=] call[name[requests].get, parameter[constant[https://gdc-api.nci.nih.gov/cases]]] variable[j] assign[=] call[name[json].loads, parameter[call[name[r].content.decode, parameter[]]]] variable[clinical] assign[=] dictionary[[], []] variable[valid] assign[=] constant[0] for taget[name[s]] in starred[name[j]] begin[:] if compare[constant[diagnoses] <ast.NotIn object at 0x7da2590d7190> name[s]] begin[:] continue <ast.AugAssign object at 0x7da20c7942b0> assert[compare[call[name[len], parameter[call[name[s]][constant[diagnoses]]]] equal[==] constant[1]]] variable[diag] assign[=] call[call[name[s]][constant[diagnoses]]][constant[0]] variable[tcga_id] assign[=] call[call[name[diag]][constant[submitter_id]]][<ast.Slice object at 0x7da18f811a50>] call[name[clinical]][name[tcga_id]] assign[=] name[diag] call[name[logger].info, parameter[constant[Found clinical data for %d cases.], 
name[valid]]] variable[df] assign[=] call[name[pd].DataFrame.from_dict, parameter[name[clinical]]].T call[name[df].sort_index, parameter[]] return[name[df]]
keyword[def] identifier[get_clinical_data] ( identifier[tcga_id] ): literal[string] identifier[payload] ={ literal[string] : literal[string] , literal[string] : identifier[json] . identifier[dumps] ({ literal[string] : literal[string] , literal[string] :[ { literal[string] : literal[string] , literal[string] :{ literal[string] : literal[string] , literal[string] :[ literal[string] ]}}, { literal[string] : literal[string] , literal[string] :{ literal[string] : literal[string] , literal[string] :[ identifier[tcga_id] ]}}] }), literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[int] , literal[string] : literal[string] % identifier[tcga_id] , } identifier[r] = identifier[requests] . identifier[get] ( literal[string] , identifier[params] = identifier[payload] ) identifier[j] = identifier[json] . identifier[loads] ( identifier[r] . identifier[content] . identifier[decode] ()) identifier[clinical] ={} identifier[valid] = literal[int] keyword[for] identifier[s] keyword[in] identifier[j] : keyword[if] literal[string] keyword[not] keyword[in] identifier[s] : keyword[continue] identifier[valid] += literal[int] keyword[assert] identifier[len] ( identifier[s] [ literal[string] ])== literal[int] identifier[diag] = identifier[s] [ literal[string] ][ literal[int] ] identifier[tcga_id] = identifier[diag] [ literal[string] ][: literal[int] ] identifier[clinical] [ identifier[tcga_id] ]= identifier[diag] identifier[logger] . identifier[info] ( literal[string] , identifier[valid] ) identifier[df] = identifier[pd] . identifier[DataFrame] . identifier[from_dict] ( identifier[clinical] ). identifier[T] identifier[df] . identifier[sort_index] ( identifier[inplace] = keyword[True] ) keyword[return] identifier[df]
def get_clinical_data(tcga_id): """Get clinical data for a TCGA project. Parameters ---------- tcga_id : str The TCGA project ID. Returns ------- `pandas.DataFrame` The clinical data.abs Notes ----- Clinical data is associated with individual cases (patients). These correspond to rows in the returned data frame, and are identified by 12-character TCGA barcodes. """ payload = {'attachment': 'true', 'filters': json.dumps({'op': 'and', 'content': [{'op': 'in', 'content': {'field': 'cases.project.program.name', 'value': ['TCGA']}}, {'op': 'in', 'content': {'field': 'project.project_id', 'value': [tcga_id]}}]}), 'fields': 'case_id', 'expand': 'demographic,diagnoses,family_histories,exposures', 'format': 'JSON', 'pretty': 'true', 'size': 10000, 'filename': 'clinical.project-%s' % tcga_id} r = requests.get('https://gdc-api.nci.nih.gov/cases', params=payload) j = json.loads(r.content.decode()) clinical = {} valid = 0 for s in j: if 'diagnoses' not in s: continue # depends on [control=['if'], data=[]] valid += 1 assert len(s['diagnoses']) == 1 diag = s['diagnoses'][0] tcga_id = diag['submitter_id'][:12] clinical[tcga_id] = diag # depends on [control=['for'], data=['s']] logger.info('Found clinical data for %d cases.', valid) df = pd.DataFrame.from_dict(clinical).T df.sort_index(inplace=True) return df
def _update_zipimporter_cache(normalized_path, cache, updater=None): """ Update zipimporter cache data for a given normalized path. Any sub-path entries are processed as well, i.e. those corresponding to zip archives embedded in other zip archives. Given updater is a callable taking a cache entry key and the original entry (after already removing the entry from the cache), and expected to update the entry and possibly return a new one to be inserted in its place. Returning None indicates that the entry should not be replaced with a new one. If no updater is given, the cache entries are simply removed without any additional processing, the same as if the updater simply returned None. """ for p in _collect_zipimporter_cache_entries(normalized_path, cache): # N.B. pypy's custom zipimport._zip_directory_cache implementation does # not support the complete dict interface: # * Does not support item assignment, thus not allowing this function # to be used only for removing existing cache entries. # * Does not support the dict.pop() method, forcing us to use the # get/del patterns instead. For more detailed information see the # following links: # https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960 # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99 old_entry = cache[p] del cache[p] new_entry = updater and updater(p, old_entry) if new_entry is not None: cache[p] = new_entry
def function[_update_zipimporter_cache, parameter[normalized_path, cache, updater]]: constant[ Update zipimporter cache data for a given normalized path. Any sub-path entries are processed as well, i.e. those corresponding to zip archives embedded in other zip archives. Given updater is a callable taking a cache entry key and the original entry (after already removing the entry from the cache), and expected to update the entry and possibly return a new one to be inserted in its place. Returning None indicates that the entry should not be replaced with a new one. If no updater is given, the cache entries are simply removed without any additional processing, the same as if the updater simply returned None. ] for taget[name[p]] in starred[call[name[_collect_zipimporter_cache_entries], parameter[name[normalized_path], name[cache]]]] begin[:] variable[old_entry] assign[=] call[name[cache]][name[p]] <ast.Delete object at 0x7da18bc71420> variable[new_entry] assign[=] <ast.BoolOp object at 0x7da18bc73820> if compare[name[new_entry] is_not constant[None]] begin[:] call[name[cache]][name[p]] assign[=] name[new_entry]
keyword[def] identifier[_update_zipimporter_cache] ( identifier[normalized_path] , identifier[cache] , identifier[updater] = keyword[None] ): literal[string] keyword[for] identifier[p] keyword[in] identifier[_collect_zipimporter_cache_entries] ( identifier[normalized_path] , identifier[cache] ): identifier[old_entry] = identifier[cache] [ identifier[p] ] keyword[del] identifier[cache] [ identifier[p] ] identifier[new_entry] = identifier[updater] keyword[and] identifier[updater] ( identifier[p] , identifier[old_entry] ) keyword[if] identifier[new_entry] keyword[is] keyword[not] keyword[None] : identifier[cache] [ identifier[p] ]= identifier[new_entry]
def _update_zipimporter_cache(normalized_path, cache, updater=None): """ Update zipimporter cache data for a given normalized path. Any sub-path entries are processed as well, i.e. those corresponding to zip archives embedded in other zip archives. Given updater is a callable taking a cache entry key and the original entry (after already removing the entry from the cache), and expected to update the entry and possibly return a new one to be inserted in its place. Returning None indicates that the entry should not be replaced with a new one. If no updater is given, the cache entries are simply removed without any additional processing, the same as if the updater simply returned None. """ for p in _collect_zipimporter_cache_entries(normalized_path, cache): # N.B. pypy's custom zipimport._zip_directory_cache implementation does # not support the complete dict interface: # * Does not support item assignment, thus not allowing this function # to be used only for removing existing cache entries. # * Does not support the dict.pop() method, forcing us to use the # get/del patterns instead. For more detailed information see the # following links: # https://bitbucket.org/pypa/setuptools/issue/202/more-robust-zipimporter-cache-invalidation#comment-10495960 # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99 old_entry = cache[p] del cache[p] new_entry = updater and updater(p, old_entry) if new_entry is not None: cache[p] = new_entry # depends on [control=['if'], data=['new_entry']] # depends on [control=['for'], data=['p']]
def check_connection (self): """ Check a URL with HTTP protocol. Here is an excerpt from RFC 1945 with common response codes: The first digit of the Status-Code defines the class of response. The last two digits do not have any categorization role. There are 5 values for the first digit: - 1xx: Informational - Not used, but reserved for future use - 2xx: Success - The action was successfully received, understood, and accepted. - 3xx: Redirection - Further action must be taken in order to complete the request - 4xx: Client Error - The request contains bad syntax or cannot be fulfilled - 5xx: Server Error - The server failed to fulfill an apparently valid request """ self.session = self.aggregate.get_request_session() # set the proxy, so a 407 status after this is an error self.set_proxy(self.aggregate.config["proxy"].get(self.scheme)) self.construct_auth() # check robots.txt if not self.allows_robots(self.url): self.add_info(_("Access denied by robots.txt, checked only syntax.")) self.set_result(_("syntax OK")) self.do_check_content = False return # check the http connection request = self.build_request() self.send_request(request) self._add_response_info() self.follow_redirections(request) self.check_response() if self.allows_simple_recursion(): self.parse_header_links()
def function[check_connection, parameter[self]]: constant[ Check a URL with HTTP protocol. Here is an excerpt from RFC 1945 with common response codes: The first digit of the Status-Code defines the class of response. The last two digits do not have any categorization role. There are 5 values for the first digit: - 1xx: Informational - Not used, but reserved for future use - 2xx: Success - The action was successfully received, understood, and accepted. - 3xx: Redirection - Further action must be taken in order to complete the request - 4xx: Client Error - The request contains bad syntax or cannot be fulfilled - 5xx: Server Error - The server failed to fulfill an apparently valid request ] name[self].session assign[=] call[name[self].aggregate.get_request_session, parameter[]] call[name[self].set_proxy, parameter[call[call[name[self].aggregate.config][constant[proxy]].get, parameter[name[self].scheme]]]] call[name[self].construct_auth, parameter[]] if <ast.UnaryOp object at 0x7da18eb568f0> begin[:] call[name[self].add_info, parameter[call[name[_], parameter[constant[Access denied by robots.txt, checked only syntax.]]]]] call[name[self].set_result, parameter[call[name[_], parameter[constant[syntax OK]]]]] name[self].do_check_content assign[=] constant[False] return[None] variable[request] assign[=] call[name[self].build_request, parameter[]] call[name[self].send_request, parameter[name[request]]] call[name[self]._add_response_info, parameter[]] call[name[self].follow_redirections, parameter[name[request]]] call[name[self].check_response, parameter[]] if call[name[self].allows_simple_recursion, parameter[]] begin[:] call[name[self].parse_header_links, parameter[]]
keyword[def] identifier[check_connection] ( identifier[self] ): literal[string] identifier[self] . identifier[session] = identifier[self] . identifier[aggregate] . identifier[get_request_session] () identifier[self] . identifier[set_proxy] ( identifier[self] . identifier[aggregate] . identifier[config] [ literal[string] ]. identifier[get] ( identifier[self] . identifier[scheme] )) identifier[self] . identifier[construct_auth] () keyword[if] keyword[not] identifier[self] . identifier[allows_robots] ( identifier[self] . identifier[url] ): identifier[self] . identifier[add_info] ( identifier[_] ( literal[string] )) identifier[self] . identifier[set_result] ( identifier[_] ( literal[string] )) identifier[self] . identifier[do_check_content] = keyword[False] keyword[return] identifier[request] = identifier[self] . identifier[build_request] () identifier[self] . identifier[send_request] ( identifier[request] ) identifier[self] . identifier[_add_response_info] () identifier[self] . identifier[follow_redirections] ( identifier[request] ) identifier[self] . identifier[check_response] () keyword[if] identifier[self] . identifier[allows_simple_recursion] (): identifier[self] . identifier[parse_header_links] ()
def check_connection(self): """ Check a URL with HTTP protocol. Here is an excerpt from RFC 1945 with common response codes: The first digit of the Status-Code defines the class of response. The last two digits do not have any categorization role. There are 5 values for the first digit: - 1xx: Informational - Not used, but reserved for future use - 2xx: Success - The action was successfully received, understood, and accepted. - 3xx: Redirection - Further action must be taken in order to complete the request - 4xx: Client Error - The request contains bad syntax or cannot be fulfilled - 5xx: Server Error - The server failed to fulfill an apparently valid request """ self.session = self.aggregate.get_request_session() # set the proxy, so a 407 status after this is an error self.set_proxy(self.aggregate.config['proxy'].get(self.scheme)) self.construct_auth() # check robots.txt if not self.allows_robots(self.url): self.add_info(_('Access denied by robots.txt, checked only syntax.')) self.set_result(_('syntax OK')) self.do_check_content = False return # depends on [control=['if'], data=[]] # check the http connection request = self.build_request() self.send_request(request) self._add_response_info() self.follow_redirections(request) self.check_response() if self.allows_simple_recursion(): self.parse_header_links() # depends on [control=['if'], data=[]]
def get(expr, key, default=None): """ Return the mapped value for this key, or the default if the key does not exist Parameters ---------- key : any default : any """ return ops.MapValueOrDefaultForKey(expr, key, default).to_expr()
def function[get, parameter[expr, key, default]]: constant[ Return the mapped value for this key, or the default if the key does not exist Parameters ---------- key : any default : any ] return[call[call[name[ops].MapValueOrDefaultForKey, parameter[name[expr], name[key], name[default]]].to_expr, parameter[]]]
keyword[def] identifier[get] ( identifier[expr] , identifier[key] , identifier[default] = keyword[None] ): literal[string] keyword[return] identifier[ops] . identifier[MapValueOrDefaultForKey] ( identifier[expr] , identifier[key] , identifier[default] ). identifier[to_expr] ()
def get(expr, key, default=None): """ Return the mapped value for this key, or the default if the key does not exist Parameters ---------- key : any default : any """ return ops.MapValueOrDefaultForKey(expr, key, default).to_expr()
def set_attribute(self, attribute, attribute_state): """Get an attribute from the session. :param attribute: :return: attribute value, status code :rtype: object, constants.StatusCode """ # Check that the attribute exists. try: attr = attributes.AttributesByID[attribute] except KeyError: return constants.StatusCode.error_nonsupported_attribute # Check that the attribute is valid for this session type. if not attr.in_resource(self.session_type): return constants.StatusCode.error_nonsupported_attribute # Check that the attribute is writable. if not attr.write: return constants.StatusCode.error_attribute_read_only try: self.attrs[attribute] = attribute_state except ValueError: return constants.StatusCode.error_nonsupported_attribute_state return constants.StatusCode.success
def function[set_attribute, parameter[self, attribute, attribute_state]]: constant[Get an attribute from the session. :param attribute: :return: attribute value, status code :rtype: object, constants.StatusCode ] <ast.Try object at 0x7da20e960280> if <ast.UnaryOp object at 0x7da20e963880> begin[:] return[name[constants].StatusCode.error_nonsupported_attribute] if <ast.UnaryOp object at 0x7da20e963e80> begin[:] return[name[constants].StatusCode.error_attribute_read_only] <ast.Try object at 0x7da20e9605b0> return[name[constants].StatusCode.success]
keyword[def] identifier[set_attribute] ( identifier[self] , identifier[attribute] , identifier[attribute_state] ): literal[string] keyword[try] : identifier[attr] = identifier[attributes] . identifier[AttributesByID] [ identifier[attribute] ] keyword[except] identifier[KeyError] : keyword[return] identifier[constants] . identifier[StatusCode] . identifier[error_nonsupported_attribute] keyword[if] keyword[not] identifier[attr] . identifier[in_resource] ( identifier[self] . identifier[session_type] ): keyword[return] identifier[constants] . identifier[StatusCode] . identifier[error_nonsupported_attribute] keyword[if] keyword[not] identifier[attr] . identifier[write] : keyword[return] identifier[constants] . identifier[StatusCode] . identifier[error_attribute_read_only] keyword[try] : identifier[self] . identifier[attrs] [ identifier[attribute] ]= identifier[attribute_state] keyword[except] identifier[ValueError] : keyword[return] identifier[constants] . identifier[StatusCode] . identifier[error_nonsupported_attribute_state] keyword[return] identifier[constants] . identifier[StatusCode] . identifier[success]
def set_attribute(self, attribute, attribute_state): """Get an attribute from the session. :param attribute: :return: attribute value, status code :rtype: object, constants.StatusCode """ # Check that the attribute exists. try: attr = attributes.AttributesByID[attribute] # depends on [control=['try'], data=[]] except KeyError: return constants.StatusCode.error_nonsupported_attribute # depends on [control=['except'], data=[]] # Check that the attribute is valid for this session type. if not attr.in_resource(self.session_type): return constants.StatusCode.error_nonsupported_attribute # depends on [control=['if'], data=[]] # Check that the attribute is writable. if not attr.write: return constants.StatusCode.error_attribute_read_only # depends on [control=['if'], data=[]] try: self.attrs[attribute] = attribute_state # depends on [control=['try'], data=[]] except ValueError: return constants.StatusCode.error_nonsupported_attribute_state # depends on [control=['except'], data=[]] return constants.StatusCode.success
def trigger(self, *args, **kargs): """ Execute all event handlers with optional arguments for the observable. """ event = args[0] if isinstance(event, str) and ' ' in event: event = event.split(' ') # split event names ... if isinstance(event, list): # event is a list of events for each in event: self.events[each].trigger(*args[1:], **kargs) else: self.events[event].trigger(*args[1:], **kargs)
def function[trigger, parameter[self]]: constant[ Execute all event handlers with optional arguments for the observable. ] variable[event] assign[=] call[name[args]][constant[0]] if <ast.BoolOp object at 0x7da1b25366e0> begin[:] variable[event] assign[=] call[name[event].split, parameter[constant[ ]]] if call[name[isinstance], parameter[name[event], name[list]]] begin[:] for taget[name[each]] in starred[name[event]] begin[:] call[call[name[self].events][name[each]].trigger, parameter[<ast.Starred object at 0x7da1b25ed750>]]
keyword[def] identifier[trigger] ( identifier[self] ,* identifier[args] ,** identifier[kargs] ): literal[string] identifier[event] = identifier[args] [ literal[int] ] keyword[if] identifier[isinstance] ( identifier[event] , identifier[str] ) keyword[and] literal[string] keyword[in] identifier[event] : identifier[event] = identifier[event] . identifier[split] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[event] , identifier[list] ): keyword[for] identifier[each] keyword[in] identifier[event] : identifier[self] . identifier[events] [ identifier[each] ]. identifier[trigger] (* identifier[args] [ literal[int] :],** identifier[kargs] ) keyword[else] : identifier[self] . identifier[events] [ identifier[event] ]. identifier[trigger] (* identifier[args] [ literal[int] :],** identifier[kargs] )
def trigger(self, *args, **kargs): """ Execute all event handlers with optional arguments for the observable. """ event = args[0] if isinstance(event, str) and ' ' in event: event = event.split(' ') # split event names ... # depends on [control=['if'], data=[]] if isinstance(event, list): # event is a list of events for each in event: self.events[each].trigger(*args[1:], **kargs) # depends on [control=['for'], data=['each']] # depends on [control=['if'], data=[]] else: self.events[event].trigger(*args[1:], **kargs)
def create_message_buffer(size, type): """Create a message buffer""" rtn = wrapper.nn_allocmsg(size, type) if rtn is None: raise NanoMsgAPIError() return rtn
def function[create_message_buffer, parameter[size, type]]: constant[Create a message buffer] variable[rtn] assign[=] call[name[wrapper].nn_allocmsg, parameter[name[size], name[type]]] if compare[name[rtn] is constant[None]] begin[:] <ast.Raise object at 0x7da20e9b2fe0> return[name[rtn]]
keyword[def] identifier[create_message_buffer] ( identifier[size] , identifier[type] ): literal[string] identifier[rtn] = identifier[wrapper] . identifier[nn_allocmsg] ( identifier[size] , identifier[type] ) keyword[if] identifier[rtn] keyword[is] keyword[None] : keyword[raise] identifier[NanoMsgAPIError] () keyword[return] identifier[rtn]
def create_message_buffer(size, type): """Create a message buffer""" rtn = wrapper.nn_allocmsg(size, type) if rtn is None: raise NanoMsgAPIError() # depends on [control=['if'], data=[]] return rtn
def run_sink_check(self, model, solver, threshold, implicit_sinks=True): """Run sink production check method.""" prob = solver.create_problem() # Create flux variables v = prob.namespace() for reaction_id in model.reactions: lower, upper = model.limits[reaction_id] v.define([reaction_id], lower=lower, upper=upper) # Build mass balance constraints massbalance_lhs = {compound: 0 for compound in model.compounds} for spec, value in iteritems(model.matrix): compound, reaction_id = spec massbalance_lhs[compound] += v(reaction_id) * value mass_balance_constrs = {} for compound, lhs in iteritems(massbalance_lhs): if implicit_sinks: # The constraint is merely >0 meaning that we have implicit # sinks for all compounds. prob.add_linear_constraints(lhs >= 0) else: # Save these constraints so we can temporarily remove them # to create a sink. c, = prob.add_linear_constraints(lhs == 0) mass_balance_constrs[compound] = c for compound, lhs in sorted(iteritems(massbalance_lhs)): if not implicit_sinks: mass_balance_constrs[compound].delete() prob.set_objective(lhs) try: result = prob.solve(lp.ObjectiveSense.Maximize) except lp.SolverError as e: logger.warning('Failed to solve for compound: {} ({})'.format( compound, e)) if result.get_value(lhs) < threshold: yield compound if not implicit_sinks: # Restore mass balance constraint. c, = prob.add_linear_constraints(lhs == 0) mass_balance_constrs[compound] = c
def function[run_sink_check, parameter[self, model, solver, threshold, implicit_sinks]]: constant[Run sink production check method.] variable[prob] assign[=] call[name[solver].create_problem, parameter[]] variable[v] assign[=] call[name[prob].namespace, parameter[]] for taget[name[reaction_id]] in starred[name[model].reactions] begin[:] <ast.Tuple object at 0x7da18bc722c0> assign[=] call[name[model].limits][name[reaction_id]] call[name[v].define, parameter[list[[<ast.Name object at 0x7da18bc70b50>]]]] variable[massbalance_lhs] assign[=] <ast.DictComp object at 0x7da18bc73e80> for taget[tuple[[<ast.Name object at 0x7da18bc71750>, <ast.Name object at 0x7da18bc72e00>]]] in starred[call[name[iteritems], parameter[name[model].matrix]]] begin[:] <ast.Tuple object at 0x7da18bc73df0> assign[=] name[spec] <ast.AugAssign object at 0x7da18bc73d00> variable[mass_balance_constrs] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da18bc73af0>, <ast.Name object at 0x7da18bc72200>]]] in starred[call[name[iteritems], parameter[name[massbalance_lhs]]]] begin[:] if name[implicit_sinks] begin[:] call[name[prob].add_linear_constraints, parameter[compare[name[lhs] greater_or_equal[>=] constant[0]]]] for taget[tuple[[<ast.Name object at 0x7da18bc73a90>, <ast.Name object at 0x7da18bc73dc0>]]] in starred[call[name[sorted], parameter[call[name[iteritems], parameter[name[massbalance_lhs]]]]]] begin[:] if <ast.UnaryOp object at 0x7da18bc73e20> begin[:] call[call[name[mass_balance_constrs]][name[compound]].delete, parameter[]] call[name[prob].set_objective, parameter[name[lhs]]] <ast.Try object at 0x7da18bc73d60> if compare[call[name[result].get_value, parameter[name[lhs]]] less[<] name[threshold]] begin[:] <ast.Yield object at 0x7da18bc71600> if <ast.UnaryOp object at 0x7da18bc70d00> begin[:] <ast.Tuple object at 0x7da20c76da50> assign[=] call[name[prob].add_linear_constraints, parameter[compare[name[lhs] equal[==] constant[0]]]] 
call[name[mass_balance_constrs]][name[compound]] assign[=] name[c]
keyword[def] identifier[run_sink_check] ( identifier[self] , identifier[model] , identifier[solver] , identifier[threshold] , identifier[implicit_sinks] = keyword[True] ): literal[string] identifier[prob] = identifier[solver] . identifier[create_problem] () identifier[v] = identifier[prob] . identifier[namespace] () keyword[for] identifier[reaction_id] keyword[in] identifier[model] . identifier[reactions] : identifier[lower] , identifier[upper] = identifier[model] . identifier[limits] [ identifier[reaction_id] ] identifier[v] . identifier[define] ([ identifier[reaction_id] ], identifier[lower] = identifier[lower] , identifier[upper] = identifier[upper] ) identifier[massbalance_lhs] ={ identifier[compound] : literal[int] keyword[for] identifier[compound] keyword[in] identifier[model] . identifier[compounds] } keyword[for] identifier[spec] , identifier[value] keyword[in] identifier[iteritems] ( identifier[model] . identifier[matrix] ): identifier[compound] , identifier[reaction_id] = identifier[spec] identifier[massbalance_lhs] [ identifier[compound] ]+= identifier[v] ( identifier[reaction_id] )* identifier[value] identifier[mass_balance_constrs] ={} keyword[for] identifier[compound] , identifier[lhs] keyword[in] identifier[iteritems] ( identifier[massbalance_lhs] ): keyword[if] identifier[implicit_sinks] : identifier[prob] . identifier[add_linear_constraints] ( identifier[lhs] >= literal[int] ) keyword[else] : identifier[c] ,= identifier[prob] . identifier[add_linear_constraints] ( identifier[lhs] == literal[int] ) identifier[mass_balance_constrs] [ identifier[compound] ]= identifier[c] keyword[for] identifier[compound] , identifier[lhs] keyword[in] identifier[sorted] ( identifier[iteritems] ( identifier[massbalance_lhs] )): keyword[if] keyword[not] identifier[implicit_sinks] : identifier[mass_balance_constrs] [ identifier[compound] ]. identifier[delete] () identifier[prob] . 
identifier[set_objective] ( identifier[lhs] ) keyword[try] : identifier[result] = identifier[prob] . identifier[solve] ( identifier[lp] . identifier[ObjectiveSense] . identifier[Maximize] ) keyword[except] identifier[lp] . identifier[SolverError] keyword[as] identifier[e] : identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[compound] , identifier[e] )) keyword[if] identifier[result] . identifier[get_value] ( identifier[lhs] )< identifier[threshold] : keyword[yield] identifier[compound] keyword[if] keyword[not] identifier[implicit_sinks] : identifier[c] ,= identifier[prob] . identifier[add_linear_constraints] ( identifier[lhs] == literal[int] ) identifier[mass_balance_constrs] [ identifier[compound] ]= identifier[c]
def run_sink_check(self, model, solver, threshold, implicit_sinks=True): """Run sink production check method.""" prob = solver.create_problem() # Create flux variables v = prob.namespace() for reaction_id in model.reactions: (lower, upper) = model.limits[reaction_id] v.define([reaction_id], lower=lower, upper=upper) # depends on [control=['for'], data=['reaction_id']] # Build mass balance constraints massbalance_lhs = {compound: 0 for compound in model.compounds} for (spec, value) in iteritems(model.matrix): (compound, reaction_id) = spec massbalance_lhs[compound] += v(reaction_id) * value # depends on [control=['for'], data=[]] mass_balance_constrs = {} for (compound, lhs) in iteritems(massbalance_lhs): if implicit_sinks: # The constraint is merely >0 meaning that we have implicit # sinks for all compounds. prob.add_linear_constraints(lhs >= 0) # depends on [control=['if'], data=[]] else: # Save these constraints so we can temporarily remove them # to create a sink. (c,) = prob.add_linear_constraints(lhs == 0) mass_balance_constrs[compound] = c # depends on [control=['for'], data=[]] for (compound, lhs) in sorted(iteritems(massbalance_lhs)): if not implicit_sinks: mass_balance_constrs[compound].delete() # depends on [control=['if'], data=[]] prob.set_objective(lhs) try: result = prob.solve(lp.ObjectiveSense.Maximize) # depends on [control=['try'], data=[]] except lp.SolverError as e: logger.warning('Failed to solve for compound: {} ({})'.format(compound, e)) # depends on [control=['except'], data=['e']] if result.get_value(lhs) < threshold: yield compound # depends on [control=['if'], data=[]] if not implicit_sinks: # Restore mass balance constraint. (c,) = prob.add_linear_constraints(lhs == 0) mass_balance_constrs[compound] = c # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def get_sdr_data_helper(reserve_fn, get_fn, record_id, reservation_id=None): """Helper function to retrieve the sdr data using the specified functions. This can be used for SDRs from the Sensor Device or form the SDR repository. """ if reservation_id is None: reservation_id = reserve_fn() (next_id, data) = get_fn(reservation_id, record_id, 0, 5) header = ByteBuffer(data) record_id = header.pop_unsigned_int(2) record_version = header.pop_unsigned_int(1) record_type = header.pop_unsigned_int(1) record_payload_length = header.pop_unsigned_int(1) record_length = record_payload_length + 5 record_data = ByteBuffer(data) offset = len(record_data) max_req_len = 20 retry = 20 # now get the other record data while True: retry -= 1 if retry == 0: raise RetryError() length = max_req_len if (offset + length) > record_length: length = record_length - offset try: (next_id, data) = get_fn(reservation_id, record_id, offset, length) except CompletionCodeError as e: if e.cc == constants.CC_CANT_RET_NUM_REQ_BYTES: # reduce max lenght max_req_len -= 4 if max_req_len <= 0: retry = 0 else: raise CompletionCodeError(e.cc) record_data.extend(data[:]) offset = len(record_data) if len(record_data) >= record_length: break return (next_id, record_data)
def function[get_sdr_data_helper, parameter[reserve_fn, get_fn, record_id, reservation_id]]: constant[Helper function to retrieve the sdr data using the specified functions. This can be used for SDRs from the Sensor Device or form the SDR repository. ] if compare[name[reservation_id] is constant[None]] begin[:] variable[reservation_id] assign[=] call[name[reserve_fn], parameter[]] <ast.Tuple object at 0x7da18f723ca0> assign[=] call[name[get_fn], parameter[name[reservation_id], name[record_id], constant[0], constant[5]]] variable[header] assign[=] call[name[ByteBuffer], parameter[name[data]]] variable[record_id] assign[=] call[name[header].pop_unsigned_int, parameter[constant[2]]] variable[record_version] assign[=] call[name[header].pop_unsigned_int, parameter[constant[1]]] variable[record_type] assign[=] call[name[header].pop_unsigned_int, parameter[constant[1]]] variable[record_payload_length] assign[=] call[name[header].pop_unsigned_int, parameter[constant[1]]] variable[record_length] assign[=] binary_operation[name[record_payload_length] + constant[5]] variable[record_data] assign[=] call[name[ByteBuffer], parameter[name[data]]] variable[offset] assign[=] call[name[len], parameter[name[record_data]]] variable[max_req_len] assign[=] constant[20] variable[retry] assign[=] constant[20] while constant[True] begin[:] <ast.AugAssign object at 0x7da18f7210f0> if compare[name[retry] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da18f720550> variable[length] assign[=] name[max_req_len] if compare[binary_operation[name[offset] + name[length]] greater[>] name[record_length]] begin[:] variable[length] assign[=] binary_operation[name[record_length] - name[offset]] <ast.Try object at 0x7da18f720af0> call[name[record_data].extend, parameter[call[name[data]][<ast.Slice object at 0x7da18fe939a0>]]] variable[offset] assign[=] call[name[len], parameter[name[record_data]]] if compare[call[name[len], parameter[name[record_data]]] greater_or_equal[>=] name[record_length]] 
begin[:] break return[tuple[[<ast.Name object at 0x7da18fe90280>, <ast.Name object at 0x7da18fe93a00>]]]
keyword[def] identifier[get_sdr_data_helper] ( identifier[reserve_fn] , identifier[get_fn] , identifier[record_id] , identifier[reservation_id] = keyword[None] ): literal[string] keyword[if] identifier[reservation_id] keyword[is] keyword[None] : identifier[reservation_id] = identifier[reserve_fn] () ( identifier[next_id] , identifier[data] )= identifier[get_fn] ( identifier[reservation_id] , identifier[record_id] , literal[int] , literal[int] ) identifier[header] = identifier[ByteBuffer] ( identifier[data] ) identifier[record_id] = identifier[header] . identifier[pop_unsigned_int] ( literal[int] ) identifier[record_version] = identifier[header] . identifier[pop_unsigned_int] ( literal[int] ) identifier[record_type] = identifier[header] . identifier[pop_unsigned_int] ( literal[int] ) identifier[record_payload_length] = identifier[header] . identifier[pop_unsigned_int] ( literal[int] ) identifier[record_length] = identifier[record_payload_length] + literal[int] identifier[record_data] = identifier[ByteBuffer] ( identifier[data] ) identifier[offset] = identifier[len] ( identifier[record_data] ) identifier[max_req_len] = literal[int] identifier[retry] = literal[int] keyword[while] keyword[True] : identifier[retry] -= literal[int] keyword[if] identifier[retry] == literal[int] : keyword[raise] identifier[RetryError] () identifier[length] = identifier[max_req_len] keyword[if] ( identifier[offset] + identifier[length] )> identifier[record_length] : identifier[length] = identifier[record_length] - identifier[offset] keyword[try] : ( identifier[next_id] , identifier[data] )= identifier[get_fn] ( identifier[reservation_id] , identifier[record_id] , identifier[offset] , identifier[length] ) keyword[except] identifier[CompletionCodeError] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[cc] == identifier[constants] . 
identifier[CC_CANT_RET_NUM_REQ_BYTES] : identifier[max_req_len] -= literal[int] keyword[if] identifier[max_req_len] <= literal[int] : identifier[retry] = literal[int] keyword[else] : keyword[raise] identifier[CompletionCodeError] ( identifier[e] . identifier[cc] ) identifier[record_data] . identifier[extend] ( identifier[data] [:]) identifier[offset] = identifier[len] ( identifier[record_data] ) keyword[if] identifier[len] ( identifier[record_data] )>= identifier[record_length] : keyword[break] keyword[return] ( identifier[next_id] , identifier[record_data] )
def get_sdr_data_helper(reserve_fn, get_fn, record_id, reservation_id=None): """Helper function to retrieve the sdr data using the specified functions. This can be used for SDRs from the Sensor Device or form the SDR repository. """ if reservation_id is None: reservation_id = reserve_fn() # depends on [control=['if'], data=['reservation_id']] (next_id, data) = get_fn(reservation_id, record_id, 0, 5) header = ByteBuffer(data) record_id = header.pop_unsigned_int(2) record_version = header.pop_unsigned_int(1) record_type = header.pop_unsigned_int(1) record_payload_length = header.pop_unsigned_int(1) record_length = record_payload_length + 5 record_data = ByteBuffer(data) offset = len(record_data) max_req_len = 20 retry = 20 # now get the other record data while True: retry -= 1 if retry == 0: raise RetryError() # depends on [control=['if'], data=[]] length = max_req_len if offset + length > record_length: length = record_length - offset # depends on [control=['if'], data=['record_length']] try: (next_id, data) = get_fn(reservation_id, record_id, offset, length) # depends on [control=['try'], data=[]] except CompletionCodeError as e: if e.cc == constants.CC_CANT_RET_NUM_REQ_BYTES: # reduce max lenght max_req_len -= 4 if max_req_len <= 0: retry = 0 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: raise CompletionCodeError(e.cc) # depends on [control=['except'], data=['e']] record_data.extend(data[:]) offset = len(record_data) if len(record_data) >= record_length: break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return (next_id, record_data)
def find_all_controllers(**kwargs) -> [ControllerDiscovery]: """ :return: A list of :class:`~approxeng.input.controllers.ControllerDiscovery` instances corresponding to controllers attached to this host, ordered by the ordering on ControllerDiscovery. Any controllers found will be constructed with kwargs passed to their constructor function, particularly useful for dead and hot zone parameters. """ def get_controller_classes() -> [{}]: """ Scans for subclasses of :class:`~approxeng.input.Controller` and reads out data from their :meth:`~approxeng.input.Controller.registrations_ids` method. This should return a list of tuples of `(vendor_id, product_id)` which are then used along with the subclass itself to populate a registry of known subclasses. :return: A generator that produces known subclasses and their registration information """ for controller_class in Controller.__subclasses__(): for vendor_id, product_id in controller_class.registration_ids(): yield {'constructor': controller_class, 'vendor_id': vendor_id, 'product_id': product_id} id_to_constructor = {'{}-{}'.format(c['vendor_id'], c['product_id']): c['constructor'] for c in get_controller_classes()} def controller_constructor(d: InputDevice): id = '{}-{}'.format(d.info.vendor, d.info.product) if id in id_to_constructor: return id_to_constructor[id] return None all_devices = list(InputDevice(path) for path in list_devices()) devices_by_name = {name: list(e for e in all_devices if unique_name(e) == name) for name in set(unique_name(e) for e in all_devices if controller_constructor(e) is not None)} controllers = sorted( ControllerDiscovery(controller=controller_constructor(devices[0])(**kwargs), devices=devices, name=name) for name, devices in devices_by_name.items()) return controllers
def function[find_all_controllers, parameter[]]: constant[ :return: A list of :class:`~approxeng.input.controllers.ControllerDiscovery` instances corresponding to controllers attached to this host, ordered by the ordering on ControllerDiscovery. Any controllers found will be constructed with kwargs passed to their constructor function, particularly useful for dead and hot zone parameters. ] def function[get_controller_classes, parameter[]]: constant[ Scans for subclasses of :class:`~approxeng.input.Controller` and reads out data from their :meth:`~approxeng.input.Controller.registrations_ids` method. This should return a list of tuples of `(vendor_id, product_id)` which are then used along with the subclass itself to populate a registry of known subclasses. :return: A generator that produces known subclasses and their registration information ] for taget[name[controller_class]] in starred[call[name[Controller].__subclasses__, parameter[]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da2054a7400>, <ast.Name object at 0x7da2054a6b30>]]] in starred[call[name[controller_class].registration_ids, parameter[]]] begin[:] <ast.Yield object at 0x7da2054a4430> variable[id_to_constructor] assign[=] <ast.DictComp object at 0x7da2054a4190> def function[controller_constructor, parameter[d]]: variable[id] assign[=] call[constant[{}-{}].format, parameter[name[d].info.vendor, name[d].info.product]] if compare[name[id] in name[id_to_constructor]] begin[:] return[call[name[id_to_constructor]][name[id]]] return[constant[None]] variable[all_devices] assign[=] call[name[list], parameter[<ast.GeneratorExp object at 0x7da2047e9f60>]] variable[devices_by_name] assign[=] <ast.DictComp object at 0x7da2044c1030> variable[controllers] assign[=] call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da18f723df0>]] return[name[controllers]]
keyword[def] identifier[find_all_controllers] (** identifier[kwargs] )->[ identifier[ControllerDiscovery] ]: literal[string] keyword[def] identifier[get_controller_classes] ()->[{}]: literal[string] keyword[for] identifier[controller_class] keyword[in] identifier[Controller] . identifier[__subclasses__] (): keyword[for] identifier[vendor_id] , identifier[product_id] keyword[in] identifier[controller_class] . identifier[registration_ids] (): keyword[yield] { literal[string] : identifier[controller_class] , literal[string] : identifier[vendor_id] , literal[string] : identifier[product_id] } identifier[id_to_constructor] ={ literal[string] . identifier[format] ( identifier[c] [ literal[string] ], identifier[c] [ literal[string] ]): identifier[c] [ literal[string] ] keyword[for] identifier[c] keyword[in] identifier[get_controller_classes] ()} keyword[def] identifier[controller_constructor] ( identifier[d] : identifier[InputDevice] ): identifier[id] = literal[string] . identifier[format] ( identifier[d] . identifier[info] . identifier[vendor] , identifier[d] . identifier[info] . 
identifier[product] ) keyword[if] identifier[id] keyword[in] identifier[id_to_constructor] : keyword[return] identifier[id_to_constructor] [ identifier[id] ] keyword[return] keyword[None] identifier[all_devices] = identifier[list] ( identifier[InputDevice] ( identifier[path] ) keyword[for] identifier[path] keyword[in] identifier[list_devices] ()) identifier[devices_by_name] ={ identifier[name] : identifier[list] ( identifier[e] keyword[for] identifier[e] keyword[in] identifier[all_devices] keyword[if] identifier[unique_name] ( identifier[e] )== identifier[name] ) keyword[for] identifier[name] keyword[in] identifier[set] ( identifier[unique_name] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[all_devices] keyword[if] identifier[controller_constructor] ( identifier[e] ) keyword[is] keyword[not] keyword[None] )} identifier[controllers] = identifier[sorted] ( identifier[ControllerDiscovery] ( identifier[controller] = identifier[controller_constructor] ( identifier[devices] [ literal[int] ])(** identifier[kwargs] ), identifier[devices] = identifier[devices] , identifier[name] = identifier[name] ) keyword[for] identifier[name] , identifier[devices] keyword[in] identifier[devices_by_name] . identifier[items] ()) keyword[return] identifier[controllers]
def find_all_controllers(**kwargs) -> [ControllerDiscovery]: """ :return: A list of :class:`~approxeng.input.controllers.ControllerDiscovery` instances corresponding to controllers attached to this host, ordered by the ordering on ControllerDiscovery. Any controllers found will be constructed with kwargs passed to their constructor function, particularly useful for dead and hot zone parameters. """ def get_controller_classes() -> [{}]: """ Scans for subclasses of :class:`~approxeng.input.Controller` and reads out data from their :meth:`~approxeng.input.Controller.registrations_ids` method. This should return a list of tuples of `(vendor_id, product_id)` which are then used along with the subclass itself to populate a registry of known subclasses. :return: A generator that produces known subclasses and their registration information """ for controller_class in Controller.__subclasses__(): for (vendor_id, product_id) in controller_class.registration_ids(): yield {'constructor': controller_class, 'vendor_id': vendor_id, 'product_id': product_id} # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['controller_class']] id_to_constructor = {'{}-{}'.format(c['vendor_id'], c['product_id']): c['constructor'] for c in get_controller_classes()} def controller_constructor(d: InputDevice): id = '{}-{}'.format(d.info.vendor, d.info.product) if id in id_to_constructor: return id_to_constructor[id] # depends on [control=['if'], data=['id', 'id_to_constructor']] return None all_devices = list((InputDevice(path) for path in list_devices())) devices_by_name = {name: list((e for e in all_devices if unique_name(e) == name)) for name in set((unique_name(e) for e in all_devices if controller_constructor(e) is not None))} controllers = sorted((ControllerDiscovery(controller=controller_constructor(devices[0])(**kwargs), devices=devices, name=name) for (name, devices) in devices_by_name.items())) return controllers
def getInputOrder(ast, input_order=None): """Derive the input order of the variables in an expression. """ variables = {} for a in ast.allOf('variable'): variables[a.value] = a variable_names = set(variables.keys()) if input_order: if variable_names != set(input_order): raise ValueError( "input names (%s) don't match those found in expression (%s)" % (input_order, variable_names)) ordered_names = input_order else: ordered_names = list(variable_names) ordered_names.sort() ordered_variables = [variables[v] for v in ordered_names] return ordered_variables
def function[getInputOrder, parameter[ast, input_order]]: constant[Derive the input order of the variables in an expression. ] variable[variables] assign[=] dictionary[[], []] for taget[name[a]] in starred[call[name[ast].allOf, parameter[constant[variable]]]] begin[:] call[name[variables]][name[a].value] assign[=] name[a] variable[variable_names] assign[=] call[name[set], parameter[call[name[variables].keys, parameter[]]]] if name[input_order] begin[:] if compare[name[variable_names] not_equal[!=] call[name[set], parameter[name[input_order]]]] begin[:] <ast.Raise object at 0x7da207f00ee0> variable[ordered_names] assign[=] name[input_order] variable[ordered_variables] assign[=] <ast.ListComp object at 0x7da207f00250> return[name[ordered_variables]]
keyword[def] identifier[getInputOrder] ( identifier[ast] , identifier[input_order] = keyword[None] ): literal[string] identifier[variables] ={} keyword[for] identifier[a] keyword[in] identifier[ast] . identifier[allOf] ( literal[string] ): identifier[variables] [ identifier[a] . identifier[value] ]= identifier[a] identifier[variable_names] = identifier[set] ( identifier[variables] . identifier[keys] ()) keyword[if] identifier[input_order] : keyword[if] identifier[variable_names] != identifier[set] ( identifier[input_order] ): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[input_order] , identifier[variable_names] )) identifier[ordered_names] = identifier[input_order] keyword[else] : identifier[ordered_names] = identifier[list] ( identifier[variable_names] ) identifier[ordered_names] . identifier[sort] () identifier[ordered_variables] =[ identifier[variables] [ identifier[v] ] keyword[for] identifier[v] keyword[in] identifier[ordered_names] ] keyword[return] identifier[ordered_variables]
def getInputOrder(ast, input_order=None): """Derive the input order of the variables in an expression. """ variables = {} for a in ast.allOf('variable'): variables[a.value] = a # depends on [control=['for'], data=['a']] variable_names = set(variables.keys()) if input_order: if variable_names != set(input_order): raise ValueError("input names (%s) don't match those found in expression (%s)" % (input_order, variable_names)) # depends on [control=['if'], data=['variable_names']] ordered_names = input_order # depends on [control=['if'], data=[]] else: ordered_names = list(variable_names) ordered_names.sort() ordered_variables = [variables[v] for v in ordered_names] return ordered_variables
def QA_indicator_CCI(DataFrame, N=14): """ TYP:=(HIGH+LOW+CLOSE)/3; CCI:(TYP-MA(TYP,N))/(0.015*AVEDEV(TYP,N)); """ typ = (DataFrame['high'] + DataFrame['low'] + DataFrame['close']) / 3 cci = ((typ - MA(typ, N)) / (0.015 * AVEDEV(typ, N))) a = 100 b = -100 return pd.DataFrame({ 'CCI': cci, 'a': a, 'b': b })
def function[QA_indicator_CCI, parameter[DataFrame, N]]: constant[ TYP:=(HIGH+LOW+CLOSE)/3; CCI:(TYP-MA(TYP,N))/(0.015*AVEDEV(TYP,N)); ] variable[typ] assign[=] binary_operation[binary_operation[binary_operation[call[name[DataFrame]][constant[high]] + call[name[DataFrame]][constant[low]]] + call[name[DataFrame]][constant[close]]] / constant[3]] variable[cci] assign[=] binary_operation[binary_operation[name[typ] - call[name[MA], parameter[name[typ], name[N]]]] / binary_operation[constant[0.015] * call[name[AVEDEV], parameter[name[typ], name[N]]]]] variable[a] assign[=] constant[100] variable[b] assign[=] <ast.UnaryOp object at 0x7da1b1ff3be0> return[call[name[pd].DataFrame, parameter[dictionary[[<ast.Constant object at 0x7da1b1ff2380>, <ast.Constant object at 0x7da1b1ff2e30>, <ast.Constant object at 0x7da1b1ff16f0>], [<ast.Name object at 0x7da1b1ff1240>, <ast.Name object at 0x7da1b1ff02b0>, <ast.Name object at 0x7da1b1ff09a0>]]]]]
keyword[def] identifier[QA_indicator_CCI] ( identifier[DataFrame] , identifier[N] = literal[int] ): literal[string] identifier[typ] =( identifier[DataFrame] [ literal[string] ]+ identifier[DataFrame] [ literal[string] ]+ identifier[DataFrame] [ literal[string] ])/ literal[int] identifier[cci] =(( identifier[typ] - identifier[MA] ( identifier[typ] , identifier[N] ))/( literal[int] * identifier[AVEDEV] ( identifier[typ] , identifier[N] ))) identifier[a] = literal[int] identifier[b] =- literal[int] keyword[return] identifier[pd] . identifier[DataFrame] ({ literal[string] : identifier[cci] , literal[string] : identifier[a] , literal[string] : identifier[b] })
def QA_indicator_CCI(DataFrame, N=14): """ TYP:=(HIGH+LOW+CLOSE)/3; CCI:(TYP-MA(TYP,N))/(0.015*AVEDEV(TYP,N)); """ typ = (DataFrame['high'] + DataFrame['low'] + DataFrame['close']) / 3 cci = (typ - MA(typ, N)) / (0.015 * AVEDEV(typ, N)) a = 100 b = -100 return pd.DataFrame({'CCI': cci, 'a': a, 'b': b})
def _expectation(p, mean, none1, none2, none3, nghp=None): """ Compute the expectation: <m(X)>_p(X) - m(x) :: Linear, Identity or Constant mean function :return: NxQ """ return mean(p.mu)
def function[_expectation, parameter[p, mean, none1, none2, none3, nghp]]: constant[ Compute the expectation: <m(X)>_p(X) - m(x) :: Linear, Identity or Constant mean function :return: NxQ ] return[call[name[mean], parameter[name[p].mu]]]
keyword[def] identifier[_expectation] ( identifier[p] , identifier[mean] , identifier[none1] , identifier[none2] , identifier[none3] , identifier[nghp] = keyword[None] ): literal[string] keyword[return] identifier[mean] ( identifier[p] . identifier[mu] )
def _expectation(p, mean, none1, none2, none3, nghp=None): """ Compute the expectation: <m(X)>_p(X) - m(x) :: Linear, Identity or Constant mean function :return: NxQ """ return mean(p.mu)
def _create_api_uri(self, *parts): """Internal helper for creating fully qualified endpoint URIs.""" return urljoin(self.BASE_API_URI, '/'.join(imap(quote, parts)))
def function[_create_api_uri, parameter[self]]: constant[Internal helper for creating fully qualified endpoint URIs.] return[call[name[urljoin], parameter[name[self].BASE_API_URI, call[constant[/].join, parameter[call[name[imap], parameter[name[quote], name[parts]]]]]]]]
keyword[def] identifier[_create_api_uri] ( identifier[self] ,* identifier[parts] ): literal[string] keyword[return] identifier[urljoin] ( identifier[self] . identifier[BASE_API_URI] , literal[string] . identifier[join] ( identifier[imap] ( identifier[quote] , identifier[parts] )))
def _create_api_uri(self, *parts): """Internal helper for creating fully qualified endpoint URIs.""" return urljoin(self.BASE_API_URI, '/'.join(imap(quote, parts)))
def update_rbac_policy(self, rbac_policy_id, body=None): """Update a RBAC policy.""" return self.put(self.rbac_policy_path % rbac_policy_id, body=body)
def function[update_rbac_policy, parameter[self, rbac_policy_id, body]]: constant[Update a RBAC policy.] return[call[name[self].put, parameter[binary_operation[name[self].rbac_policy_path <ast.Mod object at 0x7da2590d6920> name[rbac_policy_id]]]]]
keyword[def] identifier[update_rbac_policy] ( identifier[self] , identifier[rbac_policy_id] , identifier[body] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[put] ( identifier[self] . identifier[rbac_policy_path] % identifier[rbac_policy_id] , identifier[body] = identifier[body] )
def update_rbac_policy(self, rbac_policy_id, body=None): """Update a RBAC policy.""" return self.put(self.rbac_policy_path % rbac_policy_id, body=body)
def image(self, x, y, image, width=None, height=None): """ Inserts an image into the drawing, position by its top-left corner. :param int x: The x position to insert the image. :param int y: The y position to insert the image. :param str image: The file path or a PhotoImage or PIL.Image object. :param str width: The width to scale the image too, setting to `None` will use the actual width of the Image. Default to `None`. :param str height: The width to scale the image too, setting to `None` will use the actual height of the Image. Default to `None`. :return: The id of the image. """ # load the image and add to the dict (otherwise tk destroys the reference to them!) _image = utils.GUIZeroImage(image, width, height) id = self.tk.create_image(x, y, image=_image.tk_image, anchor="nw") self._images[id] = _image return id
def function[image, parameter[self, x, y, image, width, height]]: constant[ Inserts an image into the drawing, position by its top-left corner. :param int x: The x position to insert the image. :param int y: The y position to insert the image. :param str image: The file path or a PhotoImage or PIL.Image object. :param str width: The width to scale the image too, setting to `None` will use the actual width of the Image. Default to `None`. :param str height: The width to scale the image too, setting to `None` will use the actual height of the Image. Default to `None`. :return: The id of the image. ] variable[_image] assign[=] call[name[utils].GUIZeroImage, parameter[name[image], name[width], name[height]]] variable[id] assign[=] call[name[self].tk.create_image, parameter[name[x], name[y]]] call[name[self]._images][name[id]] assign[=] name[_image] return[name[id]]
keyword[def] identifier[image] ( identifier[self] , identifier[x] , identifier[y] , identifier[image] , identifier[width] = keyword[None] , identifier[height] = keyword[None] ): literal[string] identifier[_image] = identifier[utils] . identifier[GUIZeroImage] ( identifier[image] , identifier[width] , identifier[height] ) identifier[id] = identifier[self] . identifier[tk] . identifier[create_image] ( identifier[x] , identifier[y] , identifier[image] = identifier[_image] . identifier[tk_image] , identifier[anchor] = literal[string] ) identifier[self] . identifier[_images] [ identifier[id] ]= identifier[_image] keyword[return] identifier[id]
def image(self, x, y, image, width=None, height=None): """ Inserts an image into the drawing, position by its top-left corner. :param int x: The x position to insert the image. :param int y: The y position to insert the image. :param str image: The file path or a PhotoImage or PIL.Image object. :param str width: The width to scale the image too, setting to `None` will use the actual width of the Image. Default to `None`. :param str height: The width to scale the image too, setting to `None` will use the actual height of the Image. Default to `None`. :return: The id of the image. """ # load the image and add to the dict (otherwise tk destroys the reference to them!) _image = utils.GUIZeroImage(image, width, height) id = self.tk.create_image(x, y, image=_image.tk_image, anchor='nw') self._images[id] = _image return id
def same_player(self, other): """ Compares name and color. Returns True if both are owned by the same player. """ return self.name == other.name \ and self.color == other.color
def function[same_player, parameter[self, other]]: constant[ Compares name and color. Returns True if both are owned by the same player. ] return[<ast.BoolOp object at 0x7da2041d8b50>]
keyword[def] identifier[same_player] ( identifier[self] , identifier[other] ): literal[string] keyword[return] identifier[self] . identifier[name] == identifier[other] . identifier[name] keyword[and] identifier[self] . identifier[color] == identifier[other] . identifier[color]
def same_player(self, other): """ Compares name and color. Returns True if both are owned by the same player. """ return self.name == other.name and self.color == other.color
def _load_tasks(self, tasks, vars={}, additional_conditions=[]): ''' handle task and handler include statements ''' results = [] if tasks is None: # support empty handler files, and the like. tasks = [] for x in tasks: task_vars = self.vars.copy() task_vars.update(vars) if 'include' in x: tokens = shlex.split(x['include']) items = [''] included_additional_conditions = list(additional_conditions) for k in x: if k.startswith("with_"): plugin_name = k[5:] if plugin_name not in utils.plugins.lookup_loader: raise errors.AnsibleError("cannot find lookup plugin named %s for usage in with_%s" % (plugin_name, plugin_name)) terms = utils.template_ds(self.basedir, x[k], task_vars) items = utils.plugins.lookup_loader.get(plugin_name, basedir=self.basedir, runner=None).run(terms, inject=task_vars) elif k.startswith("when_"): included_additional_conditions.append(utils.compile_when_to_only_if("%s %s" % (k[5:], x[k]))) elif k in ("include", "vars", "only_if"): pass else: raise errors.AnsibleError("parse error: task includes cannot be used with other directives: %s" % k) if 'vars' in x: task_vars.update(x['vars']) if 'only_if' in x: included_additional_conditions.append(x['only_if']) for item in items: mv = task_vars.copy() mv['item'] = item for t in tokens[1:]: (k,v) = t.split("=", 1) mv[k] = utils.template_ds(self.basedir, v, mv) include_file = utils.template(self.basedir, tokens[0], mv) data = utils.parse_yaml_from_file(utils.path_dwim(self.basedir, include_file)) results += self._load_tasks(data, mv, included_additional_conditions) elif type(x) == dict: results.append(Task(self,x,module_vars=task_vars, additional_conditions=additional_conditions)) else: raise Exception("unexpected task type") for x in results: if self.tags is not None: x.tags.extend(self.tags) return results
def function[_load_tasks, parameter[self, tasks, vars, additional_conditions]]: constant[ handle task and handler include statements ] variable[results] assign[=] list[[]] if compare[name[tasks] is constant[None]] begin[:] variable[tasks] assign[=] list[[]] for taget[name[x]] in starred[name[tasks]] begin[:] variable[task_vars] assign[=] call[name[self].vars.copy, parameter[]] call[name[task_vars].update, parameter[name[vars]]] if compare[constant[include] in name[x]] begin[:] variable[tokens] assign[=] call[name[shlex].split, parameter[call[name[x]][constant[include]]]] variable[items] assign[=] list[[<ast.Constant object at 0x7da1b1351ea0>]] variable[included_additional_conditions] assign[=] call[name[list], parameter[name[additional_conditions]]] for taget[name[k]] in starred[name[x]] begin[:] if call[name[k].startswith, parameter[constant[with_]]] begin[:] variable[plugin_name] assign[=] call[name[k]][<ast.Slice object at 0x7da1b1352650>] if compare[name[plugin_name] <ast.NotIn object at 0x7da2590d7190> name[utils].plugins.lookup_loader] begin[:] <ast.Raise object at 0x7da1b1352020> variable[terms] assign[=] call[name[utils].template_ds, parameter[name[self].basedir, call[name[x]][name[k]], name[task_vars]]] variable[items] assign[=] call[call[name[utils].plugins.lookup_loader.get, parameter[name[plugin_name]]].run, parameter[name[terms]]] if compare[constant[vars] in name[x]] begin[:] call[name[task_vars].update, parameter[call[name[x]][constant[vars]]]] if compare[constant[only_if] in name[x]] begin[:] call[name[included_additional_conditions].append, parameter[call[name[x]][constant[only_if]]]] for taget[name[item]] in starred[name[items]] begin[:] variable[mv] assign[=] call[name[task_vars].copy, parameter[]] call[name[mv]][constant[item]] assign[=] name[item] for taget[name[t]] in starred[call[name[tokens]][<ast.Slice object at 0x7da1b15b3250>]] begin[:] <ast.Tuple object at 0x7da1b15b2920> assign[=] call[name[t].split, parameter[constant[=], constant[1]]] 
call[name[mv]][name[k]] assign[=] call[name[utils].template_ds, parameter[name[self].basedir, name[v], name[mv]]] variable[include_file] assign[=] call[name[utils].template, parameter[name[self].basedir, call[name[tokens]][constant[0]], name[mv]]] variable[data] assign[=] call[name[utils].parse_yaml_from_file, parameter[call[name[utils].path_dwim, parameter[name[self].basedir, name[include_file]]]]] <ast.AugAssign object at 0x7da1b133c8b0> for taget[name[x]] in starred[name[results]] begin[:] if compare[name[self].tags is_not constant[None]] begin[:] call[name[x].tags.extend, parameter[name[self].tags]] return[name[results]]
keyword[def] identifier[_load_tasks] ( identifier[self] , identifier[tasks] , identifier[vars] ={}, identifier[additional_conditions] =[]): literal[string] identifier[results] =[] keyword[if] identifier[tasks] keyword[is] keyword[None] : identifier[tasks] =[] keyword[for] identifier[x] keyword[in] identifier[tasks] : identifier[task_vars] = identifier[self] . identifier[vars] . identifier[copy] () identifier[task_vars] . identifier[update] ( identifier[vars] ) keyword[if] literal[string] keyword[in] identifier[x] : identifier[tokens] = identifier[shlex] . identifier[split] ( identifier[x] [ literal[string] ]) identifier[items] =[ literal[string] ] identifier[included_additional_conditions] = identifier[list] ( identifier[additional_conditions] ) keyword[for] identifier[k] keyword[in] identifier[x] : keyword[if] identifier[k] . identifier[startswith] ( literal[string] ): identifier[plugin_name] = identifier[k] [ literal[int] :] keyword[if] identifier[plugin_name] keyword[not] keyword[in] identifier[utils] . identifier[plugins] . identifier[lookup_loader] : keyword[raise] identifier[errors] . identifier[AnsibleError] ( literal[string] %( identifier[plugin_name] , identifier[plugin_name] )) identifier[terms] = identifier[utils] . identifier[template_ds] ( identifier[self] . identifier[basedir] , identifier[x] [ identifier[k] ], identifier[task_vars] ) identifier[items] = identifier[utils] . identifier[plugins] . identifier[lookup_loader] . identifier[get] ( identifier[plugin_name] , identifier[basedir] = identifier[self] . identifier[basedir] , identifier[runner] = keyword[None] ). identifier[run] ( identifier[terms] , identifier[inject] = identifier[task_vars] ) keyword[elif] identifier[k] . identifier[startswith] ( literal[string] ): identifier[included_additional_conditions] . identifier[append] ( identifier[utils] . 
identifier[compile_when_to_only_if] ( literal[string] %( identifier[k] [ literal[int] :], identifier[x] [ identifier[k] ]))) keyword[elif] identifier[k] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[pass] keyword[else] : keyword[raise] identifier[errors] . identifier[AnsibleError] ( literal[string] % identifier[k] ) keyword[if] literal[string] keyword[in] identifier[x] : identifier[task_vars] . identifier[update] ( identifier[x] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[x] : identifier[included_additional_conditions] . identifier[append] ( identifier[x] [ literal[string] ]) keyword[for] identifier[item] keyword[in] identifier[items] : identifier[mv] = identifier[task_vars] . identifier[copy] () identifier[mv] [ literal[string] ]= identifier[item] keyword[for] identifier[t] keyword[in] identifier[tokens] [ literal[int] :]: ( identifier[k] , identifier[v] )= identifier[t] . identifier[split] ( literal[string] , literal[int] ) identifier[mv] [ identifier[k] ]= identifier[utils] . identifier[template_ds] ( identifier[self] . identifier[basedir] , identifier[v] , identifier[mv] ) identifier[include_file] = identifier[utils] . identifier[template] ( identifier[self] . identifier[basedir] , identifier[tokens] [ literal[int] ], identifier[mv] ) identifier[data] = identifier[utils] . identifier[parse_yaml_from_file] ( identifier[utils] . identifier[path_dwim] ( identifier[self] . identifier[basedir] , identifier[include_file] )) identifier[results] += identifier[self] . identifier[_load_tasks] ( identifier[data] , identifier[mv] , identifier[included_additional_conditions] ) keyword[elif] identifier[type] ( identifier[x] )== identifier[dict] : identifier[results] . 
identifier[append] ( identifier[Task] ( identifier[self] , identifier[x] , identifier[module_vars] = identifier[task_vars] , identifier[additional_conditions] = identifier[additional_conditions] )) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[for] identifier[x] keyword[in] identifier[results] : keyword[if] identifier[self] . identifier[tags] keyword[is] keyword[not] keyword[None] : identifier[x] . identifier[tags] . identifier[extend] ( identifier[self] . identifier[tags] ) keyword[return] identifier[results]
def _load_tasks(self, tasks, vars={}, additional_conditions=[]): """ handle task and handler include statements """ results = [] if tasks is None: # support empty handler files, and the like. tasks = [] # depends on [control=['if'], data=['tasks']] for x in tasks: task_vars = self.vars.copy() task_vars.update(vars) if 'include' in x: tokens = shlex.split(x['include']) items = [''] included_additional_conditions = list(additional_conditions) for k in x: if k.startswith('with_'): plugin_name = k[5:] if plugin_name not in utils.plugins.lookup_loader: raise errors.AnsibleError('cannot find lookup plugin named %s for usage in with_%s' % (plugin_name, plugin_name)) # depends on [control=['if'], data=['plugin_name']] terms = utils.template_ds(self.basedir, x[k], task_vars) items = utils.plugins.lookup_loader.get(plugin_name, basedir=self.basedir, runner=None).run(terms, inject=task_vars) # depends on [control=['if'], data=[]] elif k.startswith('when_'): included_additional_conditions.append(utils.compile_when_to_only_if('%s %s' % (k[5:], x[k]))) # depends on [control=['if'], data=[]] elif k in ('include', 'vars', 'only_if'): pass # depends on [control=['if'], data=[]] else: raise errors.AnsibleError('parse error: task includes cannot be used with other directives: %s' % k) # depends on [control=['for'], data=['k']] if 'vars' in x: task_vars.update(x['vars']) # depends on [control=['if'], data=['x']] if 'only_if' in x: included_additional_conditions.append(x['only_if']) # depends on [control=['if'], data=['x']] for item in items: mv = task_vars.copy() mv['item'] = item for t in tokens[1:]: (k, v) = t.split('=', 1) mv[k] = utils.template_ds(self.basedir, v, mv) # depends on [control=['for'], data=['t']] include_file = utils.template(self.basedir, tokens[0], mv) data = utils.parse_yaml_from_file(utils.path_dwim(self.basedir, include_file)) results += self._load_tasks(data, mv, included_additional_conditions) # depends on [control=['for'], data=['item']] # depends on 
[control=['if'], data=['x']] elif type(x) == dict: results.append(Task(self, x, module_vars=task_vars, additional_conditions=additional_conditions)) # depends on [control=['if'], data=[]] else: raise Exception('unexpected task type') # depends on [control=['for'], data=['x']] for x in results: if self.tags is not None: x.tags.extend(self.tags) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] return results
def getDrivingDistance2D(self, vehID, x, y): """getDrivingDistance2D(string, double, double) -> integer Return the distance to the given network position along the vehicles route. """ self._connection._beginMessage( tc.CMD_GET_VEHICLE_VARIABLE, tc.DISTANCE_REQUEST, vehID, 1 + 4 + 1 + 8 + 8 + 1) self._connection._string += struct.pack("!BiBddB", tc.TYPE_COMPOUND, 2, tc.POSITION_2D, x, y, tc.REQUEST_DRIVINGDIST) return self._connection._checkResult(tc.CMD_GET_VEHICLE_VARIABLE, tc.DISTANCE_REQUEST, vehID).readDouble()
def function[getDrivingDistance2D, parameter[self, vehID, x, y]]: constant[getDrivingDistance2D(string, double, double) -> integer Return the distance to the given network position along the vehicles route. ] call[name[self]._connection._beginMessage, parameter[name[tc].CMD_GET_VEHICLE_VARIABLE, name[tc].DISTANCE_REQUEST, name[vehID], binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[1] + constant[4]] + constant[1]] + constant[8]] + constant[8]] + constant[1]]]] <ast.AugAssign object at 0x7da1b09efac0> return[call[call[name[self]._connection._checkResult, parameter[name[tc].CMD_GET_VEHICLE_VARIABLE, name[tc].DISTANCE_REQUEST, name[vehID]]].readDouble, parameter[]]]
keyword[def] identifier[getDrivingDistance2D] ( identifier[self] , identifier[vehID] , identifier[x] , identifier[y] ): literal[string] identifier[self] . identifier[_connection] . identifier[_beginMessage] ( identifier[tc] . identifier[CMD_GET_VEHICLE_VARIABLE] , identifier[tc] . identifier[DISTANCE_REQUEST] , identifier[vehID] , literal[int] + literal[int] + literal[int] + literal[int] + literal[int] + literal[int] ) identifier[self] . identifier[_connection] . identifier[_string] += identifier[struct] . identifier[pack] ( literal[string] , identifier[tc] . identifier[TYPE_COMPOUND] , literal[int] , identifier[tc] . identifier[POSITION_2D] , identifier[x] , identifier[y] , identifier[tc] . identifier[REQUEST_DRIVINGDIST] ) keyword[return] identifier[self] . identifier[_connection] . identifier[_checkResult] ( identifier[tc] . identifier[CMD_GET_VEHICLE_VARIABLE] , identifier[tc] . identifier[DISTANCE_REQUEST] , identifier[vehID] ). identifier[readDouble] ()
def getDrivingDistance2D(self, vehID, x, y): """getDrivingDistance2D(string, double, double) -> integer Return the distance to the given network position along the vehicles route. """ self._connection._beginMessage(tc.CMD_GET_VEHICLE_VARIABLE, tc.DISTANCE_REQUEST, vehID, 1 + 4 + 1 + 8 + 8 + 1) self._connection._string += struct.pack('!BiBddB', tc.TYPE_COMPOUND, 2, tc.POSITION_2D, x, y, tc.REQUEST_DRIVINGDIST) return self._connection._checkResult(tc.CMD_GET_VEHICLE_VARIABLE, tc.DISTANCE_REQUEST, vehID).readDouble()
def required(self, fn):
    """Request decorator. Forces authentication."""
    @functools.wraps(fn)
    def decorated(*args, **kwargs):
        # Let the request through when the caller is authenticated, or when
        # the request belongs to the auth blueprint itself - forcing a login
        # redirect during the authentication flow would loop forever.
        if self._check_auth() or request.blueprint == self.blueprint.name:
            return fn(*args, **kwargs)
        return redirect(url_for("%s.login" % self.blueprint.name,
                                next=request.url))
    return decorated
def function[required, parameter[self, fn]]: constant[Request decorator. Forces authentication.] def function[decorated, parameter[]]: if <ast.BoolOp object at 0x7da204344ee0> begin[:] return[call[name[redirect], parameter[call[name[url_for], parameter[binary_operation[constant[%s.login] <ast.Mod object at 0x7da2590d6920> name[self].blueprint.name]]]]]] return[call[name[fn], parameter[<ast.Starred object at 0x7da204344850>]]] return[name[decorated]]
keyword[def] identifier[required] ( identifier[self] , identifier[fn] ): literal[string] @ identifier[functools] . identifier[wraps] ( identifier[fn] ) keyword[def] identifier[decorated] (* identifier[args] ,** identifier[kwargs] ): keyword[if] ( keyword[not] identifier[self] . identifier[_check_auth] () keyword[and] identifier[request] . identifier[blueprint] != identifier[self] . identifier[blueprint] . identifier[name] ): keyword[return] identifier[redirect] ( identifier[url_for] ( literal[string] % identifier[self] . identifier[blueprint] . identifier[name] , identifier[next] = identifier[request] . identifier[url] )) keyword[return] identifier[fn] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[decorated]
def required(self, fn): """Request decorator. Forces authentication.""" @functools.wraps(fn) def decorated(*args, **kwargs): if not self._check_auth() and request.blueprint != self.blueprint.name: # Don't try to force authentication if the request is part # of the authentication process - otherwise we end up in a # loop. return redirect(url_for('%s.login' % self.blueprint.name, next=request.url)) # depends on [control=['if'], data=[]] return fn(*args, **kwargs) return decorated
def __convert_key(expression):
    """Converts keys in YAML that reference other keys.

    A key of the form ``X!<python expression>Y`` (second character is ``!``;
    first and last characters are stripped) is evaluated and replaced by the
    result; any other value is returned unchanged.

    :param expression: the raw key read from YAML
    :returns: the evaluated key, or ``expression`` unchanged
    """
    if isinstance(expression, str) and len(expression) > 2 and expression[1] == '!':
        # SECURITY: eval() executes arbitrary Python from the YAML document.
        # This is only acceptable for fully trusted configuration files.
        expression = eval(expression[2:-1])
    return expression
def function[__convert_key, parameter[expression]]: constant[Converts keys in YAML that reference other keys. ] if <ast.BoolOp object at 0x7da20c7cad70> begin[:] variable[expression] assign[=] call[name[eval], parameter[call[name[expression]][<ast.Slice object at 0x7da20c7c8f40>]]] return[name[expression]]
keyword[def] identifier[__convert_key] ( identifier[expression] ): literal[string] keyword[if] identifier[type] ( identifier[expression] ) keyword[is] identifier[str] keyword[and] identifier[len] ( identifier[expression] )> literal[int] keyword[and] identifier[expression] [ literal[int] ]== literal[string] : identifier[expression] = identifier[eval] ( identifier[expression] [ literal[int] :- literal[int] ]) keyword[return] identifier[expression]
def __convert_key(expression): """Converts keys in YAML that reference other keys. """ if type(expression) is str and len(expression) > 2 and (expression[1] == '!'): expression = eval(expression[2:-1]) # depends on [control=['if'], data=[]] return expression
def lifetimes(self):
    r"""
    Lifetimes of states of the hidden transition matrix

    Returns
    -------
    l : ndarray(nstates)
        state lifetimes in units of the input trajectory time step,
        defined by :math:`-tau / ln | p_{ii} |, i = 1,...,nstates`, where
        :math:`p_{ii}` are the diagonal entries of the hidden transition
        matrix.
    """
    # Self-transition probabilities sit on the diagonal; the lifetime of
    # each state follows from the exponential decay -tau / ln(p_ii).
    self_transition_probs = np.diag(self.transition_matrix)
    return -self._lag / np.log(self_transition_probs)
def function[lifetimes, parameter[self]]: constant[ Lifetimes of states of the hidden transition matrix Returns ------- l : ndarray(nstates) state lifetimes in units of the input trajectory time step, defined by :math:`-tau / ln | p_{ii} |, i = 1,...,nstates`, where :math:`p_{ii}` are the diagonal entries of the hidden transition matrix. ] return[binary_operation[<ast.UnaryOp object at 0x7da204961ba0> / call[name[np].log, parameter[call[name[np].diag, parameter[name[self].transition_matrix]]]]]]
keyword[def] identifier[lifetimes] ( identifier[self] ): literal[string] keyword[return] - identifier[self] . identifier[_lag] / identifier[np] . identifier[log] ( identifier[np] . identifier[diag] ( identifier[self] . identifier[transition_matrix] ))
def lifetimes(self): """ Lifetimes of states of the hidden transition matrix Returns ------- l : ndarray(nstates) state lifetimes in units of the input trajectory time step, defined by :math:`-tau / ln | p_{ii} |, i = 1,...,nstates`, where :math:`p_{ii}` are the diagonal entries of the hidden transition matrix. """ return -self._lag / np.log(np.diag(self.transition_matrix))
def dropna_columns(data: pd.DataFrame, max_na_values: float = 0.15):
    """
    Remove, in place, columns with more NA values than threshold level.

    A column is dropped when its proportion of NA values is greater than or
    equal to ``max_na_values``.

    :param data: frame to prune; modified in place
    :param max_na_values: proportion threshold of max na values (0.0-1.0)
    :return: None (``data`` is mutated via ``drop(..., inplace=True)``)
    """
    size = data.shape[0]
    # Proportion of NAs per column; select the columns at/above the threshold.
    na_ratio = data.isnull().sum() / size
    over_threshold = na_ratio >= max_na_values
    data.drop(over_threshold[over_threshold].index, axis=1, inplace=True)
def function[dropna_columns, parameter[data, max_na_values]]: constant[ Remove columns with more NA values than threshold level :param data: :param max_na_values: proportion threshold of max na values :return: ] variable[size] assign[=] call[name[data].shape][constant[0]] variable[df_na] assign[=] compare[binary_operation[call[call[name[data].isnull, parameter[]].sum, parameter[]] / name[size]] greater_or_equal[>=] name[max_na_values]] call[name[data].drop, parameter[call[name[df_na]][name[df_na]].index]]
keyword[def] identifier[dropna_columns] ( identifier[data] : identifier[pd] . identifier[DataFrame] , identifier[max_na_values] : identifier[int] = literal[int] ): literal[string] identifier[size] = identifier[data] . identifier[shape] [ literal[int] ] identifier[df_na] =( identifier[data] . identifier[isnull] (). identifier[sum] ()/ identifier[size] )>= identifier[max_na_values] identifier[data] . identifier[drop] ( identifier[df_na] [ identifier[df_na] ]. identifier[index] , identifier[axis] = literal[int] , identifier[inplace] = keyword[True] )
def dropna_columns(data: pd.DataFrame, max_na_values: int=0.15): """ Remove columns with more NA values than threshold level :param data: :param max_na_values: proportion threshold of max na values :return: """ size = data.shape[0] df_na = data.isnull().sum() / size >= max_na_values data.drop(df_na[df_na].index, axis=1, inplace=True)
def spkapp(targ, et, ref, sobs, abcorr):
    """
    Deprecated: This routine has been superseded by :func:`spkaps`. This
    routine is supported for purposes of backward compatibility only.

    Return the state (position and velocity) of a target body
    relative to an observer, optionally corrected for light time and
    stellar aberration.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkapp_c.html

    :param targ: Target body.
    :type targ: int
    :param et: Observer epoch.
    :type et: float
    :param ref: Inertial reference frame of observer's state.
    :type ref: str
    :param sobs: State of observer wrt. solar system barycenter.
    :type sobs: 6-Element Array of floats
    :param abcorr: Aberration correction flag.
    :type abcorr: str
    :return: State of target, One way light time between observer and target.
    :rtype: tuple
    """
    # Convert the Python arguments into the C representations CSPICE expects.
    targ_c = ctypes.c_int(targ)
    et_c = ctypes.c_double(et)
    ref_c = stypes.stringToCharP(ref)
    abcorr_c = stypes.stringToCharP(abcorr)
    sobs_c = stypes.toDoubleVector(sobs)
    # Output buffers filled by the CSPICE call below.
    starg = stypes.emptyDoubleVector(6)
    lt = ctypes.c_double()
    libspice.spkapp_c(targ_c, et_c, ref_c, sobs_c, abcorr_c, starg,
                      ctypes.byref(lt))
    return stypes.cVectorToPython(starg), lt.value
def function[spkapp, parameter[targ, et, ref, sobs, abcorr]]: constant[ Deprecated: This routine has been superseded by :func:`spkaps`. This routine is supported for purposes of backward compatibility only. Return the state (position and velocity) of a target body relative to an observer, optionally corrected for light time and stellar aberration. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkapp_c.html :param targ: Target body. :type targ: int :param et: Observer epoch. :type et: float :param ref: Inertial reference frame of observer's state. :type ref: str :param sobs: State of observer wrt. solar system barycenter. :type sobs: 6-Element Array of floats :param abcorr: Aberration correction flag. :type abcorr: str :return: State of target, One way light time between observer and target. :rtype: tuple ] variable[targ] assign[=] call[name[ctypes].c_int, parameter[name[targ]]] variable[et] assign[=] call[name[ctypes].c_double, parameter[name[et]]] variable[ref] assign[=] call[name[stypes].stringToCharP, parameter[name[ref]]] variable[abcorr] assign[=] call[name[stypes].stringToCharP, parameter[name[abcorr]]] variable[sobs] assign[=] call[name[stypes].toDoubleVector, parameter[name[sobs]]] variable[starg] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[6]]] variable[lt] assign[=] call[name[ctypes].c_double, parameter[]] call[name[libspice].spkapp_c, parameter[name[targ], name[et], name[ref], name[sobs], name[abcorr], name[starg], call[name[ctypes].byref, parameter[name[lt]]]]] return[tuple[[<ast.Call object at 0x7da20c76ece0>, <ast.Attribute object at 0x7da20c76f490>]]]
keyword[def] identifier[spkapp] ( identifier[targ] , identifier[et] , identifier[ref] , identifier[sobs] , identifier[abcorr] ): literal[string] identifier[targ] = identifier[ctypes] . identifier[c_int] ( identifier[targ] ) identifier[et] = identifier[ctypes] . identifier[c_double] ( identifier[et] ) identifier[ref] = identifier[stypes] . identifier[stringToCharP] ( identifier[ref] ) identifier[abcorr] = identifier[stypes] . identifier[stringToCharP] ( identifier[abcorr] ) identifier[sobs] = identifier[stypes] . identifier[toDoubleVector] ( identifier[sobs] ) identifier[starg] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] ) identifier[lt] = identifier[ctypes] . identifier[c_double] () identifier[libspice] . identifier[spkapp_c] ( identifier[targ] , identifier[et] , identifier[ref] , identifier[sobs] , identifier[abcorr] , identifier[starg] , identifier[ctypes] . identifier[byref] ( identifier[lt] )) keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[starg] ), identifier[lt] . identifier[value]
def spkapp(targ, et, ref, sobs, abcorr): """ Deprecated: This routine has been superseded by :func:`spkaps`. This routine is supported for purposes of backward compatibility only. Return the state (position and velocity) of a target body relative to an observer, optionally corrected for light time and stellar aberration. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spkapp_c.html :param targ: Target body. :type targ: int :param et: Observer epoch. :type et: float :param ref: Inertial reference frame of observer's state. :type ref: str :param sobs: State of observer wrt. solar system barycenter. :type sobs: 6-Element Array of floats :param abcorr: Aberration correction flag. :type abcorr: str :return: State of target, One way light time between observer and target. :rtype: tuple """ targ = ctypes.c_int(targ) et = ctypes.c_double(et) ref = stypes.stringToCharP(ref) abcorr = stypes.stringToCharP(abcorr) sobs = stypes.toDoubleVector(sobs) starg = stypes.emptyDoubleVector(6) lt = ctypes.c_double() libspice.spkapp_c(targ, et, ref, sobs, abcorr, starg, ctypes.byref(lt)) return (stypes.cVectorToPython(starg), lt.value)
def partition(graph):
    """ Splits unconnected subgraphs.

    For each node in the graph, make a list of its id and all directly
    connected id's. If one of the nodes in this list intersects with a
    subgraph, they are all part of that subgraph. Otherwise, this list is
    part of a new subgraph. Return a list of subgraphs sorted by size
    (biggest-first).
    """
    groups = []
    for node in graph.nodes:
        connected_ids = [m.id for m in flatten(node)]
        attached = False
        for i in range(len(groups)):
            if len(intersection(groups[i], connected_ids)) > 0:
                groups[i] = union(groups[i], connected_ids)
                attached = True
                break
        if not attached:
            groups.append(connected_ids)
    # If 1 is directly connected to 2 and 3,
    # and 4 is directly connected to 5 and 6, these are separate subgraphs.
    # If we later find that 7 is directly connected to 3 and 6,
    # it will be attached to [1, 2, 3] yielding
    # [1, 2, 3, 6, 7] and [4, 5, 6].
    # These two subgraphs are connected and need to be merged.
    merged = []
    for i in range(len(groups)):
        merged.append(groups[i])
        for j in range(i + 1, len(groups)):
            if len(intersection(groups[i], groups[j])) > 0:
                # merged[-1] aliases groups[i], so this also grows groups[i]
                # for the remaining intersection checks (as before).
                merged[-1].extend(groups[j])
                groups[j] = []
    groups = merged
    subgraphs = [graph.sub(ids, distance=0) for ids in groups]
    # Biggest-first. key-based sorting replaces the old cmp-style
    # g.sort(lambda a, b: ...), which is a TypeError on Python 3; the stable
    # sort yields the same ordering on both Python 2 and 3.
    subgraphs.sort(key=len, reverse=True)
    return subgraphs
def function[partition, parameter[graph]]: constant[ Splits unconnected subgraphs. For each node in the graph, make a list of its id and all directly connected id's. If one of the nodes in this list intersects with a subgraph, they are all part of that subgraph. Otherwise, this list is part of a new subgraph. Return a list of subgraphs sorted by size (biggest-first). ] variable[g] assign[=] list[[]] for taget[name[n]] in starred[name[graph].nodes] begin[:] variable[c] assign[=] <ast.ListComp object at 0x7da18dc072b0> variable[f] assign[=] constant[False] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[g]]]]]] begin[:] if compare[call[name[len], parameter[call[name[intersection], parameter[call[name[g]][name[i]], name[c]]]]] greater[>] constant[0]] begin[:] call[name[g]][name[i]] assign[=] call[name[union], parameter[call[name[g]][name[i]], name[c]]] variable[f] assign[=] constant[True] break if <ast.UnaryOp object at 0x7da18dc06a70> begin[:] call[name[g].append, parameter[name[c]]] variable[merged] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[g]]]]]] begin[:] call[name[merged].append, parameter[call[name[g]][name[i]]]] for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[i] + constant[1]], call[name[len], parameter[name[g]]]]]] begin[:] if compare[call[name[len], parameter[call[name[intersection], parameter[call[name[g]][name[i]], call[name[g]][name[j]]]]]] greater[>] constant[0]] begin[:] call[call[name[merged]][<ast.UnaryOp object at 0x7da18dc072e0>].extend, parameter[call[name[g]][name[j]]]] call[name[g]][name[j]] assign[=] list[[]] variable[g] assign[=] name[merged] variable[g] assign[=] <ast.ListComp object at 0x7da18dc06b00> call[name[g].sort, parameter[<ast.Lambda object at 0x7da18dc070d0>]] return[name[g]]
keyword[def] identifier[partition] ( identifier[graph] ): literal[string] identifier[g] =[] keyword[for] identifier[n] keyword[in] identifier[graph] . identifier[nodes] : identifier[c] =[ identifier[n] . identifier[id] keyword[for] identifier[n] keyword[in] identifier[flatten] ( identifier[n] )] identifier[f] = keyword[False] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[g] )): keyword[if] identifier[len] ( identifier[intersection] ( identifier[g] [ identifier[i] ], identifier[c] ))> literal[int] : identifier[g] [ identifier[i] ]= identifier[union] ( identifier[g] [ identifier[i] ], identifier[c] ) identifier[f] = keyword[True] keyword[break] keyword[if] keyword[not] identifier[f] : identifier[g] . identifier[append] ( identifier[c] ) identifier[merged] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[g] )): identifier[merged] . identifier[append] ( identifier[g] [ identifier[i] ]) keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[i] + literal[int] , identifier[len] ( identifier[g] )): keyword[if] identifier[len] ( identifier[intersection] ( identifier[g] [ identifier[i] ], identifier[g] [ identifier[j] ]))> literal[int] : identifier[merged] [- literal[int] ]. identifier[extend] ( identifier[g] [ identifier[j] ]) identifier[g] [ identifier[j] ]=[] identifier[g] = identifier[merged] identifier[g] =[ identifier[graph] . identifier[sub] ( identifier[g] , identifier[distance] = literal[int] ) keyword[for] identifier[g] keyword[in] identifier[g] ] identifier[g] . identifier[sort] ( keyword[lambda] identifier[a] , identifier[b] : identifier[len] ( identifier[b] )- identifier[len] ( identifier[a] )) keyword[return] identifier[g]
def partition(graph): """ Splits unconnected subgraphs. For each node in the graph, make a list of its id and all directly connected id's. If one of the nodes in this list intersects with a subgraph, they are all part of that subgraph. Otherwise, this list is part of a new subgraph. Return a list of subgraphs sorted by size (biggest-first). """ g = [] for n in graph.nodes: c = [n.id for n in flatten(n)] f = False for i in range(len(g)): if len(intersection(g[i], c)) > 0: g[i] = union(g[i], c) f = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] if not f: g.append(c) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']] # If 1 is directly connected to 2 and 3, # and 4 is directly connected to 5 and 6, these are separate subgraphs. # If we later find that 7 is directly connected to 3 and 6, # it will be attached to [1, 2, 3] yielding # [1, 2, 3, 6, 7] and [4, 5, 6]. # These two subgraphs are connected and need to be merged. merged = [] for i in range(len(g)): merged.append(g[i]) for j in range(i + 1, len(g)): if len(intersection(g[i], g[j])) > 0: merged[-1].extend(g[j]) g[j] = [] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] g = merged g = [graph.sub(g, distance=0) for g in g] g.sort(lambda a, b: len(b) - len(a)) return g
def translate(self):
    """Gets the value in the current language, or in the configured fallback
    language, coerced to ``int``.

    Returns ``None`` when there is no translation or the stored text is
    blank/whitespace-only.
    """
    value = super().translate()
    # Guard clauses: no value at all, or an effectively empty string.
    if value is None:
        return None
    if isinstance(value, str) and not value.strip():
        return None
    return int(value)
def function[translate, parameter[self]]: constant[Gets the value in the current language, or in the configured fallbck language.] variable[value] assign[=] call[call[name[super], parameter[]].translate, parameter[]] if <ast.BoolOp object at 0x7da20e9b2860> begin[:] return[constant[None]] return[call[name[int], parameter[name[value]]]]
keyword[def] identifier[translate] ( identifier[self] ): literal[string] identifier[value] = identifier[super] (). identifier[translate] () keyword[if] identifier[value] keyword[is] keyword[None] keyword[or] ( identifier[isinstance] ( identifier[value] , identifier[str] ) keyword[and] identifier[value] . identifier[strip] ()== literal[string] ): keyword[return] keyword[None] keyword[return] identifier[int] ( identifier[value] )
def translate(self): """Gets the value in the current language, or in the configured fallbck language.""" value = super().translate() if value is None or (isinstance(value, str) and value.strip() == ''): return None # depends on [control=['if'], data=[]] return int(value)
def start_state_manager_watches(self):
    """
    Receive updates to the packing plan from the statemgrs and update processes as needed.

    Initializes every configured state manager and registers a packing-plan
    watch on each one; the watch callback relaunches the executor's processes
    whenever a different PackingPlan is observed. Exits the process if the
    state managers cannot be initialized.
    """
    Log.info("Start state manager watches")
    statemgr_config = StateMgrConfig()
    statemgr_config.set_state_locations(configloader.load_state_manager_locations(
        self.cluster, state_manager_config_file=self.state_manager_config_file,
        overrides={"heron.statemgr.connection.string": self.state_manager_connection}))
    try:
      self.state_managers = statemanagerfactory.get_all_state_managers(statemgr_config)
      for state_manager in self.state_managers:
        state_manager.start()
    except Exception as ex:
      # Without working state managers no packing plans can be received,
      # so bail out of the whole process rather than run blind.
      Log.error("Found exception while initializing state managers: %s. Bailing out..." % ex)
      traceback.print_exc()
      sys.exit(1)

    # pylint: disable=unused-argument
    def on_packing_plan_watch(state_manager, new_packing_plan):
      # Watch callback: closes over `self`; relaunches the executor's
      # processes only when the stored plan actually differs from ours.
      Log.debug("State watch triggered for PackingPlan update on shard %s. Existing: %s, New: %s" %
                (self.shard, str(self.packing_plan), str(new_packing_plan)))
      if self.packing_plan != new_packing_plan:
        Log.info("PackingPlan change detected on shard %s, relaunching effected processes." %
                 self.shard)
        self.update_packing_plan(new_packing_plan)
        Log.info("Updating executor processes")
        self.launch()
      else:
        Log.info(
            "State watch triggered for PackingPlan update but plan not changed so not relaunching.")

    for state_manager in self.state_managers:
      # The callback function with the bound
      # state_manager as first variable.
      onPackingPlanWatch = functools.partial(on_packing_plan_watch, state_manager)
      state_manager.get_packing_plan(self.topology_name, onPackingPlanWatch)
      Log.info("Registered state watch for packing plan changes with state manager %s." %
               str(state_manager))
def function[start_state_manager_watches, parameter[self]]: constant[ Receive updates to the packing plan from the statemgrs and update processes as needed. ] call[name[Log].info, parameter[constant[Start state manager watches]]] variable[statemgr_config] assign[=] call[name[StateMgrConfig], parameter[]] call[name[statemgr_config].set_state_locations, parameter[call[name[configloader].load_state_manager_locations, parameter[name[self].cluster]]]] <ast.Try object at 0x7da20c6e5ae0> def function[on_packing_plan_watch, parameter[state_manager, new_packing_plan]]: call[name[Log].debug, parameter[binary_operation[constant[State watch triggered for PackingPlan update on shard %s. Existing: %s, New: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6e5b40>, <ast.Call object at 0x7da20c6e7970>, <ast.Call object at 0x7da20c6e5a80>]]]]] if compare[name[self].packing_plan not_equal[!=] name[new_packing_plan]] begin[:] call[name[Log].info, parameter[binary_operation[constant[PackingPlan change detected on shard %s, relaunching effected processes.] <ast.Mod object at 0x7da2590d6920> name[self].shard]]] call[name[self].update_packing_plan, parameter[name[new_packing_plan]]] call[name[Log].info, parameter[constant[Updating executor processes]]] call[name[self].launch, parameter[]] for taget[name[state_manager]] in starred[name[self].state_managers] begin[:] variable[onPackingPlanWatch] assign[=] call[name[functools].partial, parameter[name[on_packing_plan_watch], name[state_manager]]] call[name[state_manager].get_packing_plan, parameter[name[self].topology_name, name[onPackingPlanWatch]]] call[name[Log].info, parameter[binary_operation[constant[Registered state watch for packing plan changes with state manager %s.] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[state_manager]]]]]]
keyword[def] identifier[start_state_manager_watches] ( identifier[self] ): literal[string] identifier[Log] . identifier[info] ( literal[string] ) identifier[statemgr_config] = identifier[StateMgrConfig] () identifier[statemgr_config] . identifier[set_state_locations] ( identifier[configloader] . identifier[load_state_manager_locations] ( identifier[self] . identifier[cluster] , identifier[state_manager_config_file] = identifier[self] . identifier[state_manager_config_file] , identifier[overrides] ={ literal[string] : identifier[self] . identifier[state_manager_connection] })) keyword[try] : identifier[self] . identifier[state_managers] = identifier[statemanagerfactory] . identifier[get_all_state_managers] ( identifier[statemgr_config] ) keyword[for] identifier[state_manager] keyword[in] identifier[self] . identifier[state_managers] : identifier[state_manager] . identifier[start] () keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[Log] . identifier[error] ( literal[string] % identifier[ex] ) identifier[traceback] . identifier[print_exc] () identifier[sys] . identifier[exit] ( literal[int] ) keyword[def] identifier[on_packing_plan_watch] ( identifier[state_manager] , identifier[new_packing_plan] ): identifier[Log] . identifier[debug] ( literal[string] % ( identifier[self] . identifier[shard] , identifier[str] ( identifier[self] . identifier[packing_plan] ), identifier[str] ( identifier[new_packing_plan] ))) keyword[if] identifier[self] . identifier[packing_plan] != identifier[new_packing_plan] : identifier[Log] . identifier[info] ( literal[string] % identifier[self] . identifier[shard] ) identifier[self] . identifier[update_packing_plan] ( identifier[new_packing_plan] ) identifier[Log] . identifier[info] ( literal[string] ) identifier[self] . identifier[launch] () keyword[else] : identifier[Log] . identifier[info] ( literal[string] ) keyword[for] identifier[state_manager] keyword[in] identifier[self] . 
identifier[state_managers] : identifier[onPackingPlanWatch] = identifier[functools] . identifier[partial] ( identifier[on_packing_plan_watch] , identifier[state_manager] ) identifier[state_manager] . identifier[get_packing_plan] ( identifier[self] . identifier[topology_name] , identifier[onPackingPlanWatch] ) identifier[Log] . identifier[info] ( literal[string] % identifier[str] ( identifier[state_manager] ))
def start_state_manager_watches(self): """ Receive updates to the packing plan from the statemgrs and update processes as needed. """ Log.info('Start state manager watches') statemgr_config = StateMgrConfig() statemgr_config.set_state_locations(configloader.load_state_manager_locations(self.cluster, state_manager_config_file=self.state_manager_config_file, overrides={'heron.statemgr.connection.string': self.state_manager_connection})) try: self.state_managers = statemanagerfactory.get_all_state_managers(statemgr_config) for state_manager in self.state_managers: state_manager.start() # depends on [control=['for'], data=['state_manager']] # depends on [control=['try'], data=[]] except Exception as ex: Log.error('Found exception while initializing state managers: %s. Bailing out...' % ex) traceback.print_exc() sys.exit(1) # depends on [control=['except'], data=['ex']] # pylint: disable=unused-argument def on_packing_plan_watch(state_manager, new_packing_plan): Log.debug('State watch triggered for PackingPlan update on shard %s. Existing: %s, New: %s' % (self.shard, str(self.packing_plan), str(new_packing_plan))) if self.packing_plan != new_packing_plan: Log.info('PackingPlan change detected on shard %s, relaunching effected processes.' % self.shard) self.update_packing_plan(new_packing_plan) Log.info('Updating executor processes') self.launch() # depends on [control=['if'], data=['new_packing_plan']] else: Log.info('State watch triggered for PackingPlan update but plan not changed so not relaunching.') for state_manager in self.state_managers: # The callback function with the bound # state_manager as first variable. onPackingPlanWatch = functools.partial(on_packing_plan_watch, state_manager) state_manager.get_packing_plan(self.topology_name, onPackingPlanWatch) Log.info('Registered state watch for packing plan changes with state manager %s.' % str(state_manager)) # depends on [control=['for'], data=['state_manager']]
def get_standard_package(self, server_id, is_virt=True):
    """Retrieves the standard firewall package for the virtual server.

    :param int server_id: The ID of the server to create the firewall for
    :param bool is_virt: True if the ID provided is for a virtual server,
                         False for a server
    :returns: A dictionary containing the standard virtual server firewall
              package
    """
    port_speed = self._get_fwl_port_speed(server_id, is_virt)
    # Product items are looked up by description, e.g.
    # "1000Mbps Hardware Firewall".
    description = "%s%s" % (port_speed, "Mbps Hardware Firewall")
    item_filter = {'items': {'description': utils.query_filter(description)}}
    return self.prod_pkg.getItems(id=0, filter=item_filter)
def function[get_standard_package, parameter[self, server_id, is_virt]]: constant[Retrieves the standard firewall package for the virtual server. :param int server_id: The ID of the server to create the firewall for :param bool is_virt: True if the ID provided is for a virtual server, False for a server :returns: A dictionary containing the standard virtual server firewall package ] variable[firewall_port_speed] assign[=] call[name[self]._get_fwl_port_speed, parameter[name[server_id], name[is_virt]]] variable[_value] assign[=] binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20c7c8880>, <ast.Constant object at 0x7da20c7cba00>]]] variable[_filter] assign[=] dictionary[[<ast.Constant object at 0x7da20c7c9450>], [<ast.Dict object at 0x7da20c7ca620>]] return[call[name[self].prod_pkg.getItems, parameter[]]]
keyword[def] identifier[get_standard_package] ( identifier[self] , identifier[server_id] , identifier[is_virt] = keyword[True] ): literal[string] identifier[firewall_port_speed] = identifier[self] . identifier[_get_fwl_port_speed] ( identifier[server_id] , identifier[is_virt] ) identifier[_value] = literal[string] %( identifier[firewall_port_speed] , literal[string] ) identifier[_filter] ={ literal[string] :{ literal[string] : identifier[utils] . identifier[query_filter] ( identifier[_value] )}} keyword[return] identifier[self] . identifier[prod_pkg] . identifier[getItems] ( identifier[id] = literal[int] , identifier[filter] = identifier[_filter] )
def get_standard_package(self, server_id, is_virt=True): """Retrieves the standard firewall package for the virtual server. :param int server_id: The ID of the server to create the firewall for :param bool is_virt: True if the ID provided is for a virtual server, False for a server :returns: A dictionary containing the standard virtual server firewall package """ firewall_port_speed = self._get_fwl_port_speed(server_id, is_virt) _value = '%s%s' % (firewall_port_speed, 'Mbps Hardware Firewall') _filter = {'items': {'description': utils.query_filter(_value)}} return self.prod_pkg.getItems(id=0, filter=_filter)
def write_combined_transit_stop_to_stop_network(gtfs, output_path, fmt=None):
    """
    Write the combined stop-to-stop transit network of ``gtfs`` to disk.

    Parameters
    ----------
    gtfs : gtfspy.GTFS
    output_path : str
    fmt: None, optional
        defaulting to "edg" and writing results as ".edg" files
        If "csv" csv files are produced instead
    """
    effective_fmt = "edg" if fmt is None else fmt
    network = combined_stop_to_stop_transit_network(gtfs)
    _write_stop_to_stop_network_edges(network, output_path, fmt=effective_fmt)
def function[write_combined_transit_stop_to_stop_network, parameter[gtfs, output_path, fmt]]: constant[ Parameters ---------- gtfs : gtfspy.GTFS output_path : str fmt: None, optional defaulting to "edg" and writing results as ".edg" files If "csv" csv files are produced instead ] if compare[name[fmt] is constant[None]] begin[:] variable[fmt] assign[=] constant[edg] variable[multi_di_graph] assign[=] call[name[combined_stop_to_stop_transit_network], parameter[name[gtfs]]] call[name[_write_stop_to_stop_network_edges], parameter[name[multi_di_graph], name[output_path]]]
keyword[def] identifier[write_combined_transit_stop_to_stop_network] ( identifier[gtfs] , identifier[output_path] , identifier[fmt] = keyword[None] ): literal[string] keyword[if] identifier[fmt] keyword[is] keyword[None] : identifier[fmt] = literal[string] identifier[multi_di_graph] = identifier[combined_stop_to_stop_transit_network] ( identifier[gtfs] ) identifier[_write_stop_to_stop_network_edges] ( identifier[multi_di_graph] , identifier[output_path] , identifier[fmt] = identifier[fmt] )
def write_combined_transit_stop_to_stop_network(gtfs, output_path, fmt=None): """ Parameters ---------- gtfs : gtfspy.GTFS output_path : str fmt: None, optional defaulting to "edg" and writing results as ".edg" files If "csv" csv files are produced instead """ if fmt is None: fmt = 'edg' # depends on [control=['if'], data=['fmt']] multi_di_graph = combined_stop_to_stop_transit_network(gtfs) _write_stop_to_stop_network_edges(multi_di_graph, output_path, fmt=fmt)
def _basename_in_blacklist_re(base_name, black_list_re): """Determines if the basename is matched in a regex blacklist :param str base_name: The basename of the file :param list black_list_re: A collection of regex patterns to match against. Successful matches are blacklisted. :returns: `True` if the basename is blacklisted, `False` otherwise. :rtype: bool """ for file_pattern in black_list_re: if file_pattern.match(base_name): return True return False
def function[_basename_in_blacklist_re, parameter[base_name, black_list_re]]: constant[Determines if the basename is matched in a regex blacklist :param str base_name: The basename of the file :param list black_list_re: A collection of regex patterns to match against. Successful matches are blacklisted. :returns: `True` if the basename is blacklisted, `False` otherwise. :rtype: bool ] for taget[name[file_pattern]] in starred[name[black_list_re]] begin[:] if call[name[file_pattern].match, parameter[name[base_name]]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[_basename_in_blacklist_re] ( identifier[base_name] , identifier[black_list_re] ): literal[string] keyword[for] identifier[file_pattern] keyword[in] identifier[black_list_re] : keyword[if] identifier[file_pattern] . identifier[match] ( identifier[base_name] ): keyword[return] keyword[True] keyword[return] keyword[False]
def _basename_in_blacklist_re(base_name, black_list_re): """Determines if the basename is matched in a regex blacklist :param str base_name: The basename of the file :param list black_list_re: A collection of regex patterns to match against. Successful matches are blacklisted. :returns: `True` if the basename is blacklisted, `False` otherwise. :rtype: bool """ for file_pattern in black_list_re: if file_pattern.match(base_name): return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['file_pattern']] return False
def raw_clean(self, datas):
    """
    Clean raw text: strip HTML tags, drop stop words and punctuation,
    lowercase, then return the words longer than one character.
    """
    text = strip_tags(datas)                 # Remove HTML
    text = STOP_WORDS.rebase(text, '')       # Remove STOP WORDS
    text = PUNCTUATION.sub('', text)         # Remove punctuation
    text = text.lower()
    return [word for word in text.split() if len(word) > 1]
def function[raw_clean, parameter[self, datas]]: constant[ Apply a cleaning on raw datas. ] variable[datas] assign[=] call[name[strip_tags], parameter[name[datas]]] variable[datas] assign[=] call[name[STOP_WORDS].rebase, parameter[name[datas], constant[]]] variable[datas] assign[=] call[name[PUNCTUATION].sub, parameter[constant[], name[datas]]] variable[datas] assign[=] call[name[datas].lower, parameter[]] return[<ast.ListComp object at 0x7da1b1d8d930>]
keyword[def] identifier[raw_clean] ( identifier[self] , identifier[datas] ): literal[string] identifier[datas] = identifier[strip_tags] ( identifier[datas] ) identifier[datas] = identifier[STOP_WORDS] . identifier[rebase] ( identifier[datas] , literal[string] ) identifier[datas] = identifier[PUNCTUATION] . identifier[sub] ( literal[string] , identifier[datas] ) identifier[datas] = identifier[datas] . identifier[lower] () keyword[return] [ identifier[d] keyword[for] identifier[d] keyword[in] identifier[datas] . identifier[split] () keyword[if] identifier[len] ( identifier[d] )> literal[int] ]
def raw_clean(self, datas): """ Apply a cleaning on raw datas. """ datas = strip_tags(datas) # Remove HTML datas = STOP_WORDS.rebase(datas, '') # Remove STOP WORDS datas = PUNCTUATION.sub('', datas) # Remove punctuation datas = datas.lower() return [d for d in datas.split() if len(d) > 1]
def _store_object(self, obj_name, content, etag=None, chunked=False,
        chunk_size=None, headers=None):
    """
    Handles the low-level creation of a storage object and the
    uploading of the contents of that object.

    :param obj_name: name of the object within the container.
    :param content: the data to upload (may be None when chunked).
    :param etag: optional checksum; computed from ``content`` when omitted.
    :param chunked: when True, upload with chunked transfer encoding.
    :param chunk_size: accepted for interface compatibility; unused here.
    :param headers: optional extra request headers (mutated in place).
    """
    if headers is None:
        # Guard against the None default: the code below pops and
        # assigns header keys unconditionally.
        headers = {}
    # Any caller-supplied ETag header is discarded; the etag argument
    # (or the checksum computed below) is authoritative.
    headers.pop("ETag", "")
    if chunked:
        headers.pop("Content-Length", "")
        headers["Transfer-Encoding"] = "chunked"
    elif etag is None and content is not None:
        etag = utils.get_checksum(content)
    if etag:
        headers["ETag"] = etag
    if not headers.get("Content-Type"):
        headers["Content-Type"] = None
    uri = "/%s/%s" % (self.uri_base, obj_name)
    resp, resp_body = self.api.method_put(uri, data=content,
            headers=headers)
def function[_store_object, parameter[self, obj_name, content, etag, chunked, chunk_size, headers]]: constant[ Handles the low-level creation of a storage object and the uploading of the contents of that object. ] variable[head_etag] assign[=] call[name[headers].pop, parameter[constant[ETag], constant[]]] if name[chunked] begin[:] call[name[headers].pop, parameter[constant[Content-Length], constant[]]] call[name[headers]][constant[Transfer-Encoding]] assign[=] constant[chunked] if name[etag] begin[:] call[name[headers]][constant[ETag]] assign[=] name[etag] if <ast.UnaryOp object at 0x7da18bccb160> begin[:] call[name[headers]][constant[Content-Type]] assign[=] constant[None] variable[uri] assign[=] binary_operation[constant[/%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18bcc8ee0>, <ast.Name object at 0x7da18bccbca0>]]] <ast.Tuple object at 0x7da18bcca410> assign[=] call[name[self].api.method_put, parameter[name[uri]]]
keyword[def] identifier[_store_object] ( identifier[self] , identifier[obj_name] , identifier[content] , identifier[etag] = keyword[None] , identifier[chunked] = keyword[False] , identifier[chunk_size] = keyword[None] , identifier[headers] = keyword[None] ): literal[string] identifier[head_etag] = identifier[headers] . identifier[pop] ( literal[string] , literal[string] ) keyword[if] identifier[chunked] : identifier[headers] . identifier[pop] ( literal[string] , literal[string] ) identifier[headers] [ literal[string] ]= literal[string] keyword[elif] identifier[etag] keyword[is] keyword[None] keyword[and] identifier[content] keyword[is] keyword[not] keyword[None] : identifier[etag] = identifier[utils] . identifier[get_checksum] ( identifier[content] ) keyword[if] identifier[etag] : identifier[headers] [ literal[string] ]= identifier[etag] keyword[if] keyword[not] identifier[headers] . identifier[get] ( literal[string] ): identifier[headers] [ literal[string] ]= keyword[None] identifier[uri] = literal[string] %( identifier[self] . identifier[uri_base] , identifier[obj_name] ) identifier[resp] , identifier[resp_body] = identifier[self] . identifier[api] . identifier[method_put] ( identifier[uri] , identifier[data] = identifier[content] , identifier[headers] = identifier[headers] )
def _store_object(self, obj_name, content, etag=None, chunked=False, chunk_size=None, headers=None): """ Handles the low-level creation of a storage object and the uploading of the contents of that object. """ head_etag = headers.pop('ETag', '') if chunked: headers.pop('Content-Length', '') headers['Transfer-Encoding'] = 'chunked' # depends on [control=['if'], data=[]] elif etag is None and content is not None: etag = utils.get_checksum(content) # depends on [control=['if'], data=[]] if etag: headers['ETag'] = etag # depends on [control=['if'], data=[]] if not headers.get('Content-Type'): headers['Content-Type'] = None # depends on [control=['if'], data=[]] uri = '/%s/%s' % (self.uri_base, obj_name) (resp, resp_body) = self.api.method_put(uri, data=content, headers=headers)
def open(self, *, autocommit=False):
    """Open the underlying connection via the core's open method.

    :param autocommit: the default autocommit state
    :type autocommit: boolean
    :return: self
    """
    if self.connection is not None:
        raise Exception("Connection already set")
    conn = self.core.open()
    self.connection = conn
    self.autocommit = autocommit
    search_path = self._search_path
    if search_path:
        self._configure_connection("search_path", search_path)
    return self
def function[open, parameter[self]]: constant[Sets the connection with the core's open method. :param autocommit: the default autocommit state :type autocommit: boolean :return: self ] if compare[name[self].connection is_not constant[None]] begin[:] <ast.Raise object at 0x7da20c7cb850> name[self].connection assign[=] call[name[self].core.open, parameter[]] name[self].autocommit assign[=] name[autocommit] if name[self]._search_path begin[:] call[name[self]._configure_connection, parameter[constant[search_path], name[self]._search_path]] return[name[self]]
keyword[def] identifier[open] ( identifier[self] ,*, identifier[autocommit] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[connection] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[self] . identifier[connection] = identifier[self] . identifier[core] . identifier[open] () identifier[self] . identifier[autocommit] = identifier[autocommit] keyword[if] identifier[self] . identifier[_search_path] : identifier[self] . identifier[_configure_connection] ( literal[string] , identifier[self] . identifier[_search_path] ) keyword[return] identifier[self]
def open(self, *, autocommit=False): """Sets the connection with the core's open method. :param autocommit: the default autocommit state :type autocommit: boolean :return: self """ if self.connection is not None: raise Exception('Connection already set') # depends on [control=['if'], data=[]] self.connection = self.core.open() self.autocommit = autocommit if self._search_path: self._configure_connection('search_path', self._search_path) # depends on [control=['if'], data=[]] return self
def merge_files(configuration, locale, fail_if_missing=True):
    """
    Merge every target listed in ``configuration.generate_merge`` for
    the given `locale`, as specified in config.yaml.
    """
    merge_plan = configuration.generate_merge.items()
    for target, sources in merge_plan:
        merge(configuration, locale, target, sources, fail_if_missing)
def function[merge_files, parameter[configuration, locale, fail_if_missing]]: constant[ Merge all the files in `locale`, as specified in config.yaml. ] for taget[tuple[[<ast.Name object at 0x7da18ede7970>, <ast.Name object at 0x7da18ede4b80>]]] in starred[call[name[configuration].generate_merge.items, parameter[]]] begin[:] call[name[merge], parameter[name[configuration], name[locale], name[target], name[sources], name[fail_if_missing]]]
keyword[def] identifier[merge_files] ( identifier[configuration] , identifier[locale] , identifier[fail_if_missing] = keyword[True] ): literal[string] keyword[for] identifier[target] , identifier[sources] keyword[in] identifier[configuration] . identifier[generate_merge] . identifier[items] (): identifier[merge] ( identifier[configuration] , identifier[locale] , identifier[target] , identifier[sources] , identifier[fail_if_missing] )
def merge_files(configuration, locale, fail_if_missing=True): """ Merge all the files in `locale`, as specified in config.yaml. """ for (target, sources) in configuration.generate_merge.items(): merge(configuration, locale, target, sources, fail_if_missing) # depends on [control=['for'], data=[]]
def from_api(cls, api):
    """
    Create an application description for the todo app that, based on
    the api, can use either the api or the ux for interaction.
    """
    ux = TodoUX(api)
    # imported lazily to avoid a circular import at module load time
    from .pseudorpc import PseudoRpc
    rpc = PseudoRpc(api)
    description = {ViaAPI: api, ViaUX: ux, ViaRPC: rpc}
    return cls(description)
def function[from_api, parameter[cls, api]]: constant[ create an application description for the todo app, that based on the api can use either tha api or the ux for interaction ] variable[ux] assign[=] call[name[TodoUX], parameter[name[api]]] from relative_module[pseudorpc] import module[PseudoRpc] variable[rpc] assign[=] call[name[PseudoRpc], parameter[name[api]]] return[call[name[cls], parameter[dictionary[[<ast.Name object at 0x7da1b1da0bb0>, <ast.Name object at 0x7da1b1da2710>, <ast.Name object at 0x7da1b1da1b10>], [<ast.Name object at 0x7da1b1da2590>, <ast.Name object at 0x7da1b1da3970>, <ast.Name object at 0x7da1b1da29b0>]]]]]
keyword[def] identifier[from_api] ( identifier[cls] , identifier[api] ): literal[string] identifier[ux] = identifier[TodoUX] ( identifier[api] ) keyword[from] . identifier[pseudorpc] keyword[import] identifier[PseudoRpc] identifier[rpc] = identifier[PseudoRpc] ( identifier[api] ) keyword[return] identifier[cls] ({ identifier[ViaAPI] : identifier[api] , identifier[ViaUX] : identifier[ux] , identifier[ViaRPC] : identifier[rpc] })
def from_api(cls, api): """ create an application description for the todo app, that based on the api can use either tha api or the ux for interaction """ ux = TodoUX(api) from .pseudorpc import PseudoRpc rpc = PseudoRpc(api) return cls({ViaAPI: api, ViaUX: ux, ViaRPC: rpc})
def supports_protection(self):
    """
    Determines if the current HttpMessageSecurity object supports the
    message protection protocol.

    All four session keys (client/server signature and encryption)
    must be present.

    :return: True if the current object supports protection, otherwise
    False
    """
    # all() guarantees a real bool, matching the documented contract;
    # the previous and-chain leaked the last key object (or the first
    # falsy value) instead of True/False.
    return all((self.client_signature_key,
                self.client_encryption_key,
                self.server_signature_key,
                self.server_encryption_key))
def function[supports_protection, parameter[self]]: constant[ Determines if the the current HttpMessageSecurity object supports the message protection protocol. :return: True if the current object supports protection, otherwise False ] return[<ast.BoolOp object at 0x7da18f09f9a0>]
keyword[def] identifier[supports_protection] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[client_signature_key] keyword[and] identifier[self] . identifier[client_encryption_key] keyword[and] identifier[self] . identifier[server_signature_key] keyword[and] identifier[self] . identifier[server_encryption_key]
def supports_protection(self): """ Determines if the the current HttpMessageSecurity object supports the message protection protocol. :return: True if the current object supports protection, otherwise False """ return self.client_signature_key and self.client_encryption_key and self.server_signature_key and self.server_encryption_key
def run(self): """ This runs the leader process to issue and manage jobs. :raises: toil.leader.FailedJobsException if at the end of function their remain \ failed jobs :return: The return value of the root job's run function. :rtype: Any """ # Start the stats/logging aggregation thread self.statsAndLogging.start() if self.config.metrics: self.toilMetrics = ToilMetrics(provisioner=self.provisioner) try: # Start service manager thread self.serviceManager.start() try: # Create cluster scaling processes if not None if self.clusterScaler is not None: self.clusterScaler.start() try: # Run the main loop self.innerLoop() finally: if self.clusterScaler is not None: logger.debug('Waiting for workers to shutdown.') startTime = time.time() self.clusterScaler.shutdown() logger.debug('Worker shutdown complete in %s seconds.', time.time() - startTime) finally: # Ensure service manager thread is properly shutdown self.serviceManager.shutdown() finally: # Ensure the stats and logging thread is properly shutdown self.statsAndLogging.shutdown() if self.toilMetrics: self.toilMetrics.shutdown() # Filter the failed jobs self.toilState.totalFailedJobs = [j for j in self.toilState.totalFailedJobs if self.jobStore.exists(j.jobStoreID)] try: self.create_status_sentinel_file(self.toilState.totalFailedJobs) except IOError as e: logger.debug('Error from importFile with hardlink=True: {}'.format(e)) logger.info("Finished toil run %s" % ("successfully." if not self.toilState.totalFailedJobs \ else ("with %s failed jobs." % len(self.toilState.totalFailedJobs)))) if len(self.toilState.totalFailedJobs): logger.info("Failed jobs at end of the run: %s", ' '.join(str(job) for job in self.toilState.totalFailedJobs)) # Cleanup if len(self.toilState.totalFailedJobs) > 0: raise FailedJobsException(self.config.jobStore, self.toilState.totalFailedJobs, self.jobStore) return self.jobStore.getRootJobReturnValue()
def function[run, parameter[self]]: constant[ This runs the leader process to issue and manage jobs. :raises: toil.leader.FailedJobsException if at the end of function their remain failed jobs :return: The return value of the root job's run function. :rtype: Any ] call[name[self].statsAndLogging.start, parameter[]] if name[self].config.metrics begin[:] name[self].toilMetrics assign[=] call[name[ToilMetrics], parameter[]] <ast.Try object at 0x7da18dc05330> name[self].toilState.totalFailedJobs assign[=] <ast.ListComp object at 0x7da18dc07850> <ast.Try object at 0x7da18dc06f80> call[name[logger].info, parameter[binary_operation[constant[Finished toil run %s] <ast.Mod object at 0x7da2590d6920> <ast.IfExp object at 0x7da18dc05c90>]]] if call[name[len], parameter[name[self].toilState.totalFailedJobs]] begin[:] call[name[logger].info, parameter[constant[Failed jobs at end of the run: %s], call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da18f58ff70>]]]] if compare[call[name[len], parameter[name[self].toilState.totalFailedJobs]] greater[>] constant[0]] begin[:] <ast.Raise object at 0x7da18f58df90> return[call[name[self].jobStore.getRootJobReturnValue, parameter[]]]
keyword[def] identifier[run] ( identifier[self] ): literal[string] identifier[self] . identifier[statsAndLogging] . identifier[start] () keyword[if] identifier[self] . identifier[config] . identifier[metrics] : identifier[self] . identifier[toilMetrics] = identifier[ToilMetrics] ( identifier[provisioner] = identifier[self] . identifier[provisioner] ) keyword[try] : identifier[self] . identifier[serviceManager] . identifier[start] () keyword[try] : keyword[if] identifier[self] . identifier[clusterScaler] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[clusterScaler] . identifier[start] () keyword[try] : identifier[self] . identifier[innerLoop] () keyword[finally] : keyword[if] identifier[self] . identifier[clusterScaler] keyword[is] keyword[not] keyword[None] : identifier[logger] . identifier[debug] ( literal[string] ) identifier[startTime] = identifier[time] . identifier[time] () identifier[self] . identifier[clusterScaler] . identifier[shutdown] () identifier[logger] . identifier[debug] ( literal[string] , identifier[time] . identifier[time] ()- identifier[startTime] ) keyword[finally] : identifier[self] . identifier[serviceManager] . identifier[shutdown] () keyword[finally] : identifier[self] . identifier[statsAndLogging] . identifier[shutdown] () keyword[if] identifier[self] . identifier[toilMetrics] : identifier[self] . identifier[toilMetrics] . identifier[shutdown] () identifier[self] . identifier[toilState] . identifier[totalFailedJobs] =[ identifier[j] keyword[for] identifier[j] keyword[in] identifier[self] . identifier[toilState] . identifier[totalFailedJobs] keyword[if] identifier[self] . identifier[jobStore] . identifier[exists] ( identifier[j] . identifier[jobStoreID] )] keyword[try] : identifier[self] . identifier[create_status_sentinel_file] ( identifier[self] . identifier[toilState] . identifier[totalFailedJobs] ) keyword[except] identifier[IOError] keyword[as] identifier[e] : identifier[logger] . 
identifier[debug] ( literal[string] . identifier[format] ( identifier[e] )) identifier[logger] . identifier[info] ( literal[string] % ( literal[string] keyword[if] keyword[not] identifier[self] . identifier[toilState] . identifier[totalFailedJobs] keyword[else] ( literal[string] % identifier[len] ( identifier[self] . identifier[toilState] . identifier[totalFailedJobs] )))) keyword[if] identifier[len] ( identifier[self] . identifier[toilState] . identifier[totalFailedJobs] ): identifier[logger] . identifier[info] ( literal[string] , literal[string] . identifier[join] ( identifier[str] ( identifier[job] ) keyword[for] identifier[job] keyword[in] identifier[self] . identifier[toilState] . identifier[totalFailedJobs] )) keyword[if] identifier[len] ( identifier[self] . identifier[toilState] . identifier[totalFailedJobs] )> literal[int] : keyword[raise] identifier[FailedJobsException] ( identifier[self] . identifier[config] . identifier[jobStore] , identifier[self] . identifier[toilState] . identifier[totalFailedJobs] , identifier[self] . identifier[jobStore] ) keyword[return] identifier[self] . identifier[jobStore] . identifier[getRootJobReturnValue] ()
def run(self): """ This runs the leader process to issue and manage jobs. :raises: toil.leader.FailedJobsException if at the end of function their remain failed jobs :return: The return value of the root job's run function. :rtype: Any """ # Start the stats/logging aggregation thread self.statsAndLogging.start() if self.config.metrics: self.toilMetrics = ToilMetrics(provisioner=self.provisioner) # depends on [control=['if'], data=[]] try: # Start service manager thread self.serviceManager.start() try: # Create cluster scaling processes if not None if self.clusterScaler is not None: self.clusterScaler.start() # depends on [control=['if'], data=[]] try: # Run the main loop self.innerLoop() # depends on [control=['try'], data=[]] finally: if self.clusterScaler is not None: logger.debug('Waiting for workers to shutdown.') startTime = time.time() self.clusterScaler.shutdown() logger.debug('Worker shutdown complete in %s seconds.', time.time() - startTime) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] finally: # Ensure service manager thread is properly shutdown self.serviceManager.shutdown() # depends on [control=['try'], data=[]] finally: # Ensure the stats and logging thread is properly shutdown self.statsAndLogging.shutdown() if self.toilMetrics: self.toilMetrics.shutdown() # depends on [control=['if'], data=[]] # Filter the failed jobs self.toilState.totalFailedJobs = [j for j in self.toilState.totalFailedJobs if self.jobStore.exists(j.jobStoreID)] try: self.create_status_sentinel_file(self.toilState.totalFailedJobs) # depends on [control=['try'], data=[]] except IOError as e: logger.debug('Error from importFile with hardlink=True: {}'.format(e)) # depends on [control=['except'], data=['e']] logger.info('Finished toil run %s' % ('successfully.' if not self.toilState.totalFailedJobs else 'with %s failed jobs.' 
% len(self.toilState.totalFailedJobs))) if len(self.toilState.totalFailedJobs): logger.info('Failed jobs at end of the run: %s', ' '.join((str(job) for job in self.toilState.totalFailedJobs))) # depends on [control=['if'], data=[]] # Cleanup if len(self.toilState.totalFailedJobs) > 0: raise FailedJobsException(self.config.jobStore, self.toilState.totalFailedJobs, self.jobStore) # depends on [control=['if'], data=[]] return self.jobStore.getRootJobReturnValue()
def create(self, friendly_name=None, description=None):
    """Creates the Dataset with the specified friendly name and description.

    Args:
      friendly_name: (optional) the friendly name for the dataset if it is being created.
      description: (optional) a description for the dataset if it is being created.
    Returns:
      The Dataset.
    Raises:
      Exception if the Dataset could not be created.
    """
    if not self.exists():
        # The original wrapped this call in `except Exception as e: raise e`,
        # which merely re-raised the same exception; let errors propagate.
        response = self._api.datasets_insert(self._name_parts,
                                             friendly_name=friendly_name,
                                             description=description)
        if 'selfLink' not in response:
            raise Exception("Could not create dataset %s" % self._full_name)
    return self
def function[create, parameter[self, friendly_name, description]]: constant[Creates the Dataset with the specified friendly name and description. Args: friendly_name: (optional) the friendly name for the dataset if it is being created. description: (optional) a description for the dataset if it is being created. Returns: The Dataset. Raises: Exception if the Dataset could not be created. ] if <ast.UnaryOp object at 0x7da20e955e10> begin[:] <ast.Try object at 0x7da20e957370> if compare[constant[selfLink] <ast.NotIn object at 0x7da2590d7190> name[response]] begin[:] <ast.Raise object at 0x7da18c4cf580> return[name[self]]
keyword[def] identifier[create] ( identifier[self] , identifier[friendly_name] = keyword[None] , identifier[description] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[exists] (): keyword[try] : identifier[response] = identifier[self] . identifier[_api] . identifier[datasets_insert] ( identifier[self] . identifier[_name_parts] , identifier[friendly_name] = identifier[friendly_name] , identifier[description] = identifier[description] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[e] keyword[if] literal[string] keyword[not] keyword[in] identifier[response] : keyword[raise] identifier[Exception] ( literal[string] % identifier[self] . identifier[_full_name] ) keyword[return] identifier[self]
def create(self, friendly_name=None, description=None): """Creates the Dataset with the specified friendly name and description. Args: friendly_name: (optional) the friendly name for the dataset if it is being created. description: (optional) a description for the dataset if it is being created. Returns: The Dataset. Raises: Exception if the Dataset could not be created. """ if not self.exists(): try: response = self._api.datasets_insert(self._name_parts, friendly_name=friendly_name, description=description) # depends on [control=['try'], data=[]] except Exception as e: raise e # depends on [control=['except'], data=['e']] if 'selfLink' not in response: raise Exception('Could not create dataset %s' % self._full_name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return self
def _get_model_table(self, part):
    """
    Returns a list that represents the table.

    :param part: The table header, table footer or table body.
    :type part: hatemile.util.html.htmldomelement.HTMLDOMElement
    :return: The list that represents the table.
    :rtype: list(list(hatemile.util.html.htmldomelement.HTMLDOMElement))
    """
    rows = self.parser.find(part).find_children('tr').list_results()
    table = [
        self._get_model_row(
            self.parser.find(row).find_children('td,th').list_results()
        )
        for row in rows
    ]
    return self._get_valid_model_table(table)
def function[_get_model_table, parameter[self, part]]: constant[ Returns a list that represents the table. :param part: The table header, table footer or table body. :type part: hatemile.util.html.htmldomelement.HTMLDOMElement :return: The list that represents the table. :rtype: list(list(hatemile.util.html.htmldomelement.HTMLDOMElement)) ] variable[rows] assign[=] call[call[call[name[self].parser.find, parameter[name[part]]].find_children, parameter[constant[tr]]].list_results, parameter[]] variable[table] assign[=] list[[]] for taget[name[row]] in starred[name[rows]] begin[:] call[name[table].append, parameter[call[name[self]._get_model_row, parameter[call[call[call[name[self].parser.find, parameter[name[row]]].find_children, parameter[constant[td,th]]].list_results, parameter[]]]]]] return[call[name[self]._get_valid_model_table, parameter[name[table]]]]
keyword[def] identifier[_get_model_table] ( identifier[self] , identifier[part] ): literal[string] identifier[rows] = identifier[self] . identifier[parser] . identifier[find] ( identifier[part] ). identifier[find_children] ( literal[string] ). identifier[list_results] () identifier[table] =[] keyword[for] identifier[row] keyword[in] identifier[rows] : identifier[table] . identifier[append] ( identifier[self] . identifier[_get_model_row] ( identifier[self] . identifier[parser] . identifier[find] ( identifier[row] ). identifier[find_children] ( literal[string] ). identifier[list_results] ())) keyword[return] identifier[self] . identifier[_get_valid_model_table] ( identifier[table] )
def _get_model_table(self, part): """ Returns a list that represents the table. :param part: The table header, table footer or table body. :type part: hatemile.util.html.htmldomelement.HTMLDOMElement :return: The list that represents the table. :rtype: list(list(hatemile.util.html.htmldomelement.HTMLDOMElement)) """ rows = self.parser.find(part).find_children('tr').list_results() table = [] for row in rows: table.append(self._get_model_row(self.parser.find(row).find_children('td,th').list_results())) # depends on [control=['for'], data=['row']] return self._get_valid_model_table(table)
def compat_get_paginated_response(view, page):
    """
    get_paginated_response is unknown to DRF 3.0
    """
    drf_31_or_later = DRFVLIST[0] == 3 and DRFVLIST[1] >= 1
    if drf_31_or_later:
        # circular import, so resolved lazily here
        from rest_messaging.serializers import ComplexMessageSerializer
        serializer = ComplexMessageSerializer(page, many=True)
        return view.get_paginated_response(serializer.data)
    serializer = view.get_pagination_serializer(page)
    return Response(serializer.data)
def function[compat_get_paginated_response, parameter[view, page]]: constant[ get_paginated_response is unknown to DRF 3.0 ] if <ast.BoolOp object at 0x7da1b10b0370> begin[:] from relative_module[rest_messaging.serializers] import module[ComplexMessageSerializer] variable[serializer] assign[=] call[name[ComplexMessageSerializer], parameter[name[page]]] return[call[name[view].get_paginated_response, parameter[name[serializer].data]]]
keyword[def] identifier[compat_get_paginated_response] ( identifier[view] , identifier[page] ): literal[string] keyword[if] identifier[DRFVLIST] [ literal[int] ]== literal[int] keyword[and] identifier[DRFVLIST] [ literal[int] ]>= literal[int] : keyword[from] identifier[rest_messaging] . identifier[serializers] keyword[import] identifier[ComplexMessageSerializer] identifier[serializer] = identifier[ComplexMessageSerializer] ( identifier[page] , identifier[many] = keyword[True] ) keyword[return] identifier[view] . identifier[get_paginated_response] ( identifier[serializer] . identifier[data] ) keyword[else] : identifier[serializer] = identifier[view] . identifier[get_pagination_serializer] ( identifier[page] ) keyword[return] identifier[Response] ( identifier[serializer] . identifier[data] )
def compat_get_paginated_response(view, page): """ get_paginated_response is unknown to DRF 3.0 """ if DRFVLIST[0] == 3 and DRFVLIST[1] >= 1: from rest_messaging.serializers import ComplexMessageSerializer # circular import serializer = ComplexMessageSerializer(page, many=True) return view.get_paginated_response(serializer.data) # depends on [control=['if'], data=[]] else: serializer = view.get_pagination_serializer(page) return Response(serializer.data)
def set_source_quandl(self, quandl_token):
    """Set the data source to Quandl.

    :param quandl_token: API token forwarded to the Quandl worker.
    """
    self.source_name = "Quandl"
    self.data_worker = data_worker
    self.worker_args = {
        "function": Quandl.get,
        "input": self.input_queue,
        "output": self.output_map,
        "token": quandl_token,
    }
def function[set_source_quandl, parameter[self, quandl_token]]: constant[ Set data source to Quandl ] name[self].data_worker assign[=] name[data_worker] name[self].worker_args assign[=] dictionary[[<ast.Constant object at 0x7da20cabc2e0>, <ast.Constant object at 0x7da20cabf400>, <ast.Constant object at 0x7da20cabe860>, <ast.Constant object at 0x7da20cabdb10>], [<ast.Attribute object at 0x7da18c4cf160>, <ast.Attribute object at 0x7da18c4cd240>, <ast.Attribute object at 0x7da18c4cee90>, <ast.Name object at 0x7da18c4ce7d0>]] name[self].source_name assign[=] constant[Quandl]
keyword[def] identifier[set_source_quandl] ( identifier[self] , identifier[quandl_token] ): literal[string] identifier[self] . identifier[data_worker] = identifier[data_worker] identifier[self] . identifier[worker_args] ={ literal[string] : identifier[Quandl] . identifier[get] , literal[string] : identifier[self] . identifier[input_queue] , literal[string] : identifier[self] . identifier[output_map] , literal[string] : identifier[quandl_token] } identifier[self] . identifier[source_name] = literal[string]
def set_source_quandl(self, quandl_token): """ Set data source to Quandl """ self.data_worker = data_worker self.worker_args = {'function': Quandl.get, 'input': self.input_queue, 'output': self.output_map, 'token': quandl_token} self.source_name = 'Quandl'
def _make_sql_compatible(ll): """ Convert any python list of lists (or tuples) so that the strings are formatted correctly for insertion into Args: ll (list): List of lists (or tuples) """ new_ll = [] for l in ll: new_l = () for i in l: if not i: new_l = new_l + (None,) else: if isinstance(i, str): if sys.version_info < (3, 0): val = i.decode('utf8').encode('ascii', errors='ignore') else: # in py3 strings should be ok... val = i else: val = i new_l = new_l + (val,) new_ll.append(new_l) return new_ll
def function[_make_sql_compatible, parameter[ll]]: constant[ Convert any python list of lists (or tuples) so that the strings are formatted correctly for insertion into Args: ll (list): List of lists (or tuples) ] variable[new_ll] assign[=] list[[]] for taget[name[l]] in starred[name[ll]] begin[:] variable[new_l] assign[=] tuple[[]] for taget[name[i]] in starred[name[l]] begin[:] if <ast.UnaryOp object at 0x7da1b1d64ca0> begin[:] variable[new_l] assign[=] binary_operation[name[new_l] + tuple[[<ast.Constant object at 0x7da1b1d67130>]]] call[name[new_ll].append, parameter[name[new_l]]] return[name[new_ll]]
keyword[def] identifier[_make_sql_compatible] ( identifier[ll] ): literal[string] identifier[new_ll] =[] keyword[for] identifier[l] keyword[in] identifier[ll] : identifier[new_l] =() keyword[for] identifier[i] keyword[in] identifier[l] : keyword[if] keyword[not] identifier[i] : identifier[new_l] = identifier[new_l] +( keyword[None] ,) keyword[else] : keyword[if] identifier[isinstance] ( identifier[i] , identifier[str] ): keyword[if] identifier[sys] . identifier[version_info] <( literal[int] , literal[int] ): identifier[val] = identifier[i] . identifier[decode] ( literal[string] ). identifier[encode] ( literal[string] , identifier[errors] = literal[string] ) keyword[else] : identifier[val] = identifier[i] keyword[else] : identifier[val] = identifier[i] identifier[new_l] = identifier[new_l] +( identifier[val] ,) identifier[new_ll] . identifier[append] ( identifier[new_l] ) keyword[return] identifier[new_ll]
def _make_sql_compatible(ll): """ Convert any python list of lists (or tuples) so that the strings are formatted correctly for insertion into Args: ll (list): List of lists (or tuples) """ new_ll = [] for l in ll: new_l = () for i in l: if not i: new_l = new_l + (None,) # depends on [control=['if'], data=[]] else: if isinstance(i, str): if sys.version_info < (3, 0): val = i.decode('utf8').encode('ascii', errors='ignore') # depends on [control=['if'], data=[]] else: # in py3 strings should be ok... val = i # depends on [control=['if'], data=[]] else: val = i new_l = new_l + (val,) # depends on [control=['for'], data=['i']] new_ll.append(new_l) # depends on [control=['for'], data=['l']] return new_ll
def get_hash(name, password=None): ''' Returns the hash of a certificate in the keychain. name The name of the certificate (which you can get from keychain.get_friendly_name) or the location of a p12 file. password The password that is used in the certificate. Only required if your passing a p12 file. Note: This will be outputted to logs CLI Example: .. code-block:: bash salt '*' keychain.get_hash /tmp/test.p12 test123 ''' if '.p12' in name[-4:]: cmd = 'openssl pkcs12 -in {0} -passin pass:{1} -passout pass:{1}'.format(name, password) else: cmd = 'security find-certificate -c "{0}" -m -p'.format(name) out = __salt__['cmd.run'](cmd) matches = re.search('-----BEGIN CERTIFICATE-----(.*)-----END CERTIFICATE-----', out, re.DOTALL | re.MULTILINE) if matches: return matches.group(1) else: return False
def function[get_hash, parameter[name, password]]: constant[ Returns the hash of a certificate in the keychain. name The name of the certificate (which you can get from keychain.get_friendly_name) or the location of a p12 file. password The password that is used in the certificate. Only required if your passing a p12 file. Note: This will be outputted to logs CLI Example: .. code-block:: bash salt '*' keychain.get_hash /tmp/test.p12 test123 ] if compare[constant[.p12] in call[name[name]][<ast.Slice object at 0x7da1b21353c0>]] begin[:] variable[cmd] assign[=] call[constant[openssl pkcs12 -in {0} -passin pass:{1} -passout pass:{1}].format, parameter[name[name], name[password]]] variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]] variable[matches] assign[=] call[name[re].search, parameter[constant[-----BEGIN CERTIFICATE-----(.*)-----END CERTIFICATE-----], name[out], binary_operation[name[re].DOTALL <ast.BitOr object at 0x7da2590d6aa0> name[re].MULTILINE]]] if name[matches] begin[:] return[call[name[matches].group, parameter[constant[1]]]]
keyword[def] identifier[get_hash] ( identifier[name] , identifier[password] = keyword[None] ): literal[string] keyword[if] literal[string] keyword[in] identifier[name] [- literal[int] :]: identifier[cmd] = literal[string] . identifier[format] ( identifier[name] , identifier[password] ) keyword[else] : identifier[cmd] = literal[string] . identifier[format] ( identifier[name] ) identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] ) identifier[matches] = identifier[re] . identifier[search] ( literal[string] , identifier[out] , identifier[re] . identifier[DOTALL] | identifier[re] . identifier[MULTILINE] ) keyword[if] identifier[matches] : keyword[return] identifier[matches] . identifier[group] ( literal[int] ) keyword[else] : keyword[return] keyword[False]
def get_hash(name, password=None): """ Returns the hash of a certificate in the keychain. name The name of the certificate (which you can get from keychain.get_friendly_name) or the location of a p12 file. password The password that is used in the certificate. Only required if your passing a p12 file. Note: This will be outputted to logs CLI Example: .. code-block:: bash salt '*' keychain.get_hash /tmp/test.p12 test123 """ if '.p12' in name[-4:]: cmd = 'openssl pkcs12 -in {0} -passin pass:{1} -passout pass:{1}'.format(name, password) # depends on [control=['if'], data=[]] else: cmd = 'security find-certificate -c "{0}" -m -p'.format(name) out = __salt__['cmd.run'](cmd) matches = re.search('-----BEGIN CERTIFICATE-----(.*)-----END CERTIFICATE-----', out, re.DOTALL | re.MULTILINE) if matches: return matches.group(1) # depends on [control=['if'], data=[]] else: return False
def swap_vertices(self, i, j): """ Swap two vertices in the tree structure array. swap_vertex swaps the location of two vertices in a tree structure array. :param tree: the tree for which two vertices are to be swapped. :param i: the index of the first vertex to be swapped. :param j: the index of the second vertex to be swapped. :rval tree: the tree structure with the two vertex locations swapped. """ store_vertex_i = self.vertices[i] store_vertex_j = self.vertices[j] self.vertices[j] = store_vertex_i self.vertices[i] = store_vertex_j for k in range(len(self.vertices)): for swap_list in [self.vertices[k].children, self.vertices[k].parents]: if i in swap_list: swap_list[swap_list.index(i)] = -1 if j in swap_list: swap_list[swap_list.index(j)] = i if -1 in swap_list: swap_list[swap_list.index(-1)] = j
def function[swap_vertices, parameter[self, i, j]]: constant[ Swap two vertices in the tree structure array. swap_vertex swaps the location of two vertices in a tree structure array. :param tree: the tree for which two vertices are to be swapped. :param i: the index of the first vertex to be swapped. :param j: the index of the second vertex to be swapped. :rval tree: the tree structure with the two vertex locations swapped. ] variable[store_vertex_i] assign[=] call[name[self].vertices][name[i]] variable[store_vertex_j] assign[=] call[name[self].vertices][name[j]] call[name[self].vertices][name[j]] assign[=] name[store_vertex_i] call[name[self].vertices][name[i]] assign[=] name[store_vertex_j] for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].vertices]]]]] begin[:] for taget[name[swap_list]] in starred[list[[<ast.Attribute object at 0x7da1b1c66230>, <ast.Attribute object at 0x7da1b1c641c0>]]] begin[:] if compare[name[i] in name[swap_list]] begin[:] call[name[swap_list]][call[name[swap_list].index, parameter[name[i]]]] assign[=] <ast.UnaryOp object at 0x7da1b1c65b40> if compare[name[j] in name[swap_list]] begin[:] call[name[swap_list]][call[name[swap_list].index, parameter[name[j]]]] assign[=] name[i] if compare[<ast.UnaryOp object at 0x7da1b26ad300> in name[swap_list]] begin[:] call[name[swap_list]][call[name[swap_list].index, parameter[<ast.UnaryOp object at 0x7da1b1c0ce20>]]] assign[=] name[j]
keyword[def] identifier[swap_vertices] ( identifier[self] , identifier[i] , identifier[j] ): literal[string] identifier[store_vertex_i] = identifier[self] . identifier[vertices] [ identifier[i] ] identifier[store_vertex_j] = identifier[self] . identifier[vertices] [ identifier[j] ] identifier[self] . identifier[vertices] [ identifier[j] ]= identifier[store_vertex_i] identifier[self] . identifier[vertices] [ identifier[i] ]= identifier[store_vertex_j] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[vertices] )): keyword[for] identifier[swap_list] keyword[in] [ identifier[self] . identifier[vertices] [ identifier[k] ]. identifier[children] , identifier[self] . identifier[vertices] [ identifier[k] ]. identifier[parents] ]: keyword[if] identifier[i] keyword[in] identifier[swap_list] : identifier[swap_list] [ identifier[swap_list] . identifier[index] ( identifier[i] )]=- literal[int] keyword[if] identifier[j] keyword[in] identifier[swap_list] : identifier[swap_list] [ identifier[swap_list] . identifier[index] ( identifier[j] )]= identifier[i] keyword[if] - literal[int] keyword[in] identifier[swap_list] : identifier[swap_list] [ identifier[swap_list] . identifier[index] (- literal[int] )]= identifier[j]
def swap_vertices(self, i, j): """ Swap two vertices in the tree structure array. swap_vertex swaps the location of two vertices in a tree structure array. :param tree: the tree for which two vertices are to be swapped. :param i: the index of the first vertex to be swapped. :param j: the index of the second vertex to be swapped. :rval tree: the tree structure with the two vertex locations swapped. """ store_vertex_i = self.vertices[i] store_vertex_j = self.vertices[j] self.vertices[j] = store_vertex_i self.vertices[i] = store_vertex_j for k in range(len(self.vertices)): for swap_list in [self.vertices[k].children, self.vertices[k].parents]: if i in swap_list: swap_list[swap_list.index(i)] = -1 # depends on [control=['if'], data=['i', 'swap_list']] if j in swap_list: swap_list[swap_list.index(j)] = i # depends on [control=['if'], data=['j', 'swap_list']] if -1 in swap_list: swap_list[swap_list.index(-1)] = j # depends on [control=['if'], data=['swap_list']] # depends on [control=['for'], data=['swap_list']] # depends on [control=['for'], data=['k']]
def can_connect_to(self, other): """Whether a connection can be established between those two meshes.""" assert other.is_mesh() disconnected = not other.is_connected() and not self.is_connected() types_differ = self._is_consumed_mesh() != other._is_consumed_mesh() return disconnected and types_differ
def function[can_connect_to, parameter[self, other]]: constant[Whether a connection can be established between those two meshes.] assert[call[name[other].is_mesh, parameter[]]] variable[disconnected] assign[=] <ast.BoolOp object at 0x7da20c6e5030> variable[types_differ] assign[=] compare[call[name[self]._is_consumed_mesh, parameter[]] not_equal[!=] call[name[other]._is_consumed_mesh, parameter[]]] return[<ast.BoolOp object at 0x7da1b00da620>]
keyword[def] identifier[can_connect_to] ( identifier[self] , identifier[other] ): literal[string] keyword[assert] identifier[other] . identifier[is_mesh] () identifier[disconnected] = keyword[not] identifier[other] . identifier[is_connected] () keyword[and] keyword[not] identifier[self] . identifier[is_connected] () identifier[types_differ] = identifier[self] . identifier[_is_consumed_mesh] ()!= identifier[other] . identifier[_is_consumed_mesh] () keyword[return] identifier[disconnected] keyword[and] identifier[types_differ]
def can_connect_to(self, other): """Whether a connection can be established between those two meshes.""" assert other.is_mesh() disconnected = not other.is_connected() and (not self.is_connected()) types_differ = self._is_consumed_mesh() != other._is_consumed_mesh() return disconnected and types_differ
def rename_bika_setup(): """ Rename Bika Setup to just Setup to avoid naming confusions for new users """ logger.info("Renaming Bika Setup...") bika_setup = api.get_bika_setup() bika_setup.setTitle("Setup") bika_setup.reindexObject() setup = api.get_portal().portal_setup setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
def function[rename_bika_setup, parameter[]]: constant[ Rename Bika Setup to just Setup to avoid naming confusions for new users ] call[name[logger].info, parameter[constant[Renaming Bika Setup...]]] variable[bika_setup] assign[=] call[name[api].get_bika_setup, parameter[]] call[name[bika_setup].setTitle, parameter[constant[Setup]]] call[name[bika_setup].reindexObject, parameter[]] variable[setup] assign[=] call[name[api].get_portal, parameter[]].portal_setup call[name[setup].runImportStepFromProfile, parameter[constant[profile-bika.lims:default], constant[controlpanel]]]
keyword[def] identifier[rename_bika_setup] (): literal[string] identifier[logger] . identifier[info] ( literal[string] ) identifier[bika_setup] = identifier[api] . identifier[get_bika_setup] () identifier[bika_setup] . identifier[setTitle] ( literal[string] ) identifier[bika_setup] . identifier[reindexObject] () identifier[setup] = identifier[api] . identifier[get_portal] (). identifier[portal_setup] identifier[setup] . identifier[runImportStepFromProfile] ( literal[string] , literal[string] )
def rename_bika_setup(): """ Rename Bika Setup to just Setup to avoid naming confusions for new users """ logger.info('Renaming Bika Setup...') bika_setup = api.get_bika_setup() bika_setup.setTitle('Setup') bika_setup.reindexObject() setup = api.get_portal().portal_setup setup.runImportStepFromProfile('profile-bika.lims:default', 'controlpanel')
def _psi(self,m): """\psi(m) = -\int_m^\infty d m^2 \rho(m^2)""" return 2.*self.a2*(1./(1.+m/self.a)+numpy.log(m/(m+self.a)))
def function[_psi, parameter[self, m]]: constant[\psi(m) = -\int_m^\infty d m^2 ho(m^2)] return[binary_operation[binary_operation[constant[2.0] * name[self].a2] * binary_operation[binary_operation[constant[1.0] / binary_operation[constant[1.0] + binary_operation[name[m] / name[self].a]]] + call[name[numpy].log, parameter[binary_operation[name[m] / binary_operation[name[m] + name[self].a]]]]]]]
keyword[def] identifier[_psi] ( identifier[self] , identifier[m] ): literal[string] keyword[return] literal[int] * identifier[self] . identifier[a2] *( literal[int] /( literal[int] + identifier[m] / identifier[self] . identifier[a] )+ identifier[numpy] . identifier[log] ( identifier[m] /( identifier[m] + identifier[self] . identifier[a] )))
def _psi(self, m): """\\psi(m) = -\\int_m^\\infty d m^2 \rho(m^2)""" return 2.0 * self.a2 * (1.0 / (1.0 + m / self.a) + numpy.log(m / (m + self.a)))
def fit(self, X, y=None, **kwargs): """ Fits the estimator to discover the feature importances described by the data, then draws those importances as a bar plot. Parameters ---------- X : ndarray or DataFrame of shape n x m A matrix of n instances with m features y : ndarray or Series of length n An array or series of target or class values kwargs : dict Keyword arguments passed to the fit method of the estimator. Returns ------- self : visualizer The fit method must always return self to support pipelines. """ super(FeatureImportances, self).fit(X, y, **kwargs) # Get the feature importances from the model self.feature_importances_ = self._find_importances_param() # Get the classes from the model if is_classifier(self): self.classes_ = self._find_classes_param() else: self.classes_ = None self.stack = False # If self.stack = True and feature importances is a multidim array, # we're expecting a shape of (n_classes, n_features) # therefore we flatten by taking the average by # column to get shape (n_features,) (see LogisticRegression) if not self.stack and self.feature_importances_.ndim > 1: self.feature_importances_ = np.mean(self.feature_importances_, axis=0) warnings.warn(( "detected multi-dimensional feature importances but stack=False, " "using mean to aggregate them." 
), YellowbrickWarning) # Apply absolute value filter before normalization if self.absolute: self.feature_importances_ = np.abs(self.feature_importances_) # Normalize features relative to the maximum if self.relative: maxv = np.abs(self.feature_importances_).max() self.feature_importances_ /= maxv self.feature_importances_ *= 100.0 # Create labels for the feature importances # NOTE: this code is duplicated from MultiFeatureVisualizer if self.labels is None: # Use column names if a dataframe if is_dataframe(X): self.features_ = np.array(X.columns) # Otherwise use the column index as the labels else: _, ncols = X.shape self.features_ = np.arange(0, ncols) else: self.features_ = np.array(self.labels) # Sort the features and their importances if self.stack: sort_idx = np.argsort(np.mean(self.feature_importances_, 0)) self.features_ = self.features_[sort_idx] self.feature_importances_ = self.feature_importances_[:, sort_idx] else: sort_idx = np.argsort(self.feature_importances_) self.features_ = self.features_[sort_idx] self.feature_importances_ = self.feature_importances_[sort_idx] # Draw the feature importances self.draw() return self
def function[fit, parameter[self, X, y]]: constant[ Fits the estimator to discover the feature importances described by the data, then draws those importances as a bar plot. Parameters ---------- X : ndarray or DataFrame of shape n x m A matrix of n instances with m features y : ndarray or Series of length n An array or series of target or class values kwargs : dict Keyword arguments passed to the fit method of the estimator. Returns ------- self : visualizer The fit method must always return self to support pipelines. ] call[call[name[super], parameter[name[FeatureImportances], name[self]]].fit, parameter[name[X], name[y]]] name[self].feature_importances_ assign[=] call[name[self]._find_importances_param, parameter[]] if call[name[is_classifier], parameter[name[self]]] begin[:] name[self].classes_ assign[=] call[name[self]._find_classes_param, parameter[]] if <ast.BoolOp object at 0x7da18f00e200> begin[:] name[self].feature_importances_ assign[=] call[name[np].mean, parameter[name[self].feature_importances_]] call[name[warnings].warn, parameter[constant[detected multi-dimensional feature importances but stack=False, using mean to aggregate them.], name[YellowbrickWarning]]] if name[self].absolute begin[:] name[self].feature_importances_ assign[=] call[name[np].abs, parameter[name[self].feature_importances_]] if name[self].relative begin[:] variable[maxv] assign[=] call[call[name[np].abs, parameter[name[self].feature_importances_]].max, parameter[]] <ast.AugAssign object at 0x7da18f00c160> <ast.AugAssign object at 0x7da18f00c910> if compare[name[self].labels is constant[None]] begin[:] if call[name[is_dataframe], parameter[name[X]]] begin[:] name[self].features_ assign[=] call[name[np].array, parameter[name[X].columns]] if name[self].stack begin[:] variable[sort_idx] assign[=] call[name[np].argsort, parameter[call[name[np].mean, parameter[name[self].feature_importances_, constant[0]]]]] name[self].features_ assign[=] call[name[self].features_][name[sort_idx]] 
name[self].feature_importances_ assign[=] call[name[self].feature_importances_][tuple[[<ast.Slice object at 0x7da18bcc9750>, <ast.Name object at 0x7da18bcc84c0>]]] call[name[self].draw, parameter[]] return[name[self]]
keyword[def] identifier[fit] ( identifier[self] , identifier[X] , identifier[y] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[super] ( identifier[FeatureImportances] , identifier[self] ). identifier[fit] ( identifier[X] , identifier[y] ,** identifier[kwargs] ) identifier[self] . identifier[feature_importances_] = identifier[self] . identifier[_find_importances_param] () keyword[if] identifier[is_classifier] ( identifier[self] ): identifier[self] . identifier[classes_] = identifier[self] . identifier[_find_classes_param] () keyword[else] : identifier[self] . identifier[classes_] = keyword[None] identifier[self] . identifier[stack] = keyword[False] keyword[if] keyword[not] identifier[self] . identifier[stack] keyword[and] identifier[self] . identifier[feature_importances_] . identifier[ndim] > literal[int] : identifier[self] . identifier[feature_importances_] = identifier[np] . identifier[mean] ( identifier[self] . identifier[feature_importances_] , identifier[axis] = literal[int] ) identifier[warnings] . identifier[warn] (( literal[string] literal[string] ), identifier[YellowbrickWarning] ) keyword[if] identifier[self] . identifier[absolute] : identifier[self] . identifier[feature_importances_] = identifier[np] . identifier[abs] ( identifier[self] . identifier[feature_importances_] ) keyword[if] identifier[self] . identifier[relative] : identifier[maxv] = identifier[np] . identifier[abs] ( identifier[self] . identifier[feature_importances_] ). identifier[max] () identifier[self] . identifier[feature_importances_] /= identifier[maxv] identifier[self] . identifier[feature_importances_] *= literal[int] keyword[if] identifier[self] . identifier[labels] keyword[is] keyword[None] : keyword[if] identifier[is_dataframe] ( identifier[X] ): identifier[self] . identifier[features_] = identifier[np] . identifier[array] ( identifier[X] . identifier[columns] ) keyword[else] : identifier[_] , identifier[ncols] = identifier[X] . 
identifier[shape] identifier[self] . identifier[features_] = identifier[np] . identifier[arange] ( literal[int] , identifier[ncols] ) keyword[else] : identifier[self] . identifier[features_] = identifier[np] . identifier[array] ( identifier[self] . identifier[labels] ) keyword[if] identifier[self] . identifier[stack] : identifier[sort_idx] = identifier[np] . identifier[argsort] ( identifier[np] . identifier[mean] ( identifier[self] . identifier[feature_importances_] , literal[int] )) identifier[self] . identifier[features_] = identifier[self] . identifier[features_] [ identifier[sort_idx] ] identifier[self] . identifier[feature_importances_] = identifier[self] . identifier[feature_importances_] [:, identifier[sort_idx] ] keyword[else] : identifier[sort_idx] = identifier[np] . identifier[argsort] ( identifier[self] . identifier[feature_importances_] ) identifier[self] . identifier[features_] = identifier[self] . identifier[features_] [ identifier[sort_idx] ] identifier[self] . identifier[feature_importances_] = identifier[self] . identifier[feature_importances_] [ identifier[sort_idx] ] identifier[self] . identifier[draw] () keyword[return] identifier[self]
def fit(self, X, y=None, **kwargs): """ Fits the estimator to discover the feature importances described by the data, then draws those importances as a bar plot. Parameters ---------- X : ndarray or DataFrame of shape n x m A matrix of n instances with m features y : ndarray or Series of length n An array or series of target or class values kwargs : dict Keyword arguments passed to the fit method of the estimator. Returns ------- self : visualizer The fit method must always return self to support pipelines. """ super(FeatureImportances, self).fit(X, y, **kwargs) # Get the feature importances from the model self.feature_importances_ = self._find_importances_param() # Get the classes from the model if is_classifier(self): self.classes_ = self._find_classes_param() # depends on [control=['if'], data=[]] else: self.classes_ = None self.stack = False # If self.stack = True and feature importances is a multidim array, # we're expecting a shape of (n_classes, n_features) # therefore we flatten by taking the average by # column to get shape (n_features,) (see LogisticRegression) if not self.stack and self.feature_importances_.ndim > 1: self.feature_importances_ = np.mean(self.feature_importances_, axis=0) warnings.warn('detected multi-dimensional feature importances but stack=False, using mean to aggregate them.', YellowbrickWarning) # depends on [control=['if'], data=[]] # Apply absolute value filter before normalization if self.absolute: self.feature_importances_ = np.abs(self.feature_importances_) # depends on [control=['if'], data=[]] # Normalize features relative to the maximum if self.relative: maxv = np.abs(self.feature_importances_).max() self.feature_importances_ /= maxv self.feature_importances_ *= 100.0 # depends on [control=['if'], data=[]] # Create labels for the feature importances # NOTE: this code is duplicated from MultiFeatureVisualizer if self.labels is None: # Use column names if a dataframe if is_dataframe(X): self.features_ = np.array(X.columns) # 
depends on [control=['if'], data=[]] else: # Otherwise use the column index as the labels (_, ncols) = X.shape self.features_ = np.arange(0, ncols) # depends on [control=['if'], data=[]] else: self.features_ = np.array(self.labels) # Sort the features and their importances if self.stack: sort_idx = np.argsort(np.mean(self.feature_importances_, 0)) self.features_ = self.features_[sort_idx] self.feature_importances_ = self.feature_importances_[:, sort_idx] # depends on [control=['if'], data=[]] else: sort_idx = np.argsort(self.feature_importances_) self.features_ = self.features_[sort_idx] self.feature_importances_ = self.feature_importances_[sort_idx] # Draw the feature importances self.draw() return self
def showEvent(self, event): """ Ensures this widget is the top-most widget for its parent. :param event | <QtCore.QEvent> """ super(XOverlayWidget, self).showEvent(event) # raise to the top self.raise_() self._closeButton.setVisible(self.isClosable()) widget = self.centralWidget() if widget: center = self.rect().center() start_x = end_x = center.x() - widget.width() / 2 start_y = -widget.height() end_y = center.y() - widget.height() / 2 start = QtCore.QPoint(start_x, start_y) end = QtCore.QPoint(end_x, end_y) # create the movement animation anim = QtCore.QPropertyAnimation(self) anim.setPropertyName('pos') anim.setTargetObject(widget) anim.setStartValue(start) anim.setEndValue(end) anim.setDuration(500) anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad) anim.finished.connect(anim.deleteLater) anim.start()
def function[showEvent, parameter[self, event]]: constant[ Ensures this widget is the top-most widget for its parent. :param event | <QtCore.QEvent> ] call[call[name[super], parameter[name[XOverlayWidget], name[self]]].showEvent, parameter[name[event]]] call[name[self].raise_, parameter[]] call[name[self]._closeButton.setVisible, parameter[call[name[self].isClosable, parameter[]]]] variable[widget] assign[=] call[name[self].centralWidget, parameter[]] if name[widget] begin[:] variable[center] assign[=] call[call[name[self].rect, parameter[]].center, parameter[]] variable[start_x] assign[=] binary_operation[call[name[center].x, parameter[]] - binary_operation[call[name[widget].width, parameter[]] / constant[2]]] variable[start_y] assign[=] <ast.UnaryOp object at 0x7da18bcc80a0> variable[end_y] assign[=] binary_operation[call[name[center].y, parameter[]] - binary_operation[call[name[widget].height, parameter[]] / constant[2]]] variable[start] assign[=] call[name[QtCore].QPoint, parameter[name[start_x], name[start_y]]] variable[end] assign[=] call[name[QtCore].QPoint, parameter[name[end_x], name[end_y]]] variable[anim] assign[=] call[name[QtCore].QPropertyAnimation, parameter[name[self]]] call[name[anim].setPropertyName, parameter[constant[pos]]] call[name[anim].setTargetObject, parameter[name[widget]]] call[name[anim].setStartValue, parameter[name[start]]] call[name[anim].setEndValue, parameter[name[end]]] call[name[anim].setDuration, parameter[constant[500]]] call[name[anim].setEasingCurve, parameter[name[QtCore].QEasingCurve.InOutQuad]] call[name[anim].finished.connect, parameter[name[anim].deleteLater]] call[name[anim].start, parameter[]]
keyword[def] identifier[showEvent] ( identifier[self] , identifier[event] ): literal[string] identifier[super] ( identifier[XOverlayWidget] , identifier[self] ). identifier[showEvent] ( identifier[event] ) identifier[self] . identifier[raise_] () identifier[self] . identifier[_closeButton] . identifier[setVisible] ( identifier[self] . identifier[isClosable] ()) identifier[widget] = identifier[self] . identifier[centralWidget] () keyword[if] identifier[widget] : identifier[center] = identifier[self] . identifier[rect] (). identifier[center] () identifier[start_x] = identifier[end_x] = identifier[center] . identifier[x] ()- identifier[widget] . identifier[width] ()/ literal[int] identifier[start_y] =- identifier[widget] . identifier[height] () identifier[end_y] = identifier[center] . identifier[y] ()- identifier[widget] . identifier[height] ()/ literal[int] identifier[start] = identifier[QtCore] . identifier[QPoint] ( identifier[start_x] , identifier[start_y] ) identifier[end] = identifier[QtCore] . identifier[QPoint] ( identifier[end_x] , identifier[end_y] ) identifier[anim] = identifier[QtCore] . identifier[QPropertyAnimation] ( identifier[self] ) identifier[anim] . identifier[setPropertyName] ( literal[string] ) identifier[anim] . identifier[setTargetObject] ( identifier[widget] ) identifier[anim] . identifier[setStartValue] ( identifier[start] ) identifier[anim] . identifier[setEndValue] ( identifier[end] ) identifier[anim] . identifier[setDuration] ( literal[int] ) identifier[anim] . identifier[setEasingCurve] ( identifier[QtCore] . identifier[QEasingCurve] . identifier[InOutQuad] ) identifier[anim] . identifier[finished] . identifier[connect] ( identifier[anim] . identifier[deleteLater] ) identifier[anim] . identifier[start] ()
def showEvent(self, event): """ Ensures this widget is the top-most widget for its parent. :param event | <QtCore.QEvent> """ super(XOverlayWidget, self).showEvent(event) # raise to the top self.raise_() self._closeButton.setVisible(self.isClosable()) widget = self.centralWidget() if widget: center = self.rect().center() start_x = end_x = center.x() - widget.width() / 2 start_y = -widget.height() end_y = center.y() - widget.height() / 2 start = QtCore.QPoint(start_x, start_y) end = QtCore.QPoint(end_x, end_y) # create the movement animation anim = QtCore.QPropertyAnimation(self) anim.setPropertyName('pos') anim.setTargetObject(widget) anim.setStartValue(start) anim.setEndValue(end) anim.setDuration(500) anim.setEasingCurve(QtCore.QEasingCurve.InOutQuad) anim.finished.connect(anim.deleteLater) anim.start() # depends on [control=['if'], data=[]]
def server_group_list(request): """Utility method to retrieve a list of server groups.""" try: return api.nova.server_group_list(request) except Exception: exceptions.handle(request, _('Unable to retrieve Nova server groups.')) return []
def function[server_group_list, parameter[request]]: constant[Utility method to retrieve a list of server groups.] <ast.Try object at 0x7da1b18dda80>
keyword[def] identifier[server_group_list] ( identifier[request] ): literal[string] keyword[try] : keyword[return] identifier[api] . identifier[nova] . identifier[server_group_list] ( identifier[request] ) keyword[except] identifier[Exception] : identifier[exceptions] . identifier[handle] ( identifier[request] , identifier[_] ( literal[string] )) keyword[return] []
def server_group_list(request): """Utility method to retrieve a list of server groups.""" try: return api.nova.server_group_list(request) # depends on [control=['try'], data=[]] except Exception: exceptions.handle(request, _('Unable to retrieve Nova server groups.')) return [] # depends on [control=['except'], data=[]]
def display(fig=None, closefig=True, **kwargs): """ Convert a Matplotlib Figure to a Leaflet map. Embed in IPython notebook. Parameters ---------- fig : figure, default gcf() Figure used to convert to map closefig : boolean, default True Close the current Figure """ from IPython.display import HTML if fig is None: fig = plt.gcf() if closefig: plt.close(fig) html = fig_to_html(fig, **kwargs) # We embed everything in an iframe. iframe_html = '<iframe src="data:text/html;base64,{html}" width="{width}" height="{height}"></iframe>'\ .format(html = base64.b64encode(html.encode('utf8')).decode('utf8'), width = '100%', height= int(60.*fig.get_figheight()), ) return HTML(iframe_html)
def function[display, parameter[fig, closefig]]: constant[ Convert a Matplotlib Figure to a Leaflet map. Embed in IPython notebook. Parameters ---------- fig : figure, default gcf() Figure used to convert to map closefig : boolean, default True Close the current Figure ] from relative_module[IPython.display] import module[HTML] if compare[name[fig] is constant[None]] begin[:] variable[fig] assign[=] call[name[plt].gcf, parameter[]] if name[closefig] begin[:] call[name[plt].close, parameter[name[fig]]] variable[html] assign[=] call[name[fig_to_html], parameter[name[fig]]] variable[iframe_html] assign[=] call[constant[<iframe src="data:text/html;base64,{html}" width="{width}" height="{height}"></iframe>].format, parameter[]] return[call[name[HTML], parameter[name[iframe_html]]]]
keyword[def] identifier[display] ( identifier[fig] = keyword[None] , identifier[closefig] = keyword[True] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[IPython] . identifier[display] keyword[import] identifier[HTML] keyword[if] identifier[fig] keyword[is] keyword[None] : identifier[fig] = identifier[plt] . identifier[gcf] () keyword[if] identifier[closefig] : identifier[plt] . identifier[close] ( identifier[fig] ) identifier[html] = identifier[fig_to_html] ( identifier[fig] ,** identifier[kwargs] ) identifier[iframe_html] = literal[string] . identifier[format] ( identifier[html] = identifier[base64] . identifier[b64encode] ( identifier[html] . identifier[encode] ( literal[string] )). identifier[decode] ( literal[string] ), identifier[width] = literal[string] , identifier[height] = identifier[int] ( literal[int] * identifier[fig] . identifier[get_figheight] ()), ) keyword[return] identifier[HTML] ( identifier[iframe_html] )
def display(fig=None, closefig=True, **kwargs): """ Convert a Matplotlib Figure to a Leaflet map. Embed in IPython notebook. Parameters ---------- fig : figure, default gcf() Figure used to convert to map closefig : boolean, default True Close the current Figure """ from IPython.display import HTML if fig is None: fig = plt.gcf() # depends on [control=['if'], data=['fig']] if closefig: plt.close(fig) # depends on [control=['if'], data=[]] html = fig_to_html(fig, **kwargs) # We embed everything in an iframe. iframe_html = '<iframe src="data:text/html;base64,{html}" width="{width}" height="{height}"></iframe>'.format(html=base64.b64encode(html.encode('utf8')).decode('utf8'), width='100%', height=int(60.0 * fig.get_figheight())) return HTML(iframe_html)
def _dispatch_gen(self): """ Process the generate subset of commands. """ if not os.path.isdir(self._args.output): raise exception.Base("%s is not a writeable directory" % self._args.output) if not os.path.isfile(self._args.models_definition): if not self.check_package_exists(self._args.models_definition): raise exception.Base("failed to locate package or models definitions file at: %s" % self._args.models_definition) from prestans.devel.gen import Preplate preplate = Preplate( template_type=self._args.template, models_definition=self._args.models_definition, namespace=self._args.namespace, filter_namespace=self._args.filter_namespace, output_directory=self._args.output) preplate.run()
def function[_dispatch_gen, parameter[self]]: constant[ Process the generate subset of commands. ] if <ast.UnaryOp object at 0x7da1b0a855d0> begin[:] <ast.Raise object at 0x7da1b0a87640> if <ast.UnaryOp object at 0x7da1b0a85360> begin[:] if <ast.UnaryOp object at 0x7da1b0a84b50> begin[:] <ast.Raise object at 0x7da1b0a86350> from relative_module[prestans.devel.gen] import module[Preplate] variable[preplate] assign[=] call[name[Preplate], parameter[]] call[name[preplate].run, parameter[]]
keyword[def] identifier[_dispatch_gen] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[self] . identifier[_args] . identifier[output] ): keyword[raise] identifier[exception] . identifier[Base] ( literal[string] % identifier[self] . identifier[_args] . identifier[output] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[_args] . identifier[models_definition] ): keyword[if] keyword[not] identifier[self] . identifier[check_package_exists] ( identifier[self] . identifier[_args] . identifier[models_definition] ): keyword[raise] identifier[exception] . identifier[Base] ( literal[string] % identifier[self] . identifier[_args] . identifier[models_definition] ) keyword[from] identifier[prestans] . identifier[devel] . identifier[gen] keyword[import] identifier[Preplate] identifier[preplate] = identifier[Preplate] ( identifier[template_type] = identifier[self] . identifier[_args] . identifier[template] , identifier[models_definition] = identifier[self] . identifier[_args] . identifier[models_definition] , identifier[namespace] = identifier[self] . identifier[_args] . identifier[namespace] , identifier[filter_namespace] = identifier[self] . identifier[_args] . identifier[filter_namespace] , identifier[output_directory] = identifier[self] . identifier[_args] . identifier[output] ) identifier[preplate] . identifier[run] ()
def _dispatch_gen(self): """ Process the generate subset of commands. """ if not os.path.isdir(self._args.output): raise exception.Base('%s is not a writeable directory' % self._args.output) # depends on [control=['if'], data=[]] if not os.path.isfile(self._args.models_definition): if not self.check_package_exists(self._args.models_definition): raise exception.Base('failed to locate package or models definitions file at: %s' % self._args.models_definition) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] from prestans.devel.gen import Preplate preplate = Preplate(template_type=self._args.template, models_definition=self._args.models_definition, namespace=self._args.namespace, filter_namespace=self._args.filter_namespace, output_directory=self._args.output) preplate.run()
def deleteTable(self, login, tableName): """ Parameters: - login - tableName """ self.send_deleteTable(login, tableName) self.recv_deleteTable()
def function[deleteTable, parameter[self, login, tableName]]: constant[ Parameters: - login - tableName ] call[name[self].send_deleteTable, parameter[name[login], name[tableName]]] call[name[self].recv_deleteTable, parameter[]]
keyword[def] identifier[deleteTable] ( identifier[self] , identifier[login] , identifier[tableName] ): literal[string] identifier[self] . identifier[send_deleteTable] ( identifier[login] , identifier[tableName] ) identifier[self] . identifier[recv_deleteTable] ()
def deleteTable(self, login, tableName): """ Parameters: - login - tableName """ self.send_deleteTable(login, tableName) self.recv_deleteTable()
def to_code_array(self): """Replaces everything in code_array from xls_file""" self._xls2shape() worksheets = self.workbook.sheet_names() for tab, worksheet_name in enumerate(worksheets): worksheet = self.workbook.sheet_by_name(worksheet_name) self._xls2code(worksheet, tab) self._xls2attributes(worksheet, tab) self._xls2row_heights(worksheet, tab) self._xls2col_widths(worksheet, tab)
def function[to_code_array, parameter[self]]: constant[Replaces everything in code_array from xls_file] call[name[self]._xls2shape, parameter[]] variable[worksheets] assign[=] call[name[self].workbook.sheet_names, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b151b400>, <ast.Name object at 0x7da1b151b430>]]] in starred[call[name[enumerate], parameter[name[worksheets]]]] begin[:] variable[worksheet] assign[=] call[name[self].workbook.sheet_by_name, parameter[name[worksheet_name]]] call[name[self]._xls2code, parameter[name[worksheet], name[tab]]] call[name[self]._xls2attributes, parameter[name[worksheet], name[tab]]] call[name[self]._xls2row_heights, parameter[name[worksheet], name[tab]]] call[name[self]._xls2col_widths, parameter[name[worksheet], name[tab]]]
keyword[def] identifier[to_code_array] ( identifier[self] ): literal[string] identifier[self] . identifier[_xls2shape] () identifier[worksheets] = identifier[self] . identifier[workbook] . identifier[sheet_names] () keyword[for] identifier[tab] , identifier[worksheet_name] keyword[in] identifier[enumerate] ( identifier[worksheets] ): identifier[worksheet] = identifier[self] . identifier[workbook] . identifier[sheet_by_name] ( identifier[worksheet_name] ) identifier[self] . identifier[_xls2code] ( identifier[worksheet] , identifier[tab] ) identifier[self] . identifier[_xls2attributes] ( identifier[worksheet] , identifier[tab] ) identifier[self] . identifier[_xls2row_heights] ( identifier[worksheet] , identifier[tab] ) identifier[self] . identifier[_xls2col_widths] ( identifier[worksheet] , identifier[tab] )
def to_code_array(self): """Replaces everything in code_array from xls_file""" self._xls2shape() worksheets = self.workbook.sheet_names() for (tab, worksheet_name) in enumerate(worksheets): worksheet = self.workbook.sheet_by_name(worksheet_name) self._xls2code(worksheet, tab) self._xls2attributes(worksheet, tab) self._xls2row_heights(worksheet, tab) self._xls2col_widths(worksheet, tab) # depends on [control=['for'], data=[]]
def _file_iter_range(fp, offset, bytes, maxread=1024*1024): ''' Yield chunks from a range in a file. No chunk is bigger than maxread.''' fp.seek(offset) while bytes > 0: part = fp.read(min(bytes, maxread)) if not part: break bytes -= len(part) yield part
def function[_file_iter_range, parameter[fp, offset, bytes, maxread]]: constant[ Yield chunks from a range in a file. No chunk is bigger than maxread.] call[name[fp].seek, parameter[name[offset]]] while compare[name[bytes] greater[>] constant[0]] begin[:] variable[part] assign[=] call[name[fp].read, parameter[call[name[min], parameter[name[bytes], name[maxread]]]]] if <ast.UnaryOp object at 0x7da20e9b1b10> begin[:] break <ast.AugAssign object at 0x7da20e9b2230> <ast.Yield object at 0x7da20e9b1bd0>
keyword[def] identifier[_file_iter_range] ( identifier[fp] , identifier[offset] , identifier[bytes] , identifier[maxread] = literal[int] * literal[int] ): literal[string] identifier[fp] . identifier[seek] ( identifier[offset] ) keyword[while] identifier[bytes] > literal[int] : identifier[part] = identifier[fp] . identifier[read] ( identifier[min] ( identifier[bytes] , identifier[maxread] )) keyword[if] keyword[not] identifier[part] : keyword[break] identifier[bytes] -= identifier[len] ( identifier[part] ) keyword[yield] identifier[part]
def _file_iter_range(fp, offset, bytes, maxread=1024 * 1024): """ Yield chunks from a range in a file. No chunk is bigger than maxread.""" fp.seek(offset) while bytes > 0: part = fp.read(min(bytes, maxread)) if not part: break # depends on [control=['if'], data=[]] bytes -= len(part) yield part # depends on [control=['while'], data=['bytes']]
def generate_inventory(roles, networks, inventory_path, check_networks=False, fake_interfaces=None, fake_networks=None): """Generate an inventory file in the ini format. The inventory is generated using the ``roles`` in the ``ini`` format. If ``check_network == True``, the function will try to discover which networks interfaces are available and map them to one network of the ``networks`` parameters. Note that this auto-discovery feature requires the servers to have their IP set. Args: roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` networks (list): network list as returned by :py:meth:`enoslib.infra.provider.Provider.init` inventory_path (str): path to the inventory to generate check_networks (bool): True to enable the auto-discovery of the mapping interface name <-> network role fake_interfaces (list): names of optionnal dummy interfaces to create on the nodes fake_networks (list): names of the roles to associate with the fake interfaces. Like reguilar network interfaces, the mapping will be added to the host vars. Internally this will be zipped with the fake_interfaces to produce the mapping. """ with open(inventory_path, "w") as f: f.write(_generate_inventory(roles)) if check_networks: discover_networks( roles, networks, fake_interfaces=fake_interfaces, fake_networks=fake_networks ) with open(inventory_path, "w") as f: f.write(_generate_inventory(roles))
def function[generate_inventory, parameter[roles, networks, inventory_path, check_networks, fake_interfaces, fake_networks]]: constant[Generate an inventory file in the ini format. The inventory is generated using the ``roles`` in the ``ini`` format. If ``check_network == True``, the function will try to discover which networks interfaces are available and map them to one network of the ``networks`` parameters. Note that this auto-discovery feature requires the servers to have their IP set. Args: roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` networks (list): network list as returned by :py:meth:`enoslib.infra.provider.Provider.init` inventory_path (str): path to the inventory to generate check_networks (bool): True to enable the auto-discovery of the mapping interface name <-> network role fake_interfaces (list): names of optionnal dummy interfaces to create on the nodes fake_networks (list): names of the roles to associate with the fake interfaces. Like reguilar network interfaces, the mapping will be added to the host vars. Internally this will be zipped with the fake_interfaces to produce the mapping. ] with call[name[open], parameter[name[inventory_path], constant[w]]] begin[:] call[name[f].write, parameter[call[name[_generate_inventory], parameter[name[roles]]]]] if name[check_networks] begin[:] call[name[discover_networks], parameter[name[roles], name[networks]]] with call[name[open], parameter[name[inventory_path], constant[w]]] begin[:] call[name[f].write, parameter[call[name[_generate_inventory], parameter[name[roles]]]]]
keyword[def] identifier[generate_inventory] ( identifier[roles] , identifier[networks] , identifier[inventory_path] , identifier[check_networks] = keyword[False] , identifier[fake_interfaces] = keyword[None] , identifier[fake_networks] = keyword[None] ): literal[string] keyword[with] identifier[open] ( identifier[inventory_path] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[_generate_inventory] ( identifier[roles] )) keyword[if] identifier[check_networks] : identifier[discover_networks] ( identifier[roles] , identifier[networks] , identifier[fake_interfaces] = identifier[fake_interfaces] , identifier[fake_networks] = identifier[fake_networks] ) keyword[with] identifier[open] ( identifier[inventory_path] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[_generate_inventory] ( identifier[roles] ))
def generate_inventory(roles, networks, inventory_path, check_networks=False, fake_interfaces=None, fake_networks=None): """Generate an inventory file in the ini format. The inventory is generated using the ``roles`` in the ``ini`` format. If ``check_network == True``, the function will try to discover which networks interfaces are available and map them to one network of the ``networks`` parameters. Note that this auto-discovery feature requires the servers to have their IP set. Args: roles (dict): role->hosts mapping as returned by :py:meth:`enoslib.infra.provider.Provider.init` networks (list): network list as returned by :py:meth:`enoslib.infra.provider.Provider.init` inventory_path (str): path to the inventory to generate check_networks (bool): True to enable the auto-discovery of the mapping interface name <-> network role fake_interfaces (list): names of optionnal dummy interfaces to create on the nodes fake_networks (list): names of the roles to associate with the fake interfaces. Like reguilar network interfaces, the mapping will be added to the host vars. Internally this will be zipped with the fake_interfaces to produce the mapping. """ with open(inventory_path, 'w') as f: f.write(_generate_inventory(roles)) # depends on [control=['with'], data=['f']] if check_networks: discover_networks(roles, networks, fake_interfaces=fake_interfaces, fake_networks=fake_networks) with open(inventory_path, 'w') as f: f.write(_generate_inventory(roles)) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
def decrypt_document(self, document_id, encrypted_content): """ Decrypt a previously encrypted content using the secret store keys identified by document_id. Note that decryption requires permission already granted to the consumer account. :param document_id: hex str id of document to use for encryption session :param encrypted_content: hex str -- the encrypted content from a previous `encrypt_document` operation :return: None -- if decryption failed str -- the original content that was encrypted previously """ return self._secret_store_client(self._account).decrypt_document( remove_0x_prefix(document_id), encrypted_content )
def function[decrypt_document, parameter[self, document_id, encrypted_content]]: constant[ Decrypt a previously encrypted content using the secret store keys identified by document_id. Note that decryption requires permission already granted to the consumer account. :param document_id: hex str id of document to use for encryption session :param encrypted_content: hex str -- the encrypted content from a previous `encrypt_document` operation :return: None -- if decryption failed str -- the original content that was encrypted previously ] return[call[call[name[self]._secret_store_client, parameter[name[self]._account]].decrypt_document, parameter[call[name[remove_0x_prefix], parameter[name[document_id]]], name[encrypted_content]]]]
keyword[def] identifier[decrypt_document] ( identifier[self] , identifier[document_id] , identifier[encrypted_content] ): literal[string] keyword[return] identifier[self] . identifier[_secret_store_client] ( identifier[self] . identifier[_account] ). identifier[decrypt_document] ( identifier[remove_0x_prefix] ( identifier[document_id] ), identifier[encrypted_content] )
def decrypt_document(self, document_id, encrypted_content): """ Decrypt a previously encrypted content using the secret store keys identified by document_id. Note that decryption requires permission already granted to the consumer account. :param document_id: hex str id of document to use for encryption session :param encrypted_content: hex str -- the encrypted content from a previous `encrypt_document` operation :return: None -- if decryption failed str -- the original content that was encrypted previously """ return self._secret_store_client(self._account).decrypt_document(remove_0x_prefix(document_id), encrypted_content)
def get_operation_pattern(server_url, request_url_pattern): """Return an updated request URL pattern with the server URL removed.""" if server_url[-1] == "/": # operations have to start with a slash, so do not remove it server_url = server_url[:-1] if is_absolute(server_url): return request_url_pattern.replace(server_url, "", 1) return path_qs(request_url_pattern).replace(server_url, "", 1)
def function[get_operation_pattern, parameter[server_url, request_url_pattern]]: constant[Return an updated request URL pattern with the server URL removed.] if compare[call[name[server_url]][<ast.UnaryOp object at 0x7da18f8111e0>] equal[==] constant[/]] begin[:] variable[server_url] assign[=] call[name[server_url]][<ast.Slice object at 0x7da18f813dc0>] if call[name[is_absolute], parameter[name[server_url]]] begin[:] return[call[name[request_url_pattern].replace, parameter[name[server_url], constant[], constant[1]]]] return[call[call[name[path_qs], parameter[name[request_url_pattern]]].replace, parameter[name[server_url], constant[], constant[1]]]]
keyword[def] identifier[get_operation_pattern] ( identifier[server_url] , identifier[request_url_pattern] ): literal[string] keyword[if] identifier[server_url] [- literal[int] ]== literal[string] : identifier[server_url] = identifier[server_url] [:- literal[int] ] keyword[if] identifier[is_absolute] ( identifier[server_url] ): keyword[return] identifier[request_url_pattern] . identifier[replace] ( identifier[server_url] , literal[string] , literal[int] ) keyword[return] identifier[path_qs] ( identifier[request_url_pattern] ). identifier[replace] ( identifier[server_url] , literal[string] , literal[int] )
def get_operation_pattern(server_url, request_url_pattern): """Return an updated request URL pattern with the server URL removed.""" if server_url[-1] == '/': # operations have to start with a slash, so do not remove it server_url = server_url[:-1] # depends on [control=['if'], data=[]] if is_absolute(server_url): return request_url_pattern.replace(server_url, '', 1) # depends on [control=['if'], data=[]] return path_qs(request_url_pattern).replace(server_url, '', 1)
def sync(collector): """Sync an environment""" amazon = collector.configuration['amazon'] aws_syncr = collector.configuration['aws_syncr'] # Convert everything before we try and sync anything log.info("Converting configuration") converted = {} for thing in collector.configuration["__registered__"]: if thing in collector.configuration: converted[thing] = collector.configuration[thing] # Do the sync for typ in collector.configuration["__registered__"]: if typ in converted: thing = converted[typ] if not aws_syncr.artifact or aws_syncr.artifact == typ: log.info("Syncing {0}".format(typ)) for name, item in thing.items.items(): thing.sync_one(aws_syncr, amazon, item) if not amazon.changes: log.info("No changes were made!!")
def function[sync, parameter[collector]]: constant[Sync an environment] variable[amazon] assign[=] call[name[collector].configuration][constant[amazon]] variable[aws_syncr] assign[=] call[name[collector].configuration][constant[aws_syncr]] call[name[log].info, parameter[constant[Converting configuration]]] variable[converted] assign[=] dictionary[[], []] for taget[name[thing]] in starred[call[name[collector].configuration][constant[__registered__]]] begin[:] if compare[name[thing] in name[collector].configuration] begin[:] call[name[converted]][name[thing]] assign[=] call[name[collector].configuration][name[thing]] for taget[name[typ]] in starred[call[name[collector].configuration][constant[__registered__]]] begin[:] if compare[name[typ] in name[converted]] begin[:] variable[thing] assign[=] call[name[converted]][name[typ]] if <ast.BoolOp object at 0x7da20c990910> begin[:] call[name[log].info, parameter[call[constant[Syncing {0}].format, parameter[name[typ]]]]] for taget[tuple[[<ast.Name object at 0x7da20c993ac0>, <ast.Name object at 0x7da20c991540>]]] in starred[call[name[thing].items.items, parameter[]]] begin[:] call[name[thing].sync_one, parameter[name[aws_syncr], name[amazon], name[item]]] if <ast.UnaryOp object at 0x7da20c990700> begin[:] call[name[log].info, parameter[constant[No changes were made!!]]]
keyword[def] identifier[sync] ( identifier[collector] ): literal[string] identifier[amazon] = identifier[collector] . identifier[configuration] [ literal[string] ] identifier[aws_syncr] = identifier[collector] . identifier[configuration] [ literal[string] ] identifier[log] . identifier[info] ( literal[string] ) identifier[converted] ={} keyword[for] identifier[thing] keyword[in] identifier[collector] . identifier[configuration] [ literal[string] ]: keyword[if] identifier[thing] keyword[in] identifier[collector] . identifier[configuration] : identifier[converted] [ identifier[thing] ]= identifier[collector] . identifier[configuration] [ identifier[thing] ] keyword[for] identifier[typ] keyword[in] identifier[collector] . identifier[configuration] [ literal[string] ]: keyword[if] identifier[typ] keyword[in] identifier[converted] : identifier[thing] = identifier[converted] [ identifier[typ] ] keyword[if] keyword[not] identifier[aws_syncr] . identifier[artifact] keyword[or] identifier[aws_syncr] . identifier[artifact] == identifier[typ] : identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[typ] )) keyword[for] identifier[name] , identifier[item] keyword[in] identifier[thing] . identifier[items] . identifier[items] (): identifier[thing] . identifier[sync_one] ( identifier[aws_syncr] , identifier[amazon] , identifier[item] ) keyword[if] keyword[not] identifier[amazon] . identifier[changes] : identifier[log] . identifier[info] ( literal[string] )
def sync(collector): """Sync an environment""" amazon = collector.configuration['amazon'] aws_syncr = collector.configuration['aws_syncr'] # Convert everything before we try and sync anything log.info('Converting configuration') converted = {} for thing in collector.configuration['__registered__']: if thing in collector.configuration: converted[thing] = collector.configuration[thing] # depends on [control=['if'], data=['thing']] # depends on [control=['for'], data=['thing']] # Do the sync for typ in collector.configuration['__registered__']: if typ in converted: thing = converted[typ] if not aws_syncr.artifact or aws_syncr.artifact == typ: log.info('Syncing {0}'.format(typ)) for (name, item) in thing.items.items(): thing.sync_one(aws_syncr, amazon, item) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['typ', 'converted']] # depends on [control=['for'], data=['typ']] if not amazon.changes: log.info('No changes were made!!') # depends on [control=['if'], data=[]]
def ep(self, exc: Exception) -> bool: """Return False if the exception had not been handled gracefully""" if not isinstance(exc, ConnectionAbortedError): return False if len(exc.args) != 2: return False origin, reason = exc.args logging.getLogger(__name__).warning('Exited') return True
def function[ep, parameter[self, exc]]: constant[Return False if the exception had not been handled gracefully] if <ast.UnaryOp object at 0x7da1b26ae920> begin[:] return[constant[False]] if compare[call[name[len], parameter[name[exc].args]] not_equal[!=] constant[2]] begin[:] return[constant[False]] <ast.Tuple object at 0x7da1b26aeec0> assign[=] name[exc].args call[call[name[logging].getLogger, parameter[name[__name__]]].warning, parameter[constant[Exited]]] return[constant[True]]
keyword[def] identifier[ep] ( identifier[self] , identifier[exc] : identifier[Exception] )-> identifier[bool] : literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[exc] , identifier[ConnectionAbortedError] ): keyword[return] keyword[False] keyword[if] identifier[len] ( identifier[exc] . identifier[args] )!= literal[int] : keyword[return] keyword[False] identifier[origin] , identifier[reason] = identifier[exc] . identifier[args] identifier[logging] . identifier[getLogger] ( identifier[__name__] ). identifier[warning] ( literal[string] ) keyword[return] keyword[True]
def ep(self, exc: Exception) -> bool: """Return False if the exception had not been handled gracefully""" if not isinstance(exc, ConnectionAbortedError): return False # depends on [control=['if'], data=[]] if len(exc.args) != 2: return False # depends on [control=['if'], data=[]] (origin, reason) = exc.args logging.getLogger(__name__).warning('Exited') return True
def routes(self): """ Retrieves the main routes of the DTS Collection Response format expected : { "@context": "/dts/api/contexts/EntryPoint.jsonld", "@id": "/dts/api/", "@type": "EntryPoint", "collections": "/dts/api/collections/", "documents": "/dts/api/documents/", "navigation" : "/dts/api/navigation" } :returns: Dictionary of main routes with their path :rtype: dict """ if self._routes: return self._routes request = requests.get(self.endpoint) request.raise_for_status() data = request.json() self._routes = { "collections": parse_uri(data["collections"], self.endpoint), "documents": parse_uri(data["documents"], self.endpoint), "navigation": parse_uri(data["navigation"], self.endpoint) } return self._routes
def function[routes, parameter[self]]: constant[ Retrieves the main routes of the DTS Collection Response format expected : { "@context": "/dts/api/contexts/EntryPoint.jsonld", "@id": "/dts/api/", "@type": "EntryPoint", "collections": "/dts/api/collections/", "documents": "/dts/api/documents/", "navigation" : "/dts/api/navigation" } :returns: Dictionary of main routes with their path :rtype: dict ] if name[self]._routes begin[:] return[name[self]._routes] variable[request] assign[=] call[name[requests].get, parameter[name[self].endpoint]] call[name[request].raise_for_status, parameter[]] variable[data] assign[=] call[name[request].json, parameter[]] name[self]._routes assign[=] dictionary[[<ast.Constant object at 0x7da20e9576d0>, <ast.Constant object at 0x7da20e955f90>, <ast.Constant object at 0x7da20e9556f0>], [<ast.Call object at 0x7da20e9577f0>, <ast.Call object at 0x7da20e957c40>, <ast.Call object at 0x7da20e955390>]] return[name[self]._routes]
keyword[def] identifier[routes] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_routes] : keyword[return] identifier[self] . identifier[_routes] identifier[request] = identifier[requests] . identifier[get] ( identifier[self] . identifier[endpoint] ) identifier[request] . identifier[raise_for_status] () identifier[data] = identifier[request] . identifier[json] () identifier[self] . identifier[_routes] ={ literal[string] : identifier[parse_uri] ( identifier[data] [ literal[string] ], identifier[self] . identifier[endpoint] ), literal[string] : identifier[parse_uri] ( identifier[data] [ literal[string] ], identifier[self] . identifier[endpoint] ), literal[string] : identifier[parse_uri] ( identifier[data] [ literal[string] ], identifier[self] . identifier[endpoint] ) } keyword[return] identifier[self] . identifier[_routes]
def routes(self): """ Retrieves the main routes of the DTS Collection Response format expected : { "@context": "/dts/api/contexts/EntryPoint.jsonld", "@id": "/dts/api/", "@type": "EntryPoint", "collections": "/dts/api/collections/", "documents": "/dts/api/documents/", "navigation" : "/dts/api/navigation" } :returns: Dictionary of main routes with their path :rtype: dict """ if self._routes: return self._routes # depends on [control=['if'], data=[]] request = requests.get(self.endpoint) request.raise_for_status() data = request.json() self._routes = {'collections': parse_uri(data['collections'], self.endpoint), 'documents': parse_uri(data['documents'], self.endpoint), 'navigation': parse_uri(data['navigation'], self.endpoint)} return self._routes
def openIndex(self, filename, description): """Attempt to delete and recreate an index, returns open file object or None.""" try: os.remove(filename) self.printd(" Deleted old " + description) except: self.printd(" No " + description + " found") # Now, attempt to open a new index try: files = open(filename, 'wt') except: self.printd("Error: Unable to create file " + filename + " in current folder. Quitting.") return None return files
def function[openIndex, parameter[self, filename, description]]: constant[Attempt to delete and recreate an index, returns open file object or None.] <ast.Try object at 0x7da1b1596bf0> <ast.Try object at 0x7da18dc99ff0> return[name[files]]
keyword[def] identifier[openIndex] ( identifier[self] , identifier[filename] , identifier[description] ): literal[string] keyword[try] : identifier[os] . identifier[remove] ( identifier[filename] ) identifier[self] . identifier[printd] ( literal[string] + identifier[description] ) keyword[except] : identifier[self] . identifier[printd] ( literal[string] + identifier[description] + literal[string] ) keyword[try] : identifier[files] = identifier[open] ( identifier[filename] , literal[string] ) keyword[except] : identifier[self] . identifier[printd] ( literal[string] + identifier[filename] + literal[string] ) keyword[return] keyword[None] keyword[return] identifier[files]
def openIndex(self, filename, description): """Attempt to delete and recreate an index, returns open file object or None.""" try: os.remove(filename) self.printd(' Deleted old ' + description) # depends on [control=['try'], data=[]] except: self.printd(' No ' + description + ' found') # depends on [control=['except'], data=[]] # Now, attempt to open a new index try: files = open(filename, 'wt') # depends on [control=['try'], data=[]] except: self.printd('Error: Unable to create file ' + filename + ' in current folder. Quitting.') return None # depends on [control=['except'], data=[]] return files
def text(self, value): """ Setting text. (When doing this, make sure that the cursor_position is valid for this text. text/cursor_position should be consistent at any time, otherwise set a Document instead.) """ assert isinstance(value, six.text_type), 'Got %r' % value assert self.cursor_position <= len(value) # Don't allow editing of read-only buffers. if self.read_only(): raise EditReadOnlyBuffer() changed = self._set_text(value) if changed: self._text_changed() # Reset history search text. self.history_search_text = None
def function[text, parameter[self, value]]: constant[ Setting text. (When doing this, make sure that the cursor_position is valid for this text. text/cursor_position should be consistent at any time, otherwise set a Document instead.) ] assert[call[name[isinstance], parameter[name[value], name[six].text_type]]] assert[compare[name[self].cursor_position less_or_equal[<=] call[name[len], parameter[name[value]]]]] if call[name[self].read_only, parameter[]] begin[:] <ast.Raise object at 0x7da1b063dcc0> variable[changed] assign[=] call[name[self]._set_text, parameter[name[value]]] if name[changed] begin[:] call[name[self]._text_changed, parameter[]] name[self].history_search_text assign[=] constant[None]
keyword[def] identifier[text] ( identifier[self] , identifier[value] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[value] , identifier[six] . identifier[text_type] ), literal[string] % identifier[value] keyword[assert] identifier[self] . identifier[cursor_position] <= identifier[len] ( identifier[value] ) keyword[if] identifier[self] . identifier[read_only] (): keyword[raise] identifier[EditReadOnlyBuffer] () identifier[changed] = identifier[self] . identifier[_set_text] ( identifier[value] ) keyword[if] identifier[changed] : identifier[self] . identifier[_text_changed] () identifier[self] . identifier[history_search_text] = keyword[None]
def text(self, value): """ Setting text. (When doing this, make sure that the cursor_position is valid for this text. text/cursor_position should be consistent at any time, otherwise set a Document instead.) """ assert isinstance(value, six.text_type), 'Got %r' % value assert self.cursor_position <= len(value) # Don't allow editing of read-only buffers. if self.read_only(): raise EditReadOnlyBuffer() # depends on [control=['if'], data=[]] changed = self._set_text(value) if changed: self._text_changed() # Reset history search text. self.history_search_text = None # depends on [control=['if'], data=[]]
def deploy_local(self, dotfiles, target_root=None): """Deploy dotfiles to a local path.""" if target_root is None: target_root = self.args.path for source_path, target_path in dotfiles.items(): source_path = path.join(self.source, source_path) target_path = path.join(target_root, target_path) if path.isfile(target_path) or path.islink(target_path): self.log.debug('Removing existing file at %s', target_path) os.unlink(target_path) elif path.isdir(target_path): self.log.debug('Removing existing dir at %s', target_path) shutil.rmtree(target_path) parent_dir = path.dirname(target_path) if not path.isdir(parent_dir): self.log.debug('Creating parent dir %s', parent_dir) os.makedirs(parent_dir) if self.args.copy: if path.isdir(source_path): self.log.debug('Copying file %s to %s', source_path, target_path) shutil.copytree(source_path, target_path) else: self.log.debug('Copying dir %s to %s', source_path, target_path) shutil.copy(source_path, target_path) else: self.log.debug('Symlinking %s -> %s', target_path, source_path) os.symlink(source_path, target_path)
def function[deploy_local, parameter[self, dotfiles, target_root]]: constant[Deploy dotfiles to a local path.] if compare[name[target_root] is constant[None]] begin[:] variable[target_root] assign[=] name[self].args.path for taget[tuple[[<ast.Name object at 0x7da18f812530>, <ast.Name object at 0x7da18f813d00>]]] in starred[call[name[dotfiles].items, parameter[]]] begin[:] variable[source_path] assign[=] call[name[path].join, parameter[name[self].source, name[source_path]]] variable[target_path] assign[=] call[name[path].join, parameter[name[target_root], name[target_path]]] if <ast.BoolOp object at 0x7da18f813f70> begin[:] call[name[self].log.debug, parameter[constant[Removing existing file at %s], name[target_path]]] call[name[os].unlink, parameter[name[target_path]]] variable[parent_dir] assign[=] call[name[path].dirname, parameter[name[target_path]]] if <ast.UnaryOp object at 0x7da18f8113f0> begin[:] call[name[self].log.debug, parameter[constant[Creating parent dir %s], name[parent_dir]]] call[name[os].makedirs, parameter[name[parent_dir]]] if name[self].args.copy begin[:] if call[name[path].isdir, parameter[name[source_path]]] begin[:] call[name[self].log.debug, parameter[constant[Copying file %s to %s], name[source_path], name[target_path]]] call[name[shutil].copytree, parameter[name[source_path], name[target_path]]]
keyword[def] identifier[deploy_local] ( identifier[self] , identifier[dotfiles] , identifier[target_root] = keyword[None] ): literal[string] keyword[if] identifier[target_root] keyword[is] keyword[None] : identifier[target_root] = identifier[self] . identifier[args] . identifier[path] keyword[for] identifier[source_path] , identifier[target_path] keyword[in] identifier[dotfiles] . identifier[items] (): identifier[source_path] = identifier[path] . identifier[join] ( identifier[self] . identifier[source] , identifier[source_path] ) identifier[target_path] = identifier[path] . identifier[join] ( identifier[target_root] , identifier[target_path] ) keyword[if] identifier[path] . identifier[isfile] ( identifier[target_path] ) keyword[or] identifier[path] . identifier[islink] ( identifier[target_path] ): identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[target_path] ) identifier[os] . identifier[unlink] ( identifier[target_path] ) keyword[elif] identifier[path] . identifier[isdir] ( identifier[target_path] ): identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[target_path] ) identifier[shutil] . identifier[rmtree] ( identifier[target_path] ) identifier[parent_dir] = identifier[path] . identifier[dirname] ( identifier[target_path] ) keyword[if] keyword[not] identifier[path] . identifier[isdir] ( identifier[parent_dir] ): identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[parent_dir] ) identifier[os] . identifier[makedirs] ( identifier[parent_dir] ) keyword[if] identifier[self] . identifier[args] . identifier[copy] : keyword[if] identifier[path] . identifier[isdir] ( identifier[source_path] ): identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[source_path] , identifier[target_path] ) identifier[shutil] . identifier[copytree] ( identifier[source_path] , identifier[target_path] ) keyword[else] : identifier[self] . identifier[log] . 
identifier[debug] ( literal[string] , identifier[source_path] , identifier[target_path] ) identifier[shutil] . identifier[copy] ( identifier[source_path] , identifier[target_path] ) keyword[else] : identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[target_path] , identifier[source_path] ) identifier[os] . identifier[symlink] ( identifier[source_path] , identifier[target_path] )
def deploy_local(self, dotfiles, target_root=None): """Deploy dotfiles to a local path.""" if target_root is None: target_root = self.args.path # depends on [control=['if'], data=['target_root']] for (source_path, target_path) in dotfiles.items(): source_path = path.join(self.source, source_path) target_path = path.join(target_root, target_path) if path.isfile(target_path) or path.islink(target_path): self.log.debug('Removing existing file at %s', target_path) os.unlink(target_path) # depends on [control=['if'], data=[]] elif path.isdir(target_path): self.log.debug('Removing existing dir at %s', target_path) shutil.rmtree(target_path) # depends on [control=['if'], data=[]] parent_dir = path.dirname(target_path) if not path.isdir(parent_dir): self.log.debug('Creating parent dir %s', parent_dir) os.makedirs(parent_dir) # depends on [control=['if'], data=[]] if self.args.copy: if path.isdir(source_path): self.log.debug('Copying file %s to %s', source_path, target_path) shutil.copytree(source_path, target_path) # depends on [control=['if'], data=[]] else: self.log.debug('Copying dir %s to %s', source_path, target_path) shutil.copy(source_path, target_path) # depends on [control=['if'], data=[]] else: self.log.debug('Symlinking %s -> %s', target_path, source_path) os.symlink(source_path, target_path) # depends on [control=['for'], data=[]]
def _debug(self, out, print_prefix=True): """ Print out to stderr, if debugging is enabled. """ if self.debug: if print_prefix: pre = self.__class__.__name__ if hasattr(self, 'debug_prefix'): pre = getattr(self, 'debug_prefix') sys.stderr.write("%s: " % pre) sys.stderr.write(out)
def function[_debug, parameter[self, out, print_prefix]]: constant[ Print out to stderr, if debugging is enabled. ] if name[self].debug begin[:] if name[print_prefix] begin[:] variable[pre] assign[=] name[self].__class__.__name__ if call[name[hasattr], parameter[name[self], constant[debug_prefix]]] begin[:] variable[pre] assign[=] call[name[getattr], parameter[name[self], constant[debug_prefix]]] call[name[sys].stderr.write, parameter[binary_operation[constant[%s: ] <ast.Mod object at 0x7da2590d6920> name[pre]]]] call[name[sys].stderr.write, parameter[name[out]]]
keyword[def] identifier[_debug] ( identifier[self] , identifier[out] , identifier[print_prefix] = keyword[True] ): literal[string] keyword[if] identifier[self] . identifier[debug] : keyword[if] identifier[print_prefix] : identifier[pre] = identifier[self] . identifier[__class__] . identifier[__name__] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[pre] = identifier[getattr] ( identifier[self] , literal[string] ) identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] % identifier[pre] ) identifier[sys] . identifier[stderr] . identifier[write] ( identifier[out] )
def _debug(self, out, print_prefix=True): """ Print out to stderr, if debugging is enabled. """ if self.debug: if print_prefix: pre = self.__class__.__name__ if hasattr(self, 'debug_prefix'): pre = getattr(self, 'debug_prefix') # depends on [control=['if'], data=[]] sys.stderr.write('%s: ' % pre) # depends on [control=['if'], data=[]] sys.stderr.write(out) # depends on [control=['if'], data=[]]