code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def merge(base, obj, location=None): """ merge is like XmrsPathNode.update() except it raises errors on unequal non-None values. """ # pump object to it's location with dummy nodes while location: axis = location.pop() obj = XmrsPathNode(None, None, links={axis: obj}) if base is None: return obj _merge(base, obj) # if isinstance(base, XmrsPath): # base.calculate_metrics() return base
def function[merge, parameter[base, obj, location]]: constant[ merge is like XmrsPathNode.update() except it raises errors on unequal non-None values. ] while name[location] begin[:] variable[axis] assign[=] call[name[location].pop, parameter[]] variable[obj] assign[=] call[name[XmrsPathNode], parameter[constant[None], constant[None]]] if compare[name[base] is constant[None]] begin[:] return[name[obj]] call[name[_merge], parameter[name[base], name[obj]]] return[name[base]]
keyword[def] identifier[merge] ( identifier[base] , identifier[obj] , identifier[location] = keyword[None] ): literal[string] keyword[while] identifier[location] : identifier[axis] = identifier[location] . identifier[pop] () identifier[obj] = identifier[XmrsPathNode] ( keyword[None] , keyword[None] , identifier[links] ={ identifier[axis] : identifier[obj] }) keyword[if] identifier[base] keyword[is] keyword[None] : keyword[return] identifier[obj] identifier[_merge] ( identifier[base] , identifier[obj] ) keyword[return] identifier[base]
def merge(base, obj, location=None): """ merge is like XmrsPathNode.update() except it raises errors on unequal non-None values. """ # pump object to it's location with dummy nodes while location: axis = location.pop() obj = XmrsPathNode(None, None, links={axis: obj}) # depends on [control=['while'], data=[]] if base is None: return obj # depends on [control=['if'], data=[]] _merge(base, obj) # if isinstance(base, XmrsPath): # base.calculate_metrics() return base
def updatable_map(cache: MutableMapping[Domain, Range]) -> Operator[Map]: """ Returns decorator that calls wrapped function if nothing was found in cache for its argument and reuses result afterwards. Wrapped function arguments should be hashable. """ def wrapper(function: Map[Domain, Range]) -> Map[Domain, Range]: @wraps(function) def wrapped(argument: Domain) -> Range: try: return cache[argument] except KeyError: result = function(argument) cache[argument] = result return result return wrapped return wrapper
def function[updatable_map, parameter[cache]]: constant[ Returns decorator that calls wrapped function if nothing was found in cache for its argument and reuses result afterwards. Wrapped function arguments should be hashable. ] def function[wrapper, parameter[function]]: def function[wrapped, parameter[argument]]: <ast.Try object at 0x7da20c993310> return[name[wrapped]] return[name[wrapper]]
keyword[def] identifier[updatable_map] ( identifier[cache] : identifier[MutableMapping] [ identifier[Domain] , identifier[Range] ])-> identifier[Operator] [ identifier[Map] ]: literal[string] keyword[def] identifier[wrapper] ( identifier[function] : identifier[Map] [ identifier[Domain] , identifier[Range] ])-> identifier[Map] [ identifier[Domain] , identifier[Range] ]: @ identifier[wraps] ( identifier[function] ) keyword[def] identifier[wrapped] ( identifier[argument] : identifier[Domain] )-> identifier[Range] : keyword[try] : keyword[return] identifier[cache] [ identifier[argument] ] keyword[except] identifier[KeyError] : identifier[result] = identifier[function] ( identifier[argument] ) identifier[cache] [ identifier[argument] ]= identifier[result] keyword[return] identifier[result] keyword[return] identifier[wrapped] keyword[return] identifier[wrapper]
def updatable_map(cache: MutableMapping[Domain, Range]) -> Operator[Map]: """ Returns decorator that calls wrapped function if nothing was found in cache for its argument and reuses result afterwards. Wrapped function arguments should be hashable. """ def wrapper(function: Map[Domain, Range]) -> Map[Domain, Range]: @wraps(function) def wrapped(argument: Domain) -> Range: try: return cache[argument] # depends on [control=['try'], data=[]] except KeyError: result = function(argument) cache[argument] = result return result # depends on [control=['except'], data=[]] return wrapped return wrapper
def create_asset_model(self, ): """Return a treemodel with the levels: project, assettype, asset and reftrack type :returns: a treemodel :rtype: :class:`jukeboxcore.gui.treemodel.TreeModel` :raises: None """ rootdata = treemodel.ListItemData(['Name']) rootitem = treemodel.TreeItem(rootdata) prjs = djadapter.projects.all() for prj in prjs: prjdata = djitemdata.ProjectItemData(prj) prjitem = treemodel.TreeItem(prjdata, rootitem) for atype in prj.atype_set.all(): atypedata = djitemdata.AtypeItemData(atype) atypeitem = treemodel.TreeItem(atypedata, prjitem) for asset in atype.asset_set.filter(project=prj): assetdata = djitemdata.AssetItemData(asset) assetitem = treemodel.TreeItem(assetdata, atypeitem) for typ in self.refobjinter.get_available_types_for_scene(asset): typdata = treemodel.ListItemData([typ]) treemodel.TreeItem(typdata, assetitem) return treemodel.TreeModel(rootitem)
def function[create_asset_model, parameter[self]]: constant[Return a treemodel with the levels: project, assettype, asset and reftrack type :returns: a treemodel :rtype: :class:`jukeboxcore.gui.treemodel.TreeModel` :raises: None ] variable[rootdata] assign[=] call[name[treemodel].ListItemData, parameter[list[[<ast.Constant object at 0x7da1b16d5690>]]]] variable[rootitem] assign[=] call[name[treemodel].TreeItem, parameter[name[rootdata]]] variable[prjs] assign[=] call[name[djadapter].projects.all, parameter[]] for taget[name[prj]] in starred[name[prjs]] begin[:] variable[prjdata] assign[=] call[name[djitemdata].ProjectItemData, parameter[name[prj]]] variable[prjitem] assign[=] call[name[treemodel].TreeItem, parameter[name[prjdata], name[rootitem]]] for taget[name[atype]] in starred[call[name[prj].atype_set.all, parameter[]]] begin[:] variable[atypedata] assign[=] call[name[djitemdata].AtypeItemData, parameter[name[atype]]] variable[atypeitem] assign[=] call[name[treemodel].TreeItem, parameter[name[atypedata], name[prjitem]]] for taget[name[asset]] in starred[call[name[atype].asset_set.filter, parameter[]]] begin[:] variable[assetdata] assign[=] call[name[djitemdata].AssetItemData, parameter[name[asset]]] variable[assetitem] assign[=] call[name[treemodel].TreeItem, parameter[name[assetdata], name[atypeitem]]] for taget[name[typ]] in starred[call[name[self].refobjinter.get_available_types_for_scene, parameter[name[asset]]]] begin[:] variable[typdata] assign[=] call[name[treemodel].ListItemData, parameter[list[[<ast.Name object at 0x7da1b16d5a80>]]]] call[name[treemodel].TreeItem, parameter[name[typdata], name[assetitem]]] return[call[name[treemodel].TreeModel, parameter[name[rootitem]]]]
keyword[def] identifier[create_asset_model] ( identifier[self] ,): literal[string] identifier[rootdata] = identifier[treemodel] . identifier[ListItemData] ([ literal[string] ]) identifier[rootitem] = identifier[treemodel] . identifier[TreeItem] ( identifier[rootdata] ) identifier[prjs] = identifier[djadapter] . identifier[projects] . identifier[all] () keyword[for] identifier[prj] keyword[in] identifier[prjs] : identifier[prjdata] = identifier[djitemdata] . identifier[ProjectItemData] ( identifier[prj] ) identifier[prjitem] = identifier[treemodel] . identifier[TreeItem] ( identifier[prjdata] , identifier[rootitem] ) keyword[for] identifier[atype] keyword[in] identifier[prj] . identifier[atype_set] . identifier[all] (): identifier[atypedata] = identifier[djitemdata] . identifier[AtypeItemData] ( identifier[atype] ) identifier[atypeitem] = identifier[treemodel] . identifier[TreeItem] ( identifier[atypedata] , identifier[prjitem] ) keyword[for] identifier[asset] keyword[in] identifier[atype] . identifier[asset_set] . identifier[filter] ( identifier[project] = identifier[prj] ): identifier[assetdata] = identifier[djitemdata] . identifier[AssetItemData] ( identifier[asset] ) identifier[assetitem] = identifier[treemodel] . identifier[TreeItem] ( identifier[assetdata] , identifier[atypeitem] ) keyword[for] identifier[typ] keyword[in] identifier[self] . identifier[refobjinter] . identifier[get_available_types_for_scene] ( identifier[asset] ): identifier[typdata] = identifier[treemodel] . identifier[ListItemData] ([ identifier[typ] ]) identifier[treemodel] . identifier[TreeItem] ( identifier[typdata] , identifier[assetitem] ) keyword[return] identifier[treemodel] . identifier[TreeModel] ( identifier[rootitem] )
def create_asset_model(self): """Return a treemodel with the levels: project, assettype, asset and reftrack type :returns: a treemodel :rtype: :class:`jukeboxcore.gui.treemodel.TreeModel` :raises: None """ rootdata = treemodel.ListItemData(['Name']) rootitem = treemodel.TreeItem(rootdata) prjs = djadapter.projects.all() for prj in prjs: prjdata = djitemdata.ProjectItemData(prj) prjitem = treemodel.TreeItem(prjdata, rootitem) for atype in prj.atype_set.all(): atypedata = djitemdata.AtypeItemData(atype) atypeitem = treemodel.TreeItem(atypedata, prjitem) for asset in atype.asset_set.filter(project=prj): assetdata = djitemdata.AssetItemData(asset) assetitem = treemodel.TreeItem(assetdata, atypeitem) for typ in self.refobjinter.get_available_types_for_scene(asset): typdata = treemodel.ListItemData([typ]) treemodel.TreeItem(typdata, assetitem) # depends on [control=['for'], data=['typ']] # depends on [control=['for'], data=['asset']] # depends on [control=['for'], data=['atype']] # depends on [control=['for'], data=['prj']] return treemodel.TreeModel(rootitem)
def channel(self) -> Iterator[amqp.Channel]: """Returns a new channel from a new connection as a context manager.""" with self.connection() as conn: ch = conn.channel() logger.info('Opened new channel') with _safe_close(ch): yield ch
def function[channel, parameter[self]]: constant[Returns a new channel from a new connection as a context manager.] with call[name[self].connection, parameter[]] begin[:] variable[ch] assign[=] call[name[conn].channel, parameter[]] call[name[logger].info, parameter[constant[Opened new channel]]] with call[name[_safe_close], parameter[name[ch]]] begin[:] <ast.Yield object at 0x7da1b10e9db0>
keyword[def] identifier[channel] ( identifier[self] )-> identifier[Iterator] [ identifier[amqp] . identifier[Channel] ]: literal[string] keyword[with] identifier[self] . identifier[connection] () keyword[as] identifier[conn] : identifier[ch] = identifier[conn] . identifier[channel] () identifier[logger] . identifier[info] ( literal[string] ) keyword[with] identifier[_safe_close] ( identifier[ch] ): keyword[yield] identifier[ch]
def channel(self) -> Iterator[amqp.Channel]: """Returns a new channel from a new connection as a context manager.""" with self.connection() as conn: ch = conn.channel() logger.info('Opened new channel') with _safe_close(ch): yield ch # depends on [control=['with'], data=[]] # depends on [control=['with'], data=['conn']]
def unwrap(node): """Remove a node, replacing it with its children.""" for child in list(node.childNodes): node.parentNode.insertBefore(child, node) remove_node(node)
def function[unwrap, parameter[node]]: constant[Remove a node, replacing it with its children.] for taget[name[child]] in starred[call[name[list], parameter[name[node].childNodes]]] begin[:] call[name[node].parentNode.insertBefore, parameter[name[child], name[node]]] call[name[remove_node], parameter[name[node]]]
keyword[def] identifier[unwrap] ( identifier[node] ): literal[string] keyword[for] identifier[child] keyword[in] identifier[list] ( identifier[node] . identifier[childNodes] ): identifier[node] . identifier[parentNode] . identifier[insertBefore] ( identifier[child] , identifier[node] ) identifier[remove_node] ( identifier[node] )
def unwrap(node): """Remove a node, replacing it with its children.""" for child in list(node.childNodes): node.parentNode.insertBefore(child, node) # depends on [control=['for'], data=['child']] remove_node(node)
def async_task(func, *args, **kwargs): """Queue a task for the cluster.""" keywords = kwargs.copy() opt_keys = ( 'hook', 'group', 'save', 'sync', 'cached', 'ack_failure', 'iter_count', 'iter_cached', 'chain', 'broker') q_options = keywords.pop('q_options', {}) # get an id tag = uuid() # build the task package task = {'id': tag[1], 'name': keywords.pop('task_name', None) or q_options.pop('task_name', None) or tag[0], 'func': func, 'args': args} # push optionals for key in opt_keys: if q_options and key in q_options: task[key] = q_options[key] elif key in keywords: task[key] = keywords.pop(key) # don't serialize the broker broker = task.pop('broker', get_broker()) # overrides if 'cached' not in task and Conf.CACHED: task['cached'] = Conf.CACHED if 'sync' not in task and Conf.SYNC: task['sync'] = Conf.SYNC if 'ack_failure' not in task and Conf.ACK_FAILURES: task['ack_failure'] = Conf.ACK_FAILURES # finalize task['kwargs'] = keywords task['started'] = timezone.now() # signal it pre_enqueue.send(sender="django_q", task=task) # sign it pack = SignedPackage.dumps(task) if task.get('sync', False): return _sync(pack) # push it enqueue_id = broker.enqueue(pack) logger.info('Enqueued {}'.format(enqueue_id)) logger.debug('Pushed {}'.format(tag)) return task['id']
def function[async_task, parameter[func]]: constant[Queue a task for the cluster.] variable[keywords] assign[=] call[name[kwargs].copy, parameter[]] variable[opt_keys] assign[=] tuple[[<ast.Constant object at 0x7da1b1716ef0>, <ast.Constant object at 0x7da1b1714130>, <ast.Constant object at 0x7da1b17161d0>, <ast.Constant object at 0x7da1b1715d20>, <ast.Constant object at 0x7da1b1714f10>, <ast.Constant object at 0x7da1b1716470>, <ast.Constant object at 0x7da1b1714d00>, <ast.Constant object at 0x7da1b1717520>, <ast.Constant object at 0x7da1b1715ae0>, <ast.Constant object at 0x7da1b17145b0>]] variable[q_options] assign[=] call[name[keywords].pop, parameter[constant[q_options], dictionary[[], []]]] variable[tag] assign[=] call[name[uuid], parameter[]] variable[task] assign[=] dictionary[[<ast.Constant object at 0x7da1b1716200>, <ast.Constant object at 0x7da1b1716fb0>, <ast.Constant object at 0x7da1b1714e80>, <ast.Constant object at 0x7da1b1716cb0>], [<ast.Subscript object at 0x7da1b1716f20>, <ast.BoolOp object at 0x7da1b1716980>, <ast.Name object at 0x7da1b1714760>, <ast.Name object at 0x7da1b1714e50>]] for taget[name[key]] in starred[name[opt_keys]] begin[:] if <ast.BoolOp object at 0x7da1b1715e70> begin[:] call[name[task]][name[key]] assign[=] call[name[q_options]][name[key]] variable[broker] assign[=] call[name[task].pop, parameter[constant[broker], call[name[get_broker], parameter[]]]] if <ast.BoolOp object at 0x7da1b17178e0> begin[:] call[name[task]][constant[cached]] assign[=] name[Conf].CACHED if <ast.BoolOp object at 0x7da1b1987f70> begin[:] call[name[task]][constant[sync]] assign[=] name[Conf].SYNC if <ast.BoolOp object at 0x7da1b19844c0> begin[:] call[name[task]][constant[ack_failure]] assign[=] name[Conf].ACK_FAILURES call[name[task]][constant[kwargs]] assign[=] name[keywords] call[name[task]][constant[started]] assign[=] call[name[timezone].now, parameter[]] call[name[pre_enqueue].send, parameter[]] variable[pack] assign[=] call[name[SignedPackage].dumps, parameter[name[task]]] if call[name[task].get, parameter[constant[sync], constant[False]]] begin[:] return[call[name[_sync], parameter[name[pack]]]] variable[enqueue_id] assign[=] call[name[broker].enqueue, parameter[name[pack]]] call[name[logger].info, parameter[call[constant[Enqueued {}].format, parameter[name[enqueue_id]]]]] call[name[logger].debug, parameter[call[constant[Pushed {}].format, parameter[name[tag]]]]] return[call[name[task]][constant[id]]]
keyword[def] identifier[async_task] ( identifier[func] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[keywords] = identifier[kwargs] . identifier[copy] () identifier[opt_keys] =( literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ) identifier[q_options] = identifier[keywords] . identifier[pop] ( literal[string] ,{}) identifier[tag] = identifier[uuid] () identifier[task] ={ literal[string] : identifier[tag] [ literal[int] ], literal[string] : identifier[keywords] . identifier[pop] ( literal[string] , keyword[None] ) keyword[or] identifier[q_options] . identifier[pop] ( literal[string] , keyword[None] ) keyword[or] identifier[tag] [ literal[int] ], literal[string] : identifier[func] , literal[string] : identifier[args] } keyword[for] identifier[key] keyword[in] identifier[opt_keys] : keyword[if] identifier[q_options] keyword[and] identifier[key] keyword[in] identifier[q_options] : identifier[task] [ identifier[key] ]= identifier[q_options] [ identifier[key] ] keyword[elif] identifier[key] keyword[in] identifier[keywords] : identifier[task] [ identifier[key] ]= identifier[keywords] . identifier[pop] ( identifier[key] ) identifier[broker] = identifier[task] . identifier[pop] ( literal[string] , identifier[get_broker] ()) keyword[if] literal[string] keyword[not] keyword[in] identifier[task] keyword[and] identifier[Conf] . identifier[CACHED] : identifier[task] [ literal[string] ]= identifier[Conf] . identifier[CACHED] keyword[if] literal[string] keyword[not] keyword[in] identifier[task] keyword[and] identifier[Conf] . identifier[SYNC] : identifier[task] [ literal[string] ]= identifier[Conf] . identifier[SYNC] keyword[if] literal[string] keyword[not] keyword[in] identifier[task] keyword[and] identifier[Conf] . identifier[ACK_FAILURES] : identifier[task] [ literal[string] ]= identifier[Conf] . identifier[ACK_FAILURES] identifier[task] [ literal[string] ]= identifier[keywords] identifier[task] [ literal[string] ]= identifier[timezone] . identifier[now] () identifier[pre_enqueue] . identifier[send] ( identifier[sender] = literal[string] , identifier[task] = identifier[task] ) identifier[pack] = identifier[SignedPackage] . identifier[dumps] ( identifier[task] ) keyword[if] identifier[task] . identifier[get] ( literal[string] , keyword[False] ): keyword[return] identifier[_sync] ( identifier[pack] ) identifier[enqueue_id] = identifier[broker] . identifier[enqueue] ( identifier[pack] ) identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[enqueue_id] )) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[tag] )) keyword[return] identifier[task] [ literal[string] ]
def async_task(func, *args, **kwargs): """Queue a task for the cluster.""" keywords = kwargs.copy() opt_keys = ('hook', 'group', 'save', 'sync', 'cached', 'ack_failure', 'iter_count', 'iter_cached', 'chain', 'broker') q_options = keywords.pop('q_options', {}) # get an id tag = uuid() # build the task package task = {'id': tag[1], 'name': keywords.pop('task_name', None) or q_options.pop('task_name', None) or tag[0], 'func': func, 'args': args} # push optionals for key in opt_keys: if q_options and key in q_options: task[key] = q_options[key] # depends on [control=['if'], data=[]] elif key in keywords: task[key] = keywords.pop(key) # depends on [control=['if'], data=['key', 'keywords']] # depends on [control=['for'], data=['key']] # don't serialize the broker broker = task.pop('broker', get_broker()) # overrides if 'cached' not in task and Conf.CACHED: task['cached'] = Conf.CACHED # depends on [control=['if'], data=[]] if 'sync' not in task and Conf.SYNC: task['sync'] = Conf.SYNC # depends on [control=['if'], data=[]] if 'ack_failure' not in task and Conf.ACK_FAILURES: task['ack_failure'] = Conf.ACK_FAILURES # depends on [control=['if'], data=[]] # finalize task['kwargs'] = keywords task['started'] = timezone.now() # signal it pre_enqueue.send(sender='django_q', task=task) # sign it pack = SignedPackage.dumps(task) if task.get('sync', False): return _sync(pack) # depends on [control=['if'], data=[]] # push it enqueue_id = broker.enqueue(pack) logger.info('Enqueued {}'.format(enqueue_id)) logger.debug('Pushed {}'.format(tag)) return task['id']
def masked_sub_grid_1d_index_to_2d_sub_pixel_index_from_mask(mask, sub_grid_size): """Compute a 1D array that maps every unmasked pixel to its corresponding 2d pixel using its (y,x) pixel indexes. For howtolens if pixel [2,5] corresponds to the second pixel on the 1D array, grid_to_pixel[1] = [2,5]""" total_sub_pixels = total_sub_pixels_from_mask_and_sub_grid_size(mask=mask, sub_grid_size=sub_grid_size) sub_grid_to_sub_pixel = np.zeros(shape=(total_sub_pixels, 2)) sub_pixel_count = 0 for y in range(mask.shape[0]): for x in range(mask.shape[1]): if not mask[y, x]: for y1 in range(sub_grid_size): for x1 in range(sub_grid_size): sub_grid_to_sub_pixel[sub_pixel_count, :] = (y*sub_grid_size)+y1, (x*sub_grid_size)+x1 sub_pixel_count += 1 return sub_grid_to_sub_pixel
def function[masked_sub_grid_1d_index_to_2d_sub_pixel_index_from_mask, parameter[mask, sub_grid_size]]: constant[Compute a 1D array that maps every unmasked pixel to its corresponding 2d pixel using its (y,x) pixel indexes. For howtolens if pixel [2,5] corresponds to the second pixel on the 1D array, grid_to_pixel[1] = [2,5]] variable[total_sub_pixels] assign[=] call[name[total_sub_pixels_from_mask_and_sub_grid_size], parameter[]] variable[sub_grid_to_sub_pixel] assign[=] call[name[np].zeros, parameter[]] variable[sub_pixel_count] assign[=] constant[0] for taget[name[y]] in starred[call[name[range], parameter[call[name[mask].shape][constant[0]]]]] begin[:] for taget[name[x]] in starred[call[name[range], parameter[call[name[mask].shape][constant[1]]]]] begin[:] if <ast.UnaryOp object at 0x7da20c76dba0> begin[:] for taget[name[y1]] in starred[call[name[range], parameter[name[sub_grid_size]]]] begin[:] for taget[name[x1]] in starred[call[name[range], parameter[name[sub_grid_size]]]] begin[:] call[name[sub_grid_to_sub_pixel]][tuple[[<ast.Name object at 0x7da20c76da20>, <ast.Slice object at 0x7da20c76ecb0>]]] assign[=] tuple[[<ast.BinOp object at 0x7da20c76ea70>, <ast.BinOp object at 0x7da20c76e830>]] <ast.AugAssign object at 0x7da20c76caf0> return[name[sub_grid_to_sub_pixel]]
keyword[def] identifier[masked_sub_grid_1d_index_to_2d_sub_pixel_index_from_mask] ( identifier[mask] , identifier[sub_grid_size] ): literal[string] identifier[total_sub_pixels] = identifier[total_sub_pixels_from_mask_and_sub_grid_size] ( identifier[mask] = identifier[mask] , identifier[sub_grid_size] = identifier[sub_grid_size] ) identifier[sub_grid_to_sub_pixel] = identifier[np] . identifier[zeros] ( identifier[shape] =( identifier[total_sub_pixels] , literal[int] )) identifier[sub_pixel_count] = literal[int] keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[mask] . identifier[shape] [ literal[int] ]): keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[mask] . identifier[shape] [ literal[int] ]): keyword[if] keyword[not] identifier[mask] [ identifier[y] , identifier[x] ]: keyword[for] identifier[y1] keyword[in] identifier[range] ( identifier[sub_grid_size] ): keyword[for] identifier[x1] keyword[in] identifier[range] ( identifier[sub_grid_size] ): identifier[sub_grid_to_sub_pixel] [ identifier[sub_pixel_count] ,:]=( identifier[y] * identifier[sub_grid_size] )+ identifier[y1] ,( identifier[x] * identifier[sub_grid_size] )+ identifier[x1] identifier[sub_pixel_count] += literal[int] keyword[return] identifier[sub_grid_to_sub_pixel]
def masked_sub_grid_1d_index_to_2d_sub_pixel_index_from_mask(mask, sub_grid_size): """Compute a 1D array that maps every unmasked pixel to its corresponding 2d pixel using its (y,x) pixel indexes. For howtolens if pixel [2,5] corresponds to the second pixel on the 1D array, grid_to_pixel[1] = [2,5]""" total_sub_pixels = total_sub_pixels_from_mask_and_sub_grid_size(mask=mask, sub_grid_size=sub_grid_size) sub_grid_to_sub_pixel = np.zeros(shape=(total_sub_pixels, 2)) sub_pixel_count = 0 for y in range(mask.shape[0]): for x in range(mask.shape[1]): if not mask[y, x]: for y1 in range(sub_grid_size): for x1 in range(sub_grid_size): sub_grid_to_sub_pixel[sub_pixel_count, :] = (y * sub_grid_size + y1, x * sub_grid_size + x1) sub_pixel_count += 1 # depends on [control=['for'], data=['x1']] # depends on [control=['for'], data=['y1']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=['y']] return sub_grid_to_sub_pixel
def dequeue(self, block=True): """Dequeue a record and return item.""" return self.queue.get(block, self.queue_get_timeout)
def function[dequeue, parameter[self, block]]: constant[Dequeue a record and return item.] return[call[name[self].queue.get, parameter[name[block], name[self].queue_get_timeout]]]
keyword[def] identifier[dequeue] ( identifier[self] , identifier[block] = keyword[True] ): literal[string] keyword[return] identifier[self] . identifier[queue] . identifier[get] ( identifier[block] , identifier[self] . identifier[queue_get_timeout] )
def dequeue(self, block=True): """Dequeue a record and return item.""" return self.queue.get(block, self.queue_get_timeout)
def _generate_indicators(catalog, validator=None, only_numeric=False): """Genera los indicadores de un catálogo individual. Args: catalog (dict): diccionario de un data.json parseado Returns: dict: diccionario con los indicadores del catálogo provisto """ result = {} # Obtengo summary para los indicadores del estado de los metadatos result.update(_generate_status_indicators(catalog, validator=validator)) # Genero los indicadores relacionados con fechas, y los agrego result.update( _generate_date_indicators(catalog, only_numeric=only_numeric)) # Agrego la cuenta de los formatos de las distribuciones if not only_numeric: if 'dataset' in catalog: format_count = count_fields(get_distributions(catalog), 'format') type_count = count_fields(get_distributions(catalog), 'type') license_count = count_fields(get_datasets(catalog), 'license') else: format_count = type_count = license_count = {} result.update({ 'distribuciones_formatos_cant': format_count, 'distribuciones_tipos_cant': type_count, 'datasets_licencias_cant': license_count, }) # Agrego porcentaje de campos recomendados/optativos usados fields_count = _count_required_and_optional_fields(catalog) recomendados_pct = float(fields_count['recomendado']) / \ fields_count['total_recomendado'] optativos_pct = float(fields_count['optativo']) / \ fields_count['total_optativo'] result.update({ 'campos_recomendados_pct': round(recomendados_pct, 4), 'campos_optativos_pct': round(optativos_pct, 4) }) return fields_count, result
def function[_generate_indicators, parameter[catalog, validator, only_numeric]]: constant[Genera los indicadores de un catálogo individual. Args: catalog (dict): diccionario de un data.json parseado Returns: dict: diccionario con los indicadores del catálogo provisto ] variable[result] assign[=] dictionary[[], []] call[name[result].update, parameter[call[name[_generate_status_indicators], parameter[name[catalog]]]]] call[name[result].update, parameter[call[name[_generate_date_indicators], parameter[name[catalog]]]]] if <ast.UnaryOp object at 0x7da1b02e5780> begin[:] if compare[constant[dataset] in name[catalog]] begin[:] variable[format_count] assign[=] call[name[count_fields], parameter[call[name[get_distributions], parameter[name[catalog]]], constant[format]]] variable[type_count] assign[=] call[name[count_fields], parameter[call[name[get_distributions], parameter[name[catalog]]], constant[type]]] variable[license_count] assign[=] call[name[count_fields], parameter[call[name[get_datasets], parameter[name[catalog]]], constant[license]]] call[name[result].update, parameter[dictionary[[<ast.Constant object at 0x7da1b02e5960>, <ast.Constant object at 0x7da1b02e6e30>, <ast.Constant object at 0x7da1b02e6650>], [<ast.Name object at 0x7da1b02e6620>, <ast.Name object at 0x7da1b02e50f0>, <ast.Name object at 0x7da1b02e4c40>]]]] variable[fields_count] assign[=] call[name[_count_required_and_optional_fields], parameter[name[catalog]]] variable[recomendados_pct] assign[=] binary_operation[call[name[float], parameter[call[name[fields_count]][constant[recomendado]]]] / call[name[fields_count]][constant[total_recomendado]]] variable[optativos_pct] assign[=] binary_operation[call[name[float], parameter[call[name[fields_count]][constant[optativo]]]] / call[name[fields_count]][constant[total_optativo]]] call[name[result].update, parameter[dictionary[[<ast.Constant object at 0x7da18dc9b5e0>, <ast.Constant object at 0x7da18dc98910>], [<ast.Call object at 0x7da18dc98460>, <ast.Call object at 0x7da18dc9a4d0>]]]] return[tuple[[<ast.Name object at 0x7da20c6a8580>, <ast.Name object at 0x7da20c6a99f0>]]]
keyword[def] identifier[_generate_indicators] ( identifier[catalog] , identifier[validator] = keyword[None] , identifier[only_numeric] = keyword[False] ): literal[string] identifier[result] ={} identifier[result] . identifier[update] ( identifier[_generate_status_indicators] ( identifier[catalog] , identifier[validator] = identifier[validator] )) identifier[result] . identifier[update] ( identifier[_generate_date_indicators] ( identifier[catalog] , identifier[only_numeric] = identifier[only_numeric] )) keyword[if] keyword[not] identifier[only_numeric] : keyword[if] literal[string] keyword[in] identifier[catalog] : identifier[format_count] = identifier[count_fields] ( identifier[get_distributions] ( identifier[catalog] ), literal[string] ) identifier[type_count] = identifier[count_fields] ( identifier[get_distributions] ( identifier[catalog] ), literal[string] ) identifier[license_count] = identifier[count_fields] ( identifier[get_datasets] ( identifier[catalog] ), literal[string] ) keyword[else] : identifier[format_count] = identifier[type_count] = identifier[license_count] ={} identifier[result] . identifier[update] ({ literal[string] : identifier[format_count] , literal[string] : identifier[type_count] , literal[string] : identifier[license_count] , }) identifier[fields_count] = identifier[_count_required_and_optional_fields] ( identifier[catalog] ) identifier[recomendados_pct] = identifier[float] ( identifier[fields_count] [ literal[string] ])/ identifier[fields_count] [ literal[string] ] identifier[optativos_pct] = identifier[float] ( identifier[fields_count] [ literal[string] ])/ identifier[fields_count] [ literal[string] ] identifier[result] . identifier[update] ({ literal[string] : identifier[round] ( identifier[recomendados_pct] , literal[int] ), literal[string] : identifier[round] ( identifier[optativos_pct] , literal[int] ) }) keyword[return] identifier[fields_count] , identifier[result]
def _generate_indicators(catalog, validator=None, only_numeric=False): """Genera los indicadores de un catálogo individual. Args: catalog (dict): diccionario de un data.json parseado Returns: dict: diccionario con los indicadores del catálogo provisto """ result = {} # Obtengo summary para los indicadores del estado de los metadatos result.update(_generate_status_indicators(catalog, validator=validator)) # Genero los indicadores relacionados con fechas, y los agrego result.update(_generate_date_indicators(catalog, only_numeric=only_numeric)) # Agrego la cuenta de los formatos de las distribuciones if not only_numeric: if 'dataset' in catalog: format_count = count_fields(get_distributions(catalog), 'format') type_count = count_fields(get_distributions(catalog), 'type') license_count = count_fields(get_datasets(catalog), 'license') # depends on [control=['if'], data=['catalog']] else: format_count = type_count = license_count = {} result.update({'distribuciones_formatos_cant': format_count, 'distribuciones_tipos_cant': type_count, 'datasets_licencias_cant': license_count}) # depends on [control=['if'], data=[]] # Agrego porcentaje de campos recomendados/optativos usados fields_count = _count_required_and_optional_fields(catalog) recomendados_pct = float(fields_count['recomendado']) / fields_count['total_recomendado'] optativos_pct = float(fields_count['optativo']) / fields_count['total_optativo'] result.update({'campos_recomendados_pct': round(recomendados_pct, 4), 'campos_optativos_pct': round(optativos_pct, 4)}) return (fields_count, result)
def remove(in_bam): """ remove bam file and the index if exists """ if utils.file_exists(in_bam): utils.remove_safe(in_bam) if utils.file_exists(in_bam + ".bai"): utils.remove_safe(in_bam + ".bai")
def function[remove, parameter[in_bam]]: constant[ remove bam file and the index if exists ] if call[name[utils].file_exists, parameter[name[in_bam]]] begin[:] call[name[utils].remove_safe, parameter[name[in_bam]]] if call[name[utils].file_exists, parameter[binary_operation[name[in_bam] + constant[.bai]]]] begin[:] call[name[utils].remove_safe, parameter[binary_operation[name[in_bam] + constant[.bai]]]]
keyword[def] identifier[remove] ( identifier[in_bam] ): literal[string] keyword[if] identifier[utils] . identifier[file_exists] ( identifier[in_bam] ): identifier[utils] . identifier[remove_safe] ( identifier[in_bam] ) keyword[if] identifier[utils] . identifier[file_exists] ( identifier[in_bam] + literal[string] ): identifier[utils] . identifier[remove_safe] ( identifier[in_bam] + literal[string] )
def remove(in_bam): """ remove bam file and the index if exists """ if utils.file_exists(in_bam): utils.remove_safe(in_bam) # depends on [control=['if'], data=[]] if utils.file_exists(in_bam + '.bai'): utils.remove_safe(in_bam + '.bai') # depends on [control=['if'], data=[]]
def get_deliveryserver(self, domainid, serverid): """Get a delivery server""" return self.api_call( ENDPOINTS['deliveryservers']['get'], dict(domainid=domainid, serverid=serverid))
def function[get_deliveryserver, parameter[self, domainid, serverid]]: constant[Get a delivery server] return[call[name[self].api_call, parameter[call[call[name[ENDPOINTS]][constant[deliveryservers]]][constant[get]], call[name[dict], parameter[]]]]]
keyword[def] identifier[get_deliveryserver] ( identifier[self] , identifier[domainid] , identifier[serverid] ): literal[string] keyword[return] identifier[self] . identifier[api_call] ( identifier[ENDPOINTS] [ literal[string] ][ literal[string] ], identifier[dict] ( identifier[domainid] = identifier[domainid] , identifier[serverid] = identifier[serverid] ))
def get_deliveryserver(self, domainid, serverid): """Get a delivery server""" return self.api_call(ENDPOINTS['deliveryservers']['get'], dict(domainid=domainid, serverid=serverid))
def get_application_choices(): """ Get the select options for the application selector :return: """ result = [] keys = set() for ct in ContentType.objects.order_by('app_label', 'model'): try: if issubclass(ct.model_class(), TranslatableModel) and ct.app_label not in keys: result.append(('{}'.format(ct.app_label), '{}'.format(ct.app_label.capitalize()))) keys.add(ct.app_label) except TypeError: continue return result
def function[get_application_choices, parameter[]]: constant[ Get the select options for the application selector :return: ] variable[result] assign[=] list[[]] variable[keys] assign[=] call[name[set], parameter[]] for taget[name[ct]] in starred[call[name[ContentType].objects.order_by, parameter[constant[app_label], constant[model]]]] begin[:] <ast.Try object at 0x7da204565870> return[name[result]]
keyword[def] identifier[get_application_choices] (): literal[string] identifier[result] =[] identifier[keys] = identifier[set] () keyword[for] identifier[ct] keyword[in] identifier[ContentType] . identifier[objects] . identifier[order_by] ( literal[string] , literal[string] ): keyword[try] : keyword[if] identifier[issubclass] ( identifier[ct] . identifier[model_class] (), identifier[TranslatableModel] ) keyword[and] identifier[ct] . identifier[app_label] keyword[not] keyword[in] identifier[keys] : identifier[result] . identifier[append] (( literal[string] . identifier[format] ( identifier[ct] . identifier[app_label] ), literal[string] . identifier[format] ( identifier[ct] . identifier[app_label] . identifier[capitalize] ()))) identifier[keys] . identifier[add] ( identifier[ct] . identifier[app_label] ) keyword[except] identifier[TypeError] : keyword[continue] keyword[return] identifier[result]
def get_application_choices(): """ Get the select options for the application selector :return: """ result = [] keys = set() for ct in ContentType.objects.order_by('app_label', 'model'): try: if issubclass(ct.model_class(), TranslatableModel) and ct.app_label not in keys: result.append(('{}'.format(ct.app_label), '{}'.format(ct.app_label.capitalize()))) keys.add(ct.app_label) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except TypeError: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['ct']] return result
def parallel_epd_worker(task): '''This is a parallel worker for the function below. Parameters ---------- task : tuple - task[0] = lcfile - task[1] = timecol - task[2] = magcol - task[3] = errcol - task[4] = externalparams - task[5] = lcformat - task[6] = lcformatdir - task[7] = epdsmooth_sigclip - task[8] = epdsmooth_windowsize - task[9] = epdsmooth_func - task[10] = epdsmooth_extraparams Returns ------- str or None If EPD succeeds for an input LC, returns the filename of the output EPD LC pickle file. If it fails, returns None. ''' (lcfile, timecol, magcol, errcol, externalparams, lcformat, lcformatdir, magsarefluxes, epdsmooth_sigclip, epdsmooth_windowsize, epdsmooth_func, epdsmooth_extraparams) = task try: epd = apply_epd_magseries(lcfile, timecol, magcol, errcol, externalparams, lcformat=lcformat, lcformatdir=lcformatdir, epdsmooth_sigclip=epdsmooth_sigclip, epdsmooth_windowsize=epdsmooth_windowsize, epdsmooth_func=epdsmooth_func, epdsmooth_extraparams=epdsmooth_extraparams) if epd is not None: LOGINFO('%s -> %s EPD OK' % (lcfile, epd)) return epd else: LOGERROR('EPD failed for %s' % lcfile) return None except Exception as e: LOGEXCEPTION('EPD failed for %s' % lcfile) return None
def function[parallel_epd_worker, parameter[task]]: constant[This is a parallel worker for the function below. Parameters ---------- task : tuple - task[0] = lcfile - task[1] = timecol - task[2] = magcol - task[3] = errcol - task[4] = externalparams - task[5] = lcformat - task[6] = lcformatdir - task[7] = epdsmooth_sigclip - task[8] = epdsmooth_windowsize - task[9] = epdsmooth_func - task[10] = epdsmooth_extraparams Returns ------- str or None If EPD succeeds for an input LC, returns the filename of the output EPD LC pickle file. If it fails, returns None. ] <ast.Tuple object at 0x7da1b021dcf0> assign[=] name[task] <ast.Try object at 0x7da1b021f6d0>
keyword[def] identifier[parallel_epd_worker] ( identifier[task] ): literal[string] ( identifier[lcfile] , identifier[timecol] , identifier[magcol] , identifier[errcol] , identifier[externalparams] , identifier[lcformat] , identifier[lcformatdir] , identifier[magsarefluxes] , identifier[epdsmooth_sigclip] , identifier[epdsmooth_windowsize] , identifier[epdsmooth_func] , identifier[epdsmooth_extraparams] )= identifier[task] keyword[try] : identifier[epd] = identifier[apply_epd_magseries] ( identifier[lcfile] , identifier[timecol] , identifier[magcol] , identifier[errcol] , identifier[externalparams] , identifier[lcformat] = identifier[lcformat] , identifier[lcformatdir] = identifier[lcformatdir] , identifier[epdsmooth_sigclip] = identifier[epdsmooth_sigclip] , identifier[epdsmooth_windowsize] = identifier[epdsmooth_windowsize] , identifier[epdsmooth_func] = identifier[epdsmooth_func] , identifier[epdsmooth_extraparams] = identifier[epdsmooth_extraparams] ) keyword[if] identifier[epd] keyword[is] keyword[not] keyword[None] : identifier[LOGINFO] ( literal[string] %( identifier[lcfile] , identifier[epd] )) keyword[return] identifier[epd] keyword[else] : identifier[LOGERROR] ( literal[string] % identifier[lcfile] ) keyword[return] keyword[None] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[LOGEXCEPTION] ( literal[string] % identifier[lcfile] ) keyword[return] keyword[None]
def parallel_epd_worker(task): """This is a parallel worker for the function below. Parameters ---------- task : tuple - task[0] = lcfile - task[1] = timecol - task[2] = magcol - task[3] = errcol - task[4] = externalparams - task[5] = lcformat - task[6] = lcformatdir - task[7] = epdsmooth_sigclip - task[8] = epdsmooth_windowsize - task[9] = epdsmooth_func - task[10] = epdsmooth_extraparams Returns ------- str or None If EPD succeeds for an input LC, returns the filename of the output EPD LC pickle file. If it fails, returns None. """ (lcfile, timecol, magcol, errcol, externalparams, lcformat, lcformatdir, magsarefluxes, epdsmooth_sigclip, epdsmooth_windowsize, epdsmooth_func, epdsmooth_extraparams) = task try: epd = apply_epd_magseries(lcfile, timecol, magcol, errcol, externalparams, lcformat=lcformat, lcformatdir=lcformatdir, epdsmooth_sigclip=epdsmooth_sigclip, epdsmooth_windowsize=epdsmooth_windowsize, epdsmooth_func=epdsmooth_func, epdsmooth_extraparams=epdsmooth_extraparams) if epd is not None: LOGINFO('%s -> %s EPD OK' % (lcfile, epd)) return epd # depends on [control=['if'], data=['epd']] else: LOGERROR('EPD failed for %s' % lcfile) return None # depends on [control=['try'], data=[]] except Exception as e: LOGEXCEPTION('EPD failed for %s' % lcfile) return None # depends on [control=['except'], data=[]]
def _set_vrf(self, v, load=False): """ Setter method for vrf, mapped from YANG variable /routing_system/interface/ve/vrf (container) If this variable is read-only (config: false) in the source YANG file, then _set_vrf is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_vrf() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=vrf.vrf, is_container='container', presence=False, yang_name="vrf", rest_name="vrf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Assign VRF to this ethernet interface', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_VRF_BIND_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-ip-config', defining_module='brocade-ip-config', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """vrf must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=vrf.vrf, is_container='container', presence=False, yang_name="vrf", rest_name="vrf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Assign VRF to this ethernet interface', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_VRF_BIND_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-ip-config', defining_module='brocade-ip-config', yang_type='container', is_config=True)""", }) self.__vrf = t if hasattr(self, '_set'): self._set()
def function[_set_vrf, parameter[self, v, load]]: constant[ Setter method for vrf, mapped from YANG variable /routing_system/interface/ve/vrf (container) If this variable is read-only (config: false) in the source YANG file, then _set_vrf is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_vrf() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da20e962ef0> name[self].__vrf assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_vrf] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[vrf] . identifier[vrf] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__vrf] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_vrf(self, v, load=False): """ Setter method for vrf, mapped from YANG variable /routing_system/interface/ve/vrf (container) If this variable is read-only (config: false) in the source YANG file, then _set_vrf is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_vrf() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=vrf.vrf, is_container='container', presence=False, yang_name='vrf', rest_name='vrf', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Assign VRF to this ethernet interface', u'cli-incomplete-no': None, u'sort-priority': u'RUNNCFG_INTERFACE_LEVEL_VRF_BIND_CONFIG'}}, namespace='urn:brocade.com:mgmt:brocade-ip-config', defining_module='brocade-ip-config', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'vrf must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=vrf.vrf, is_container=\'container\', presence=False, yang_name="vrf", rest_name="vrf", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Assign VRF to this ethernet interface\', u\'cli-incomplete-no\': None, u\'sort-priority\': u\'RUNNCFG_INTERFACE_LEVEL_VRF_BIND_CONFIG\'}}, namespace=\'urn:brocade.com:mgmt:brocade-ip-config\', defining_module=\'brocade-ip-config\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__vrf = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def format_log_context(msg, connection=None, keyspace=None): """Format log message to add keyspace and connection context""" connection_info = connection or 'DEFAULT_CONNECTION' if keyspace: msg = '[Connection: {0}, Keyspace: {1}] {2}'.format(connection_info, keyspace, msg) else: msg = '[Connection: {0}] {1}'.format(connection_info, msg) return msg
def function[format_log_context, parameter[msg, connection, keyspace]]: constant[Format log message to add keyspace and connection context] variable[connection_info] assign[=] <ast.BoolOp object at 0x7da2044c2b30> if name[keyspace] begin[:] variable[msg] assign[=] call[constant[[Connection: {0}, Keyspace: {1}] {2}].format, parameter[name[connection_info], name[keyspace], name[msg]]] return[name[msg]]
keyword[def] identifier[format_log_context] ( identifier[msg] , identifier[connection] = keyword[None] , identifier[keyspace] = keyword[None] ): literal[string] identifier[connection_info] = identifier[connection] keyword[or] literal[string] keyword[if] identifier[keyspace] : identifier[msg] = literal[string] . identifier[format] ( identifier[connection_info] , identifier[keyspace] , identifier[msg] ) keyword[else] : identifier[msg] = literal[string] . identifier[format] ( identifier[connection_info] , identifier[msg] ) keyword[return] identifier[msg]
def format_log_context(msg, connection=None, keyspace=None): """Format log message to add keyspace and connection context""" connection_info = connection or 'DEFAULT_CONNECTION' if keyspace: msg = '[Connection: {0}, Keyspace: {1}] {2}'.format(connection_info, keyspace, msg) # depends on [control=['if'], data=[]] else: msg = '[Connection: {0}] {1}'.format(connection_info, msg) return msg
def compliance_schedule(self, column=None, value=None, **kwargs): """ A sequence of activities with associated milestones which pertains to a given permit. >>> PCS().compliance_schedule('cmpl_schd_evt', '62099') """ return self._resolve_call('PCS_CMPL_SCHD', column, value, **kwargs)
def function[compliance_schedule, parameter[self, column, value]]: constant[ A sequence of activities with associated milestones which pertains to a given permit. >>> PCS().compliance_schedule('cmpl_schd_evt', '62099') ] return[call[name[self]._resolve_call, parameter[constant[PCS_CMPL_SCHD], name[column], name[value]]]]
keyword[def] identifier[compliance_schedule] ( identifier[self] , identifier[column] = keyword[None] , identifier[value] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[_resolve_call] ( literal[string] , identifier[column] , identifier[value] ,** identifier[kwargs] )
def compliance_schedule(self, column=None, value=None, **kwargs): """ A sequence of activities with associated milestones which pertains to a given permit. >>> PCS().compliance_schedule('cmpl_schd_evt', '62099') """ return self._resolve_call('PCS_CMPL_SCHD', column, value, **kwargs)
def getdesc(self, actual=True): """Get the table description. By default it returns the actual description (thus telling the actual array shapes and data managers used). `actual=False` means that the original description as made by :func:`maketabdesc` is returned. """ tabledesc = self._getdesc(actual, True) # Strip out 0 length "HCcoordnames" and "HCidnames" # as these aren't valid. (See tabledefinehypercolumn) hcdefs = tabledesc.get('_define_hypercolumn_', {}) for c, hcdef in hcdefs.iteritems(): if "HCcoordnames" in hcdef and len(hcdef["HCcoordnames"]) == 0: del hcdef["HCcoordnames"] if "HCidnames" in hcdef and len(hcdef["HCidnames"]) == 0: del hcdef["HCidnames"] return tabledesc
def function[getdesc, parameter[self, actual]]: constant[Get the table description. By default it returns the actual description (thus telling the actual array shapes and data managers used). `actual=False` means that the original description as made by :func:`maketabdesc` is returned. ] variable[tabledesc] assign[=] call[name[self]._getdesc, parameter[name[actual], constant[True]]] variable[hcdefs] assign[=] call[name[tabledesc].get, parameter[constant[_define_hypercolumn_], dictionary[[], []]]] for taget[tuple[[<ast.Name object at 0x7da20c795de0>, <ast.Name object at 0x7da20c795330>]]] in starred[call[name[hcdefs].iteritems, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da20c797730> begin[:] <ast.Delete object at 0x7da20c794940> if <ast.BoolOp object at 0x7da20c796aa0> begin[:] <ast.Delete object at 0x7da207f02590> return[name[tabledesc]]
keyword[def] identifier[getdesc] ( identifier[self] , identifier[actual] = keyword[True] ): literal[string] identifier[tabledesc] = identifier[self] . identifier[_getdesc] ( identifier[actual] , keyword[True] ) identifier[hcdefs] = identifier[tabledesc] . identifier[get] ( literal[string] ,{}) keyword[for] identifier[c] , identifier[hcdef] keyword[in] identifier[hcdefs] . identifier[iteritems] (): keyword[if] literal[string] keyword[in] identifier[hcdef] keyword[and] identifier[len] ( identifier[hcdef] [ literal[string] ])== literal[int] : keyword[del] identifier[hcdef] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[hcdef] keyword[and] identifier[len] ( identifier[hcdef] [ literal[string] ])== literal[int] : keyword[del] identifier[hcdef] [ literal[string] ] keyword[return] identifier[tabledesc]
def getdesc(self, actual=True): """Get the table description. By default it returns the actual description (thus telling the actual array shapes and data managers used). `actual=False` means that the original description as made by :func:`maketabdesc` is returned. """ tabledesc = self._getdesc(actual, True) # Strip out 0 length "HCcoordnames" and "HCidnames" # as these aren't valid. (See tabledefinehypercolumn) hcdefs = tabledesc.get('_define_hypercolumn_', {}) for (c, hcdef) in hcdefs.iteritems(): if 'HCcoordnames' in hcdef and len(hcdef['HCcoordnames']) == 0: del hcdef['HCcoordnames'] # depends on [control=['if'], data=[]] if 'HCidnames' in hcdef and len(hcdef['HCidnames']) == 0: del hcdef['HCidnames'] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return tabledesc
def _exec(self, detach=True): """ daemonize and exec main() """ kwargs = { 'pidfile': self.pidfile, 'working_directory': self.home_dir, } # FIXME - doesn't work if not detach: kwargs.update({ 'detach_process': False, 'files_preserve': [0,1,2], 'stdout': sys.stdout, 'stderr': sys.stderr, }) ctx = daemon.DaemonContext(**kwargs) with ctx: self._main()
def function[_exec, parameter[self, detach]]: constant[ daemonize and exec main() ] variable[kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da18fe92b90>, <ast.Constant object at 0x7da18fe938e0>], [<ast.Attribute object at 0x7da18fe924a0>, <ast.Attribute object at 0x7da18fe91750>]] if <ast.UnaryOp object at 0x7da18fe912d0> begin[:] call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da18fe93850>, <ast.Constant object at 0x7da18fe92530>, <ast.Constant object at 0x7da18fe90760>, <ast.Constant object at 0x7da18fe923b0>], [<ast.Constant object at 0x7da18fe90d60>, <ast.List object at 0x7da18fe90040>, <ast.Attribute object at 0x7da18fe92380>, <ast.Attribute object at 0x7da18fe90730>]]]] variable[ctx] assign[=] call[name[daemon].DaemonContext, parameter[]] with name[ctx] begin[:] call[name[self]._main, parameter[]]
keyword[def] identifier[_exec] ( identifier[self] , identifier[detach] = keyword[True] ): literal[string] identifier[kwargs] ={ literal[string] : identifier[self] . identifier[pidfile] , literal[string] : identifier[self] . identifier[home_dir] , } keyword[if] keyword[not] identifier[detach] : identifier[kwargs] . identifier[update] ({ literal[string] : keyword[False] , literal[string] :[ literal[int] , literal[int] , literal[int] ], literal[string] : identifier[sys] . identifier[stdout] , literal[string] : identifier[sys] . identifier[stderr] , }) identifier[ctx] = identifier[daemon] . identifier[DaemonContext] (** identifier[kwargs] ) keyword[with] identifier[ctx] : identifier[self] . identifier[_main] ()
def _exec(self, detach=True): """ daemonize and exec main() """ kwargs = {'pidfile': self.pidfile, 'working_directory': self.home_dir} # FIXME - doesn't work if not detach: kwargs.update({'detach_process': False, 'files_preserve': [0, 1, 2], 'stdout': sys.stdout, 'stderr': sys.stderr}) # depends on [control=['if'], data=[]] ctx = daemon.DaemonContext(**kwargs) with ctx: self._main() # depends on [control=['with'], data=[]]
def intersection(self, *args): """ Produce an array that contains every item shared between all the passed-in arrays. """ if type(self.obj[0]) is int: a = self.obj else: a = tuple(self.obj[0]) setobj = set(a) for i, v in enumerate(args): setobj = setobj & set(args[i]) return self._wrap(list(setobj))
def function[intersection, parameter[self]]: constant[ Produce an array that contains every item shared between all the passed-in arrays. ] if compare[call[name[type], parameter[call[name[self].obj][constant[0]]]] is name[int]] begin[:] variable[a] assign[=] name[self].obj variable[setobj] assign[=] call[name[set], parameter[name[a]]] for taget[tuple[[<ast.Name object at 0x7da20c7c85e0>, <ast.Name object at 0x7da20c7cbb20>]]] in starred[call[name[enumerate], parameter[name[args]]]] begin[:] variable[setobj] assign[=] binary_operation[name[setobj] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[args]][name[i]]]]] return[call[name[self]._wrap, parameter[call[name[list], parameter[name[setobj]]]]]]
keyword[def] identifier[intersection] ( identifier[self] ,* identifier[args] ): literal[string] keyword[if] identifier[type] ( identifier[self] . identifier[obj] [ literal[int] ]) keyword[is] identifier[int] : identifier[a] = identifier[self] . identifier[obj] keyword[else] : identifier[a] = identifier[tuple] ( identifier[self] . identifier[obj] [ literal[int] ]) identifier[setobj] = identifier[set] ( identifier[a] ) keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[args] ): identifier[setobj] = identifier[setobj] & identifier[set] ( identifier[args] [ identifier[i] ]) keyword[return] identifier[self] . identifier[_wrap] ( identifier[list] ( identifier[setobj] ))
def intersection(self, *args): """ Produce an array that contains every item shared between all the passed-in arrays. """ if type(self.obj[0]) is int: a = self.obj # depends on [control=['if'], data=[]] else: a = tuple(self.obj[0]) setobj = set(a) for (i, v) in enumerate(args): setobj = setobj & set(args[i]) # depends on [control=['for'], data=[]] return self._wrap(list(setobj))
def unpack_nfirst(seq, nfirst): """Unpack the nfrist items from the list and return the rest. >>> a, b, c, rest = unpack_nfirst((1, 2, 3, 4, 5), 3) >>> a, b, c (1, 2, 3) >>> rest (4, 5) """ iterator = iter(seq) for _ in range(nfirst): yield next(iterator, None) yield tuple(iterator)
def function[unpack_nfirst, parameter[seq, nfirst]]: constant[Unpack the nfrist items from the list and return the rest. >>> a, b, c, rest = unpack_nfirst((1, 2, 3, 4, 5), 3) >>> a, b, c (1, 2, 3) >>> rest (4, 5) ] variable[iterator] assign[=] call[name[iter], parameter[name[seq]]] for taget[name[_]] in starred[call[name[range], parameter[name[nfirst]]]] begin[:] <ast.Yield object at 0x7da18f811570> <ast.Yield object at 0x7da18f812230>
keyword[def] identifier[unpack_nfirst] ( identifier[seq] , identifier[nfirst] ): literal[string] identifier[iterator] = identifier[iter] ( identifier[seq] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[nfirst] ): keyword[yield] identifier[next] ( identifier[iterator] , keyword[None] ) keyword[yield] identifier[tuple] ( identifier[iterator] )
def unpack_nfirst(seq, nfirst): """Unpack the nfrist items from the list and return the rest. >>> a, b, c, rest = unpack_nfirst((1, 2, 3, 4, 5), 3) >>> a, b, c (1, 2, 3) >>> rest (4, 5) """ iterator = iter(seq) for _ in range(nfirst): yield next(iterator, None) # depends on [control=['for'], data=[]] yield tuple(iterator)
def delete_collection_storage_class(self, **kwargs): # noqa: E501 """delete_collection_storage_class # noqa: E501 delete collection of StorageClass # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_storage_class(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the \"next key\". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_collection_storage_class_with_http_info(**kwargs) # noqa: E501 else: (data) = self.delete_collection_storage_class_with_http_info(**kwargs) # noqa: E501 return data
def function[delete_collection_storage_class, parameter[self]]: constant[delete_collection_storage_class # noqa: E501 delete collection of StorageClass # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_collection_storage_class(async_req=True) >>> result = thread.get() :param async_req bool :param bool include_uninitialized: If true, partially initialized resources are included in the response. :param str pretty: If 'true', then the output is pretty printed. :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications. :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything. :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything. :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned. :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it's 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv. :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity. :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion. :return: V1Status If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].delete_collection_storage_class_with_http_info, parameter[]]]
keyword[def] identifier[delete_collection_storage_class] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[delete_collection_storage_class_with_http_info] (** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[delete_collection_storage_class_with_http_info] (** identifier[kwargs] ) keyword[return] identifier[data]
def delete_collection_storage_class(self, **kwargs): # noqa: E501 'delete_collection_storage_class # noqa: E501\n\n delete collection of StorageClass # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_collection_storage_class(async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param bool include_uninitialized: If true, partially initialized resources are included in the response.\n :param str pretty: If \'true\', then the output is pretty printed.\n :param str _continue: The continue option should be set when retrieving more results from the server. Since this value is server defined, clients may only use the continue value from a previous query result with identical query parameters (except for the value of continue) and the server may reject a continue value it does not recognize. If the specified continue value is no longer valid whether due to expiration (generally five to fifteen minutes) or a configuration change on the server, the server will respond with a 410 ResourceExpired error together with a continue token. If the client needs a consistent list, it must restart their list without the continue field. Otherwise, the client may send another list request with the token received with the 410 error, the server will respond with a list starting from the next key, but from the latest snapshot, which is inconsistent from the previous list results - objects that are created, modified, or deleted after the first list request will be included in the response, as long as their keys are after the "next key". This field is not supported when watch is true. Clients may start a watch from the last resourceVersion value returned by the server and not miss any modifications.\n :param str field_selector: A selector to restrict the list of returned objects by their fields. Defaults to everything.\n :param str label_selector: A selector to restrict the list of returned objects by their labels. Defaults to everything.\n :param int limit: limit is a maximum number of responses to return for a list call. If more items exist, the server will set the `continue` field on the list metadata to a value that can be used with the same initial query to retrieve the next set of results. Setting a limit may return fewer than the requested amount of items (up to zero items) in the event all requested objects are filtered out and clients should only use the presence of the continue field to determine whether more results are available. Servers may choose not to support the limit argument and will return all of the available results. If limit is specified and the continue field is empty, clients may assume that no more results are available. This field is not supported if watch is true. The server guarantees that the objects returned when using continue will be identical to issuing a single list call without a limit - that is, no objects created, modified, or deleted after the first request is issued will be included in any subsequent continued requests. This is sometimes referred to as a consistent snapshot, and ensures that a client that is using limit to receive smaller chunks of a very large result can ensure they see all possible objects. If objects are updated during a chunked list the version of the object that was present at the time the first list result was calculated is returned.\n :param str resource_version: When specified with a watch call, shows changes that occur after that particular version of a resource. Defaults to changes from the beginning of history. When specified for list: - if unset, then the result is returned from remote storage based on quorum-read flag; - if it\'s 0, then we simply return what we currently have in cache, no guarantee; - if set to non zero, then the result is at least as fresh as given rv.\n :param int timeout_seconds: Timeout for the list/watch call. This limits the duration of the call, regardless of any activity or inactivity.\n :param bool watch: Watch for changes to the described resources and return them as a stream of add, update, and remove notifications. Specify resourceVersion.\n :return: V1Status\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.delete_collection_storage_class_with_http_info(**kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.delete_collection_storage_class_with_http_info(**kwargs) # noqa: E501 return data
def mass_erase(): """Performs a MASS erase (i.e. erases the entire device.""" # Send DNLOAD with first byte=0x41 __dev.ctrl_transfer(0x21, __DFU_DNLOAD, 0, __DFU_INTERFACE, "\x41", __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception("DFU: erase failed") # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception("DFU: erase failed")
def function[mass_erase, parameter[]]: constant[Performs a MASS erase (i.e. erases the entire device.] call[name[__dev].ctrl_transfer, parameter[constant[33], name[__DFU_DNLOAD], constant[0], name[__DFU_INTERFACE], constant[A], name[__TIMEOUT]]] if compare[call[name[get_status], parameter[]] not_equal[!=] name[__DFU_STATE_DFU_DOWNLOAD_BUSY]] begin[:] <ast.Raise object at 0x7da20e955450> if compare[call[name[get_status], parameter[]] not_equal[!=] name[__DFU_STATE_DFU_DOWNLOAD_IDLE]] begin[:] <ast.Raise object at 0x7da20e954d00>
keyword[def] identifier[mass_erase] (): literal[string] identifier[__dev] . identifier[ctrl_transfer] ( literal[int] , identifier[__DFU_DNLOAD] , literal[int] , identifier[__DFU_INTERFACE] , literal[string] , identifier[__TIMEOUT] ) keyword[if] identifier[get_status] ()!= identifier[__DFU_STATE_DFU_DOWNLOAD_BUSY] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[get_status] ()!= identifier[__DFU_STATE_DFU_DOWNLOAD_IDLE] : keyword[raise] identifier[Exception] ( literal[string] )
def mass_erase(): """Performs a MASS erase (i.e. erases the entire device.""" # Send DNLOAD with first byte=0x41 __dev.ctrl_transfer(33, __DFU_DNLOAD, 0, __DFU_INTERFACE, 'A', __TIMEOUT) # Execute last command if get_status() != __DFU_STATE_DFU_DOWNLOAD_BUSY: raise Exception('DFU: erase failed') # depends on [control=['if'], data=[]] # Check command state if get_status() != __DFU_STATE_DFU_DOWNLOAD_IDLE: raise Exception('DFU: erase failed') # depends on [control=['if'], data=[]]
def rewrite_filters_in_optional_blocks(ir_blocks): """In optional contexts, add a check for null that allows non-existent optional data through. Optional traversals in Gremlin represent missing optional data by setting the current vertex to null until the exit from the optional scope. Therefore, filtering and type coercions (which should have been lowered into filters by this point) must check for null before applying their filtering predicates. Since missing optional data isn't filtered, the new filtering predicate should be "(it == null) || existing_predicate". Args: ir_blocks: list of IR blocks to lower into Gremlin-compatible form Returns: new list of IR blocks with this lowering step applied """ new_ir_blocks = [] optional_context_depth = 0 for block in ir_blocks: new_block = block if isinstance(block, CoerceType): raise AssertionError(u'Found a CoerceType block after all such blocks should have been ' u'lowered to Filter blocks: {}'.format(ir_blocks)) elif isinstance(block, Traverse) and block.optional: optional_context_depth += 1 elif isinstance(block, Backtrack) and block.optional: optional_context_depth -= 1 if optional_context_depth < 0: raise AssertionError(u'Reached negative optional context depth for blocks: ' u'{}'.format(ir_blocks)) elif isinstance(block, Filter) and optional_context_depth > 0: null_check = BinaryComposition(u'=', LocalField('@this'), NullLiteral) new_block = Filter(BinaryComposition(u'||', null_check, block.predicate)) else: pass new_ir_blocks.append(new_block) return new_ir_blocks
def function[rewrite_filters_in_optional_blocks, parameter[ir_blocks]]: constant[In optional contexts, add a check for null that allows non-existent optional data through. Optional traversals in Gremlin represent missing optional data by setting the current vertex to null until the exit from the optional scope. Therefore, filtering and type coercions (which should have been lowered into filters by this point) must check for null before applying their filtering predicates. Since missing optional data isn't filtered, the new filtering predicate should be "(it == null) || existing_predicate". Args: ir_blocks: list of IR blocks to lower into Gremlin-compatible form Returns: new list of IR blocks with this lowering step applied ] variable[new_ir_blocks] assign[=] list[[]] variable[optional_context_depth] assign[=] constant[0] for taget[name[block]] in starred[name[ir_blocks]] begin[:] variable[new_block] assign[=] name[block] if call[name[isinstance], parameter[name[block], name[CoerceType]]] begin[:] <ast.Raise object at 0x7da18eb54fd0> call[name[new_ir_blocks].append, parameter[name[new_block]]] return[name[new_ir_blocks]]
keyword[def] identifier[rewrite_filters_in_optional_blocks] ( identifier[ir_blocks] ): literal[string] identifier[new_ir_blocks] =[] identifier[optional_context_depth] = literal[int] keyword[for] identifier[block] keyword[in] identifier[ir_blocks] : identifier[new_block] = identifier[block] keyword[if] identifier[isinstance] ( identifier[block] , identifier[CoerceType] ): keyword[raise] identifier[AssertionError] ( literal[string] literal[string] . identifier[format] ( identifier[ir_blocks] )) keyword[elif] identifier[isinstance] ( identifier[block] , identifier[Traverse] ) keyword[and] identifier[block] . identifier[optional] : identifier[optional_context_depth] += literal[int] keyword[elif] identifier[isinstance] ( identifier[block] , identifier[Backtrack] ) keyword[and] identifier[block] . identifier[optional] : identifier[optional_context_depth] -= literal[int] keyword[if] identifier[optional_context_depth] < literal[int] : keyword[raise] identifier[AssertionError] ( literal[string] literal[string] . identifier[format] ( identifier[ir_blocks] )) keyword[elif] identifier[isinstance] ( identifier[block] , identifier[Filter] ) keyword[and] identifier[optional_context_depth] > literal[int] : identifier[null_check] = identifier[BinaryComposition] ( literal[string] , identifier[LocalField] ( literal[string] ), identifier[NullLiteral] ) identifier[new_block] = identifier[Filter] ( identifier[BinaryComposition] ( literal[string] , identifier[null_check] , identifier[block] . identifier[predicate] )) keyword[else] : keyword[pass] identifier[new_ir_blocks] . identifier[append] ( identifier[new_block] ) keyword[return] identifier[new_ir_blocks]
def rewrite_filters_in_optional_blocks(ir_blocks): """In optional contexts, add a check for null that allows non-existent optional data through. Optional traversals in Gremlin represent missing optional data by setting the current vertex to null until the exit from the optional scope. Therefore, filtering and type coercions (which should have been lowered into filters by this point) must check for null before applying their filtering predicates. Since missing optional data isn't filtered, the new filtering predicate should be "(it == null) || existing_predicate". Args: ir_blocks: list of IR blocks to lower into Gremlin-compatible form Returns: new list of IR blocks with this lowering step applied """ new_ir_blocks = [] optional_context_depth = 0 for block in ir_blocks: new_block = block if isinstance(block, CoerceType): raise AssertionError(u'Found a CoerceType block after all such blocks should have been lowered to Filter blocks: {}'.format(ir_blocks)) # depends on [control=['if'], data=[]] elif isinstance(block, Traverse) and block.optional: optional_context_depth += 1 # depends on [control=['if'], data=[]] elif isinstance(block, Backtrack) and block.optional: optional_context_depth -= 1 if optional_context_depth < 0: raise AssertionError(u'Reached negative optional context depth for blocks: {}'.format(ir_blocks)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(block, Filter) and optional_context_depth > 0: null_check = BinaryComposition(u'=', LocalField('@this'), NullLiteral) new_block = Filter(BinaryComposition(u'||', null_check, block.predicate)) # depends on [control=['if'], data=[]] else: pass new_ir_blocks.append(new_block) # depends on [control=['for'], data=['block']] return new_ir_blocks
def ansi_density(color, density_standard): """ Calculates density for the given SpectralColor using the spectral weighting function provided. For example, ANSI_STATUS_T_RED. These may be found in :py:mod:`colormath.density_standards`. :param SpectralColor color: The SpectralColor object to calculate density for. :param numpy.ndarray density_standard: NumPy array of filter of choice from :py:mod:`colormath.density_standards`. :rtype: float :returns: The density value for the given color and density standard. """ # Load the spec_XXXnm attributes into a Numpy array. sample = color.get_numpy_array() # Matrix multiplication intermediate = sample * density_standard # Sum the products. numerator = intermediate.sum() # This is the denominator in the density equation. sum_of_standard_wavelengths = density_standard.sum() # This is the top level of the density formula. return -1.0 * log10(numerator / sum_of_standard_wavelengths)
def function[ansi_density, parameter[color, density_standard]]: constant[ Calculates density for the given SpectralColor using the spectral weighting function provided. For example, ANSI_STATUS_T_RED. These may be found in :py:mod:`colormath.density_standards`. :param SpectralColor color: The SpectralColor object to calculate density for. :param numpy.ndarray density_standard: NumPy array of filter of choice from :py:mod:`colormath.density_standards`. :rtype: float :returns: The density value for the given color and density standard. ] variable[sample] assign[=] call[name[color].get_numpy_array, parameter[]] variable[intermediate] assign[=] binary_operation[name[sample] * name[density_standard]] variable[numerator] assign[=] call[name[intermediate].sum, parameter[]] variable[sum_of_standard_wavelengths] assign[=] call[name[density_standard].sum, parameter[]] return[binary_operation[<ast.UnaryOp object at 0x7da18f58ccd0> * call[name[log10], parameter[binary_operation[name[numerator] / name[sum_of_standard_wavelengths]]]]]]
keyword[def] identifier[ansi_density] ( identifier[color] , identifier[density_standard] ): literal[string] identifier[sample] = identifier[color] . identifier[get_numpy_array] () identifier[intermediate] = identifier[sample] * identifier[density_standard] identifier[numerator] = identifier[intermediate] . identifier[sum] () identifier[sum_of_standard_wavelengths] = identifier[density_standard] . identifier[sum] () keyword[return] - literal[int] * identifier[log10] ( identifier[numerator] / identifier[sum_of_standard_wavelengths] )
def ansi_density(color, density_standard): """ Calculates density for the given SpectralColor using the spectral weighting function provided. For example, ANSI_STATUS_T_RED. These may be found in :py:mod:`colormath.density_standards`. :param SpectralColor color: The SpectralColor object to calculate density for. :param numpy.ndarray density_standard: NumPy array of filter of choice from :py:mod:`colormath.density_standards`. :rtype: float :returns: The density value for the given color and density standard. """ # Load the spec_XXXnm attributes into a Numpy array. sample = color.get_numpy_array() # Matrix multiplication intermediate = sample * density_standard # Sum the products. numerator = intermediate.sum() # This is the denominator in the density equation. sum_of_standard_wavelengths = density_standard.sum() # This is the top level of the density formula. return -1.0 * log10(numerator / sum_of_standard_wavelengths)
def __fetch_issues(self, from_date, to_date): """Fetch the issues""" issues_groups = self.client.issues(from_date=from_date) for raw_issues in issues_groups: issues = json.loads(raw_issues) for issue in issues: if str_to_datetime(issue['updated_at']) > to_date: return self.__init_extra_issue_fields(issue) for field in TARGET_ISSUE_FIELDS: if not issue[field]: continue if field == 'user': issue[field + '_data'] = self.__get_user(issue[field]['login']) elif field == 'assignee': issue[field + '_data'] = self.__get_issue_assignee(issue[field]) elif field == 'assignees': issue[field + '_data'] = self.__get_issue_assignees(issue[field]) elif field == 'comments': issue[field + '_data'] = self.__get_issue_comments(issue['number']) elif field == 'reactions': issue[field + '_data'] = \ self.__get_issue_reactions(issue['number'], issue['reactions']['total_count']) yield issue
def function[__fetch_issues, parameter[self, from_date, to_date]]: constant[Fetch the issues] variable[issues_groups] assign[=] call[name[self].client.issues, parameter[]] for taget[name[raw_issues]] in starred[name[issues_groups]] begin[:] variable[issues] assign[=] call[name[json].loads, parameter[name[raw_issues]]] for taget[name[issue]] in starred[name[issues]] begin[:] if compare[call[name[str_to_datetime], parameter[call[name[issue]][constant[updated_at]]]] greater[>] name[to_date]] begin[:] return[None] call[name[self].__init_extra_issue_fields, parameter[name[issue]]] for taget[name[field]] in starred[name[TARGET_ISSUE_FIELDS]] begin[:] if <ast.UnaryOp object at 0x7da1b020fe80> begin[:] continue if compare[name[field] equal[==] constant[user]] begin[:] call[name[issue]][binary_operation[name[field] + constant[_data]]] assign[=] call[name[self].__get_user, parameter[call[call[name[issue]][name[field]]][constant[login]]]] <ast.Yield object at 0x7da1b03825f0>
keyword[def] identifier[__fetch_issues] ( identifier[self] , identifier[from_date] , identifier[to_date] ): literal[string] identifier[issues_groups] = identifier[self] . identifier[client] . identifier[issues] ( identifier[from_date] = identifier[from_date] ) keyword[for] identifier[raw_issues] keyword[in] identifier[issues_groups] : identifier[issues] = identifier[json] . identifier[loads] ( identifier[raw_issues] ) keyword[for] identifier[issue] keyword[in] identifier[issues] : keyword[if] identifier[str_to_datetime] ( identifier[issue] [ literal[string] ])> identifier[to_date] : keyword[return] identifier[self] . identifier[__init_extra_issue_fields] ( identifier[issue] ) keyword[for] identifier[field] keyword[in] identifier[TARGET_ISSUE_FIELDS] : keyword[if] keyword[not] identifier[issue] [ identifier[field] ]: keyword[continue] keyword[if] identifier[field] == literal[string] : identifier[issue] [ identifier[field] + literal[string] ]= identifier[self] . identifier[__get_user] ( identifier[issue] [ identifier[field] ][ literal[string] ]) keyword[elif] identifier[field] == literal[string] : identifier[issue] [ identifier[field] + literal[string] ]= identifier[self] . identifier[__get_issue_assignee] ( identifier[issue] [ identifier[field] ]) keyword[elif] identifier[field] == literal[string] : identifier[issue] [ identifier[field] + literal[string] ]= identifier[self] . identifier[__get_issue_assignees] ( identifier[issue] [ identifier[field] ]) keyword[elif] identifier[field] == literal[string] : identifier[issue] [ identifier[field] + literal[string] ]= identifier[self] . identifier[__get_issue_comments] ( identifier[issue] [ literal[string] ]) keyword[elif] identifier[field] == literal[string] : identifier[issue] [ identifier[field] + literal[string] ]= identifier[self] . identifier[__get_issue_reactions] ( identifier[issue] [ literal[string] ], identifier[issue] [ literal[string] ][ literal[string] ]) keyword[yield] identifier[issue]
def __fetch_issues(self, from_date, to_date): """Fetch the issues""" issues_groups = self.client.issues(from_date=from_date) for raw_issues in issues_groups: issues = json.loads(raw_issues) for issue in issues: if str_to_datetime(issue['updated_at']) > to_date: return # depends on [control=['if'], data=[]] self.__init_extra_issue_fields(issue) for field in TARGET_ISSUE_FIELDS: if not issue[field]: continue # depends on [control=['if'], data=[]] if field == 'user': issue[field + '_data'] = self.__get_user(issue[field]['login']) # depends on [control=['if'], data=['field']] elif field == 'assignee': issue[field + '_data'] = self.__get_issue_assignee(issue[field]) # depends on [control=['if'], data=['field']] elif field == 'assignees': issue[field + '_data'] = self.__get_issue_assignees(issue[field]) # depends on [control=['if'], data=['field']] elif field == 'comments': issue[field + '_data'] = self.__get_issue_comments(issue['number']) # depends on [control=['if'], data=['field']] elif field == 'reactions': issue[field + '_data'] = self.__get_issue_reactions(issue['number'], issue['reactions']['total_count']) # depends on [control=['if'], data=['field']] # depends on [control=['for'], data=['field']] yield issue # depends on [control=['for'], data=['issue']] # depends on [control=['for'], data=['raw_issues']]
def write_dftbp(filename, atoms): """Writes DFTB+ readable, gen-formatted structure files Args: filename: name of the gen-file to be written atoms: object containing information about structure """ scale_pos = dftbpToBohr lines = "" # 1. line, use absolute positions natoms = atoms.get_number_of_atoms() lines += str(natoms) lines += ' S \n' # 2. line expaned_symbols = atoms.get_chemical_symbols() symbols = get_reduced_symbols(expaned_symbols) lines += ' '.join(symbols) + '\n' atom_numbers = [] for ss in expaned_symbols: atom_numbers.append(symbols.index(ss) + 1) positions = atoms.get_positions()/scale_pos for ii in range(natoms): pos = positions[ii] pos_str = "{:3d} {:3d} {:20.15f} {:20.15f} {:20.15f}\n".format( ii + 1, atom_numbers[ii], pos[0], pos[1], pos[2]) lines += pos_str # origin arbitrary lines +='0.0 0.0 0.0\n' cell = atoms.get_cell()/scale_pos for ii in range(3): cell_str = "{:20.15f} {:20.15f} {:20.15f}\n".format( cell[ii][0], cell[ii][1], cell[ii][2]) lines += cell_str outfile = open(filename, 'w') outfile.write(lines)
def function[write_dftbp, parameter[filename, atoms]]: constant[Writes DFTB+ readable, gen-formatted structure files Args: filename: name of the gen-file to be written atoms: object containing information about structure ] variable[scale_pos] assign[=] name[dftbpToBohr] variable[lines] assign[=] constant[] variable[natoms] assign[=] call[name[atoms].get_number_of_atoms, parameter[]] <ast.AugAssign object at 0x7da2045647f0> <ast.AugAssign object at 0x7da204566050> variable[expaned_symbols] assign[=] call[name[atoms].get_chemical_symbols, parameter[]] variable[symbols] assign[=] call[name[get_reduced_symbols], parameter[name[expaned_symbols]]] <ast.AugAssign object at 0x7da2045668c0> variable[atom_numbers] assign[=] list[[]] for taget[name[ss]] in starred[name[expaned_symbols]] begin[:] call[name[atom_numbers].append, parameter[binary_operation[call[name[symbols].index, parameter[name[ss]]] + constant[1]]]] variable[positions] assign[=] binary_operation[call[name[atoms].get_positions, parameter[]] / name[scale_pos]] for taget[name[ii]] in starred[call[name[range], parameter[name[natoms]]]] begin[:] variable[pos] assign[=] call[name[positions]][name[ii]] variable[pos_str] assign[=] call[constant[{:3d} {:3d} {:20.15f} {:20.15f} {:20.15f} ].format, parameter[binary_operation[name[ii] + constant[1]], call[name[atom_numbers]][name[ii]], call[name[pos]][constant[0]], call[name[pos]][constant[1]], call[name[pos]][constant[2]]]] <ast.AugAssign object at 0x7da204566b00> <ast.AugAssign object at 0x7da204567e50> variable[cell] assign[=] binary_operation[call[name[atoms].get_cell, parameter[]] / name[scale_pos]] for taget[name[ii]] in starred[call[name[range], parameter[constant[3]]]] begin[:] variable[cell_str] assign[=] call[constant[{:20.15f} {:20.15f} {:20.15f} ].format, parameter[call[call[name[cell]][name[ii]]][constant[0]], call[call[name[cell]][name[ii]]][constant[1]], call[call[name[cell]][name[ii]]][constant[2]]]] <ast.AugAssign object at 0x7da2045666b0> variable[outfile] assign[=] call[name[open], parameter[name[filename], constant[w]]] call[name[outfile].write, parameter[name[lines]]]
keyword[def] identifier[write_dftbp] ( identifier[filename] , identifier[atoms] ): literal[string] identifier[scale_pos] = identifier[dftbpToBohr] identifier[lines] = literal[string] identifier[natoms] = identifier[atoms] . identifier[get_number_of_atoms] () identifier[lines] += identifier[str] ( identifier[natoms] ) identifier[lines] += literal[string] identifier[expaned_symbols] = identifier[atoms] . identifier[get_chemical_symbols] () identifier[symbols] = identifier[get_reduced_symbols] ( identifier[expaned_symbols] ) identifier[lines] += literal[string] . identifier[join] ( identifier[symbols] )+ literal[string] identifier[atom_numbers] =[] keyword[for] identifier[ss] keyword[in] identifier[expaned_symbols] : identifier[atom_numbers] . identifier[append] ( identifier[symbols] . identifier[index] ( identifier[ss] )+ literal[int] ) identifier[positions] = identifier[atoms] . identifier[get_positions] ()/ identifier[scale_pos] keyword[for] identifier[ii] keyword[in] identifier[range] ( identifier[natoms] ): identifier[pos] = identifier[positions] [ identifier[ii] ] identifier[pos_str] = literal[string] . identifier[format] ( identifier[ii] + literal[int] , identifier[atom_numbers] [ identifier[ii] ], identifier[pos] [ literal[int] ], identifier[pos] [ literal[int] ], identifier[pos] [ literal[int] ]) identifier[lines] += identifier[pos_str] identifier[lines] += literal[string] identifier[cell] = identifier[atoms] . identifier[get_cell] ()/ identifier[scale_pos] keyword[for] identifier[ii] keyword[in] identifier[range] ( literal[int] ): identifier[cell_str] = literal[string] . identifier[format] ( identifier[cell] [ identifier[ii] ][ literal[int] ], identifier[cell] [ identifier[ii] ][ literal[int] ], identifier[cell] [ identifier[ii] ][ literal[int] ]) identifier[lines] += identifier[cell_str] identifier[outfile] = identifier[open] ( identifier[filename] , literal[string] ) identifier[outfile] . identifier[write] ( identifier[lines] )
def write_dftbp(filename, atoms): """Writes DFTB+ readable, gen-formatted structure files Args: filename: name of the gen-file to be written atoms: object containing information about structure """ scale_pos = dftbpToBohr lines = '' # 1. line, use absolute positions natoms = atoms.get_number_of_atoms() lines += str(natoms) lines += ' S \n' # 2. line expaned_symbols = atoms.get_chemical_symbols() symbols = get_reduced_symbols(expaned_symbols) lines += ' '.join(symbols) + '\n' atom_numbers = [] for ss in expaned_symbols: atom_numbers.append(symbols.index(ss) + 1) # depends on [control=['for'], data=['ss']] positions = atoms.get_positions() / scale_pos for ii in range(natoms): pos = positions[ii] pos_str = '{:3d} {:3d} {:20.15f} {:20.15f} {:20.15f}\n'.format(ii + 1, atom_numbers[ii], pos[0], pos[1], pos[2]) lines += pos_str # depends on [control=['for'], data=['ii']] # origin arbitrary lines += '0.0 0.0 0.0\n' cell = atoms.get_cell() / scale_pos for ii in range(3): cell_str = '{:20.15f} {:20.15f} {:20.15f}\n'.format(cell[ii][0], cell[ii][1], cell[ii][2]) lines += cell_str # depends on [control=['for'], data=['ii']] outfile = open(filename, 'w') outfile.write(lines)
def _fetch_article(self, article_id): """Fetch article data :param article_id: id of the article to fetch """ fetched_data = self.handler.article(article_id) data = { 'number': fetched_data[1].number, 'message_id': fetched_data[1].message_id, 'lines': fetched_data[1].lines } return data
def function[_fetch_article, parameter[self, article_id]]: constant[Fetch article data :param article_id: id of the article to fetch ] variable[fetched_data] assign[=] call[name[self].handler.article, parameter[name[article_id]]] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b0284e20>, <ast.Constant object at 0x7da1b0284520>, <ast.Constant object at 0x7da1b033a5c0>], [<ast.Attribute object at 0x7da1b0339840>, <ast.Attribute object at 0x7da1b033b700>, <ast.Attribute object at 0x7da1b0338490>]] return[name[data]]
keyword[def] identifier[_fetch_article] ( identifier[self] , identifier[article_id] ): literal[string] identifier[fetched_data] = identifier[self] . identifier[handler] . identifier[article] ( identifier[article_id] ) identifier[data] ={ literal[string] : identifier[fetched_data] [ literal[int] ]. identifier[number] , literal[string] : identifier[fetched_data] [ literal[int] ]. identifier[message_id] , literal[string] : identifier[fetched_data] [ literal[int] ]. identifier[lines] } keyword[return] identifier[data]
def _fetch_article(self, article_id): """Fetch article data :param article_id: id of the article to fetch """ fetched_data = self.handler.article(article_id) data = {'number': fetched_data[1].number, 'message_id': fetched_data[1].message_id, 'lines': fetched_data[1].lines} return data
def table(self,x,*args,**kwargs): """ returns the values of column x in an n-dimensional array, each dimension being the values from a dimension in *args at the moment very slow due to generate :/ """ reduce_func = kwargs.get('reduce',lambda x: x) data = [] for i,d in enumerate(self.generate(*args)): if len(d) > 0: data.append(reduce_func(d[x])) else: data.append(0) try: return np.reshape(data,[len(self.space(a)) for a in args]) except: return np.reshape(data,[-1]+[len(self.space(a)) for a in args])
def function[table, parameter[self, x]]: constant[ returns the values of column x in an n-dimensional array, each dimension being the values from a dimension in *args at the moment very slow due to generate :/ ] variable[reduce_func] assign[=] call[name[kwargs].get, parameter[constant[reduce], <ast.Lambda object at 0x7da18ede40d0>]] variable[data] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18ede5450>, <ast.Name object at 0x7da18ede7fd0>]]] in starred[call[name[enumerate], parameter[call[name[self].generate, parameter[<ast.Starred object at 0x7da18ede4eb0>]]]]] begin[:] if compare[call[name[len], parameter[name[d]]] greater[>] constant[0]] begin[:] call[name[data].append, parameter[call[name[reduce_func], parameter[call[name[d]][name[x]]]]]] <ast.Try object at 0x7da18ede46d0>
keyword[def] identifier[table] ( identifier[self] , identifier[x] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[reduce_func] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[lambda] identifier[x] : identifier[x] ) identifier[data] =[] keyword[for] identifier[i] , identifier[d] keyword[in] identifier[enumerate] ( identifier[self] . identifier[generate] (* identifier[args] )): keyword[if] identifier[len] ( identifier[d] )> literal[int] : identifier[data] . identifier[append] ( identifier[reduce_func] ( identifier[d] [ identifier[x] ])) keyword[else] : identifier[data] . identifier[append] ( literal[int] ) keyword[try] : keyword[return] identifier[np] . identifier[reshape] ( identifier[data] ,[ identifier[len] ( identifier[self] . identifier[space] ( identifier[a] )) keyword[for] identifier[a] keyword[in] identifier[args] ]) keyword[except] : keyword[return] identifier[np] . identifier[reshape] ( identifier[data] ,[- literal[int] ]+[ identifier[len] ( identifier[self] . identifier[space] ( identifier[a] )) keyword[for] identifier[a] keyword[in] identifier[args] ])
def table(self, x, *args, **kwargs): """ returns the values of column x in an n-dimensional array, each dimension being the values from a dimension in *args at the moment very slow due to generate :/ """ reduce_func = kwargs.get('reduce', lambda x: x) data = [] for (i, d) in enumerate(self.generate(*args)): if len(d) > 0: data.append(reduce_func(d[x])) # depends on [control=['if'], data=[]] else: data.append(0) # depends on [control=['for'], data=[]] try: return np.reshape(data, [len(self.space(a)) for a in args]) # depends on [control=['try'], data=[]] except: return np.reshape(data, [-1] + [len(self.space(a)) for a in args]) # depends on [control=['except'], data=[]]
def addreadergroup(self, newgroup): """Add a reader group""" hresult, hcontext = SCardEstablishContext(SCARD_SCOPE_USER) if 0 != hresult: raise error( 'Failed to establish context: ' + \ SCardGetErrorMessage(hresult)) try: hresult = SCardIntroduceReaderGroup(hcontext, newgroup) if 0 != hresult: raise error( 'Unable to introduce reader group: ' + \ SCardGetErrorMessage(hresult)) else: innerreadergroups.addreadergroup(self, newgroup) finally: hresult = SCardReleaseContext(hcontext) if 0 != hresult: raise error( 'Failed to release context: ' + \ SCardGetErrorMessage(hresult))
def function[addreadergroup, parameter[self, newgroup]]: constant[Add a reader group] <ast.Tuple object at 0x7da1b23eca30> assign[=] call[name[SCardEstablishContext], parameter[name[SCARD_SCOPE_USER]]] if compare[constant[0] not_equal[!=] name[hresult]] begin[:] <ast.Raise object at 0x7da1b23ec1c0> <ast.Try object at 0x7da20c76e5f0>
keyword[def] identifier[addreadergroup] ( identifier[self] , identifier[newgroup] ): literal[string] identifier[hresult] , identifier[hcontext] = identifier[SCardEstablishContext] ( identifier[SCARD_SCOPE_USER] ) keyword[if] literal[int] != identifier[hresult] : keyword[raise] identifier[error] ( literal[string] + identifier[SCardGetErrorMessage] ( identifier[hresult] )) keyword[try] : identifier[hresult] = identifier[SCardIntroduceReaderGroup] ( identifier[hcontext] , identifier[newgroup] ) keyword[if] literal[int] != identifier[hresult] : keyword[raise] identifier[error] ( literal[string] + identifier[SCardGetErrorMessage] ( identifier[hresult] )) keyword[else] : identifier[innerreadergroups] . identifier[addreadergroup] ( identifier[self] , identifier[newgroup] ) keyword[finally] : identifier[hresult] = identifier[SCardReleaseContext] ( identifier[hcontext] ) keyword[if] literal[int] != identifier[hresult] : keyword[raise] identifier[error] ( literal[string] + identifier[SCardGetErrorMessage] ( identifier[hresult] ))
def addreadergroup(self, newgroup): """Add a reader group""" (hresult, hcontext) = SCardEstablishContext(SCARD_SCOPE_USER) if 0 != hresult: raise error('Failed to establish context: ' + SCardGetErrorMessage(hresult)) # depends on [control=['if'], data=['hresult']] try: hresult = SCardIntroduceReaderGroup(hcontext, newgroup) if 0 != hresult: raise error('Unable to introduce reader group: ' + SCardGetErrorMessage(hresult)) # depends on [control=['if'], data=['hresult']] else: innerreadergroups.addreadergroup(self, newgroup) # depends on [control=['try'], data=[]] finally: hresult = SCardReleaseContext(hcontext) if 0 != hresult: raise error('Failed to release context: ' + SCardGetErrorMessage(hresult)) # depends on [control=['if'], data=['hresult']]
def set_general_setting(key, value, qsettings=None): """Set value to QSettings based on key. :param key: Unique key for setting. :type key: basestring :param value: Value to be saved. :type value: QVariant :param qsettings: A custom QSettings to use. If it's not defined, it will use the default one. :type qsettings: qgis.PyQt.QtCore.QSettings """ if not qsettings: qsettings = QSettings() qsettings.setValue(key, deep_convert_dict(value))
def function[set_general_setting, parameter[key, value, qsettings]]: constant[Set value to QSettings based on key. :param key: Unique key for setting. :type key: basestring :param value: Value to be saved. :type value: QVariant :param qsettings: A custom QSettings to use. If it's not defined, it will use the default one. :type qsettings: qgis.PyQt.QtCore.QSettings ] if <ast.UnaryOp object at 0x7da18f58c190> begin[:] variable[qsettings] assign[=] call[name[QSettings], parameter[]] call[name[qsettings].setValue, parameter[name[key], call[name[deep_convert_dict], parameter[name[value]]]]]
keyword[def] identifier[set_general_setting] ( identifier[key] , identifier[value] , identifier[qsettings] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[qsettings] : identifier[qsettings] = identifier[QSettings] () identifier[qsettings] . identifier[setValue] ( identifier[key] , identifier[deep_convert_dict] ( identifier[value] ))
def set_general_setting(key, value, qsettings=None): """Set value to QSettings based on key. :param key: Unique key for setting. :type key: basestring :param value: Value to be saved. :type value: QVariant :param qsettings: A custom QSettings to use. If it's not defined, it will use the default one. :type qsettings: qgis.PyQt.QtCore.QSettings """ if not qsettings: qsettings = QSettings() # depends on [control=['if'], data=[]] qsettings.setValue(key, deep_convert_dict(value))
def hkdf_expand(pseudo_random_key, info=b"", length=32, hash=hashlib.sha512): ''' Expand `pseudo_random_key` and `info` into a key of length `bytes` using HKDF's expand function based on HMAC with the provided hash (default SHA-512). See the HKDF draft RFC and paper for usage notes. ''' hash_len = hash().digest_size length = int(length) if length > 255 * hash_len: raise Exception("Cannot expand to more than 255 * %d = %d bytes using the specified hash function" %\ (hash_len, 255 * hash_len)) blocks_needed = length // hash_len + (0 if length % hash_len == 0 else 1) # ceil okm = b"" output_block = b"" for counter in range(blocks_needed): output_block = hmac.new(pseudo_random_key, buffer(output_block + info + bytearray((counter + 1,))),\ hash).digest() okm += output_block return okm[:length]
def function[hkdf_expand, parameter[pseudo_random_key, info, length, hash]]: constant[ Expand `pseudo_random_key` and `info` into a key of length `bytes` using HKDF's expand function based on HMAC with the provided hash (default SHA-512). See the HKDF draft RFC and paper for usage notes. ] variable[hash_len] assign[=] call[name[hash], parameter[]].digest_size variable[length] assign[=] call[name[int], parameter[name[length]]] if compare[name[length] greater[>] binary_operation[constant[255] * name[hash_len]]] begin[:] <ast.Raise object at 0x7da1b0e49900> variable[blocks_needed] assign[=] binary_operation[binary_operation[name[length] <ast.FloorDiv object at 0x7da2590d6bc0> name[hash_len]] + <ast.IfExp object at 0x7da1b0e4b3a0>] variable[okm] assign[=] constant[b''] variable[output_block] assign[=] constant[b''] for taget[name[counter]] in starred[call[name[range], parameter[name[blocks_needed]]]] begin[:] variable[output_block] assign[=] call[call[name[hmac].new, parameter[name[pseudo_random_key], call[name[buffer], parameter[binary_operation[binary_operation[name[output_block] + name[info]] + call[name[bytearray], parameter[tuple[[<ast.BinOp object at 0x7da1b0ea0790>]]]]]]], name[hash]]].digest, parameter[]] <ast.AugAssign object at 0x7da1b0ea10c0> return[call[name[okm]][<ast.Slice object at 0x7da1b0ea0df0>]]
keyword[def] identifier[hkdf_expand] ( identifier[pseudo_random_key] , identifier[info] = literal[string] , identifier[length] = literal[int] , identifier[hash] = identifier[hashlib] . identifier[sha512] ): literal[string] identifier[hash_len] = identifier[hash] (). identifier[digest_size] identifier[length] = identifier[int] ( identifier[length] ) keyword[if] identifier[length] > literal[int] * identifier[hash_len] : keyword[raise] identifier[Exception] ( literal[string] %( identifier[hash_len] , literal[int] * identifier[hash_len] )) identifier[blocks_needed] = identifier[length] // identifier[hash_len] +( literal[int] keyword[if] identifier[length] % identifier[hash_len] == literal[int] keyword[else] literal[int] ) identifier[okm] = literal[string] identifier[output_block] = literal[string] keyword[for] identifier[counter] keyword[in] identifier[range] ( identifier[blocks_needed] ): identifier[output_block] = identifier[hmac] . identifier[new] ( identifier[pseudo_random_key] , identifier[buffer] ( identifier[output_block] + identifier[info] + identifier[bytearray] (( identifier[counter] + literal[int] ,))), identifier[hash] ). identifier[digest] () identifier[okm] += identifier[output_block] keyword[return] identifier[okm] [: identifier[length] ]
def hkdf_expand(pseudo_random_key, info=b'', length=32, hash=hashlib.sha512): """ Expand `pseudo_random_key` and `info` into a key of length `bytes` using HKDF's expand function based on HMAC with the provided hash (default SHA-512). See the HKDF draft RFC and paper for usage notes. """ hash_len = hash().digest_size length = int(length) if length > 255 * hash_len: raise Exception('Cannot expand to more than 255 * %d = %d bytes using the specified hash function' % (hash_len, 255 * hash_len)) # depends on [control=['if'], data=[]] blocks_needed = length // hash_len + (0 if length % hash_len == 0 else 1) # ceil okm = b'' output_block = b'' for counter in range(blocks_needed): output_block = hmac.new(pseudo_random_key, buffer(output_block + info + bytearray((counter + 1,))), hash).digest() okm += output_block # depends on [control=['for'], data=['counter']] return okm[:length]
def render_response(self): """Render as a string formatted for HTTP response headers (detailed 'Set-Cookie: ' style). """ # Use whatever renderers are defined for name and value. # (.attributes() is responsible for all other rendering.) name, value = self.name, self.value renderer = self.attribute_renderers.get('name', None) if renderer: name = renderer(name) renderer = self.attribute_renderers.get('value', None) if renderer: value = renderer(value) return '; '.join( ['{0}={1}'.format(name, value)] + [key if isinstance(val, bool) else '='.join((key, val)) for key, val in self.attributes().items()] )
def function[render_response, parameter[self]]: constant[Render as a string formatted for HTTP response headers (detailed 'Set-Cookie: ' style). ] <ast.Tuple object at 0x7da2044c39a0> assign[=] tuple[[<ast.Attribute object at 0x7da2044c10f0>, <ast.Attribute object at 0x7da2044c3040>]] variable[renderer] assign[=] call[name[self].attribute_renderers.get, parameter[constant[name], constant[None]]] if name[renderer] begin[:] variable[name] assign[=] call[name[renderer], parameter[name[name]]] variable[renderer] assign[=] call[name[self].attribute_renderers.get, parameter[constant[value], constant[None]]] if name[renderer] begin[:] variable[value] assign[=] call[name[renderer], parameter[name[value]]] return[call[constant[; ].join, parameter[binary_operation[list[[<ast.Call object at 0x7da2044c1ab0>]] + <ast.ListComp object at 0x7da2044c3a90>]]]]
keyword[def] identifier[render_response] ( identifier[self] ): literal[string] identifier[name] , identifier[value] = identifier[self] . identifier[name] , identifier[self] . identifier[value] identifier[renderer] = identifier[self] . identifier[attribute_renderers] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[renderer] : identifier[name] = identifier[renderer] ( identifier[name] ) identifier[renderer] = identifier[self] . identifier[attribute_renderers] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[renderer] : identifier[value] = identifier[renderer] ( identifier[value] ) keyword[return] literal[string] . identifier[join] ( [ literal[string] . identifier[format] ( identifier[name] , identifier[value] )]+ [ identifier[key] keyword[if] identifier[isinstance] ( identifier[val] , identifier[bool] ) keyword[else] literal[string] . identifier[join] (( identifier[key] , identifier[val] )) keyword[for] identifier[key] , identifier[val] keyword[in] identifier[self] . identifier[attributes] (). identifier[items] ()] )
def render_response(self): """Render as a string formatted for HTTP response headers (detailed 'Set-Cookie: ' style). """ # Use whatever renderers are defined for name and value. # (.attributes() is responsible for all other rendering.) (name, value) = (self.name, self.value) renderer = self.attribute_renderers.get('name', None) if renderer: name = renderer(name) # depends on [control=['if'], data=[]] renderer = self.attribute_renderers.get('value', None) if renderer: value = renderer(value) # depends on [control=['if'], data=[]] return '; '.join(['{0}={1}'.format(name, value)] + [key if isinstance(val, bool) else '='.join((key, val)) for (key, val) in self.attributes().items()])
def createsuperuser(settings_module, username, email, bin_env=None, database=None, pythonpath=None, env=None, runas=None): ''' Create a super user for the database. This function defaults to use the ``--noinput`` flag which prevents the creation of a password for the superuser. CLI Example: .. code-block:: bash salt '*' django.createsuperuser <settings_module> user user@example.com ''' args = ['noinput'] kwargs = dict( email=email, username=username, ) if database: kwargs['database'] = database return command(settings_module, 'createsuperuser', bin_env, pythonpath, env, runas, *args, **kwargs)
def function[createsuperuser, parameter[settings_module, username, email, bin_env, database, pythonpath, env, runas]]: constant[ Create a super user for the database. This function defaults to use the ``--noinput`` flag which prevents the creation of a password for the superuser. CLI Example: .. code-block:: bash salt '*' django.createsuperuser <settings_module> user user@example.com ] variable[args] assign[=] list[[<ast.Constant object at 0x7da1b21e20b0>]] variable[kwargs] assign[=] call[name[dict], parameter[]] if name[database] begin[:] call[name[kwargs]][constant[database]] assign[=] name[database] return[call[name[command], parameter[name[settings_module], constant[createsuperuser], name[bin_env], name[pythonpath], name[env], name[runas], <ast.Starred object at 0x7da1b21e3280>]]]
keyword[def] identifier[createsuperuser] ( identifier[settings_module] , identifier[username] , identifier[email] , identifier[bin_env] = keyword[None] , identifier[database] = keyword[None] , identifier[pythonpath] = keyword[None] , identifier[env] = keyword[None] , identifier[runas] = keyword[None] ): literal[string] identifier[args] =[ literal[string] ] identifier[kwargs] = identifier[dict] ( identifier[email] = identifier[email] , identifier[username] = identifier[username] , ) keyword[if] identifier[database] : identifier[kwargs] [ literal[string] ]= identifier[database] keyword[return] identifier[command] ( identifier[settings_module] , literal[string] , identifier[bin_env] , identifier[pythonpath] , identifier[env] , identifier[runas] , * identifier[args] ,** identifier[kwargs] )
def createsuperuser(settings_module, username, email, bin_env=None, database=None, pythonpath=None, env=None, runas=None): """ Create a super user for the database. This function defaults to use the ``--noinput`` flag which prevents the creation of a password for the superuser. CLI Example: .. code-block:: bash salt '*' django.createsuperuser <settings_module> user user@example.com """ args = ['noinput'] kwargs = dict(email=email, username=username) if database: kwargs['database'] = database # depends on [control=['if'], data=[]] return command(settings_module, 'createsuperuser', bin_env, pythonpath, env, runas, *args, **kwargs)
def _is_domain_valid(self, url, tld): """ Checks if given URL has valid domain name (ignores subdomains) :param str url: complete URL that we want to check :param str tld: TLD that should be found at the end of URL (hostname) :return: True if URL is valid, False otherwise :rtype: bool >>> extractor = URLExtract() >>> extractor._is_domain_valid("janlipovsky.cz", ".cz") True >>> extractor._is_domain_valid("https://janlipovsky.cz", ".cz") True >>> extractor._is_domain_valid("invalid.cz.", ".cz") False >>> extractor._is_domain_valid("invalid.cz,", ".cz") False >>> extractor._is_domain_valid("in.v_alid.cz", ".cz") False >>> extractor._is_domain_valid("-is.valid.cz", ".cz") True >>> extractor._is_domain_valid("not.valid-.cz", ".cz") False >>> extractor._is_domain_valid("http://blog/media/path.io.jpg", ".cz") False """ if not url: return False scheme_pos = url.find('://') if scheme_pos == -1: url = 'http://' + url url_parts = uritools.urisplit(url) # <scheme>://<authority>/<path>?<query>#<fragment> try: host = url_parts.gethost() except ValueError: self._logger.info( "Invalid host '%s'. " "If the host is valid report a bug.", url ) return False if not host: return False host_parts = host.split('.') if len(host_parts) <= 1: return False host_tld = '.'+host_parts[-1] if host_tld != tld: return False top = host_parts[-2] if self._hostname_re.match(top) is None: return False return True
def function[_is_domain_valid, parameter[self, url, tld]]: constant[ Checks if given URL has valid domain name (ignores subdomains) :param str url: complete URL that we want to check :param str tld: TLD that should be found at the end of URL (hostname) :return: True if URL is valid, False otherwise :rtype: bool >>> extractor = URLExtract() >>> extractor._is_domain_valid("janlipovsky.cz", ".cz") True >>> extractor._is_domain_valid("https://janlipovsky.cz", ".cz") True >>> extractor._is_domain_valid("invalid.cz.", ".cz") False >>> extractor._is_domain_valid("invalid.cz,", ".cz") False >>> extractor._is_domain_valid("in.v_alid.cz", ".cz") False >>> extractor._is_domain_valid("-is.valid.cz", ".cz") True >>> extractor._is_domain_valid("not.valid-.cz", ".cz") False >>> extractor._is_domain_valid("http://blog/media/path.io.jpg", ".cz") False ] if <ast.UnaryOp object at 0x7da20c76e2c0> begin[:] return[constant[False]] variable[scheme_pos] assign[=] call[name[url].find, parameter[constant[://]]] if compare[name[scheme_pos] equal[==] <ast.UnaryOp object at 0x7da20c76d5d0>] begin[:] variable[url] assign[=] binary_operation[constant[http://] + name[url]] variable[url_parts] assign[=] call[name[uritools].urisplit, parameter[name[url]]] <ast.Try object at 0x7da20c76fd60> if <ast.UnaryOp object at 0x7da20c76e770> begin[:] return[constant[False]] variable[host_parts] assign[=] call[name[host].split, parameter[constant[.]]] if compare[call[name[len], parameter[name[host_parts]]] less_or_equal[<=] constant[1]] begin[:] return[constant[False]] variable[host_tld] assign[=] binary_operation[constant[.] + call[name[host_parts]][<ast.UnaryOp object at 0x7da20c76d240>]] if compare[name[host_tld] not_equal[!=] name[tld]] begin[:] return[constant[False]] variable[top] assign[=] call[name[host_parts]][<ast.UnaryOp object at 0x7da20c76c9d0>] if compare[call[name[self]._hostname_re.match, parameter[name[top]]] is constant[None]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[_is_domain_valid] ( identifier[self] , identifier[url] , identifier[tld] ): literal[string] keyword[if] keyword[not] identifier[url] : keyword[return] keyword[False] identifier[scheme_pos] = identifier[url] . identifier[find] ( literal[string] ) keyword[if] identifier[scheme_pos] ==- literal[int] : identifier[url] = literal[string] + identifier[url] identifier[url_parts] = identifier[uritools] . identifier[urisplit] ( identifier[url] ) keyword[try] : identifier[host] = identifier[url_parts] . identifier[gethost] () keyword[except] identifier[ValueError] : identifier[self] . identifier[_logger] . identifier[info] ( literal[string] literal[string] , identifier[url] ) keyword[return] keyword[False] keyword[if] keyword[not] identifier[host] : keyword[return] keyword[False] identifier[host_parts] = identifier[host] . identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[host_parts] )<= literal[int] : keyword[return] keyword[False] identifier[host_tld] = literal[string] + identifier[host_parts] [- literal[int] ] keyword[if] identifier[host_tld] != identifier[tld] : keyword[return] keyword[False] identifier[top] = identifier[host_parts] [- literal[int] ] keyword[if] identifier[self] . identifier[_hostname_re] . identifier[match] ( identifier[top] ) keyword[is] keyword[None] : keyword[return] keyword[False] keyword[return] keyword[True]
def _is_domain_valid(self, url, tld): """ Checks if given URL has valid domain name (ignores subdomains) :param str url: complete URL that we want to check :param str tld: TLD that should be found at the end of URL (hostname) :return: True if URL is valid, False otherwise :rtype: bool >>> extractor = URLExtract() >>> extractor._is_domain_valid("janlipovsky.cz", ".cz") True >>> extractor._is_domain_valid("https://janlipovsky.cz", ".cz") True >>> extractor._is_domain_valid("invalid.cz.", ".cz") False >>> extractor._is_domain_valid("invalid.cz,", ".cz") False >>> extractor._is_domain_valid("in.v_alid.cz", ".cz") False >>> extractor._is_domain_valid("-is.valid.cz", ".cz") True >>> extractor._is_domain_valid("not.valid-.cz", ".cz") False >>> extractor._is_domain_valid("http://blog/media/path.io.jpg", ".cz") False """ if not url: return False # depends on [control=['if'], data=[]] scheme_pos = url.find('://') if scheme_pos == -1: url = 'http://' + url # depends on [control=['if'], data=[]] url_parts = uritools.urisplit(url) # <scheme>://<authority>/<path>?<query>#<fragment> try: host = url_parts.gethost() # depends on [control=['try'], data=[]] except ValueError: self._logger.info("Invalid host '%s'. If the host is valid report a bug.", url) return False # depends on [control=['except'], data=[]] if not host: return False # depends on [control=['if'], data=[]] host_parts = host.split('.') if len(host_parts) <= 1: return False # depends on [control=['if'], data=[]] host_tld = '.' + host_parts[-1] if host_tld != tld: return False # depends on [control=['if'], data=[]] top = host_parts[-2] if self._hostname_re.match(top) is None: return False # depends on [control=['if'], data=[]] return True
def _convert_to_image_color(self, color): """:return: a color that can be used by the image""" rgb = self._convert_color_to_rrggbb(color) return self._convert_rrggbb_to_image_color(rgb)
def function[_convert_to_image_color, parameter[self, color]]: constant[:return: a color that can be used by the image] variable[rgb] assign[=] call[name[self]._convert_color_to_rrggbb, parameter[name[color]]] return[call[name[self]._convert_rrggbb_to_image_color, parameter[name[rgb]]]]
keyword[def] identifier[_convert_to_image_color] ( identifier[self] , identifier[color] ): literal[string] identifier[rgb] = identifier[self] . identifier[_convert_color_to_rrggbb] ( identifier[color] ) keyword[return] identifier[self] . identifier[_convert_rrggbb_to_image_color] ( identifier[rgb] )
def _convert_to_image_color(self, color): """:return: a color that can be used by the image""" rgb = self._convert_color_to_rrggbb(color) return self._convert_rrggbb_to_image_color(rgb)
def get_supported_unary_ops(): ''' Returns a dictionary of the Weld supported unary ops, with values being their Weld symbol. ''' unary_ops = {} unary_ops[np.exp.__name__] = 'exp' unary_ops[np.log.__name__] = 'log' unary_ops[np.sqrt.__name__] = 'sqrt' return unary_ops
def function[get_supported_unary_ops, parameter[]]: constant[ Returns a dictionary of the Weld supported unary ops, with values being their Weld symbol. ] variable[unary_ops] assign[=] dictionary[[], []] call[name[unary_ops]][name[np].exp.__name__] assign[=] constant[exp] call[name[unary_ops]][name[np].log.__name__] assign[=] constant[log] call[name[unary_ops]][name[np].sqrt.__name__] assign[=] constant[sqrt] return[name[unary_ops]]
keyword[def] identifier[get_supported_unary_ops] (): literal[string] identifier[unary_ops] ={} identifier[unary_ops] [ identifier[np] . identifier[exp] . identifier[__name__] ]= literal[string] identifier[unary_ops] [ identifier[np] . identifier[log] . identifier[__name__] ]= literal[string] identifier[unary_ops] [ identifier[np] . identifier[sqrt] . identifier[__name__] ]= literal[string] keyword[return] identifier[unary_ops]
def get_supported_unary_ops(): """ Returns a dictionary of the Weld supported unary ops, with values being their Weld symbol. """ unary_ops = {} unary_ops[np.exp.__name__] = 'exp' unary_ops[np.log.__name__] = 'log' unary_ops[np.sqrt.__name__] = 'sqrt' return unary_ops
def fire_event(self, event, *args, **kwargs): """ Execute the listeners for this event passing any arguments along. """ remove = [] event_stack = self._events[event] for x in event_stack: x['callback'](*args, **kwargs) if x['single']: remove.append(x) for x in remove: event_stack.remove(x)
def function[fire_event, parameter[self, event]]: constant[ Execute the listeners for this event passing any arguments along. ] variable[remove] assign[=] list[[]] variable[event_stack] assign[=] call[name[self]._events][name[event]] for taget[name[x]] in starred[name[event_stack]] begin[:] call[call[name[x]][constant[callback]], parameter[<ast.Starred object at 0x7da1b2347eb0>]] if call[name[x]][constant[single]] begin[:] call[name[remove].append, parameter[name[x]]] for taget[name[x]] in starred[name[remove]] begin[:] call[name[event_stack].remove, parameter[name[x]]]
keyword[def] identifier[fire_event] ( identifier[self] , identifier[event] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[remove] =[] identifier[event_stack] = identifier[self] . identifier[_events] [ identifier[event] ] keyword[for] identifier[x] keyword[in] identifier[event_stack] : identifier[x] [ literal[string] ](* identifier[args] ,** identifier[kwargs] ) keyword[if] identifier[x] [ literal[string] ]: identifier[remove] . identifier[append] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[remove] : identifier[event_stack] . identifier[remove] ( identifier[x] )
def fire_event(self, event, *args, **kwargs): """ Execute the listeners for this event passing any arguments along. """ remove = [] event_stack = self._events[event] for x in event_stack: x['callback'](*args, **kwargs) if x['single']: remove.append(x) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] for x in remove: event_stack.remove(x) # depends on [control=['for'], data=['x']]
def bandpass(ts, low_hz, high_hz, order=3): """forward-backward butterworth band-pass filter""" orig_ndim = ts.ndim if ts.ndim is 1: ts = ts[:, np.newaxis] channels = ts.shape[1] fs = (len(ts) - 1.0) / (ts.tspan[-1] - ts.tspan[0]) nyq = 0.5 * fs low = low_hz/nyq high = high_hz/nyq b, a = signal.butter(order, [low, high], btype='band') if not np.all(np.abs(np.roots(a)) < 1.0): raise ValueError('Filter will not be stable with these values.') dtype = ts.dtype output = np.zeros((len(ts), channels), dtype) for i in range(channels): output[:, i] = signal.filtfilt(b, a, ts[:, i]) if orig_ndim is 1: output = output[:, 0] return Timeseries(output, ts.tspan, labels=ts.labels)
def function[bandpass, parameter[ts, low_hz, high_hz, order]]: constant[forward-backward butterworth band-pass filter] variable[orig_ndim] assign[=] name[ts].ndim if compare[name[ts].ndim is constant[1]] begin[:] variable[ts] assign[=] call[name[ts]][tuple[[<ast.Slice object at 0x7da20e963e80>, <ast.Attribute object at 0x7da20e962890>]]] variable[channels] assign[=] call[name[ts].shape][constant[1]] variable[fs] assign[=] binary_operation[binary_operation[call[name[len], parameter[name[ts]]] - constant[1.0]] / binary_operation[call[name[ts].tspan][<ast.UnaryOp object at 0x7da20e9630d0>] - call[name[ts].tspan][constant[0]]]] variable[nyq] assign[=] binary_operation[constant[0.5] * name[fs]] variable[low] assign[=] binary_operation[name[low_hz] / name[nyq]] variable[high] assign[=] binary_operation[name[high_hz] / name[nyq]] <ast.Tuple object at 0x7da20e962320> assign[=] call[name[signal].butter, parameter[name[order], list[[<ast.Name object at 0x7da20e963610>, <ast.Name object at 0x7da20e9627d0>]]]] if <ast.UnaryOp object at 0x7da20e963250> begin[:] <ast.Raise object at 0x7da20e961600> variable[dtype] assign[=] name[ts].dtype variable[output] assign[=] call[name[np].zeros, parameter[tuple[[<ast.Call object at 0x7da20e960700>, <ast.Name object at 0x7da20e961510>]], name[dtype]]] for taget[name[i]] in starred[call[name[range], parameter[name[channels]]]] begin[:] call[name[output]][tuple[[<ast.Slice object at 0x7da20e963d90>, <ast.Name object at 0x7da20e9625c0>]]] assign[=] call[name[signal].filtfilt, parameter[name[b], name[a], call[name[ts]][tuple[[<ast.Slice object at 0x7da20ec06ef0>, <ast.Name object at 0x7da20ec06230>]]]]] if compare[name[orig_ndim] is constant[1]] begin[:] variable[output] assign[=] call[name[output]][tuple[[<ast.Slice object at 0x7da1b0fddcc0>, <ast.Constant object at 0x7da1b0fdf940>]]] return[call[name[Timeseries], parameter[name[output], name[ts].tspan]]]
keyword[def] identifier[bandpass] ( identifier[ts] , identifier[low_hz] , identifier[high_hz] , identifier[order] = literal[int] ): literal[string] identifier[orig_ndim] = identifier[ts] . identifier[ndim] keyword[if] identifier[ts] . identifier[ndim] keyword[is] literal[int] : identifier[ts] = identifier[ts] [:, identifier[np] . identifier[newaxis] ] identifier[channels] = identifier[ts] . identifier[shape] [ literal[int] ] identifier[fs] =( identifier[len] ( identifier[ts] )- literal[int] )/( identifier[ts] . identifier[tspan] [- literal[int] ]- identifier[ts] . identifier[tspan] [ literal[int] ]) identifier[nyq] = literal[int] * identifier[fs] identifier[low] = identifier[low_hz] / identifier[nyq] identifier[high] = identifier[high_hz] / identifier[nyq] identifier[b] , identifier[a] = identifier[signal] . identifier[butter] ( identifier[order] ,[ identifier[low] , identifier[high] ], identifier[btype] = literal[string] ) keyword[if] keyword[not] identifier[np] . identifier[all] ( identifier[np] . identifier[abs] ( identifier[np] . identifier[roots] ( identifier[a] ))< literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[dtype] = identifier[ts] . identifier[dtype] identifier[output] = identifier[np] . identifier[zeros] (( identifier[len] ( identifier[ts] ), identifier[channels] ), identifier[dtype] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[channels] ): identifier[output] [:, identifier[i] ]= identifier[signal] . identifier[filtfilt] ( identifier[b] , identifier[a] , identifier[ts] [:, identifier[i] ]) keyword[if] identifier[orig_ndim] keyword[is] literal[int] : identifier[output] = identifier[output] [:, literal[int] ] keyword[return] identifier[Timeseries] ( identifier[output] , identifier[ts] . identifier[tspan] , identifier[labels] = identifier[ts] . identifier[labels] )
def bandpass(ts, low_hz, high_hz, order=3): """forward-backward butterworth band-pass filter""" orig_ndim = ts.ndim if ts.ndim is 1: ts = ts[:, np.newaxis] # depends on [control=['if'], data=[]] channels = ts.shape[1] fs = (len(ts) - 1.0) / (ts.tspan[-1] - ts.tspan[0]) nyq = 0.5 * fs low = low_hz / nyq high = high_hz / nyq (b, a) = signal.butter(order, [low, high], btype='band') if not np.all(np.abs(np.roots(a)) < 1.0): raise ValueError('Filter will not be stable with these values.') # depends on [control=['if'], data=[]] dtype = ts.dtype output = np.zeros((len(ts), channels), dtype) for i in range(channels): output[:, i] = signal.filtfilt(b, a, ts[:, i]) # depends on [control=['for'], data=['i']] if orig_ndim is 1: output = output[:, 0] # depends on [control=['if'], data=[]] return Timeseries(output, ts.tspan, labels=ts.labels)
def get_product_metadata_name(self): """ :return: name of product metadata file :rtype: str """ if self.safe_type == EsaSafeType.OLD_TYPE: name = _edit_name(self.product_id, 'MTD', 'SAFL1C') else: name = 'MTD_{}'.format(self.product_id.split('_')[1]) return '{}.{}'.format(name, MimeType.XML.value)
def function[get_product_metadata_name, parameter[self]]: constant[ :return: name of product metadata file :rtype: str ] if compare[name[self].safe_type equal[==] name[EsaSafeType].OLD_TYPE] begin[:] variable[name] assign[=] call[name[_edit_name], parameter[name[self].product_id, constant[MTD], constant[SAFL1C]]] return[call[constant[{}.{}].format, parameter[name[name], name[MimeType].XML.value]]]
keyword[def] identifier[get_product_metadata_name] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[safe_type] == identifier[EsaSafeType] . identifier[OLD_TYPE] : identifier[name] = identifier[_edit_name] ( identifier[self] . identifier[product_id] , literal[string] , literal[string] ) keyword[else] : identifier[name] = literal[string] . identifier[format] ( identifier[self] . identifier[product_id] . identifier[split] ( literal[string] )[ literal[int] ]) keyword[return] literal[string] . identifier[format] ( identifier[name] , identifier[MimeType] . identifier[XML] . identifier[value] )
def get_product_metadata_name(self): """ :return: name of product metadata file :rtype: str """ if self.safe_type == EsaSafeType.OLD_TYPE: name = _edit_name(self.product_id, 'MTD', 'SAFL1C') # depends on [control=['if'], data=[]] else: name = 'MTD_{}'.format(self.product_id.split('_')[1]) return '{}.{}'.format(name, MimeType.XML.value)
def get_context_data(self, **kwargs): """ Returns view context dictionary. :rtype: dict. """ kwargs.update({ 'entries': Entry.objects.get_for_tag( self.kwargs.get('slug', 0) ) }) return super(EntriesView, self).get_context_data(**kwargs)
def function[get_context_data, parameter[self]]: constant[ Returns view context dictionary. :rtype: dict. ] call[name[kwargs].update, parameter[dictionary[[<ast.Constant object at 0x7da1b10e7970>], [<ast.Call object at 0x7da1b10e6830>]]]] return[call[call[name[super], parameter[name[EntriesView], name[self]]].get_context_data, parameter[]]]
keyword[def] identifier[get_context_data] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[kwargs] . identifier[update] ({ literal[string] : identifier[Entry] . identifier[objects] . identifier[get_for_tag] ( identifier[self] . identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ) ) }) keyword[return] identifier[super] ( identifier[EntriesView] , identifier[self] ). identifier[get_context_data] (** identifier[kwargs] )
def get_context_data(self, **kwargs): """ Returns view context dictionary. :rtype: dict. """ kwargs.update({'entries': Entry.objects.get_for_tag(self.kwargs.get('slug', 0))}) return super(EntriesView, self).get_context_data(**kwargs)
def getCandScoresMap(self, profile): """ Returns a dictionary that associates integer representations of each candidate with their maximin score. :ivar Profile profile: A Profile object that represents an election profile. """ # Currently, we expect the profile to contain complete ordering over candidates. Ties are # allowed however. elecType = profile.getElecType() if elecType != "soc" and elecType != "toc": print("ERROR: unsupported election type") exit() wmg = profile.getWmg() # Initialize the maximin score for each candidate as infinity. maximinScores = dict() for cand in wmg.keys(): maximinScores[cand] = float("inf") # For each pair of candidates, calculate the number of times each beats the other. for cand1, cand2 in itertools.combinations(wmg.keys(), 2): if cand2 in wmg[cand1].keys(): maximinScores[cand1] = min(maximinScores[cand1], wmg[cand1][cand2]) maximinScores[cand2] = min(maximinScores[cand2], wmg[cand2][cand1]) return maximinScores
def function[getCandScoresMap, parameter[self, profile]]: constant[ Returns a dictionary that associates integer representations of each candidate with their maximin score. :ivar Profile profile: A Profile object that represents an election profile. ] variable[elecType] assign[=] call[name[profile].getElecType, parameter[]] if <ast.BoolOp object at 0x7da18f813ac0> begin[:] call[name[print], parameter[constant[ERROR: unsupported election type]]] call[name[exit], parameter[]] variable[wmg] assign[=] call[name[profile].getWmg, parameter[]] variable[maximinScores] assign[=] call[name[dict], parameter[]] for taget[name[cand]] in starred[call[name[wmg].keys, parameter[]]] begin[:] call[name[maximinScores]][name[cand]] assign[=] call[name[float], parameter[constant[inf]]] for taget[tuple[[<ast.Name object at 0x7da18f811f60>, <ast.Name object at 0x7da18f812d10>]]] in starred[call[name[itertools].combinations, parameter[call[name[wmg].keys, parameter[]], constant[2]]]] begin[:] if compare[name[cand2] in call[call[name[wmg]][name[cand1]].keys, parameter[]]] begin[:] call[name[maximinScores]][name[cand1]] assign[=] call[name[min], parameter[call[name[maximinScores]][name[cand1]], call[call[name[wmg]][name[cand1]]][name[cand2]]]] call[name[maximinScores]][name[cand2]] assign[=] call[name[min], parameter[call[name[maximinScores]][name[cand2]], call[call[name[wmg]][name[cand2]]][name[cand1]]]] return[name[maximinScores]]
keyword[def] identifier[getCandScoresMap] ( identifier[self] , identifier[profile] ): literal[string] identifier[elecType] = identifier[profile] . identifier[getElecType] () keyword[if] identifier[elecType] != literal[string] keyword[and] identifier[elecType] != literal[string] : identifier[print] ( literal[string] ) identifier[exit] () identifier[wmg] = identifier[profile] . identifier[getWmg] () identifier[maximinScores] = identifier[dict] () keyword[for] identifier[cand] keyword[in] identifier[wmg] . identifier[keys] (): identifier[maximinScores] [ identifier[cand] ]= identifier[float] ( literal[string] ) keyword[for] identifier[cand1] , identifier[cand2] keyword[in] identifier[itertools] . identifier[combinations] ( identifier[wmg] . identifier[keys] (), literal[int] ): keyword[if] identifier[cand2] keyword[in] identifier[wmg] [ identifier[cand1] ]. identifier[keys] (): identifier[maximinScores] [ identifier[cand1] ]= identifier[min] ( identifier[maximinScores] [ identifier[cand1] ], identifier[wmg] [ identifier[cand1] ][ identifier[cand2] ]) identifier[maximinScores] [ identifier[cand2] ]= identifier[min] ( identifier[maximinScores] [ identifier[cand2] ], identifier[wmg] [ identifier[cand2] ][ identifier[cand1] ]) keyword[return] identifier[maximinScores]
def getCandScoresMap(self, profile): """ Returns a dictionary that associates integer representations of each candidate with their maximin score. :ivar Profile profile: A Profile object that represents an election profile. """ # Currently, we expect the profile to contain complete ordering over candidates. Ties are # allowed however. elecType = profile.getElecType() if elecType != 'soc' and elecType != 'toc': print('ERROR: unsupported election type') exit() # depends on [control=['if'], data=[]] wmg = profile.getWmg() # Initialize the maximin score for each candidate as infinity. maximinScores = dict() for cand in wmg.keys(): maximinScores[cand] = float('inf') # depends on [control=['for'], data=['cand']] # For each pair of candidates, calculate the number of times each beats the other. for (cand1, cand2) in itertools.combinations(wmg.keys(), 2): if cand2 in wmg[cand1].keys(): maximinScores[cand1] = min(maximinScores[cand1], wmg[cand1][cand2]) maximinScores[cand2] = min(maximinScores[cand2], wmg[cand2][cand1]) # depends on [control=['if'], data=['cand2']] # depends on [control=['for'], data=[]] return maximinScores
def _transform_array(self, X): r"""Projects the data onto the dominant independent components. Parameters ---------- X : ndarray(n, m) the input data Returns ------- Y : ndarray(n,) the projected data """ X_meanfree = X - self.mean Y = np.dot(X_meanfree, self.eigenvectors[:, 0:self.dimension()]) return Y.astype(self.output_type())
def function[_transform_array, parameter[self, X]]: constant[Projects the data onto the dominant independent components. Parameters ---------- X : ndarray(n, m) the input data Returns ------- Y : ndarray(n,) the projected data ] variable[X_meanfree] assign[=] binary_operation[name[X] - name[self].mean] variable[Y] assign[=] call[name[np].dot, parameter[name[X_meanfree], call[name[self].eigenvectors][tuple[[<ast.Slice object at 0x7da20c6e4190>, <ast.Slice object at 0x7da20c6e7ac0>]]]]] return[call[name[Y].astype, parameter[call[name[self].output_type, parameter[]]]]]
keyword[def] identifier[_transform_array] ( identifier[self] , identifier[X] ): literal[string] identifier[X_meanfree] = identifier[X] - identifier[self] . identifier[mean] identifier[Y] = identifier[np] . identifier[dot] ( identifier[X_meanfree] , identifier[self] . identifier[eigenvectors] [:, literal[int] : identifier[self] . identifier[dimension] ()]) keyword[return] identifier[Y] . identifier[astype] ( identifier[self] . identifier[output_type] ())
def _transform_array(self, X): """Projects the data onto the dominant independent components. Parameters ---------- X : ndarray(n, m) the input data Returns ------- Y : ndarray(n,) the projected data """ X_meanfree = X - self.mean Y = np.dot(X_meanfree, self.eigenvectors[:, 0:self.dimension()]) return Y.astype(self.output_type())
def list_services(self, limit=None, marker=None): """List CDN services.""" return self._services_manager.list(limit=limit, marker=marker)
def function[list_services, parameter[self, limit, marker]]: constant[List CDN services.] return[call[name[self]._services_manager.list, parameter[]]]
keyword[def] identifier[list_services] ( identifier[self] , identifier[limit] = keyword[None] , identifier[marker] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_services_manager] . identifier[list] ( identifier[limit] = identifier[limit] , identifier[marker] = identifier[marker] )
def list_services(self, limit=None, marker=None): """List CDN services.""" return self._services_manager.list(limit=limit, marker=marker)
def expireat(self, key, when): """Emulate expireat""" expire_time = datetime.fromtimestamp(when) key = self._encode(key) if key in self.redis: self.timeouts[key] = expire_time return True return False
def function[expireat, parameter[self, key, when]]: constant[Emulate expireat] variable[expire_time] assign[=] call[name[datetime].fromtimestamp, parameter[name[when]]] variable[key] assign[=] call[name[self]._encode, parameter[name[key]]] if compare[name[key] in name[self].redis] begin[:] call[name[self].timeouts][name[key]] assign[=] name[expire_time] return[constant[True]] return[constant[False]]
keyword[def] identifier[expireat] ( identifier[self] , identifier[key] , identifier[when] ): literal[string] identifier[expire_time] = identifier[datetime] . identifier[fromtimestamp] ( identifier[when] ) identifier[key] = identifier[self] . identifier[_encode] ( identifier[key] ) keyword[if] identifier[key] keyword[in] identifier[self] . identifier[redis] : identifier[self] . identifier[timeouts] [ identifier[key] ]= identifier[expire_time] keyword[return] keyword[True] keyword[return] keyword[False]
def expireat(self, key, when): """Emulate expireat""" expire_time = datetime.fromtimestamp(when) key = self._encode(key) if key in self.redis: self.timeouts[key] = expire_time return True # depends on [control=['if'], data=['key']] return False
def check(self, topic, value): """ Checking the value if it fits into the given specification """ datatype_key = topic.meta.get('datatype', 'none') self._datatypes[datatype_key].check(topic, value) validate_dt = topic.meta.get('validate', None) if validate_dt: self._datatypes[validate_dt].check(topic, value)
def function[check, parameter[self, topic, value]]: constant[ Checking the value if it fits into the given specification ] variable[datatype_key] assign[=] call[name[topic].meta.get, parameter[constant[datatype], constant[none]]] call[call[name[self]._datatypes][name[datatype_key]].check, parameter[name[topic], name[value]]] variable[validate_dt] assign[=] call[name[topic].meta.get, parameter[constant[validate], constant[None]]] if name[validate_dt] begin[:] call[call[name[self]._datatypes][name[validate_dt]].check, parameter[name[topic], name[value]]]
keyword[def] identifier[check] ( identifier[self] , identifier[topic] , identifier[value] ): literal[string] identifier[datatype_key] = identifier[topic] . identifier[meta] . identifier[get] ( literal[string] , literal[string] ) identifier[self] . identifier[_datatypes] [ identifier[datatype_key] ]. identifier[check] ( identifier[topic] , identifier[value] ) identifier[validate_dt] = identifier[topic] . identifier[meta] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[validate_dt] : identifier[self] . identifier[_datatypes] [ identifier[validate_dt] ]. identifier[check] ( identifier[topic] , identifier[value] )
def check(self, topic, value): """ Checking the value if it fits into the given specification """ datatype_key = topic.meta.get('datatype', 'none') self._datatypes[datatype_key].check(topic, value) validate_dt = topic.meta.get('validate', None) if validate_dt: self._datatypes[validate_dt].check(topic, value) # depends on [control=['if'], data=[]]
def log_ndtr(x, series_order=3, name="log_ndtr"): """Log Normal distribution function. For details of the Normal distribution function see `ndtr`. This function calculates `(log o ndtr)(x)` by either calling `log(ndtr(x))` or using an asymptotic series. Specifically: - For `x > upper_segment`, use the approximation `-ndtr(-x)` based on `log(1-x) ~= -x, x << 1`. - For `lower_segment < x <= upper_segment`, use the existing `ndtr` technique and take a log. - For `x <= lower_segment`, we use the series approximation of erf to compute the log CDF directly. The `lower_segment` is set based on the precision of the input: ``` lower_segment = { -20, x.dtype=float64 { -10, x.dtype=float32 upper_segment = { 8, x.dtype=float64 { 5, x.dtype=float32 ``` When `x < lower_segment`, the `ndtr` asymptotic series approximation is: ``` ndtr(x) = scale * (1 + sum) + R_N scale = exp(-0.5 x**2) / (-x sqrt(2 pi)) sum = Sum{(-1)^n (2n-1)!! / (x**2)^n, n=1:N} R_N = O(exp(-0.5 x**2) (2N+1)!! / |x|^{2N+3}) ``` where `(2n-1)!! = (2n-1) (2n-3) (2n-5) ... (3) (1)` is a [double-factorial](https://en.wikipedia.org/wiki/Double_factorial). Args: x: `Tensor` of type `float32`, `float64`. series_order: Positive Python `integer`. Maximum depth to evaluate the asymptotic expansion. This is the `N` above. name: Python string. A name for the operation (default="log_ndtr"). Returns: log_ndtr: `Tensor` with `dtype=x.dtype`. Raises: TypeError: if `x.dtype` is not handled. TypeError: if `series_order` is a not Python `integer.` ValueError: if `series_order` is not in `[0, 30]`. """ if not isinstance(series_order, int): raise TypeError("series_order must be a Python integer.") if series_order < 0: raise ValueError("series_order must be non-negative.") if series_order > 30: raise ValueError("series_order must be <= 30.") with tf.name_scope(name): x = tf.convert_to_tensor(value=x, name="x") if dtype_util.base_equal(x.dtype, tf.float64): lower_segment = LOGNDTR_FLOAT64_LOWER upper_segment = LOGNDTR_FLOAT64_UPPER elif dtype_util.base_equal(x.dtype, tf.float32): lower_segment = LOGNDTR_FLOAT32_LOWER upper_segment = LOGNDTR_FLOAT32_UPPER else: raise TypeError("x.dtype=%s is not supported." % x.dtype) # The basic idea here was ported from: # https://root.cern.ch/doc/v608/SpecFuncCephesInv_8cxx_source.html # We copy the main idea, with a few changes # * For x >> 1, and X ~ Normal(0, 1), # Log[P[X < x]] = Log[1 - P[X < -x]] approx -P[X < -x], # which extends the range of validity of this function. # * We use one fixed series_order for all of 'x', rather than adaptive. # * Our docstring properly reflects that this is an asymptotic series, not a # Taylor series. We also provided a correct bound on the remainder. # * We need to use the max/min in the _log_ndtr_lower arg to avoid nan when # x=0. This happens even though the branch is unchosen because when x=0 # the gradient of a select involves the calculation 1*dy+0*(-inf)=nan # regardless of whether dy is finite. Note that the minimum is a NOP if # the branch is chosen. return tf.where( tf.greater(x, upper_segment), -_ndtr(-x), # log(1-x) ~= -x, x << 1 tf.where( tf.greater(x, lower_segment), tf.math.log(_ndtr(tf.maximum(x, lower_segment))), _log_ndtr_lower(tf.minimum(x, lower_segment), series_order)))
def function[log_ndtr, parameter[x, series_order, name]]: constant[Log Normal distribution function. For details of the Normal distribution function see `ndtr`. This function calculates `(log o ndtr)(x)` by either calling `log(ndtr(x))` or using an asymptotic series. Specifically: - For `x > upper_segment`, use the approximation `-ndtr(-x)` based on `log(1-x) ~= -x, x << 1`. - For `lower_segment < x <= upper_segment`, use the existing `ndtr` technique and take a log. - For `x <= lower_segment`, we use the series approximation of erf to compute the log CDF directly. The `lower_segment` is set based on the precision of the input: ``` lower_segment = { -20, x.dtype=float64 { -10, x.dtype=float32 upper_segment = { 8, x.dtype=float64 { 5, x.dtype=float32 ``` When `x < lower_segment`, the `ndtr` asymptotic series approximation is: ``` ndtr(x) = scale * (1 + sum) + R_N scale = exp(-0.5 x**2) / (-x sqrt(2 pi)) sum = Sum{(-1)^n (2n-1)!! / (x**2)^n, n=1:N} R_N = O(exp(-0.5 x**2) (2N+1)!! / |x|^{2N+3}) ``` where `(2n-1)!! = (2n-1) (2n-3) (2n-5) ... (3) (1)` is a [double-factorial](https://en.wikipedia.org/wiki/Double_factorial). Args: x: `Tensor` of type `float32`, `float64`. series_order: Positive Python `integer`. Maximum depth to evaluate the asymptotic expansion. This is the `N` above. name: Python string. A name for the operation (default="log_ndtr"). Returns: log_ndtr: `Tensor` with `dtype=x.dtype`. Raises: TypeError: if `x.dtype` is not handled. TypeError: if `series_order` is a not Python `integer.` ValueError: if `series_order` is not in `[0, 30]`. ] if <ast.UnaryOp object at 0x7da1b02ca620> begin[:] <ast.Raise object at 0x7da1b02c9060> if compare[name[series_order] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da1b02c84c0> if compare[name[series_order] greater[>] constant[30]] begin[:] <ast.Raise object at 0x7da1b02ca7d0> with call[name[tf].name_scope, parameter[name[name]]] begin[:] variable[x] assign[=] call[name[tf].convert_to_tensor, parameter[]] if call[name[dtype_util].base_equal, parameter[name[x].dtype, name[tf].float64]] begin[:] variable[lower_segment] assign[=] name[LOGNDTR_FLOAT64_LOWER] variable[upper_segment] assign[=] name[LOGNDTR_FLOAT64_UPPER] return[call[name[tf].where, parameter[call[name[tf].greater, parameter[name[x], name[upper_segment]]], <ast.UnaryOp object at 0x7da1b02ca110>, call[name[tf].where, parameter[call[name[tf].greater, parameter[name[x], name[lower_segment]]], call[name[tf].math.log, parameter[call[name[_ndtr], parameter[call[name[tf].maximum, parameter[name[x], name[lower_segment]]]]]]], call[name[_log_ndtr_lower], parameter[call[name[tf].minimum, parameter[name[x], name[lower_segment]]], name[series_order]]]]]]]]
keyword[def] identifier[log_ndtr] ( identifier[x] , identifier[series_order] = literal[int] , identifier[name] = literal[string] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[series_order] , identifier[int] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] identifier[series_order] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[series_order] > literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[with] identifier[tf] . identifier[name_scope] ( identifier[name] ): identifier[x] = identifier[tf] . identifier[convert_to_tensor] ( identifier[value] = identifier[x] , identifier[name] = literal[string] ) keyword[if] identifier[dtype_util] . identifier[base_equal] ( identifier[x] . identifier[dtype] , identifier[tf] . identifier[float64] ): identifier[lower_segment] = identifier[LOGNDTR_FLOAT64_LOWER] identifier[upper_segment] = identifier[LOGNDTR_FLOAT64_UPPER] keyword[elif] identifier[dtype_util] . identifier[base_equal] ( identifier[x] . identifier[dtype] , identifier[tf] . identifier[float32] ): identifier[lower_segment] = identifier[LOGNDTR_FLOAT32_LOWER] identifier[upper_segment] = identifier[LOGNDTR_FLOAT32_UPPER] keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] % identifier[x] . identifier[dtype] ) keyword[return] identifier[tf] . identifier[where] ( identifier[tf] . identifier[greater] ( identifier[x] , identifier[upper_segment] ), - identifier[_ndtr] (- identifier[x] ), identifier[tf] . identifier[where] ( identifier[tf] . identifier[greater] ( identifier[x] , identifier[lower_segment] ), identifier[tf] . identifier[math] . identifier[log] ( identifier[_ndtr] ( identifier[tf] . identifier[maximum] ( identifier[x] , identifier[lower_segment] ))), identifier[_log_ndtr_lower] ( identifier[tf] . identifier[minimum] ( identifier[x] , identifier[lower_segment] ), identifier[series_order] )))
def log_ndtr(x, series_order=3, name='log_ndtr'): """Log Normal distribution function. For details of the Normal distribution function see `ndtr`. This function calculates `(log o ndtr)(x)` by either calling `log(ndtr(x))` or using an asymptotic series. Specifically: - For `x > upper_segment`, use the approximation `-ndtr(-x)` based on `log(1-x) ~= -x, x << 1`. - For `lower_segment < x <= upper_segment`, use the existing `ndtr` technique and take a log. - For `x <= lower_segment`, we use the series approximation of erf to compute the log CDF directly. The `lower_segment` is set based on the precision of the input: ``` lower_segment = { -20, x.dtype=float64 { -10, x.dtype=float32 upper_segment = { 8, x.dtype=float64 { 5, x.dtype=float32 ``` When `x < lower_segment`, the `ndtr` asymptotic series approximation is: ``` ndtr(x) = scale * (1 + sum) + R_N scale = exp(-0.5 x**2) / (-x sqrt(2 pi)) sum = Sum{(-1)^n (2n-1)!! / (x**2)^n, n=1:N} R_N = O(exp(-0.5 x**2) (2N+1)!! / |x|^{2N+3}) ``` where `(2n-1)!! = (2n-1) (2n-3) (2n-5) ... (3) (1)` is a [double-factorial](https://en.wikipedia.org/wiki/Double_factorial). Args: x: `Tensor` of type `float32`, `float64`. series_order: Positive Python `integer`. Maximum depth to evaluate the asymptotic expansion. This is the `N` above. name: Python string. A name for the operation (default="log_ndtr"). Returns: log_ndtr: `Tensor` with `dtype=x.dtype`. Raises: TypeError: if `x.dtype` is not handled. TypeError: if `series_order` is a not Python `integer.` ValueError: if `series_order` is not in `[0, 30]`. """ if not isinstance(series_order, int): raise TypeError('series_order must be a Python integer.') # depends on [control=['if'], data=[]] if series_order < 0: raise ValueError('series_order must be non-negative.') # depends on [control=['if'], data=[]] if series_order > 30: raise ValueError('series_order must be <= 30.') # depends on [control=['if'], data=[]] with tf.name_scope(name): x = tf.convert_to_tensor(value=x, name='x') if dtype_util.base_equal(x.dtype, tf.float64): lower_segment = LOGNDTR_FLOAT64_LOWER upper_segment = LOGNDTR_FLOAT64_UPPER # depends on [control=['if'], data=[]] elif dtype_util.base_equal(x.dtype, tf.float32): lower_segment = LOGNDTR_FLOAT32_LOWER upper_segment = LOGNDTR_FLOAT32_UPPER # depends on [control=['if'], data=[]] else: raise TypeError('x.dtype=%s is not supported.' % x.dtype) # The basic idea here was ported from: # https://root.cern.ch/doc/v608/SpecFuncCephesInv_8cxx_source.html # We copy the main idea, with a few changes # * For x >> 1, and X ~ Normal(0, 1), # Log[P[X < x]] = Log[1 - P[X < -x]] approx -P[X < -x], # which extends the range of validity of this function. # * We use one fixed series_order for all of 'x', rather than adaptive. # * Our docstring properly reflects that this is an asymptotic series, not a # Taylor series. We also provided a correct bound on the remainder. # * We need to use the max/min in the _log_ndtr_lower arg to avoid nan when # x=0. This happens even though the branch is unchosen because when x=0 # the gradient of a select involves the calculation 1*dy+0*(-inf)=nan # regardless of whether dy is finite. Note that the minimum is a NOP if # the branch is chosen. # log(1-x) ~= -x, x << 1 return tf.where(tf.greater(x, upper_segment), -_ndtr(-x), tf.where(tf.greater(x, lower_segment), tf.math.log(_ndtr(tf.maximum(x, lower_segment))), _log_ndtr_lower(tf.minimum(x, lower_segment), series_order))) # depends on [control=['with'], data=[]]
def change_mpl_backend(self, command): """ If the user is trying to change Matplotlib backends with %matplotlib, send the same command again to the kernel to correctly change it. Fixes issue 4002 """ if command.startswith('%matplotlib') and \ len(command.splitlines()) == 1: if not 'inline' in command: self.silent_execute(command)
def function[change_mpl_backend, parameter[self, command]]: constant[ If the user is trying to change Matplotlib backends with %matplotlib, send the same command again to the kernel to correctly change it. Fixes issue 4002 ] if <ast.BoolOp object at 0x7da20c6c4f10> begin[:] if <ast.UnaryOp object at 0x7da204346bf0> begin[:] call[name[self].silent_execute, parameter[name[command]]]
keyword[def] identifier[change_mpl_backend] ( identifier[self] , identifier[command] ): literal[string] keyword[if] identifier[command] . identifier[startswith] ( literal[string] ) keyword[and] identifier[len] ( identifier[command] . identifier[splitlines] ())== literal[int] : keyword[if] keyword[not] literal[string] keyword[in] identifier[command] : identifier[self] . identifier[silent_execute] ( identifier[command] )
def change_mpl_backend(self, command): """ If the user is trying to change Matplotlib backends with %matplotlib, send the same command again to the kernel to correctly change it. Fixes issue 4002 """ if command.startswith('%matplotlib') and len(command.splitlines()) == 1: if not 'inline' in command: self.silent_execute(command) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def LifoQueue(self, name, initial=None, maxsize=None): """The LIFO queue datatype. :param name: The name of the queue. :keyword initial: Initial items in the queue. See :class:`redish.types.LifoQueue`. """ return types.LifoQueue(name, self.api, initial=initial, maxsize=maxsize)
def function[LifoQueue, parameter[self, name, initial, maxsize]]: constant[The LIFO queue datatype. :param name: The name of the queue. :keyword initial: Initial items in the queue. See :class:`redish.types.LifoQueue`. ] return[call[name[types].LifoQueue, parameter[name[name], name[self].api]]]
keyword[def] identifier[LifoQueue] ( identifier[self] , identifier[name] , identifier[initial] = keyword[None] , identifier[maxsize] = keyword[None] ): literal[string] keyword[return] identifier[types] . identifier[LifoQueue] ( identifier[name] , identifier[self] . identifier[api] , identifier[initial] = identifier[initial] , identifier[maxsize] = identifier[maxsize] )
def LifoQueue(self, name, initial=None, maxsize=None): """The LIFO queue datatype. :param name: The name of the queue. :keyword initial: Initial items in the queue. See :class:`redish.types.LifoQueue`. """ return types.LifoQueue(name, self.api, initial=initial, maxsize=maxsize)
def get_user(self, user_id): """Returns the current user from the session data. If authenticated, this return the user object based on the user ID and session data. .. note:: This required monkey-patching the ``contrib.auth`` middleware to make the ``request`` object available to the auth backend class. """ if (hasattr(self, 'request') and user_id == self.request.session["user_id"]): token = self.request.session['token'] endpoint = self.request.session['region_endpoint'] services_region = self.request.session['services_region'] user = auth_user.create_user_from_token(self.request, token, endpoint, services_region) return user else: return None
def function[get_user, parameter[self, user_id]]: constant[Returns the current user from the session data. If authenticated, this return the user object based on the user ID and session data. .. note:: This required monkey-patching the ``contrib.auth`` middleware to make the ``request`` object available to the auth backend class. ] if <ast.BoolOp object at 0x7da1b19873a0> begin[:] variable[token] assign[=] call[name[self].request.session][constant[token]] variable[endpoint] assign[=] call[name[self].request.session][constant[region_endpoint]] variable[services_region] assign[=] call[name[self].request.session][constant[services_region]] variable[user] assign[=] call[name[auth_user].create_user_from_token, parameter[name[self].request, name[token], name[endpoint], name[services_region]]] return[name[user]]
keyword[def] identifier[get_user] ( identifier[self] , identifier[user_id] ): literal[string] keyword[if] ( identifier[hasattr] ( identifier[self] , literal[string] ) keyword[and] identifier[user_id] == identifier[self] . identifier[request] . identifier[session] [ literal[string] ]): identifier[token] = identifier[self] . identifier[request] . identifier[session] [ literal[string] ] identifier[endpoint] = identifier[self] . identifier[request] . identifier[session] [ literal[string] ] identifier[services_region] = identifier[self] . identifier[request] . identifier[session] [ literal[string] ] identifier[user] = identifier[auth_user] . identifier[create_user_from_token] ( identifier[self] . identifier[request] , identifier[token] , identifier[endpoint] , identifier[services_region] ) keyword[return] identifier[user] keyword[else] : keyword[return] keyword[None]
def get_user(self, user_id): """Returns the current user from the session data. If authenticated, this return the user object based on the user ID and session data. .. note:: This required monkey-patching the ``contrib.auth`` middleware to make the ``request`` object available to the auth backend class. """ if hasattr(self, 'request') and user_id == self.request.session['user_id']: token = self.request.session['token'] endpoint = self.request.session['region_endpoint'] services_region = self.request.session['services_region'] user = auth_user.create_user_from_token(self.request, token, endpoint, services_region) return user # depends on [control=['if'], data=[]] else: return None
def get_tmaster(self, topologyName, callback=None): """ Get tmaster """ if callback: self.tmaster_watchers[topologyName].append(callback) else: tmaster_path = self.get_tmaster_path(topologyName) with open(tmaster_path) as f: data = f.read() tmaster = TMasterLocation() tmaster.ParseFromString(data) return tmaster
def function[get_tmaster, parameter[self, topologyName, callback]]: constant[ Get tmaster ] if name[callback] begin[:] call[call[name[self].tmaster_watchers][name[topologyName]].append, parameter[name[callback]]]
keyword[def] identifier[get_tmaster] ( identifier[self] , identifier[topologyName] , identifier[callback] = keyword[None] ): literal[string] keyword[if] identifier[callback] : identifier[self] . identifier[tmaster_watchers] [ identifier[topologyName] ]. identifier[append] ( identifier[callback] ) keyword[else] : identifier[tmaster_path] = identifier[self] . identifier[get_tmaster_path] ( identifier[topologyName] ) keyword[with] identifier[open] ( identifier[tmaster_path] ) keyword[as] identifier[f] : identifier[data] = identifier[f] . identifier[read] () identifier[tmaster] = identifier[TMasterLocation] () identifier[tmaster] . identifier[ParseFromString] ( identifier[data] ) keyword[return] identifier[tmaster]
def get_tmaster(self, topologyName, callback=None): """ Get tmaster """ if callback: self.tmaster_watchers[topologyName].append(callback) # depends on [control=['if'], data=[]] else: tmaster_path = self.get_tmaster_path(topologyName) with open(tmaster_path) as f: data = f.read() tmaster = TMasterLocation() tmaster.ParseFromString(data) return tmaster # depends on [control=['with'], data=['f']]
def updateColumnValues(self, networkId, tableType, columnName, default, body, verbose=None): """ Sets the values for cells in the table specified by the `tableType` and `networkId` parameters. If the 'default` parameter is not specified, the message body should consist of key-value pairs with which to set values. If the `default` parameter is specified, its value will be used for every cell in the column. This is useful to set columns like "selected." :param networkId: SUID of the network containing the table :param tableType: The type of table :param columnName: Name of the column in which to set values :param default: Default Value. If this value is provided, all cells will be set to this. -- Not required, can be None :param body: Array of SUID Keyed values :param verbose: print more :returns: default: successful operation """ response=api(url=self.___url+'networks/'+str(networkId)+'/tables/'+str(tableType)+'/columns/'+str(columnName)+'', method="PUT", body=body, verbose=verbose) return response
def function[updateColumnValues, parameter[self, networkId, tableType, columnName, default, body, verbose]]: constant[ Sets the values for cells in the table specified by the `tableType` and `networkId` parameters. If the 'default` parameter is not specified, the message body should consist of key-value pairs with which to set values. If the `default` parameter is specified, its value will be used for every cell in the column. This is useful to set columns like "selected." :param networkId: SUID of the network containing the table :param tableType: The type of table :param columnName: Name of the column in which to set values :param default: Default Value. If this value is provided, all cells will be set to this. -- Not required, can be None :param body: Array of SUID Keyed values :param verbose: print more :returns: default: successful operation ] variable[response] assign[=] call[name[api], parameter[]] return[name[response]]
keyword[def] identifier[updateColumnValues] ( identifier[self] , identifier[networkId] , identifier[tableType] , identifier[columnName] , identifier[default] , identifier[body] , identifier[verbose] = keyword[None] ): literal[string] identifier[response] = identifier[api] ( identifier[url] = identifier[self] . identifier[___url] + literal[string] + identifier[str] ( identifier[networkId] )+ literal[string] + identifier[str] ( identifier[tableType] )+ literal[string] + identifier[str] ( identifier[columnName] )+ literal[string] , identifier[method] = literal[string] , identifier[body] = identifier[body] , identifier[verbose] = identifier[verbose] ) keyword[return] identifier[response]
def updateColumnValues(self, networkId, tableType, columnName, default, body, verbose=None): """ Sets the values for cells in the table specified by the `tableType` and `networkId` parameters. If the 'default` parameter is not specified, the message body should consist of key-value pairs with which to set values. If the `default` parameter is specified, its value will be used for every cell in the column. This is useful to set columns like "selected." :param networkId: SUID of the network containing the table :param tableType: The type of table :param columnName: Name of the column in which to set values :param default: Default Value. If this value is provided, all cells will be set to this. -- Not required, can be None :param body: Array of SUID Keyed values :param verbose: print more :returns: default: successful operation """ response = api(url=self.___url + 'networks/' + str(networkId) + '/tables/' + str(tableType) + '/columns/' + str(columnName) + '', method='PUT', body=body, verbose=verbose) return response
def _compute_distance_scaling(self, C, rrup, mag): """ Returns the distance scaling term """ rscale1 = rrup + C["c2"] * (10.0 ** (C["c3"] * mag)) return -np.log10(rscale1) - (C["c4"] * rrup)
def function[_compute_distance_scaling, parameter[self, C, rrup, mag]]: constant[ Returns the distance scaling term ] variable[rscale1] assign[=] binary_operation[name[rrup] + binary_operation[call[name[C]][constant[c2]] * binary_operation[constant[10.0] ** binary_operation[call[name[C]][constant[c3]] * name[mag]]]]] return[binary_operation[<ast.UnaryOp object at 0x7da20c990730> - binary_operation[call[name[C]][constant[c4]] * name[rrup]]]]
keyword[def] identifier[_compute_distance_scaling] ( identifier[self] , identifier[C] , identifier[rrup] , identifier[mag] ): literal[string] identifier[rscale1] = identifier[rrup] + identifier[C] [ literal[string] ]*( literal[int] **( identifier[C] [ literal[string] ]* identifier[mag] )) keyword[return] - identifier[np] . identifier[log10] ( identifier[rscale1] )-( identifier[C] [ literal[string] ]* identifier[rrup] )
def _compute_distance_scaling(self, C, rrup, mag): """ Returns the distance scaling term """ rscale1 = rrup + C['c2'] * 10.0 ** (C['c3'] * mag) return -np.log10(rscale1) - C['c4'] * rrup
def sample_from_largest_budget(self, info_dict): """We opted for a single multidimensional KDE compared to the hierarchy of one-dimensional KDEs used in TPE. The dimensional is seperated by budget. This function sample a configuration from largest budget. Firstly we sample "num_samples" configurations, then prefer one with the largest l(x)/g(x). Parameters: ----------- info_dict: dict record the information of this configuration Returns ------- dict: new configuration named sample dict: info_dict, record the information of this configuration """ best = np.inf best_vector = None budget = max(self.kde_models.keys()) l = self.kde_models[budget]['good'].pdf g = self.kde_models[budget]['bad'].pdf minimize_me = lambda x: max(1e-32, g(x))/max(l(x), 1e-32) kde_good = self.kde_models[budget]['good'] kde_bad = self.kde_models[budget]['bad'] for i in range(self.num_samples): idx = np.random.randint(0, len(kde_good.data)) datum = kde_good.data[idx] vector = [] for m, bw, t in zip(datum, kde_good.bw, self.vartypes): bw = max(bw, self.min_bandwidth) if t == 0: bw = self.bw_factor*bw vector.append(sps.truncnorm.rvs(-m/bw, (1-m)/bw, loc=m, scale=bw)) else: if np.random.rand() < (1-bw): vector.append(int(m)) else: vector.append(np.random.randint(t)) val = minimize_me(vector) if not np.isfinite(val): logger.warning('sampled vector: %s has EI value %s'%(vector, val)) logger.warning("data in the KDEs:\n%s\n%s"%(kde_good.data, kde_bad.data)) logger.warning("bandwidth of the KDEs:\n%s\n%s"%(kde_good.bw, kde_bad.bw)) logger.warning("l(x) = %s"%(l(vector))) logger.warning("g(x) = %s"%(g(vector))) # right now, this happens because a KDE does not contain all values for a categorical parameter # this cannot be fixed with the statsmodels KDE, so for now, we are just going to evaluate this one # if the good_kde has a finite value, i.e. there is no config with that value in the bad kde, # so it shouldn't be terrible. if np.isfinite(l(vector)): best_vector = vector break if val < best: best = val best_vector = vector if best_vector is None: logger.debug("Sampling based optimization with %i samples failed -> using random configuration"%self.num_samples) sample = self.configspace.sample_configuration().get_dictionary() info_dict['model_based_pick'] = False else: logger.debug('best_vector: {}, {}, {}, {}'.format(best_vector, best, l(best_vector), g(best_vector))) for i, hp_value in enumerate(best_vector): if isinstance( self.configspace.get_hyperparameter( self.configspace.get_hyperparameter_by_idx(i) ), ConfigSpace.hyperparameters.CategoricalHyperparameter ): best_vector[i] = int(np.rint(best_vector[i])) sample = ConfigSpace.Configuration(self.configspace, vector=best_vector).get_dictionary() sample = ConfigSpace.util.deactivate_inactive_hyperparameters( configuration_space=self.configspace, configuration=sample) info_dict['model_based_pick'] = True return sample, info_dict
def function[sample_from_largest_budget, parameter[self, info_dict]]: constant[We opted for a single multidimensional KDE compared to the hierarchy of one-dimensional KDEs used in TPE. The dimensional is seperated by budget. This function sample a configuration from largest budget. Firstly we sample "num_samples" configurations, then prefer one with the largest l(x)/g(x). Parameters: ----------- info_dict: dict record the information of this configuration Returns ------- dict: new configuration named sample dict: info_dict, record the information of this configuration ] variable[best] assign[=] name[np].inf variable[best_vector] assign[=] constant[None] variable[budget] assign[=] call[name[max], parameter[call[name[self].kde_models.keys, parameter[]]]] variable[l] assign[=] call[call[name[self].kde_models][name[budget]]][constant[good]].pdf variable[g] assign[=] call[call[name[self].kde_models][name[budget]]][constant[bad]].pdf variable[minimize_me] assign[=] <ast.Lambda object at 0x7da2054a6800> variable[kde_good] assign[=] call[call[name[self].kde_models][name[budget]]][constant[good]] variable[kde_bad] assign[=] call[call[name[self].kde_models][name[budget]]][constant[bad]] for taget[name[i]] in starred[call[name[range], parameter[name[self].num_samples]]] begin[:] variable[idx] assign[=] call[name[np].random.randint, parameter[constant[0], call[name[len], parameter[name[kde_good].data]]]] variable[datum] assign[=] call[name[kde_good].data][name[idx]] variable[vector] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da2054a7760>, <ast.Name object at 0x7da2054a4c70>, <ast.Name object at 0x7da2054a7490>]]] in starred[call[name[zip], parameter[name[datum], name[kde_good].bw, name[self].vartypes]]] begin[:] variable[bw] assign[=] call[name[max], parameter[name[bw], name[self].min_bandwidth]] if compare[name[t] equal[==] constant[0]] begin[:] variable[bw] assign[=] binary_operation[name[self].bw_factor * name[bw]] call[name[vector].append, parameter[call[name[sps].truncnorm.rvs, parameter[binary_operation[<ast.UnaryOp object at 0x7da20e748700> / name[bw]], binary_operation[binary_operation[constant[1] - name[m]] / name[bw]]]]]] variable[val] assign[=] call[name[minimize_me], parameter[name[vector]]] if <ast.UnaryOp object at 0x7da20e74af80> begin[:] call[name[logger].warning, parameter[binary_operation[constant[sampled vector: %s has EI value %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e74b610>, <ast.Name object at 0x7da20e74ad40>]]]]] call[name[logger].warning, parameter[binary_operation[constant[data in the KDEs: %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20e74b670>, <ast.Attribute object at 0x7da2041d81c0>]]]]] call[name[logger].warning, parameter[binary_operation[constant[bandwidth of the KDEs: %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da2041da830>, <ast.Attribute object at 0x7da2041dbca0>]]]]] call[name[logger].warning, parameter[binary_operation[constant[l(x) = %s] <ast.Mod object at 0x7da2590d6920> call[name[l], parameter[name[vector]]]]]] call[name[logger].warning, parameter[binary_operation[constant[g(x) = %s] <ast.Mod object at 0x7da2590d6920> call[name[g], parameter[name[vector]]]]]] if call[name[np].isfinite, parameter[call[name[l], parameter[name[vector]]]]] begin[:] variable[best_vector] assign[=] name[vector] break if compare[name[val] less[<] name[best]] begin[:] variable[best] assign[=] name[val] variable[best_vector] assign[=] name[vector] if compare[name[best_vector] is constant[None]] begin[:] call[name[logger].debug, parameter[binary_operation[constant[Sampling based optimization with %i samples failed -> using random configuration] <ast.Mod object at 0x7da2590d6920> name[self].num_samples]]] variable[sample] assign[=] call[call[name[self].configspace.sample_configuration, parameter[]].get_dictionary, parameter[]] call[name[info_dict]][constant[model_based_pick]] assign[=] constant[False] return[tuple[[<ast.Name object at 0x7da1b1f39510>, <ast.Name object at 0x7da1b1f38070>]]]
keyword[def] identifier[sample_from_largest_budget] ( identifier[self] , identifier[info_dict] ): literal[string] identifier[best] = identifier[np] . identifier[inf] identifier[best_vector] = keyword[None] identifier[budget] = identifier[max] ( identifier[self] . identifier[kde_models] . identifier[keys] ()) identifier[l] = identifier[self] . identifier[kde_models] [ identifier[budget] ][ literal[string] ]. identifier[pdf] identifier[g] = identifier[self] . identifier[kde_models] [ identifier[budget] ][ literal[string] ]. identifier[pdf] identifier[minimize_me] = keyword[lambda] identifier[x] : identifier[max] ( literal[int] , identifier[g] ( identifier[x] ))/ identifier[max] ( identifier[l] ( identifier[x] ), literal[int] ) identifier[kde_good] = identifier[self] . identifier[kde_models] [ identifier[budget] ][ literal[string] ] identifier[kde_bad] = identifier[self] . identifier[kde_models] [ identifier[budget] ][ literal[string] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[num_samples] ): identifier[idx] = identifier[np] . identifier[random] . identifier[randint] ( literal[int] , identifier[len] ( identifier[kde_good] . identifier[data] )) identifier[datum] = identifier[kde_good] . identifier[data] [ identifier[idx] ] identifier[vector] =[] keyword[for] identifier[m] , identifier[bw] , identifier[t] keyword[in] identifier[zip] ( identifier[datum] , identifier[kde_good] . identifier[bw] , identifier[self] . identifier[vartypes] ): identifier[bw] = identifier[max] ( identifier[bw] , identifier[self] . identifier[min_bandwidth] ) keyword[if] identifier[t] == literal[int] : identifier[bw] = identifier[self] . identifier[bw_factor] * identifier[bw] identifier[vector] . identifier[append] ( identifier[sps] . identifier[truncnorm] . identifier[rvs] (- identifier[m] / identifier[bw] ,( literal[int] - identifier[m] )/ identifier[bw] , identifier[loc] = identifier[m] , identifier[scale] = identifier[bw] )) keyword[else] : keyword[if] identifier[np] . identifier[random] . identifier[rand] ()<( literal[int] - identifier[bw] ): identifier[vector] . identifier[append] ( identifier[int] ( identifier[m] )) keyword[else] : identifier[vector] . identifier[append] ( identifier[np] . identifier[random] . identifier[randint] ( identifier[t] )) identifier[val] = identifier[minimize_me] ( identifier[vector] ) keyword[if] keyword[not] identifier[np] . identifier[isfinite] ( identifier[val] ): identifier[logger] . identifier[warning] ( literal[string] %( identifier[vector] , identifier[val] )) identifier[logger] . identifier[warning] ( literal[string] %( identifier[kde_good] . identifier[data] , identifier[kde_bad] . identifier[data] )) identifier[logger] . identifier[warning] ( literal[string] %( identifier[kde_good] . identifier[bw] , identifier[kde_bad] . identifier[bw] )) identifier[logger] . identifier[warning] ( literal[string] %( identifier[l] ( identifier[vector] ))) identifier[logger] . identifier[warning] ( literal[string] %( identifier[g] ( identifier[vector] ))) keyword[if] identifier[np] . identifier[isfinite] ( identifier[l] ( identifier[vector] )): identifier[best_vector] = identifier[vector] keyword[break] keyword[if] identifier[val] < identifier[best] : identifier[best] = identifier[val] identifier[best_vector] = identifier[vector] keyword[if] identifier[best_vector] keyword[is] keyword[None] : identifier[logger] . identifier[debug] ( literal[string] % identifier[self] . identifier[num_samples] ) identifier[sample] = identifier[self] . identifier[configspace] . identifier[sample_configuration] (). identifier[get_dictionary] () identifier[info_dict] [ literal[string] ]= keyword[False] keyword[else] : identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[best_vector] , identifier[best] , identifier[l] ( identifier[best_vector] ), identifier[g] ( identifier[best_vector] ))) keyword[for] identifier[i] , identifier[hp_value] keyword[in] identifier[enumerate] ( identifier[best_vector] ): keyword[if] identifier[isinstance] ( identifier[self] . identifier[configspace] . identifier[get_hyperparameter] ( identifier[self] . identifier[configspace] . identifier[get_hyperparameter_by_idx] ( identifier[i] ) ), identifier[ConfigSpace] . identifier[hyperparameters] . identifier[CategoricalHyperparameter] ): identifier[best_vector] [ identifier[i] ]= identifier[int] ( identifier[np] . identifier[rint] ( identifier[best_vector] [ identifier[i] ])) identifier[sample] = identifier[ConfigSpace] . identifier[Configuration] ( identifier[self] . identifier[configspace] , identifier[vector] = identifier[best_vector] ). identifier[get_dictionary] () identifier[sample] = identifier[ConfigSpace] . identifier[util] . identifier[deactivate_inactive_hyperparameters] ( identifier[configuration_space] = identifier[self] . identifier[configspace] , identifier[configuration] = identifier[sample] ) identifier[info_dict] [ literal[string] ]= keyword[True] keyword[return] identifier[sample] , identifier[info_dict]
def sample_from_largest_budget(self, info_dict): """We opted for a single multidimensional KDE compared to the hierarchy of one-dimensional KDEs used in TPE. The dimensional is seperated by budget. This function sample a configuration from largest budget. Firstly we sample "num_samples" configurations, then prefer one with the largest l(x)/g(x). Parameters: ----------- info_dict: dict record the information of this configuration Returns ------- dict: new configuration named sample dict: info_dict, record the information of this configuration """ best = np.inf best_vector = None budget = max(self.kde_models.keys()) l = self.kde_models[budget]['good'].pdf g = self.kde_models[budget]['bad'].pdf minimize_me = lambda x: max(1e-32, g(x)) / max(l(x), 1e-32) kde_good = self.kde_models[budget]['good'] kde_bad = self.kde_models[budget]['bad'] for i in range(self.num_samples): idx = np.random.randint(0, len(kde_good.data)) datum = kde_good.data[idx] vector = [] for (m, bw, t) in zip(datum, kde_good.bw, self.vartypes): bw = max(bw, self.min_bandwidth) if t == 0: bw = self.bw_factor * bw vector.append(sps.truncnorm.rvs(-m / bw, (1 - m) / bw, loc=m, scale=bw)) # depends on [control=['if'], data=[]] elif np.random.rand() < 1 - bw: vector.append(int(m)) # depends on [control=['if'], data=[]] else: vector.append(np.random.randint(t)) # depends on [control=['for'], data=[]] val = minimize_me(vector) if not np.isfinite(val): logger.warning('sampled vector: %s has EI value %s' % (vector, val)) logger.warning('data in the KDEs:\n%s\n%s' % (kde_good.data, kde_bad.data)) logger.warning('bandwidth of the KDEs:\n%s\n%s' % (kde_good.bw, kde_bad.bw)) logger.warning('l(x) = %s' % l(vector)) logger.warning('g(x) = %s' % g(vector)) # right now, this happens because a KDE does not contain all values for a categorical parameter # this cannot be fixed with the statsmodels KDE, so for now, we are just going to evaluate this one # if the good_kde has a finite value, i.e. there is no config with that value in the bad kde, # so it shouldn't be terrible. if np.isfinite(l(vector)): best_vector = vector break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if val < best: best = val best_vector = vector # depends on [control=['if'], data=['val', 'best']] # depends on [control=['for'], data=[]] if best_vector is None: logger.debug('Sampling based optimization with %i samples failed -> using random configuration' % self.num_samples) sample = self.configspace.sample_configuration().get_dictionary() info_dict['model_based_pick'] = False # depends on [control=['if'], data=[]] else: logger.debug('best_vector: {}, {}, {}, {}'.format(best_vector, best, l(best_vector), g(best_vector))) for (i, hp_value) in enumerate(best_vector): if isinstance(self.configspace.get_hyperparameter(self.configspace.get_hyperparameter_by_idx(i)), ConfigSpace.hyperparameters.CategoricalHyperparameter): best_vector[i] = int(np.rint(best_vector[i])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] sample = ConfigSpace.Configuration(self.configspace, vector=best_vector).get_dictionary() sample = ConfigSpace.util.deactivate_inactive_hyperparameters(configuration_space=self.configspace, configuration=sample) info_dict['model_based_pick'] = True return (sample, info_dict)
def set_elem(elem_ref, elem): """ Sets element referenced by the elem_ref. Returns the elem. :param elem_ref: :param elem: :return: """ if elem_ref is None or elem_ref == elem or not is_elem_ref(elem_ref): return elem elif elem_ref[0] == ElemRefObj: setattr(elem_ref[1], elem_ref[2], elem) return elem elif elem_ref[0] == ElemRefArr: elem_ref[1][elem_ref[2]] = elem return elem
def function[set_elem, parameter[elem_ref, elem]]: constant[ Sets element referenced by the elem_ref. Returns the elem. :param elem_ref: :param elem: :return: ] if <ast.BoolOp object at 0x7da1b2437b20> begin[:] return[name[elem]]
keyword[def] identifier[set_elem] ( identifier[elem_ref] , identifier[elem] ): literal[string] keyword[if] identifier[elem_ref] keyword[is] keyword[None] keyword[or] identifier[elem_ref] == identifier[elem] keyword[or] keyword[not] identifier[is_elem_ref] ( identifier[elem_ref] ): keyword[return] identifier[elem] keyword[elif] identifier[elem_ref] [ literal[int] ]== identifier[ElemRefObj] : identifier[setattr] ( identifier[elem_ref] [ literal[int] ], identifier[elem_ref] [ literal[int] ], identifier[elem] ) keyword[return] identifier[elem] keyword[elif] identifier[elem_ref] [ literal[int] ]== identifier[ElemRefArr] : identifier[elem_ref] [ literal[int] ][ identifier[elem_ref] [ literal[int] ]]= identifier[elem] keyword[return] identifier[elem]
def set_elem(elem_ref, elem): """ Sets element referenced by the elem_ref. Returns the elem. :param elem_ref: :param elem: :return: """ if elem_ref is None or elem_ref == elem or (not is_elem_ref(elem_ref)): return elem # depends on [control=['if'], data=[]] elif elem_ref[0] == ElemRefObj: setattr(elem_ref[1], elem_ref[2], elem) return elem # depends on [control=['if'], data=[]] elif elem_ref[0] == ElemRefArr: elem_ref[1][elem_ref[2]] = elem return elem # depends on [control=['if'], data=[]]
def get_fields_dict(self, row): """ Returns a dict of field name and cleaned value pairs to initialize the model. Beware, it aligns the lists of fields and row values with Nones to allow for adding fields not found in the CSV. Whitespace around the value of the cell is stripped. """ return {k: getattr(self, 'clean_{}'.format(k), lambda x: x)(v.strip() if isinstance(v, str) else None) for k, v in zip_longest(self.get_fields(), row)}
def function[get_fields_dict, parameter[self, row]]: constant[ Returns a dict of field name and cleaned value pairs to initialize the model. Beware, it aligns the lists of fields and row values with Nones to allow for adding fields not found in the CSV. Whitespace around the value of the cell is stripped. ] return[<ast.DictComp object at 0x7da1b242ae90>]
keyword[def] identifier[get_fields_dict] ( identifier[self] , identifier[row] ): literal[string] keyword[return] { identifier[k] : identifier[getattr] ( identifier[self] , literal[string] . identifier[format] ( identifier[k] ), keyword[lambda] identifier[x] : identifier[x] )( identifier[v] . identifier[strip] () keyword[if] identifier[isinstance] ( identifier[v] , identifier[str] ) keyword[else] keyword[None] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[zip_longest] ( identifier[self] . identifier[get_fields] (), identifier[row] )}
def get_fields_dict(self, row): """ Returns a dict of field name and cleaned value pairs to initialize the model. Beware, it aligns the lists of fields and row values with Nones to allow for adding fields not found in the CSV. Whitespace around the value of the cell is stripped. """ return {k: getattr(self, 'clean_{}'.format(k), lambda x: x)(v.strip() if isinstance(v, str) else None) for (k, v) in zip_longest(self.get_fields(), row)}
def check_network_health(self): r""" This method check the network topological health by checking for: (1) Isolated pores (2) Islands or isolated clusters of pores (3) Duplicate throats (4) Bidirectional throats (ie. symmetrical adjacency matrix) (5) Headless throats Returns ------- A dictionary containing the offending pores or throat numbers under each named key. It also returns a list of which pores and throats should be trimmed from the network to restore health. This list is a suggestion only, and is based on keeping the largest cluster and trimming the others. Notes ----- - Does not yet check for duplicate pores - Does not yet suggest which throats to remove - This is just a 'check' and does not 'fix' the problems it finds """ health = HealthDict() health['disconnected_clusters'] = [] health['isolated_pores'] = [] health['trim_pores'] = [] health['duplicate_throats'] = [] health['bidirectional_throats'] = [] health['headless_throats'] = [] health['looped_throats'] = [] # Check for headless throats hits = sp.where(self['throat.conns'] > self.Np - 1)[0] if sp.size(hits) > 0: health['headless_throats'] = sp.unique(hits) return health # Check for throats that loop back onto the same pore P12 = self['throat.conns'] hits = sp.where(P12[:, 0] == P12[:, 1])[0] if sp.size(hits) > 0: health['looped_throats'] = hits # Check for individual isolated pores Ps = self.num_neighbors(self.pores()) if sp.sum(Ps == 0) > 0: health['isolated_pores'] = sp.where(Ps == 0)[0] # Check for separated clusters of pores temp = [] am = self.create_adjacency_matrix(fmt='coo', triu=True) Cs = csg.connected_components(am, directed=False)[1] if sp.unique(Cs).size > 1: for i in sp.unique(Cs): temp.append(sp.where(Cs == i)[0]) b = sp.array([len(item) for item in temp]) c = sp.argsort(b)[::-1] for i in range(0, len(c)): health['disconnected_clusters'].append(temp[c[i]]) if i > 0: health['trim_pores'].extend(temp[c[i]]) # Check for duplicate throats am = self.create_adjacency_matrix(fmt='csr', triu=True).tocoo() hits = sp.where(am.data > 1)[0] if len(hits): mergeTs = [] hits = sp.vstack((am.row[hits], am.col[hits])).T ihits = hits[:, 0] + 1j*hits[:, 1] conns = self['throat.conns'] iconns = conns[:, 0] + 1j*conns[:, 1] # Convert to imaginary for item in ihits: mergeTs.append(sp.where(iconns == item)[0]) health['duplicate_throats'] = mergeTs # Check for bidirectional throats adjmat = self.create_adjacency_matrix(fmt='coo') num_full = adjmat.sum() temp = sprs.triu(adjmat, k=1) num_upper = temp.sum() if num_full > num_upper: biTs = sp.where(self['throat.conns'][:, 0] > self['throat.conns'][:, 1])[0] health['bidirectional_throats'] = biTs.tolist() return health
def function[check_network_health, parameter[self]]: constant[ This method check the network topological health by checking for: (1) Isolated pores (2) Islands or isolated clusters of pores (3) Duplicate throats (4) Bidirectional throats (ie. symmetrical adjacency matrix) (5) Headless throats Returns ------- A dictionary containing the offending pores or throat numbers under each named key. It also returns a list of which pores and throats should be trimmed from the network to restore health. This list is a suggestion only, and is based on keeping the largest cluster and trimming the others. Notes ----- - Does not yet check for duplicate pores - Does not yet suggest which throats to remove - This is just a 'check' and does not 'fix' the problems it finds ] variable[health] assign[=] call[name[HealthDict], parameter[]] call[name[health]][constant[disconnected_clusters]] assign[=] list[[]] call[name[health]][constant[isolated_pores]] assign[=] list[[]] call[name[health]][constant[trim_pores]] assign[=] list[[]] call[name[health]][constant[duplicate_throats]] assign[=] list[[]] call[name[health]][constant[bidirectional_throats]] assign[=] list[[]] call[name[health]][constant[headless_throats]] assign[=] list[[]] call[name[health]][constant[looped_throats]] assign[=] list[[]] variable[hits] assign[=] call[call[name[sp].where, parameter[compare[call[name[self]][constant[throat.conns]] greater[>] binary_operation[name[self].Np - constant[1]]]]]][constant[0]] if compare[call[name[sp].size, parameter[name[hits]]] greater[>] constant[0]] begin[:] call[name[health]][constant[headless_throats]] assign[=] call[name[sp].unique, parameter[name[hits]]] return[name[health]] variable[P12] assign[=] call[name[self]][constant[throat.conns]] variable[hits] assign[=] call[call[name[sp].where, parameter[compare[call[name[P12]][tuple[[<ast.Slice object at 0x7da207f01ea0>, <ast.Constant object at 0x7da207f025f0>]]] equal[==] call[name[P12]][tuple[[<ast.Slice object at 0x7da207f02f20>, <ast.Constant object at 0x7da207f02aa0>]]]]]]][constant[0]] if compare[call[name[sp].size, parameter[name[hits]]] greater[>] constant[0]] begin[:] call[name[health]][constant[looped_throats]] assign[=] name[hits] variable[Ps] assign[=] call[name[self].num_neighbors, parameter[call[name[self].pores, parameter[]]]] if compare[call[name[sp].sum, parameter[compare[name[Ps] equal[==] constant[0]]]] greater[>] constant[0]] begin[:] call[name[health]][constant[isolated_pores]] assign[=] call[call[name[sp].where, parameter[compare[name[Ps] equal[==] constant[0]]]]][constant[0]] variable[temp] assign[=] list[[]] variable[am] assign[=] call[name[self].create_adjacency_matrix, parameter[]] variable[Cs] assign[=] call[call[name[csg].connected_components, parameter[name[am]]]][constant[1]] if compare[call[name[sp].unique, parameter[name[Cs]]].size greater[>] constant[1]] begin[:] for taget[name[i]] in starred[call[name[sp].unique, parameter[name[Cs]]]] begin[:] call[name[temp].append, parameter[call[call[name[sp].where, parameter[compare[name[Cs] equal[==] name[i]]]]][constant[0]]]] variable[b] assign[=] call[name[sp].array, parameter[<ast.ListComp object at 0x7da207f01e40>]] variable[c] assign[=] call[call[name[sp].argsort, parameter[name[b]]]][<ast.Slice object at 0x7da207f03610>] for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[c]]]]]] begin[:] call[call[name[health]][constant[disconnected_clusters]].append, parameter[call[name[temp]][call[name[c]][name[i]]]]] if compare[name[i] greater[>] constant[0]] begin[:] call[call[name[health]][constant[trim_pores]].extend, parameter[call[name[temp]][call[name[c]][name[i]]]]] variable[am] assign[=] call[call[name[self].create_adjacency_matrix, parameter[]].tocoo, parameter[]] variable[hits] assign[=] call[call[name[sp].where, parameter[compare[name[am].data greater[>] constant[1]]]]][constant[0]] if call[name[len], parameter[name[hits]]] begin[:] variable[mergeTs] assign[=] list[[]] variable[hits] assign[=] call[name[sp].vstack, parameter[tuple[[<ast.Subscript object at 0x7da18c4cc130>, <ast.Subscript object at 0x7da18c4cc250>]]]].T variable[ihits] assign[=] binary_operation[call[name[hits]][tuple[[<ast.Slice object at 0x7da18c4cceb0>, <ast.Constant object at 0x7da18c4cfe80>]]] + binary_operation[constant[1j] * call[name[hits]][tuple[[<ast.Slice object at 0x7da18c4cfee0>, <ast.Constant object at 0x7da18c4cfbb0>]]]]] variable[conns] assign[=] call[name[self]][constant[throat.conns]] variable[iconns] assign[=] binary_operation[call[name[conns]][tuple[[<ast.Slice object at 0x7da18c4cec80>, <ast.Constant object at 0x7da18c4cdc00>]]] + binary_operation[constant[1j] * call[name[conns]][tuple[[<ast.Slice object at 0x7da18c4cedd0>, <ast.Constant object at 0x7da18c4cf760>]]]]] for taget[name[item]] in starred[name[ihits]] begin[:] call[name[mergeTs].append, parameter[call[call[name[sp].where, parameter[compare[name[iconns] equal[==] name[item]]]]][constant[0]]]] call[name[health]][constant[duplicate_throats]] assign[=] name[mergeTs] variable[adjmat] assign[=] call[name[self].create_adjacency_matrix, parameter[]] variable[num_full] assign[=] call[name[adjmat].sum, parameter[]] variable[temp] assign[=] call[name[sprs].triu, parameter[name[adjmat]]] variable[num_upper] assign[=] call[name[temp].sum, parameter[]] if compare[name[num_full] greater[>] name[num_upper]] begin[:] variable[biTs] assign[=] call[call[name[sp].where, parameter[compare[call[call[name[self]][constant[throat.conns]]][tuple[[<ast.Slice object at 0x7da18c4cc6d0>, <ast.Constant object at 0x7da18c4ccb50>]]] greater[>] call[call[name[self]][constant[throat.conns]]][tuple[[<ast.Slice object at 0x7da18c4ce110>, <ast.Constant object at 0x7da18c4ce5f0>]]]]]]][constant[0]] call[name[health]][constant[bidirectional_throats]] assign[=] call[name[biTs].tolist, parameter[]] return[name[health]]
keyword[def] identifier[check_network_health] ( identifier[self] ): literal[string] identifier[health] = identifier[HealthDict] () identifier[health] [ literal[string] ]=[] identifier[health] [ literal[string] ]=[] identifier[health] [ literal[string] ]=[] identifier[health] [ literal[string] ]=[] identifier[health] [ literal[string] ]=[] identifier[health] [ literal[string] ]=[] identifier[health] [ literal[string] ]=[] identifier[hits] = identifier[sp] . identifier[where] ( identifier[self] [ literal[string] ]> identifier[self] . identifier[Np] - literal[int] )[ literal[int] ] keyword[if] identifier[sp] . identifier[size] ( identifier[hits] )> literal[int] : identifier[health] [ literal[string] ]= identifier[sp] . identifier[unique] ( identifier[hits] ) keyword[return] identifier[health] identifier[P12] = identifier[self] [ literal[string] ] identifier[hits] = identifier[sp] . identifier[where] ( identifier[P12] [:, literal[int] ]== identifier[P12] [:, literal[int] ])[ literal[int] ] keyword[if] identifier[sp] . identifier[size] ( identifier[hits] )> literal[int] : identifier[health] [ literal[string] ]= identifier[hits] identifier[Ps] = identifier[self] . identifier[num_neighbors] ( identifier[self] . identifier[pores] ()) keyword[if] identifier[sp] . identifier[sum] ( identifier[Ps] == literal[int] )> literal[int] : identifier[health] [ literal[string] ]= identifier[sp] . identifier[where] ( identifier[Ps] == literal[int] )[ literal[int] ] identifier[temp] =[] identifier[am] = identifier[self] . identifier[create_adjacency_matrix] ( identifier[fmt] = literal[string] , identifier[triu] = keyword[True] ) identifier[Cs] = identifier[csg] . identifier[connected_components] ( identifier[am] , identifier[directed] = keyword[False] )[ literal[int] ] keyword[if] identifier[sp] . identifier[unique] ( identifier[Cs] ). identifier[size] > literal[int] : keyword[for] identifier[i] keyword[in] identifier[sp] . identifier[unique] ( identifier[Cs] ): identifier[temp] . identifier[append] ( identifier[sp] . identifier[where] ( identifier[Cs] == identifier[i] )[ literal[int] ]) identifier[b] = identifier[sp] . identifier[array] ([ identifier[len] ( identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[temp] ]) identifier[c] = identifier[sp] . identifier[argsort] ( identifier[b] )[::- literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[c] )): identifier[health] [ literal[string] ]. identifier[append] ( identifier[temp] [ identifier[c] [ identifier[i] ]]) keyword[if] identifier[i] > literal[int] : identifier[health] [ literal[string] ]. identifier[extend] ( identifier[temp] [ identifier[c] [ identifier[i] ]]) identifier[am] = identifier[self] . identifier[create_adjacency_matrix] ( identifier[fmt] = literal[string] , identifier[triu] = keyword[True] ). identifier[tocoo] () identifier[hits] = identifier[sp] . identifier[where] ( identifier[am] . identifier[data] > literal[int] )[ literal[int] ] keyword[if] identifier[len] ( identifier[hits] ): identifier[mergeTs] =[] identifier[hits] = identifier[sp] . identifier[vstack] (( identifier[am] . identifier[row] [ identifier[hits] ], identifier[am] . identifier[col] [ identifier[hits] ])). identifier[T] identifier[ihits] = identifier[hits] [:, literal[int] ]+ literal[int] * identifier[hits] [:, literal[int] ] identifier[conns] = identifier[self] [ literal[string] ] identifier[iconns] = identifier[conns] [:, literal[int] ]+ literal[int] * identifier[conns] [:, literal[int] ] keyword[for] identifier[item] keyword[in] identifier[ihits] : identifier[mergeTs] . identifier[append] ( identifier[sp] . identifier[where] ( identifier[iconns] == identifier[item] )[ literal[int] ]) identifier[health] [ literal[string] ]= identifier[mergeTs] identifier[adjmat] = identifier[self] . identifier[create_adjacency_matrix] ( identifier[fmt] = literal[string] ) identifier[num_full] = identifier[adjmat] . identifier[sum] () identifier[temp] = identifier[sprs] . identifier[triu] ( identifier[adjmat] , identifier[k] = literal[int] ) identifier[num_upper] = identifier[temp] . identifier[sum] () keyword[if] identifier[num_full] > identifier[num_upper] : identifier[biTs] = identifier[sp] . identifier[where] ( identifier[self] [ literal[string] ][:, literal[int] ]> identifier[self] [ literal[string] ][:, literal[int] ])[ literal[int] ] identifier[health] [ literal[string] ]= identifier[biTs] . identifier[tolist] () keyword[return] identifier[health]
def check_network_health(self): """ This method check the network topological health by checking for: (1) Isolated pores (2) Islands or isolated clusters of pores (3) Duplicate throats (4) Bidirectional throats (ie. symmetrical adjacency matrix) (5) Headless throats Returns ------- A dictionary containing the offending pores or throat numbers under each named key. It also returns a list of which pores and throats should be trimmed from the network to restore health. This list is a suggestion only, and is based on keeping the largest cluster and trimming the others. Notes ----- - Does not yet check for duplicate pores - Does not yet suggest which throats to remove - This is just a 'check' and does not 'fix' the problems it finds """ health = HealthDict() health['disconnected_clusters'] = [] health['isolated_pores'] = [] health['trim_pores'] = [] health['duplicate_throats'] = [] health['bidirectional_throats'] = [] health['headless_throats'] = [] health['looped_throats'] = [] # Check for headless throats hits = sp.where(self['throat.conns'] > self.Np - 1)[0] if sp.size(hits) > 0: health['headless_throats'] = sp.unique(hits) return health # depends on [control=['if'], data=[]] # Check for throats that loop back onto the same pore P12 = self['throat.conns'] hits = sp.where(P12[:, 0] == P12[:, 1])[0] if sp.size(hits) > 0: health['looped_throats'] = hits # depends on [control=['if'], data=[]] # Check for individual isolated pores Ps = self.num_neighbors(self.pores()) if sp.sum(Ps == 0) > 0: health['isolated_pores'] = sp.where(Ps == 0)[0] # depends on [control=['if'], data=[]] # Check for separated clusters of pores temp = [] am = self.create_adjacency_matrix(fmt='coo', triu=True) Cs = csg.connected_components(am, directed=False)[1] if sp.unique(Cs).size > 1: for i in sp.unique(Cs): temp.append(sp.where(Cs == i)[0]) # depends on [control=['for'], data=['i']] b = sp.array([len(item) for item in temp]) c = sp.argsort(b)[::-1] for i in range(0, len(c)): health['disconnected_clusters'].append(temp[c[i]]) if i > 0: health['trim_pores'].extend(temp[c[i]]) # depends on [control=['if'], data=['i']] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] # Check for duplicate throats am = self.create_adjacency_matrix(fmt='csr', triu=True).tocoo() hits = sp.where(am.data > 1)[0] if len(hits): mergeTs = [] hits = sp.vstack((am.row[hits], am.col[hits])).T ihits = hits[:, 0] + 1j * hits[:, 1] conns = self['throat.conns'] iconns = conns[:, 0] + 1j * conns[:, 1] # Convert to imaginary for item in ihits: mergeTs.append(sp.where(iconns == item)[0]) # depends on [control=['for'], data=['item']] health['duplicate_throats'] = mergeTs # depends on [control=['if'], data=[]] # Check for bidirectional throats adjmat = self.create_adjacency_matrix(fmt='coo') num_full = adjmat.sum() temp = sprs.triu(adjmat, k=1) num_upper = temp.sum() if num_full > num_upper: biTs = sp.where(self['throat.conns'][:, 0] > self['throat.conns'][:, 1])[0] health['bidirectional_throats'] = biTs.tolist() # depends on [control=['if'], data=[]] return health
def get_density_matrix(self, surface_divider = 20.0): """! @brief Calculates density matrix (P-Matrix). @param[in] surface_divider (double): Divider in each dimension that affect radius for density measurement. @return (list) Density matrix (P-Matrix). @see get_distance_matrix() """ if self.__ccore_som_pointer is not None: self._weights = wrapper.som_get_weights(self.__ccore_som_pointer) density_matrix = [[0] * self._cols for i in range(self._rows)] dimension = len(self._weights[0]) dim_max = [ float('-Inf') ] * dimension dim_min = [ float('Inf') ] * dimension for weight in self._weights: for index_dim in range(dimension): if weight[index_dim] > dim_max[index_dim]: dim_max[index_dim] = weight[index_dim] if weight[index_dim] < dim_min[index_dim]: dim_min[index_dim] = weight[index_dim] radius = [0.0] * len(self._weights[0]) for index_dim in range(dimension): radius[index_dim] = ( dim_max[index_dim] - dim_min[index_dim] ) / surface_divider ## TODO: do not use data for point in self._data: for index_neuron in range(len(self)): point_covered = True for index_dim in range(dimension): if abs(point[index_dim] - self._weights[index_neuron][index_dim]) > radius[index_dim]: point_covered = False break row = int(math.floor(index_neuron / self._cols)) col = index_neuron - row * self._cols if point_covered is True: density_matrix[row][col] += 1 return density_matrix
def function[get_density_matrix, parameter[self, surface_divider]]: constant[! @brief Calculates density matrix (P-Matrix). @param[in] surface_divider (double): Divider in each dimension that affect radius for density measurement. @return (list) Density matrix (P-Matrix). @see get_distance_matrix() ] if compare[name[self].__ccore_som_pointer is_not constant[None]] begin[:] name[self]._weights assign[=] call[name[wrapper].som_get_weights, parameter[name[self].__ccore_som_pointer]] variable[density_matrix] assign[=] <ast.ListComp object at 0x7da1b0123970> variable[dimension] assign[=] call[name[len], parameter[call[name[self]._weights][constant[0]]]] variable[dim_max] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b0120700>]] * name[dimension]] variable[dim_min] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b01225f0>]] * name[dimension]] for taget[name[weight]] in starred[name[self]._weights] begin[:] for taget[name[index_dim]] in starred[call[name[range], parameter[name[dimension]]]] begin[:] if compare[call[name[weight]][name[index_dim]] greater[>] call[name[dim_max]][name[index_dim]]] begin[:] call[name[dim_max]][name[index_dim]] assign[=] call[name[weight]][name[index_dim]] if compare[call[name[weight]][name[index_dim]] less[<] call[name[dim_min]][name[index_dim]]] begin[:] call[name[dim_min]][name[index_dim]] assign[=] call[name[weight]][name[index_dim]] variable[radius] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0121e10>]] * call[name[len], parameter[call[name[self]._weights][constant[0]]]]] for taget[name[index_dim]] in starred[call[name[range], parameter[name[dimension]]]] begin[:] call[name[radius]][name[index_dim]] assign[=] binary_operation[binary_operation[call[name[dim_max]][name[index_dim]] - call[name[dim_min]][name[index_dim]]] / name[surface_divider]] for taget[name[point]] in starred[name[self]._data] begin[:] for taget[name[index_neuron]] in starred[call[name[range], parameter[call[name[len], parameter[name[self]]]]]] begin[:] variable[point_covered] assign[=] constant[True] for taget[name[index_dim]] in starred[call[name[range], parameter[name[dimension]]]] begin[:] if compare[call[name[abs], parameter[binary_operation[call[name[point]][name[index_dim]] - call[call[name[self]._weights][name[index_neuron]]][name[index_dim]]]]] greater[>] call[name[radius]][name[index_dim]]] begin[:] variable[point_covered] assign[=] constant[False] break variable[row] assign[=] call[name[int], parameter[call[name[math].floor, parameter[binary_operation[name[index_neuron] / name[self]._cols]]]]] variable[col] assign[=] binary_operation[name[index_neuron] - binary_operation[name[row] * name[self]._cols]] if compare[name[point_covered] is constant[True]] begin[:] <ast.AugAssign object at 0x7da1b0120640> return[name[density_matrix]]
keyword[def] identifier[get_density_matrix] ( identifier[self] , identifier[surface_divider] = literal[int] ): literal[string] keyword[if] identifier[self] . identifier[__ccore_som_pointer] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_weights] = identifier[wrapper] . identifier[som_get_weights] ( identifier[self] . identifier[__ccore_som_pointer] ) identifier[density_matrix] =[[ literal[int] ]* identifier[self] . identifier[_cols] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[_rows] )] identifier[dimension] = identifier[len] ( identifier[self] . identifier[_weights] [ literal[int] ]) identifier[dim_max] =[ identifier[float] ( literal[string] )]* identifier[dimension] identifier[dim_min] =[ identifier[float] ( literal[string] )]* identifier[dimension] keyword[for] identifier[weight] keyword[in] identifier[self] . identifier[_weights] : keyword[for] identifier[index_dim] keyword[in] identifier[range] ( identifier[dimension] ): keyword[if] identifier[weight] [ identifier[index_dim] ]> identifier[dim_max] [ identifier[index_dim] ]: identifier[dim_max] [ identifier[index_dim] ]= identifier[weight] [ identifier[index_dim] ] keyword[if] identifier[weight] [ identifier[index_dim] ]< identifier[dim_min] [ identifier[index_dim] ]: identifier[dim_min] [ identifier[index_dim] ]= identifier[weight] [ identifier[index_dim] ] identifier[radius] =[ literal[int] ]* identifier[len] ( identifier[self] . identifier[_weights] [ literal[int] ]) keyword[for] identifier[index_dim] keyword[in] identifier[range] ( identifier[dimension] ): identifier[radius] [ identifier[index_dim] ]=( identifier[dim_max] [ identifier[index_dim] ]- identifier[dim_min] [ identifier[index_dim] ])/ identifier[surface_divider] keyword[for] identifier[point] keyword[in] identifier[self] . identifier[_data] : keyword[for] identifier[index_neuron] keyword[in] identifier[range] ( identifier[len] ( identifier[self] )): identifier[point_covered] = keyword[True] keyword[for] identifier[index_dim] keyword[in] identifier[range] ( identifier[dimension] ): keyword[if] identifier[abs] ( identifier[point] [ identifier[index_dim] ]- identifier[self] . identifier[_weights] [ identifier[index_neuron] ][ identifier[index_dim] ])> identifier[radius] [ identifier[index_dim] ]: identifier[point_covered] = keyword[False] keyword[break] identifier[row] = identifier[int] ( identifier[math] . identifier[floor] ( identifier[index_neuron] / identifier[self] . identifier[_cols] )) identifier[col] = identifier[index_neuron] - identifier[row] * identifier[self] . identifier[_cols] keyword[if] identifier[point_covered] keyword[is] keyword[True] : identifier[density_matrix] [ identifier[row] ][ identifier[col] ]+= literal[int] keyword[return] identifier[density_matrix]
def get_density_matrix(self, surface_divider=20.0): """! @brief Calculates density matrix (P-Matrix). @param[in] surface_divider (double): Divider in each dimension that affect radius for density measurement. @return (list) Density matrix (P-Matrix). @see get_distance_matrix() """ if self.__ccore_som_pointer is not None: self._weights = wrapper.som_get_weights(self.__ccore_som_pointer) # depends on [control=['if'], data=[]] density_matrix = [[0] * self._cols for i in range(self._rows)] dimension = len(self._weights[0]) dim_max = [float('-Inf')] * dimension dim_min = [float('Inf')] * dimension for weight in self._weights: for index_dim in range(dimension): if weight[index_dim] > dim_max[index_dim]: dim_max[index_dim] = weight[index_dim] # depends on [control=['if'], data=[]] if weight[index_dim] < dim_min[index_dim]: dim_min[index_dim] = weight[index_dim] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['index_dim']] # depends on [control=['for'], data=['weight']] radius = [0.0] * len(self._weights[0]) for index_dim in range(dimension): radius[index_dim] = (dim_max[index_dim] - dim_min[index_dim]) / surface_divider # depends on [control=['for'], data=['index_dim']] ## TODO: do not use data for point in self._data: for index_neuron in range(len(self)): point_covered = True for index_dim in range(dimension): if abs(point[index_dim] - self._weights[index_neuron][index_dim]) > radius[index_dim]: point_covered = False break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['index_dim']] row = int(math.floor(index_neuron / self._cols)) col = index_neuron - row * self._cols if point_covered is True: density_matrix[row][col] += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['index_neuron']] # depends on [control=['for'], data=['point']] return density_matrix
def _set_ldp_params(self, v, load=False): """ Setter method for ldp_params, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/mpls_interface/ldp_params (container) If this variable is read-only (config: false) in the source YANG file, then _set_ldp_params is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_ldp_params() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=ldp_params.ldp_params, is_container='container', presence=False, yang_name="ldp-params", rest_name="ldp-params", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure LDP parameters', u'cli-full-command': None, u'cli-full-no': None, u'cli-add-mode': None, u'cli-mode-name': u'config-router-mpls-interface-$(interface-name)-ldp-params'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """ldp_params must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=ldp_params.ldp_params, is_container='container', presence=False, yang_name="ldp-params", rest_name="ldp-params", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure LDP parameters', u'cli-full-command': None, u'cli-full-no': None, u'cli-add-mode': None, u'cli-mode-name': u'config-router-mpls-interface-$(interface-name)-ldp-params'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True)""", }) self.__ldp_params = t if hasattr(self, '_set'): self._set()
def function[_set_ldp_params, parameter[self, v, load]]: constant[ Setter method for ldp_params, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/mpls_interface/ldp_params (container) If this variable is read-only (config: false) in the source YANG file, then _set_ldp_params is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_ldp_params() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da18f00c040> name[self].__ldp_params assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_ldp_params] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[ldp_params] . identifier[ldp_params] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__ldp_params] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_ldp_params(self, v, load=False): """ Setter method for ldp_params, mapped from YANG variable /mpls_config/router/mpls/mpls_cmds_holder/mpls_interface/ldp_params (container) If this variable is read-only (config: false) in the source YANG file, then _set_ldp_params is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_ldp_params() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=ldp_params.ldp_params, is_container='container', presence=False, yang_name='ldp-params', rest_name='ldp-params', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure LDP parameters', u'cli-full-command': None, u'cli-full-no': None, u'cli-add-mode': None, u'cli-mode-name': u'config-router-mpls-interface-$(interface-name)-ldp-params'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'ldp_params must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=ldp_params.ldp_params, is_container=\'container\', presence=False, yang_name="ldp-params", rest_name="ldp-params", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Configure LDP parameters\', u\'cli-full-command\': None, u\'cli-full-no\': None, u\'cli-add-mode\': None, u\'cli-mode-name\': u\'config-router-mpls-interface-$(interface-name)-ldp-params\'}}, namespace=\'urn:brocade.com:mgmt:brocade-mpls\', defining_module=\'brocade-mpls\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__ldp_params = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def get(self, center, target, date): """Retrieve the position and velocity of a target with respect to a center Args: center (Target): target (Target): date (Date): Return: numpy.array: length-6 array position and velocity (in m and m/s) of the target, with respect to the center """ if (center.index, target.index) in self.segments: pos, vel = self.segments[center.index, target.index].compute_and_differentiate(date.jd) sign = 1 else: # When we wish to get a segment that is not available in the files (such as # EarthBarycenter with respect to the Moon, for example), we take the segment # representing the inverse vector if available and reverse it pos, vel = self.segments[target.index, center.index].compute_and_differentiate(date.jd) sign = -1 # In some cases, the pos vector contains both position and velocity if len(pos) == 3: # The velocity is given in km/days, so we convert to km/s # see: https://github.com/brandon-rhodes/python-jplephem/issues/19 for clarifications pv = np.concatenate((pos, vel / S_PER_DAY)) elif len(pos) == 6: pv = np.array(pos) else: raise JplError("Unknown state vector format") return sign * pv * 1000
def function[get, parameter[self, center, target, date]]: constant[Retrieve the position and velocity of a target with respect to a center Args: center (Target): target (Target): date (Date): Return: numpy.array: length-6 array position and velocity (in m and m/s) of the target, with respect to the center ] if compare[tuple[[<ast.Attribute object at 0x7da1b0b7cf10>, <ast.Attribute object at 0x7da1b0b7f190>]] in name[self].segments] begin[:] <ast.Tuple object at 0x7da1b0b9dc90> assign[=] call[call[name[self].segments][tuple[[<ast.Attribute object at 0x7da18eb574f0>, <ast.Attribute object at 0x7da18eb55b40>]]].compute_and_differentiate, parameter[name[date].jd]] variable[sign] assign[=] constant[1] if compare[call[name[len], parameter[name[pos]]] equal[==] constant[3]] begin[:] variable[pv] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da1b0c3e8f0>, <ast.BinOp object at 0x7da1b0c3e8c0>]]]] return[binary_operation[binary_operation[name[sign] * name[pv]] * constant[1000]]]
keyword[def] identifier[get] ( identifier[self] , identifier[center] , identifier[target] , identifier[date] ): literal[string] keyword[if] ( identifier[center] . identifier[index] , identifier[target] . identifier[index] ) keyword[in] identifier[self] . identifier[segments] : identifier[pos] , identifier[vel] = identifier[self] . identifier[segments] [ identifier[center] . identifier[index] , identifier[target] . identifier[index] ]. identifier[compute_and_differentiate] ( identifier[date] . identifier[jd] ) identifier[sign] = literal[int] keyword[else] : identifier[pos] , identifier[vel] = identifier[self] . identifier[segments] [ identifier[target] . identifier[index] , identifier[center] . identifier[index] ]. identifier[compute_and_differentiate] ( identifier[date] . identifier[jd] ) identifier[sign] =- literal[int] keyword[if] identifier[len] ( identifier[pos] )== literal[int] : identifier[pv] = identifier[np] . identifier[concatenate] (( identifier[pos] , identifier[vel] / identifier[S_PER_DAY] )) keyword[elif] identifier[len] ( identifier[pos] )== literal[int] : identifier[pv] = identifier[np] . identifier[array] ( identifier[pos] ) keyword[else] : keyword[raise] identifier[JplError] ( literal[string] ) keyword[return] identifier[sign] * identifier[pv] * literal[int]
def get(self, center, target, date): """Retrieve the position and velocity of a target with respect to a center Args: center (Target): target (Target): date (Date): Return: numpy.array: length-6 array position and velocity (in m and m/s) of the target, with respect to the center """ if (center.index, target.index) in self.segments: (pos, vel) = self.segments[center.index, target.index].compute_and_differentiate(date.jd) sign = 1 # depends on [control=['if'], data=[]] else: # When we wish to get a segment that is not available in the files (such as # EarthBarycenter with respect to the Moon, for example), we take the segment # representing the inverse vector if available and reverse it (pos, vel) = self.segments[target.index, center.index].compute_and_differentiate(date.jd) sign = -1 # In some cases, the pos vector contains both position and velocity if len(pos) == 3: # The velocity is given in km/days, so we convert to km/s # see: https://github.com/brandon-rhodes/python-jplephem/issues/19 for clarifications pv = np.concatenate((pos, vel / S_PER_DAY)) # depends on [control=['if'], data=[]] elif len(pos) == 6: pv = np.array(pos) # depends on [control=['if'], data=[]] else: raise JplError('Unknown state vector format') return sign * pv * 1000
def normalize_num_type(num_type): """ Work out what a sensible type for the array is. if the default type is float32, downcast 64bit float to float32. For ints, assume int32 """ if isinstance(num_type, tf.DType): num_type = num_type.as_numpy_dtype.type if num_type in [np.float32, np.float64]: # pylint: disable=E1101 num_type = settings.float_type elif num_type in [np.int16, np.int32, np.int64]: num_type = settings.int_type else: raise ValueError('Unknown dtype "{0}" passed to normalizer.'.format(num_type)) return num_type
def function[normalize_num_type, parameter[num_type]]: constant[ Work out what a sensible type for the array is. if the default type is float32, downcast 64bit float to float32. For ints, assume int32 ] if call[name[isinstance], parameter[name[num_type], name[tf].DType]] begin[:] variable[num_type] assign[=] name[num_type].as_numpy_dtype.type if compare[name[num_type] in list[[<ast.Attribute object at 0x7da18dc9ae60>, <ast.Attribute object at 0x7da18dc98640>]]] begin[:] variable[num_type] assign[=] name[settings].float_type return[name[num_type]]
keyword[def] identifier[normalize_num_type] ( identifier[num_type] ): literal[string] keyword[if] identifier[isinstance] ( identifier[num_type] , identifier[tf] . identifier[DType] ): identifier[num_type] = identifier[num_type] . identifier[as_numpy_dtype] . identifier[type] keyword[if] identifier[num_type] keyword[in] [ identifier[np] . identifier[float32] , identifier[np] . identifier[float64] ]: identifier[num_type] = identifier[settings] . identifier[float_type] keyword[elif] identifier[num_type] keyword[in] [ identifier[np] . identifier[int16] , identifier[np] . identifier[int32] , identifier[np] . identifier[int64] ]: identifier[num_type] = identifier[settings] . identifier[int_type] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[num_type] )) keyword[return] identifier[num_type]
def normalize_num_type(num_type): """ Work out what a sensible type for the array is. if the default type is float32, downcast 64bit float to float32. For ints, assume int32 """ if isinstance(num_type, tf.DType): num_type = num_type.as_numpy_dtype.type # depends on [control=['if'], data=[]] if num_type in [np.float32, np.float64]: # pylint: disable=E1101 num_type = settings.float_type # depends on [control=['if'], data=['num_type']] elif num_type in [np.int16, np.int32, np.int64]: num_type = settings.int_type # depends on [control=['if'], data=['num_type']] else: raise ValueError('Unknown dtype "{0}" passed to normalizer.'.format(num_type)) return num_type
def previous_history(self, e): # (C-p) u'''Move back through the history list, fetching the previous command. ''' self._history.previous_history(self.l_buffer) self.l_buffer.point = lineobj.EndOfLine self.finalize()
def function[previous_history, parameter[self, e]]: constant[Move back through the history list, fetching the previous command. ] call[name[self]._history.previous_history, parameter[name[self].l_buffer]] name[self].l_buffer.point assign[=] name[lineobj].EndOfLine call[name[self].finalize, parameter[]]
keyword[def] identifier[previous_history] ( identifier[self] , identifier[e] ): literal[string] identifier[self] . identifier[_history] . identifier[previous_history] ( identifier[self] . identifier[l_buffer] ) identifier[self] . identifier[l_buffer] . identifier[point] = identifier[lineobj] . identifier[EndOfLine] identifier[self] . identifier[finalize] ()
def previous_history(self, e): # (C-p) u'Move back through the history list, fetching the previous\n command. ' self._history.previous_history(self.l_buffer) self.l_buffer.point = lineobj.EndOfLine self.finalize()
def ChunkedTransformerLM(vocab_size, feature_depth=512, feedforward_depth=2048, num_layers=6, num_heads=8, dropout=0.1, chunk_selector=None, max_len=2048, mode='train'): """Transformer language model operating on chunks. The input to this model is a sequence presented as a list or tuple of chunks: (chunk1, chunk2, chunks3, ..., chunkN). Each chunk should have the same shape (batch, chunk-length) and together they represent a long sequence that's a concatenation chunk1,chunk2,...,chunkN. Chunked Transformer emulates the operation of a Transformer on this long sequence except for the chunked attention layer, which may attend to only a subset of the chunks to reduce memory use. Args: vocab_size: int: vocab size feature_depth: int: depth of embedding feedforward_depth: int: depth of feed-forward layer num_layers: int: number of encoder/decoder layers num_heads: int: number of attention heads dropout: float: dropout rate (how much to drop out) chunk_selector: a function from chunk number to list of chunks to attend (if None, attends to the previous chunks which is equivalent to setting chunk_selector(x) = [] if x < 1 else [x-1] (TransformerXL); we attend to the current chunk with a causal mask too, selected chunks unmasked). max_len: int: maximum symbol length for positional encoding mode: str: 'train' or 'eval' Returns: the layer. """ stack = [ChunkedDecoderLayer(feature_depth, feedforward_depth, num_heads, dropout, chunk_selector, mode) for _ in range(num_layers)] # Below each Map(L) applies the layer L to each chunk independently. return layers.Serial( layers.ShiftRight(), layers.Map(layers.Embedding(feature_depth, vocab_size)), layers.Map(layers.Dropout(rate=dropout, mode=mode)), layers.PositionalEncoding(max_len=max_len), layers.Serial(*stack), layers.Map(layers.LayerNorm()), layers.Map(layers.Dense(vocab_size)), layers.Map(layers.LogSoftmax()), )
def function[ChunkedTransformerLM, parameter[vocab_size, feature_depth, feedforward_depth, num_layers, num_heads, dropout, chunk_selector, max_len, mode]]: constant[Transformer language model operating on chunks. The input to this model is a sequence presented as a list or tuple of chunks: (chunk1, chunk2, chunks3, ..., chunkN). Each chunk should have the same shape (batch, chunk-length) and together they represent a long sequence that's a concatenation chunk1,chunk2,...,chunkN. Chunked Transformer emulates the operation of a Transformer on this long sequence except for the chunked attention layer, which may attend to only a subset of the chunks to reduce memory use. Args: vocab_size: int: vocab size feature_depth: int: depth of embedding feedforward_depth: int: depth of feed-forward layer num_layers: int: number of encoder/decoder layers num_heads: int: number of attention heads dropout: float: dropout rate (how much to drop out) chunk_selector: a function from chunk number to list of chunks to attend (if None, attends to the previous chunks which is equivalent to setting chunk_selector(x) = [] if x < 1 else [x-1] (TransformerXL); we attend to the current chunk with a causal mask too, selected chunks unmasked). max_len: int: maximum symbol length for positional encoding mode: str: 'train' or 'eval' Returns: the layer. ] variable[stack] assign[=] <ast.ListComp object at 0x7da18ede4940> return[call[name[layers].Serial, parameter[call[name[layers].ShiftRight, parameter[]], call[name[layers].Map, parameter[call[name[layers].Embedding, parameter[name[feature_depth], name[vocab_size]]]]], call[name[layers].Map, parameter[call[name[layers].Dropout, parameter[]]]], call[name[layers].PositionalEncoding, parameter[]], call[name[layers].Serial, parameter[<ast.Starred object at 0x7da18ede5180>]], call[name[layers].Map, parameter[call[name[layers].LayerNorm, parameter[]]]], call[name[layers].Map, parameter[call[name[layers].Dense, parameter[name[vocab_size]]]]], call[name[layers].Map, parameter[call[name[layers].LogSoftmax, parameter[]]]]]]]
keyword[def] identifier[ChunkedTransformerLM] ( identifier[vocab_size] , identifier[feature_depth] = literal[int] , identifier[feedforward_depth] = literal[int] , identifier[num_layers] = literal[int] , identifier[num_heads] = literal[int] , identifier[dropout] = literal[int] , identifier[chunk_selector] = keyword[None] , identifier[max_len] = literal[int] , identifier[mode] = literal[string] ): literal[string] identifier[stack] =[ identifier[ChunkedDecoderLayer] ( identifier[feature_depth] , identifier[feedforward_depth] , identifier[num_heads] , identifier[dropout] , identifier[chunk_selector] , identifier[mode] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_layers] )] keyword[return] identifier[layers] . identifier[Serial] ( identifier[layers] . identifier[ShiftRight] (), identifier[layers] . identifier[Map] ( identifier[layers] . identifier[Embedding] ( identifier[feature_depth] , identifier[vocab_size] )), identifier[layers] . identifier[Map] ( identifier[layers] . identifier[Dropout] ( identifier[rate] = identifier[dropout] , identifier[mode] = identifier[mode] )), identifier[layers] . identifier[PositionalEncoding] ( identifier[max_len] = identifier[max_len] ), identifier[layers] . identifier[Serial] (* identifier[stack] ), identifier[layers] . identifier[Map] ( identifier[layers] . identifier[LayerNorm] ()), identifier[layers] . identifier[Map] ( identifier[layers] . identifier[Dense] ( identifier[vocab_size] )), identifier[layers] . identifier[Map] ( identifier[layers] . identifier[LogSoftmax] ()), )
def ChunkedTransformerLM(vocab_size, feature_depth=512, feedforward_depth=2048, num_layers=6, num_heads=8, dropout=0.1, chunk_selector=None, max_len=2048, mode='train'): """Transformer language model operating on chunks. The input to this model is a sequence presented as a list or tuple of chunks: (chunk1, chunk2, chunks3, ..., chunkN). Each chunk should have the same shape (batch, chunk-length) and together they represent a long sequence that's a concatenation chunk1,chunk2,...,chunkN. Chunked Transformer emulates the operation of a Transformer on this long sequence except for the chunked attention layer, which may attend to only a subset of the chunks to reduce memory use. Args: vocab_size: int: vocab size feature_depth: int: depth of embedding feedforward_depth: int: depth of feed-forward layer num_layers: int: number of encoder/decoder layers num_heads: int: number of attention heads dropout: float: dropout rate (how much to drop out) chunk_selector: a function from chunk number to list of chunks to attend (if None, attends to the previous chunks which is equivalent to setting chunk_selector(x) = [] if x < 1 else [x-1] (TransformerXL); we attend to the current chunk with a causal mask too, selected chunks unmasked). max_len: int: maximum symbol length for positional encoding mode: str: 'train' or 'eval' Returns: the layer. """ stack = [ChunkedDecoderLayer(feature_depth, feedforward_depth, num_heads, dropout, chunk_selector, mode) for _ in range(num_layers)] # Below each Map(L) applies the layer L to each chunk independently. return layers.Serial(layers.ShiftRight(), layers.Map(layers.Embedding(feature_depth, vocab_size)), layers.Map(layers.Dropout(rate=dropout, mode=mode)), layers.PositionalEncoding(max_len=max_len), layers.Serial(*stack), layers.Map(layers.LayerNorm()), layers.Map(layers.Dense(vocab_size)), layers.Map(layers.LogSoftmax()))
def add_partition_with_environment_context(self, new_part, environment_context): """ Parameters: - new_part - environment_context """ self.send_add_partition_with_environment_context(new_part, environment_context) return self.recv_add_partition_with_environment_context()
def function[add_partition_with_environment_context, parameter[self, new_part, environment_context]]: constant[ Parameters: - new_part - environment_context ] call[name[self].send_add_partition_with_environment_context, parameter[name[new_part], name[environment_context]]] return[call[name[self].recv_add_partition_with_environment_context, parameter[]]]
keyword[def] identifier[add_partition_with_environment_context] ( identifier[self] , identifier[new_part] , identifier[environment_context] ): literal[string] identifier[self] . identifier[send_add_partition_with_environment_context] ( identifier[new_part] , identifier[environment_context] ) keyword[return] identifier[self] . identifier[recv_add_partition_with_environment_context] ()
def add_partition_with_environment_context(self, new_part, environment_context): """ Parameters: - new_part - environment_context """ self.send_add_partition_with_environment_context(new_part, environment_context) return self.recv_add_partition_with_environment_context()
def motion_detection_sensitivity(self): """Sensitivity level of Camera motion detection.""" if not self.triggers: return None for trigger in self.triggers: if trigger.get("type") != "pirMotionActive": continue sensitivity = trigger.get("sensitivity") if sensitivity: return sensitivity.get("default") return None
def function[motion_detection_sensitivity, parameter[self]]: constant[Sensitivity level of Camera motion detection.] if <ast.UnaryOp object at 0x7da204623760> begin[:] return[constant[None]] for taget[name[trigger]] in starred[name[self].triggers] begin[:] if compare[call[name[trigger].get, parameter[constant[type]]] not_equal[!=] constant[pirMotionActive]] begin[:] continue variable[sensitivity] assign[=] call[name[trigger].get, parameter[constant[sensitivity]]] if name[sensitivity] begin[:] return[call[name[sensitivity].get, parameter[constant[default]]]] return[constant[None]]
keyword[def] identifier[motion_detection_sensitivity] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[triggers] : keyword[return] keyword[None] keyword[for] identifier[trigger] keyword[in] identifier[self] . identifier[triggers] : keyword[if] identifier[trigger] . identifier[get] ( literal[string] )!= literal[string] : keyword[continue] identifier[sensitivity] = identifier[trigger] . identifier[get] ( literal[string] ) keyword[if] identifier[sensitivity] : keyword[return] identifier[sensitivity] . identifier[get] ( literal[string] ) keyword[return] keyword[None]
def motion_detection_sensitivity(self): """Sensitivity level of Camera motion detection.""" if not self.triggers: return None # depends on [control=['if'], data=[]] for trigger in self.triggers: if trigger.get('type') != 'pirMotionActive': continue # depends on [control=['if'], data=[]] sensitivity = trigger.get('sensitivity') if sensitivity: return sensitivity.get('default') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['trigger']] return None
def _add_attr_values_from_insert_to_original(original_code, insert_code, insert_code_list, attribute_name, op_list): """ This function appends values of the attribute `attribute_name` of the inserted code to the original values, and changes indexes inside inserted code. If some bytecode instruction in the inserted code used to call argument number i, after modification it calls argument n + i, where n - length of the values in the original code. So it helps to avoid variables mixing between two pieces of code. :param original_code: code to modify :param insert_code: code to insert :param insert_code_obj: bytes sequence of inserted code, which should be modified too :param attribute_name: name of attribute to modify ('co_names', 'co_consts' or 'co_varnames') :param op_list: sequence of bytecodes whose arguments should be changed :return: modified bytes sequence of the code to insert and new values of the attribute `attribute_name` for original code """ orig_value = getattr(original_code, attribute_name) insert_value = getattr(insert_code, attribute_name) orig_names_len = len(orig_value) code_with_new_values = list(insert_code_list) offset = 0 while offset < len(code_with_new_values): op = code_with_new_values[offset] if op in op_list: new_val = code_with_new_values[offset + 1] + orig_names_len if new_val > MAX_BYTE: code_with_new_values[offset + 1] = new_val & MAX_BYTE code_with_new_values = code_with_new_values[:offset] + [EXTENDED_ARG, new_val >> 8] + \ code_with_new_values[offset:] offset += 2 else: code_with_new_values[offset + 1] = new_val offset += 2 new_values = orig_value + insert_value return bytes(code_with_new_values), new_values
def function[_add_attr_values_from_insert_to_original, parameter[original_code, insert_code, insert_code_list, attribute_name, op_list]]: constant[ This function appends values of the attribute `attribute_name` of the inserted code to the original values, and changes indexes inside inserted code. If some bytecode instruction in the inserted code used to call argument number i, after modification it calls argument n + i, where n - length of the values in the original code. So it helps to avoid variables mixing between two pieces of code. :param original_code: code to modify :param insert_code: code to insert :param insert_code_obj: bytes sequence of inserted code, which should be modified too :param attribute_name: name of attribute to modify ('co_names', 'co_consts' or 'co_varnames') :param op_list: sequence of bytecodes whose arguments should be changed :return: modified bytes sequence of the code to insert and new values of the attribute `attribute_name` for original code ] variable[orig_value] assign[=] call[name[getattr], parameter[name[original_code], name[attribute_name]]] variable[insert_value] assign[=] call[name[getattr], parameter[name[insert_code], name[attribute_name]]] variable[orig_names_len] assign[=] call[name[len], parameter[name[orig_value]]] variable[code_with_new_values] assign[=] call[name[list], parameter[name[insert_code_list]]] variable[offset] assign[=] constant[0] while compare[name[offset] less[<] call[name[len], parameter[name[code_with_new_values]]]] begin[:] variable[op] assign[=] call[name[code_with_new_values]][name[offset]] if compare[name[op] in name[op_list]] begin[:] variable[new_val] assign[=] binary_operation[call[name[code_with_new_values]][binary_operation[name[offset] + constant[1]]] + name[orig_names_len]] if compare[name[new_val] greater[>] name[MAX_BYTE]] begin[:] call[name[code_with_new_values]][binary_operation[name[offset] + constant[1]]] assign[=] binary_operation[name[new_val] <ast.BitAnd object at 0x7da2590d6b60> name[MAX_BYTE]] variable[code_with_new_values] assign[=] binary_operation[binary_operation[call[name[code_with_new_values]][<ast.Slice object at 0x7da207f01b40>] + list[[<ast.Name object at 0x7da207f02a70>, <ast.BinOp object at 0x7da207f01720>]]] + call[name[code_with_new_values]][<ast.Slice object at 0x7da207f01780>]] <ast.AugAssign object at 0x7da207f004f0> <ast.AugAssign object at 0x7da207f01bd0> variable[new_values] assign[=] binary_operation[name[orig_value] + name[insert_value]] return[tuple[[<ast.Call object at 0x7da207f03b20>, <ast.Name object at 0x7da207f00970>]]]
keyword[def] identifier[_add_attr_values_from_insert_to_original] ( identifier[original_code] , identifier[insert_code] , identifier[insert_code_list] , identifier[attribute_name] , identifier[op_list] ): literal[string] identifier[orig_value] = identifier[getattr] ( identifier[original_code] , identifier[attribute_name] ) identifier[insert_value] = identifier[getattr] ( identifier[insert_code] , identifier[attribute_name] ) identifier[orig_names_len] = identifier[len] ( identifier[orig_value] ) identifier[code_with_new_values] = identifier[list] ( identifier[insert_code_list] ) identifier[offset] = literal[int] keyword[while] identifier[offset] < identifier[len] ( identifier[code_with_new_values] ): identifier[op] = identifier[code_with_new_values] [ identifier[offset] ] keyword[if] identifier[op] keyword[in] identifier[op_list] : identifier[new_val] = identifier[code_with_new_values] [ identifier[offset] + literal[int] ]+ identifier[orig_names_len] keyword[if] identifier[new_val] > identifier[MAX_BYTE] : identifier[code_with_new_values] [ identifier[offset] + literal[int] ]= identifier[new_val] & identifier[MAX_BYTE] identifier[code_with_new_values] = identifier[code_with_new_values] [: identifier[offset] ]+[ identifier[EXTENDED_ARG] , identifier[new_val] >> literal[int] ]+ identifier[code_with_new_values] [ identifier[offset] :] identifier[offset] += literal[int] keyword[else] : identifier[code_with_new_values] [ identifier[offset] + literal[int] ]= identifier[new_val] identifier[offset] += literal[int] identifier[new_values] = identifier[orig_value] + identifier[insert_value] keyword[return] identifier[bytes] ( identifier[code_with_new_values] ), identifier[new_values]
def _add_attr_values_from_insert_to_original(original_code, insert_code, insert_code_list, attribute_name, op_list): """ This function appends values of the attribute `attribute_name` of the inserted code to the original values, and changes indexes inside inserted code. If some bytecode instruction in the inserted code used to call argument number i, after modification it calls argument n + i, where n - length of the values in the original code. So it helps to avoid variables mixing between two pieces of code. :param original_code: code to modify :param insert_code: code to insert :param insert_code_obj: bytes sequence of inserted code, which should be modified too :param attribute_name: name of attribute to modify ('co_names', 'co_consts' or 'co_varnames') :param op_list: sequence of bytecodes whose arguments should be changed :return: modified bytes sequence of the code to insert and new values of the attribute `attribute_name` for original code """ orig_value = getattr(original_code, attribute_name) insert_value = getattr(insert_code, attribute_name) orig_names_len = len(orig_value) code_with_new_values = list(insert_code_list) offset = 0 while offset < len(code_with_new_values): op = code_with_new_values[offset] if op in op_list: new_val = code_with_new_values[offset + 1] + orig_names_len if new_val > MAX_BYTE: code_with_new_values[offset + 1] = new_val & MAX_BYTE code_with_new_values = code_with_new_values[:offset] + [EXTENDED_ARG, new_val >> 8] + code_with_new_values[offset:] offset += 2 # depends on [control=['if'], data=['new_val', 'MAX_BYTE']] else: code_with_new_values[offset + 1] = new_val # depends on [control=['if'], data=[]] offset += 2 # depends on [control=['while'], data=['offset']] new_values = orig_value + insert_value return (bytes(code_with_new_values), new_values)
def asset(url=None): """ Asset helper Generates path to a static asset based on configuration base path and support for versioning. Will easily allow you to move your assets away to a CDN without changing templates. Versioning allows you to cache your asset changes forever by the webserver. :param url: string - relative path to asset :return: string - full versioned url """ # fallback to url_for('static') if assets path not configured url = url.lstrip('/') assets_path = app.config.get('ASSETS_PATH') if not assets_path: url_for = app.jinja_env.globals.get('url_for') url = url_for('static', filename=url) else: assets_path = assets_path.rstrip('/') url = assets_path + '/' + url version = app.config.get('ASSETS_VERSION') if not version: return url sign = '?' if sign in url: sign = '&' pattern = '{url}{sign}v{version}' return pattern.format(url=url, sign=sign, version=version)
def function[asset, parameter[url]]: constant[ Asset helper Generates path to a static asset based on configuration base path and support for versioning. Will easily allow you to move your assets away to a CDN without changing templates. Versioning allows you to cache your asset changes forever by the webserver. :param url: string - relative path to asset :return: string - full versioned url ] variable[url] assign[=] call[name[url].lstrip, parameter[constant[/]]] variable[assets_path] assign[=] call[name[app].config.get, parameter[constant[ASSETS_PATH]]] if <ast.UnaryOp object at 0x7da18f58c490> begin[:] variable[url_for] assign[=] call[name[app].jinja_env.globals.get, parameter[constant[url_for]]] variable[url] assign[=] call[name[url_for], parameter[constant[static]]] variable[version] assign[=] call[name[app].config.get, parameter[constant[ASSETS_VERSION]]] if <ast.UnaryOp object at 0x7da18f58d8d0> begin[:] return[name[url]] variable[sign] assign[=] constant[?] if compare[name[sign] in name[url]] begin[:] variable[sign] assign[=] constant[&] variable[pattern] assign[=] constant[{url}{sign}v{version}] return[call[name[pattern].format, parameter[]]]
keyword[def] identifier[asset] ( identifier[url] = keyword[None] ): literal[string] identifier[url] = identifier[url] . identifier[lstrip] ( literal[string] ) identifier[assets_path] = identifier[app] . identifier[config] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[assets_path] : identifier[url_for] = identifier[app] . identifier[jinja_env] . identifier[globals] . identifier[get] ( literal[string] ) identifier[url] = identifier[url_for] ( literal[string] , identifier[filename] = identifier[url] ) keyword[else] : identifier[assets_path] = identifier[assets_path] . identifier[rstrip] ( literal[string] ) identifier[url] = identifier[assets_path] + literal[string] + identifier[url] identifier[version] = identifier[app] . identifier[config] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[version] : keyword[return] identifier[url] identifier[sign] = literal[string] keyword[if] identifier[sign] keyword[in] identifier[url] : identifier[sign] = literal[string] identifier[pattern] = literal[string] keyword[return] identifier[pattern] . identifier[format] ( identifier[url] = identifier[url] , identifier[sign] = identifier[sign] , identifier[version] = identifier[version] )
def asset(url=None): """ Asset helper Generates path to a static asset based on configuration base path and support for versioning. Will easily allow you to move your assets away to a CDN without changing templates. Versioning allows you to cache your asset changes forever by the webserver. :param url: string - relative path to asset :return: string - full versioned url """ # fallback to url_for('static') if assets path not configured url = url.lstrip('/') assets_path = app.config.get('ASSETS_PATH') if not assets_path: url_for = app.jinja_env.globals.get('url_for') url = url_for('static', filename=url) # depends on [control=['if'], data=[]] else: assets_path = assets_path.rstrip('/') url = assets_path + '/' + url version = app.config.get('ASSETS_VERSION') if not version: return url # depends on [control=['if'], data=[]] sign = '?' if sign in url: sign = '&' # depends on [control=['if'], data=['sign']] pattern = '{url}{sign}v{version}' return pattern.format(url=url, sign=sign, version=version)
def alignICP(source, target, iters=100, rigid=False): """ Return a copy of source actor which is aligned to target actor through the `Iterative Closest Point` algorithm. The core of the algorithm is to match each vertex in one surface with the closest surface point on the other, then apply the transformation that modify one surface to best match the other (in the least-square sense). .. hint:: |align1| |align1.py|_ |align2| |align2.py|_ """ if isinstance(source, Actor): source = source.polydata() if isinstance(target, Actor): target = target.polydata() icp = vtk.vtkIterativeClosestPointTransform() icp.SetSource(source) icp.SetTarget(target) icp.SetMaximumNumberOfIterations(iters) if rigid: icp.GetLandmarkTransform().SetModeToRigidBody() icp.StartByMatchingCentroidsOn() icp.Update() icpTransformFilter = vtk.vtkTransformPolyDataFilter() icpTransformFilter.SetInputData(source) icpTransformFilter.SetTransform(icp) icpTransformFilter.Update() poly = icpTransformFilter.GetOutput() actor = Actor(poly) # actor.info['transform'] = icp.GetLandmarkTransform() # not working! # do it manually... sourcePoints = vtk.vtkPoints() targetPoints = vtk.vtkPoints() for i in range(10): p1 = [0, 0, 0] source.GetPoints().GetPoint(i, p1) sourcePoints.InsertNextPoint(p1) p2 = [0, 0, 0] poly.GetPoints().GetPoint(i, p2) targetPoints.InsertNextPoint(p2) # Setup the transform landmarkTransform = vtk.vtkLandmarkTransform() landmarkTransform.SetSourceLandmarks(sourcePoints) landmarkTransform.SetTargetLandmarks(targetPoints) if rigid: landmarkTransform.SetModeToRigidBody() actor.info["transform"] = landmarkTransform return actor
def function[alignICP, parameter[source, target, iters, rigid]]: constant[ Return a copy of source actor which is aligned to target actor through the `Iterative Closest Point` algorithm. The core of the algorithm is to match each vertex in one surface with the closest surface point on the other, then apply the transformation that modify one surface to best match the other (in the least-square sense). .. hint:: |align1| |align1.py|_ |align2| |align2.py|_ ] if call[name[isinstance], parameter[name[source], name[Actor]]] begin[:] variable[source] assign[=] call[name[source].polydata, parameter[]] if call[name[isinstance], parameter[name[target], name[Actor]]] begin[:] variable[target] assign[=] call[name[target].polydata, parameter[]] variable[icp] assign[=] call[name[vtk].vtkIterativeClosestPointTransform, parameter[]] call[name[icp].SetSource, parameter[name[source]]] call[name[icp].SetTarget, parameter[name[target]]] call[name[icp].SetMaximumNumberOfIterations, parameter[name[iters]]] if name[rigid] begin[:] call[call[name[icp].GetLandmarkTransform, parameter[]].SetModeToRigidBody, parameter[]] call[name[icp].StartByMatchingCentroidsOn, parameter[]] call[name[icp].Update, parameter[]] variable[icpTransformFilter] assign[=] call[name[vtk].vtkTransformPolyDataFilter, parameter[]] call[name[icpTransformFilter].SetInputData, parameter[name[source]]] call[name[icpTransformFilter].SetTransform, parameter[name[icp]]] call[name[icpTransformFilter].Update, parameter[]] variable[poly] assign[=] call[name[icpTransformFilter].GetOutput, parameter[]] variable[actor] assign[=] call[name[Actor], parameter[name[poly]]] variable[sourcePoints] assign[=] call[name[vtk].vtkPoints, parameter[]] variable[targetPoints] assign[=] call[name[vtk].vtkPoints, parameter[]] for taget[name[i]] in starred[call[name[range], parameter[constant[10]]]] begin[:] variable[p1] assign[=] list[[<ast.Constant object at 0x7da18dc9abc0>, <ast.Constant object at 0x7da18dc98310>, <ast.Constant object at 0x7da18dc98610>]] call[call[name[source].GetPoints, parameter[]].GetPoint, parameter[name[i], name[p1]]] call[name[sourcePoints].InsertNextPoint, parameter[name[p1]]] variable[p2] assign[=] list[[<ast.Constant object at 0x7da18dc98c10>, <ast.Constant object at 0x7da18dc99960>, <ast.Constant object at 0x7da18dc9a380>]] call[call[name[poly].GetPoints, parameter[]].GetPoint, parameter[name[i], name[p2]]] call[name[targetPoints].InsertNextPoint, parameter[name[p2]]] variable[landmarkTransform] assign[=] call[name[vtk].vtkLandmarkTransform, parameter[]] call[name[landmarkTransform].SetSourceLandmarks, parameter[name[sourcePoints]]] call[name[landmarkTransform].SetTargetLandmarks, parameter[name[targetPoints]]] if name[rigid] begin[:] call[name[landmarkTransform].SetModeToRigidBody, parameter[]] call[name[actor].info][constant[transform]] assign[=] name[landmarkTransform] return[name[actor]]
keyword[def] identifier[alignICP] ( identifier[source] , identifier[target] , identifier[iters] = literal[int] , identifier[rigid] = keyword[False] ): literal[string] keyword[if] identifier[isinstance] ( identifier[source] , identifier[Actor] ): identifier[source] = identifier[source] . identifier[polydata] () keyword[if] identifier[isinstance] ( identifier[target] , identifier[Actor] ): identifier[target] = identifier[target] . identifier[polydata] () identifier[icp] = identifier[vtk] . identifier[vtkIterativeClosestPointTransform] () identifier[icp] . identifier[SetSource] ( identifier[source] ) identifier[icp] . identifier[SetTarget] ( identifier[target] ) identifier[icp] . identifier[SetMaximumNumberOfIterations] ( identifier[iters] ) keyword[if] identifier[rigid] : identifier[icp] . identifier[GetLandmarkTransform] (). identifier[SetModeToRigidBody] () identifier[icp] . identifier[StartByMatchingCentroidsOn] () identifier[icp] . identifier[Update] () identifier[icpTransformFilter] = identifier[vtk] . identifier[vtkTransformPolyDataFilter] () identifier[icpTransformFilter] . identifier[SetInputData] ( identifier[source] ) identifier[icpTransformFilter] . identifier[SetTransform] ( identifier[icp] ) identifier[icpTransformFilter] . identifier[Update] () identifier[poly] = identifier[icpTransformFilter] . identifier[GetOutput] () identifier[actor] = identifier[Actor] ( identifier[poly] ) identifier[sourcePoints] = identifier[vtk] . identifier[vtkPoints] () identifier[targetPoints] = identifier[vtk] . identifier[vtkPoints] () keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[p1] =[ literal[int] , literal[int] , literal[int] ] identifier[source] . identifier[GetPoints] (). identifier[GetPoint] ( identifier[i] , identifier[p1] ) identifier[sourcePoints] . identifier[InsertNextPoint] ( identifier[p1] ) identifier[p2] =[ literal[int] , literal[int] , literal[int] ] identifier[poly] . identifier[GetPoints] (). identifier[GetPoint] ( identifier[i] , identifier[p2] ) identifier[targetPoints] . identifier[InsertNextPoint] ( identifier[p2] ) identifier[landmarkTransform] = identifier[vtk] . identifier[vtkLandmarkTransform] () identifier[landmarkTransform] . identifier[SetSourceLandmarks] ( identifier[sourcePoints] ) identifier[landmarkTransform] . identifier[SetTargetLandmarks] ( identifier[targetPoints] ) keyword[if] identifier[rigid] : identifier[landmarkTransform] . identifier[SetModeToRigidBody] () identifier[actor] . identifier[info] [ literal[string] ]= identifier[landmarkTransform] keyword[return] identifier[actor]
def alignICP(source, target, iters=100, rigid=False): """ Return a copy of source actor which is aligned to target actor through the `Iterative Closest Point` algorithm. The core of the algorithm is to match each vertex in one surface with the closest surface point on the other, then apply the transformation that modify one surface to best match the other (in the least-square sense). .. hint:: |align1| |align1.py|_ |align2| |align2.py|_ """ if isinstance(source, Actor): source = source.polydata() # depends on [control=['if'], data=[]] if isinstance(target, Actor): target = target.polydata() # depends on [control=['if'], data=[]] icp = vtk.vtkIterativeClosestPointTransform() icp.SetSource(source) icp.SetTarget(target) icp.SetMaximumNumberOfIterations(iters) if rigid: icp.GetLandmarkTransform().SetModeToRigidBody() # depends on [control=['if'], data=[]] icp.StartByMatchingCentroidsOn() icp.Update() icpTransformFilter = vtk.vtkTransformPolyDataFilter() icpTransformFilter.SetInputData(source) icpTransformFilter.SetTransform(icp) icpTransformFilter.Update() poly = icpTransformFilter.GetOutput() actor = Actor(poly) # actor.info['transform'] = icp.GetLandmarkTransform() # not working! # do it manually... sourcePoints = vtk.vtkPoints() targetPoints = vtk.vtkPoints() for i in range(10): p1 = [0, 0, 0] source.GetPoints().GetPoint(i, p1) sourcePoints.InsertNextPoint(p1) p2 = [0, 0, 0] poly.GetPoints().GetPoint(i, p2) targetPoints.InsertNextPoint(p2) # depends on [control=['for'], data=['i']] # Setup the transform landmarkTransform = vtk.vtkLandmarkTransform() landmarkTransform.SetSourceLandmarks(sourcePoints) landmarkTransform.SetTargetLandmarks(targetPoints) if rigid: landmarkTransform.SetModeToRigidBody() # depends on [control=['if'], data=[]] actor.info['transform'] = landmarkTransform return actor
def add_artifact( self, filename, name=None, metadata=None, content_type=None, ): """Add a file as an artifact. In Sacred terminology an artifact is a file produced by the experiment run. In case of a MongoObserver that means storing the file in the database. This function can only be called during a run, and just calls the :py:meth:`sacred.run.Run.add_artifact` method. Parameters ---------- filename : str name of the file to be stored as artifact name : str, optional optionally set the name of the artifact. Defaults to the relative file-path. metadata: dict, optional optionally attach metadata to the artifact. This only has an effect when using the MongoObserver. content_type: str, optional optionally attach a content-type to the artifact. This only has an effect when using the MongoObserver. """ assert self.current_run is not None, "Can only be called during a run." self.current_run.add_artifact(filename, name, metadata, content_type)
def function[add_artifact, parameter[self, filename, name, metadata, content_type]]: constant[Add a file as an artifact. In Sacred terminology an artifact is a file produced by the experiment run. In case of a MongoObserver that means storing the file in the database. This function can only be called during a run, and just calls the :py:meth:`sacred.run.Run.add_artifact` method. Parameters ---------- filename : str name of the file to be stored as artifact name : str, optional optionally set the name of the artifact. Defaults to the relative file-path. metadata: dict, optional optionally attach metadata to the artifact. This only has an effect when using the MongoObserver. content_type: str, optional optionally attach a content-type to the artifact. This only has an effect when using the MongoObserver. ] assert[compare[name[self].current_run is_not constant[None]]] call[name[self].current_run.add_artifact, parameter[name[filename], name[name], name[metadata], name[content_type]]]
keyword[def] identifier[add_artifact] ( identifier[self] , identifier[filename] , identifier[name] = keyword[None] , identifier[metadata] = keyword[None] , identifier[content_type] = keyword[None] , ): literal[string] keyword[assert] identifier[self] . identifier[current_run] keyword[is] keyword[not] keyword[None] , literal[string] identifier[self] . identifier[current_run] . identifier[add_artifact] ( identifier[filename] , identifier[name] , identifier[metadata] , identifier[content_type] )
def add_artifact(self, filename, name=None, metadata=None, content_type=None): """Add a file as an artifact. In Sacred terminology an artifact is a file produced by the experiment run. In case of a MongoObserver that means storing the file in the database. This function can only be called during a run, and just calls the :py:meth:`sacred.run.Run.add_artifact` method. Parameters ---------- filename : str name of the file to be stored as artifact name : str, optional optionally set the name of the artifact. Defaults to the relative file-path. metadata: dict, optional optionally attach metadata to the artifact. This only has an effect when using the MongoObserver. content_type: str, optional optionally attach a content-type to the artifact. This only has an effect when using the MongoObserver. """ assert self.current_run is not None, 'Can only be called during a run.' self.current_run.add_artifact(filename, name, metadata, content_type)
def has_read_permission(self, request, path): """ Just return True if the user is an authenticated staff member. Extensions could base the permissions on the path too. """ user = request.user if not user.is_authenticated(): return False elif user.is_superuser: return True elif user.is_staff: return True else: return False
def function[has_read_permission, parameter[self, request, path]]: constant[ Just return True if the user is an authenticated staff member. Extensions could base the permissions on the path too. ] variable[user] assign[=] name[request].user if <ast.UnaryOp object at 0x7da237eefca0> begin[:] return[constant[False]]
keyword[def] identifier[has_read_permission] ( identifier[self] , identifier[request] , identifier[path] ): literal[string] identifier[user] = identifier[request] . identifier[user] keyword[if] keyword[not] identifier[user] . identifier[is_authenticated] (): keyword[return] keyword[False] keyword[elif] identifier[user] . identifier[is_superuser] : keyword[return] keyword[True] keyword[elif] identifier[user] . identifier[is_staff] : keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False]
def has_read_permission(self, request, path): """ Just return True if the user is an authenticated staff member. Extensions could base the permissions on the path too. """ user = request.user if not user.is_authenticated(): return False # depends on [control=['if'], data=[]] elif user.is_superuser: return True # depends on [control=['if'], data=[]] elif user.is_staff: return True # depends on [control=['if'], data=[]] else: return False
def points(self, include_hidden=False): """Return the number of points awarded to this submission.""" return sum(x.points for x in self.testable_results if include_hidden or not x.testable.is_hidden)
def function[points, parameter[self, include_hidden]]: constant[Return the number of points awarded to this submission.] return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da1b0a818a0>]]]
keyword[def] identifier[points] ( identifier[self] , identifier[include_hidden] = keyword[False] ): literal[string] keyword[return] identifier[sum] ( identifier[x] . identifier[points] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[testable_results] keyword[if] identifier[include_hidden] keyword[or] keyword[not] identifier[x] . identifier[testable] . identifier[is_hidden] )
def points(self, include_hidden=False): """Return the number of points awarded to this submission.""" return sum((x.points for x in self.testable_results if include_hidden or not x.testable.is_hidden))
def play_track(self, track_id=DEFAULT_TRACK_ID, position=0): """Plays a track at the given position.""" self.publish( action='playTrack', resource='audioPlayback/player', publish_response=False, properties={'trackId': track_id, 'position': position} )
def function[play_track, parameter[self, track_id, position]]: constant[Plays a track at the given position.] call[name[self].publish, parameter[]]
keyword[def] identifier[play_track] ( identifier[self] , identifier[track_id] = identifier[DEFAULT_TRACK_ID] , identifier[position] = literal[int] ): literal[string] identifier[self] . identifier[publish] ( identifier[action] = literal[string] , identifier[resource] = literal[string] , identifier[publish_response] = keyword[False] , identifier[properties] ={ literal[string] : identifier[track_id] , literal[string] : identifier[position] } )
def play_track(self, track_id=DEFAULT_TRACK_ID, position=0): """Plays a track at the given position.""" self.publish(action='playTrack', resource='audioPlayback/player', publish_response=False, properties={'trackId': track_id, 'position': position})
def convert_basis(basis_dict, fmt, header=None): ''' Returns the basis set data as a string representing the data in the specified output format ''' # make converters case insensitive fmt = fmt.lower() if fmt not in _converter_map: raise RuntimeError('Unknown basis set format "{}"'.format(fmt)) converter = _converter_map[fmt] # Determine if the converter supports all the types in the basis_dict if converter['valid'] is not None: ftypes = set(basis_dict['function_types']) if ftypes > converter['valid']: raise RuntimeError('Converter {} does not support all function types: {}'.format(fmt, str(ftypes))) # Actually do the conversion ret_str = converter['function'](basis_dict) if header is not None and fmt != 'json': comment_str = _converter_map[fmt]['comment'] header_str = comment_str + comment_str.join(header.splitlines(True)) ret_str = header_str + '\n\n' + ret_str # HACK - Psi4 requires the first non-comment line be spherical/cartesian # so we have to add that before the header if fmt == 'psi4': types = basis_dict['function_types'] harm_type = 'spherical' if 'spherical_gto' in types else 'cartesian' ret_str = harm_type + '\n\n' + ret_str return ret_str
def function[convert_basis, parameter[basis_dict, fmt, header]]: constant[ Returns the basis set data as a string representing the data in the specified output format ] variable[fmt] assign[=] call[name[fmt].lower, parameter[]] if compare[name[fmt] <ast.NotIn object at 0x7da2590d7190> name[_converter_map]] begin[:] <ast.Raise object at 0x7da18dc9ad40> variable[converter] assign[=] call[name[_converter_map]][name[fmt]] if compare[call[name[converter]][constant[valid]] is_not constant[None]] begin[:] variable[ftypes] assign[=] call[name[set], parameter[call[name[basis_dict]][constant[function_types]]]] if compare[name[ftypes] greater[>] call[name[converter]][constant[valid]]] begin[:] <ast.Raise object at 0x7da18dc98970> variable[ret_str] assign[=] call[call[name[converter]][constant[function]], parameter[name[basis_dict]]] if <ast.BoolOp object at 0x7da1b1d53250> begin[:] variable[comment_str] assign[=] call[call[name[_converter_map]][name[fmt]]][constant[comment]] variable[header_str] assign[=] binary_operation[name[comment_str] + call[name[comment_str].join, parameter[call[name[header].splitlines, parameter[constant[True]]]]]] variable[ret_str] assign[=] binary_operation[binary_operation[name[header_str] + constant[ ]] + name[ret_str]] if compare[name[fmt] equal[==] constant[psi4]] begin[:] variable[types] assign[=] call[name[basis_dict]][constant[function_types]] variable[harm_type] assign[=] <ast.IfExp object at 0x7da1b1d516f0> variable[ret_str] assign[=] binary_operation[binary_operation[name[harm_type] + constant[ ]] + name[ret_str]] return[name[ret_str]]
keyword[def] identifier[convert_basis] ( identifier[basis_dict] , identifier[fmt] , identifier[header] = keyword[None] ): literal[string] identifier[fmt] = identifier[fmt] . identifier[lower] () keyword[if] identifier[fmt] keyword[not] keyword[in] identifier[_converter_map] : keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[fmt] )) identifier[converter] = identifier[_converter_map] [ identifier[fmt] ] keyword[if] identifier[converter] [ literal[string] ] keyword[is] keyword[not] keyword[None] : identifier[ftypes] = identifier[set] ( identifier[basis_dict] [ literal[string] ]) keyword[if] identifier[ftypes] > identifier[converter] [ literal[string] ]: keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[fmt] , identifier[str] ( identifier[ftypes] ))) identifier[ret_str] = identifier[converter] [ literal[string] ]( identifier[basis_dict] ) keyword[if] identifier[header] keyword[is] keyword[not] keyword[None] keyword[and] identifier[fmt] != literal[string] : identifier[comment_str] = identifier[_converter_map] [ identifier[fmt] ][ literal[string] ] identifier[header_str] = identifier[comment_str] + identifier[comment_str] . identifier[join] ( identifier[header] . identifier[splitlines] ( keyword[True] )) identifier[ret_str] = identifier[header_str] + literal[string] + identifier[ret_str] keyword[if] identifier[fmt] == literal[string] : identifier[types] = identifier[basis_dict] [ literal[string] ] identifier[harm_type] = literal[string] keyword[if] literal[string] keyword[in] identifier[types] keyword[else] literal[string] identifier[ret_str] = identifier[harm_type] + literal[string] + identifier[ret_str] keyword[return] identifier[ret_str]
def convert_basis(basis_dict, fmt, header=None): """ Returns the basis set data as a string representing the data in the specified output format """ # make converters case insensitive fmt = fmt.lower() if fmt not in _converter_map: raise RuntimeError('Unknown basis set format "{}"'.format(fmt)) # depends on [control=['if'], data=['fmt']] converter = _converter_map[fmt] # Determine if the converter supports all the types in the basis_dict if converter['valid'] is not None: ftypes = set(basis_dict['function_types']) if ftypes > converter['valid']: raise RuntimeError('Converter {} does not support all function types: {}'.format(fmt, str(ftypes))) # depends on [control=['if'], data=['ftypes']] # depends on [control=['if'], data=[]] # Actually do the conversion ret_str = converter['function'](basis_dict) if header is not None and fmt != 'json': comment_str = _converter_map[fmt]['comment'] header_str = comment_str + comment_str.join(header.splitlines(True)) ret_str = header_str + '\n\n' + ret_str # depends on [control=['if'], data=[]] # HACK - Psi4 requires the first non-comment line be spherical/cartesian # so we have to add that before the header if fmt == 'psi4': types = basis_dict['function_types'] harm_type = 'spherical' if 'spherical_gto' in types else 'cartesian' ret_str = harm_type + '\n\n' + ret_str # depends on [control=['if'], data=[]] return ret_str
def get_thellier_gui_meas_mapping(input_df, output=2): """ Get the appropriate mapping for translating measurements in Thellier GUI. This requires special handling for treat_step_num/measurement/measurement_number. Parameters ---------- input_df : pandas DataFrame MagIC records output : int output to this MagIC data model (2 or 3) Output -------- mapping : dict (used in convert_meas_df_thellier_gui) """ if int(output) == 2: thellier_gui_meas3_2_meas2_map = meas_magic3_2_magic2_map.copy() if 'treat_step_num' in input_df.columns: thellier_gui_meas3_2_meas2_map.update( {'treat_step_num': 'measurement_number'}) thellier_gui_meas3_2_meas2_map.pop('measurement') return thellier_gui_meas3_2_meas2_map # 2 --> 3 else: thellier_gui_meas2_2_meas3_map = meas_magic2_2_magic3_map.copy() if 'measurement' in input_df.columns: thellier_gui_meas2_2_meas3_map.pop('measurement_number') try: res = int(input_df.iloc[0]['measurement_number']) if res < 100: thellier_gui_meas2_2_meas3_map['measurement_number'] = 'treat_step_num' except ValueError as ex: pass return thellier_gui_meas2_2_meas3_map
def function[get_thellier_gui_meas_mapping, parameter[input_df, output]]: constant[ Get the appropriate mapping for translating measurements in Thellier GUI. This requires special handling for treat_step_num/measurement/measurement_number. Parameters ---------- input_df : pandas DataFrame MagIC records output : int output to this MagIC data model (2 or 3) Output -------- mapping : dict (used in convert_meas_df_thellier_gui) ] if compare[call[name[int], parameter[name[output]]] equal[==] constant[2]] begin[:] variable[thellier_gui_meas3_2_meas2_map] assign[=] call[name[meas_magic3_2_magic2_map].copy, parameter[]] if compare[constant[treat_step_num] in name[input_df].columns] begin[:] call[name[thellier_gui_meas3_2_meas2_map].update, parameter[dictionary[[<ast.Constant object at 0x7da1b044f820>], [<ast.Constant object at 0x7da1b044efb0>]]]] call[name[thellier_gui_meas3_2_meas2_map].pop, parameter[constant[measurement]]] return[name[thellier_gui_meas3_2_meas2_map]]
keyword[def] identifier[get_thellier_gui_meas_mapping] ( identifier[input_df] , identifier[output] = literal[int] ): literal[string] keyword[if] identifier[int] ( identifier[output] )== literal[int] : identifier[thellier_gui_meas3_2_meas2_map] = identifier[meas_magic3_2_magic2_map] . identifier[copy] () keyword[if] literal[string] keyword[in] identifier[input_df] . identifier[columns] : identifier[thellier_gui_meas3_2_meas2_map] . identifier[update] ( { literal[string] : literal[string] }) identifier[thellier_gui_meas3_2_meas2_map] . identifier[pop] ( literal[string] ) keyword[return] identifier[thellier_gui_meas3_2_meas2_map] keyword[else] : identifier[thellier_gui_meas2_2_meas3_map] = identifier[meas_magic2_2_magic3_map] . identifier[copy] () keyword[if] literal[string] keyword[in] identifier[input_df] . identifier[columns] : identifier[thellier_gui_meas2_2_meas3_map] . identifier[pop] ( literal[string] ) keyword[try] : identifier[res] = identifier[int] ( identifier[input_df] . identifier[iloc] [ literal[int] ][ literal[string] ]) keyword[if] identifier[res] < literal[int] : identifier[thellier_gui_meas2_2_meas3_map] [ literal[string] ]= literal[string] keyword[except] identifier[ValueError] keyword[as] identifier[ex] : keyword[pass] keyword[return] identifier[thellier_gui_meas2_2_meas3_map]
def get_thellier_gui_meas_mapping(input_df, output=2): """ Get the appropriate mapping for translating measurements in Thellier GUI. This requires special handling for treat_step_num/measurement/measurement_number. Parameters ---------- input_df : pandas DataFrame MagIC records output : int output to this MagIC data model (2 or 3) Output -------- mapping : dict (used in convert_meas_df_thellier_gui) """ if int(output) == 2: thellier_gui_meas3_2_meas2_map = meas_magic3_2_magic2_map.copy() if 'treat_step_num' in input_df.columns: thellier_gui_meas3_2_meas2_map.update({'treat_step_num': 'measurement_number'}) thellier_gui_meas3_2_meas2_map.pop('measurement') # depends on [control=['if'], data=[]] return thellier_gui_meas3_2_meas2_map # depends on [control=['if'], data=[]] else: # 2 --> 3 thellier_gui_meas2_2_meas3_map = meas_magic2_2_magic3_map.copy() if 'measurement' in input_df.columns: thellier_gui_meas2_2_meas3_map.pop('measurement_number') try: res = int(input_df.iloc[0]['measurement_number']) if res < 100: thellier_gui_meas2_2_meas3_map['measurement_number'] = 'treat_step_num' # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ValueError as ex: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] return thellier_gui_meas2_2_meas3_map
def get_multifile_object_child_location(self, parent_item_prefix: str, child_name: str) -> str: """ Implementation of the parent abstract method. In this mode the attribute is a file inside the parent object folder :param parent_item_prefix: the absolute file prefix of the parent item. :return: the file prefix for this attribute """ check_var(parent_item_prefix, var_types=str, var_name='parent_item_prefix') check_var(child_name, var_types=str, var_name='item_name') # assert that folder_path is a folder if not isdir(parent_item_prefix): raise ValueError( 'Cannot get attribute item in non-flat mode, parent item path is not a folder : ' + parent_item_prefix) return join(parent_item_prefix, child_name)
def function[get_multifile_object_child_location, parameter[self, parent_item_prefix, child_name]]: constant[ Implementation of the parent abstract method. In this mode the attribute is a file inside the parent object folder :param parent_item_prefix: the absolute file prefix of the parent item. :return: the file prefix for this attribute ] call[name[check_var], parameter[name[parent_item_prefix]]] call[name[check_var], parameter[name[child_name]]] if <ast.UnaryOp object at 0x7da20c993e20> begin[:] <ast.Raise object at 0x7da20c992e60> return[call[name[join], parameter[name[parent_item_prefix], name[child_name]]]]
keyword[def] identifier[get_multifile_object_child_location] ( identifier[self] , identifier[parent_item_prefix] : identifier[str] , identifier[child_name] : identifier[str] )-> identifier[str] : literal[string] identifier[check_var] ( identifier[parent_item_prefix] , identifier[var_types] = identifier[str] , identifier[var_name] = literal[string] ) identifier[check_var] ( identifier[child_name] , identifier[var_types] = identifier[str] , identifier[var_name] = literal[string] ) keyword[if] keyword[not] identifier[isdir] ( identifier[parent_item_prefix] ): keyword[raise] identifier[ValueError] ( literal[string] + identifier[parent_item_prefix] ) keyword[return] identifier[join] ( identifier[parent_item_prefix] , identifier[child_name] )
def get_multifile_object_child_location(self, parent_item_prefix: str, child_name: str) -> str: """ Implementation of the parent abstract method. In this mode the attribute is a file inside the parent object folder :param parent_item_prefix: the absolute file prefix of the parent item. :return: the file prefix for this attribute """ check_var(parent_item_prefix, var_types=str, var_name='parent_item_prefix') check_var(child_name, var_types=str, var_name='item_name') # assert that folder_path is a folder if not isdir(parent_item_prefix): raise ValueError('Cannot get attribute item in non-flat mode, parent item path is not a folder : ' + parent_item_prefix) # depends on [control=['if'], data=[]] return join(parent_item_prefix, child_name)
def remove_patch(self, patch): """ Remove a patch from the patches list """ self._check_patch(patch) patchline = self.patch2line[patch] del self.patch2line[patch] self.patchlines.remove(patchline)
def function[remove_patch, parameter[self, patch]]: constant[ Remove a patch from the patches list ] call[name[self]._check_patch, parameter[name[patch]]] variable[patchline] assign[=] call[name[self].patch2line][name[patch]] <ast.Delete object at 0x7da20c6aa350> call[name[self].patchlines.remove, parameter[name[patchline]]]
keyword[def] identifier[remove_patch] ( identifier[self] , identifier[patch] ): literal[string] identifier[self] . identifier[_check_patch] ( identifier[patch] ) identifier[patchline] = identifier[self] . identifier[patch2line] [ identifier[patch] ] keyword[del] identifier[self] . identifier[patch2line] [ identifier[patch] ] identifier[self] . identifier[patchlines] . identifier[remove] ( identifier[patchline] )
def remove_patch(self, patch): """ Remove a patch from the patches list """ self._check_patch(patch) patchline = self.patch2line[patch] del self.patch2line[patch] self.patchlines.remove(patchline)
def cmd_graph(self, args): '''graph command''' if len(args) == 0: # list current graphs for i in range(len(self.graphs)): print("Graph %u: %s" % (i, self.graphs[i].fields)) return elif args[0] == "help": print("graph <timespan|tickresolution|expression>") elif args[0] == "timespan": if len(args) == 1: print("timespan: %.1f" % self.timespan) return self.timespan = float(args[1]) elif args[0] == "tickresolution": if len(args) == 1: print("tickresolution: %.1f" % self.tickresolution) return self.tickresolution = float(args[1]) else: # start a new graph self.graphs.append(Graph(self, args[:]))
def function[cmd_graph, parameter[self, args]]: constant[graph command] if compare[call[name[len], parameter[name[args]]] equal[==] constant[0]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].graphs]]]]] begin[:] call[name[print], parameter[binary_operation[constant[Graph %u: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2043461a0>, <ast.Attribute object at 0x7da204347dc0>]]]]] return[None]
keyword[def] identifier[cmd_graph] ( identifier[self] , identifier[args] ): literal[string] keyword[if] identifier[len] ( identifier[args] )== literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[graphs] )): identifier[print] ( literal[string] %( identifier[i] , identifier[self] . identifier[graphs] [ identifier[i] ]. identifier[fields] )) keyword[return] keyword[elif] identifier[args] [ literal[int] ]== literal[string] : identifier[print] ( literal[string] ) keyword[elif] identifier[args] [ literal[int] ]== literal[string] : keyword[if] identifier[len] ( identifier[args] )== literal[int] : identifier[print] ( literal[string] % identifier[self] . identifier[timespan] ) keyword[return] identifier[self] . identifier[timespan] = identifier[float] ( identifier[args] [ literal[int] ]) keyword[elif] identifier[args] [ literal[int] ]== literal[string] : keyword[if] identifier[len] ( identifier[args] )== literal[int] : identifier[print] ( literal[string] % identifier[self] . identifier[tickresolution] ) keyword[return] identifier[self] . identifier[tickresolution] = identifier[float] ( identifier[args] [ literal[int] ]) keyword[else] : identifier[self] . identifier[graphs] . identifier[append] ( identifier[Graph] ( identifier[self] , identifier[args] [:]))
def cmd_graph(self, args): """graph command""" if len(args) == 0: # list current graphs for i in range(len(self.graphs)): print('Graph %u: %s' % (i, self.graphs[i].fields)) # depends on [control=['for'], data=['i']] return # depends on [control=['if'], data=[]] elif args[0] == 'help': print('graph <timespan|tickresolution|expression>') # depends on [control=['if'], data=[]] elif args[0] == 'timespan': if len(args) == 1: print('timespan: %.1f' % self.timespan) return # depends on [control=['if'], data=[]] self.timespan = float(args[1]) # depends on [control=['if'], data=[]] elif args[0] == 'tickresolution': if len(args) == 1: print('tickresolution: %.1f' % self.tickresolution) return # depends on [control=['if'], data=[]] self.tickresolution = float(args[1]) # depends on [control=['if'], data=[]] else: # start a new graph self.graphs.append(Graph(self, args[:]))
def centre_of_atoms(atoms, mass_weighted=True): """ Returns centre point of any list of atoms. Parameters ---------- atoms : list List of AMPAL atom objects. mass_weighted : bool, optional If True returns centre of mass, otherwise just geometric centre of points. Returns ------- centre_of_mass : numpy.array 3D coordinate for the centre of mass. """ points = [x._vector for x in atoms] if mass_weighted: masses = [x.mass for x in atoms] else: masses = [] return centre_of_mass(points=points, masses=masses)
def function[centre_of_atoms, parameter[atoms, mass_weighted]]: constant[ Returns centre point of any list of atoms. Parameters ---------- atoms : list List of AMPAL atom objects. mass_weighted : bool, optional If True returns centre of mass, otherwise just geometric centre of points. Returns ------- centre_of_mass : numpy.array 3D coordinate for the centre of mass. ] variable[points] assign[=] <ast.ListComp object at 0x7da1b2629300> if name[mass_weighted] begin[:] variable[masses] assign[=] <ast.ListComp object at 0x7da1b2629ff0> return[call[name[centre_of_mass], parameter[]]]
keyword[def] identifier[centre_of_atoms] ( identifier[atoms] , identifier[mass_weighted] = keyword[True] ): literal[string] identifier[points] =[ identifier[x] . identifier[_vector] keyword[for] identifier[x] keyword[in] identifier[atoms] ] keyword[if] identifier[mass_weighted] : identifier[masses] =[ identifier[x] . identifier[mass] keyword[for] identifier[x] keyword[in] identifier[atoms] ] keyword[else] : identifier[masses] =[] keyword[return] identifier[centre_of_mass] ( identifier[points] = identifier[points] , identifier[masses] = identifier[masses] )
def centre_of_atoms(atoms, mass_weighted=True): """ Returns centre point of any list of atoms. Parameters ---------- atoms : list List of AMPAL atom objects. mass_weighted : bool, optional If True returns centre of mass, otherwise just geometric centre of points. Returns ------- centre_of_mass : numpy.array 3D coordinate for the centre of mass. """ points = [x._vector for x in atoms] if mass_weighted: masses = [x.mass for x in atoms] # depends on [control=['if'], data=[]] else: masses = [] return centre_of_mass(points=points, masses=masses)
def get_user_id(self, attributes): """ For use when CAS_CREATE_USER_WITH_ID is True. Will raise ImproperlyConfigured exceptions when a user_id cannot be accessed. This is important because we shouldn't create Users with automatically assigned ids if we are trying to keep User primary key's in sync. """ if not attributes: raise ImproperlyConfigured("CAS_CREATE_USER_WITH_ID is True, but " "no attributes were provided") user_id = attributes.get('id') if not user_id: raise ImproperlyConfigured("CAS_CREATE_USER_WITH_ID is True, but " "`'id'` is not part of attributes.") return user_id
def function[get_user_id, parameter[self, attributes]]: constant[ For use when CAS_CREATE_USER_WITH_ID is True. Will raise ImproperlyConfigured exceptions when a user_id cannot be accessed. This is important because we shouldn't create Users with automatically assigned ids if we are trying to keep User primary key's in sync. ] if <ast.UnaryOp object at 0x7da1b1e06800> begin[:] <ast.Raise object at 0x7da1b1e06740> variable[user_id] assign[=] call[name[attributes].get, parameter[constant[id]]] if <ast.UnaryOp object at 0x7da1b1e04610> begin[:] <ast.Raise object at 0x7da1b1e06320> return[name[user_id]]
keyword[def] identifier[get_user_id] ( identifier[self] , identifier[attributes] ): literal[string] keyword[if] keyword[not] identifier[attributes] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] literal[string] ) identifier[user_id] = identifier[attributes] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[user_id] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] literal[string] ) keyword[return] identifier[user_id]
def get_user_id(self, attributes): """ For use when CAS_CREATE_USER_WITH_ID is True. Will raise ImproperlyConfigured exceptions when a user_id cannot be accessed. This is important because we shouldn't create Users with automatically assigned ids if we are trying to keep User primary key's in sync. """ if not attributes: raise ImproperlyConfigured('CAS_CREATE_USER_WITH_ID is True, but no attributes were provided') # depends on [control=['if'], data=[]] user_id = attributes.get('id') if not user_id: raise ImproperlyConfigured("CAS_CREATE_USER_WITH_ID is True, but `'id'` is not part of attributes.") # depends on [control=['if'], data=[]] return user_id
def sanitize(self, val): """Given a Variable and a value, cleans it out""" if self.type == NUMBER: try: return clamp(self.min, self.max, float(val)) except ValueError: return 0.0 elif self.type == TEXT: try: return unicode(str(val), "utf_8", "replace") except: return "" elif self.type == BOOLEAN: if unicode(val).lower() in ("true", "1", "yes"): return True else: return False
def function[sanitize, parameter[self, val]]: constant[Given a Variable and a value, cleans it out] if compare[name[self].type equal[==] name[NUMBER]] begin[:] <ast.Try object at 0x7da1aff4f100>
keyword[def] identifier[sanitize] ( identifier[self] , identifier[val] ): literal[string] keyword[if] identifier[self] . identifier[type] == identifier[NUMBER] : keyword[try] : keyword[return] identifier[clamp] ( identifier[self] . identifier[min] , identifier[self] . identifier[max] , identifier[float] ( identifier[val] )) keyword[except] identifier[ValueError] : keyword[return] literal[int] keyword[elif] identifier[self] . identifier[type] == identifier[TEXT] : keyword[try] : keyword[return] identifier[unicode] ( identifier[str] ( identifier[val] ), literal[string] , literal[string] ) keyword[except] : keyword[return] literal[string] keyword[elif] identifier[self] . identifier[type] == identifier[BOOLEAN] : keyword[if] identifier[unicode] ( identifier[val] ). identifier[lower] () keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False]
def sanitize(self, val): """Given a Variable and a value, cleans it out""" if self.type == NUMBER: try: return clamp(self.min, self.max, float(val)) # depends on [control=['try'], data=[]] except ValueError: return 0.0 # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif self.type == TEXT: try: return unicode(str(val), 'utf_8', 'replace') # depends on [control=['try'], data=[]] except: return '' # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif self.type == BOOLEAN: if unicode(val).lower() in ('true', '1', 'yes'): return True # depends on [control=['if'], data=[]] else: return False # depends on [control=['if'], data=[]]
def load_model(itos_filename, classifier_filename, num_classes): """Load the classifier and int to string mapping Args: itos_filename (str): The filename of the int to string mapping file (usually called itos.pkl) classifier_filename (str): The filename of the trained classifier Returns: string to int mapping, trained classifer model """ # load the int to string mapping file itos = pickle.load(Path(itos_filename).open('rb')) # turn it into a string to int mapping (which is what we need) stoi = collections.defaultdict(lambda:0, {str(v):int(k) for k,v in enumerate(itos)}) # these parameters aren't used, but this is the easiest way to get a model bptt,em_sz,nh,nl = 70,400,1150,3 dps = np.array([0.4,0.5,0.05,0.3,0.4])*0.5 vs = len(itos) model = get_rnn_classifer(bptt, 20*70, num_classes, vs, emb_sz=em_sz, n_hid=nh, n_layers=nl, pad_token=1, layers=[em_sz*3, 50, num_classes], drops=[dps[4], 0.1], dropouti=dps[0], wdrop=dps[1], dropoute=dps[2], dropouth=dps[3]) # load the trained classifier model.load_state_dict(torch.load(classifier_filename, map_location=lambda storage, loc: storage)) # put the classifier into evaluation mode model.reset() model.eval() return stoi, model
def function[load_model, parameter[itos_filename, classifier_filename, num_classes]]: constant[Load the classifier and int to string mapping Args: itos_filename (str): The filename of the int to string mapping file (usually called itos.pkl) classifier_filename (str): The filename of the trained classifier Returns: string to int mapping, trained classifer model ] variable[itos] assign[=] call[name[pickle].load, parameter[call[call[name[Path], parameter[name[itos_filename]]].open, parameter[constant[rb]]]]] variable[stoi] assign[=] call[name[collections].defaultdict, parameter[<ast.Lambda object at 0x7da1b202a4a0>, <ast.DictComp object at 0x7da1b202bb50>]] <ast.Tuple object at 0x7da1b2029540> assign[=] tuple[[<ast.Constant object at 0x7da20cabe080>, <ast.Constant object at 0x7da20cabd6c0>, <ast.Constant object at 0x7da20cabf5e0>, <ast.Constant object at 0x7da20cabd7e0>]] variable[dps] assign[=] binary_operation[call[name[np].array, parameter[list[[<ast.Constant object at 0x7da1b1dd8e20>, <ast.Constant object at 0x7da1b1dd8f70>, <ast.Constant object at 0x7da1b1dd98d0>, <ast.Constant object at 0x7da1b1dd9330>, <ast.Constant object at 0x7da1b1ddae60>]]]] * constant[0.5]] variable[vs] assign[=] call[name[len], parameter[name[itos]]] variable[model] assign[=] call[name[get_rnn_classifer], parameter[name[bptt], binary_operation[constant[20] * constant[70]], name[num_classes], name[vs]]] call[name[model].load_state_dict, parameter[call[name[torch].load, parameter[name[classifier_filename]]]]] call[name[model].reset, parameter[]] call[name[model].eval, parameter[]] return[tuple[[<ast.Name object at 0x7da20e9b34c0>, <ast.Name object at 0x7da20e9b2950>]]]
keyword[def] identifier[load_model] ( identifier[itos_filename] , identifier[classifier_filename] , identifier[num_classes] ): literal[string] identifier[itos] = identifier[pickle] . identifier[load] ( identifier[Path] ( identifier[itos_filename] ). identifier[open] ( literal[string] )) identifier[stoi] = identifier[collections] . identifier[defaultdict] ( keyword[lambda] : literal[int] ,{ identifier[str] ( identifier[v] ): identifier[int] ( identifier[k] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[enumerate] ( identifier[itos] )}) identifier[bptt] , identifier[em_sz] , identifier[nh] , identifier[nl] = literal[int] , literal[int] , literal[int] , literal[int] identifier[dps] = identifier[np] . identifier[array] ([ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ])* literal[int] identifier[vs] = identifier[len] ( identifier[itos] ) identifier[model] = identifier[get_rnn_classifer] ( identifier[bptt] , literal[int] * literal[int] , identifier[num_classes] , identifier[vs] , identifier[emb_sz] = identifier[em_sz] , identifier[n_hid] = identifier[nh] , identifier[n_layers] = identifier[nl] , identifier[pad_token] = literal[int] , identifier[layers] =[ identifier[em_sz] * literal[int] , literal[int] , identifier[num_classes] ], identifier[drops] =[ identifier[dps] [ literal[int] ], literal[int] ], identifier[dropouti] = identifier[dps] [ literal[int] ], identifier[wdrop] = identifier[dps] [ literal[int] ], identifier[dropoute] = identifier[dps] [ literal[int] ], identifier[dropouth] = identifier[dps] [ literal[int] ]) identifier[model] . identifier[load_state_dict] ( identifier[torch] . identifier[load] ( identifier[classifier_filename] , identifier[map_location] = keyword[lambda] identifier[storage] , identifier[loc] : identifier[storage] )) identifier[model] . identifier[reset] () identifier[model] . identifier[eval] () keyword[return] identifier[stoi] , identifier[model]
def load_model(itos_filename, classifier_filename, num_classes): """Load the classifier and int to string mapping Args: itos_filename (str): The filename of the int to string mapping file (usually called itos.pkl) classifier_filename (str): The filename of the trained classifier Returns: string to int mapping, trained classifer model """ # load the int to string mapping file itos = pickle.load(Path(itos_filename).open('rb')) # turn it into a string to int mapping (which is what we need) stoi = collections.defaultdict(lambda : 0, {str(v): int(k) for (k, v) in enumerate(itos)}) # these parameters aren't used, but this is the easiest way to get a model (bptt, em_sz, nh, nl) = (70, 400, 1150, 3) dps = np.array([0.4, 0.5, 0.05, 0.3, 0.4]) * 0.5 vs = len(itos) model = get_rnn_classifer(bptt, 20 * 70, num_classes, vs, emb_sz=em_sz, n_hid=nh, n_layers=nl, pad_token=1, layers=[em_sz * 3, 50, num_classes], drops=[dps[4], 0.1], dropouti=dps[0], wdrop=dps[1], dropoute=dps[2], dropouth=dps[3]) # load the trained classifier model.load_state_dict(torch.load(classifier_filename, map_location=lambda storage, loc: storage)) # put the classifier into evaluation mode model.reset() model.eval() return (stoi, model)
def section(self, title=None): """ Returns the :class:`~plexapi.library.LibrarySection` that matches the specified title. Parameters: title (str): Title of the section to return. """ for section in self.sections(): if section.title.lower() == title.lower(): return section raise NotFound('Invalid library section: %s' % title)
def function[section, parameter[self, title]]: constant[ Returns the :class:`~plexapi.library.LibrarySection` that matches the specified title. Parameters: title (str): Title of the section to return. ] for taget[name[section]] in starred[call[name[self].sections, parameter[]]] begin[:] if compare[call[name[section].title.lower, parameter[]] equal[==] call[name[title].lower, parameter[]]] begin[:] return[name[section]] <ast.Raise object at 0x7da20c6a9c00>
keyword[def] identifier[section] ( identifier[self] , identifier[title] = keyword[None] ): literal[string] keyword[for] identifier[section] keyword[in] identifier[self] . identifier[sections] (): keyword[if] identifier[section] . identifier[title] . identifier[lower] ()== identifier[title] . identifier[lower] (): keyword[return] identifier[section] keyword[raise] identifier[NotFound] ( literal[string] % identifier[title] )
def section(self, title=None): """ Returns the :class:`~plexapi.library.LibrarySection` that matches the specified title. Parameters: title (str): Title of the section to return. """ for section in self.sections(): if section.title.lower() == title.lower(): return section # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['section']] raise NotFound('Invalid library section: %s' % title)
def fully_correlated_conditional(Kmn, Kmm, Knn, f, *, full_cov=False, full_output_cov=False, q_sqrt=None, white=False): """ This function handles conditioning of multi-output GPs in the case where the conditioning points are all fully correlated, in both the prior and posterior. :param Kmn: LM x N x P :param Kmm: LM x LM :param Knn: N x P or N x P x N x P :param f: data matrix, LM x 1 :param q_sqrt: 1 x LM x LM or 1 x ML :param full_cov: calculate covariance between inputs :param full_output_cov: calculate covariance between outputs :param white: use whitened representation :return: - mean: N x P - variance: N x P, N x P x P, P x N x N, N x P x N x P """ m, v = fully_correlated_conditional_repeat(Kmn, Kmm, Knn, f, full_cov=full_cov, full_output_cov=full_output_cov, q_sqrt=q_sqrt, white=white) return m[0, ...], v[0, ...]
def function[fully_correlated_conditional, parameter[Kmn, Kmm, Knn, f]]: constant[ This function handles conditioning of multi-output GPs in the case where the conditioning points are all fully correlated, in both the prior and posterior. :param Kmn: LM x N x P :param Kmm: LM x LM :param Knn: N x P or N x P x N x P :param f: data matrix, LM x 1 :param q_sqrt: 1 x LM x LM or 1 x ML :param full_cov: calculate covariance between inputs :param full_output_cov: calculate covariance between outputs :param white: use whitened representation :return: - mean: N x P - variance: N x P, N x P x P, P x N x N, N x P x N x P ] <ast.Tuple object at 0x7da1b21edf90> assign[=] call[name[fully_correlated_conditional_repeat], parameter[name[Kmn], name[Kmm], name[Knn], name[f]]] return[tuple[[<ast.Subscript object at 0x7da1b21ecb80>, <ast.Subscript object at 0x7da1b21a0400>]]]
keyword[def] identifier[fully_correlated_conditional] ( identifier[Kmn] , identifier[Kmm] , identifier[Knn] , identifier[f] ,*, identifier[full_cov] = keyword[False] , identifier[full_output_cov] = keyword[False] , identifier[q_sqrt] = keyword[None] , identifier[white] = keyword[False] ): literal[string] identifier[m] , identifier[v] = identifier[fully_correlated_conditional_repeat] ( identifier[Kmn] , identifier[Kmm] , identifier[Knn] , identifier[f] , identifier[full_cov] = identifier[full_cov] , identifier[full_output_cov] = identifier[full_output_cov] , identifier[q_sqrt] = identifier[q_sqrt] , identifier[white] = identifier[white] ) keyword[return] identifier[m] [ literal[int] ,...], identifier[v] [ literal[int] ,...]
def fully_correlated_conditional(Kmn, Kmm, Knn, f, *, full_cov=False, full_output_cov=False, q_sqrt=None, white=False): """ This function handles conditioning of multi-output GPs in the case where the conditioning points are all fully correlated, in both the prior and posterior. :param Kmn: LM x N x P :param Kmm: LM x LM :param Knn: N x P or N x P x N x P :param f: data matrix, LM x 1 :param q_sqrt: 1 x LM x LM or 1 x ML :param full_cov: calculate covariance between inputs :param full_output_cov: calculate covariance between outputs :param white: use whitened representation :return: - mean: N x P - variance: N x P, N x P x P, P x N x N, N x P x N x P """ (m, v) = fully_correlated_conditional_repeat(Kmn, Kmm, Knn, f, full_cov=full_cov, full_output_cov=full_output_cov, q_sqrt=q_sqrt, white=white) return (m[0, ...], v[0, ...])
def getBehavior(name, id=None): """ Return a matching behavior if it exists, or None. If id is None, return the default for name. """ name = name.upper() if name in __behaviorRegistry: if id: for n, behavior in __behaviorRegistry[name]: if n == id: return behavior return __behaviorRegistry[name][0][1] return None
def function[getBehavior, parameter[name, id]]: constant[ Return a matching behavior if it exists, or None. If id is None, return the default for name. ] variable[name] assign[=] call[name[name].upper, parameter[]] if compare[name[name] in name[__behaviorRegistry]] begin[:] if name[id] begin[:] for taget[tuple[[<ast.Name object at 0x7da18fe934c0>, <ast.Name object at 0x7da18fe91cf0>]]] in starred[call[name[__behaviorRegistry]][name[name]]] begin[:] if compare[name[n] equal[==] name[id]] begin[:] return[name[behavior]] return[call[call[call[name[__behaviorRegistry]][name[name]]][constant[0]]][constant[1]]] return[constant[None]]
keyword[def] identifier[getBehavior] ( identifier[name] , identifier[id] = keyword[None] ): literal[string] identifier[name] = identifier[name] . identifier[upper] () keyword[if] identifier[name] keyword[in] identifier[__behaviorRegistry] : keyword[if] identifier[id] : keyword[for] identifier[n] , identifier[behavior] keyword[in] identifier[__behaviorRegistry] [ identifier[name] ]: keyword[if] identifier[n] == identifier[id] : keyword[return] identifier[behavior] keyword[return] identifier[__behaviorRegistry] [ identifier[name] ][ literal[int] ][ literal[int] ] keyword[return] keyword[None]
def getBehavior(name, id=None): """ Return a matching behavior if it exists, or None. If id is None, return the default for name. """ name = name.upper() if name in __behaviorRegistry: if id: for (n, behavior) in __behaviorRegistry[name]: if n == id: return behavior # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] return __behaviorRegistry[name][0][1] # depends on [control=['if'], data=['name', '__behaviorRegistry']] return None
def get_group_members(self, group): """Returns a ``list`` with the group's members or ``None`` if unsuccessful. :param str group: Group we want users for. """ conn = self.bind try: records = conn.search_s( current_app.config['LDAP_BASE_DN'], ldap.SCOPE_SUBTREE, ldap_filter.filter_format( current_app.config['LDAP_GROUP_OBJECT_FILTER'], (group,)), [current_app.config['LDAP_GROUP_MEMBERS_FIELD']]) conn.unbind_s() if records: if current_app.config['LDAP_GROUP_MEMBERS_FIELD'] in \ records[0][1]: members = records[0][1][ current_app.config['LDAP_GROUP_MEMBERS_FIELD']] if sys.version_info[0] > 2: members = [m.decode('utf-8') for m in members] return members except ldap.LDAPError as e: raise LDAPException(self.error(e.args))
def function[get_group_members, parameter[self, group]]: constant[Returns a ``list`` with the group's members or ``None`` if unsuccessful. :param str group: Group we want users for. ] variable[conn] assign[=] name[self].bind <ast.Try object at 0x7da204566380>
keyword[def] identifier[get_group_members] ( identifier[self] , identifier[group] ): literal[string] identifier[conn] = identifier[self] . identifier[bind] keyword[try] : identifier[records] = identifier[conn] . identifier[search_s] ( identifier[current_app] . identifier[config] [ literal[string] ], identifier[ldap] . identifier[SCOPE_SUBTREE] , identifier[ldap_filter] . identifier[filter_format] ( identifier[current_app] . identifier[config] [ literal[string] ],( identifier[group] ,)), [ identifier[current_app] . identifier[config] [ literal[string] ]]) identifier[conn] . identifier[unbind_s] () keyword[if] identifier[records] : keyword[if] identifier[current_app] . identifier[config] [ literal[string] ] keyword[in] identifier[records] [ literal[int] ][ literal[int] ]: identifier[members] = identifier[records] [ literal[int] ][ literal[int] ][ identifier[current_app] . identifier[config] [ literal[string] ]] keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]> literal[int] : identifier[members] =[ identifier[m] . identifier[decode] ( literal[string] ) keyword[for] identifier[m] keyword[in] identifier[members] ] keyword[return] identifier[members] keyword[except] identifier[ldap] . identifier[LDAPError] keyword[as] identifier[e] : keyword[raise] identifier[LDAPException] ( identifier[self] . identifier[error] ( identifier[e] . identifier[args] ))
def get_group_members(self, group): """Returns a ``list`` with the group's members or ``None`` if unsuccessful. :param str group: Group we want users for. """ conn = self.bind try: records = conn.search_s(current_app.config['LDAP_BASE_DN'], ldap.SCOPE_SUBTREE, ldap_filter.filter_format(current_app.config['LDAP_GROUP_OBJECT_FILTER'], (group,)), [current_app.config['LDAP_GROUP_MEMBERS_FIELD']]) conn.unbind_s() if records: if current_app.config['LDAP_GROUP_MEMBERS_FIELD'] in records[0][1]: members = records[0][1][current_app.config['LDAP_GROUP_MEMBERS_FIELD']] if sys.version_info[0] > 2: members = [m.decode('utf-8') for m in members] # depends on [control=['if'], data=[]] return members # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ldap.LDAPError as e: raise LDAPException(self.error(e.args)) # depends on [control=['except'], data=['e']]
def hold(name, seconds): ''' Wait for a given period of time, then fire a result of True, requiring this state allows for an action to be blocked for evaluation based on time USAGE: .. code-block:: yaml hold_on_a_moment: timer.hold: - seconds: 30 ''' ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} start = time.time() if 'timer' not in __context__: __context__['timer'] = {} if name not in __context__['timer']: __context__['timer'][name] = start if (start - __context__['timer'][name]) > seconds: ret['result'] = True __context__['timer'][name] = start return ret
def function[hold, parameter[name, seconds]]: constant[ Wait for a given period of time, then fire a result of True, requiring this state allows for an action to be blocked for evaluation based on time USAGE: .. code-block:: yaml hold_on_a_moment: timer.hold: - seconds: 30 ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da204567ca0>, <ast.Constant object at 0x7da204566650>, <ast.Constant object at 0x7da204567370>, <ast.Constant object at 0x7da204567700>], [<ast.Name object at 0x7da204567610>, <ast.Constant object at 0x7da204564460>, <ast.Constant object at 0x7da204565810>, <ast.Dict object at 0x7da204565540>]] variable[start] assign[=] call[name[time].time, parameter[]] if compare[constant[timer] <ast.NotIn object at 0x7da2590d7190> name[__context__]] begin[:] call[name[__context__]][constant[timer]] assign[=] dictionary[[], []] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> call[name[__context__]][constant[timer]]] begin[:] call[call[name[__context__]][constant[timer]]][name[name]] assign[=] name[start] if compare[binary_operation[name[start] - call[call[name[__context__]][constant[timer]]][name[name]]] greater[>] name[seconds]] begin[:] call[name[ret]][constant[result]] assign[=] constant[True] call[call[name[__context__]][constant[timer]]][name[name]] assign[=] name[start] return[name[ret]]
keyword[def] identifier[hold] ( identifier[name] , identifier[seconds] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[False] , literal[string] : literal[string] , literal[string] :{}} identifier[start] = identifier[time] . identifier[time] () keyword[if] literal[string] keyword[not] keyword[in] identifier[__context__] : identifier[__context__] [ literal[string] ]={} keyword[if] identifier[name] keyword[not] keyword[in] identifier[__context__] [ literal[string] ]: identifier[__context__] [ literal[string] ][ identifier[name] ]= identifier[start] keyword[if] ( identifier[start] - identifier[__context__] [ literal[string] ][ identifier[name] ])> identifier[seconds] : identifier[ret] [ literal[string] ]= keyword[True] identifier[__context__] [ literal[string] ][ identifier[name] ]= identifier[start] keyword[return] identifier[ret]
def hold(name, seconds): """ Wait for a given period of time, then fire a result of True, requiring this state allows for an action to be blocked for evaluation based on time USAGE: .. code-block:: yaml hold_on_a_moment: timer.hold: - seconds: 30 """ ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} start = time.time() if 'timer' not in __context__: __context__['timer'] = {} # depends on [control=['if'], data=['__context__']] if name not in __context__['timer']: __context__['timer'][name] = start # depends on [control=['if'], data=['name']] if start - __context__['timer'][name] > seconds: ret['result'] = True __context__['timer'][name] = start # depends on [control=['if'], data=[]] return ret
def LoadConfig(config_obj, config_file=None, config_fd=None, secondary_configs=None, contexts=None, reset=False, parser=ConfigFileParser): """Initialize a ConfigManager with the specified options. Args: config_obj: The ConfigManager object to use and update. If None, one will be created. config_file: Filename to read the config from. config_fd: A file-like object to read config data from. secondary_configs: A list of secondary config URLs to load. contexts: Add these contexts to the config object. reset: Completely wipe previous config before doing the load. parser: Specify which parser to use. Returns: The resulting config object. The one passed in, unless None was specified. """ if config_obj is None or reset: # Create a new config object. config_obj = _CONFIG.MakeNewConfig() # Initialize the config with a filename or file like object. if config_file is not None: config_obj.Initialize(filename=config_file, must_exist=True, parser=parser) elif config_fd is not None: config_obj.Initialize(fd=config_fd, parser=parser) # Load all secondary files. if secondary_configs: for config_file in secondary_configs: config_obj.LoadSecondaryConfig(config_file) if contexts: for context in contexts: config_obj.AddContext(context) return config_obj
def function[LoadConfig, parameter[config_obj, config_file, config_fd, secondary_configs, contexts, reset, parser]]: constant[Initialize a ConfigManager with the specified options. Args: config_obj: The ConfigManager object to use and update. If None, one will be created. config_file: Filename to read the config from. config_fd: A file-like object to read config data from. secondary_configs: A list of secondary config URLs to load. contexts: Add these contexts to the config object. reset: Completely wipe previous config before doing the load. parser: Specify which parser to use. Returns: The resulting config object. The one passed in, unless None was specified. ] if <ast.BoolOp object at 0x7da1b1b59480> begin[:] variable[config_obj] assign[=] call[name[_CONFIG].MakeNewConfig, parameter[]] if compare[name[config_file] is_not constant[None]] begin[:] call[name[config_obj].Initialize, parameter[]] if name[secondary_configs] begin[:] for taget[name[config_file]] in starred[name[secondary_configs]] begin[:] call[name[config_obj].LoadSecondaryConfig, parameter[name[config_file]]] if name[contexts] begin[:] for taget[name[context]] in starred[name[contexts]] begin[:] call[name[config_obj].AddContext, parameter[name[context]]] return[name[config_obj]]
keyword[def] identifier[LoadConfig] ( identifier[config_obj] , identifier[config_file] = keyword[None] , identifier[config_fd] = keyword[None] , identifier[secondary_configs] = keyword[None] , identifier[contexts] = keyword[None] , identifier[reset] = keyword[False] , identifier[parser] = identifier[ConfigFileParser] ): literal[string] keyword[if] identifier[config_obj] keyword[is] keyword[None] keyword[or] identifier[reset] : identifier[config_obj] = identifier[_CONFIG] . identifier[MakeNewConfig] () keyword[if] identifier[config_file] keyword[is] keyword[not] keyword[None] : identifier[config_obj] . identifier[Initialize] ( identifier[filename] = identifier[config_file] , identifier[must_exist] = keyword[True] , identifier[parser] = identifier[parser] ) keyword[elif] identifier[config_fd] keyword[is] keyword[not] keyword[None] : identifier[config_obj] . identifier[Initialize] ( identifier[fd] = identifier[config_fd] , identifier[parser] = identifier[parser] ) keyword[if] identifier[secondary_configs] : keyword[for] identifier[config_file] keyword[in] identifier[secondary_configs] : identifier[config_obj] . identifier[LoadSecondaryConfig] ( identifier[config_file] ) keyword[if] identifier[contexts] : keyword[for] identifier[context] keyword[in] identifier[contexts] : identifier[config_obj] . identifier[AddContext] ( identifier[context] ) keyword[return] identifier[config_obj]
def LoadConfig(config_obj, config_file=None, config_fd=None, secondary_configs=None, contexts=None, reset=False, parser=ConfigFileParser): """Initialize a ConfigManager with the specified options. Args: config_obj: The ConfigManager object to use and update. If None, one will be created. config_file: Filename to read the config from. config_fd: A file-like object to read config data from. secondary_configs: A list of secondary config URLs to load. contexts: Add these contexts to the config object. reset: Completely wipe previous config before doing the load. parser: Specify which parser to use. Returns: The resulting config object. The one passed in, unless None was specified. """ if config_obj is None or reset: # Create a new config object. config_obj = _CONFIG.MakeNewConfig() # depends on [control=['if'], data=[]] # Initialize the config with a filename or file like object. if config_file is not None: config_obj.Initialize(filename=config_file, must_exist=True, parser=parser) # depends on [control=['if'], data=['config_file']] elif config_fd is not None: config_obj.Initialize(fd=config_fd, parser=parser) # depends on [control=['if'], data=['config_fd']] # Load all secondary files. if secondary_configs: for config_file in secondary_configs: config_obj.LoadSecondaryConfig(config_file) # depends on [control=['for'], data=['config_file']] # depends on [control=['if'], data=[]] if contexts: for context in contexts: config_obj.AddContext(context) # depends on [control=['for'], data=['context']] # depends on [control=['if'], data=[]] return config_obj
def _inception_table_links(self, href_list): """ Sometimes the EPA likes to nest their models and tables -- model within a model within a model -- so this internal method tries to clear all that up. """ tables = set() for link in href_list: if not link.startswith('http://'): link = self.agency_url + link html = urlopen(link).read() doc = lh.fromstring(html) area = doc.cssselect('map area') if area: # Then this is a model containing models. tables.update((a.attrib['href'] for a in area)) else: # The link is a table without additional models. tables.update(link) return tables
def function[_inception_table_links, parameter[self, href_list]]: constant[ Sometimes the EPA likes to nest their models and tables -- model within a model within a model -- so this internal method tries to clear all that up. ] variable[tables] assign[=] call[name[set], parameter[]] for taget[name[link]] in starred[name[href_list]] begin[:] if <ast.UnaryOp object at 0x7da20e961f30> begin[:] variable[link] assign[=] binary_operation[name[self].agency_url + name[link]] variable[html] assign[=] call[call[name[urlopen], parameter[name[link]]].read, parameter[]] variable[doc] assign[=] call[name[lh].fromstring, parameter[name[html]]] variable[area] assign[=] call[name[doc].cssselect, parameter[constant[map area]]] if name[area] begin[:] call[name[tables].update, parameter[<ast.GeneratorExp object at 0x7da1b2877460>]] return[name[tables]]
keyword[def] identifier[_inception_table_links] ( identifier[self] , identifier[href_list] ): literal[string] identifier[tables] = identifier[set] () keyword[for] identifier[link] keyword[in] identifier[href_list] : keyword[if] keyword[not] identifier[link] . identifier[startswith] ( literal[string] ): identifier[link] = identifier[self] . identifier[agency_url] + identifier[link] identifier[html] = identifier[urlopen] ( identifier[link] ). identifier[read] () identifier[doc] = identifier[lh] . identifier[fromstring] ( identifier[html] ) identifier[area] = identifier[doc] . identifier[cssselect] ( literal[string] ) keyword[if] identifier[area] : identifier[tables] . identifier[update] (( identifier[a] . identifier[attrib] [ literal[string] ] keyword[for] identifier[a] keyword[in] identifier[area] )) keyword[else] : identifier[tables] . identifier[update] ( identifier[link] ) keyword[return] identifier[tables]
def _inception_table_links(self, href_list): """ Sometimes the EPA likes to nest their models and tables -- model within a model within a model -- so this internal method tries to clear all that up. """ tables = set() for link in href_list: if not link.startswith('http://'): link = self.agency_url + link # depends on [control=['if'], data=[]] html = urlopen(link).read() doc = lh.fromstring(html) area = doc.cssselect('map area') if area: # Then this is a model containing models. tables.update((a.attrib['href'] for a in area)) # depends on [control=['if'], data=[]] else: # The link is a table without additional models. tables.update(link) # depends on [control=['for'], data=['link']] return tables
def _word_ngrams(self, tokens): """ Turn tokens into a tokens of n-grams ref: https://github.com/scikit-learn/scikit-learn/blob/ef5cb84a/sklearn/feature_extraction/text.py#L124-L153 """ # handle stop words if self.stop_words is not None: tokens = [w for w in tokens if w not in self.stop_words] # handle token n-grams min_n, max_n = self.ngram_range if max_n != 1: original_tokens = tokens if min_n == 1: # no need to do any slicing for unigrams # just iterate through the original tokens tokens = list(original_tokens) min_n += 1 else: tokens = [] n_original_tokens = len(original_tokens) # bind method outside of loop to reduce overhead tokens_append = tokens.append space_join = " ".join for n in range(min_n, min(max_n + 1, n_original_tokens + 1)): for i in range(n_original_tokens - n + 1): tokens_append(space_join(original_tokens[i: i + n])) return tokens
def function[_word_ngrams, parameter[self, tokens]]: constant[ Turn tokens into a tokens of n-grams ref: https://github.com/scikit-learn/scikit-learn/blob/ef5cb84a/sklearn/feature_extraction/text.py#L124-L153 ] if compare[name[self].stop_words is_not constant[None]] begin[:] variable[tokens] assign[=] <ast.ListComp object at 0x7da1b016fd00> <ast.Tuple object at 0x7da1b016c820> assign[=] name[self].ngram_range if compare[name[max_n] not_equal[!=] constant[1]] begin[:] variable[original_tokens] assign[=] name[tokens] if compare[name[min_n] equal[==] constant[1]] begin[:] variable[tokens] assign[=] call[name[list], parameter[name[original_tokens]]] <ast.AugAssign object at 0x7da1b016e830> variable[n_original_tokens] assign[=] call[name[len], parameter[name[original_tokens]]] variable[tokens_append] assign[=] name[tokens].append variable[space_join] assign[=] constant[ ].join for taget[name[n]] in starred[call[name[range], parameter[name[min_n], call[name[min], parameter[binary_operation[name[max_n] + constant[1]], binary_operation[name[n_original_tokens] + constant[1]]]]]]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[binary_operation[name[n_original_tokens] - name[n]] + constant[1]]]]] begin[:] call[name[tokens_append], parameter[call[name[space_join], parameter[call[name[original_tokens]][<ast.Slice object at 0x7da1b013c850>]]]]] return[name[tokens]]
keyword[def] identifier[_word_ngrams] ( identifier[self] , identifier[tokens] ): literal[string] keyword[if] identifier[self] . identifier[stop_words] keyword[is] keyword[not] keyword[None] : identifier[tokens] =[ identifier[w] keyword[for] identifier[w] keyword[in] identifier[tokens] keyword[if] identifier[w] keyword[not] keyword[in] identifier[self] . identifier[stop_words] ] identifier[min_n] , identifier[max_n] = identifier[self] . identifier[ngram_range] keyword[if] identifier[max_n] != literal[int] : identifier[original_tokens] = identifier[tokens] keyword[if] identifier[min_n] == literal[int] : identifier[tokens] = identifier[list] ( identifier[original_tokens] ) identifier[min_n] += literal[int] keyword[else] : identifier[tokens] =[] identifier[n_original_tokens] = identifier[len] ( identifier[original_tokens] ) identifier[tokens_append] = identifier[tokens] . identifier[append] identifier[space_join] = literal[string] . identifier[join] keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[min_n] , identifier[min] ( identifier[max_n] + literal[int] , identifier[n_original_tokens] + literal[int] )): keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_original_tokens] - identifier[n] + literal[int] ): identifier[tokens_append] ( identifier[space_join] ( identifier[original_tokens] [ identifier[i] : identifier[i] + identifier[n] ])) keyword[return] identifier[tokens]
def _word_ngrams(self, tokens): """ Turn tokens into a tokens of n-grams ref: https://github.com/scikit-learn/scikit-learn/blob/ef5cb84a/sklearn/feature_extraction/text.py#L124-L153 """ # handle stop words if self.stop_words is not None: tokens = [w for w in tokens if w not in self.stop_words] # depends on [control=['if'], data=[]] # handle token n-grams (min_n, max_n) = self.ngram_range if max_n != 1: original_tokens = tokens if min_n == 1: # no need to do any slicing for unigrams # just iterate through the original tokens tokens = list(original_tokens) min_n += 1 # depends on [control=['if'], data=['min_n']] else: tokens = [] n_original_tokens = len(original_tokens) # bind method outside of loop to reduce overhead tokens_append = tokens.append space_join = ' '.join for n in range(min_n, min(max_n + 1, n_original_tokens + 1)): for i in range(n_original_tokens - n + 1): tokens_append(space_join(original_tokens[i:i + n])) # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['n']] # depends on [control=['if'], data=['max_n']] return tokens
def selection_ranges(self): """ Return a list of (from, to) tuples for the selection or none if nothing was selected. start and end position are always included in the selection. This will yield several (from, to) tuples in case of a BLOCK selection. """ if self.selection: from_, to = sorted([self.cursor_position, self.selection.original_cursor_position]) if self.selection.type == SelectionType.BLOCK: from_line, from_column = self.translate_index_to_position(from_) to_line, to_column = self.translate_index_to_position(to) from_column, to_column = sorted([from_column, to_column]) lines = self.lines for l in range(from_line, to_line + 1): line_length = len(lines[l]) if from_column < line_length: yield (self.translate_row_col_to_index(l, from_column), self.translate_row_col_to_index(l, min(line_length - 1, to_column))) else: # In case of a LINES selection, go to the start/end of the lines. if self.selection.type == SelectionType.LINES: from_ = max(0, self.text.rfind('\n', 0, from_) + 1) if self.text.find('\n', to) >= 0: to = self.text.find('\n', to) else: to = len(self.text) - 1 yield from_, to
def function[selection_ranges, parameter[self]]: constant[ Return a list of (from, to) tuples for the selection or none if nothing was selected. start and end position are always included in the selection. This will yield several (from, to) tuples in case of a BLOCK selection. ] if name[self].selection begin[:] <ast.Tuple object at 0x7da18f00c9a0> assign[=] call[name[sorted], parameter[list[[<ast.Attribute object at 0x7da18f00d450>, <ast.Attribute object at 0x7da18f00f070>]]]] if compare[name[self].selection.type equal[==] name[SelectionType].BLOCK] begin[:] <ast.Tuple object at 0x7da18f00c490> assign[=] call[name[self].translate_index_to_position, parameter[name[from_]]] <ast.Tuple object at 0x7da18f00eaa0> assign[=] call[name[self].translate_index_to_position, parameter[name[to]]] <ast.Tuple object at 0x7da18f00c340> assign[=] call[name[sorted], parameter[list[[<ast.Name object at 0x7da18f00d480>, <ast.Name object at 0x7da18f00f4f0>]]]] variable[lines] assign[=] name[self].lines for taget[name[l]] in starred[call[name[range], parameter[name[from_line], binary_operation[name[to_line] + constant[1]]]]] begin[:] variable[line_length] assign[=] call[name[len], parameter[call[name[lines]][name[l]]]] if compare[name[from_column] less[<] name[line_length]] begin[:] <ast.Yield object at 0x7da18f00fb50>
keyword[def] identifier[selection_ranges] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[selection] : identifier[from_] , identifier[to] = identifier[sorted] ([ identifier[self] . identifier[cursor_position] , identifier[self] . identifier[selection] . identifier[original_cursor_position] ]) keyword[if] identifier[self] . identifier[selection] . identifier[type] == identifier[SelectionType] . identifier[BLOCK] : identifier[from_line] , identifier[from_column] = identifier[self] . identifier[translate_index_to_position] ( identifier[from_] ) identifier[to_line] , identifier[to_column] = identifier[self] . identifier[translate_index_to_position] ( identifier[to] ) identifier[from_column] , identifier[to_column] = identifier[sorted] ([ identifier[from_column] , identifier[to_column] ]) identifier[lines] = identifier[self] . identifier[lines] keyword[for] identifier[l] keyword[in] identifier[range] ( identifier[from_line] , identifier[to_line] + literal[int] ): identifier[line_length] = identifier[len] ( identifier[lines] [ identifier[l] ]) keyword[if] identifier[from_column] < identifier[line_length] : keyword[yield] ( identifier[self] . identifier[translate_row_col_to_index] ( identifier[l] , identifier[from_column] ), identifier[self] . identifier[translate_row_col_to_index] ( identifier[l] , identifier[min] ( identifier[line_length] - literal[int] , identifier[to_column] ))) keyword[else] : keyword[if] identifier[self] . identifier[selection] . identifier[type] == identifier[SelectionType] . identifier[LINES] : identifier[from_] = identifier[max] ( literal[int] , identifier[self] . identifier[text] . identifier[rfind] ( literal[string] , literal[int] , identifier[from_] )+ literal[int] ) keyword[if] identifier[self] . identifier[text] . identifier[find] ( literal[string] , identifier[to] )>= literal[int] : identifier[to] = identifier[self] . identifier[text] . identifier[find] ( literal[string] , identifier[to] ) keyword[else] : identifier[to] = identifier[len] ( identifier[self] . identifier[text] )- literal[int] keyword[yield] identifier[from_] , identifier[to]
def selection_ranges(self): """ Return a list of (from, to) tuples for the selection or none if nothing was selected. start and end position are always included in the selection. This will yield several (from, to) tuples in case of a BLOCK selection. """ if self.selection: (from_, to) = sorted([self.cursor_position, self.selection.original_cursor_position]) if self.selection.type == SelectionType.BLOCK: (from_line, from_column) = self.translate_index_to_position(from_) (to_line, to_column) = self.translate_index_to_position(to) (from_column, to_column) = sorted([from_column, to_column]) lines = self.lines for l in range(from_line, to_line + 1): line_length = len(lines[l]) if from_column < line_length: yield (self.translate_row_col_to_index(l, from_column), self.translate_row_col_to_index(l, min(line_length - 1, to_column))) # depends on [control=['if'], data=['from_column', 'line_length']] # depends on [control=['for'], data=['l']] # depends on [control=['if'], data=[]] else: # In case of a LINES selection, go to the start/end of the lines. if self.selection.type == SelectionType.LINES: from_ = max(0, self.text.rfind('\n', 0, from_) + 1) if self.text.find('\n', to) >= 0: to = self.text.find('\n', to) # depends on [control=['if'], data=[]] else: to = len(self.text) - 1 # depends on [control=['if'], data=[]] yield (from_, to) # depends on [control=['if'], data=[]]
def update_from_object(self, obj, criterion=lambda key: key.isupper()): """ Update dict from the attributes of a module, class or other object. By default only attributes with all-uppercase names will be retrieved. Use the ``criterion`` argument to modify that behaviour. :arg obj: Either the actual module/object, or its absolute name, e.g. 'my_app.settings'. :arg criterion: Callable that must return True when passed the name of an attribute, if that attribute is to be used. :type criterion: :py:class:`function` .. versionadded:: 1.0 """ log.debug('Loading config from {0}'.format(obj)) if isinstance(obj, basestring): if '.' in obj: path, name = obj.rsplit('.', 1) mod = __import__(path, globals(), locals(), [name], 0) obj = getattr(mod, name) else: obj = __import__(obj, globals(), locals(), [], 0) self.update( (key, getattr(obj, key)) for key in filter(criterion, dir(obj)) )
def function[update_from_object, parameter[self, obj, criterion]]: constant[ Update dict from the attributes of a module, class or other object. By default only attributes with all-uppercase names will be retrieved. Use the ``criterion`` argument to modify that behaviour. :arg obj: Either the actual module/object, or its absolute name, e.g. 'my_app.settings'. :arg criterion: Callable that must return True when passed the name of an attribute, if that attribute is to be used. :type criterion: :py:class:`function` .. versionadded:: 1.0 ] call[name[log].debug, parameter[call[constant[Loading config from {0}].format, parameter[name[obj]]]]] if call[name[isinstance], parameter[name[obj], name[basestring]]] begin[:] if compare[constant[.] in name[obj]] begin[:] <ast.Tuple object at 0x7da1b23f8eb0> assign[=] call[name[obj].rsplit, parameter[constant[.], constant[1]]] variable[mod] assign[=] call[name[__import__], parameter[name[path], call[name[globals], parameter[]], call[name[locals], parameter[]], list[[<ast.Name object at 0x7da1b23f9480>]], constant[0]]] variable[obj] assign[=] call[name[getattr], parameter[name[mod], name[name]]] call[name[self].update, parameter[<ast.GeneratorExp object at 0x7da1b2429ff0>]]
keyword[def] identifier[update_from_object] ( identifier[self] , identifier[obj] , identifier[criterion] = keyword[lambda] identifier[key] : identifier[key] . identifier[isupper] ()): literal[string] identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[obj] )) keyword[if] identifier[isinstance] ( identifier[obj] , identifier[basestring] ): keyword[if] literal[string] keyword[in] identifier[obj] : identifier[path] , identifier[name] = identifier[obj] . identifier[rsplit] ( literal[string] , literal[int] ) identifier[mod] = identifier[__import__] ( identifier[path] , identifier[globals] (), identifier[locals] (),[ identifier[name] ], literal[int] ) identifier[obj] = identifier[getattr] ( identifier[mod] , identifier[name] ) keyword[else] : identifier[obj] = identifier[__import__] ( identifier[obj] , identifier[globals] (), identifier[locals] (),[], literal[int] ) identifier[self] . identifier[update] ( ( identifier[key] , identifier[getattr] ( identifier[obj] , identifier[key] )) keyword[for] identifier[key] keyword[in] identifier[filter] ( identifier[criterion] , identifier[dir] ( identifier[obj] )) )
def update_from_object(self, obj, criterion=lambda key: key.isupper()): """ Update dict from the attributes of a module, class or other object. By default only attributes with all-uppercase names will be retrieved. Use the ``criterion`` argument to modify that behaviour. :arg obj: Either the actual module/object, or its absolute name, e.g. 'my_app.settings'. :arg criterion: Callable that must return True when passed the name of an attribute, if that attribute is to be used. :type criterion: :py:class:`function` .. versionadded:: 1.0 """ log.debug('Loading config from {0}'.format(obj)) if isinstance(obj, basestring): if '.' in obj: (path, name) = obj.rsplit('.', 1) mod = __import__(path, globals(), locals(), [name], 0) obj = getattr(mod, name) # depends on [control=['if'], data=['obj']] else: obj = __import__(obj, globals(), locals(), [], 0) # depends on [control=['if'], data=[]] self.update(((key, getattr(obj, key)) for key in filter(criterion, dir(obj))))
async def asynchronously_get_data(self, url): """ Asynchronously get data from Chunked transfer encoding of https://smartcity.rbccps.org/api/0.1.0/subscribe. (Only this function requires Python 3. Rest of the functions can be run in python2. Args: url (string): url to subscribe """ headers = {"apikey": self.entity_api_key} try: async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session: async with session.get(url, headers=headers, timeout=3000) as response: while True: # loop over for each chunk of data chunk = await response.content.readchunk() if not chunk: break if platform == "linux" or platform == "linux2": # In linux systems, readchunk() returns a tuple chunk = chunk[0] resp = dict() resp["data"] = chunk.decode() current_milli_time = lambda: int(round(time() * 1000)) resp["timestamp"] = str(current_milli_time()) self.subscribe_data = resp except Exception as e: print("\n********* Oops: " + url + " " + str(type(e)) + str(e) + " *********\n") print('\n********* Closing TCP: {} *********\n'.format(url))
<ast.AsyncFunctionDef object at 0x7da18dc07550>
keyword[async] keyword[def] identifier[asynchronously_get_data] ( identifier[self] , identifier[url] ): literal[string] identifier[headers] ={ literal[string] : identifier[self] . identifier[entity_api_key] } keyword[try] : keyword[async] keyword[with] identifier[aiohttp] . identifier[ClientSession] ( identifier[connector] = identifier[aiohttp] . identifier[TCPConnector] ( identifier[verify_ssl] = keyword[False] )) keyword[as] identifier[session] : keyword[async] keyword[with] identifier[session] . identifier[get] ( identifier[url] , identifier[headers] = identifier[headers] , identifier[timeout] = literal[int] ) keyword[as] identifier[response] : keyword[while] keyword[True] : identifier[chunk] = keyword[await] identifier[response] . identifier[content] . identifier[readchunk] () keyword[if] keyword[not] identifier[chunk] : keyword[break] keyword[if] identifier[platform] == literal[string] keyword[or] identifier[platform] == literal[string] : identifier[chunk] = identifier[chunk] [ literal[int] ] identifier[resp] = identifier[dict] () identifier[resp] [ literal[string] ]= identifier[chunk] . identifier[decode] () identifier[current_milli_time] = keyword[lambda] : identifier[int] ( identifier[round] ( identifier[time] ()* literal[int] )) identifier[resp] [ literal[string] ]= identifier[str] ( identifier[current_milli_time] ()) identifier[self] . identifier[subscribe_data] = identifier[resp] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[print] ( literal[string] + identifier[url] + literal[string] + identifier[str] ( identifier[type] ( identifier[e] ))+ identifier[str] ( identifier[e] )+ literal[string] ) identifier[print] ( literal[string] . identifier[format] ( identifier[url] ))
async def asynchronously_get_data(self, url): """ Asynchronously get data from Chunked transfer encoding of https://smartcity.rbccps.org/api/0.1.0/subscribe. (Only this function requires Python 3. Rest of the functions can be run in python2. Args: url (string): url to subscribe """ headers = {'apikey': self.entity_api_key} try: async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session: async with session.get(url, headers=headers, timeout=3000) as response: while True: # loop over for each chunk of data chunk = await response.content.readchunk() if not chunk: break # depends on [control=['if'], data=[]] if platform == 'linux' or platform == 'linux2': # In linux systems, readchunk() returns a tuple chunk = chunk[0] # depends on [control=['if'], data=[]] resp = dict() resp['data'] = chunk.decode() current_milli_time = lambda : int(round(time() * 1000)) resp['timestamp'] = str(current_milli_time()) self.subscribe_data = resp # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except Exception as e: print('\n********* Oops: ' + url + ' ' + str(type(e)) + str(e) + ' *********\n') # depends on [control=['except'], data=['e']] print('\n********* Closing TCP: {} *********\n'.format(url))
def n1qlQueryAll(self, *args, **kwargs): """ Execute a N1QL query, retrieving all rows. This method returns a :class:`Deferred` object which is executed with a :class:`~.N1QLRequest` object. The object may be iterated over to yield the rows in the result set. This method is similar to :meth:`~couchbase.bucket.Bucket.n1ql_query` in its arguments. Example:: def handler(req): for row in req: # ... handle row d = cb.n1qlQueryAll('SELECT * from `travel-sample` WHERE city=$1`, 'Reno') d.addCallback(handler) :return: A :class:`Deferred` .. seealso:: :meth:`~couchbase.bucket.Bucket.n1ql_query` """ if not self.connected: cb = lambda x: self.n1qlQueryAll(*args, **kwargs) return self.connect().addCallback(cb) kwargs['itercls'] = BatchedN1QLRequest o = super(RawBucket, self).n1ql_query(*args, **kwargs) o.start() return o._getDeferred()
def function[n1qlQueryAll, parameter[self]]: constant[ Execute a N1QL query, retrieving all rows. This method returns a :class:`Deferred` object which is executed with a :class:`~.N1QLRequest` object. The object may be iterated over to yield the rows in the result set. This method is similar to :meth:`~couchbase.bucket.Bucket.n1ql_query` in its arguments. Example:: def handler(req): for row in req: # ... handle row d = cb.n1qlQueryAll('SELECT * from `travel-sample` WHERE city=$1`, 'Reno') d.addCallback(handler) :return: A :class:`Deferred` .. seealso:: :meth:`~couchbase.bucket.Bucket.n1ql_query` ] if <ast.UnaryOp object at 0x7da2054a43a0> begin[:] variable[cb] assign[=] <ast.Lambda object at 0x7da2054a4d30> return[call[call[name[self].connect, parameter[]].addCallback, parameter[name[cb]]]] call[name[kwargs]][constant[itercls]] assign[=] name[BatchedN1QLRequest] variable[o] assign[=] call[call[name[super], parameter[name[RawBucket], name[self]]].n1ql_query, parameter[<ast.Starred object at 0x7da207f994b0>]] call[name[o].start, parameter[]] return[call[name[o]._getDeferred, parameter[]]]
keyword[def] identifier[n1qlQueryAll] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[connected] : identifier[cb] = keyword[lambda] identifier[x] : identifier[self] . identifier[n1qlQueryAll] (* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[self] . identifier[connect] (). identifier[addCallback] ( identifier[cb] ) identifier[kwargs] [ literal[string] ]= identifier[BatchedN1QLRequest] identifier[o] = identifier[super] ( identifier[RawBucket] , identifier[self] ). identifier[n1ql_query] (* identifier[args] ,** identifier[kwargs] ) identifier[o] . identifier[start] () keyword[return] identifier[o] . identifier[_getDeferred] ()
def n1qlQueryAll(self, *args, **kwargs): """ Execute a N1QL query, retrieving all rows. This method returns a :class:`Deferred` object which is executed with a :class:`~.N1QLRequest` object. The object may be iterated over to yield the rows in the result set. This method is similar to :meth:`~couchbase.bucket.Bucket.n1ql_query` in its arguments. Example:: def handler(req): for row in req: # ... handle row d = cb.n1qlQueryAll('SELECT * from `travel-sample` WHERE city=$1`, 'Reno') d.addCallback(handler) :return: A :class:`Deferred` .. seealso:: :meth:`~couchbase.bucket.Bucket.n1ql_query` """ if not self.connected: cb = lambda x: self.n1qlQueryAll(*args, **kwargs) return self.connect().addCallback(cb) # depends on [control=['if'], data=[]] kwargs['itercls'] = BatchedN1QLRequest o = super(RawBucket, self).n1ql_query(*args, **kwargs) o.start() return o._getDeferred()
def list_alerts(self, limit=None, cursor=None): '''**Description** List the current set of scanning alerts. **Arguments** - limit: Maximum number of alerts in the response. - cursor: An opaque string representing the current position in the list of alerts. It's provided in the 'responseMetadata' of the list_alerts response. **Success Return Value** A JSON object containing the list of alerts. ''' url = self.url + '/api/scanning/v1/alerts' if limit: url += '?limit=' + str(limit) if cursor: url += '&cursor=' + cursor res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] return [True, res.json()]
def function[list_alerts, parameter[self, limit, cursor]]: constant[**Description** List the current set of scanning alerts. **Arguments** - limit: Maximum number of alerts in the response. - cursor: An opaque string representing the current position in the list of alerts. It's provided in the 'responseMetadata' of the list_alerts response. **Success Return Value** A JSON object containing the list of alerts. ] variable[url] assign[=] binary_operation[name[self].url + constant[/api/scanning/v1/alerts]] if name[limit] begin[:] <ast.AugAssign object at 0x7da18f58d3f0> if name[cursor] begin[:] <ast.AugAssign object at 0x7da18f58ea70> variable[res] assign[=] call[name[requests].get, parameter[name[url]]] if <ast.UnaryOp object at 0x7da18f58ce80> begin[:] return[list[[<ast.Constant object at 0x7da18f58f0a0>, <ast.Attribute object at 0x7da18f58c820>]]] return[list[[<ast.Constant object at 0x7da18f58e0b0>, <ast.Call object at 0x7da18f58d6f0>]]]
keyword[def] identifier[list_alerts] ( identifier[self] , identifier[limit] = keyword[None] , identifier[cursor] = keyword[None] ): literal[string] identifier[url] = identifier[self] . identifier[url] + literal[string] keyword[if] identifier[limit] : identifier[url] += literal[string] + identifier[str] ( identifier[limit] ) keyword[if] identifier[cursor] : identifier[url] += literal[string] + identifier[cursor] identifier[res] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[self] . identifier[hdrs] , identifier[verify] = identifier[self] . identifier[ssl_verify] ) keyword[if] keyword[not] identifier[self] . identifier[_checkResponse] ( identifier[res] ): keyword[return] [ keyword[False] , identifier[self] . identifier[lasterr] ] keyword[return] [ keyword[True] , identifier[res] . identifier[json] ()]
def list_alerts(self, limit=None, cursor=None): """**Description** List the current set of scanning alerts. **Arguments** - limit: Maximum number of alerts in the response. - cursor: An opaque string representing the current position in the list of alerts. It's provided in the 'responseMetadata' of the list_alerts response. **Success Return Value** A JSON object containing the list of alerts. """ url = self.url + '/api/scanning/v1/alerts' if limit: url += '?limit=' + str(limit) if cursor: url += '&cursor=' + cursor # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] res = requests.get(url, headers=self.hdrs, verify=self.ssl_verify) if not self._checkResponse(res): return [False, self.lasterr] # depends on [control=['if'], data=[]] return [True, res.json()]
def map(self, coro, *args, **kwargs): """ Schedule a given coroutine call for each plugin. The coro called get the Plugin instance as first argument of its method call :param coro: coro to call on each plugin :param filter_plugins: list of plugin names to filter (only plugin whose name is in filter are called). None will call all plugins. [] will call None. :param args: arguments to pass to coro :param kwargs: arguments to pass to coro :return: dict containing return from coro call for each plugin """ p_list = kwargs.pop('filter_plugins', None) if p_list is None: p_list = [p.name for p in self.plugins] tasks = [] plugins_list = [] for plugin in self._plugins: if plugin.name in p_list: coro_instance = coro(plugin, *args, **kwargs) if coro_instance: try: tasks.append(self._schedule_coro(coro_instance)) plugins_list.append(plugin) except AssertionError: self.logger.error("Method '%r' on plugin '%s' is not a coroutine" % (coro, plugin.name)) if tasks: ret_list = yield from asyncio.gather(*tasks, loop=self._loop) # Create result map plugin=>ret ret_dict = {k: v for k, v in zip(plugins_list, ret_list)} else: ret_dict = {} return ret_dict
def function[map, parameter[self, coro]]: constant[ Schedule a given coroutine call for each plugin. The coro called get the Plugin instance as first argument of its method call :param coro: coro to call on each plugin :param filter_plugins: list of plugin names to filter (only plugin whose name is in filter are called). None will call all plugins. [] will call None. :param args: arguments to pass to coro :param kwargs: arguments to pass to coro :return: dict containing return from coro call for each plugin ] variable[p_list] assign[=] call[name[kwargs].pop, parameter[constant[filter_plugins], constant[None]]] if compare[name[p_list] is constant[None]] begin[:] variable[p_list] assign[=] <ast.ListComp object at 0x7da20e954850> variable[tasks] assign[=] list[[]] variable[plugins_list] assign[=] list[[]] for taget[name[plugin]] in starred[name[self]._plugins] begin[:] if compare[name[plugin].name in name[p_list]] begin[:] variable[coro_instance] assign[=] call[name[coro], parameter[name[plugin], <ast.Starred object at 0x7da20e954700>]] if name[coro_instance] begin[:] <ast.Try object at 0x7da20e954b20> if name[tasks] begin[:] variable[ret_list] assign[=] <ast.YieldFrom object at 0x7da20e955450> variable[ret_dict] assign[=] <ast.DictComp object at 0x7da18fe929e0> return[name[ret_dict]]
keyword[def] identifier[map] ( identifier[self] , identifier[coro] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[p_list] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[if] identifier[p_list] keyword[is] keyword[None] : identifier[p_list] =[ identifier[p] . identifier[name] keyword[for] identifier[p] keyword[in] identifier[self] . identifier[plugins] ] identifier[tasks] =[] identifier[plugins_list] =[] keyword[for] identifier[plugin] keyword[in] identifier[self] . identifier[_plugins] : keyword[if] identifier[plugin] . identifier[name] keyword[in] identifier[p_list] : identifier[coro_instance] = identifier[coro] ( identifier[plugin] ,* identifier[args] ,** identifier[kwargs] ) keyword[if] identifier[coro_instance] : keyword[try] : identifier[tasks] . identifier[append] ( identifier[self] . identifier[_schedule_coro] ( identifier[coro_instance] )) identifier[plugins_list] . identifier[append] ( identifier[plugin] ) keyword[except] identifier[AssertionError] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] % ( identifier[coro] , identifier[plugin] . identifier[name] )) keyword[if] identifier[tasks] : identifier[ret_list] = keyword[yield] keyword[from] identifier[asyncio] . identifier[gather] (* identifier[tasks] , identifier[loop] = identifier[self] . identifier[_loop] ) identifier[ret_dict] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[zip] ( identifier[plugins_list] , identifier[ret_list] )} keyword[else] : identifier[ret_dict] ={} keyword[return] identifier[ret_dict]
def map(self, coro, *args, **kwargs): """ Schedule a given coroutine call for each plugin. The coro called get the Plugin instance as first argument of its method call :param coro: coro to call on each plugin :param filter_plugins: list of plugin names to filter (only plugin whose name is in filter are called). None will call all plugins. [] will call None. :param args: arguments to pass to coro :param kwargs: arguments to pass to coro :return: dict containing return from coro call for each plugin """ p_list = kwargs.pop('filter_plugins', None) if p_list is None: p_list = [p.name for p in self.plugins] # depends on [control=['if'], data=['p_list']] tasks = [] plugins_list = [] for plugin in self._plugins: if plugin.name in p_list: coro_instance = coro(plugin, *args, **kwargs) if coro_instance: try: tasks.append(self._schedule_coro(coro_instance)) plugins_list.append(plugin) # depends on [control=['try'], data=[]] except AssertionError: self.logger.error("Method '%r' on plugin '%s' is not a coroutine" % (coro, plugin.name)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['plugin']] if tasks: ret_list = (yield from asyncio.gather(*tasks, loop=self._loop)) # Create result map plugin=>ret ret_dict = {k: v for (k, v) in zip(plugins_list, ret_list)} # depends on [control=['if'], data=[]] else: ret_dict = {} return ret_dict
def handle_heart_failure(self, heart): """handler to attach to heartbeater. called when a previously registered heart fails to respond to beat request. triggers unregistration""" self.log.debug("heartbeat::handle_heart_failure(%r)", heart) eid = self.hearts.get(heart, None) queue = self.engines[eid].queue if eid is None or self.keytable[eid] in self.dead_engines: self.log.info("heartbeat::ignoring heart failure %r (not an engine or already dead)", heart) else: self.unregister_engine(heart, dict(content=dict(id=eid, queue=queue)))
def function[handle_heart_failure, parameter[self, heart]]: constant[handler to attach to heartbeater. called when a previously registered heart fails to respond to beat request. triggers unregistration] call[name[self].log.debug, parameter[constant[heartbeat::handle_heart_failure(%r)], name[heart]]] variable[eid] assign[=] call[name[self].hearts.get, parameter[name[heart], constant[None]]] variable[queue] assign[=] call[name[self].engines][name[eid]].queue if <ast.BoolOp object at 0x7da1b002f6d0> begin[:] call[name[self].log.info, parameter[constant[heartbeat::ignoring heart failure %r (not an engine or already dead)], name[heart]]]
keyword[def] identifier[handle_heart_failure] ( identifier[self] , identifier[heart] ): literal[string] identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[heart] ) identifier[eid] = identifier[self] . identifier[hearts] . identifier[get] ( identifier[heart] , keyword[None] ) identifier[queue] = identifier[self] . identifier[engines] [ identifier[eid] ]. identifier[queue] keyword[if] identifier[eid] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[keytable] [ identifier[eid] ] keyword[in] identifier[self] . identifier[dead_engines] : identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[heart] ) keyword[else] : identifier[self] . identifier[unregister_engine] ( identifier[heart] , identifier[dict] ( identifier[content] = identifier[dict] ( identifier[id] = identifier[eid] , identifier[queue] = identifier[queue] )))
def handle_heart_failure(self, heart): """handler to attach to heartbeater. called when a previously registered heart fails to respond to beat request. triggers unregistration""" self.log.debug('heartbeat::handle_heart_failure(%r)', heart) eid = self.hearts.get(heart, None) queue = self.engines[eid].queue if eid is None or self.keytable[eid] in self.dead_engines: self.log.info('heartbeat::ignoring heart failure %r (not an engine or already dead)', heart) # depends on [control=['if'], data=[]] else: self.unregister_engine(heart, dict(content=dict(id=eid, queue=queue)))
def write(self, request): """Write a Request.""" if FLAGS.sc2_verbose_protocol: self._log(" Writing request ".center(60, "-") + "\n") self._log_packet(request) self._write(request)
def function[write, parameter[self, request]]: constant[Write a Request.] if name[FLAGS].sc2_verbose_protocol begin[:] call[name[self]._log, parameter[binary_operation[call[constant[ Writing request ].center, parameter[constant[60], constant[-]]] + constant[ ]]]] call[name[self]._log_packet, parameter[name[request]]] call[name[self]._write, parameter[name[request]]]
keyword[def] identifier[write] ( identifier[self] , identifier[request] ): literal[string] keyword[if] identifier[FLAGS] . identifier[sc2_verbose_protocol] : identifier[self] . identifier[_log] ( literal[string] . identifier[center] ( literal[int] , literal[string] )+ literal[string] ) identifier[self] . identifier[_log_packet] ( identifier[request] ) identifier[self] . identifier[_write] ( identifier[request] )
def write(self, request): """Write a Request.""" if FLAGS.sc2_verbose_protocol: self._log(' Writing request '.center(60, '-') + '\n') self._log_packet(request) # depends on [control=['if'], data=[]] self._write(request)
def balance_of_contacts(records, weighted=True): """ The balance of interactions per contact. For every contact, the balance is the number of outgoing interactions divided by the total number of interactions (in+out). .. math:: \\forall \\,\\text{contact}\\,c,\\;\\text{balance}\,(c) = \\frac{\\bigl|\\text{outgoing}\,(c)\\bigr|}{\\bigl|\\text{outgoing}\,(c)\\bigr|+\\bigl|\\text{incoming}\,(c)\\bigr|} Parameters ---------- weighted : str, optional If ``True``, the balance for each contact is weighted by the number of interactions the user had with this contact. """ counter_out = defaultdict(int) counter = defaultdict(int) for r in records: if r.direction == 'out': counter_out[r.correspondent_id] += 1 counter[r.correspondent_id] += 1 if not weighted: balance = [counter_out[c] / counter[c] for c in counter] else: balance = [counter_out[c] / sum(counter.values()) for c in counter] return summary_stats(balance)
def function[balance_of_contacts, parameter[records, weighted]]: constant[ The balance of interactions per contact. For every contact, the balance is the number of outgoing interactions divided by the total number of interactions (in+out). .. math:: \forall \,\text{contact}\,c,\;\text{balance}\,(c) = \frac{\bigl|\text{outgoing}\,(c)\bigr|}{\bigl|\text{outgoing}\,(c)\bigr|+\bigl|\text{incoming}\,(c)\bigr|} Parameters ---------- weighted : str, optional If ``True``, the balance for each contact is weighted by the number of interactions the user had with this contact. ] variable[counter_out] assign[=] call[name[defaultdict], parameter[name[int]]] variable[counter] assign[=] call[name[defaultdict], parameter[name[int]]] for taget[name[r]] in starred[name[records]] begin[:] if compare[name[r].direction equal[==] constant[out]] begin[:] <ast.AugAssign object at 0x7da1b0d57910> <ast.AugAssign object at 0x7da1b0d55960> if <ast.UnaryOp object at 0x7da1b0d55ab0> begin[:] variable[balance] assign[=] <ast.ListComp object at 0x7da1b0d56620> return[call[name[summary_stats], parameter[name[balance]]]]
keyword[def] identifier[balance_of_contacts] ( identifier[records] , identifier[weighted] = keyword[True] ): literal[string] identifier[counter_out] = identifier[defaultdict] ( identifier[int] ) identifier[counter] = identifier[defaultdict] ( identifier[int] ) keyword[for] identifier[r] keyword[in] identifier[records] : keyword[if] identifier[r] . identifier[direction] == literal[string] : identifier[counter_out] [ identifier[r] . identifier[correspondent_id] ]+= literal[int] identifier[counter] [ identifier[r] . identifier[correspondent_id] ]+= literal[int] keyword[if] keyword[not] identifier[weighted] : identifier[balance] =[ identifier[counter_out] [ identifier[c] ]/ identifier[counter] [ identifier[c] ] keyword[for] identifier[c] keyword[in] identifier[counter] ] keyword[else] : identifier[balance] =[ identifier[counter_out] [ identifier[c] ]/ identifier[sum] ( identifier[counter] . identifier[values] ()) keyword[for] identifier[c] keyword[in] identifier[counter] ] keyword[return] identifier[summary_stats] ( identifier[balance] )
def balance_of_contacts(records, weighted=True): """ The balance of interactions per contact. For every contact, the balance is the number of outgoing interactions divided by the total number of interactions (in+out). .. math:: \\forall \\,\\text{contact}\\,c,\\;\\text{balance}\\,(c) = \\frac{\\bigl|\\text{outgoing}\\,(c)\\bigr|}{\\bigl|\\text{outgoing}\\,(c)\\bigr|+\\bigl|\\text{incoming}\\,(c)\\bigr|} Parameters ---------- weighted : str, optional If ``True``, the balance for each contact is weighted by the number of interactions the user had with this contact. """ counter_out = defaultdict(int) counter = defaultdict(int) for r in records: if r.direction == 'out': counter_out[r.correspondent_id] += 1 # depends on [control=['if'], data=[]] counter[r.correspondent_id] += 1 # depends on [control=['for'], data=['r']] if not weighted: balance = [counter_out[c] / counter[c] for c in counter] # depends on [control=['if'], data=[]] else: balance = [counter_out[c] / sum(counter.values()) for c in counter] return summary_stats(balance)
def _fast_write(self, outfile, value): """Function for fast writing to motor files.""" outfile.truncate(0) outfile.write(str(int(value))) outfile.flush()
def function[_fast_write, parameter[self, outfile, value]]: constant[Function for fast writing to motor files.] call[name[outfile].truncate, parameter[constant[0]]] call[name[outfile].write, parameter[call[name[str], parameter[call[name[int], parameter[name[value]]]]]]] call[name[outfile].flush, parameter[]]
keyword[def] identifier[_fast_write] ( identifier[self] , identifier[outfile] , identifier[value] ): literal[string] identifier[outfile] . identifier[truncate] ( literal[int] ) identifier[outfile] . identifier[write] ( identifier[str] ( identifier[int] ( identifier[value] ))) identifier[outfile] . identifier[flush] ()
def _fast_write(self, outfile, value): """Function for fast writing to motor files.""" outfile.truncate(0) outfile.write(str(int(value))) outfile.flush()
def read_namespaced_replica_set(self, name, namespace, **kwargs): # noqa: E501 """read_namespaced_replica_set # noqa: E501 read the specified ReplicaSet # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_replica_set(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ReplicaSet (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. :param bool export: Should this value be exported. Export strips fields that a user can not specify. :return: V1beta1ReplicaSet If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_replica_set_with_http_info(name, namespace, **kwargs) # noqa: E501 else: (data) = self.read_namespaced_replica_set_with_http_info(name, namespace, **kwargs) # noqa: E501 return data
def function[read_namespaced_replica_set, parameter[self, name, namespace]]: constant[read_namespaced_replica_set # noqa: E501 read the specified ReplicaSet # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.read_namespaced_replica_set(name, namespace, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ReplicaSet (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param str pretty: If 'true', then the output is pretty printed. :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. :param bool export: Should this value be exported. Export strips fields that a user can not specify. :return: V1beta1ReplicaSet If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].read_namespaced_replica_set_with_http_info, parameter[name[name], name[namespace]]]]
keyword[def] identifier[read_namespaced_replica_set] ( identifier[self] , identifier[name] , identifier[namespace] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[read_namespaced_replica_set_with_http_info] ( identifier[name] , identifier[namespace] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[read_namespaced_replica_set_with_http_info] ( identifier[name] , identifier[namespace] ,** identifier[kwargs] ) keyword[return] identifier[data]
def read_namespaced_replica_set(self, name, namespace, **kwargs): # noqa: E501 "read_namespaced_replica_set # noqa: E501\n\n read the specified ReplicaSet # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.read_namespaced_replica_set(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the ReplicaSet (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param bool exact: Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'.\n :param bool export: Should this value be exported. Export strips fields that a user can not specify.\n :return: V1beta1ReplicaSet\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.read_namespaced_replica_set_with_http_info(name, namespace, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.read_namespaced_replica_set_with_http_info(name, namespace, **kwargs) # noqa: E501 return data
def keyword(self, token_stream, token, operators): """Lowest level parsing function. A keyword consists of zero or more prefix operators (NOT, or COMPARISON) followed by a TEXT, COLOR, or NUMBER block. """ if token[0] == 'TEXT' or token[0] == 'COLOR': return SearchKeyword(token[1], **operators) elif token[0] == 'COMPARISON': operators['comparison'] = token[1] elif token[0] == 'NOT': operators['boolean'] = 'not' else: if token[1] == None: problem = 'end of input.' else: problem = 'token {0} in input'.format(token[1]) raise SyntaxError('Unexpected {0}'.format(problem)) token = next(token_stream) return self.keyword(token_stream, token, operators)
def function[keyword, parameter[self, token_stream, token, operators]]: constant[Lowest level parsing function. A keyword consists of zero or more prefix operators (NOT, or COMPARISON) followed by a TEXT, COLOR, or NUMBER block. ] if <ast.BoolOp object at 0x7da20c991090> begin[:] return[call[name[SearchKeyword], parameter[call[name[token]][constant[1]]]]] variable[token] assign[=] call[name[next], parameter[name[token_stream]]] return[call[name[self].keyword, parameter[name[token_stream], name[token], name[operators]]]]
keyword[def] identifier[keyword] ( identifier[self] , identifier[token_stream] , identifier[token] , identifier[operators] ): literal[string] keyword[if] identifier[token] [ literal[int] ]== literal[string] keyword[or] identifier[token] [ literal[int] ]== literal[string] : keyword[return] identifier[SearchKeyword] ( identifier[token] [ literal[int] ],** identifier[operators] ) keyword[elif] identifier[token] [ literal[int] ]== literal[string] : identifier[operators] [ literal[string] ]= identifier[token] [ literal[int] ] keyword[elif] identifier[token] [ literal[int] ]== literal[string] : identifier[operators] [ literal[string] ]= literal[string] keyword[else] : keyword[if] identifier[token] [ literal[int] ]== keyword[None] : identifier[problem] = literal[string] keyword[else] : identifier[problem] = literal[string] . identifier[format] ( identifier[token] [ literal[int] ]) keyword[raise] identifier[SyntaxError] ( literal[string] . identifier[format] ( identifier[problem] )) identifier[token] = identifier[next] ( identifier[token_stream] ) keyword[return] identifier[self] . identifier[keyword] ( identifier[token_stream] , identifier[token] , identifier[operators] )
def keyword(self, token_stream, token, operators): """Lowest level parsing function. A keyword consists of zero or more prefix operators (NOT, or COMPARISON) followed by a TEXT, COLOR, or NUMBER block. """ if token[0] == 'TEXT' or token[0] == 'COLOR': return SearchKeyword(token[1], **operators) # depends on [control=['if'], data=[]] elif token[0] == 'COMPARISON': operators['comparison'] = token[1] # depends on [control=['if'], data=[]] elif token[0] == 'NOT': operators['boolean'] = 'not' # depends on [control=['if'], data=[]] else: if token[1] == None: problem = 'end of input.' # depends on [control=['if'], data=[]] else: problem = 'token {0} in input'.format(token[1]) raise SyntaxError('Unexpected {0}'.format(problem)) token = next(token_stream) return self.keyword(token_stream, token, operators)