code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def put(self, iocb):
    """Add an IOCB to a queue.  This is usually called by the function
    that filters requests and passes them out to the correct processing
    thread."""
    if _debug: IOQueue._debug("put %r", iocb)

    # only PENDING requests may be queued
    if iocb.ioState != PENDING:
        raise RuntimeError("invalid state transition")

    # remember whether the queue held anything before this insert
    wasempty = not self.notempty.isSet()

    # keep the queue priority-ordered: place this iocb after all
    # existing entries of the same priority
    priority = iocb.ioPriority
    insertion_point = bisect_left(self.queue, (priority + 1,))
    self.queue.insert(insertion_point, (priority, iocb))

    # link the iocb back to the queue that now owns it
    iocb.ioQueue = self

    # signal consumers that the queue has work
    self.notempty.set()

    return wasempty
def function[put, parameter[self, iocb]]: constant[Add an IOCB to a queue. This is usually called by the function that filters requests and passes them out to the correct processing thread.] if name[_debug] begin[:] call[name[IOQueue]._debug, parameter[constant[put %r], name[iocb]]] if compare[name[iocb].ioState not_equal[!=] name[PENDING]] begin[:] <ast.Raise object at 0x7da1b0813a30> variable[wasempty] assign[=] <ast.UnaryOp object at 0x7da1b0813070> variable[priority] assign[=] name[iocb].ioPriority variable[item] assign[=] tuple[[<ast.Name object at 0x7da1b08131f0>, <ast.Name object at 0x7da1b0812cb0>]] call[name[self].queue.insert, parameter[call[name[bisect_left], parameter[name[self].queue, tuple[[<ast.BinOp object at 0x7da1b0812890>]]]], name[item]]] name[iocb].ioQueue assign[=] name[self] call[name[self].notempty.set, parameter[]] return[name[wasempty]]
keyword[def] identifier[put] ( identifier[self] , identifier[iocb] ): literal[string] keyword[if] identifier[_debug] : identifier[IOQueue] . identifier[_debug] ( literal[string] , identifier[iocb] ) keyword[if] identifier[iocb] . identifier[ioState] != identifier[PENDING] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[wasempty] = keyword[not] identifier[self] . identifier[notempty] . identifier[isSet] () identifier[priority] = identifier[iocb] . identifier[ioPriority] identifier[item] =( identifier[priority] , identifier[iocb] ) identifier[self] . identifier[queue] . identifier[insert] ( identifier[bisect_left] ( identifier[self] . identifier[queue] ,( identifier[priority] + literal[int] ,)), identifier[item] ) identifier[iocb] . identifier[ioQueue] = identifier[self] identifier[self] . identifier[notempty] . identifier[set] () keyword[return] identifier[wasempty]
def put(self, iocb): """Add an IOCB to a queue. This is usually called by the function that filters requests and passes them out to the correct processing thread.""" if _debug: IOQueue._debug('put %r', iocb) # depends on [control=['if'], data=[]] # requests should be pending before being queued if iocb.ioState != PENDING: raise RuntimeError('invalid state transition') # depends on [control=['if'], data=[]] # save that it might have been empty wasempty = not self.notempty.isSet() # add the request to the end of the list of iocb's at same priority priority = iocb.ioPriority item = (priority, iocb) self.queue.insert(bisect_left(self.queue, (priority + 1,)), item) # point the iocb back to this queue iocb.ioQueue = self # set the event, queue is no longer empty self.notempty.set() return wasempty
def get_geo_info(filename, band=1):
    """ Gets information from a Raster data set """
    # open read-only; gdal returns a Dataset handle
    dataset = gdal.Open(filename, GA_ReadOnly)
    ndv = dataset.GetRasterBand(band).GetNoDataValue()
    xsize, ysize = dataset.RasterXSize, dataset.RasterYSize
    geot = dataset.GetGeoTransform()
    # build a SpatialReference from the dataset's WKT projection string
    projection = osr.SpatialReference()
    projection.ImportFromWkt(dataset.GetProjectionRef())
    datatype = gdal.GetDataTypeName(dataset.GetRasterBand(band).DataType)
    return ndv, xsize, ysize, geot, projection, datatype
def function[get_geo_info, parameter[filename, band]]: constant[ Gets information from a Raster data set ] variable[sourceds] assign[=] call[name[gdal].Open, parameter[name[filename], name[GA_ReadOnly]]] variable[ndv] assign[=] call[call[name[sourceds].GetRasterBand, parameter[name[band]]].GetNoDataValue, parameter[]] variable[xsize] assign[=] name[sourceds].RasterXSize variable[ysize] assign[=] name[sourceds].RasterYSize variable[geot] assign[=] call[name[sourceds].GetGeoTransform, parameter[]] variable[projection] assign[=] call[name[osr].SpatialReference, parameter[]] call[name[projection].ImportFromWkt, parameter[call[name[sourceds].GetProjectionRef, parameter[]]]] variable[datatype] assign[=] call[name[sourceds].GetRasterBand, parameter[name[band]]].DataType variable[datatype] assign[=] call[name[gdal].GetDataTypeName, parameter[name[datatype]]] return[tuple[[<ast.Name object at 0x7da1b27e3a90>, <ast.Name object at 0x7da1b27e1540>, <ast.Name object at 0x7da1b27e3fa0>, <ast.Name object at 0x7da1b27e1870>, <ast.Name object at 0x7da1b27e3220>, <ast.Name object at 0x7da1b27e0eb0>]]]
keyword[def] identifier[get_geo_info] ( identifier[filename] , identifier[band] = literal[int] ): literal[string] identifier[sourceds] = identifier[gdal] . identifier[Open] ( identifier[filename] , identifier[GA_ReadOnly] ) identifier[ndv] = identifier[sourceds] . identifier[GetRasterBand] ( identifier[band] ). identifier[GetNoDataValue] () identifier[xsize] = identifier[sourceds] . identifier[RasterXSize] identifier[ysize] = identifier[sourceds] . identifier[RasterYSize] identifier[geot] = identifier[sourceds] . identifier[GetGeoTransform] () identifier[projection] = identifier[osr] . identifier[SpatialReference] () identifier[projection] . identifier[ImportFromWkt] ( identifier[sourceds] . identifier[GetProjectionRef] ()) identifier[datatype] = identifier[sourceds] . identifier[GetRasterBand] ( identifier[band] ). identifier[DataType] identifier[datatype] = identifier[gdal] . identifier[GetDataTypeName] ( identifier[datatype] ) keyword[return] identifier[ndv] , identifier[xsize] , identifier[ysize] , identifier[geot] , identifier[projection] , identifier[datatype]
def get_geo_info(filename, band=1): """ Gets information from a Raster data set """ sourceds = gdal.Open(filename, GA_ReadOnly) ndv = sourceds.GetRasterBand(band).GetNoDataValue() xsize = sourceds.RasterXSize ysize = sourceds.RasterYSize geot = sourceds.GetGeoTransform() projection = osr.SpatialReference() projection.ImportFromWkt(sourceds.GetProjectionRef()) datatype = sourceds.GetRasterBand(band).DataType datatype = gdal.GetDataTypeName(datatype) return (ndv, xsize, ysize, geot, projection, datatype)
def target_internal_dependencies(target):
    """Returns internal Jarable dependencies that were "directly" declared.

    Directly declared deps are those that are explicitly listed in the definition of a
    target, rather than being depended on transitively. But in order to walk through
    aggregator targets such as `target`, `dependencies`, or `jar_library`, this recursively
    descends the dep graph and stops at Jarable instances."""
    for dep in target.dependencies:
        if isinstance(dep, Jarable):
            # a concrete Jarable terminates the descent
            yield dep
        else:
            # aggregator node: recurse until Jarables are found
            yield from target_internal_dependencies(dep)
def function[target_internal_dependencies, parameter[target]]: constant[Returns internal Jarable dependencies that were "directly" declared. Directly declared deps are those that are explicitly listed in the definition of a target, rather than being depended on transitively. But in order to walk through aggregator targets such as `target`, `dependencies`, or `jar_library`, this recursively descends the dep graph and stops at Jarable instances.] for taget[name[dep]] in starred[name[target].dependencies] begin[:] if call[name[isinstance], parameter[name[dep], name[Jarable]]] begin[:] <ast.Yield object at 0x7da1b22a5e70>
keyword[def] identifier[target_internal_dependencies] ( identifier[target] ): literal[string] keyword[for] identifier[dep] keyword[in] identifier[target] . identifier[dependencies] : keyword[if] identifier[isinstance] ( identifier[dep] , identifier[Jarable] ): keyword[yield] identifier[dep] keyword[else] : keyword[for] identifier[childdep] keyword[in] identifier[target_internal_dependencies] ( identifier[dep] ): keyword[yield] identifier[childdep]
def target_internal_dependencies(target): """Returns internal Jarable dependencies that were "directly" declared. Directly declared deps are those that are explicitly listed in the definition of a target, rather than being depended on transitively. But in order to walk through aggregator targets such as `target`, `dependencies`, or `jar_library`, this recursively descends the dep graph and stops at Jarable instances.""" for dep in target.dependencies: if isinstance(dep, Jarable): yield dep # depends on [control=['if'], data=[]] else: for childdep in target_internal_dependencies(dep): yield childdep # depends on [control=['for'], data=['childdep']] # depends on [control=['for'], data=['dep']]
def add_request_handler_chain(self, request_handler_chain):
    # type: (GenericRequestHandlerChain) -> None
    """Checks the type before adding it to the
    request_handler_chains instance variable.

    :param request_handler_chain: Request Handler Chain instance.
    :type request_handler_chain: RequestHandlerChain
    :raises: :py:class:`ask_sdk_runtime.exceptions.DispatchException`
        if a null input is provided or if the input is of invalid type
    """
    # reject both None and wrong-typed chains before mutating state
    is_valid_chain = (request_handler_chain is not None
                      and isinstance(request_handler_chain, GenericRequestHandlerChain))
    if not is_valid_chain:
        raise DispatchException(
            "Request Handler Chain is not a GenericRequestHandlerChain "
            "instance")
    self._request_handler_chains.append(request_handler_chain)
def function[add_request_handler_chain, parameter[self, request_handler_chain]]: constant[Checks the type before adding it to the request_handler_chains instance variable. :param request_handler_chain: Request Handler Chain instance. :type request_handler_chain: RequestHandlerChain :raises: :py:class:`ask_sdk_runtime.exceptions.DispatchException` if a null input is provided or if the input is of invalid type ] if <ast.BoolOp object at 0x7da1b19ee560> begin[:] <ast.Raise object at 0x7da1b19ee4d0> call[name[self]._request_handler_chains.append, parameter[name[request_handler_chain]]]
keyword[def] identifier[add_request_handler_chain] ( identifier[self] , identifier[request_handler_chain] ): literal[string] keyword[if] identifier[request_handler_chain] keyword[is] keyword[None] keyword[or] keyword[not] identifier[isinstance] ( identifier[request_handler_chain] , identifier[GenericRequestHandlerChain] ): keyword[raise] identifier[DispatchException] ( literal[string] literal[string] ) identifier[self] . identifier[_request_handler_chains] . identifier[append] ( identifier[request_handler_chain] )
def add_request_handler_chain(self, request_handler_chain): # type: (GenericRequestHandlerChain) -> None 'Checks the type before adding it to the\n request_handler_chains instance variable.\n\n :param request_handler_chain: Request Handler Chain instance.\n :type request_handler_chain: RequestHandlerChain\n :raises: :py:class:`ask_sdk_runtime.exceptions.DispatchException`\n if a null input is provided or if the input is of invalid type\n ' if request_handler_chain is None or not isinstance(request_handler_chain, GenericRequestHandlerChain): raise DispatchException('Request Handler Chain is not a GenericRequestHandlerChain instance') # depends on [control=['if'], data=[]] self._request_handler_chains.append(request_handler_chain)
def split_args(self, arg_dict):
    """
    given a dictionary of arguments, split them into args and kwargs
    note: this destroys the arg_dict passed.  if you need it, create a copy
    first.
    """
    # pop == lookup + delete, so positional names are consumed from
    # arg_dict as they are collected; whatever survives is the kwargs
    pos_args = [arg_dict.pop(arg.name) for arg in self.args]
    return pos_args, arg_dict
def function[split_args, parameter[self, arg_dict]]: constant[ given a dictionary of arguments, split them into args and kwargs note: this destroys the arg_dict passed. if you need it, create a copy first. ] variable[pos_args] assign[=] list[[]] for taget[name[arg]] in starred[name[self].args] begin[:] call[name[pos_args].append, parameter[call[name[arg_dict]][name[arg].name]]] <ast.Delete object at 0x7da1b19b54b0> return[tuple[[<ast.Name object at 0x7da1b1ad11e0>, <ast.Name object at 0x7da1b1ad1270>]]]
keyword[def] identifier[split_args] ( identifier[self] , identifier[arg_dict] ): literal[string] identifier[pos_args] =[] keyword[for] identifier[arg] keyword[in] identifier[self] . identifier[args] : identifier[pos_args] . identifier[append] ( identifier[arg_dict] [ identifier[arg] . identifier[name] ]) keyword[del] identifier[arg_dict] [ identifier[arg] . identifier[name] ] keyword[return] identifier[pos_args] , identifier[arg_dict]
def split_args(self, arg_dict): """ given a dictionary of arguments, split them into args and kwargs note: this destroys the arg_dict passed. if you need it, create a copy first. """ pos_args = [] for arg in self.args: pos_args.append(arg_dict[arg.name]) del arg_dict[arg.name] # depends on [control=['for'], data=['arg']] return (pos_args, arg_dict)
def get_access_token(client_id, client_secret):
    '''
    Name: token
    Parameters: client_id, client_secret
    Return: dictionary
    '''
    # OAuth-style client-credentials exchange against the module-level token_url
    response = requests.post(
        token_url,
        data={'client_id': client_id, 'client_secret': client_secret},
        headers={'Content-Type': 'application/x-www-form-urlencoded'},
    )
    if response.status_code != 200:
        # surface the failure details instead of raising
        return {'status': response.status_code, 'message': response.text}
    return response.json()
def function[get_access_token, parameter[client_id, client_secret]]: constant[ Name: token Parameters: client_id, client_secret Return: dictionary ] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da20c6a97b0>], [<ast.Constant object at 0x7da20c6abbb0>]] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da20c6ab670>, <ast.Constant object at 0x7da20c6a86d0>], [<ast.Name object at 0x7da20c6a9240>, <ast.Name object at 0x7da20c6a93f0>]] variable[request] assign[=] call[name[requests].post, parameter[name[token_url]]] if compare[name[request].status_code equal[==] constant[200]] begin[:] variable[token] assign[=] call[name[request].json, parameter[]] return[name[token]] return[dictionary[[<ast.Constant object at 0x7da20c6a9900>, <ast.Constant object at 0x7da20c6a8610>], [<ast.Attribute object at 0x7da20c6aaa10>, <ast.Attribute object at 0x7da20c6aaf50>]]]
keyword[def] identifier[get_access_token] ( identifier[client_id] , identifier[client_secret] ): literal[string] identifier[headers] ={ literal[string] : literal[string] } identifier[payload] ={ literal[string] : identifier[client_id] , literal[string] : identifier[client_secret] } identifier[request] = identifier[requests] . identifier[post] ( identifier[token_url] , identifier[data] = identifier[payload] , identifier[headers] = identifier[headers] ) keyword[if] identifier[request] . identifier[status_code] == literal[int] : identifier[token] = identifier[request] . identifier[json] () keyword[return] identifier[token] keyword[return] { literal[string] : identifier[request] . identifier[status_code] , literal[string] : identifier[request] . identifier[text] }
def get_access_token(client_id, client_secret): """ Name: token Parameters: client_id, client_secret Return: dictionary """ headers = {'Content-Type': 'application/x-www-form-urlencoded'} payload = {'client_id': client_id, 'client_secret': client_secret} request = requests.post(token_url, data=payload, headers=headers) if request.status_code == 200: token = request.json() return token # depends on [control=['if'], data=[]] return {'status': request.status_code, 'message': request.text}
def is_zonefile_cached( zonefile_hash, zonefile_dir, validate=False):
    """
    Do we have the cached zonefile?  It's okay if it's a non-standard zonefile.
    if @validate is true, then check that the data in zonefile_dir_path/zonefile.txt matches zonefile_hash

    Return True if so
    Return False if not
    """
    # both the current and the legacy on-disk layouts may hold the zonefile
    candidates = [
        atlas_zonefile_path(zonefile_dir, zonefile_hash),
        atlas_zonefile_path_legacy(zonefile_dir, zonefile_hash),
    ]
    for candidate in candidates:
        if not os.path.exists(candidate):
            continue

        if not validate:
            # existence alone is enough
            return True

        # confirm the file's contents actually hash to zonefile_hash
        if _read_atlas_zonefile(candidate, zonefile_hash):
            return True

    return False
def function[is_zonefile_cached, parameter[zonefile_hash, zonefile_dir, validate]]: constant[ Do we have the cached zonefile? It's okay if it's a non-standard zonefile. if @validate is true, then check that the data in zonefile_dir_path/zonefile.txt matches zonefile_hash Return True if so Return False if not ] variable[zonefile_path] assign[=] call[name[atlas_zonefile_path], parameter[name[zonefile_dir], name[zonefile_hash]]] variable[zonefile_path_legacy] assign[=] call[name[atlas_zonefile_path_legacy], parameter[name[zonefile_dir], name[zonefile_hash]]] variable[res] assign[=] constant[False] for taget[name[zfp]] in starred[list[[<ast.Name object at 0x7da20c76f4c0>, <ast.Name object at 0x7da20c76d210>]]] begin[:] if <ast.UnaryOp object at 0x7da20c76c550> begin[:] continue if name[validate] begin[:] variable[data] assign[=] call[name[_read_atlas_zonefile], parameter[name[zfp], name[zonefile_hash]]] if name[data] begin[:] variable[res] assign[=] constant[True] break return[name[res]]
keyword[def] identifier[is_zonefile_cached] ( identifier[zonefile_hash] , identifier[zonefile_dir] , identifier[validate] = keyword[False] ): literal[string] identifier[zonefile_path] = identifier[atlas_zonefile_path] ( identifier[zonefile_dir] , identifier[zonefile_hash] ) identifier[zonefile_path_legacy] = identifier[atlas_zonefile_path_legacy] ( identifier[zonefile_dir] , identifier[zonefile_hash] ) identifier[res] = keyword[False] keyword[for] identifier[zfp] keyword[in] [ identifier[zonefile_path] , identifier[zonefile_path_legacy] ]: keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[zfp] ): keyword[continue] keyword[if] identifier[validate] : identifier[data] = identifier[_read_atlas_zonefile] ( identifier[zfp] , identifier[zonefile_hash] ) keyword[if] identifier[data] : identifier[res] = keyword[True] keyword[break] keyword[else] : identifier[res] = keyword[True] keyword[break] keyword[return] identifier[res]
def is_zonefile_cached(zonefile_hash, zonefile_dir, validate=False): """ Do we have the cached zonefile? It's okay if it's a non-standard zonefile. if @validate is true, then check that the data in zonefile_dir_path/zonefile.txt matches zonefile_hash Return True if so Return False if not """ zonefile_path = atlas_zonefile_path(zonefile_dir, zonefile_hash) zonefile_path_legacy = atlas_zonefile_path_legacy(zonefile_dir, zonefile_hash) res = False for zfp in [zonefile_path, zonefile_path_legacy]: if not os.path.exists(zfp): continue # depends on [control=['if'], data=[]] if validate: data = _read_atlas_zonefile(zfp, zonefile_hash) if data: # yup! res = True break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: res = True break # depends on [control=['for'], data=['zfp']] return res
def generate_model_name(raml_resource):
    """ Generate model name.

    :param raml_resource: Instance of ramlfications.raml.ResourceNode.
    """
    # strip slashes, then turn every non-word character into a space
    uri = get_resource_uri(raml_resource).strip('/')
    words = re.sub(r'\W', ' ', uri)
    # Titleize, singularize, and collapse the spaces into a CamelCase name
    titleized = inflection.titleize(words)
    return inflection.singularize(titleized).replace(' ', '')
def function[generate_model_name, parameter[raml_resource]]: constant[ Generate model name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. ] variable[resource_uri] assign[=] call[call[name[get_resource_uri], parameter[name[raml_resource]]].strip, parameter[constant[/]]] variable[resource_uri] assign[=] call[name[re].sub, parameter[constant[\W], constant[ ], name[resource_uri]]] variable[model_name] assign[=] call[name[inflection].titleize, parameter[name[resource_uri]]] return[call[call[name[inflection].singularize, parameter[name[model_name]]].replace, parameter[constant[ ], constant[]]]]
keyword[def] identifier[generate_model_name] ( identifier[raml_resource] ): literal[string] identifier[resource_uri] = identifier[get_resource_uri] ( identifier[raml_resource] ). identifier[strip] ( literal[string] ) identifier[resource_uri] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[resource_uri] ) identifier[model_name] = identifier[inflection] . identifier[titleize] ( identifier[resource_uri] ) keyword[return] identifier[inflection] . identifier[singularize] ( identifier[model_name] ). identifier[replace] ( literal[string] , literal[string] )
def generate_model_name(raml_resource): """ Generate model name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ resource_uri = get_resource_uri(raml_resource).strip('/') resource_uri = re.sub('\\W', ' ', resource_uri) model_name = inflection.titleize(resource_uri) return inflection.singularize(model_name).replace(' ', '')
def accessible_to(self, user):
    """
    returns all the items that are accessible to the specified user
    if user is not authenticated will return public items
    :param user: an user instance
    """
    if user.is_superuser:
        # superusers see everything; managers expose get_queryset(),
        # plain querysets are returned as-is
        try:
            return self.get_queryset()
        except AttributeError:
            return self

    if user.is_authenticated():
        # the user's group with the highest id determines the access level
        top_group = user.groups.all().order_by('-id')[0]
        level = ACCESS_LEVELS.get(top_group.name)
    else:
        level = ACCESS_LEVELS.get('public')

    return self.filter(access_level__lte=level)
def function[accessible_to, parameter[self, user]]: constant[ returns all the items that are accessible to the specified user if user is not authenticated will return public items :param user: an user instance ] if name[user].is_superuser begin[:] <ast.Try object at 0x7da20c6e4bb0> return[name[queryset]]
keyword[def] identifier[accessible_to] ( identifier[self] , identifier[user] ): literal[string] keyword[if] identifier[user] . identifier[is_superuser] : keyword[try] : identifier[queryset] = identifier[self] . identifier[get_queryset] () keyword[except] identifier[AttributeError] : identifier[queryset] = identifier[self] keyword[elif] identifier[user] . identifier[is_authenticated] (): identifier[group] = identifier[user] . identifier[groups] . identifier[all] (). identifier[order_by] ( literal[string] )[ literal[int] ] identifier[queryset] = identifier[self] . identifier[filter] ( identifier[access_level__lte] = identifier[ACCESS_LEVELS] . identifier[get] ( identifier[group] . identifier[name] )) keyword[else] : identifier[queryset] = identifier[self] . identifier[filter] ( identifier[access_level__lte] = identifier[ACCESS_LEVELS] . identifier[get] ( literal[string] )) keyword[return] identifier[queryset]
def accessible_to(self, user): """ returns all the items that are accessible to the specified user if user is not authenticated will return public items :param user: an user instance """ if user.is_superuser: try: queryset = self.get_queryset() # depends on [control=['try'], data=[]] except AttributeError: queryset = self # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif user.is_authenticated(): # get user group (higher id) group = user.groups.all().order_by('-id')[0] queryset = self.filter(access_level__lte=ACCESS_LEVELS.get(group.name)) # depends on [control=['if'], data=[]] else: queryset = self.filter(access_level__lte=ACCESS_LEVELS.get('public')) return queryset
def validate_python_version(config, actual_py_version=None):
    # type: (Config, Optional[str]) -> None
    """Validate configuration matches a specific python version.

    If the ``actual_py_version`` is not provided, it will default
    to the major/minor version of the currently running python
    interpreter.

    :param actual_py_version: The major/minor python version in
        the form "pythonX.Y", e.g "python2.7", "python3.6".

    """
    expected_version = config.lambda_python_version
    if actual_py_version is None:
        # derive "pythonX.Y" from the running interpreter
        actual_py_version = 'python%s.%s' % sys.version_info[:2]
    if actual_py_version == expected_version:
        return
    # We're not making this a hard error for now, but we may
    # turn this into a hard fail.
    warnings.warn("You are currently running %s, but the closest "
                  "supported version on AWS Lambda is %s\n"
                  "Please use %s, otherwise you may run into "
                  "deployment issues. " %
                  (actual_py_version, expected_version, expected_version),
                  stacklevel=2)
def function[validate_python_version, parameter[config, actual_py_version]]: constant[Validate configuration matches a specific python version. If the ``actual_py_version`` is not provided, it will default to the major/minor version of the currently running python interpreter. :param actual_py_version: The major/minor python version in the form "pythonX.Y", e.g "python2.7", "python3.6". ] variable[lambda_version] assign[=] name[config].lambda_python_version if compare[name[actual_py_version] is constant[None]] begin[:] variable[actual_py_version] assign[=] binary_operation[constant[python%s.%s] <ast.Mod object at 0x7da2590d6920> call[name[sys].version_info][<ast.Slice object at 0x7da2054a6230>]] if compare[name[actual_py_version] not_equal[!=] name[lambda_version]] begin[:] call[name[warnings].warn, parameter[binary_operation[constant[You are currently running %s, but the closest supported version on AWS Lambda is %s Please use %s, otherwise you may run into deployment issues. ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2054a46a0>, <ast.Name object at 0x7da2054a5150>, <ast.Name object at 0x7da2054a61a0>]]]]]
keyword[def] identifier[validate_python_version] ( identifier[config] , identifier[actual_py_version] = keyword[None] ): literal[string] identifier[lambda_version] = identifier[config] . identifier[lambda_python_version] keyword[if] identifier[actual_py_version] keyword[is] keyword[None] : identifier[actual_py_version] = literal[string] % identifier[sys] . identifier[version_info] [: literal[int] ] keyword[if] identifier[actual_py_version] != identifier[lambda_version] : identifier[warnings] . identifier[warn] ( literal[string] literal[string] literal[string] literal[string] % ( identifier[actual_py_version] , identifier[lambda_version] , identifier[lambda_version] ), identifier[stacklevel] = literal[int] )
def validate_python_version(config, actual_py_version=None): # type: (Config, Optional[str]) -> None 'Validate configuration matches a specific python version.\n\n If the ``actual_py_version`` is not provided, it will default\n to the major/minor version of the currently running python\n interpreter.\n\n :param actual_py_version: The major/minor python version in\n the form "pythonX.Y", e.g "python2.7", "python3.6".\n\n ' lambda_version = config.lambda_python_version if actual_py_version is None: actual_py_version = 'python%s.%s' % sys.version_info[:2] # depends on [control=['if'], data=['actual_py_version']] if actual_py_version != lambda_version: # We're not making this a hard error for now, but we may # turn this into a hard fail. warnings.warn('You are currently running %s, but the closest supported version on AWS Lambda is %s\nPlease use %s, otherwise you may run into deployment issues. ' % (actual_py_version, lambda_version, lambda_version), stacklevel=2) # depends on [control=['if'], data=['actual_py_version', 'lambda_version']]
def _completion_checker(async_id, context_id):
    """Check if all Async jobs within a Context have been run."""
    if not context_id:
        logging.debug("Context for async %s does not exist", async_id)
        return None

    ctx = FuriousContext.from_id(context_id)
    completion_marker = FuriousCompletionMarker.get_by_id(context_id)

    # another worker may have already finalized this context
    if completion_marker and completion_marker.complete:
        logging.info("Context %s already complete" % context_id)
        return True

    # the current async just ran, so drop it from the outstanding set
    remaining = ctx.task_ids
    if async_id in remaining:
        remaining.remove(async_id)

    logging.debug("Loaded context.")
    logging.debug(remaining)

    all_done, has_errors = _check_markers(remaining)
    if not all_done:
        return False

    _mark_context_complete(completion_marker, ctx, has_errors)
    return True
def function[_completion_checker, parameter[async_id, context_id]]: constant[Check if all Async jobs within a Context have been run.] if <ast.UnaryOp object at 0x7da20c7ca710> begin[:] call[name[logging].debug, parameter[constant[Context for async %s does not exist], name[async_id]]] return[None] variable[context] assign[=] call[name[FuriousContext].from_id, parameter[name[context_id]]] variable[marker] assign[=] call[name[FuriousCompletionMarker].get_by_id, parameter[name[context_id]]] if <ast.BoolOp object at 0x7da20c7c8eb0> begin[:] call[name[logging].info, parameter[binary_operation[constant[Context %s already complete] <ast.Mod object at 0x7da2590d6920> name[context_id]]]] return[constant[True]] variable[task_ids] assign[=] name[context].task_ids if compare[name[async_id] in name[task_ids]] begin[:] call[name[task_ids].remove, parameter[name[async_id]]] call[name[logging].debug, parameter[constant[Loaded context.]]] call[name[logging].debug, parameter[name[task_ids]]] <ast.Tuple object at 0x7da18ede5ba0> assign[=] call[name[_check_markers], parameter[name[task_ids]]] if <ast.UnaryOp object at 0x7da18ede7400> begin[:] return[constant[False]] call[name[_mark_context_complete], parameter[name[marker], name[context], name[has_errors]]] return[constant[True]]
keyword[def] identifier[_completion_checker] ( identifier[async_id] , identifier[context_id] ): literal[string] keyword[if] keyword[not] identifier[context_id] : identifier[logging] . identifier[debug] ( literal[string] , identifier[async_id] ) keyword[return] identifier[context] = identifier[FuriousContext] . identifier[from_id] ( identifier[context_id] ) identifier[marker] = identifier[FuriousCompletionMarker] . identifier[get_by_id] ( identifier[context_id] ) keyword[if] identifier[marker] keyword[and] identifier[marker] . identifier[complete] : identifier[logging] . identifier[info] ( literal[string] % identifier[context_id] ) keyword[return] keyword[True] identifier[task_ids] = identifier[context] . identifier[task_ids] keyword[if] identifier[async_id] keyword[in] identifier[task_ids] : identifier[task_ids] . identifier[remove] ( identifier[async_id] ) identifier[logging] . identifier[debug] ( literal[string] ) identifier[logging] . identifier[debug] ( identifier[task_ids] ) identifier[done] , identifier[has_errors] = identifier[_check_markers] ( identifier[task_ids] ) keyword[if] keyword[not] identifier[done] : keyword[return] keyword[False] identifier[_mark_context_complete] ( identifier[marker] , identifier[context] , identifier[has_errors] ) keyword[return] keyword[True]
def _completion_checker(async_id, context_id): """Check if all Async jobs within a Context have been run.""" if not context_id: logging.debug('Context for async %s does not exist', async_id) return # depends on [control=['if'], data=[]] context = FuriousContext.from_id(context_id) marker = FuriousCompletionMarker.get_by_id(context_id) if marker and marker.complete: logging.info('Context %s already complete' % context_id) return True # depends on [control=['if'], data=[]] task_ids = context.task_ids if async_id in task_ids: task_ids.remove(async_id) # depends on [control=['if'], data=['async_id', 'task_ids']] logging.debug('Loaded context.') logging.debug(task_ids) (done, has_errors) = _check_markers(task_ids) if not done: return False # depends on [control=['if'], data=[]] _mark_context_complete(marker, context, has_errors) return True
async def removeKeyPair(self, *args, **kwargs):
    """
    Ensure a KeyPair for a given worker type does not exist

    Ensure that a keypair of a given name does not exist.

    This method is ``experimental``
    """
    # thin async passthrough to the generated API-call machinery
    api_entry = self.funcinfo["removeKeyPair"]
    return await self._makeApiCall(api_entry, *args, **kwargs)
<ast.AsyncFunctionDef object at 0x7da204961780>
keyword[async] keyword[def] identifier[removeKeyPair] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] keyword[await] identifier[self] . identifier[_makeApiCall] ( identifier[self] . identifier[funcinfo] [ literal[string] ],* identifier[args] ,** identifier[kwargs] )
async def removeKeyPair(self, *args, **kwargs): """ Ensure a KeyPair for a given worker type does not exist Ensure that a keypair of a given name does not exist. This method is ``experimental`` """ return await self._makeApiCall(self.funcinfo['removeKeyPair'], *args, **kwargs)
def parse_host(parser, event, node): """Parse and return the host entity if that is the next entity <!ELEMENT HOST (#PCDATA)> """ #pylint: disable=unused-argument host = '' (next_event, next_node) = six.next(parser) if next_event == pulldom.CHARACTERS: host = next_node.nodeValue (next_event, next_node) = six.next(parser) if not _is_end(next_event, next_node, 'HOST'): raise ParseError('Expecting end HOST') return host
def function[parse_host, parameter[parser, event, node]]: constant[Parse and return the host entity if that is the next entity <!ELEMENT HOST (#PCDATA)> ] variable[host] assign[=] constant[] <ast.Tuple object at 0x7da204567be0> assign[=] call[name[six].next, parameter[name[parser]]] if compare[name[next_event] equal[==] name[pulldom].CHARACTERS] begin[:] variable[host] assign[=] name[next_node].nodeValue <ast.Tuple object at 0x7da204566a40> assign[=] call[name[six].next, parameter[name[parser]]] if <ast.UnaryOp object at 0x7da204564970> begin[:] <ast.Raise object at 0x7da204565180> return[name[host]]
keyword[def] identifier[parse_host] ( identifier[parser] , identifier[event] , identifier[node] ): literal[string] identifier[host] = literal[string] ( identifier[next_event] , identifier[next_node] )= identifier[six] . identifier[next] ( identifier[parser] ) keyword[if] identifier[next_event] == identifier[pulldom] . identifier[CHARACTERS] : identifier[host] = identifier[next_node] . identifier[nodeValue] ( identifier[next_event] , identifier[next_node] )= identifier[six] . identifier[next] ( identifier[parser] ) keyword[if] keyword[not] identifier[_is_end] ( identifier[next_event] , identifier[next_node] , literal[string] ): keyword[raise] identifier[ParseError] ( literal[string] ) keyword[return] identifier[host]
def parse_host(parser, event, node): """Parse and return the host entity if that is the next entity <!ELEMENT HOST (#PCDATA)> """ #pylint: disable=unused-argument host = '' (next_event, next_node) = six.next(parser) if next_event == pulldom.CHARACTERS: host = next_node.nodeValue (next_event, next_node) = six.next(parser) # depends on [control=['if'], data=['next_event']] if not _is_end(next_event, next_node, 'HOST'): raise ParseError('Expecting end HOST') # depends on [control=['if'], data=[]] return host
def slots(self): """Get all the slots for this node. :returns: The names of slots for this class. If the class doesn't define any slot, through the ``__slots__`` variable, then this function will return a None. Also, it will return None in the case the slots were not inferred. :rtype: list(str) or None """ def grouped_slots(): # Not interested in object, since it can't have slots. for cls in self.mro()[:-1]: try: cls_slots = cls._slots() except NotImplementedError: continue if cls_slots is not None: yield from cls_slots else: yield None if not self.newstyle: raise NotImplementedError( "The concept of slots is undefined for old-style classes." ) slots = list(grouped_slots()) if not all(slot is not None for slot in slots): return None return sorted(slots, key=lambda item: item.value)
def function[slots, parameter[self]]: constant[Get all the slots for this node. :returns: The names of slots for this class. If the class doesn't define any slot, through the ``__slots__`` variable, then this function will return a None. Also, it will return None in the case the slots were not inferred. :rtype: list(str) or None ] def function[grouped_slots, parameter[]]: for taget[name[cls]] in starred[call[call[name[self].mro, parameter[]]][<ast.Slice object at 0x7da1b1eb4d30>]] begin[:] <ast.Try object at 0x7da1b1eb4640> if compare[name[cls_slots] is_not constant[None]] begin[:] <ast.YieldFrom object at 0x7da1b1eb5330> if <ast.UnaryOp object at 0x7da1b1eb74f0> begin[:] <ast.Raise object at 0x7da1b1eb6380> variable[slots] assign[=] call[name[list], parameter[call[name[grouped_slots], parameter[]]]] if <ast.UnaryOp object at 0x7da1b1eb4430> begin[:] return[constant[None]] return[call[name[sorted], parameter[name[slots]]]]
keyword[def] identifier[slots] ( identifier[self] ): literal[string] keyword[def] identifier[grouped_slots] (): keyword[for] identifier[cls] keyword[in] identifier[self] . identifier[mro] ()[:- literal[int] ]: keyword[try] : identifier[cls_slots] = identifier[cls] . identifier[_slots] () keyword[except] identifier[NotImplementedError] : keyword[continue] keyword[if] identifier[cls_slots] keyword[is] keyword[not] keyword[None] : keyword[yield] keyword[from] identifier[cls_slots] keyword[else] : keyword[yield] keyword[None] keyword[if] keyword[not] identifier[self] . identifier[newstyle] : keyword[raise] identifier[NotImplementedError] ( literal[string] ) identifier[slots] = identifier[list] ( identifier[grouped_slots] ()) keyword[if] keyword[not] identifier[all] ( identifier[slot] keyword[is] keyword[not] keyword[None] keyword[for] identifier[slot] keyword[in] identifier[slots] ): keyword[return] keyword[None] keyword[return] identifier[sorted] ( identifier[slots] , identifier[key] = keyword[lambda] identifier[item] : identifier[item] . identifier[value] )
def slots(self): """Get all the slots for this node. :returns: The names of slots for this class. If the class doesn't define any slot, through the ``__slots__`` variable, then this function will return a None. Also, it will return None in the case the slots were not inferred. :rtype: list(str) or None """ def grouped_slots(): # Not interested in object, since it can't have slots. for cls in self.mro()[:-1]: try: cls_slots = cls._slots() # depends on [control=['try'], data=[]] except NotImplementedError: continue # depends on [control=['except'], data=[]] if cls_slots is not None: yield from cls_slots # depends on [control=['if'], data=['cls_slots']] else: yield None # depends on [control=['for'], data=['cls']] if not self.newstyle: raise NotImplementedError('The concept of slots is undefined for old-style classes.') # depends on [control=['if'], data=[]] slots = list(grouped_slots()) if not all((slot is not None for slot in slots)): return None # depends on [control=['if'], data=[]] return sorted(slots, key=lambda item: item.value)
def cast_to_subclass(self): """ Load the bundle file from the database to get the derived bundle class, then return a new bundle built on that class :return: """ self.import_lib() self.load_requirements() try: self.commit() # To ensure the rollback() doesn't clear out anything important bsf = self.build_source_files.file(File.BSFILE.BUILD) except Exception as e: self.log('Error trying to create a bundle source file ... {} '.format(e)) raise self.rollback() return self try: clz = bsf.import_bundle() except Exception as e: raise BundleError('Failed to load bundle code file, skipping : {}'.format(e)) b = clz(self._dataset, self._library, self._source_url, self._build_url) b.limited_run = self.limited_run b.capture_exceptions = self.capture_exceptions b.multi = self.multi return b
def function[cast_to_subclass, parameter[self]]: constant[ Load the bundle file from the database to get the derived bundle class, then return a new bundle built on that class :return: ] call[name[self].import_lib, parameter[]] call[name[self].load_requirements, parameter[]] <ast.Try object at 0x7da20c991030> <ast.Try object at 0x7da20c993df0> variable[b] assign[=] call[name[clz], parameter[name[self]._dataset, name[self]._library, name[self]._source_url, name[self]._build_url]] name[b].limited_run assign[=] name[self].limited_run name[b].capture_exceptions assign[=] name[self].capture_exceptions name[b].multi assign[=] name[self].multi return[name[b]]
keyword[def] identifier[cast_to_subclass] ( identifier[self] ): literal[string] identifier[self] . identifier[import_lib] () identifier[self] . identifier[load_requirements] () keyword[try] : identifier[self] . identifier[commit] () identifier[bsf] = identifier[self] . identifier[build_source_files] . identifier[file] ( identifier[File] . identifier[BSFILE] . identifier[BUILD] ) keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[self] . identifier[log] ( literal[string] . identifier[format] ( identifier[e] )) keyword[raise] identifier[self] . identifier[rollback] () keyword[return] identifier[self] keyword[try] : identifier[clz] = identifier[bsf] . identifier[import_bundle] () keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[BundleError] ( literal[string] . identifier[format] ( identifier[e] )) identifier[b] = identifier[clz] ( identifier[self] . identifier[_dataset] , identifier[self] . identifier[_library] , identifier[self] . identifier[_source_url] , identifier[self] . identifier[_build_url] ) identifier[b] . identifier[limited_run] = identifier[self] . identifier[limited_run] identifier[b] . identifier[capture_exceptions] = identifier[self] . identifier[capture_exceptions] identifier[b] . identifier[multi] = identifier[self] . identifier[multi] keyword[return] identifier[b]
def cast_to_subclass(self): """ Load the bundle file from the database to get the derived bundle class, then return a new bundle built on that class :return: """ self.import_lib() self.load_requirements() try: self.commit() # To ensure the rollback() doesn't clear out anything important bsf = self.build_source_files.file(File.BSFILE.BUILD) # depends on [control=['try'], data=[]] except Exception as e: self.log('Error trying to create a bundle source file ... {} '.format(e)) raise self.rollback() return self # depends on [control=['except'], data=['e']] try: clz = bsf.import_bundle() # depends on [control=['try'], data=[]] except Exception as e: raise BundleError('Failed to load bundle code file, skipping : {}'.format(e)) # depends on [control=['except'], data=['e']] b = clz(self._dataset, self._library, self._source_url, self._build_url) b.limited_run = self.limited_run b.capture_exceptions = self.capture_exceptions b.multi = self.multi return b
def convert_trees(self, ptb_trees, representation='basic', universal=True, include_punct=True, include_erased=False, **kwargs): """Convert a list of Penn Treebank formatted strings (ptb_trees) into Stanford Dependencies. The dependencies are represented as a list of sentences (CoNLL.Corpus), where each sentence (CoNLL.Sentence) is itself a list of CoNLL.Token objects. Currently supported representations are 'basic', 'collapsed', 'CCprocessed', and 'collapsedTree' which behave the same as they in the CoreNLP command line tools. (note that in the online CoreNLP demo, 'collapsed' is called 'enhanced') Additional arguments: universal (if True, use universal dependencies if they're available), include_punct (if False, punctuation tokens will not be included), and include_erased (if False and your representation might erase tokens, those tokens will be omitted from the output). See documentation on your backend to see if it supports further options.""" kwargs.update(representation=representation, universal=universal, include_punct=include_punct, include_erased=include_erased) return Corpus(self.convert_tree(ptb_tree, **kwargs) for ptb_tree in ptb_trees)
def function[convert_trees, parameter[self, ptb_trees, representation, universal, include_punct, include_erased]]: constant[Convert a list of Penn Treebank formatted strings (ptb_trees) into Stanford Dependencies. The dependencies are represented as a list of sentences (CoNLL.Corpus), where each sentence (CoNLL.Sentence) is itself a list of CoNLL.Token objects. Currently supported representations are 'basic', 'collapsed', 'CCprocessed', and 'collapsedTree' which behave the same as they in the CoreNLP command line tools. (note that in the online CoreNLP demo, 'collapsed' is called 'enhanced') Additional arguments: universal (if True, use universal dependencies if they're available), include_punct (if False, punctuation tokens will not be included), and include_erased (if False and your representation might erase tokens, those tokens will be omitted from the output). See documentation on your backend to see if it supports further options.] call[name[kwargs].update, parameter[]] return[call[name[Corpus], parameter[<ast.GeneratorExp object at 0x7da1b1a8c370>]]]
keyword[def] identifier[convert_trees] ( identifier[self] , identifier[ptb_trees] , identifier[representation] = literal[string] , identifier[universal] = keyword[True] , identifier[include_punct] = keyword[True] , identifier[include_erased] = keyword[False] ,** identifier[kwargs] ): literal[string] identifier[kwargs] . identifier[update] ( identifier[representation] = identifier[representation] , identifier[universal] = identifier[universal] , identifier[include_punct] = identifier[include_punct] , identifier[include_erased] = identifier[include_erased] ) keyword[return] identifier[Corpus] ( identifier[self] . identifier[convert_tree] ( identifier[ptb_tree] ,** identifier[kwargs] ) keyword[for] identifier[ptb_tree] keyword[in] identifier[ptb_trees] )
def convert_trees(self, ptb_trees, representation='basic', universal=True, include_punct=True, include_erased=False, **kwargs): """Convert a list of Penn Treebank formatted strings (ptb_trees) into Stanford Dependencies. The dependencies are represented as a list of sentences (CoNLL.Corpus), where each sentence (CoNLL.Sentence) is itself a list of CoNLL.Token objects. Currently supported representations are 'basic', 'collapsed', 'CCprocessed', and 'collapsedTree' which behave the same as they in the CoreNLP command line tools. (note that in the online CoreNLP demo, 'collapsed' is called 'enhanced') Additional arguments: universal (if True, use universal dependencies if they're available), include_punct (if False, punctuation tokens will not be included), and include_erased (if False and your representation might erase tokens, those tokens will be omitted from the output). See documentation on your backend to see if it supports further options.""" kwargs.update(representation=representation, universal=universal, include_punct=include_punct, include_erased=include_erased) return Corpus((self.convert_tree(ptb_tree, **kwargs) for ptb_tree in ptb_trees))
def _pca_weights( weights, pc, threshold=None, label_threshold=None, label_weights=None, optimize_label_iter=OPTIMIZE_LABEL_ITER_DEFAULT, **kwargs ): """ :param weights: :param pc: :param threshold: :param label_threshold: :param label_weights: :param kwargs: :return: """ fig = plt.figure(figsize=(8, 8)) ax = fig.add_subplot(1,1,1) ax.plot(weights.iloc[:, pc].values) ylim = np.max( np.abs( weights.values ) ) * 1.1 ax.set_ylim( -ylim, +ylim ) ax.set_xlim(0, weights.shape[0]) ax.set_aspect(1./ax.get_data_ratio()) wts = weights.iloc[:, pc] texts = [] if threshold: if label_threshold is None: label_threshold = threshold if label_weights: FILTER_UP = wts.values >= label_threshold FILTER_DOWN = wts.values <= -label_threshold FILTER = FILTER_UP | FILTER_DOWN wti = np.arange(0, weights.shape[0]) wti = wti[FILTER] idxs = get_index_list( wts.index.names, label_weights ) for x in wti: y = wts.iloc[x] t = ax.text(x, y, build_combined_label( wts.index.values[x], idxs), bbox=dict(boxstyle='round,pad=0.3', fc='#ffffff', ec='none', alpha=0.4)) texts.append(t) if texts and optimize_label_iter: adjust_text( texts, lim=optimize_label_iter, arrowprops=dict(arrowstyle='->', color='red') ) ax.axhline(threshold, 0, 1) ax.axhline(-threshold, 0, 1) ax.set_ylabel("Weights on Principal Component %d" % (pc+1), fontsize=16) fig.tight_layout() return ax
def function[_pca_weights, parameter[weights, pc, threshold, label_threshold, label_weights, optimize_label_iter]]: constant[ :param weights: :param pc: :param threshold: :param label_threshold: :param label_weights: :param kwargs: :return: ] variable[fig] assign[=] call[name[plt].figure, parameter[]] variable[ax] assign[=] call[name[fig].add_subplot, parameter[constant[1], constant[1], constant[1]]] call[name[ax].plot, parameter[call[name[weights].iloc][tuple[[<ast.Slice object at 0x7da20e9551e0>, <ast.Name object at 0x7da20e955cc0>]]].values]] variable[ylim] assign[=] binary_operation[call[name[np].max, parameter[call[name[np].abs, parameter[name[weights].values]]]] * constant[1.1]] call[name[ax].set_ylim, parameter[<ast.UnaryOp object at 0x7da20e954a30>, <ast.UnaryOp object at 0x7da20e9560e0>]] call[name[ax].set_xlim, parameter[constant[0], call[name[weights].shape][constant[0]]]] call[name[ax].set_aspect, parameter[binary_operation[constant[1.0] / call[name[ax].get_data_ratio, parameter[]]]]] variable[wts] assign[=] call[name[weights].iloc][tuple[[<ast.Slice object at 0x7da20e954220>, <ast.Name object at 0x7da20e9546d0>]]] variable[texts] assign[=] list[[]] if name[threshold] begin[:] if compare[name[label_threshold] is constant[None]] begin[:] variable[label_threshold] assign[=] name[threshold] if name[label_weights] begin[:] variable[FILTER_UP] assign[=] compare[name[wts].values greater_or_equal[>=] name[label_threshold]] variable[FILTER_DOWN] assign[=] compare[name[wts].values less_or_equal[<=] <ast.UnaryOp object at 0x7da20e956e00>] variable[FILTER] assign[=] binary_operation[name[FILTER_UP] <ast.BitOr object at 0x7da2590d6aa0> name[FILTER_DOWN]] variable[wti] assign[=] call[name[np].arange, parameter[constant[0], call[name[weights].shape][constant[0]]]] variable[wti] assign[=] call[name[wti]][name[FILTER]] variable[idxs] assign[=] call[name[get_index_list], parameter[name[wts].index.names, name[label_weights]]] for taget[name[x]] in starred[name[wti]] 
begin[:] variable[y] assign[=] call[name[wts].iloc][name[x]] variable[t] assign[=] call[name[ax].text, parameter[name[x], name[y], call[name[build_combined_label], parameter[call[name[wts].index.values][name[x]], name[idxs]]]]] call[name[texts].append, parameter[name[t]]] if <ast.BoolOp object at 0x7da20e955660> begin[:] call[name[adjust_text], parameter[name[texts]]] call[name[ax].axhline, parameter[name[threshold], constant[0], constant[1]]] call[name[ax].axhline, parameter[<ast.UnaryOp object at 0x7da20e955f90>, constant[0], constant[1]]] call[name[ax].set_ylabel, parameter[binary_operation[constant[Weights on Principal Component %d] <ast.Mod object at 0x7da2590d6920> binary_operation[name[pc] + constant[1]]]]] call[name[fig].tight_layout, parameter[]] return[name[ax]]
keyword[def] identifier[_pca_weights] ( identifier[weights] , identifier[pc] , identifier[threshold] = keyword[None] , identifier[label_threshold] = keyword[None] , identifier[label_weights] = keyword[None] , identifier[optimize_label_iter] = identifier[OPTIMIZE_LABEL_ITER_DEFAULT] , ** identifier[kwargs] ): literal[string] identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] )) identifier[ax] = identifier[fig] . identifier[add_subplot] ( literal[int] , literal[int] , literal[int] ) identifier[ax] . identifier[plot] ( identifier[weights] . identifier[iloc] [:, identifier[pc] ]. identifier[values] ) identifier[ylim] = identifier[np] . identifier[max] ( identifier[np] . identifier[abs] ( identifier[weights] . identifier[values] ))* literal[int] identifier[ax] . identifier[set_ylim] (- identifier[ylim] ,+ identifier[ylim] ) identifier[ax] . identifier[set_xlim] ( literal[int] , identifier[weights] . identifier[shape] [ literal[int] ]) identifier[ax] . identifier[set_aspect] ( literal[int] / identifier[ax] . identifier[get_data_ratio] ()) identifier[wts] = identifier[weights] . identifier[iloc] [:, identifier[pc] ] identifier[texts] =[] keyword[if] identifier[threshold] : keyword[if] identifier[label_threshold] keyword[is] keyword[None] : identifier[label_threshold] = identifier[threshold] keyword[if] identifier[label_weights] : identifier[FILTER_UP] = identifier[wts] . identifier[values] >= identifier[label_threshold] identifier[FILTER_DOWN] = identifier[wts] . identifier[values] <=- identifier[label_threshold] identifier[FILTER] = identifier[FILTER_UP] | identifier[FILTER_DOWN] identifier[wti] = identifier[np] . identifier[arange] ( literal[int] , identifier[weights] . identifier[shape] [ literal[int] ]) identifier[wti] = identifier[wti] [ identifier[FILTER] ] identifier[idxs] = identifier[get_index_list] ( identifier[wts] . identifier[index] . 
identifier[names] , identifier[label_weights] ) keyword[for] identifier[x] keyword[in] identifier[wti] : identifier[y] = identifier[wts] . identifier[iloc] [ identifier[x] ] identifier[t] = identifier[ax] . identifier[text] ( identifier[x] , identifier[y] , identifier[build_combined_label] ( identifier[wts] . identifier[index] . identifier[values] [ identifier[x] ], identifier[idxs] ), identifier[bbox] = identifier[dict] ( identifier[boxstyle] = literal[string] , identifier[fc] = literal[string] , identifier[ec] = literal[string] , identifier[alpha] = literal[int] )) identifier[texts] . identifier[append] ( identifier[t] ) keyword[if] identifier[texts] keyword[and] identifier[optimize_label_iter] : identifier[adjust_text] ( identifier[texts] , identifier[lim] = identifier[optimize_label_iter] , identifier[arrowprops] = identifier[dict] ( identifier[arrowstyle] = literal[string] , identifier[color] = literal[string] ) ) identifier[ax] . identifier[axhline] ( identifier[threshold] , literal[int] , literal[int] ) identifier[ax] . identifier[axhline] (- identifier[threshold] , literal[int] , literal[int] ) identifier[ax] . identifier[set_ylabel] ( literal[string] %( identifier[pc] + literal[int] ), identifier[fontsize] = literal[int] ) identifier[fig] . identifier[tight_layout] () keyword[return] identifier[ax]
def _pca_weights(weights, pc, threshold=None, label_threshold=None, label_weights=None, optimize_label_iter=OPTIMIZE_LABEL_ITER_DEFAULT, **kwargs): """ :param weights: :param pc: :param threshold: :param label_threshold: :param label_weights: :param kwargs: :return: """ fig = plt.figure(figsize=(8, 8)) ax = fig.add_subplot(1, 1, 1) ax.plot(weights.iloc[:, pc].values) ylim = np.max(np.abs(weights.values)) * 1.1 ax.set_ylim(-ylim, +ylim) ax.set_xlim(0, weights.shape[0]) ax.set_aspect(1.0 / ax.get_data_ratio()) wts = weights.iloc[:, pc] texts = [] if threshold: if label_threshold is None: label_threshold = threshold # depends on [control=['if'], data=['label_threshold']] if label_weights: FILTER_UP = wts.values >= label_threshold FILTER_DOWN = wts.values <= -label_threshold FILTER = FILTER_UP | FILTER_DOWN wti = np.arange(0, weights.shape[0]) wti = wti[FILTER] idxs = get_index_list(wts.index.names, label_weights) for x in wti: y = wts.iloc[x] t = ax.text(x, y, build_combined_label(wts.index.values[x], idxs), bbox=dict(boxstyle='round,pad=0.3', fc='#ffffff', ec='none', alpha=0.4)) texts.append(t) # depends on [control=['for'], data=['x']] # depends on [control=['if'], data=[]] if texts and optimize_label_iter: adjust_text(texts, lim=optimize_label_iter, arrowprops=dict(arrowstyle='->', color='red')) # depends on [control=['if'], data=[]] ax.axhline(threshold, 0, 1) ax.axhline(-threshold, 0, 1) # depends on [control=['if'], data=[]] ax.set_ylabel('Weights on Principal Component %d' % (pc + 1), fontsize=16) fig.tight_layout() return ax
def mkdummy(name, **attrs): """Make a placeholder object that uses its own name for its repr""" return type( name, (), dict(__repr__=(lambda self: "<%s>" % name), **attrs) )()
def function[mkdummy, parameter[name]]: constant[Make a placeholder object that uses its own name for its repr] return[call[call[name[type], parameter[name[name], tuple[[]], call[name[dict], parameter[]]]], parameter[]]]
keyword[def] identifier[mkdummy] ( identifier[name] ,** identifier[attrs] ): literal[string] keyword[return] identifier[type] ( identifier[name] ,(), identifier[dict] ( identifier[__repr__] =( keyword[lambda] identifier[self] : literal[string] % identifier[name] ),** identifier[attrs] ) )()
def mkdummy(name, **attrs): """Make a placeholder object that uses its own name for its repr""" return type(name, (), dict(__repr__=lambda self: '<%s>' % name, **attrs))()
def paga_compare( adata, basis=None, edges=False, color=None, alpha=None, groups=None, components=None, projection='2d', legend_loc='on data', legend_fontsize=None, legend_fontweight='bold', color_map=None, palette=None, frameon=False, size=None, title=None, right_margin=None, left_margin=0.05, show=None, save=None, title_graph=None, groups_graph=None, **paga_graph_params): """Scatter and PAGA graph side-by-side. Consists in a scatter plot and the abstracted graph. See :func:`~scanpy.api.pl.paga` for all related parameters. See :func:`~scanpy.api.pl.paga_path` for visualizing gene changes along paths through the abstracted graph. Additional parameters are as follows. Parameters ---------- adata : :class:`~anndata.AnnData` Annotated data matrix. kwds_scatter : `dict` Keywords for :func:`~scanpy.api.pl.scatter`. kwds_paga : `dict` Keywords for :func:`~scanpy.api.pl.paga`. Returns ------- A list of `matplotlib.axes.Axes` if `show` is `False`. """ axs, _, _, _ = utils.setup_axes(panels=[0, 1], right_margin=right_margin) if color is None: color = adata.uns['paga']['groups'] suptitle = None # common title for entire figure if title_graph is None: suptitle = color if title is None else title title, title_graph = '', '' if basis is None: if 'X_draw_graph_fa' in adata.obsm.keys(): basis = 'draw_graph_fa' elif 'X_umap' in adata.obsm.keys(): basis = 'umap' elif 'X_tsne' in adata.obsm.keys(): basis = 'tsne' elif 'X_draw_graph_fr' in adata.obsm.keys(): basis = 'draw_graph_fr' else: basis = 'umap' from .scatterplots import plot_scatter plot_scatter( adata, ax=axs[0], basis=basis, color=color, edges=edges, alpha=alpha, groups=groups, components=components, legend_loc=legend_loc, legend_fontsize=legend_fontsize, legend_fontweight=legend_fontweight, color_map=color_map, palette=palette, frameon=frameon, size=size, title=title, show=False, save=False) if 'pos' not in paga_graph_params: if color == adata.uns['paga']['groups']: paga_graph_params['pos'] = utils._tmp_cluster_pos else: 
paga_graph_params['pos'] = adata.uns['paga']['pos'] xlim, ylim = axs[0].get_xlim(), axs[0].get_ylim() axs[1].set_xlim(xlim) axs[1].set_ylim(ylim) if 'labels' in paga_graph_params: labels = paga_graph_params.pop('labels') else: labels = groups_graph paga( adata, ax=axs[1], show=False, save=False, title=title_graph, labels=labels, colors=color, frameon=frameon, **paga_graph_params) if suptitle is not None: pl.suptitle(suptitle) utils.savefig_or_show('paga_compare', show=show, save=save) if show == False: return axs
def function[paga_compare, parameter[adata, basis, edges, color, alpha, groups, components, projection, legend_loc, legend_fontsize, legend_fontweight, color_map, palette, frameon, size, title, right_margin, left_margin, show, save, title_graph, groups_graph]]: constant[Scatter and PAGA graph side-by-side. Consists in a scatter plot and the abstracted graph. See :func:`~scanpy.api.pl.paga` for all related parameters. See :func:`~scanpy.api.pl.paga_path` for visualizing gene changes along paths through the abstracted graph. Additional parameters are as follows. Parameters ---------- adata : :class:`~anndata.AnnData` Annotated data matrix. kwds_scatter : `dict` Keywords for :func:`~scanpy.api.pl.scatter`. kwds_paga : `dict` Keywords for :func:`~scanpy.api.pl.paga`. Returns ------- A list of `matplotlib.axes.Axes` if `show` is `False`. ] <ast.Tuple object at 0x7da18f09c0a0> assign[=] call[name[utils].setup_axes, parameter[]] if compare[name[color] is constant[None]] begin[:] variable[color] assign[=] call[call[name[adata].uns][constant[paga]]][constant[groups]] variable[suptitle] assign[=] constant[None] if compare[name[title_graph] is constant[None]] begin[:] variable[suptitle] assign[=] <ast.IfExp object at 0x7da18f09edd0> <ast.Tuple object at 0x7da18f09f7c0> assign[=] tuple[[<ast.Constant object at 0x7da18f09ce80>, <ast.Constant object at 0x7da18f09e260>]] if compare[name[basis] is constant[None]] begin[:] if compare[constant[X_draw_graph_fa] in call[name[adata].obsm.keys, parameter[]]] begin[:] variable[basis] assign[=] constant[draw_graph_fa] from relative_module[scatterplots] import module[plot_scatter] call[name[plot_scatter], parameter[name[adata]]] if compare[constant[pos] <ast.NotIn object at 0x7da2590d7190> name[paga_graph_params]] begin[:] if compare[name[color] equal[==] call[call[name[adata].uns][constant[paga]]][constant[groups]]] begin[:] call[name[paga_graph_params]][constant[pos]] assign[=] name[utils]._tmp_cluster_pos <ast.Tuple object at 
0x7da20c794730> assign[=] tuple[[<ast.Call object at 0x7da20c794460>, <ast.Call object at 0x7da20c794250>]] call[call[name[axs]][constant[1]].set_xlim, parameter[name[xlim]]] call[call[name[axs]][constant[1]].set_ylim, parameter[name[ylim]]] if compare[constant[labels] in name[paga_graph_params]] begin[:] variable[labels] assign[=] call[name[paga_graph_params].pop, parameter[constant[labels]]] call[name[paga], parameter[name[adata]]] if compare[name[suptitle] is_not constant[None]] begin[:] call[name[pl].suptitle, parameter[name[suptitle]]] call[name[utils].savefig_or_show, parameter[constant[paga_compare]]] if compare[name[show] equal[==] constant[False]] begin[:] return[name[axs]]
keyword[def] identifier[paga_compare] ( identifier[adata] , identifier[basis] = keyword[None] , identifier[edges] = keyword[False] , identifier[color] = keyword[None] , identifier[alpha] = keyword[None] , identifier[groups] = keyword[None] , identifier[components] = keyword[None] , identifier[projection] = literal[string] , identifier[legend_loc] = literal[string] , identifier[legend_fontsize] = keyword[None] , identifier[legend_fontweight] = literal[string] , identifier[color_map] = keyword[None] , identifier[palette] = keyword[None] , identifier[frameon] = keyword[False] , identifier[size] = keyword[None] , identifier[title] = keyword[None] , identifier[right_margin] = keyword[None] , identifier[left_margin] = literal[int] , identifier[show] = keyword[None] , identifier[save] = keyword[None] , identifier[title_graph] = keyword[None] , identifier[groups_graph] = keyword[None] , ** identifier[paga_graph_params] ): literal[string] identifier[axs] , identifier[_] , identifier[_] , identifier[_] = identifier[utils] . identifier[setup_axes] ( identifier[panels] =[ literal[int] , literal[int] ], identifier[right_margin] = identifier[right_margin] ) keyword[if] identifier[color] keyword[is] keyword[None] : identifier[color] = identifier[adata] . identifier[uns] [ literal[string] ][ literal[string] ] identifier[suptitle] = keyword[None] keyword[if] identifier[title_graph] keyword[is] keyword[None] : identifier[suptitle] = identifier[color] keyword[if] identifier[title] keyword[is] keyword[None] keyword[else] identifier[title] identifier[title] , identifier[title_graph] = literal[string] , literal[string] keyword[if] identifier[basis] keyword[is] keyword[None] : keyword[if] literal[string] keyword[in] identifier[adata] . identifier[obsm] . identifier[keys] (): identifier[basis] = literal[string] keyword[elif] literal[string] keyword[in] identifier[adata] . identifier[obsm] . 
identifier[keys] (): identifier[basis] = literal[string] keyword[elif] literal[string] keyword[in] identifier[adata] . identifier[obsm] . identifier[keys] (): identifier[basis] = literal[string] keyword[elif] literal[string] keyword[in] identifier[adata] . identifier[obsm] . identifier[keys] (): identifier[basis] = literal[string] keyword[else] : identifier[basis] = literal[string] keyword[from] . identifier[scatterplots] keyword[import] identifier[plot_scatter] identifier[plot_scatter] ( identifier[adata] , identifier[ax] = identifier[axs] [ literal[int] ], identifier[basis] = identifier[basis] , identifier[color] = identifier[color] , identifier[edges] = identifier[edges] , identifier[alpha] = identifier[alpha] , identifier[groups] = identifier[groups] , identifier[components] = identifier[components] , identifier[legend_loc] = identifier[legend_loc] , identifier[legend_fontsize] = identifier[legend_fontsize] , identifier[legend_fontweight] = identifier[legend_fontweight] , identifier[color_map] = identifier[color_map] , identifier[palette] = identifier[palette] , identifier[frameon] = identifier[frameon] , identifier[size] = identifier[size] , identifier[title] = identifier[title] , identifier[show] = keyword[False] , identifier[save] = keyword[False] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[paga_graph_params] : keyword[if] identifier[color] == identifier[adata] . identifier[uns] [ literal[string] ][ literal[string] ]: identifier[paga_graph_params] [ literal[string] ]= identifier[utils] . identifier[_tmp_cluster_pos] keyword[else] : identifier[paga_graph_params] [ literal[string] ]= identifier[adata] . identifier[uns] [ literal[string] ][ literal[string] ] identifier[xlim] , identifier[ylim] = identifier[axs] [ literal[int] ]. identifier[get_xlim] (), identifier[axs] [ literal[int] ]. identifier[get_ylim] () identifier[axs] [ literal[int] ]. identifier[set_xlim] ( identifier[xlim] ) identifier[axs] [ literal[int] ]. 
identifier[set_ylim] ( identifier[ylim] ) keyword[if] literal[string] keyword[in] identifier[paga_graph_params] : identifier[labels] = identifier[paga_graph_params] . identifier[pop] ( literal[string] ) keyword[else] : identifier[labels] = identifier[groups_graph] identifier[paga] ( identifier[adata] , identifier[ax] = identifier[axs] [ literal[int] ], identifier[show] = keyword[False] , identifier[save] = keyword[False] , identifier[title] = identifier[title_graph] , identifier[labels] = identifier[labels] , identifier[colors] = identifier[color] , identifier[frameon] = identifier[frameon] , ** identifier[paga_graph_params] ) keyword[if] identifier[suptitle] keyword[is] keyword[not] keyword[None] : identifier[pl] . identifier[suptitle] ( identifier[suptitle] ) identifier[utils] . identifier[savefig_or_show] ( literal[string] , identifier[show] = identifier[show] , identifier[save] = identifier[save] ) keyword[if] identifier[show] == keyword[False] : keyword[return] identifier[axs]
def paga_compare(adata, basis=None, edges=False, color=None, alpha=None, groups=None, components=None, projection='2d', legend_loc='on data', legend_fontsize=None, legend_fontweight='bold', color_map=None, palette=None, frameon=False, size=None, title=None, right_margin=None, left_margin=0.05, show=None, save=None, title_graph=None, groups_graph=None, **paga_graph_params): """Scatter and PAGA graph side-by-side. Consists in a scatter plot and the abstracted graph. See :func:`~scanpy.api.pl.paga` for all related parameters. See :func:`~scanpy.api.pl.paga_path` for visualizing gene changes along paths through the abstracted graph. Additional parameters are as follows. Parameters ---------- adata : :class:`~anndata.AnnData` Annotated data matrix. kwds_scatter : `dict` Keywords for :func:`~scanpy.api.pl.scatter`. kwds_paga : `dict` Keywords for :func:`~scanpy.api.pl.paga`. Returns ------- A list of `matplotlib.axes.Axes` if `show` is `False`. """ (axs, _, _, _) = utils.setup_axes(panels=[0, 1], right_margin=right_margin) if color is None: color = adata.uns['paga']['groups'] # depends on [control=['if'], data=['color']] suptitle = None # common title for entire figure if title_graph is None: suptitle = color if title is None else title (title, title_graph) = ('', '') # depends on [control=['if'], data=['title_graph']] if basis is None: if 'X_draw_graph_fa' in adata.obsm.keys(): basis = 'draw_graph_fa' # depends on [control=['if'], data=[]] elif 'X_umap' in adata.obsm.keys(): basis = 'umap' # depends on [control=['if'], data=[]] elif 'X_tsne' in adata.obsm.keys(): basis = 'tsne' # depends on [control=['if'], data=[]] elif 'X_draw_graph_fr' in adata.obsm.keys(): basis = 'draw_graph_fr' # depends on [control=['if'], data=[]] else: basis = 'umap' # depends on [control=['if'], data=['basis']] from .scatterplots import plot_scatter plot_scatter(adata, ax=axs[0], basis=basis, color=color, edges=edges, alpha=alpha, groups=groups, components=components, legend_loc=legend_loc, 
legend_fontsize=legend_fontsize, legend_fontweight=legend_fontweight, color_map=color_map, palette=palette, frameon=frameon, size=size, title=title, show=False, save=False) if 'pos' not in paga_graph_params: if color == adata.uns['paga']['groups']: paga_graph_params['pos'] = utils._tmp_cluster_pos # depends on [control=['if'], data=[]] else: paga_graph_params['pos'] = adata.uns['paga']['pos'] # depends on [control=['if'], data=['paga_graph_params']] (xlim, ylim) = (axs[0].get_xlim(), axs[0].get_ylim()) axs[1].set_xlim(xlim) axs[1].set_ylim(ylim) if 'labels' in paga_graph_params: labels = paga_graph_params.pop('labels') # depends on [control=['if'], data=['paga_graph_params']] else: labels = groups_graph paga(adata, ax=axs[1], show=False, save=False, title=title_graph, labels=labels, colors=color, frameon=frameon, **paga_graph_params) if suptitle is not None: pl.suptitle(suptitle) # depends on [control=['if'], data=['suptitle']] utils.savefig_or_show('paga_compare', show=show, save=save) if show == False: return axs # depends on [control=['if'], data=[]]
def _get_buckets_cache_filename():
    '''
    Return the filename of the cache for bucket contents.
    Create the path if it does not exist.

    :return: absolute path of ``buckets_files.cache`` inside the cache dir
    '''
    cache_dir = _get_cache_dir()
    # Create the directory unconditionally and tolerate "already exists"
    # instead of the old exists()/makedirs() pair, which could raise
    # OSError when two processes initialized the cache concurrently
    # (check-then-create race).
    try:
        os.makedirs(cache_dir)
    except OSError:
        if not os.path.isdir(cache_dir):
            raise
    return os.path.join(cache_dir, 'buckets_files.cache')
def function[_get_buckets_cache_filename, parameter[]]: constant[ Return the filename of the cache for bucket contents. Create the path if it does not exist. ] variable[cache_dir] assign[=] call[name[_get_cache_dir], parameter[]] if <ast.UnaryOp object at 0x7da1b2098a00> begin[:] call[name[os].makedirs, parameter[name[cache_dir]]] return[call[name[os].path.join, parameter[name[cache_dir], constant[buckets_files.cache]]]]
keyword[def] identifier[_get_buckets_cache_filename] (): literal[string] identifier[cache_dir] = identifier[_get_cache_dir] () keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[cache_dir] ): identifier[os] . identifier[makedirs] ( identifier[cache_dir] ) keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[cache_dir] , literal[string] )
def _get_buckets_cache_filename(): """ Return the filename of the cache for bucket contents. Create the path if it does not exist. """ cache_dir = _get_cache_dir() if not os.path.exists(cache_dir): os.makedirs(cache_dir) # depends on [control=['if'], data=[]] return os.path.join(cache_dir, 'buckets_files.cache')
def zrange(self, name, start, end, desc=False, withscores=False,
           score_cast_func=float):
    """
    Asynchronously fetch the sorted-set members of key ``name`` whose
    rank lies in the ``start``..``end`` range.

    :param name: str, the name of the redis key
    :param start: first rank to include
    :param end: last rank to include
    :param desc: when True, iterate from highest to lowest score
    :param withscores: when True, resolve to (member, score) pairs
    :param score_cast_func: callable used to cast the raw scores
    :return: a Future that is resolved once the pipeline executes
    """
    with self.pipe as pipe:
        future = Future()
        reply = pipe.zrange(
            self.redis_key(name), start, end,
            desc=desc,
            withscores=withscores,
            score_cast_func=score_cast_func)

        def _deliver():
            # Decode the raw redis members after the pipeline has run.
            if withscores:
                future.set([(self.valueparse.decode(member), score)
                            for member, score in reply.result])
            else:
                future.set([self.valueparse.decode(member)
                            for member in reply.result])

        pipe.on_execute(_deliver)
        return future
def function[zrange, parameter[self, name, start, end, desc, withscores, score_cast_func]]: constant[ Returns all the elements including between ``start`` (non included) and ``stop`` (included). :param name: str the name of the redis key :param start: :param end: :param desc: :param withscores: :param score_cast_func: :return: ] with name[self].pipe begin[:] variable[f] assign[=] call[name[Future], parameter[]] variable[res] assign[=] call[name[pipe].zrange, parameter[call[name[self].redis_key, parameter[name[name]]], name[start], name[end]]] def function[cb, parameter[]]: if name[withscores] begin[:] call[name[f].set, parameter[<ast.ListComp object at 0x7da1b0b804f0>]] call[name[pipe].on_execute, parameter[name[cb]]] return[name[f]]
keyword[def] identifier[zrange] ( identifier[self] , identifier[name] , identifier[start] , identifier[end] , identifier[desc] = keyword[False] , identifier[withscores] = keyword[False] , identifier[score_cast_func] = identifier[float] ): literal[string] keyword[with] identifier[self] . identifier[pipe] keyword[as] identifier[pipe] : identifier[f] = identifier[Future] () identifier[res] = identifier[pipe] . identifier[zrange] ( identifier[self] . identifier[redis_key] ( identifier[name] ), identifier[start] , identifier[end] , identifier[desc] = identifier[desc] , identifier[withscores] = identifier[withscores] , identifier[score_cast_func] = identifier[score_cast_func] ) keyword[def] identifier[cb] (): keyword[if] identifier[withscores] : identifier[f] . identifier[set] ([( identifier[self] . identifier[valueparse] . identifier[decode] ( identifier[v] ), identifier[s] ) keyword[for] identifier[v] , identifier[s] keyword[in] identifier[res] . identifier[result] ]) keyword[else] : identifier[f] . identifier[set] ([ identifier[self] . identifier[valueparse] . identifier[decode] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[res] . identifier[result] ]) identifier[pipe] . identifier[on_execute] ( identifier[cb] ) keyword[return] identifier[f]
def zrange(self, name, start, end, desc=False, withscores=False, score_cast_func=float): """ Returns all the elements including between ``start`` (non included) and ``stop`` (included). :param name: str the name of the redis key :param start: :param end: :param desc: :param withscores: :param score_cast_func: :return: """ with self.pipe as pipe: f = Future() res = pipe.zrange(self.redis_key(name), start, end, desc=desc, withscores=withscores, score_cast_func=score_cast_func) def cb(): if withscores: f.set([(self.valueparse.decode(v), s) for (v, s) in res.result]) # depends on [control=['if'], data=[]] else: f.set([self.valueparse.decode(v) for v in res.result]) pipe.on_execute(cb) return f # depends on [control=['with'], data=['pipe']]
def from_timeseries(cls, timeSeries):
    """Create a new Matrix instance from a TimeSeries or MultiDimensionalTimeSeries

    :param TimeSeries timeSeries:    The TimeSeries, which should be used to
        create a new Matrix.

    :return:    A Matrix with the values of the timeSeries. Each row of the
        Matrix represents one entry of the timeSeries. The time of an entry
        is ignored in the matrix.
    :rtype:     Matrix

    :raise:   Raises an :py:exc:`ValueError`, if the timeSeries is empty.
    """
    # A plain TimeSeries carries one value column; a multi-dimensional
    # series reports its own column count.
    width = 1
    if isinstance(timeSeries, MultiDimensionalTimeSeries):
        width = timeSeries.dimension_count()

    # One accumulator list per value column (column-based layout).
    matrixData = [[] for dummy in xrange(width)]
    for entry in timeSeries:
        # entry[0] is the timestamp and is deliberately skipped; the
        # remaining positions hold the values of the dimensions.
        for col in xrange(1, len(entry)):
            matrixData[col - 1].append(entry[col])

    # An empty first column means the series yielded no entries at all.
    if not matrixData[0]:
        raise ValueError("Cannot create Matrix from empty Timeseries")

    # Column-based construction: rows = dimension count,
    # columns = number of time-series entries.
    mtrx = Matrix.from_two_dim_array(len(matrixData), len(matrixData[0]), matrixData)
#        mtrx.initialize(matrixData, rowBased=False)
    return mtrx
def function[from_timeseries, parameter[cls, timeSeries]]: constant[Create a new Matrix instance from a TimeSeries or MultiDimensionalTimeSeries :param TimeSeries timeSeries: The TimeSeries, which should be used to create a new Matrix. :return: A Matrix with the values of the timeSeries. Each row of the Matrix represents one entry of the timeSeries. The time of an entry is ignored in the matrix. :rtype: Matrix :raise: Raises an :py:exc:`ValueError`, if the timeSeries is empty. ] variable[width] assign[=] constant[1] if call[name[isinstance], parameter[name[timeSeries], name[MultiDimensionalTimeSeries]]] begin[:] variable[width] assign[=] call[name[timeSeries].dimension_count, parameter[]] variable[matrixData] assign[=] <ast.ListComp object at 0x7da18ede57e0> for taget[name[entry]] in starred[name[timeSeries]] begin[:] for taget[name[col]] in starred[call[name[xrange], parameter[constant[1], call[name[len], parameter[name[entry]]]]]] begin[:] call[call[name[matrixData]][binary_operation[name[col] - constant[1]]].append, parameter[call[name[entry]][name[col]]]] if <ast.UnaryOp object at 0x7da18ede5ab0> begin[:] <ast.Raise object at 0x7da18ede61d0> variable[mtrx] assign[=] call[name[Matrix].from_two_dim_array, parameter[call[name[len], parameter[name[matrixData]]], call[name[len], parameter[call[name[matrixData]][constant[0]]]], name[matrixData]]] return[name[mtrx]]
keyword[def] identifier[from_timeseries] ( identifier[cls] , identifier[timeSeries] ): literal[string] identifier[width] = literal[int] keyword[if] identifier[isinstance] ( identifier[timeSeries] , identifier[MultiDimensionalTimeSeries] ): identifier[width] = identifier[timeSeries] . identifier[dimension_count] () identifier[matrixData] =[[] keyword[for] identifier[dummy] keyword[in] identifier[xrange] ( identifier[width] )] keyword[for] identifier[entry] keyword[in] identifier[timeSeries] : keyword[for] identifier[col] keyword[in] identifier[xrange] ( literal[int] , identifier[len] ( identifier[entry] )): identifier[matrixData] [ identifier[col] - literal[int] ]. identifier[append] ( identifier[entry] [ identifier[col] ]) keyword[if] keyword[not] identifier[matrixData] [ literal[int] ]: keyword[raise] identifier[ValueError] ( literal[string] ) identifier[mtrx] = identifier[Matrix] . identifier[from_two_dim_array] ( identifier[len] ( identifier[matrixData] ), identifier[len] ( identifier[matrixData] [ literal[int] ]), identifier[matrixData] ) keyword[return] identifier[mtrx]
def from_timeseries(cls, timeSeries): """Create a new Matrix instance from a TimeSeries or MultiDimensionalTimeSeries :param TimeSeries timeSeries: The TimeSeries, which should be used to create a new Matrix. :return: A Matrix with the values of the timeSeries. Each row of the Matrix represents one entry of the timeSeries. The time of an entry is ignored in the matrix. :rtype: Matrix :raise: Raises an :py:exc:`ValueError`, if the timeSeries is empty. """ width = 1 if isinstance(timeSeries, MultiDimensionalTimeSeries): width = timeSeries.dimension_count() # depends on [control=['if'], data=[]] matrixData = [[] for dummy in xrange(width)] for entry in timeSeries: for col in xrange(1, len(entry)): matrixData[col - 1].append(entry[col]) # depends on [control=['for'], data=['col']] # depends on [control=['for'], data=['entry']] if not matrixData[0]: raise ValueError('Cannot create Matrix from empty Timeseries') # depends on [control=['if'], data=[]] mtrx = Matrix.from_two_dim_array(len(matrixData), len(matrixData[0]), matrixData) # mtrx.initialize(matrixData, rowBased=False) return mtrx
def cmd_http_options(server, verbose):
    """Retrieve the available HTTP methods of a web server.

    Example:

    \b
    $ habu.http.options -v http://google.com
    {
        "allowed": "GET, HEAD"
    }

    :param server: base URL of the server to query
    :param verbose: when truthy, enable INFO logging and progress messages
    :return: always True (command-style entry point)
    """
    if verbose:
        logging.basicConfig(level=logging.INFO, format='%(message)s')
        print("[-] Retrieving the HTTP headers of the server...")

    options = get_options(server)

    # isinstance is the idiomatic type test; the original
    # `type(options) is dict` would also reject dict subclasses.
    if isinstance(options, dict):
        print(json.dumps(options, indent=4))
        if verbose:
            print("[+] HTTP options from {} retrieved".format(server))
    else:
        # get_options signalled failure with a plain message value.
        print("[X] {}".format(options), file=sys.stderr)

    return True
def function[cmd_http_options, parameter[server, verbose]]: constant[Retrieve the available HTTP methods of a web server. Example:  $ habu.http.options -v http://google.com { "allowed": "GET, HEAD" } ] if name[verbose] begin[:] call[name[logging].basicConfig, parameter[]] if name[verbose] begin[:] call[name[print], parameter[constant[[-] Retrieving the HTTP headers of the server...]]] variable[options] assign[=] call[name[get_options], parameter[name[server]]] if compare[call[name[type], parameter[name[options]]] is name[dict]] begin[:] call[name[print], parameter[call[name[json].dumps, parameter[name[options]]]]] if name[verbose] begin[:] call[name[print], parameter[call[constant[[+] HTTP options from {} retrieved].format, parameter[name[server]]]]] return[constant[True]]
keyword[def] identifier[cmd_http_options] ( identifier[server] , identifier[verbose] ): literal[string] keyword[if] identifier[verbose] : identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[logging] . identifier[INFO] , identifier[format] = literal[string] ) keyword[if] identifier[verbose] : identifier[print] ( literal[string] ) identifier[options] = identifier[get_options] ( identifier[server] ) keyword[if] identifier[type] ( identifier[options] ) keyword[is] identifier[dict] : identifier[print] ( identifier[json] . identifier[dumps] ( identifier[options] , identifier[indent] = literal[int] )) keyword[if] identifier[verbose] : identifier[print] ( literal[string] . identifier[format] ( identifier[server] )) keyword[else] : identifier[print] ( literal[string] . identifier[format] ( identifier[options] ), identifier[file] = identifier[sys] . identifier[stderr] ) keyword[return] keyword[True]
def cmd_http_options(server, verbose): """Retrieve the available HTTP methods of a web server. Example: \x08 $ habu.http.options -v http://google.com { "allowed": "GET, HEAD" } """ if verbose: logging.basicConfig(level=logging.INFO, format='%(message)s') # depends on [control=['if'], data=[]] if verbose: print('[-] Retrieving the HTTP headers of the server...') # depends on [control=['if'], data=[]] options = get_options(server) if type(options) is dict: print(json.dumps(options, indent=4)) if verbose: print('[+] HTTP options from {} retrieved'.format(server)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: print('[X] {}'.format(options), file=sys.stderr) return True
def get_signalcheck(self, sar, **params):
    """get_signalcheck - perform a signal check.

    Parameters
    ----------
    sar : dict
        signal-api-request specified as a dictionary of parameters.
        All of these parameters are optional. For details check
        https://api.postcode.nl/documentation/signal-api-example.

    returns : a response dictionary
    """
    # NOTE(review): anything passed via **params is silently discarded
    # here -- 'sar' replaces it entirely. Confirm whether the extra
    # keyword arguments were ever meant to be merged into the request.
    params = sar
    endpoint = 'rest/signal/check'
    # The 'sar'-request dictionary should be sent as valid JSON data, so
    # we need to convert it to JSON
    # when we construct the request in API.request
    retValue = self._API__request(endpoint, 'POST', params=params, convJSON=True)
    return retValue
def function[get_signalcheck, parameter[self, sar]]: constant[get_signalcheck - perform a signal check. Parameters ---------- sar : dict signal-api-request specified as a dictionary of parameters. All of these parameters are optional. For details check https://api.postcode.nl/documentation/signal-api-example. returns : a response dictionary ] variable[params] assign[=] name[sar] variable[endpoint] assign[=] constant[rest/signal/check] variable[retValue] assign[=] call[name[self]._API__request, parameter[name[endpoint], constant[POST]]] return[name[retValue]]
keyword[def] identifier[get_signalcheck] ( identifier[self] , identifier[sar] ,** identifier[params] ): literal[string] identifier[params] = identifier[sar] identifier[endpoint] = literal[string] identifier[retValue] = identifier[self] . identifier[_API__request] ( identifier[endpoint] , literal[string] , identifier[params] = identifier[params] , identifier[convJSON] = keyword[True] ) keyword[return] identifier[retValue]
def get_signalcheck(self, sar, **params): """get_signalcheck - perform a signal check. Parameters ---------- sar : dict signal-api-request specified as a dictionary of parameters. All of these parameters are optional. For details check https://api.postcode.nl/documentation/signal-api-example. returns : a response dictionary """ params = sar endpoint = 'rest/signal/check' # The 'sar'-request dictionary should be sent as valid JSON data, so # we need to convert it to JSON # when we construct the request in API.request retValue = self._API__request(endpoint, 'POST', params=params, convJSON=True) return retValue
def get_scigraph_nodes(id_list)-> Iterator[Dict]:
    """
    Queries scigraph neighbors to get a list of nodes back

    We use the scigraph neighbors function because ids can be sent in
    batch which is faster than iteratively querying solr search
    or the scigraph graph/id function

    :param id_list: sequence of ids to resolve (must support len() and
        slicing, as it is chunked below)
    :return: json decoded result from scigraph_ontology._neighbors_graph
    :raises ValueError: If id is not in scigraph
    """
    scigraph = OntologyFactory().create('scigraph:data')

    # Batch the ids 400 per request. len(id_list) is used directly:
    # the original len(list(id_list)) copied a sequence that is sliced
    # here anyway, so the copy was pure overhead.
    chunks = [id_list[i:i + 400] for i in range(0, len(id_list), 400)]
    for chunk in chunks:
        params = {
            'id': chunk,
            'depth': 0
        }

        try:
            result_graph = scigraph._neighbors_graph(**params)
            for node in result_graph['nodes']:
                yield node
        except JSONDecodeError as exception:
            # Assume json decode is due to an incorrect class ID
            # Should we handle this?
            raise ValueError(exception.doc)
def function[get_scigraph_nodes, parameter[id_list]]: constant[ Queries scigraph neighbors to get a list of nodes back We use the scigraph neighbors function because ids can be sent in batch which is faster than iteratively querying solr search or the scigraph graph/id function :return: json decoded result from scigraph_ontology._neighbors_graph :raises ValueError: If id is not in scigraph ] variable[scigraph] assign[=] call[call[name[OntologyFactory], parameter[]].create, parameter[constant[scigraph:data]]] variable[chunks] assign[=] <ast.ListComp object at 0x7da1b0747190> for taget[name[chunk]] in starred[name[chunks]] begin[:] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b083db40>, <ast.Constant object at 0x7da1b083ce20>], [<ast.Name object at 0x7da1b083d540>, <ast.Constant object at 0x7da1b083d8d0>]] <ast.Try object at 0x7da1b083cbe0>
keyword[def] identifier[get_scigraph_nodes] ( identifier[id_list] )-> identifier[Iterator] [ identifier[Dict] ]: literal[string] identifier[scigraph] = identifier[OntologyFactory] (). identifier[create] ( literal[string] ) identifier[chunks] =[ identifier[id_list] [ identifier[i] : identifier[i] + literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[list] ( identifier[id_list] )), literal[int] )] keyword[for] identifier[chunk] keyword[in] identifier[chunks] : identifier[params] ={ literal[string] : identifier[chunk] , literal[string] : literal[int] } keyword[try] : identifier[result_graph] = identifier[scigraph] . identifier[_neighbors_graph] (** identifier[params] ) keyword[for] identifier[node] keyword[in] identifier[result_graph] [ literal[string] ]: keyword[yield] identifier[node] keyword[except] identifier[JSONDecodeError] keyword[as] identifier[exception] : keyword[raise] identifier[ValueError] ( identifier[exception] . identifier[doc] )
def get_scigraph_nodes(id_list) -> Iterator[Dict]: """ Queries scigraph neighbors to get a list of nodes back We use the scigraph neighbors function because ids can be sent in batch which is faster than iteratively querying solr search or the scigraph graph/id function :return: json decoded result from scigraph_ontology._neighbors_graph :raises ValueError: If id is not in scigraph """ scigraph = OntologyFactory().create('scigraph:data') chunks = [id_list[i:i + 400] for i in range(0, len(list(id_list)), 400)] for chunk in chunks: params = {'id': chunk, 'depth': 0} try: result_graph = scigraph._neighbors_graph(**params) for node in result_graph['nodes']: yield node # depends on [control=['for'], data=['node']] # depends on [control=['try'], data=[]] except JSONDecodeError as exception: # Assume json decode is due to an incorrect class ID # Should we handle this? raise ValueError(exception.doc) # depends on [control=['except'], data=['exception']] # depends on [control=['for'], data=['chunk']]
def sanitize_for_archive(url, headers, payload):
    """Sanitize the headers of a HTTP request by stripping the private
    token before the request is stored in / retrieved from the archive.

    :param: url: HTTP url request
    :param: headers: HTTP headers request (mutated in place when the
        token header is present; may be None)
    :param: payload: HTTP payload request

    :returns url, headers and the sanitized payload
    """
    token_header = 'PRIVATE-TOKEN'
    if headers and token_header in headers:
        # Membership was just checked, so a plain del is safe here.
        del headers[token_header]
    return url, headers, payload
def function[sanitize_for_archive, parameter[url, headers, payload]]: constant[Sanitize payload of a HTTP request by removing the token information before storing/retrieving archived items :param: url: HTTP url request :param: headers: HTTP headers request :param: payload: HTTP payload request :returns url, headers and the sanitized payload ] if <ast.BoolOp object at 0x7da1b0284100> begin[:] call[name[headers].pop, parameter[constant[PRIVATE-TOKEN], constant[None]]] return[tuple[[<ast.Name object at 0x7da1b02840a0>, <ast.Name object at 0x7da1b0286d40>, <ast.Name object at 0x7da1b0285810>]]]
keyword[def] identifier[sanitize_for_archive] ( identifier[url] , identifier[headers] , identifier[payload] ): literal[string] keyword[if] identifier[headers] keyword[and] literal[string] keyword[in] identifier[headers] : identifier[headers] . identifier[pop] ( literal[string] , keyword[None] ) keyword[return] identifier[url] , identifier[headers] , identifier[payload]
def sanitize_for_archive(url, headers, payload): """Sanitize payload of a HTTP request by removing the token information before storing/retrieving archived items :param: url: HTTP url request :param: headers: HTTP headers request :param: payload: HTTP payload request :returns url, headers and the sanitized payload """ if headers and 'PRIVATE-TOKEN' in headers: headers.pop('PRIVATE-TOKEN', None) # depends on [control=['if'], data=[]] return (url, headers, payload)
def createKeyboardTab(self):
    '''
    KEYBOARD tab: one ControlPanelButton per key code, in keypad/QWERTY
    row order.
    '''
    # Digits row, three letter rows (with DEL), then ./SPACE/GO.
    keycodes = [
        'KEYCODE_1', 'KEYCODE_2', 'KEYCODE_3', 'KEYCODE_4', 'KEYCODE_5',
        'KEYCODE_6', 'KEYCODE_7', 'KEYCODE_8', 'KEYCODE_9', 'KEYCODE_0',
        'KEYCODE_Q', 'KEYCODE_W', 'KEYCODE_E', 'KEYCODE_R', 'KEYCODE_T',
        'KEYCODE_Y', 'KEYCODE_U', 'KEYCODE_I', 'KEYCODE_O', 'KEYCODE_P',
        'KEYCODE_A', 'KEYCODE_S', 'KEYCODE_D', 'KEYCODE_F', 'KEYCODE_G',
        'KEYCODE_H', 'KEYCODE_J', 'KEYCODE_K', 'KEYCODE_L', 'KEYCODE_DEL',
        'KEYCODE_Z', 'KEYCODE_X', 'KEYCODE_C', 'KEYCODE_V', 'KEYCODE_B',
        'KEYCODE_N', 'KEYCODE_M', 'KEYCODE_.', 'KEYCODE_SPACE', 'KEYCODE_GO',
    ]
    for keycode in keycodes:
        # The button label drops the 8-character 'KEYCODE_' prefix.
        button = ControlPanelButton(self.keyboardTab, self.culebron,
                                    self.printOperation, value=keycode,
                                    text=keycode[8:],
                                    width=Layout.BUTTON_WIDTH,
                                    bg=self.bg, fg=self.fg,
                                    highlightbackground=self.highlightbackground)
        button.configure(command=button.command)
        button.grid(column=self.childWindow.column, row=self.childWindow.row)
    self.tabLayout()
def function[createKeyboardTab, parameter[self]]: constant[ KEYBOARD ] variable[_keyboardList] assign[=] list[[<ast.Constant object at 0x7da1b1e67280>, <ast.Constant object at 0x7da1b1e67580>, <ast.Constant object at 0x7da1b1e66f20>, <ast.Constant object at 0x7da1b1e67190>, <ast.Constant object at 0x7da1b1e66800>, <ast.Constant object at 0x7da1b1e671f0>, <ast.Constant object at 0x7da1b1e67070>, <ast.Constant object at 0x7da1b1e66860>, <ast.Constant object at 0x7da1b1e66a70>, <ast.Constant object at 0x7da1b1e67040>, <ast.Constant object at 0x7da1b1e668f0>, <ast.Constant object at 0x7da1b1e67160>, <ast.Constant object at 0x7da1b1e670d0>, <ast.Constant object at 0x7da1b1e64ca0>, <ast.Constant object at 0x7da1b1e640a0>, <ast.Constant object at 0x7da1b1e65de0>, <ast.Constant object at 0x7da1b1e64250>, <ast.Constant object at 0x7da1b1e66980>, <ast.Constant object at 0x7da1b1e65ea0>, <ast.Constant object at 0x7da1b1e65db0>, <ast.Constant object at 0x7da1b1e64cd0>, <ast.Constant object at 0x7da1b1e669b0>, <ast.Constant object at 0x7da1b1e670a0>, <ast.Constant object at 0x7da1b1e66ef0>, <ast.Constant object at 0x7da1b1e66cb0>, <ast.Constant object at 0x7da1b1e667d0>, <ast.Constant object at 0x7da1b1e66a40>, <ast.Constant object at 0x7da1b1e668c0>, <ast.Constant object at 0x7da1b1e67100>, <ast.Constant object at 0x7da1b1e66830>, <ast.Constant object at 0x7da1b1e669e0>, <ast.Constant object at 0x7da1b1e64e50>, <ast.Constant object at 0x7da1b1e64a90>, <ast.Constant object at 0x7da1b1e65300>, <ast.Constant object at 0x7da1b1e66890>, <ast.Constant object at 0x7da1b1e64370>, <ast.Constant object at 0x7da1b1e66620>, <ast.Constant object at 0x7da1b1e64c40>, <ast.Constant object at 0x7da1b1e64d00>, <ast.Constant object at 0x7da1b1e662c0>]] for taget[name[keyboard]] in starred[name[_keyboardList]] begin[:] variable[_cpb] assign[=] call[name[ControlPanelButton], parameter[name[self].keyboardTab, name[self].culebron, name[self].printOperation]] call[name[_cpb].configure, parameter[]] 
call[name[_cpb].grid, parameter[]] call[name[self].tabLayout, parameter[]]
keyword[def] identifier[createKeyboardTab] ( identifier[self] ): literal[string] identifier[_keyboardList] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[keyboard] keyword[in] identifier[_keyboardList] : identifier[_cpb] = identifier[ControlPanelButton] ( identifier[self] . identifier[keyboardTab] , identifier[self] . identifier[culebron] , identifier[self] . identifier[printOperation] , identifier[value] = identifier[keyboard] , identifier[text] = identifier[keyboard] [ literal[int] :], identifier[width] = identifier[Layout] . identifier[BUTTON_WIDTH] , identifier[bg] = identifier[self] . identifier[bg] , identifier[fg] = identifier[self] . identifier[fg] , identifier[highlightbackground] = identifier[self] . identifier[highlightbackground] ) identifier[_cpb] . identifier[configure] ( identifier[command] = identifier[_cpb] . identifier[command] ) identifier[_cpb] . identifier[grid] ( identifier[column] = identifier[self] . identifier[childWindow] . identifier[column] , identifier[row] = identifier[self] . identifier[childWindow] . identifier[row] ) identifier[self] . identifier[tabLayout] ()
def createKeyboardTab(self): """ KEYBOARD """ _keyboardList = ['KEYCODE_1', 'KEYCODE_2', 'KEYCODE_3', 'KEYCODE_4', 'KEYCODE_5', 'KEYCODE_6', 'KEYCODE_7', 'KEYCODE_8', 'KEYCODE_9', 'KEYCODE_0', 'KEYCODE_Q', 'KEYCODE_W', 'KEYCODE_E', 'KEYCODE_R', 'KEYCODE_T', 'KEYCODE_Y', 'KEYCODE_U', 'KEYCODE_I', 'KEYCODE_O', 'KEYCODE_P', 'KEYCODE_A', 'KEYCODE_S', 'KEYCODE_D', 'KEYCODE_F', 'KEYCODE_G', 'KEYCODE_H', 'KEYCODE_J', 'KEYCODE_K', 'KEYCODE_L', 'KEYCODE_DEL', 'KEYCODE_Z', 'KEYCODE_X', 'KEYCODE_C', 'KEYCODE_V', 'KEYCODE_B', 'KEYCODE_N', 'KEYCODE_M', 'KEYCODE_.', 'KEYCODE_SPACE', 'KEYCODE_GO'] for keyboard in _keyboardList: _cpb = ControlPanelButton(self.keyboardTab, self.culebron, self.printOperation, value=keyboard, text=keyboard[8:], width=Layout.BUTTON_WIDTH, bg=self.bg, fg=self.fg, highlightbackground=self.highlightbackground) _cpb.configure(command=_cpb.command) _cpb.grid(column=self.childWindow.column, row=self.childWindow.row) self.tabLayout() # depends on [control=['for'], data=['keyboard']]
def _validate_config(self):
    """ Handle and check configuration.

    Normalizes the raw ``config.torque`` key/value strings, groups them
    into ``self.jobs`` / ``self.httpd`` bunches, and fails fast (via
    ``self.fatal``) on malformed keys, bad httpd settings, incomplete
    job definitions, or unimportable job handlers.
    """
    # Per-group parameter buckets; keys under "job." / "httpd." land here.
    groups = dict(
        job=defaultdict(Bunch),
        httpd=defaultdict(Bunch),
    )

    for key, val in config.torque.items():
        # Auto-convert numbers and bools
        # (the converted int is also written back into config.torque).
        if val.isdigit():
            config.torque[key] = val = int(val)
        elif val.lower() in (matching.TRUE | matching.FALSE):
            val = matching.truth(str(val), key)

        # Assemble grouped parameters
        stem = key.split('.', 1)[0]
        if key == "httpd.active":
            # "httpd.active" is a 2-part key, handled before the
            # 3-part "STEM.NAME.PARAM" split below.
            groups[stem]["active"] = val
        elif stem in groups:
            try:
                stem, name, param = key.split('.', 2)
            except (TypeError, ValueError):
                self.fatal("Bad %s configuration key %r (expecting %s.NAME.PARAM)" % (stem, key, stem))
            else:
                groups[stem][name][param] = val

    # Expose the groups as self.jobs / self.httpd
    # (iteritems: this module targets Python 2).
    for key, val in groups.iteritems():
        setattr(self, key.replace("job", "jobs"), Bunch(val))

    # Validate httpd config
    if self.httpd.active:
        if self.httpd.waitress.url_scheme not in ("http", "https"):
            self.fatal("HTTP URL scheme must be either 'http' or 'https'")
        if not isinstance(self.httpd.waitress.port, int) or not(1024 <= self.httpd.waitress.port < 65536):
            self.fatal("HTTP port must be a 16 bit number >= 1024")

    # Validate jobs
    for name, params in self.jobs.items():
        for key in ("handler", "schedule"):
            if key not in params:
                self.fatal("Job '%s' is missing the required 'job.%s.%s' parameter" % (name, name, key))

        # Coerce an optional per-job parameter to bool; default binding
        # p=params pins the current job inside the loop.
        bool_param = lambda k, default, p=params: matching.truth(p.get(k, default), "job.%s.%s" % (name, k))

        params.job_name = name
        # A global --dry-run option overrides the per-job setting.
        params.dry_run = bool_param("dry_run", False) or self.options.dry_run
        params.active = bool_param("active", True)
        params.schedule = self._parse_schedule(params.schedule)

        if params.active:
            # Resolve the handler dotted name only for active jobs.
            try:
                params.handler = pymagic.import_name(params.handler)
            except ImportError as exc:
                self.fatal("Bad handler name '%s' for job '%s':\n    %s" % (params.handler, name, exc))
def function[_validate_config, parameter[self]]: constant[ Handle and check configuration. ] variable[groups] assign[=] call[name[dict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b1112f20>, <ast.Name object at 0x7da1b1112a10>]]] in starred[call[name[config].torque.items, parameter[]]] begin[:] if call[name[val].isdigit, parameter[]] begin[:] call[name[config].torque][name[key]] assign[=] call[name[int], parameter[name[val]]] variable[stem] assign[=] call[call[name[key].split, parameter[constant[.], constant[1]]]][constant[0]] if compare[name[key] equal[==] constant[httpd.active]] begin[:] call[call[name[groups]][name[stem]]][constant[active]] assign[=] name[val] for taget[tuple[[<ast.Name object at 0x7da2054a7c40>, <ast.Name object at 0x7da2054a58d0>]]] in starred[call[name[groups].iteritems, parameter[]]] begin[:] call[name[setattr], parameter[name[self], call[name[key].replace, parameter[constant[job], constant[jobs]]], call[name[Bunch], parameter[name[val]]]]] if name[self].httpd.active begin[:] if compare[name[self].httpd.waitress.url_scheme <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da2054a69b0>, <ast.Constant object at 0x7da2054a6a70>]]] begin[:] call[name[self].fatal, parameter[constant[HTTP URL scheme must be either 'http' or 'https']]] if <ast.BoolOp object at 0x7da2054a5ff0> begin[:] call[name[self].fatal, parameter[constant[HTTP port must be a 16 bit number >= 1024]]] for taget[tuple[[<ast.Name object at 0x7da2054a5030>, <ast.Name object at 0x7da2054a4f40>]]] in starred[call[name[self].jobs.items, parameter[]]] begin[:] for taget[name[key]] in starred[tuple[[<ast.Constant object at 0x7da2054a6d40>, <ast.Constant object at 0x7da2054a5e40>]]] begin[:] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[params]] begin[:] call[name[self].fatal, parameter[binary_operation[constant[Job '%s' is missing the required 'job.%s.%s' parameter] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 
0x7da2054a4700>, <ast.Name object at 0x7da2054a5f60>, <ast.Name object at 0x7da2054a4c10>]]]]] variable[bool_param] assign[=] <ast.Lambda object at 0x7da2054a6a10> name[params].job_name assign[=] name[name] name[params].dry_run assign[=] <ast.BoolOp object at 0x7da1b12c5d80> name[params].active assign[=] call[name[bool_param], parameter[constant[active], constant[True]]] name[params].schedule assign[=] call[name[self]._parse_schedule, parameter[name[params].schedule]] if name[params].active begin[:] <ast.Try object at 0x7da1b12c62f0>
keyword[def] identifier[_validate_config] ( identifier[self] ): literal[string] identifier[groups] = identifier[dict] ( identifier[job] = identifier[defaultdict] ( identifier[Bunch] ), identifier[httpd] = identifier[defaultdict] ( identifier[Bunch] ), ) keyword[for] identifier[key] , identifier[val] keyword[in] identifier[config] . identifier[torque] . identifier[items] (): keyword[if] identifier[val] . identifier[isdigit] (): identifier[config] . identifier[torque] [ identifier[key] ]= identifier[val] = identifier[int] ( identifier[val] ) keyword[elif] identifier[val] . identifier[lower] () keyword[in] ( identifier[matching] . identifier[TRUE] | identifier[matching] . identifier[FALSE] ): identifier[val] = identifier[matching] . identifier[truth] ( identifier[str] ( identifier[val] ), identifier[key] ) identifier[stem] = identifier[key] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ] keyword[if] identifier[key] == literal[string] : identifier[groups] [ identifier[stem] ][ literal[string] ]= identifier[val] keyword[elif] identifier[stem] keyword[in] identifier[groups] : keyword[try] : identifier[stem] , identifier[name] , identifier[param] = identifier[key] . identifier[split] ( literal[string] , literal[int] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): identifier[self] . identifier[fatal] ( literal[string] %( identifier[stem] , identifier[key] , identifier[stem] )) keyword[else] : identifier[groups] [ identifier[stem] ][ identifier[name] ][ identifier[param] ]= identifier[val] keyword[for] identifier[key] , identifier[val] keyword[in] identifier[groups] . identifier[iteritems] (): identifier[setattr] ( identifier[self] , identifier[key] . identifier[replace] ( literal[string] , literal[string] ), identifier[Bunch] ( identifier[val] )) keyword[if] identifier[self] . identifier[httpd] . identifier[active] : keyword[if] identifier[self] . identifier[httpd] . identifier[waitress] . 
identifier[url_scheme] keyword[not] keyword[in] ( literal[string] , literal[string] ): identifier[self] . identifier[fatal] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[self] . identifier[httpd] . identifier[waitress] . identifier[port] , identifier[int] ) keyword[or] keyword[not] ( literal[int] <= identifier[self] . identifier[httpd] . identifier[waitress] . identifier[port] < literal[int] ): identifier[self] . identifier[fatal] ( literal[string] ) keyword[for] identifier[name] , identifier[params] keyword[in] identifier[self] . identifier[jobs] . identifier[items] (): keyword[for] identifier[key] keyword[in] ( literal[string] , literal[string] ): keyword[if] identifier[key] keyword[not] keyword[in] identifier[params] : identifier[self] . identifier[fatal] ( literal[string] %( identifier[name] , identifier[name] , identifier[key] )) identifier[bool_param] = keyword[lambda] identifier[k] , identifier[default] , identifier[p] = identifier[params] : identifier[matching] . identifier[truth] ( identifier[p] . identifier[get] ( identifier[k] , identifier[default] ), literal[string] %( identifier[name] , identifier[k] )) identifier[params] . identifier[job_name] = identifier[name] identifier[params] . identifier[dry_run] = identifier[bool_param] ( literal[string] , keyword[False] ) keyword[or] identifier[self] . identifier[options] . identifier[dry_run] identifier[params] . identifier[active] = identifier[bool_param] ( literal[string] , keyword[True] ) identifier[params] . identifier[schedule] = identifier[self] . identifier[_parse_schedule] ( identifier[params] . identifier[schedule] ) keyword[if] identifier[params] . identifier[active] : keyword[try] : identifier[params] . identifier[handler] = identifier[pymagic] . identifier[import_name] ( identifier[params] . identifier[handler] ) keyword[except] identifier[ImportError] keyword[as] identifier[exc] : identifier[self] . identifier[fatal] ( literal[string] %( identifier[params] . 
identifier[handler] , identifier[name] , identifier[exc] ))
def _validate_config(self): """ Handle and check configuration. """ groups = dict(job=defaultdict(Bunch), httpd=defaultdict(Bunch)) for (key, val) in config.torque.items(): # Auto-convert numbers and bools if val.isdigit(): config.torque[key] = val = int(val) # depends on [control=['if'], data=[]] elif val.lower() in matching.TRUE | matching.FALSE: val = matching.truth(str(val), key) # depends on [control=['if'], data=[]] # Assemble grouped parameters stem = key.split('.', 1)[0] if key == 'httpd.active': groups[stem]['active'] = val # depends on [control=['if'], data=[]] elif stem in groups: try: (stem, name, param) = key.split('.', 2) # depends on [control=['try'], data=[]] except (TypeError, ValueError): self.fatal('Bad %s configuration key %r (expecting %s.NAME.PARAM)' % (stem, key, stem)) # depends on [control=['except'], data=[]] else: groups[stem][name][param] = val # depends on [control=['if'], data=['stem', 'groups']] # depends on [control=['for'], data=[]] for (key, val) in groups.iteritems(): setattr(self, key.replace('job', 'jobs'), Bunch(val)) # depends on [control=['for'], data=[]] # Validate httpd config if self.httpd.active: if self.httpd.waitress.url_scheme not in ('http', 'https'): self.fatal("HTTP URL scheme must be either 'http' or 'https'") # depends on [control=['if'], data=[]] if not isinstance(self.httpd.waitress.port, int) or not 1024 <= self.httpd.waitress.port < 65536: self.fatal('HTTP port must be a 16 bit number >= 1024') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Validate jobs for (name, params) in self.jobs.items(): for key in ('handler', 'schedule'): if key not in params: self.fatal("Job '%s' is missing the required 'job.%s.%s' parameter" % (name, name, key)) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] bool_param = lambda k, default, p=params: matching.truth(p.get(k, default), 'job.%s.%s' % (name, k)) params.job_name = name params.dry_run = 
bool_param('dry_run', False) or self.options.dry_run params.active = bool_param('active', True) params.schedule = self._parse_schedule(params.schedule) if params.active: try: params.handler = pymagic.import_name(params.handler) # depends on [control=['try'], data=[]] except ImportError as exc: self.fatal("Bad handler name '%s' for job '%s':\n %s" % (params.handler, name, exc)) # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
def get_plaintext_citations(bibtex):
    """
    Parse a BibTeX file to get a clean list of plaintext citations.

    :param bibtex: Either the path to the BibTeX file or the content of a \
            BibTeX file.
    :returns: A list of cleaned plaintext citations.
    """
    bib_parser = BibTexParser()
    bib_parser.customization = convert_to_unicode

    # Accept either a filesystem path or raw BibTeX content.
    if os.path.isfile(bibtex):
        with open(bibtex) as bibtex_fh:
            database = bibtexparser.load(bibtex_fh, parser=bib_parser)
    else:
        database = bibtexparser.loads(bibtex, parser=bib_parser)

    # Render every parsed entry as cleaned plaintext.
    return [bibentry_as_plaintext(entry) for entry in database.entries]
def function[get_plaintext_citations, parameter[bibtex]]: constant[ Parse a BibTeX file to get a clean list of plaintext citations. :param bibtex: Either the path to the BibTeX file or the content of a BibTeX file. :returns: A list of cleaned plaintext citations. ] variable[parser] assign[=] call[name[BibTexParser], parameter[]] name[parser].customization assign[=] name[convert_to_unicode] if call[name[os].path.isfile, parameter[name[bibtex]]] begin[:] with call[name[open], parameter[name[bibtex]]] begin[:] variable[bib_database] assign[=] call[name[bibtexparser].load, parameter[name[fh]]] variable[bibentries] assign[=] <ast.ListComp object at 0x7da1b2538d00> return[name[bibentries]]
keyword[def] identifier[get_plaintext_citations] ( identifier[bibtex] ): literal[string] identifier[parser] = identifier[BibTexParser] () identifier[parser] . identifier[customization] = identifier[convert_to_unicode] keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[bibtex] ): keyword[with] identifier[open] ( identifier[bibtex] ) keyword[as] identifier[fh] : identifier[bib_database] = identifier[bibtexparser] . identifier[load] ( identifier[fh] , identifier[parser] = identifier[parser] ) keyword[else] : identifier[bib_database] = identifier[bibtexparser] . identifier[loads] ( identifier[bibtex] , identifier[parser] = identifier[parser] ) identifier[bibentries] =[ identifier[bibentry_as_plaintext] ( identifier[bibentry] ) keyword[for] identifier[bibentry] keyword[in] identifier[bib_database] . identifier[entries] ] keyword[return] identifier[bibentries]
def get_plaintext_citations(bibtex): """ Parse a BibTeX file to get a clean list of plaintext citations. :param bibtex: Either the path to the BibTeX file or the content of a BibTeX file. :returns: A list of cleaned plaintext citations. """ parser = BibTexParser() parser.customization = convert_to_unicode # Load the BibTeX if os.path.isfile(bibtex): with open(bibtex) as fh: bib_database = bibtexparser.load(fh, parser=parser) # depends on [control=['with'], data=['fh']] # depends on [control=['if'], data=[]] else: bib_database = bibtexparser.loads(bibtex, parser=parser) # Convert bibentries to plaintext bibentries = [bibentry_as_plaintext(bibentry) for bibentry in bib_database.entries] # Return them return bibentries
def validate_IRkernel(venv_dir):
    """Validates that this env contains an IRkernel kernel and returns info to start it

    Probes the R interpreter found in *venv_dir* for an installed IRkernel
    package; on any failure the sentinel ``([], None, None)`` is returned.

    Returns:
        tuple (ARGV, language, resource_dir)
    """
    r_exe_name = find_exe(venv_dir, "R")
    if r_exe_name is None:
        return [], None, None

    # check if this is really an IRkernel **kernel**
    import subprocess
    try:
        # Ask R where the IRkernel kernelspec resources live.
        print_resources = 'cat(as.character(system.file("kernelspec", package = "IRkernel")))'
        resources_dir_bytes = subprocess.check_output([r_exe_name, '--slave', '-e', print_resources])
        resources_dir = resources_dir_bytes.decode(errors='ignore')
    except Exception:
        # FIX: was a bare "except:" (which also swallowed SystemExit and
        # KeyboardInterrupt) plus an unused, misspelled "ressources_dir"
        # pre-assignment. IRkernel not installed? -> not useable in any case...
        return [], None, None

    argv = [r_exe_name, "--slave", "-e", "IRkernel::main()", "--args", "{connection_file}"]

    if not os.path.exists(resources_dir.strip()):
        # Fallback to our own logo, but don't get the nice js goodies...
        resources_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "logos", "r")

    return argv, "r", resources_dir
def function[validate_IRkernel, parameter[venv_dir]]: constant[Validates that this env contains an IRkernel kernel and returns info to start it Returns: tuple (ARGV, language, resource_dir) ] variable[r_exe_name] assign[=] call[name[find_exe], parameter[name[venv_dir], constant[R]]] if compare[name[r_exe_name] is constant[None]] begin[:] return[tuple[[<ast.List object at 0x7da18ede7b20>, <ast.Constant object at 0x7da18c4cf2e0>, <ast.Constant object at 0x7da18c4cf220>]]] import module[subprocess] variable[ressources_dir] assign[=] constant[None] <ast.Try object at 0x7da18c4ce3e0> variable[argv] assign[=] list[[<ast.Name object at 0x7da18f58de40>, <ast.Constant object at 0x7da18f58e440>, <ast.Constant object at 0x7da18f58ece0>, <ast.Constant object at 0x7da18f58d5a0>, <ast.Constant object at 0x7da18f58f820>, <ast.Constant object at 0x7da18f58f490>]] if <ast.UnaryOp object at 0x7da18ede6e60> begin[:] variable[resources_dir] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[__file__]]]]], constant[logos], constant[r]]] return[tuple[[<ast.Name object at 0x7da18ede7c10>, <ast.Constant object at 0x7da18ede6020>, <ast.Name object at 0x7da18ede4df0>]]]
keyword[def] identifier[validate_IRkernel] ( identifier[venv_dir] ): literal[string] identifier[r_exe_name] = identifier[find_exe] ( identifier[venv_dir] , literal[string] ) keyword[if] identifier[r_exe_name] keyword[is] keyword[None] : keyword[return] [], keyword[None] , keyword[None] keyword[import] identifier[subprocess] identifier[ressources_dir] = keyword[None] keyword[try] : identifier[print_resources] = literal[string] identifier[resources_dir_bytes] = identifier[subprocess] . identifier[check_output] ([ identifier[r_exe_name] , literal[string] , literal[string] , identifier[print_resources] ]) identifier[resources_dir] = identifier[resources_dir_bytes] . identifier[decode] ( identifier[errors] = literal[string] ) keyword[except] : keyword[return] [], keyword[None] , keyword[None] identifier[argv] =[ identifier[r_exe_name] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[resources_dir] . identifier[strip] ()): identifier[resources_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[__file__] )), literal[string] , literal[string] ) keyword[return] identifier[argv] , literal[string] , identifier[resources_dir]
def validate_IRkernel(venv_dir): """Validates that this env contains an IRkernel kernel and returns info to start it Returns: tuple (ARGV, language, resource_dir) """ r_exe_name = find_exe(venv_dir, 'R') if r_exe_name is None: return ([], None, None) # depends on [control=['if'], data=[]] # check if this is really an IRkernel **kernel** import subprocess ressources_dir = None try: print_resources = 'cat(as.character(system.file("kernelspec", package = "IRkernel")))' resources_dir_bytes = subprocess.check_output([r_exe_name, '--slave', '-e', print_resources]) resources_dir = resources_dir_bytes.decode(errors='ignore') # depends on [control=['try'], data=[]] except: # not installed? -> not useable in any case... return ([], None, None) # depends on [control=['except'], data=[]] argv = [r_exe_name, '--slave', '-e', 'IRkernel::main()', '--args', '{connection_file}'] if not os.path.exists(resources_dir.strip()): # Fallback to our own log, but don't get the nice js goodies... resources_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'logos', 'r') # depends on [control=['if'], data=[]] return (argv, 'r', resources_dir)
def prior_to_xarray(self):
    """Convert prior samples to xarray."""
    # Normalize the prior_predictive filter into a list of names to skip.
    predictive = self.prior_predictive
    if predictive is None:
        ignore = []
    elif isinstance(predictive, str):
        ignore = [predictive]
    else:
        ignore = predictive

    draws = get_draws_stan3(self.prior, model=self.prior_model, ignore=ignore)
    return dict_to_dataset(
        draws, library=self.stan, coords=self.coords, dims=self.dims
    )
def function[prior_to_xarray, parameter[self]]: constant[Convert prior samples to xarray.] variable[prior] assign[=] name[self].prior variable[prior_model] assign[=] name[self].prior_model variable[prior_predictive] assign[=] name[self].prior_predictive if compare[name[prior_predictive] is constant[None]] begin[:] variable[prior_predictive] assign[=] list[[]] variable[ignore] assign[=] name[prior_predictive] variable[data] assign[=] call[name[get_draws_stan3], parameter[name[prior]]] return[call[name[dict_to_dataset], parameter[name[data]]]]
keyword[def] identifier[prior_to_xarray] ( identifier[self] ): literal[string] identifier[prior] = identifier[self] . identifier[prior] identifier[prior_model] = identifier[self] . identifier[prior_model] identifier[prior_predictive] = identifier[self] . identifier[prior_predictive] keyword[if] identifier[prior_predictive] keyword[is] keyword[None] : identifier[prior_predictive] =[] keyword[elif] identifier[isinstance] ( identifier[prior_predictive] , identifier[str] ): identifier[prior_predictive] =[ identifier[prior_predictive] ] identifier[ignore] = identifier[prior_predictive] identifier[data] = identifier[get_draws_stan3] ( identifier[prior] , identifier[model] = identifier[prior_model] , identifier[ignore] = identifier[ignore] ) keyword[return] identifier[dict_to_dataset] ( identifier[data] , identifier[library] = identifier[self] . identifier[stan] , identifier[coords] = identifier[self] . identifier[coords] , identifier[dims] = identifier[self] . identifier[dims] )
def prior_to_xarray(self): """Convert prior samples to xarray.""" prior = self.prior prior_model = self.prior_model # filter posterior_predictive and log_likelihood prior_predictive = self.prior_predictive if prior_predictive is None: prior_predictive = [] # depends on [control=['if'], data=['prior_predictive']] elif isinstance(prior_predictive, str): prior_predictive = [prior_predictive] # depends on [control=['if'], data=[]] ignore = prior_predictive data = get_draws_stan3(prior, model=prior_model, ignore=ignore) return dict_to_dataset(data, library=self.stan, coords=self.coords, dims=self.dims)
def p_sum_lvl_1(self, p):
    """ sum_lvl_1 : script_lvl_1
                  | script_lvl_1 PLUS sum_lvl_1"""
    # NOTE: the docstring above is the PLY grammar rule for this
    # production -- its content is load-bearing, not documentation.
    if len(p) == 4:
        # "script_lvl_1 PLUS sum_lvl_1": fold the new operand into the
        # list already accumulated by the right-hand sum.
        accumulated = p[3]
        accumulated.append(p[1])
        p[0] = accumulated
    else:
        # Single operand: start a fresh one-element list.
        p[0] = [p[1]]
def function[p_sum_lvl_1, parameter[self, p]]: constant[ sum_lvl_1 : script_lvl_1 | script_lvl_1 PLUS sum_lvl_1] if compare[call[name[len], parameter[name[p]]] equal[==] constant[4]] begin[:] call[call[name[p]][constant[3]].append, parameter[call[name[p]][constant[1]]]] call[name[p]][constant[0]] assign[=] call[name[p]][constant[3]]
keyword[def] identifier[p_sum_lvl_1] ( identifier[self] , identifier[p] ): literal[string] keyword[if] identifier[len] ( identifier[p] )== literal[int] : identifier[p] [ literal[int] ]. identifier[append] ( identifier[p] [ literal[int] ]) identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ] keyword[else] : identifier[p] [ literal[int] ]=[ identifier[p] [ literal[int] ]]
def p_sum_lvl_1(self, p): """ sum_lvl_1 : script_lvl_1 | script_lvl_1 PLUS sum_lvl_1""" if len(p) == 4: p[3].append(p[1]) p[0] = p[3] # depends on [control=['if'], data=[]] else: p[0] = [p[1]]
def create_container(self, container, **kwargs):
    """Create container

    :param container(string): container name
    (Container is equivalent to Bucket term in Amazon).
    :param **kwargs(dict): extend args for specific driver.
    """
    try:
        # NOTE(review): this logs before the driver call completes, and a
        # DriverException is swallowed (implicitly returning None) --
        # preserved as-is since callers may rely on the best-effort style.
        LOG.debug('create_container() with %s is success.', self.driver)
        return self.driver.create_container(container, **kwargs)
    except DriverException as e:
        # FIX: the original literal contained a backslash line-continuation
        # inside the string, so the logged message carried a long run of
        # embedded spaces ("raised\    ...an exception").
        LOG.exception('create_container() with %s raised an exception %s.',
                      self.driver, e)
def function[create_container, parameter[self, container]]: constant[Create container :param container(string): container name (Container is equivalent to Bucket term in Amazon). :param **kwargs(dict): extend args for specific driver. ] <ast.Try object at 0x7da18f810130>
keyword[def] identifier[create_container] ( identifier[self] , identifier[container] ,** identifier[kwargs] ): literal[string] keyword[try] : identifier[LOG] . identifier[debug] ( literal[string] , identifier[self] . identifier[driver] ) keyword[return] identifier[self] . identifier[driver] . identifier[create_container] ( identifier[container] ,** identifier[kwargs] ) keyword[except] identifier[DriverException] keyword[as] identifier[e] : identifier[LOG] . identifier[exception] ( literal[string] , identifier[self] . identifier[driver] , identifier[e] )
def create_container(self, container, **kwargs): """Create container :param container(string): container name (Container is equivalent to Bucket term in Amazon). :param **kwargs(dict): extend args for specific driver. """ try: LOG.debug('create_container() with %s is success.', self.driver) return self.driver.create_container(container, **kwargs) # depends on [control=['try'], data=[]] except DriverException as e: LOG.exception('create_container() with %s raised an exception %s.', self.driver, e) # depends on [control=['except'], data=['e']]
def _GetSpecificationStore(cls, format_category):
    """Retrieves the specification store for specified format category.

    Args:
      format_category (str): format category.

    Returns:
      tuple[FormatSpecificationStore, list[AnalyzerHelper]]: a format
          specification store and remaining analyzer helpers that do not have
          a format specification.
    """
    specification_store = specification.FormatSpecificationStore()
    remainder_list = []

    # FIX: dict views are directly iterable; the iter() wrapper was redundant.
    for analyzer_helper in cls._analyzer_helpers.values():
        if not analyzer_helper.IsEnabled():
            continue

        if format_category in analyzer_helper.format_categories:
            format_specification = analyzer_helper.GetFormatSpecification()

            if format_specification is not None:
                specification_store.AddSpecification(format_specification)
            else:
                # No byte signature available: must be scanned individually.
                remainder_list.append(analyzer_helper)

    return specification_store, remainder_list
def function[_GetSpecificationStore, parameter[cls, format_category]]: constant[Retrieves the specification store for specified format category. Args: format_category (str): format category. Returns: tuple[FormatSpecificationStore, list[AnalyzerHelper]]: a format specification store and remaining analyzer helpers that do not have a format specification. ] variable[specification_store] assign[=] call[name[specification].FormatSpecificationStore, parameter[]] variable[remainder_list] assign[=] list[[]] for taget[name[analyzer_helper]] in starred[call[name[iter], parameter[call[name[cls]._analyzer_helpers.values, parameter[]]]]] begin[:] if <ast.UnaryOp object at 0x7da1b0722b60> begin[:] continue if compare[name[format_category] in name[analyzer_helper].format_categories] begin[:] variable[format_specification] assign[=] call[name[analyzer_helper].GetFormatSpecification, parameter[]] if compare[name[format_specification] is_not constant[None]] begin[:] call[name[specification_store].AddSpecification, parameter[name[format_specification]]] return[tuple[[<ast.Name object at 0x7da1b0721000>, <ast.Name object at 0x7da1b0723220>]]]
keyword[def] identifier[_GetSpecificationStore] ( identifier[cls] , identifier[format_category] ): literal[string] identifier[specification_store] = identifier[specification] . identifier[FormatSpecificationStore] () identifier[remainder_list] =[] keyword[for] identifier[analyzer_helper] keyword[in] identifier[iter] ( identifier[cls] . identifier[_analyzer_helpers] . identifier[values] ()): keyword[if] keyword[not] identifier[analyzer_helper] . identifier[IsEnabled] (): keyword[continue] keyword[if] identifier[format_category] keyword[in] identifier[analyzer_helper] . identifier[format_categories] : identifier[format_specification] = identifier[analyzer_helper] . identifier[GetFormatSpecification] () keyword[if] identifier[format_specification] keyword[is] keyword[not] keyword[None] : identifier[specification_store] . identifier[AddSpecification] ( identifier[format_specification] ) keyword[else] : identifier[remainder_list] . identifier[append] ( identifier[analyzer_helper] ) keyword[return] identifier[specification_store] , identifier[remainder_list]
def _GetSpecificationStore(cls, format_category): """Retrieves the specification store for specified format category. Args: format_category (str): format category. Returns: tuple[FormatSpecificationStore, list[AnalyzerHelper]]: a format specification store and remaining analyzer helpers that do not have a format specification. """ specification_store = specification.FormatSpecificationStore() remainder_list = [] for analyzer_helper in iter(cls._analyzer_helpers.values()): if not analyzer_helper.IsEnabled(): continue # depends on [control=['if'], data=[]] if format_category in analyzer_helper.format_categories: format_specification = analyzer_helper.GetFormatSpecification() if format_specification is not None: specification_store.AddSpecification(format_specification) # depends on [control=['if'], data=['format_specification']] else: remainder_list.append(analyzer_helper) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['analyzer_helper']] return (specification_store, remainder_list)
def is_integer(v):
    # type: (...) -> bool
    """Test whether a value is an integer (of any kind).

    Examples:
        >>> is_integer(1)
        True
        >>> is_integer(-0.123)
        False
        >>> is_integer(3.)
        False
        >>> is_integer(9223372036854775808)
        True
        >>> is_integer('1')
        False
        >>> is_integer(None)
        False
        >>> is_integer(numpy.int(3))
        True
    """
    try:
        from builtins import int  # future-style int: matches int and long on Py2
    except ImportError:
        # Python 2 without the "future" package installed.
        from past.builtins import long
        return isinstance(v, (int, long))
    return isinstance(v, int)
def function[is_integer, parameter[v]]: constant[Test whether a value is an integer (of any kind). Examples: >>> is_integer(1) True >>> is_integer(-0.123) False >>> is_integer(3.) False >>> is_integer(9223372036854775808) True >>> is_integer('1') False >>> is_integer(None) False >>> is_integer(numpy.int(3)) True ] <ast.Try object at 0x7da18bcc9570>
keyword[def] identifier[is_integer] ( identifier[v] ): literal[string] keyword[try] : keyword[from] identifier[builtins] keyword[import] identifier[int] keyword[return] identifier[isinstance] ( identifier[v] , identifier[int] ) keyword[except] identifier[ImportError] : keyword[from] identifier[past] . identifier[builtins] keyword[import] identifier[long] keyword[return] identifier[isinstance] ( identifier[v] ,( identifier[int] , identifier[long] ))
def is_integer(v): # type: (...) -> bool "Test whether a value is an integer (of any kind).\n\n Examples:\n >>> is_integer(1)\n True\n >>> is_integer(-0.123)\n False\n >>> is_integer(3.)\n False\n >>> is_integer(9223372036854775808)\n True\n >>> is_integer('1')\n False\n >>> is_integer(None)\n False\n >>> is_integer(numpy.int(3))\n True\n " try: from builtins import int return isinstance(v, int) # Match both int and long on Py2 # depends on [control=['try'], data=[]] except ImportError: from past.builtins import long return isinstance(v, (int, long)) # depends on [control=['except'], data=[]]
def update(self, request, *args, **kwargs):
    """
    Run **PATCH** request against */api/price-list-items/<uuid>/* to update
    price list item. Only item_type, key value and units can be updated.
    Only customer owner and staff can update price items.
    """
    # Pure delegation: this override exists only to carry the endpoint
    # documentation above; the base class's update() does the work.
    # NOTE(review): restrictions described in the docstring (editable
    # fields, owner/staff permission) are presumably enforced by the
    # serializer/permission classes -- not visible from here, confirm.
    return super(PriceListItemViewSet, self).update(request, *args, **kwargs)
def function[update, parameter[self, request]]: constant[ Run **PATCH** request against */api/price-list-items/<uuid>/* to update price list item. Only item_type, key value and units can be updated. Only customer owner and staff can update price items. ] return[call[call[name[super], parameter[name[PriceListItemViewSet], name[self]]].update, parameter[name[request], <ast.Starred object at 0x7da1b0d1db40>]]]
keyword[def] identifier[update] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[super] ( identifier[PriceListItemViewSet] , identifier[self] ). identifier[update] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
def update(self, request, *args, **kwargs): """ Run **PATCH** request against */api/price-list-items/<uuid>/* to update price list item. Only item_type, key value and units can be updated. Only customer owner and staff can update price items. """ return super(PriceListItemViewSet, self).update(request, *args, **kwargs)
def append_value(dictionary, key, item):
    """Append those items to the values for that key.

    Mutates *dictionary* in place: the list stored under *key* gains
    *item*, with an empty list created first when the key is absent.
    """
    # setdefault inserts-and-returns the list in a single lookup,
    # replacing the original get / append / reassign sequence.
    dictionary.setdefault(key, []).append(item)
def function[append_value, parameter[dictionary, key, item]]: constant[Append those items to the values for that key] variable[items] assign[=] call[name[dictionary].get, parameter[name[key], list[[]]]] call[name[items].append, parameter[name[item]]] call[name[dictionary]][name[key]] assign[=] name[items]
keyword[def] identifier[append_value] ( identifier[dictionary] , identifier[key] , identifier[item] ): literal[string] identifier[items] = identifier[dictionary] . identifier[get] ( identifier[key] ,[]) identifier[items] . identifier[append] ( identifier[item] ) identifier[dictionary] [ identifier[key] ]= identifier[items]
def append_value(dictionary, key, item): """Append those items to the values for that key""" items = dictionary.get(key, []) items.append(item) dictionary[key] = items
def __get_delta_files(self):
    """Search for delta files and return a dict of Delta objects, keyed by directory names."""
    # Collect (directory, filename) pairs for every regular file in the
    # configured directories.
    files = [(d, f) for d in self.dirs for f in listdir(d) if isfile(join(d, f))]

    deltas = OrderedDict()
    for d, f in files:
        file_ = join(d, f)
        # Skip anything that does not follow the delta naming scheme.
        if not Delta.is_valid_delta_name(file_):
            continue
        # setdefault replaces the explicit "if d not in deltas" dance.
        deltas.setdefault(d, []).append(Delta(file_))

    # Sort delta objects in each bucket by (version, priority, name).
    for bucket in deltas.values():
        bucket.sort(key=lambda x: (x.get_version(), x.get_priority(), x.get_name()))

    return deltas
def function[__get_delta_files, parameter[self]]: constant[Search for delta files and return a dict of Delta objects, keyed by directory names.] variable[files] assign[=] <ast.ListComp object at 0x7da18f00dc60> variable[deltas] assign[=] call[name[OrderedDict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da18fe919f0>, <ast.Name object at 0x7da18fe91a50>]]] in starred[name[files]] begin[:] variable[file_] assign[=] call[name[join], parameter[name[d], name[f]]] if <ast.UnaryOp object at 0x7da18fe919c0> begin[:] continue variable[delta] assign[=] call[name[Delta], parameter[name[file_]]] if compare[name[d] <ast.NotIn object at 0x7da2590d7190> name[deltas]] begin[:] call[name[deltas]][name[d]] assign[=] list[[]] call[call[name[deltas]][name[d]].append, parameter[name[delta]]] for taget[name[d]] in starred[name[deltas]] begin[:] call[call[name[deltas]][name[d]].sort, parameter[]] return[name[deltas]]
keyword[def] identifier[__get_delta_files] ( identifier[self] ): literal[string] identifier[files] =[( identifier[d] , identifier[f] ) keyword[for] identifier[d] keyword[in] identifier[self] . identifier[dirs] keyword[for] identifier[f] keyword[in] identifier[listdir] ( identifier[d] ) keyword[if] identifier[isfile] ( identifier[join] ( identifier[d] , identifier[f] ))] identifier[deltas] = identifier[OrderedDict] () keyword[for] identifier[d] , identifier[f] keyword[in] identifier[files] : identifier[file_] = identifier[join] ( identifier[d] , identifier[f] ) keyword[if] keyword[not] identifier[Delta] . identifier[is_valid_delta_name] ( identifier[file_] ): keyword[continue] identifier[delta] = identifier[Delta] ( identifier[file_] ) keyword[if] identifier[d] keyword[not] keyword[in] identifier[deltas] : identifier[deltas] [ identifier[d] ]=[] identifier[deltas] [ identifier[d] ]. identifier[append] ( identifier[delta] ) keyword[for] identifier[d] keyword[in] identifier[deltas] : identifier[deltas] [ identifier[d] ]. identifier[sort] ( identifier[key] = keyword[lambda] identifier[x] :( identifier[x] . identifier[get_version] (), identifier[x] . identifier[get_priority] (), identifier[x] . identifier[get_name] ())) keyword[return] identifier[deltas]
def __get_delta_files(self): """Search for delta files and return a dict of Delta objects, keyed by directory names.""" files = [(d, f) for d in self.dirs for f in listdir(d) if isfile(join(d, f))] deltas = OrderedDict() for (d, f) in files: file_ = join(d, f) if not Delta.is_valid_delta_name(file_): continue # depends on [control=['if'], data=[]] delta = Delta(file_) if d not in deltas: deltas[d] = [] # depends on [control=['if'], data=['d', 'deltas']] deltas[d].append(delta) # depends on [control=['for'], data=[]] # sort delta objects in each bucket for d in deltas: deltas[d].sort(key=lambda x: (x.get_version(), x.get_priority(), x.get_name())) # depends on [control=['for'], data=['d']] return deltas
def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: ShortCodeContext for this ShortCodeInstance :rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeContext """ if self._context is None: self._context = ShortCodeContext( self._version, service_sid=self._solution['service_sid'], sid=self._solution['sid'], ) return self._context
def function[_proxy, parameter[self]]: constant[ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: ShortCodeContext for this ShortCodeInstance :rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeContext ] if compare[name[self]._context is constant[None]] begin[:] name[self]._context assign[=] call[name[ShortCodeContext], parameter[name[self]._version]] return[name[self]._context]
keyword[def] identifier[_proxy] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_context] keyword[is] keyword[None] : identifier[self] . identifier[_context] = identifier[ShortCodeContext] ( identifier[self] . identifier[_version] , identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[sid] = identifier[self] . identifier[_solution] [ literal[string] ], ) keyword[return] identifier[self] . identifier[_context]
def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: ShortCodeContext for this ShortCodeInstance :rtype: twilio.rest.proxy.v1.service.short_code.ShortCodeContext """ if self._context is None: self._context = ShortCodeContext(self._version, service_sid=self._solution['service_sid'], sid=self._solution['sid']) # depends on [control=['if'], data=[]] return self._context
def add_segments_bar(self, segments, ax=None, height=0.14, pad=0.1, sharex=True, location='bottom', **plotargs): """Add a segment bar `Plot` indicating state information. By default, segments are displayed in a thin horizontal set of Axes sitting immediately below the x-axis of the main, similarly to a colorbar. Parameters ---------- segments : `~gwpy.segments.DataQualityFlag` A data-quality flag, or `SegmentList` denoting state segments about this Plot ax : `Axes`, optional Specific `Axes` relative to which to position new `Axes`, defaults to :func:`~matplotlib.pyplot.gca()` height : `float, `optional Height of the new axes, as a fraction of the anchor axes pad : `float`, optional Padding between the new axes and the anchor, as a fraction of the anchor axes dimension sharex : `True`, `~matplotlib.axes.Axes`, optional Either `True` to set ``sharex=ax`` for the new segment axes, or an `Axes` to use directly location : `str`, optional Location for new segment axes, defaults to ``'bottom'``, acceptable values are ``'top'`` or ``'bottom'``. 
**plotargs extra keyword arguments are passed to :meth:`~gwpy.plot.SegmentAxes.plot` """ # get axes to anchor against if not ax: ax = self.gca() # set options for new axes axes_kw = { 'pad': pad, 'add_to_figure': True, 'sharex': ax if sharex is True else sharex or None, 'axes_class': get_projection_class('segments'), } # map X-axis limit from old axes if axes_kw['sharex'] is ax and not ax.get_autoscalex_on(): axes_kw['xlim'] = ax.get_xlim() # if axes uses GPS scaling, copy the epoch as well try: axes_kw['epoch'] = ax.get_epoch() except AttributeError: pass # add new axes if ax.get_axes_locator(): divider = ax.get_axes_locator()._axes_divider else: from mpl_toolkits.axes_grid1 import make_axes_locatable divider = make_axes_locatable(ax) if location not in {'top', 'bottom'}: raise ValueError("Segments can only be positoned at 'top' or " "'bottom'.") segax = divider.append_axes(location, height, **axes_kw) # update anchor axes if axes_kw['sharex'] is ax and location == 'bottom': # map label segax.set_xlabel(ax.get_xlabel()) segax.xaxis.isDefault_label = ax.xaxis.isDefault_label ax.set_xlabel("") # hide ticks on original axes setp(ax.get_xticklabels(), visible=False) # plot segments segax.plot(segments, **plotargs) segax.grid(b=False, which='both', axis='y') segax.autoscale(axis='y', tight=True) return segax
def function[add_segments_bar, parameter[self, segments, ax, height, pad, sharex, location]]: constant[Add a segment bar `Plot` indicating state information. By default, segments are displayed in a thin horizontal set of Axes sitting immediately below the x-axis of the main, similarly to a colorbar. Parameters ---------- segments : `~gwpy.segments.DataQualityFlag` A data-quality flag, or `SegmentList` denoting state segments about this Plot ax : `Axes`, optional Specific `Axes` relative to which to position new `Axes`, defaults to :func:`~matplotlib.pyplot.gca()` height : `float, `optional Height of the new axes, as a fraction of the anchor axes pad : `float`, optional Padding between the new axes and the anchor, as a fraction of the anchor axes dimension sharex : `True`, `~matplotlib.axes.Axes`, optional Either `True` to set ``sharex=ax`` for the new segment axes, or an `Axes` to use directly location : `str`, optional Location for new segment axes, defaults to ``'bottom'``, acceptable values are ``'top'`` or ``'bottom'``. 
**plotargs extra keyword arguments are passed to :meth:`~gwpy.plot.SegmentAxes.plot` ] if <ast.UnaryOp object at 0x7da20c6aa440> begin[:] variable[ax] assign[=] call[name[self].gca, parameter[]] variable[axes_kw] assign[=] dictionary[[<ast.Constant object at 0x7da18f58fa00>, <ast.Constant object at 0x7da18f58e950>, <ast.Constant object at 0x7da18f58d9c0>, <ast.Constant object at 0x7da18f58f4c0>], [<ast.Name object at 0x7da18f58f640>, <ast.Constant object at 0x7da18f58f070>, <ast.IfExp object at 0x7da18f58d420>, <ast.Call object at 0x7da18f58f9d0>]] if <ast.BoolOp object at 0x7da18f58f010> begin[:] call[name[axes_kw]][constant[xlim]] assign[=] call[name[ax].get_xlim, parameter[]] <ast.Try object at 0x7da18f58d900> if call[name[ax].get_axes_locator, parameter[]] begin[:] variable[divider] assign[=] call[name[ax].get_axes_locator, parameter[]]._axes_divider if compare[name[location] <ast.NotIn object at 0x7da2590d7190> <ast.Set object at 0x7da18f58c6a0>] begin[:] <ast.Raise object at 0x7da18f58ed40> variable[segax] assign[=] call[name[divider].append_axes, parameter[name[location], name[height]]] if <ast.BoolOp object at 0x7da18dc99750> begin[:] call[name[segax].set_xlabel, parameter[call[name[ax].get_xlabel, parameter[]]]] name[segax].xaxis.isDefault_label assign[=] name[ax].xaxis.isDefault_label call[name[ax].set_xlabel, parameter[constant[]]] call[name[setp], parameter[call[name[ax].get_xticklabels, parameter[]]]] call[name[segax].plot, parameter[name[segments]]] call[name[segax].grid, parameter[]] call[name[segax].autoscale, parameter[]] return[name[segax]]
keyword[def] identifier[add_segments_bar] ( identifier[self] , identifier[segments] , identifier[ax] = keyword[None] , identifier[height] = literal[int] , identifier[pad] = literal[int] , identifier[sharex] = keyword[True] , identifier[location] = literal[string] ,** identifier[plotargs] ): literal[string] keyword[if] keyword[not] identifier[ax] : identifier[ax] = identifier[self] . identifier[gca] () identifier[axes_kw] ={ literal[string] : identifier[pad] , literal[string] : keyword[True] , literal[string] : identifier[ax] keyword[if] identifier[sharex] keyword[is] keyword[True] keyword[else] identifier[sharex] keyword[or] keyword[None] , literal[string] : identifier[get_projection_class] ( literal[string] ), } keyword[if] identifier[axes_kw] [ literal[string] ] keyword[is] identifier[ax] keyword[and] keyword[not] identifier[ax] . identifier[get_autoscalex_on] (): identifier[axes_kw] [ literal[string] ]= identifier[ax] . identifier[get_xlim] () keyword[try] : identifier[axes_kw] [ literal[string] ]= identifier[ax] . identifier[get_epoch] () keyword[except] identifier[AttributeError] : keyword[pass] keyword[if] identifier[ax] . identifier[get_axes_locator] (): identifier[divider] = identifier[ax] . identifier[get_axes_locator] (). identifier[_axes_divider] keyword[else] : keyword[from] identifier[mpl_toolkits] . identifier[axes_grid1] keyword[import] identifier[make_axes_locatable] identifier[divider] = identifier[make_axes_locatable] ( identifier[ax] ) keyword[if] identifier[location] keyword[not] keyword[in] { literal[string] , literal[string] }: keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[segax] = identifier[divider] . identifier[append_axes] ( identifier[location] , identifier[height] ,** identifier[axes_kw] ) keyword[if] identifier[axes_kw] [ literal[string] ] keyword[is] identifier[ax] keyword[and] identifier[location] == literal[string] : identifier[segax] . identifier[set_xlabel] ( identifier[ax] . 
identifier[get_xlabel] ()) identifier[segax] . identifier[xaxis] . identifier[isDefault_label] = identifier[ax] . identifier[xaxis] . identifier[isDefault_label] identifier[ax] . identifier[set_xlabel] ( literal[string] ) identifier[setp] ( identifier[ax] . identifier[get_xticklabels] (), identifier[visible] = keyword[False] ) identifier[segax] . identifier[plot] ( identifier[segments] ,** identifier[plotargs] ) identifier[segax] . identifier[grid] ( identifier[b] = keyword[False] , identifier[which] = literal[string] , identifier[axis] = literal[string] ) identifier[segax] . identifier[autoscale] ( identifier[axis] = literal[string] , identifier[tight] = keyword[True] ) keyword[return] identifier[segax]
def add_segments_bar(self, segments, ax=None, height=0.14, pad=0.1, sharex=True, location='bottom', **plotargs): """Add a segment bar `Plot` indicating state information. By default, segments are displayed in a thin horizontal set of Axes sitting immediately below the x-axis of the main, similarly to a colorbar. Parameters ---------- segments : `~gwpy.segments.DataQualityFlag` A data-quality flag, or `SegmentList` denoting state segments about this Plot ax : `Axes`, optional Specific `Axes` relative to which to position new `Axes`, defaults to :func:`~matplotlib.pyplot.gca()` height : `float, `optional Height of the new axes, as a fraction of the anchor axes pad : `float`, optional Padding between the new axes and the anchor, as a fraction of the anchor axes dimension sharex : `True`, `~matplotlib.axes.Axes`, optional Either `True` to set ``sharex=ax`` for the new segment axes, or an `Axes` to use directly location : `str`, optional Location for new segment axes, defaults to ``'bottom'``, acceptable values are ``'top'`` or ``'bottom'``. 
**plotargs extra keyword arguments are passed to :meth:`~gwpy.plot.SegmentAxes.plot` """ # get axes to anchor against if not ax: ax = self.gca() # depends on [control=['if'], data=[]] # set options for new axes axes_kw = {'pad': pad, 'add_to_figure': True, 'sharex': ax if sharex is True else sharex or None, 'axes_class': get_projection_class('segments')} # map X-axis limit from old axes if axes_kw['sharex'] is ax and (not ax.get_autoscalex_on()): axes_kw['xlim'] = ax.get_xlim() # depends on [control=['if'], data=[]] # if axes uses GPS scaling, copy the epoch as well try: axes_kw['epoch'] = ax.get_epoch() # depends on [control=['try'], data=[]] except AttributeError: pass # depends on [control=['except'], data=[]] # add new axes if ax.get_axes_locator(): divider = ax.get_axes_locator()._axes_divider # depends on [control=['if'], data=[]] else: from mpl_toolkits.axes_grid1 import make_axes_locatable divider = make_axes_locatable(ax) if location not in {'top', 'bottom'}: raise ValueError("Segments can only be positoned at 'top' or 'bottom'.") # depends on [control=['if'], data=[]] segax = divider.append_axes(location, height, **axes_kw) # update anchor axes if axes_kw['sharex'] is ax and location == 'bottom': # map label segax.set_xlabel(ax.get_xlabel()) segax.xaxis.isDefault_label = ax.xaxis.isDefault_label ax.set_xlabel('') # hide ticks on original axes setp(ax.get_xticklabels(), visible=False) # depends on [control=['if'], data=[]] # plot segments segax.plot(segments, **plotargs) segax.grid(b=False, which='both', axis='y') segax.autoscale(axis='y', tight=True) return segax
def fitness(self, v): "Fitness function in the training set" base = self._base if base._classifier: if base._multiple_outputs: hy = SparseArray.argmax(v.hy) fit_func = base._fitness_function if fit_func == 'macro-F1' or fit_func == 'a_F1': f1_score = self.score mf1, mf1_v = f1_score.a_F1(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'DotF1' or fit_func == 'g_F1': f1_score = self.score mf1, mf1_v = f1_score.g_F1(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'DotRecallDotPrecision' or fit_func == 'g_g_recall_precision': f1_score = self.score mf1, mf1_v = f1_score.g_g_recall_precision(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'BER' or fit_func == 'a_recall': f1_score = self.score mf1, mf1_v = f1_score.a_recall(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'DotRecall' or fit_func == 'g_recall': f1_score = self.score mf1, mf1_v = f1_score.g_recall(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'macro-Precision' or fit_func == 'a_precision': f1_score = self.score mf1, mf1_v = f1_score.a_precision(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'DotPrecision' or fit_func == 'g_precision': f1_score = self.score mf1, mf1_v = f1_score.g_precision(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'accDotMacroF1': f1_score = self.score mf1, mf1_v = f1_score.accDotMacroF1(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'macro-RecallF1': f1_score = self.score mf1, mf1_v = f1_score.macroRecallF1(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'F1': f1_score = self.score f1_index = self._base._F1_index index = self.min_class if 
f1_index < 0 else f1_index mf1, mf1_v = f1_score.F1(index, base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'RecallDotPrecision' or fit_func == 'g_recall_precision': f1_score = self.score mf1, mf1_v = f1_score.g_recall_precision(self.min_class, base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 elif fit_func == 'ER' or fit_func == 'accuracy': f1_score = self.score mf1, mf1_v = f1_score.accuracy(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 else: raise RuntimeError('Unknown fitness function %s' % base._fitness_function) else: v.fitness = -base._ytr.SSE(v.hy * base._mask) else: if base._multiple_outputs: _ = np.mean([a.SAE(b.mul(c)) for a, b, c in zip(base._ytr, v.hy, base._mask)]) v.fitness = - _ else: v.fitness = -base._ytr.SAE(v.hy * base._mask)
def function[fitness, parameter[self, v]]: constant[Fitness function in the training set] variable[base] assign[=] name[self]._base if name[base]._classifier begin[:] if name[base]._multiple_outputs begin[:] variable[hy] assign[=] call[name[SparseArray].argmax, parameter[name[v].hy]] variable[fit_func] assign[=] name[base]._fitness_function if <ast.BoolOp object at 0x7da1b0e4eb60> begin[:] variable[f1_score] assign[=] name[self].score <ast.Tuple object at 0x7da1b0e4db40> assign[=] call[name[f1_score].a_F1, parameter[name[base]._y_klass, name[hy], name[base]._mask_ts.index]] name[v]._error assign[=] binary_operation[name[mf1_v] - constant[1]] name[v].fitness assign[=] binary_operation[name[mf1] - constant[1]]
keyword[def] identifier[fitness] ( identifier[self] , identifier[v] ): literal[string] identifier[base] = identifier[self] . identifier[_base] keyword[if] identifier[base] . identifier[_classifier] : keyword[if] identifier[base] . identifier[_multiple_outputs] : identifier[hy] = identifier[SparseArray] . identifier[argmax] ( identifier[v] . identifier[hy] ) identifier[fit_func] = identifier[base] . identifier[_fitness_function] keyword[if] identifier[fit_func] == literal[string] keyword[or] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[a_F1] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] keyword[or] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[g_F1] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] keyword[or] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[g_g_recall_precision] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . 
identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] keyword[or] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[a_recall] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] keyword[or] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[g_recall] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] keyword[or] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[a_precision] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] keyword[or] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[g_precision] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . 
identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[accDotMacroF1] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[macroRecallF1] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[f1_index] = identifier[self] . identifier[_base] . identifier[_F1_index] identifier[index] = identifier[self] . identifier[min_class] keyword[if] identifier[f1_index] < literal[int] keyword[else] identifier[f1_index] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[F1] ( identifier[index] , identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] keyword[or] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . 
identifier[g_recall_precision] ( identifier[self] . identifier[min_class] , identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[elif] identifier[fit_func] == literal[string] keyword[or] identifier[fit_func] == literal[string] : identifier[f1_score] = identifier[self] . identifier[score] identifier[mf1] , identifier[mf1_v] = identifier[f1_score] . identifier[accuracy] ( identifier[base] . identifier[_y_klass] , identifier[hy] , identifier[base] . identifier[_mask_ts] . identifier[index] ) identifier[v] . identifier[_error] = identifier[mf1_v] - literal[int] identifier[v] . identifier[fitness] = identifier[mf1] - literal[int] keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] % identifier[base] . identifier[_fitness_function] ) keyword[else] : identifier[v] . identifier[fitness] =- identifier[base] . identifier[_ytr] . identifier[SSE] ( identifier[v] . identifier[hy] * identifier[base] . identifier[_mask] ) keyword[else] : keyword[if] identifier[base] . identifier[_multiple_outputs] : identifier[_] = identifier[np] . identifier[mean] ([ identifier[a] . identifier[SAE] ( identifier[b] . identifier[mul] ( identifier[c] )) keyword[for] identifier[a] , identifier[b] , identifier[c] keyword[in] identifier[zip] ( identifier[base] . identifier[_ytr] , identifier[v] . identifier[hy] , identifier[base] . identifier[_mask] )]) identifier[v] . identifier[fitness] =- identifier[_] keyword[else] : identifier[v] . identifier[fitness] =- identifier[base] . identifier[_ytr] . identifier[SAE] ( identifier[v] . identifier[hy] * identifier[base] . identifier[_mask] )
def fitness(self, v): """Fitness function in the training set""" base = self._base if base._classifier: if base._multiple_outputs: hy = SparseArray.argmax(v.hy) fit_func = base._fitness_function if fit_func == 'macro-F1' or fit_func == 'a_F1': f1_score = self.score (mf1, mf1_v) = f1_score.a_F1(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'DotF1' or fit_func == 'g_F1': f1_score = self.score (mf1, mf1_v) = f1_score.g_F1(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'DotRecallDotPrecision' or fit_func == 'g_g_recall_precision': f1_score = self.score (mf1, mf1_v) = f1_score.g_g_recall_precision(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'BER' or fit_func == 'a_recall': f1_score = self.score (mf1, mf1_v) = f1_score.a_recall(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'DotRecall' or fit_func == 'g_recall': f1_score = self.score (mf1, mf1_v) = f1_score.g_recall(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'macro-Precision' or fit_func == 'a_precision': f1_score = self.score (mf1, mf1_v) = f1_score.a_precision(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'DotPrecision' or fit_func == 'g_precision': f1_score = self.score (mf1, mf1_v) = f1_score.g_precision(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'accDotMacroF1': f1_score = self.score (mf1, mf1_v) = f1_score.accDotMacroF1(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 
v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'macro-RecallF1': f1_score = self.score (mf1, mf1_v) = f1_score.macroRecallF1(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'F1': f1_score = self.score f1_index = self._base._F1_index index = self.min_class if f1_index < 0 else f1_index (mf1, mf1_v) = f1_score.F1(index, base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'RecallDotPrecision' or fit_func == 'g_recall_precision': f1_score = self.score (mf1, mf1_v) = f1_score.g_recall_precision(self.min_class, base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] elif fit_func == 'ER' or fit_func == 'accuracy': f1_score = self.score (mf1, mf1_v) = f1_score.accuracy(base._y_klass, hy, base._mask_ts.index) v._error = mf1_v - 1 v.fitness = mf1 - 1 # depends on [control=['if'], data=[]] else: raise RuntimeError('Unknown fitness function %s' % base._fitness_function) # depends on [control=['if'], data=[]] else: v.fitness = -base._ytr.SSE(v.hy * base._mask) # depends on [control=['if'], data=[]] elif base._multiple_outputs: _ = np.mean([a.SAE(b.mul(c)) for (a, b, c) in zip(base._ytr, v.hy, base._mask)]) v.fitness = -_ # depends on [control=['if'], data=[]] else: v.fitness = -base._ytr.SAE(v.hy * base._mask)
def plot_plates(step, time, vrms_surface, trench, ridge, agetrench, topo, fids): """handle ploting stuff""" vphi = step.fields['v2'][0, :, :, 0] tempfld = step.fields['T'][0, :, :, 0] concfld = step.fields['c'][0, :, :, 0] timestep = step.isnap if step.sdat.par['boundaries']['air_layer']: dsa = step.sdat.par['boundaries']['air_thickness'] # we are a bit below the surface; delete "-some number" # to be just below # the surface (that is considered plane here); should check if you are # in the thermal boundary layer indsurf = np.argmin(abs((1 - dsa) - step.geom.r_coord)) - 4 # depth to detect the continents indcont = np.argmin(abs((1 - dsa) - np.array(step.geom.r_coord))) - 10 else: indsurf = -1 indcont = -1 # depth to detect continents if step.sdat.par['boundaries']['air_layer'] and\ not step.sdat.par['continents']['proterozoic_belts']: continents = np.ma.masked_where( np.logical_or(concfld[:-1, indcont] < 3, concfld[:-1, indcont] > 4), concfld[:-1, indcont]) elif (step.sdat.par['boundaries']['air_layer'] and step.sdat.par['continents']['proterozoic_belts']): continents = np.ma.masked_where( np.logical_or(concfld[:-1, indcont] < 3, concfld[:-1, indcont] > 5), concfld[:-1, indcont]) elif step.sdat.par['tracersin']['tracers_weakcrust']: continents = np.ma.masked_where( concfld[:-1, indcont] < 3, concfld[:-1, indcont]) else: continents = np.ma.masked_where( concfld[:-1, indcont] < 2, concfld[:-1, indcont]) # masked array, only continents are true continentsall = continents / continents ph_coord = step.geom.p_coord # velocity vph2 = 0.5 * (vphi + np.roll(vphi, 1, 0)) # interpolate to the same phi dvph2 = (np.diff(vph2[:, indsurf]) / (ph_coord[0] * 2.)) # plotting fig0, (ax1, ax2, ax3) = plt.subplots(3, 1, sharex=True, figsize=(12, 8)) ax1.plot(ph_coord[:-1], concfld[:-1, indsurf], color='g', label='Conc') ax2.plot(ph_coord[:-1], tempfld[:-1, indsurf], color='k', label='Temp') ax3.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax1.fill_between( ph_coord[:-1], 
continents, 1., facecolor='#8B6914', alpha=0.2) ax2.fill_between( ph_coord[:-1], continentsall, 0., facecolor='#8B6914', alpha=0.2) tempmin = step.sdat.par['boundaries']['topT_val'] * 0.9\ if step.sdat.par['boundaries']['topT_mode'] == 'iso' else 0.0 tempmax = step.sdat.par['boundaries']['botT_val'] * 0.35\ if step.sdat.par['boundaries']['botT_mode'] == 'iso' else 0.8 ax2.set_ylim(tempmin, tempmax) ax3.fill_between( ph_coord[:-1], continentsall * round(1.5 * np.amax(dvph2), 1), round(np.amin(dvph2) * 1.1, 1), facecolor='#8B6914', alpha=0.2) ax3.set_ylim(conf.plates.vmin, conf.plates.vmax) ax1.set_ylabel("Concentration") ax2.set_ylabel("Temperature") ax3.set_ylabel("Velocity") ax1.set_title(timestep) ax1.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax1.transAxes) ax1.text(0.01, 1.07, str(round(step.geom.ti_ad, 8)), transform=ax1.transAxes) plot_plate_limits(ax3, ridge, trench, conf.plates.vmin, conf.plates.vmax) misc.saveplot(fig0, 'sveltempconc', timestep) # plotting velocity and velocity derivative fig0, (ax1, ax2) = plt.subplots(2, 1, sharex=True, figsize=(12, 8)) ax1.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax1.axhline(y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax1.set_ylabel("Velocity") ax1.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax1.transAxes) ax1.text(0.01, 1.07, str(round(step.geom.ti_ad, 8)), transform=ax1.transAxes) ax2.plot(ph_coord[:-1] + ph_coord[0], dvph2, color='k', label='dv') ax2.set_ylabel("dv") plot_plate_limits(ax1, ridge, trench, conf.plates.vmin, conf.plates.vmax) plot_plate_limits(ax2, ridge, trench, conf.plates.dvmin, conf.plates.dvmax) ax1.set_xlim(0, 2 * np.pi) ax1.set_title(timestep) ax1.fill_between( ph_coord[:-1], continentsall * conf.plates.vmin, conf.plates.vmax, facecolor='#8b6914', alpha=0.2) ax1.set_ylim(conf.plates.vmin, conf.plates.vmax) ax2.fill_between( ph_coord[:-1], continentsall * conf.plates.dvmin, conf.plates.dvmax, facecolor='#8b6914', alpha=0.2) 
ax2.set_ylim(conf.plates.dvmin, conf.plates.dvmax) misc.saveplot(fig0, 'sveldvel', timestep) # plotting velocity and second invariant of stress if 'str' in conf.plates.plot: stressfld = step.fields['sII'][0, :, :, 0] fig0, (ax1, ax2) = plt.subplots(2, 1, sharex=True, figsize=(12, 8)) ax1.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax1.axhline(y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax1.set_ylabel("Velocity") ax1.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax1.transAxes) ax1.text(0.01, 1.07, str(round(step.geom.ti_ad, 8)), transform=ax1.transAxes) ax2.plot(ph_coord[:-1], stressfld[:-1, indsurf] * step.sdat.scales.stress / 1.e6, color='k', label='Stress') ax2.set_ylim(conf.plates.stressmin, conf.plates.stressmax) ax2.set_ylabel("Stress [MPa]") plot_plate_limits(ax1, ridge, trench, conf.plates.vmin, conf.plates.vmax) plot_plate_limits(ax2, ridge, trench, conf.plates.stressmin, conf.plates.stressmax) ax1.set_xlim(0, 2 * np.pi) ax1.set_title(timestep) ax1.fill_between( ph_coord[:-1], continentsall * conf.plates.vmin, conf.plates.vmax, facecolor='#8B6914', alpha=0.2) ax1.set_ylim(conf.plates.vmin, conf.plates.vmax) ax2.fill_between( ph_coord[:-1], continentsall * conf.plates.dvmin, conf.plates.dvmax, facecolor='#8B6914', alpha=0.2) misc.saveplot(fig0, 'svelstress', timestep) # plotting velocity fig1, (ax1, ax2) = plt.subplots(2, 1, sharex=True, figsize=(12, 8)) ax1.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax1.axhline(y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax1.set_ylim(conf.plates.vmin, conf.plates.vmax) ax1.set_ylabel("Velocity") ax1.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax1.transAxes) plot_plate_limits(ax1, ridge, trench, conf.plates.vmin, conf.plates.vmax) # plotting velocity and age at surface if 'age' in conf.plates.plot: agefld = step.fields['age'][0, :, :, 0] age_surface = np.ma.masked_where( agefld[:, indsurf] < 0.00001, agefld[:, indsurf]) age_surface_dim = 
(age_surface * vrms_surface * conf.scaling.ttransit / conf.scaling.yearins / 1.e6) fig2, (ax3, ax4) = plt.subplots(2, 1, sharex=True, figsize=(12, 8)) ax3.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax3.axhline( y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax3.set_ylim(conf.plates.vmin, conf.plates.vmax) ax3.set_ylabel("Velocity") ax3.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax3.transAxes) ax3.fill_between( ph_coord[:-1], continentsall * conf.plates.vmax, conf.plates.vmin, facecolor='#8B6914', alpha=0.2) plot_plate_limits(ax3, ridge, trench, conf.plates.vmin, conf.plates.vmax) times_subd = [] age_subd = [] distance_subd = [] ph_trench_subd = [] ph_cont_subd = [] if step.sdat.par['switches']['cont_tracers']: for i, trench_i in enumerate(trench): # detection of the distance in between subduction and continent ph_coord_noend = ph_coord[:-1] angdistance1 = abs(ph_coord_noend[continentsall == 1] - trench_i) angdistance2 = 2. * np.pi - angdistance1 angdistance = np.minimum(angdistance1, angdistance2) distancecont = min(angdistance) argdistancecont = np.argmin(angdistance) continentpos = ph_coord_noend[continentsall == 1][argdistancecont] ph_trench_subd.append(trench_i) age_subd.append(agetrench[i]) ph_cont_subd.append(continentpos) distance_subd.append(distancecont) times_subd.append(step.geom.ti_ad) if angdistance1[argdistancecont] < angdistance2[argdistancecont]: if continentpos - trench_i < 0: # continent is on the left distancecont = - distancecont ax1.annotate('', xy=(trench_i + distancecont, 2000), xycoords='data', xytext=(trench_i, 2000), textcoords='data', arrowprops=dict(arrowstyle="->", lw="2", shrinkA=0, shrinkB=0)) else: # distance over boundary xy_anot, xy_text = 0, 2 * np.pi if continentpos - trench_i < 0: xy_anot, xy_text = xy_text, xy_anot ax1.annotate('', xy=(xy_anot, 2000), xycoords='data', xytext=(trench_i, 2000), textcoords='data', arrowprops=dict(arrowstyle="-", lw="2", shrinkA=0, shrinkB=0)) 
ax1.annotate('', xy=(continentpos, 2000), xycoords='data', xytext=(xy_text, 2000), textcoords='data', arrowprops=dict(arrowstyle="->", lw="2", shrinkA=0, shrinkB=0)) ax1.fill_between( ph_coord[:-1], continentsall * conf.plates.vmin, conf.plates.vmax, facecolor='#8B6914', alpha=0.2) ax2.set_ylabel("Topography [km]") ax2.axhline(y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax2.plot(topo[:, 0], topo[:, 1] * step.sdat.scales.length / 1.e3, color='black') ax2.set_xlim(0, 2 * np.pi) ax2.set_ylim(conf.plates.topomin, conf.plates.topomax) ax2.fill_between( ph_coord[:-1], continentsall * conf.plates.topomax, conf.plates.topomin, facecolor='#8B6914', alpha=0.2) plot_plate_limits(ax2, ridge, trench, conf.plates.topomin, conf.plates.topomax) ax1.set_title(timestep) misc.saveplot(fig1, 'sveltopo', timestep) if 'age' in conf.plates.plot: ax4.set_ylabel("Seafloor age [My]") # in dimensions ax4.plot(ph_coord[:-1], age_surface_dim[:-1], color='black') ax4.set_xlim(0, 2 * np.pi) ax4.fill_between( ph_coord[:-1], continentsall * conf.plates.agemax, conf.plates.agemin, facecolor='#8B6914', alpha=0.2) ax4.set_ylim(conf.plates.agemin, conf.plates.agemax) plot_plate_limits(ax4, ridge, trench, conf.plates.agemin, conf.plates.agemax) ax3.set_title(timestep) misc.saveplot(fig2, 'svelage', timestep) # writing the output into a file, all time steps are in one file for isubd in np.arange(len(distance_subd)): fids[1].write("%6.0f %11.7f %11.3f %10.6f %10.6f %10.6f %11.3f\n" % ( timestep, times_subd[isubd], time, distance_subd[isubd], ph_trench_subd[isubd], ph_cont_subd[isubd], age_subd[isubd], ))
def function[plot_plates, parameter[step, time, vrms_surface, trench, ridge, agetrench, topo, fids]]: constant[handle ploting stuff] variable[vphi] assign[=] call[call[name[step].fields][constant[v2]]][tuple[[<ast.Constant object at 0x7da1b182a2f0>, <ast.Slice object at 0x7da1b182a2c0>, <ast.Slice object at 0x7da1b182a290>, <ast.Constant object at 0x7da1b182a260>]]] variable[tempfld] assign[=] call[call[name[step].fields][constant[T]]][tuple[[<ast.Constant object at 0x7da1b182a0b0>, <ast.Slice object at 0x7da1b182a080>, <ast.Slice object at 0x7da1b182a050>, <ast.Constant object at 0x7da1b182a020>]]] variable[concfld] assign[=] call[call[name[step].fields][constant[c]]][tuple[[<ast.Constant object at 0x7da1b1829e70>, <ast.Slice object at 0x7da1b1829e40>, <ast.Slice object at 0x7da1b1829e10>, <ast.Constant object at 0x7da1b1829de0>]]] variable[timestep] assign[=] name[step].isnap if call[call[name[step].sdat.par][constant[boundaries]]][constant[air_layer]] begin[:] variable[dsa] assign[=] call[call[name[step].sdat.par][constant[boundaries]]][constant[air_thickness]] variable[indsurf] assign[=] binary_operation[call[name[np].argmin, parameter[call[name[abs], parameter[binary_operation[binary_operation[constant[1] - name[dsa]] - name[step].geom.r_coord]]]]] - constant[4]] variable[indcont] assign[=] binary_operation[call[name[np].argmin, parameter[call[name[abs], parameter[binary_operation[binary_operation[constant[1] - name[dsa]] - call[name[np].array, parameter[name[step].geom.r_coord]]]]]]] - constant[10]] if <ast.BoolOp object at 0x7da1b1829180> begin[:] variable[continents] assign[=] call[name[np].ma.masked_where, parameter[call[name[np].logical_or, parameter[compare[call[name[concfld]][tuple[[<ast.Slice object at 0x7da1b1828c10>, <ast.Name object at 0x7da1b1828b80>]]] less[<] constant[3]], compare[call[name[concfld]][tuple[[<ast.Slice object at 0x7da1b1828a60>, <ast.Name object at 0x7da1b18289d0>]]] greater[>] constant[4]]]], call[name[concfld]][tuple[[<ast.Slice 
object at 0x7da1b18288e0>, <ast.Name object at 0x7da1b1828850>]]]]] variable[continentsall] assign[=] binary_operation[name[continents] / name[continents]] variable[ph_coord] assign[=] name[step].geom.p_coord variable[vph2] assign[=] binary_operation[constant[0.5] * binary_operation[name[vphi] + call[name[np].roll, parameter[name[vphi], constant[1], constant[0]]]]] variable[dvph2] assign[=] binary_operation[call[name[np].diff, parameter[call[name[vph2]][tuple[[<ast.Slice object at 0x7da1b19baef0>, <ast.Name object at 0x7da1b19baec0>]]]]] / binary_operation[call[name[ph_coord]][constant[0]] * constant[2.0]]] <ast.Tuple object at 0x7da1b19bad70> assign[=] call[name[plt].subplots, parameter[constant[3], constant[1]]] call[name[ax1].plot, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b19ba920>], call[name[concfld]][tuple[[<ast.Slice object at 0x7da1b19ba800>, <ast.Name object at 0x7da1b19ba770>]]]]] call[name[ax2].plot, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1813250>], call[name[tempfld]][tuple[[<ast.Slice object at 0x7da1b1813310>, <ast.Name object at 0x7da1b18139d0>]]]]] call[name[ax3].plot, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1813580>], call[name[vph2]][tuple[[<ast.Slice object at 0x7da1b1813670>, <ast.Name object at 0x7da1b1813730>]]]]] call[name[ax1].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1813940>], name[continents], constant[1.0]]] call[name[ax2].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1813be0>], name[continentsall], constant[0.0]]] variable[tempmin] assign[=] <ast.IfExp object at 0x7da1b18121a0> variable[tempmax] assign[=] <ast.IfExp object at 0x7da1b1811510> call[name[ax2].set_ylim, parameter[name[tempmin], name[tempmax]]] call[name[ax3].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1811e40>], binary_operation[name[continentsall] * call[name[round], parameter[binary_operation[constant[1.5] * call[name[np].amax, 
parameter[name[dvph2]]]], constant[1]]]], call[name[round], parameter[binary_operation[call[name[np].amin, parameter[name[dvph2]]] * constant[1.1]], constant[1]]]]] call[name[ax3].set_ylim, parameter[name[conf].plates.vmin, name[conf].plates.vmax]] call[name[ax1].set_ylabel, parameter[constant[Concentration]]] call[name[ax2].set_ylabel, parameter[constant[Temperature]]] call[name[ax3].set_ylabel, parameter[constant[Velocity]]] call[name[ax1].set_title, parameter[name[timestep]]] call[name[ax1].text, parameter[constant[0.95], constant[1.07], binary_operation[call[name[str], parameter[call[name[round], parameter[name[time], constant[0]]]]] + constant[ My]]]] call[name[ax1].text, parameter[constant[0.01], constant[1.07], call[name[str], parameter[call[name[round], parameter[name[step].geom.ti_ad, constant[8]]]]]]] call[name[plot_plate_limits], parameter[name[ax3], name[ridge], name[trench], name[conf].plates.vmin, name[conf].plates.vmax]] call[name[misc].saveplot, parameter[name[fig0], constant[sveltempconc], name[timestep]]] <ast.Tuple object at 0x7da1b19cd8d0> assign[=] call[name[plt].subplots, parameter[constant[2], constant[1]]] call[name[ax1].plot, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b19cdd20>], call[name[vph2]][tuple[[<ast.Slice object at 0x7da1b19cdd50>, <ast.Name object at 0x7da1b19cdde0>]]]]] call[name[ax1].axhline, parameter[]] call[name[ax1].set_ylabel, parameter[constant[Velocity]]] call[name[ax1].text, parameter[constant[0.95], constant[1.07], binary_operation[call[name[str], parameter[call[name[round], parameter[name[time], constant[0]]]]] + constant[ My]]]] call[name[ax1].text, parameter[constant[0.01], constant[1.07], call[name[str], parameter[call[name[round], parameter[name[step].geom.ti_ad, constant[8]]]]]]] call[name[ax2].plot, parameter[binary_operation[call[name[ph_coord]][<ast.Slice object at 0x7da1b19cca90>] + call[name[ph_coord]][constant[0]]], name[dvph2]]] call[name[ax2].set_ylabel, parameter[constant[dv]]] 
call[name[plot_plate_limits], parameter[name[ax1], name[ridge], name[trench], name[conf].plates.vmin, name[conf].plates.vmax]] call[name[plot_plate_limits], parameter[name[ax2], name[ridge], name[trench], name[conf].plates.dvmin, name[conf].plates.dvmax]] call[name[ax1].set_xlim, parameter[constant[0], binary_operation[constant[2] * name[np].pi]]] call[name[ax1].set_title, parameter[name[timestep]]] call[name[ax1].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b19cf610>], binary_operation[name[continentsall] * name[conf].plates.vmin], name[conf].plates.vmax]] call[name[ax1].set_ylim, parameter[name[conf].plates.vmin, name[conf].plates.vmax]] call[name[ax2].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b19cfa00>], binary_operation[name[continentsall] * name[conf].plates.dvmin], name[conf].plates.dvmax]] call[name[ax2].set_ylim, parameter[name[conf].plates.dvmin, name[conf].plates.dvmax]] call[name[misc].saveplot, parameter[name[fig0], constant[sveldvel], name[timestep]]] if compare[constant[str] in name[conf].plates.plot] begin[:] variable[stressfld] assign[=] call[call[name[step].fields][constant[sII]]][tuple[[<ast.Constant object at 0x7da1b19a2c50>, <ast.Slice object at 0x7da1b19a2d10>, <ast.Slice object at 0x7da1b19a3790>, <ast.Constant object at 0x7da1b19a2dd0>]]] <ast.Tuple object at 0x7da1b19a3df0> assign[=] call[name[plt].subplots, parameter[constant[2], constant[1]]] call[name[ax1].plot, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b19a31c0>], call[name[vph2]][tuple[[<ast.Slice object at 0x7da1b19a3b80>, <ast.Name object at 0x7da1b19a38e0>]]]]] call[name[ax1].axhline, parameter[]] call[name[ax1].set_ylabel, parameter[constant[Velocity]]] call[name[ax1].text, parameter[constant[0.95], constant[1.07], binary_operation[call[name[str], parameter[call[name[round], parameter[name[time], constant[0]]]]] + constant[ My]]]] call[name[ax1].text, parameter[constant[0.01], constant[1.07], call[name[str], 
parameter[call[name[round], parameter[name[step].geom.ti_ad, constant[8]]]]]]] call[name[ax2].plot, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1801ab0>], binary_operation[binary_operation[call[name[stressfld]][tuple[[<ast.Slice object at 0x7da1b1802d10>, <ast.Name object at 0x7da1b1800af0>]]] * name[step].sdat.scales.stress] / constant[1000000.0]]]] call[name[ax2].set_ylim, parameter[name[conf].plates.stressmin, name[conf].plates.stressmax]] call[name[ax2].set_ylabel, parameter[constant[Stress [MPa]]]] call[name[plot_plate_limits], parameter[name[ax1], name[ridge], name[trench], name[conf].plates.vmin, name[conf].plates.vmax]] call[name[plot_plate_limits], parameter[name[ax2], name[ridge], name[trench], name[conf].plates.stressmin, name[conf].plates.stressmax]] call[name[ax1].set_xlim, parameter[constant[0], binary_operation[constant[2] * name[np].pi]]] call[name[ax1].set_title, parameter[name[timestep]]] call[name[ax1].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1800700>], binary_operation[name[continentsall] * name[conf].plates.vmin], name[conf].plates.vmax]] call[name[ax1].set_ylim, parameter[name[conf].plates.vmin, name[conf].plates.vmax]] call[name[ax2].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b18013c0>], binary_operation[name[continentsall] * name[conf].plates.dvmin], name[conf].plates.dvmax]] call[name[misc].saveplot, parameter[name[fig0], constant[svelstress], name[timestep]]] <ast.Tuple object at 0x7da1b1802650> assign[=] call[name[plt].subplots, parameter[constant[2], constant[1]]] call[name[ax1].plot, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1800820>], call[name[vph2]][tuple[[<ast.Slice object at 0x7da1b18029b0>, <ast.Name object at 0x7da1b1803190>]]]]] call[name[ax1].axhline, parameter[]] call[name[ax1].set_ylim, parameter[name[conf].plates.vmin, name[conf].plates.vmax]] call[name[ax1].set_ylabel, parameter[constant[Velocity]]] call[name[ax1].text, 
parameter[constant[0.95], constant[1.07], binary_operation[call[name[str], parameter[call[name[round], parameter[name[time], constant[0]]]]] + constant[ My]]]] call[name[plot_plate_limits], parameter[name[ax1], name[ridge], name[trench], name[conf].plates.vmin, name[conf].plates.vmax]] if compare[constant[age] in name[conf].plates.plot] begin[:] variable[agefld] assign[=] call[call[name[step].fields][constant[age]]][tuple[[<ast.Constant object at 0x7da1b19b7910>, <ast.Slice object at 0x7da1b19b6b00>, <ast.Slice object at 0x7da1b19b72b0>, <ast.Constant object at 0x7da1b19b6dd0>]]] variable[age_surface] assign[=] call[name[np].ma.masked_where, parameter[compare[call[name[agefld]][tuple[[<ast.Slice object at 0x7da1b19b5570>, <ast.Name object at 0x7da1b19b7250>]]] less[<] constant[1e-05]], call[name[agefld]][tuple[[<ast.Slice object at 0x7da1b19b4c40>, <ast.Name object at 0x7da1b19b5330>]]]]] variable[age_surface_dim] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[age_surface] * name[vrms_surface]] * name[conf].scaling.ttransit] / name[conf].scaling.yearins] / constant[1000000.0]] <ast.Tuple object at 0x7da1b19b5f30> assign[=] call[name[plt].subplots, parameter[constant[2], constant[1]]] call[name[ax3].plot, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b19b6f50>], call[name[vph2]][tuple[[<ast.Slice object at 0x7da1b19b40d0>, <ast.Name object at 0x7da1b19b4640>]]]]] call[name[ax3].axhline, parameter[]] call[name[ax3].set_ylim, parameter[name[conf].plates.vmin, name[conf].plates.vmax]] call[name[ax3].set_ylabel, parameter[constant[Velocity]]] call[name[ax3].text, parameter[constant[0.95], constant[1.07], binary_operation[call[name[str], parameter[call[name[round], parameter[name[time], constant[0]]]]] + constant[ My]]]] call[name[ax3].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b19b4730>], binary_operation[name[continentsall] * name[conf].plates.vmax], name[conf].plates.vmin]] 
call[name[plot_plate_limits], parameter[name[ax3], name[ridge], name[trench], name[conf].plates.vmin, name[conf].plates.vmax]] variable[times_subd] assign[=] list[[]] variable[age_subd] assign[=] list[[]] variable[distance_subd] assign[=] list[[]] variable[ph_trench_subd] assign[=] list[[]] variable[ph_cont_subd] assign[=] list[[]] if call[call[name[step].sdat.par][constant[switches]]][constant[cont_tracers]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18f58dd50>, <ast.Name object at 0x7da18f58f5b0>]]] in starred[call[name[enumerate], parameter[name[trench]]]] begin[:] variable[ph_coord_noend] assign[=] call[name[ph_coord]][<ast.Slice object at 0x7da1b19c31f0>] variable[angdistance1] assign[=] call[name[abs], parameter[binary_operation[call[name[ph_coord_noend]][compare[name[continentsall] equal[==] constant[1]]] - name[trench_i]]]] variable[angdistance2] assign[=] binary_operation[binary_operation[constant[2.0] * name[np].pi] - name[angdistance1]] variable[angdistance] assign[=] call[name[np].minimum, parameter[name[angdistance1], name[angdistance2]]] variable[distancecont] assign[=] call[name[min], parameter[name[angdistance]]] variable[argdistancecont] assign[=] call[name[np].argmin, parameter[name[angdistance]]] variable[continentpos] assign[=] call[call[name[ph_coord_noend]][compare[name[continentsall] equal[==] constant[1]]]][name[argdistancecont]] call[name[ph_trench_subd].append, parameter[name[trench_i]]] call[name[age_subd].append, parameter[call[name[agetrench]][name[i]]]] call[name[ph_cont_subd].append, parameter[name[continentpos]]] call[name[distance_subd].append, parameter[name[distancecont]]] call[name[times_subd].append, parameter[name[step].geom.ti_ad]] if compare[call[name[angdistance1]][name[argdistancecont]] less[<] call[name[angdistance2]][name[argdistancecont]]] begin[:] if compare[binary_operation[name[continentpos] - name[trench_i]] less[<] constant[0]] begin[:] variable[distancecont] assign[=] <ast.UnaryOp object at 0x7da1b1943010> 
call[name[ax1].annotate, parameter[constant[]]] call[name[ax1].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b19cabf0>], binary_operation[name[continentsall] * name[conf].plates.vmin], name[conf].plates.vmax]] call[name[ax2].set_ylabel, parameter[constant[Topography [km]]]] call[name[ax2].axhline, parameter[]] call[name[ax2].plot, parameter[call[name[topo]][tuple[[<ast.Slice object at 0x7da1b1806470>, <ast.Constant object at 0x7da1b1807250>]]], binary_operation[binary_operation[call[name[topo]][tuple[[<ast.Slice object at 0x7da1b1806350>, <ast.Constant object at 0x7da1b1805570>]]] * name[step].sdat.scales.length] / constant[1000.0]]]] call[name[ax2].set_xlim, parameter[constant[0], binary_operation[constant[2] * name[np].pi]]] call[name[ax2].set_ylim, parameter[name[conf].plates.topomin, name[conf].plates.topomax]] call[name[ax2].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1a6f010>], binary_operation[name[continentsall] * name[conf].plates.topomax], name[conf].plates.topomin]] call[name[plot_plate_limits], parameter[name[ax2], name[ridge], name[trench], name[conf].plates.topomin, name[conf].plates.topomax]] call[name[ax1].set_title, parameter[name[timestep]]] call[name[misc].saveplot, parameter[name[fig1], constant[sveltopo], name[timestep]]] if compare[constant[age] in name[conf].plates.plot] begin[:] call[name[ax4].set_ylabel, parameter[constant[Seafloor age [My]]]] call[name[ax4].plot, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1a6d7b0>], call[name[age_surface_dim]][<ast.Slice object at 0x7da1b1a6e500>]]] call[name[ax4].set_xlim, parameter[constant[0], binary_operation[constant[2] * name[np].pi]]] call[name[ax4].fill_between, parameter[call[name[ph_coord]][<ast.Slice object at 0x7da1b1a6e200>], binary_operation[name[continentsall] * name[conf].plates.agemax], name[conf].plates.agemin]] call[name[ax4].set_ylim, parameter[name[conf].plates.agemin, name[conf].plates.agemax]] 
call[name[plot_plate_limits], parameter[name[ax4], name[ridge], name[trench], name[conf].plates.agemin, name[conf].plates.agemax]] call[name[ax3].set_title, parameter[name[timestep]]] call[name[misc].saveplot, parameter[name[fig2], constant[svelage], name[timestep]]] for taget[name[isubd]] in starred[call[name[np].arange, parameter[call[name[len], parameter[name[distance_subd]]]]]] begin[:] call[call[name[fids]][constant[1]].write, parameter[binary_operation[constant[%6.0f %11.7f %11.3f %10.6f %10.6f %10.6f %11.3f ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1938ca0>, <ast.Subscript object at 0x7da1b193b400>, <ast.Name object at 0x7da1b193b730>, <ast.Subscript object at 0x7da1b193bbb0>, <ast.Subscript object at 0x7da1b1938ac0>, <ast.Subscript object at 0x7da1b1939090>, <ast.Subscript object at 0x7da1b193af50>]]]]]
keyword[def] identifier[plot_plates] ( identifier[step] , identifier[time] , identifier[vrms_surface] , identifier[trench] , identifier[ridge] , identifier[agetrench] , identifier[topo] , identifier[fids] ): literal[string] identifier[vphi] = identifier[step] . identifier[fields] [ literal[string] ][ literal[int] ,:,:, literal[int] ] identifier[tempfld] = identifier[step] . identifier[fields] [ literal[string] ][ literal[int] ,:,:, literal[int] ] identifier[concfld] = identifier[step] . identifier[fields] [ literal[string] ][ literal[int] ,:,:, literal[int] ] identifier[timestep] = identifier[step] . identifier[isnap] keyword[if] identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ]: identifier[dsa] = identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ] identifier[indsurf] = identifier[np] . identifier[argmin] ( identifier[abs] (( literal[int] - identifier[dsa] )- identifier[step] . identifier[geom] . identifier[r_coord] ))- literal[int] identifier[indcont] = identifier[np] . identifier[argmin] ( identifier[abs] (( literal[int] - identifier[dsa] )- identifier[np] . identifier[array] ( identifier[step] . identifier[geom] . identifier[r_coord] )))- literal[int] keyword[else] : identifier[indsurf] =- literal[int] identifier[indcont] =- literal[int] keyword[if] identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ] keyword[and] keyword[not] identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ]: identifier[continents] = identifier[np] . identifier[ma] . identifier[masked_where] ( identifier[np] . identifier[logical_or] ( identifier[concfld] [:- literal[int] , identifier[indcont] ]< literal[int] , identifier[concfld] [:- literal[int] , identifier[indcont] ]> literal[int] ), identifier[concfld] [:- literal[int] , identifier[indcont] ]) keyword[elif] ( identifier[step] . identifier[sdat] . 
identifier[par] [ literal[string] ][ literal[string] ] keyword[and] identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ]): identifier[continents] = identifier[np] . identifier[ma] . identifier[masked_where] ( identifier[np] . identifier[logical_or] ( identifier[concfld] [:- literal[int] , identifier[indcont] ]< literal[int] , identifier[concfld] [:- literal[int] , identifier[indcont] ]> literal[int] ), identifier[concfld] [:- literal[int] , identifier[indcont] ]) keyword[elif] identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ]: identifier[continents] = identifier[np] . identifier[ma] . identifier[masked_where] ( identifier[concfld] [:- literal[int] , identifier[indcont] ]< literal[int] , identifier[concfld] [:- literal[int] , identifier[indcont] ]) keyword[else] : identifier[continents] = identifier[np] . identifier[ma] . identifier[masked_where] ( identifier[concfld] [:- literal[int] , identifier[indcont] ]< literal[int] , identifier[concfld] [:- literal[int] , identifier[indcont] ]) identifier[continentsall] = identifier[continents] / identifier[continents] identifier[ph_coord] = identifier[step] . identifier[geom] . identifier[p_coord] identifier[vph2] = literal[int] *( identifier[vphi] + identifier[np] . identifier[roll] ( identifier[vphi] , literal[int] , literal[int] )) identifier[dvph2] =( identifier[np] . identifier[diff] ( identifier[vph2] [:, identifier[indsurf] ])/( identifier[ph_coord] [ literal[int] ]* literal[int] )) identifier[fig0] ,( identifier[ax1] , identifier[ax2] , identifier[ax3] )= identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[sharex] = keyword[True] , identifier[figsize] =( literal[int] , literal[int] )) identifier[ax1] . 
identifier[plot] ( identifier[ph_coord] [:- literal[int] ], identifier[concfld] [:- literal[int] , identifier[indsurf] ], identifier[color] = literal[string] , identifier[label] = literal[string] ) identifier[ax2] . identifier[plot] ( identifier[ph_coord] [:- literal[int] ], identifier[tempfld] [:- literal[int] , identifier[indsurf] ], identifier[color] = literal[string] , identifier[label] = literal[string] ) identifier[ax3] . identifier[plot] ( identifier[ph_coord] [:- literal[int] ], identifier[vph2] [:- literal[int] , identifier[indsurf] ], identifier[label] = literal[string] ) identifier[ax1] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continents] , literal[int] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax2] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] , literal[int] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[tempmin] = identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ]* literal[int] keyword[if] identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ]== literal[string] keyword[else] literal[int] identifier[tempmax] = identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ]* literal[int] keyword[if] identifier[step] . identifier[sdat] . identifier[par] [ literal[string] ][ literal[string] ]== literal[string] keyword[else] literal[int] identifier[ax2] . identifier[set_ylim] ( identifier[tempmin] , identifier[tempmax] ) identifier[ax3] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] * identifier[round] ( literal[int] * identifier[np] . identifier[amax] ( identifier[dvph2] ), literal[int] ), identifier[round] ( identifier[np] . 
identifier[amin] ( identifier[dvph2] )* literal[int] , literal[int] ), identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax3] . identifier[set_ylim] ( identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] ) identifier[ax1] . identifier[set_ylabel] ( literal[string] ) identifier[ax2] . identifier[set_ylabel] ( literal[string] ) identifier[ax3] . identifier[set_ylabel] ( literal[string] ) identifier[ax1] . identifier[set_title] ( identifier[timestep] ) identifier[ax1] . identifier[text] ( literal[int] , literal[int] , identifier[str] ( identifier[round] ( identifier[time] , literal[int] ))+ literal[string] , identifier[transform] = identifier[ax1] . identifier[transAxes] ) identifier[ax1] . identifier[text] ( literal[int] , literal[int] , identifier[str] ( identifier[round] ( identifier[step] . identifier[geom] . identifier[ti_ad] , literal[int] )), identifier[transform] = identifier[ax1] . identifier[transAxes] ) identifier[plot_plate_limits] ( identifier[ax3] , identifier[ridge] , identifier[trench] , identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] ) identifier[misc] . identifier[saveplot] ( identifier[fig0] , literal[string] , identifier[timestep] ) identifier[fig0] ,( identifier[ax1] , identifier[ax2] )= identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[sharex] = keyword[True] , identifier[figsize] =( literal[int] , literal[int] )) identifier[ax1] . identifier[plot] ( identifier[ph_coord] [:- literal[int] ], identifier[vph2] [:- literal[int] , identifier[indsurf] ], identifier[label] = literal[string] ) identifier[ax1] . identifier[axhline] ( identifier[y] = literal[int] , identifier[xmin] = literal[int] , identifier[xmax] = literal[int] * identifier[np] . 
identifier[pi] , identifier[color] = literal[string] , identifier[ls] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax1] . identifier[set_ylabel] ( literal[string] ) identifier[ax1] . identifier[text] ( literal[int] , literal[int] , identifier[str] ( identifier[round] ( identifier[time] , literal[int] ))+ literal[string] , identifier[transform] = identifier[ax1] . identifier[transAxes] ) identifier[ax1] . identifier[text] ( literal[int] , literal[int] , identifier[str] ( identifier[round] ( identifier[step] . identifier[geom] . identifier[ti_ad] , literal[int] )), identifier[transform] = identifier[ax1] . identifier[transAxes] ) identifier[ax2] . identifier[plot] ( identifier[ph_coord] [:- literal[int] ]+ identifier[ph_coord] [ literal[int] ], identifier[dvph2] , identifier[color] = literal[string] , identifier[label] = literal[string] ) identifier[ax2] . identifier[set_ylabel] ( literal[string] ) identifier[plot_plate_limits] ( identifier[ax1] , identifier[ridge] , identifier[trench] , identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] ) identifier[plot_plate_limits] ( identifier[ax2] , identifier[ridge] , identifier[trench] , identifier[conf] . identifier[plates] . identifier[dvmin] , identifier[conf] . identifier[plates] . identifier[dvmax] ) identifier[ax1] . identifier[set_xlim] ( literal[int] , literal[int] * identifier[np] . identifier[pi] ) identifier[ax1] . identifier[set_title] ( identifier[timestep] ) identifier[ax1] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] * identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax1] . identifier[set_ylim] ( identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . 
identifier[vmax] ) identifier[ax2] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] * identifier[conf] . identifier[plates] . identifier[dvmin] , identifier[conf] . identifier[plates] . identifier[dvmax] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax2] . identifier[set_ylim] ( identifier[conf] . identifier[plates] . identifier[dvmin] , identifier[conf] . identifier[plates] . identifier[dvmax] ) identifier[misc] . identifier[saveplot] ( identifier[fig0] , literal[string] , identifier[timestep] ) keyword[if] literal[string] keyword[in] identifier[conf] . identifier[plates] . identifier[plot] : identifier[stressfld] = identifier[step] . identifier[fields] [ literal[string] ][ literal[int] ,:,:, literal[int] ] identifier[fig0] ,( identifier[ax1] , identifier[ax2] )= identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[sharex] = keyword[True] , identifier[figsize] =( literal[int] , literal[int] )) identifier[ax1] . identifier[plot] ( identifier[ph_coord] [:- literal[int] ], identifier[vph2] [:- literal[int] , identifier[indsurf] ], identifier[label] = literal[string] ) identifier[ax1] . identifier[axhline] ( identifier[y] = literal[int] , identifier[xmin] = literal[int] , identifier[xmax] = literal[int] * identifier[np] . identifier[pi] , identifier[color] = literal[string] , identifier[ls] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax1] . identifier[set_ylabel] ( literal[string] ) identifier[ax1] . identifier[text] ( literal[int] , literal[int] , identifier[str] ( identifier[round] ( identifier[time] , literal[int] ))+ literal[string] , identifier[transform] = identifier[ax1] . identifier[transAxes] ) identifier[ax1] . identifier[text] ( literal[int] , literal[int] , identifier[str] ( identifier[round] ( identifier[step] . identifier[geom] . identifier[ti_ad] , literal[int] )), identifier[transform] = identifier[ax1] . 
identifier[transAxes] ) identifier[ax2] . identifier[plot] ( identifier[ph_coord] [:- literal[int] ], identifier[stressfld] [:- literal[int] , identifier[indsurf] ]* identifier[step] . identifier[sdat] . identifier[scales] . identifier[stress] / literal[int] , identifier[color] = literal[string] , identifier[label] = literal[string] ) identifier[ax2] . identifier[set_ylim] ( identifier[conf] . identifier[plates] . identifier[stressmin] , identifier[conf] . identifier[plates] . identifier[stressmax] ) identifier[ax2] . identifier[set_ylabel] ( literal[string] ) identifier[plot_plate_limits] ( identifier[ax1] , identifier[ridge] , identifier[trench] , identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] ) identifier[plot_plate_limits] ( identifier[ax2] , identifier[ridge] , identifier[trench] , identifier[conf] . identifier[plates] . identifier[stressmin] , identifier[conf] . identifier[plates] . identifier[stressmax] ) identifier[ax1] . identifier[set_xlim] ( literal[int] , literal[int] * identifier[np] . identifier[pi] ) identifier[ax1] . identifier[set_title] ( identifier[timestep] ) identifier[ax1] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] * identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax1] . identifier[set_ylim] ( identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] ) identifier[ax2] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] * identifier[conf] . identifier[plates] . identifier[dvmin] , identifier[conf] . identifier[plates] . identifier[dvmax] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[misc] . 
identifier[saveplot] ( identifier[fig0] , literal[string] , identifier[timestep] ) identifier[fig1] ,( identifier[ax1] , identifier[ax2] )= identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[sharex] = keyword[True] , identifier[figsize] =( literal[int] , literal[int] )) identifier[ax1] . identifier[plot] ( identifier[ph_coord] [:- literal[int] ], identifier[vph2] [:- literal[int] , identifier[indsurf] ], identifier[label] = literal[string] ) identifier[ax1] . identifier[axhline] ( identifier[y] = literal[int] , identifier[xmin] = literal[int] , identifier[xmax] = literal[int] * identifier[np] . identifier[pi] , identifier[color] = literal[string] , identifier[ls] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax1] . identifier[set_ylim] ( identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] ) identifier[ax1] . identifier[set_ylabel] ( literal[string] ) identifier[ax1] . identifier[text] ( literal[int] , literal[int] , identifier[str] ( identifier[round] ( identifier[time] , literal[int] ))+ literal[string] , identifier[transform] = identifier[ax1] . identifier[transAxes] ) identifier[plot_plate_limits] ( identifier[ax1] , identifier[ridge] , identifier[trench] , identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] ) keyword[if] literal[string] keyword[in] identifier[conf] . identifier[plates] . identifier[plot] : identifier[agefld] = identifier[step] . identifier[fields] [ literal[string] ][ literal[int] ,:,:, literal[int] ] identifier[age_surface] = identifier[np] . identifier[ma] . identifier[masked_where] ( identifier[agefld] [:, identifier[indsurf] ]< literal[int] , identifier[agefld] [:, identifier[indsurf] ]) identifier[age_surface_dim] =( identifier[age_surface] * identifier[vrms_surface] * identifier[conf] . identifier[scaling] . identifier[ttransit] / identifier[conf] . 
identifier[scaling] . identifier[yearins] / literal[int] ) identifier[fig2] ,( identifier[ax3] , identifier[ax4] )= identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[sharex] = keyword[True] , identifier[figsize] =( literal[int] , literal[int] )) identifier[ax3] . identifier[plot] ( identifier[ph_coord] [:- literal[int] ], identifier[vph2] [:- literal[int] , identifier[indsurf] ], identifier[label] = literal[string] ) identifier[ax3] . identifier[axhline] ( identifier[y] = literal[int] , identifier[xmin] = literal[int] , identifier[xmax] = literal[int] * identifier[np] . identifier[pi] , identifier[color] = literal[string] , identifier[ls] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax3] . identifier[set_ylim] ( identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] ) identifier[ax3] . identifier[set_ylabel] ( literal[string] ) identifier[ax3] . identifier[text] ( literal[int] , literal[int] , identifier[str] ( identifier[round] ( identifier[time] , literal[int] ))+ literal[string] , identifier[transform] = identifier[ax3] . identifier[transAxes] ) identifier[ax3] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] * identifier[conf] . identifier[plates] . identifier[vmax] , identifier[conf] . identifier[plates] . identifier[vmin] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[plot_plate_limits] ( identifier[ax3] , identifier[ridge] , identifier[trench] , identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] ) identifier[times_subd] =[] identifier[age_subd] =[] identifier[distance_subd] =[] identifier[ph_trench_subd] =[] identifier[ph_cont_subd] =[] keyword[if] identifier[step] . identifier[sdat] . 
identifier[par] [ literal[string] ][ literal[string] ]: keyword[for] identifier[i] , identifier[trench_i] keyword[in] identifier[enumerate] ( identifier[trench] ): identifier[ph_coord_noend] = identifier[ph_coord] [:- literal[int] ] identifier[angdistance1] = identifier[abs] ( identifier[ph_coord_noend] [ identifier[continentsall] == literal[int] ]- identifier[trench_i] ) identifier[angdistance2] = literal[int] * identifier[np] . identifier[pi] - identifier[angdistance1] identifier[angdistance] = identifier[np] . identifier[minimum] ( identifier[angdistance1] , identifier[angdistance2] ) identifier[distancecont] = identifier[min] ( identifier[angdistance] ) identifier[argdistancecont] = identifier[np] . identifier[argmin] ( identifier[angdistance] ) identifier[continentpos] = identifier[ph_coord_noend] [ identifier[continentsall] == literal[int] ][ identifier[argdistancecont] ] identifier[ph_trench_subd] . identifier[append] ( identifier[trench_i] ) identifier[age_subd] . identifier[append] ( identifier[agetrench] [ identifier[i] ]) identifier[ph_cont_subd] . identifier[append] ( identifier[continentpos] ) identifier[distance_subd] . identifier[append] ( identifier[distancecont] ) identifier[times_subd] . identifier[append] ( identifier[step] . identifier[geom] . identifier[ti_ad] ) keyword[if] identifier[angdistance1] [ identifier[argdistancecont] ]< identifier[angdistance2] [ identifier[argdistancecont] ]: keyword[if] identifier[continentpos] - identifier[trench_i] < literal[int] : identifier[distancecont] =- identifier[distancecont] identifier[ax1] . 
identifier[annotate] ( literal[string] , identifier[xy] =( identifier[trench_i] + identifier[distancecont] , literal[int] ), identifier[xycoords] = literal[string] , identifier[xytext] =( identifier[trench_i] , literal[int] ), identifier[textcoords] = literal[string] , identifier[arrowprops] = identifier[dict] ( identifier[arrowstyle] = literal[string] , identifier[lw] = literal[string] , identifier[shrinkA] = literal[int] , identifier[shrinkB] = literal[int] )) keyword[else] : identifier[xy_anot] , identifier[xy_text] = literal[int] , literal[int] * identifier[np] . identifier[pi] keyword[if] identifier[continentpos] - identifier[trench_i] < literal[int] : identifier[xy_anot] , identifier[xy_text] = identifier[xy_text] , identifier[xy_anot] identifier[ax1] . identifier[annotate] ( literal[string] , identifier[xy] =( identifier[xy_anot] , literal[int] ), identifier[xycoords] = literal[string] , identifier[xytext] =( identifier[trench_i] , literal[int] ), identifier[textcoords] = literal[string] , identifier[arrowprops] = identifier[dict] ( identifier[arrowstyle] = literal[string] , identifier[lw] = literal[string] , identifier[shrinkA] = literal[int] , identifier[shrinkB] = literal[int] )) identifier[ax1] . identifier[annotate] ( literal[string] , identifier[xy] =( identifier[continentpos] , literal[int] ), identifier[xycoords] = literal[string] , identifier[xytext] =( identifier[xy_text] , literal[int] ), identifier[textcoords] = literal[string] , identifier[arrowprops] = identifier[dict] ( identifier[arrowstyle] = literal[string] , identifier[lw] = literal[string] , identifier[shrinkA] = literal[int] , identifier[shrinkB] = literal[int] )) identifier[ax1] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] * identifier[conf] . identifier[plates] . identifier[vmin] , identifier[conf] . identifier[plates] . identifier[vmax] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax2] . 
identifier[set_ylabel] ( literal[string] ) identifier[ax2] . identifier[axhline] ( identifier[y] = literal[int] , identifier[xmin] = literal[int] , identifier[xmax] = literal[int] * identifier[np] . identifier[pi] , identifier[color] = literal[string] , identifier[ls] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax2] . identifier[plot] ( identifier[topo] [:, literal[int] ], identifier[topo] [:, literal[int] ]* identifier[step] . identifier[sdat] . identifier[scales] . identifier[length] / literal[int] , identifier[color] = literal[string] ) identifier[ax2] . identifier[set_xlim] ( literal[int] , literal[int] * identifier[np] . identifier[pi] ) identifier[ax2] . identifier[set_ylim] ( identifier[conf] . identifier[plates] . identifier[topomin] , identifier[conf] . identifier[plates] . identifier[topomax] ) identifier[ax2] . identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] * identifier[conf] . identifier[plates] . identifier[topomax] , identifier[conf] . identifier[plates] . identifier[topomin] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[plot_plate_limits] ( identifier[ax2] , identifier[ridge] , identifier[trench] , identifier[conf] . identifier[plates] . identifier[topomin] , identifier[conf] . identifier[plates] . identifier[topomax] ) identifier[ax1] . identifier[set_title] ( identifier[timestep] ) identifier[misc] . identifier[saveplot] ( identifier[fig1] , literal[string] , identifier[timestep] ) keyword[if] literal[string] keyword[in] identifier[conf] . identifier[plates] . identifier[plot] : identifier[ax4] . identifier[set_ylabel] ( literal[string] ) identifier[ax4] . identifier[plot] ( identifier[ph_coord] [:- literal[int] ], identifier[age_surface_dim] [:- literal[int] ], identifier[color] = literal[string] ) identifier[ax4] . identifier[set_xlim] ( literal[int] , literal[int] * identifier[np] . identifier[pi] ) identifier[ax4] . 
identifier[fill_between] ( identifier[ph_coord] [:- literal[int] ], identifier[continentsall] * identifier[conf] . identifier[plates] . identifier[agemax] , identifier[conf] . identifier[plates] . identifier[agemin] , identifier[facecolor] = literal[string] , identifier[alpha] = literal[int] ) identifier[ax4] . identifier[set_ylim] ( identifier[conf] . identifier[plates] . identifier[agemin] , identifier[conf] . identifier[plates] . identifier[agemax] ) identifier[plot_plate_limits] ( identifier[ax4] , identifier[ridge] , identifier[trench] , identifier[conf] . identifier[plates] . identifier[agemin] , identifier[conf] . identifier[plates] . identifier[agemax] ) identifier[ax3] . identifier[set_title] ( identifier[timestep] ) identifier[misc] . identifier[saveplot] ( identifier[fig2] , literal[string] , identifier[timestep] ) keyword[for] identifier[isubd] keyword[in] identifier[np] . identifier[arange] ( identifier[len] ( identifier[distance_subd] )): identifier[fids] [ literal[int] ]. identifier[write] ( literal[string] %( identifier[timestep] , identifier[times_subd] [ identifier[isubd] ], identifier[time] , identifier[distance_subd] [ identifier[isubd] ], identifier[ph_trench_subd] [ identifier[isubd] ], identifier[ph_cont_subd] [ identifier[isubd] ], identifier[age_subd] [ identifier[isubd] ], ))
def plot_plates(step, time, vrms_surface, trench, ridge, agetrench, topo, fids): """handle ploting stuff""" vphi = step.fields['v2'][0, :, :, 0] tempfld = step.fields['T'][0, :, :, 0] concfld = step.fields['c'][0, :, :, 0] timestep = step.isnap if step.sdat.par['boundaries']['air_layer']: dsa = step.sdat.par['boundaries']['air_thickness'] # we are a bit below the surface; delete "-some number" # to be just below # the surface (that is considered plane here); should check if you are # in the thermal boundary layer indsurf = np.argmin(abs(1 - dsa - step.geom.r_coord)) - 4 # depth to detect the continents indcont = np.argmin(abs(1 - dsa - np.array(step.geom.r_coord))) - 10 # depends on [control=['if'], data=[]] else: indsurf = -1 indcont = -1 # depth to detect continents if step.sdat.par['boundaries']['air_layer'] and (not step.sdat.par['continents']['proterozoic_belts']): continents = np.ma.masked_where(np.logical_or(concfld[:-1, indcont] < 3, concfld[:-1, indcont] > 4), concfld[:-1, indcont]) # depends on [control=['if'], data=[]] elif step.sdat.par['boundaries']['air_layer'] and step.sdat.par['continents']['proterozoic_belts']: continents = np.ma.masked_where(np.logical_or(concfld[:-1, indcont] < 3, concfld[:-1, indcont] > 5), concfld[:-1, indcont]) # depends on [control=['if'], data=[]] elif step.sdat.par['tracersin']['tracers_weakcrust']: continents = np.ma.masked_where(concfld[:-1, indcont] < 3, concfld[:-1, indcont]) # depends on [control=['if'], data=[]] else: continents = np.ma.masked_where(concfld[:-1, indcont] < 2, concfld[:-1, indcont]) # masked array, only continents are true continentsall = continents / continents ph_coord = step.geom.p_coord # velocity vph2 = 0.5 * (vphi + np.roll(vphi, 1, 0)) # interpolate to the same phi dvph2 = np.diff(vph2[:, indsurf]) / (ph_coord[0] * 2.0) # plotting (fig0, (ax1, ax2, ax3)) = plt.subplots(3, 1, sharex=True, figsize=(12, 8)) ax1.plot(ph_coord[:-1], concfld[:-1, indsurf], color='g', label='Conc') 
ax2.plot(ph_coord[:-1], tempfld[:-1, indsurf], color='k', label='Temp') ax3.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax1.fill_between(ph_coord[:-1], continents, 1.0, facecolor='#8B6914', alpha=0.2) ax2.fill_between(ph_coord[:-1], continentsall, 0.0, facecolor='#8B6914', alpha=0.2) tempmin = step.sdat.par['boundaries']['topT_val'] * 0.9 if step.sdat.par['boundaries']['topT_mode'] == 'iso' else 0.0 tempmax = step.sdat.par['boundaries']['botT_val'] * 0.35 if step.sdat.par['boundaries']['botT_mode'] == 'iso' else 0.8 ax2.set_ylim(tempmin, tempmax) ax3.fill_between(ph_coord[:-1], continentsall * round(1.5 * np.amax(dvph2), 1), round(np.amin(dvph2) * 1.1, 1), facecolor='#8B6914', alpha=0.2) ax3.set_ylim(conf.plates.vmin, conf.plates.vmax) ax1.set_ylabel('Concentration') ax2.set_ylabel('Temperature') ax3.set_ylabel('Velocity') ax1.set_title(timestep) ax1.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax1.transAxes) ax1.text(0.01, 1.07, str(round(step.geom.ti_ad, 8)), transform=ax1.transAxes) plot_plate_limits(ax3, ridge, trench, conf.plates.vmin, conf.plates.vmax) misc.saveplot(fig0, 'sveltempconc', timestep) # plotting velocity and velocity derivative (fig0, (ax1, ax2)) = plt.subplots(2, 1, sharex=True, figsize=(12, 8)) ax1.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax1.axhline(y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax1.set_ylabel('Velocity') ax1.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax1.transAxes) ax1.text(0.01, 1.07, str(round(step.geom.ti_ad, 8)), transform=ax1.transAxes) ax2.plot(ph_coord[:-1] + ph_coord[0], dvph2, color='k', label='dv') ax2.set_ylabel('dv') plot_plate_limits(ax1, ridge, trench, conf.plates.vmin, conf.plates.vmax) plot_plate_limits(ax2, ridge, trench, conf.plates.dvmin, conf.plates.dvmax) ax1.set_xlim(0, 2 * np.pi) ax1.set_title(timestep) ax1.fill_between(ph_coord[:-1], continentsall * conf.plates.vmin, conf.plates.vmax, facecolor='#8b6914', alpha=0.2) 
ax1.set_ylim(conf.plates.vmin, conf.plates.vmax) ax2.fill_between(ph_coord[:-1], continentsall * conf.plates.dvmin, conf.plates.dvmax, facecolor='#8b6914', alpha=0.2) ax2.set_ylim(conf.plates.dvmin, conf.plates.dvmax) misc.saveplot(fig0, 'sveldvel', timestep) # plotting velocity and second invariant of stress if 'str' in conf.plates.plot: stressfld = step.fields['sII'][0, :, :, 0] (fig0, (ax1, ax2)) = plt.subplots(2, 1, sharex=True, figsize=(12, 8)) ax1.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax1.axhline(y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax1.set_ylabel('Velocity') ax1.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax1.transAxes) ax1.text(0.01, 1.07, str(round(step.geom.ti_ad, 8)), transform=ax1.transAxes) ax2.plot(ph_coord[:-1], stressfld[:-1, indsurf] * step.sdat.scales.stress / 1000000.0, color='k', label='Stress') ax2.set_ylim(conf.plates.stressmin, conf.plates.stressmax) ax2.set_ylabel('Stress [MPa]') plot_plate_limits(ax1, ridge, trench, conf.plates.vmin, conf.plates.vmax) plot_plate_limits(ax2, ridge, trench, conf.plates.stressmin, conf.plates.stressmax) ax1.set_xlim(0, 2 * np.pi) ax1.set_title(timestep) ax1.fill_between(ph_coord[:-1], continentsall * conf.plates.vmin, conf.plates.vmax, facecolor='#8B6914', alpha=0.2) ax1.set_ylim(conf.plates.vmin, conf.plates.vmax) ax2.fill_between(ph_coord[:-1], continentsall * conf.plates.dvmin, conf.plates.dvmax, facecolor='#8B6914', alpha=0.2) misc.saveplot(fig0, 'svelstress', timestep) # depends on [control=['if'], data=[]] # plotting velocity (fig1, (ax1, ax2)) = plt.subplots(2, 1, sharex=True, figsize=(12, 8)) ax1.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax1.axhline(y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax1.set_ylim(conf.plates.vmin, conf.plates.vmax) ax1.set_ylabel('Velocity') ax1.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax1.transAxes) plot_plate_limits(ax1, ridge, trench, conf.plates.vmin, conf.plates.vmax) 
# plotting velocity and age at surface if 'age' in conf.plates.plot: agefld = step.fields['age'][0, :, :, 0] age_surface = np.ma.masked_where(agefld[:, indsurf] < 1e-05, agefld[:, indsurf]) age_surface_dim = age_surface * vrms_surface * conf.scaling.ttransit / conf.scaling.yearins / 1000000.0 (fig2, (ax3, ax4)) = plt.subplots(2, 1, sharex=True, figsize=(12, 8)) ax3.plot(ph_coord[:-1], vph2[:-1, indsurf], label='Vel') ax3.axhline(y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax3.set_ylim(conf.plates.vmin, conf.plates.vmax) ax3.set_ylabel('Velocity') ax3.text(0.95, 1.07, str(round(time, 0)) + ' My', transform=ax3.transAxes) ax3.fill_between(ph_coord[:-1], continentsall * conf.plates.vmax, conf.plates.vmin, facecolor='#8B6914', alpha=0.2) plot_plate_limits(ax3, ridge, trench, conf.plates.vmin, conf.plates.vmax) # depends on [control=['if'], data=[]] times_subd = [] age_subd = [] distance_subd = [] ph_trench_subd = [] ph_cont_subd = [] if step.sdat.par['switches']['cont_tracers']: for (i, trench_i) in enumerate(trench): # detection of the distance in between subduction and continent ph_coord_noend = ph_coord[:-1] angdistance1 = abs(ph_coord_noend[continentsall == 1] - trench_i) angdistance2 = 2.0 * np.pi - angdistance1 angdistance = np.minimum(angdistance1, angdistance2) distancecont = min(angdistance) argdistancecont = np.argmin(angdistance) continentpos = ph_coord_noend[continentsall == 1][argdistancecont] ph_trench_subd.append(trench_i) age_subd.append(agetrench[i]) ph_cont_subd.append(continentpos) distance_subd.append(distancecont) times_subd.append(step.geom.ti_ad) if angdistance1[argdistancecont] < angdistance2[argdistancecont]: if continentpos - trench_i < 0: # continent is on the left distancecont = -distancecont # depends on [control=['if'], data=[]] ax1.annotate('', xy=(trench_i + distancecont, 2000), xycoords='data', xytext=(trench_i, 2000), textcoords='data', arrowprops=dict(arrowstyle='->', lw='2', shrinkA=0, shrinkB=0)) # depends on 
[control=['if'], data=[]] else: # distance over boundary (xy_anot, xy_text) = (0, 2 * np.pi) if continentpos - trench_i < 0: (xy_anot, xy_text) = (xy_text, xy_anot) # depends on [control=['if'], data=[]] ax1.annotate('', xy=(xy_anot, 2000), xycoords='data', xytext=(trench_i, 2000), textcoords='data', arrowprops=dict(arrowstyle='-', lw='2', shrinkA=0, shrinkB=0)) ax1.annotate('', xy=(continentpos, 2000), xycoords='data', xytext=(xy_text, 2000), textcoords='data', arrowprops=dict(arrowstyle='->', lw='2', shrinkA=0, shrinkB=0)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] ax1.fill_between(ph_coord[:-1], continentsall * conf.plates.vmin, conf.plates.vmax, facecolor='#8B6914', alpha=0.2) ax2.set_ylabel('Topography [km]') ax2.axhline(y=0, xmin=0, xmax=2 * np.pi, color='black', ls='solid', alpha=0.2) ax2.plot(topo[:, 0], topo[:, 1] * step.sdat.scales.length / 1000.0, color='black') ax2.set_xlim(0, 2 * np.pi) ax2.set_ylim(conf.plates.topomin, conf.plates.topomax) ax2.fill_between(ph_coord[:-1], continentsall * conf.plates.topomax, conf.plates.topomin, facecolor='#8B6914', alpha=0.2) plot_plate_limits(ax2, ridge, trench, conf.plates.topomin, conf.plates.topomax) ax1.set_title(timestep) misc.saveplot(fig1, 'sveltopo', timestep) if 'age' in conf.plates.plot: ax4.set_ylabel('Seafloor age [My]') # in dimensions ax4.plot(ph_coord[:-1], age_surface_dim[:-1], color='black') ax4.set_xlim(0, 2 * np.pi) ax4.fill_between(ph_coord[:-1], continentsall * conf.plates.agemax, conf.plates.agemin, facecolor='#8B6914', alpha=0.2) ax4.set_ylim(conf.plates.agemin, conf.plates.agemax) plot_plate_limits(ax4, ridge, trench, conf.plates.agemin, conf.plates.agemax) ax3.set_title(timestep) misc.saveplot(fig2, 'svelage', timestep) # depends on [control=['if'], data=[]] # writing the output into a file, all time steps are in one file for isubd in np.arange(len(distance_subd)): fids[1].write('%6.0f %11.7f %11.3f %10.6f %10.6f %10.6f %11.3f\n' % (timestep, 
times_subd[isubd], time, distance_subd[isubd], ph_trench_subd[isubd], ph_cont_subd[isubd], age_subd[isubd])) # depends on [control=['for'], data=['isubd']]
def monolayer_vs_BE(self, plot_eads=False):
    """
    Plots the binding energy as a function of monolayers (ML), i.e.
        the fractional area adsorbate density for all facets. For each
        facet at a specific monolayer, only plot the lowest binding energy.

    Args:
        plot_eads (bool): Option to plot the adsorption energy (binding
            energy multiplied by number of adsorbates) instead.

    Returns:
        (Plot): Plot of binding energy vs monolayer for all facets.
    """
    plt = pretty_plot(width=8, height=7)
    for hkl, clean_dict in self.all_slab_entries.items():
        ml_be_dict = {}
        for clean_entry, ads_entries in clean_dict.items():
            if not ads_entries:
                continue
            for ads_entry in ads_entries:
                ml = ads_entry.get_monolayer
                be = ads_entry.gibbs_binding_energy(eads=plot_eads)
                # Keep only the lowest energy seen at each coverage.
                # An entry only ever enters the dict with a real computed
                # energy, so no placeholder sentinel can end up plotted
                # (the original seeded 1000 and could plot it).
                if ml not in ml_be_dict or be < ml_be_dict[ml]:
                    ml_be_dict[ml] = be
            # Sort by monolayer so the line plot is drawn left-to-right.
            monolayers, BEs = zip(*sorted(ml_be_dict.items()))
            plt.plot(monolayers, BEs, '-o',
                     c=self.color_dict[clean_entry], label=hkl)

    # NOTE(review): `ads_entry` here is whatever entry the loops above
    # left bound last; if no facet has any adsorbed entries this raises
    # NameError, matching the original implementation's behavior.
    adsorbates = tuple(ads_entry.ads_entries_dict.keys())
    plt.xlabel(" %s" * len(adsorbates) % adsorbates + " Coverage (ML)")
    if plot_eads:
        plt.ylabel("Adsorption Energy (eV)")
    else:
        plt.ylabel("Binding Energy (eV)")
    plt.legend()
    plt.tight_layout()

    return plt
def function[monolayer_vs_BE, parameter[self, plot_eads]]: constant[ Plots the binding energy energy as a function of monolayers (ML), i.e. the fractional area adsorbate density for all facets. For each facet at a specific monlayer, only plot the lowest binding energy. Args: plot_eads (bool): Option to plot the adsorption energy (binding energy multiplied by number of adsorbates) instead. Returns: (Plot): Plot of binding energy vs monolayer for all facets. ] variable[plt] assign[=] call[name[pretty_plot], parameter[]] for taget[name[hkl]] in starred[call[name[self].all_slab_entries.keys, parameter[]]] begin[:] variable[ml_be_dict] assign[=] dictionary[[], []] for taget[name[clean_entry]] in starred[call[call[name[self].all_slab_entries][name[hkl]].keys, parameter[]]] begin[:] if call[call[name[self].all_slab_entries][name[hkl]]][name[clean_entry]] begin[:] for taget[name[ads_entry]] in starred[call[call[name[self].all_slab_entries][name[hkl]]][name[clean_entry]]] begin[:] if compare[name[ads_entry].get_monolayer <ast.NotIn object at 0x7da2590d7190> call[name[ml_be_dict].keys, parameter[]]] begin[:] call[name[ml_be_dict]][name[ads_entry].get_monolayer] assign[=] constant[1000] variable[be] assign[=] call[name[ads_entry].gibbs_binding_energy, parameter[]] if compare[name[be] less[<] call[name[ml_be_dict]][name[ads_entry].get_monolayer]] begin[:] call[name[ml_be_dict]][name[ads_entry].get_monolayer] assign[=] name[be] variable[vals] assign[=] call[name[sorted], parameter[call[name[ml_be_dict].items, parameter[]]]] <ast.Tuple object at 0x7da1b1cd5e70> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b1cd4fd0>]] call[name[plt].plot, parameter[name[monolayers], name[BEs], constant[-o]]] variable[adsorbates] assign[=] call[name[tuple], parameter[call[name[ads_entry].ads_entries_dict.keys, parameter[]]]] call[name[plt].xlabel, parameter[binary_operation[binary_operation[binary_operation[constant[ %s] * call[name[len], parameter[name[adsorbates]]]] <ast.Mod 
object at 0x7da2590d6920> name[adsorbates]] + constant[ Coverage (ML)]]]] <ast.IfExp object at 0x7da1b1cd5930> call[name[plt].legend, parameter[]] call[name[plt].tight_layout, parameter[]] return[name[plt]]
keyword[def] identifier[monolayer_vs_BE] ( identifier[self] , identifier[plot_eads] = keyword[False] ): literal[string] identifier[plt] = identifier[pretty_plot] ( identifier[width] = literal[int] , identifier[height] = literal[int] ) keyword[for] identifier[hkl] keyword[in] identifier[self] . identifier[all_slab_entries] . identifier[keys] (): identifier[ml_be_dict] ={} keyword[for] identifier[clean_entry] keyword[in] identifier[self] . identifier[all_slab_entries] [ identifier[hkl] ]. identifier[keys] (): keyword[if] identifier[self] . identifier[all_slab_entries] [ identifier[hkl] ][ identifier[clean_entry] ]: keyword[for] identifier[ads_entry] keyword[in] identifier[self] . identifier[all_slab_entries] [ identifier[hkl] ][ identifier[clean_entry] ]: keyword[if] identifier[ads_entry] . identifier[get_monolayer] keyword[not] keyword[in] identifier[ml_be_dict] . identifier[keys] (): identifier[ml_be_dict] [ identifier[ads_entry] . identifier[get_monolayer] ]= literal[int] identifier[be] = identifier[ads_entry] . identifier[gibbs_binding_energy] ( identifier[eads] = identifier[plot_eads] ) keyword[if] identifier[be] < identifier[ml_be_dict] [ identifier[ads_entry] . identifier[get_monolayer] ]: identifier[ml_be_dict] [ identifier[ads_entry] . identifier[get_monolayer] ]= identifier[be] identifier[vals] = identifier[sorted] ( identifier[ml_be_dict] . identifier[items] ()) identifier[monolayers] , identifier[BEs] = identifier[zip] (* identifier[vals] ) identifier[plt] . identifier[plot] ( identifier[monolayers] , identifier[BEs] , literal[string] , identifier[c] = identifier[self] . identifier[color_dict] [ identifier[clean_entry] ], identifier[label] = identifier[hkl] ) identifier[adsorbates] = identifier[tuple] ( identifier[ads_entry] . identifier[ads_entries_dict] . identifier[keys] ()) identifier[plt] . identifier[xlabel] ( literal[string] * identifier[len] ( identifier[adsorbates] )% identifier[adsorbates] + literal[string] ) identifier[plt] . 
identifier[ylabel] ( literal[string] ) keyword[if] identifier[plot_eads] keyword[else] identifier[plt] . identifier[ylabel] ( literal[string] ) identifier[plt] . identifier[legend] () identifier[plt] . identifier[tight_layout] () keyword[return] identifier[plt]
def monolayer_vs_BE(self, plot_eads=False): """ Plots the binding energy energy as a function of monolayers (ML), i.e. the fractional area adsorbate density for all facets. For each facet at a specific monlayer, only plot the lowest binding energy. Args: plot_eads (bool): Option to plot the adsorption energy (binding energy multiplied by number of adsorbates) instead. Returns: (Plot): Plot of binding energy vs monolayer for all facets. """ plt = pretty_plot(width=8, height=7) for hkl in self.all_slab_entries.keys(): ml_be_dict = {} for clean_entry in self.all_slab_entries[hkl].keys(): if self.all_slab_entries[hkl][clean_entry]: for ads_entry in self.all_slab_entries[hkl][clean_entry]: if ads_entry.get_monolayer not in ml_be_dict.keys(): ml_be_dict[ads_entry.get_monolayer] = 1000 # depends on [control=['if'], data=[]] be = ads_entry.gibbs_binding_energy(eads=plot_eads) if be < ml_be_dict[ads_entry.get_monolayer]: ml_be_dict[ads_entry.get_monolayer] = be # depends on [control=['if'], data=['be']] # depends on [control=['for'], data=['ads_entry']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['clean_entry']] # sort the binding energies and monolayers # in order to properly draw a line plot vals = sorted(ml_be_dict.items()) (monolayers, BEs) = zip(*vals) plt.plot(monolayers, BEs, '-o', c=self.color_dict[clean_entry], label=hkl) # depends on [control=['for'], data=['hkl']] adsorbates = tuple(ads_entry.ads_entries_dict.keys()) plt.xlabel(' %s' * len(adsorbates) % adsorbates + ' Coverage (ML)') plt.ylabel('Adsorption Energy (eV)') if plot_eads else plt.ylabel('Binding Energy (eV)') plt.legend() plt.tight_layout() return plt
def data_url_scheme(self):
    """Return the SVG contents encoded as a Data URL Scheme string.

    The payload is the base64-encoded output of ``self.contents()``.
    """
    # TODO: move to web.app or make it function
    # remove #svg from dataframe
    payload = self.contents().encode()
    b64_text = base64.b64encode(payload).decode()
    return "data:image/svg+xml;base64," + b64_text
def function[data_url_scheme, parameter[self]]: constant[Get svg in Data URL Scheme format. ] variable[encoded] assign[=] call[name[base64].b64encode, parameter[call[call[name[self].contents, parameter[]].encode, parameter[]]]] return[binary_operation[constant[data:image/svg+xml;base64,] + call[name[encoded].decode, parameter[]]]]
keyword[def] identifier[data_url_scheme] ( identifier[self] ): literal[string] identifier[encoded] = identifier[base64] . identifier[b64encode] ( identifier[self] . identifier[contents] (). identifier[encode] ()) keyword[return] literal[string] + identifier[encoded] . identifier[decode] ()
def data_url_scheme(self): """Get svg in Data URL Scheme format. """ # TODO: move to web.app or make it function # remove #svg from dataframe encoded = base64.b64encode(self.contents().encode()) return 'data:image/svg+xml;base64,' + encoded.decode()
def get_season_points(self, season_key):
    """
    Calling Season Points API.

    Arg:
        season_key: key of the season

    Return:
        json data
    """
    url = "{}season/{}/points/".format(self.api_path, season_key)
    return self.get_response(url)
def function[get_season_points, parameter[self, season_key]]: constant[ Calling Season Points API. Arg: season_key: key of the season Return: json data ] variable[season_points_url] assign[=] binary_operation[binary_operation[binary_operation[name[self].api_path + constant[season/]] + name[season_key]] + constant[/points/]] variable[response] assign[=] call[name[self].get_response, parameter[name[season_points_url]]] return[name[response]]
keyword[def] identifier[get_season_points] ( identifier[self] , identifier[season_key] ): literal[string] identifier[season_points_url] = identifier[self] . identifier[api_path] + literal[string] + identifier[season_key] + literal[string] identifier[response] = identifier[self] . identifier[get_response] ( identifier[season_points_url] ) keyword[return] identifier[response]
def get_season_points(self, season_key): """ Calling Season Points API. Arg: season_key: key of the season Return: json data """ season_points_url = self.api_path + 'season/' + season_key + '/points/' response = self.get_response(season_points_url) return response
def install(self, name=None, prefix=None, pkgs=None, dep=True, channels=None,
            token=None):
    """
    Install a set of packages into an environment by name or path.

    If token is specified, the channels different from the defaults will
    get the token appended.
    """
    logger.debug(str((prefix, pkgs, channels)))

    # TODO: Fix temporal hack
    if not pkgs or not isinstance(pkgs, (list, tuple, str)):
        raise TypeError('must specify a list of one or more packages to '
                        'install into existing environment')

    cmd_list = ['install', '--yes', '--json', '--force-pscheck']

    # Target the named env, the env at `prefix`, or (neither given) the
    # currently active environment.
    if name:
        cmd_list += ['--name', name]
    elif prefix:
        cmd_list += ['--prefix', prefix]

    # TODO: Check if correct
    if channels:
        cmd_list += ['--override-channels']
        for chan in channels:
            cmd_list += ['--channel']
            cmd_list += [self.parse_token_channel(chan, token)]

    # TODO: Fix temporal hack
    if isinstance(pkgs, (list, tuple)):
        cmd_list += list(pkgs)
    elif isinstance(pkgs, str):
        cmd_list += ['--file', pkgs]

    if not dep:
        cmd_list += ['--no-deps']

    return self._call_and_parse(cmd_list)
def function[install, parameter[self, name, prefix, pkgs, dep, channels, token]]: constant[ Install a set of packages into an environment by name or path. If token is specified, the channels different from the defaults will get the token appended. ] call[name[logger].debug, parameter[call[name[str], parameter[tuple[[<ast.Name object at 0x7da1b27b9420>, <ast.Name object at 0x7da1b27b9060>, <ast.Name object at 0x7da1b27bb7f0>]]]]]] if <ast.BoolOp object at 0x7da1b27b9b40> begin[:] <ast.Raise object at 0x7da1b27bad40> variable[cmd_list] assign[=] list[[<ast.Constant object at 0x7da1b27b9780>, <ast.Constant object at 0x7da1b27b82e0>, <ast.Constant object at 0x7da1b27b88b0>, <ast.Constant object at 0x7da1b27b93f0>]] if name[name] begin[:] call[name[cmd_list].extend, parameter[list[[<ast.Constant object at 0x7da1b27b8be0>, <ast.Name object at 0x7da1b27badd0>]]]] if name[channels] begin[:] call[name[cmd_list].extend, parameter[list[[<ast.Constant object at 0x7da1b27b9270>]]]] for taget[name[channel]] in starred[name[channels]] begin[:] call[name[cmd_list].extend, parameter[list[[<ast.Constant object at 0x7da1b27b8a00>]]]] variable[channel] assign[=] call[name[self].parse_token_channel, parameter[name[channel], name[token]]] call[name[cmd_list].extend, parameter[list[[<ast.Name object at 0x7da1b2795570>]]]] if call[name[isinstance], parameter[name[pkgs], tuple[[<ast.Name object at 0x7da1b2796350>, <ast.Name object at 0x7da1b2795690>]]]] begin[:] call[name[cmd_list].extend, parameter[name[pkgs]]] if <ast.UnaryOp object at 0x7da1b2794c70> begin[:] call[name[cmd_list].extend, parameter[list[[<ast.Constant object at 0x7da1b2794f10>]]]] return[call[name[self]._call_and_parse, parameter[name[cmd_list]]]]
keyword[def] identifier[install] ( identifier[self] , identifier[name] = keyword[None] , identifier[prefix] = keyword[None] , identifier[pkgs] = keyword[None] , identifier[dep] = keyword[True] , identifier[channels] = keyword[None] , identifier[token] = keyword[None] ): literal[string] identifier[logger] . identifier[debug] ( identifier[str] (( identifier[prefix] , identifier[pkgs] , identifier[channels] ))) keyword[if] keyword[not] identifier[pkgs] keyword[or] keyword[not] identifier[isinstance] ( identifier[pkgs] ,( identifier[list] , identifier[tuple] , identifier[str] )): keyword[raise] identifier[TypeError] ( literal[string] literal[string] ) identifier[cmd_list] =[ literal[string] , literal[string] , literal[string] , literal[string] ] keyword[if] identifier[name] : identifier[cmd_list] . identifier[extend] ([ literal[string] , identifier[name] ]) keyword[elif] identifier[prefix] : identifier[cmd_list] . identifier[extend] ([ literal[string] , identifier[prefix] ]) keyword[else] : keyword[pass] keyword[if] identifier[channels] : identifier[cmd_list] . identifier[extend] ([ literal[string] ]) keyword[for] identifier[channel] keyword[in] identifier[channels] : identifier[cmd_list] . identifier[extend] ([ literal[string] ]) identifier[channel] = identifier[self] . identifier[parse_token_channel] ( identifier[channel] , identifier[token] ) identifier[cmd_list] . identifier[extend] ([ identifier[channel] ]) keyword[if] identifier[isinstance] ( identifier[pkgs] ,( identifier[list] , identifier[tuple] )): identifier[cmd_list] . identifier[extend] ( identifier[pkgs] ) keyword[elif] identifier[isinstance] ( identifier[pkgs] , identifier[str] ): identifier[cmd_list] . identifier[extend] ([ literal[string] , identifier[pkgs] ]) keyword[if] keyword[not] identifier[dep] : identifier[cmd_list] . identifier[extend] ([ literal[string] ]) keyword[return] identifier[self] . identifier[_call_and_parse] ( identifier[cmd_list] )
def install(self, name=None, prefix=None, pkgs=None, dep=True, channels=None, token=None): """ Install a set of packages into an environment by name or path. If token is specified, the channels different from the defaults will get the token appended. """ logger.debug(str((prefix, pkgs, channels))) # TODO: Fix temporal hack if not pkgs or not isinstance(pkgs, (list, tuple, str)): raise TypeError('must specify a list of one or more packages to install into existing environment') # depends on [control=['if'], data=[]] cmd_list = ['install', '--yes', '--json', '--force-pscheck'] if name: cmd_list.extend(['--name', name]) # depends on [control=['if'], data=[]] elif prefix: cmd_list.extend(['--prefix', prefix]) # depends on [control=['if'], data=[]] else: # Just install into the current environment, whatever that is pass # TODO: Check if correct if channels: cmd_list.extend(['--override-channels']) for channel in channels: cmd_list.extend(['--channel']) channel = self.parse_token_channel(channel, token) cmd_list.extend([channel]) # depends on [control=['for'], data=['channel']] # depends on [control=['if'], data=[]] # TODO: Fix temporal hack if isinstance(pkgs, (list, tuple)): cmd_list.extend(pkgs) # depends on [control=['if'], data=[]] elif isinstance(pkgs, str): cmd_list.extend(['--file', pkgs]) # depends on [control=['if'], data=[]] if not dep: cmd_list.extend(['--no-deps']) # depends on [control=['if'], data=[]] return self._call_and_parse(cmd_list)
def getQuotes(symbols):
    '''
    get real-time quotes (index, last trade price, last trade time, etc) for
    stocks, using google api: http://finance.google.com/finance/info?client=ig&q=symbols

    Unlike python package 'yahoo-finance' (15 min delay), There is no delay
    for NYSE and NASDAQ stocks in 'googlefinance' package.

    example:
    quotes = getQuotes('AAPL')
    quotes = getQuotes(['AAPL', 'GOOG'])

    :param symbols: a single symbol or a list of stock symbols
    :return: real-time quotes list (a list of dicts keyed by quote fields)
    '''
    # Normalize a single ticker into a one-element list.
    # (Replaces the fragile `type(symbols) == type('str')` comparison with
    # the idiomatic isinstance check.)
    if isinstance(symbols, str):
        symbols = [symbols]

    content = json.loads(request(symbols))
    return replaceKeys(content)
def function[getQuotes, parameter[symbols]]: constant[ get real-time quotes (index, last trade price, last trade time, etc) for stocks, using google api: http://finance.google.com/finance/info?client=ig&q=symbols Unlike python package 'yahoo-finance' (15 min delay), There is no delay for NYSE and NASDAQ stocks in 'googlefinance' package. example: quotes = getQuotes('AAPL') return: [{u'Index': u'NASDAQ', u'LastTradeWithCurrency': u'129.09', u'LastTradeDateTime': u'2015-03-02T16:04:29Z', u'LastTradePrice': u'129.09', u'Yield': u'1.46', u'LastTradeTime': u'4:04PM EST', u'LastTradeDateTimeLong': u'Mar 2, 4:04PM EST', u'Dividend': u'0.47', u'StockSymbol': u'AAPL', u'ID': u'22144'}] quotes = getQuotes(['AAPL', 'GOOG']) return: [{u'Index': u'NASDAQ', u'LastTradeWithCurrency': u'129.09', u'LastTradeDateTime': u'2015-03-02T16:04:29Z', u'LastTradePrice': u'129.09', u'Yield': u'1.46', u'LastTradeTime': u'4:04PM EST', u'LastTradeDateTimeLong': u'Mar 2, 4:04PM EST', u'Dividend': u'0.47', u'StockSymbol': u'AAPL', u'ID': u'22144'}, {u'Index': u'NASDAQ', u'LastTradeWithCurrency': u'571.34', u'LastTradeDateTime': u'2015-03-02T16:04:29Z', u'LastTradePrice': u'571.34', u'Yield': u'', u'LastTradeTime': u'4:04PM EST', u'LastTradeDateTimeLong': u'Mar 2, 4:04PM EST', u'Dividend': u'', u'StockSymbol': u'GOOG', u'ID': u'304466804484872'}] :param symbols: a single symbol or a list of stock symbols :return: real-time quotes list ] if compare[call[name[type], parameter[name[symbols]]] equal[==] call[name[type], parameter[constant[str]]]] begin[:] variable[symbols] assign[=] list[[<ast.Name object at 0x7da1b1546c50>]] variable[content] assign[=] call[name[json].loads, parameter[call[name[request], parameter[name[symbols]]]]] return[call[name[replaceKeys], parameter[name[content]]]]
keyword[def] identifier[getQuotes] ( identifier[symbols] ): literal[string] keyword[if] identifier[type] ( identifier[symbols] )== identifier[type] ( literal[string] ): identifier[symbols] =[ identifier[symbols] ] identifier[content] = identifier[json] . identifier[loads] ( identifier[request] ( identifier[symbols] )) keyword[return] identifier[replaceKeys] ( identifier[content] );
def getQuotes(symbols): """ get real-time quotes (index, last trade price, last trade time, etc) for stocks, using google api: http://finance.google.com/finance/info?client=ig&q=symbols Unlike python package 'yahoo-finance' (15 min delay), There is no delay for NYSE and NASDAQ stocks in 'googlefinance' package. example: quotes = getQuotes('AAPL') return: [{u'Index': u'NASDAQ', u'LastTradeWithCurrency': u'129.09', u'LastTradeDateTime': u'2015-03-02T16:04:29Z', u'LastTradePrice': u'129.09', u'Yield': u'1.46', u'LastTradeTime': u'4:04PM EST', u'LastTradeDateTimeLong': u'Mar 2, 4:04PM EST', u'Dividend': u'0.47', u'StockSymbol': u'AAPL', u'ID': u'22144'}] quotes = getQuotes(['AAPL', 'GOOG']) return: [{u'Index': u'NASDAQ', u'LastTradeWithCurrency': u'129.09', u'LastTradeDateTime': u'2015-03-02T16:04:29Z', u'LastTradePrice': u'129.09', u'Yield': u'1.46', u'LastTradeTime': u'4:04PM EST', u'LastTradeDateTimeLong': u'Mar 2, 4:04PM EST', u'Dividend': u'0.47', u'StockSymbol': u'AAPL', u'ID': u'22144'}, {u'Index': u'NASDAQ', u'LastTradeWithCurrency': u'571.34', u'LastTradeDateTime': u'2015-03-02T16:04:29Z', u'LastTradePrice': u'571.34', u'Yield': u'', u'LastTradeTime': u'4:04PM EST', u'LastTradeDateTimeLong': u'Mar 2, 4:04PM EST', u'Dividend': u'', u'StockSymbol': u'GOOG', u'ID': u'304466804484872'}] :param symbols: a single symbol or a list of stock symbols :return: real-time quotes list """ if type(symbols) == type('str'): symbols = [symbols] # depends on [control=['if'], data=[]] content = json.loads(request(symbols)) return replaceKeys(content)
def get_instances(self, state_filter=None):
    """
    Get all the instances filtered by state.

    @param state_filter: the state that the instance should be in
    (e.g. "running"), or None for all states
    """
    # Build the Instance wrappers in a single comprehension instead of a
    # manual append loop; behavior and ordering are unchanged.
    return [Instance(instance.id, instance.dns_name,
                     instance.private_dns_name, instance.private_ip_address)
            for instance in self._get_instances(
                self._get_cluster_group_name(), state_filter)]
def function[get_instances, parameter[self, state_filter]]: constant[ Get all the instances filtered by state. @param state_filter: the state that the instance should be in (e.g. "running"), or None for all states ] variable[instances] assign[=] list[[]] for taget[name[instance]] in starred[call[name[self]._get_instances, parameter[call[name[self]._get_cluster_group_name, parameter[]], name[state_filter]]]] begin[:] call[name[instances].append, parameter[call[name[Instance], parameter[name[instance].id, name[instance].dns_name, name[instance].private_dns_name, name[instance].private_ip_address]]]] return[name[instances]]
keyword[def] identifier[get_instances] ( identifier[self] , identifier[state_filter] = keyword[None] ): literal[string] identifier[instances] =[] keyword[for] identifier[instance] keyword[in] identifier[self] . identifier[_get_instances] ( identifier[self] . identifier[_get_cluster_group_name] (), identifier[state_filter] ): identifier[instances] . identifier[append] ( identifier[Instance] ( identifier[instance] . identifier[id] , identifier[instance] . identifier[dns_name] , identifier[instance] . identifier[private_dns_name] , identifier[instance] . identifier[private_ip_address] )) keyword[return] identifier[instances]
def get_instances(self, state_filter=None): """ Get all the instances filtered by state. @param state_filter: the state that the instance should be in (e.g. "running"), or None for all states """ instances = [] for instance in self._get_instances(self._get_cluster_group_name(), state_filter): instances.append(Instance(instance.id, instance.dns_name, instance.private_dns_name, instance.private_ip_address)) # depends on [control=['for'], data=['instance']] return instances
def translate_bytes(val):
    '''
    These values can be expressed as an integer number of bytes, or a
    string expression (i.e. 100mb, 1gb, etc.).
    '''
    # Integer-like values are returned as plain ints; anything else is
    # passed through as (or coerced to) a text string expression.
    try:
        return int(val)
    except (TypeError, ValueError):
        pass
    if isinstance(val, six.string_types):
        return val
    return six.text_type(val)
def function[translate_bytes, parameter[val]]: constant[ These values can be expressed as an integer number of bytes, or a string expression (i.e. 100mb, 1gb, etc.). ] <ast.Try object at 0x7da2041d9d20> return[name[val]]
keyword[def] identifier[translate_bytes] ( identifier[val] ): literal[string] keyword[try] : identifier[val] = identifier[int] ( identifier[val] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[if] keyword[not] identifier[isinstance] ( identifier[val] , identifier[six] . identifier[string_types] ): identifier[val] = identifier[six] . identifier[text_type] ( identifier[val] ) keyword[return] identifier[val]
def translate_bytes(val): """ These values can be expressed as an integer number of bytes, or a string expression (i.e. 100mb, 1gb, etc.). """ try: val = int(val) # depends on [control=['try'], data=[]] except (TypeError, ValueError): if not isinstance(val, six.string_types): val = six.text_type(val) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] return val
def get_artifact_url(context, task_id, path):
    """Get a TaskCluster artifact url.

    Args:
        context (scriptworker.context.Context): the scriptworker context
        task_id (str): the task id of the task that published the artifact
        path (str): the relative path of the artifact

    Returns:
        str: the artifact url

    Raises:
        TaskClusterFailure: on failure.

    """
    # Public artifacts need no credentials; anything else gets a signed
    # URL.  (buildSignedUrl also accepts an expiration kwarg in seconds;
    # the default is 15 minutes.)
    is_public = path.startswith("public/")
    build = context.queue.buildUrl if is_public else context.queue.buildSignedUrl
    return build('getLatestArtifact', task_id, path)
def function[get_artifact_url, parameter[context, task_id, path]]: constant[Get a TaskCluster artifact url. Args: context (scriptworker.context.Context): the scriptworker context task_id (str): the task id of the task that published the artifact path (str): the relative path of the artifact Returns: str: the artifact url Raises: TaskClusterFailure: on failure. ] if call[name[path].startswith, parameter[constant[public/]]] begin[:] variable[url] assign[=] call[name[context].queue.buildUrl, parameter[constant[getLatestArtifact], name[task_id], name[path]]] return[name[url]]
keyword[def] identifier[get_artifact_url] ( identifier[context] , identifier[task_id] , identifier[path] ): literal[string] keyword[if] identifier[path] . identifier[startswith] ( literal[string] ): identifier[url] = identifier[context] . identifier[queue] . identifier[buildUrl] ( literal[string] , identifier[task_id] , identifier[path] ) keyword[else] : identifier[url] = identifier[context] . identifier[queue] . identifier[buildSignedUrl] ( literal[string] , identifier[task_id] , identifier[path] , ) keyword[return] identifier[url]
def get_artifact_url(context, task_id, path): """Get a TaskCluster artifact url. Args: context (scriptworker.context.Context): the scriptworker context task_id (str): the task id of the task that published the artifact path (str): the relative path of the artifact Returns: str: the artifact url Raises: TaskClusterFailure: on failure. """ if path.startswith('public/'): url = context.queue.buildUrl('getLatestArtifact', task_id, path) # depends on [control=['if'], data=[]] else: # XXX Can set expiration kwarg in (int) seconds from now; # defaults to 15min. url = context.queue.buildSignedUrl('getLatestArtifact', task_id, path) return url
def read_csv(filename):
    """Reads a CSV file containing a tabular description of a transition
    function, as found in Sipser. Major difference: instead of multiple
    header rows, only a single header row whose entries might be tuples.
    """
    # Materialize all rows before handing them to the table parser.
    with open(filename) as csv_file:
        reader = csv.reader(csv_file)
        rows = list(reader)
    return from_table(rows)
def function[read_csv, parameter[filename]]: constant[Reads a CSV file containing a tabular description of a transition function, as found in Sipser. Major difference: instead of multiple header rows, only a single header row whose entries might be tuples. ] with call[name[open], parameter[name[filename]]] begin[:] variable[table] assign[=] call[name[list], parameter[call[name[csv].reader, parameter[name[file]]]]] variable[m] assign[=] call[name[from_table], parameter[name[table]]] return[name[m]]
keyword[def] identifier[read_csv] ( identifier[filename] ): literal[string] keyword[with] identifier[open] ( identifier[filename] ) keyword[as] identifier[file] : identifier[table] = identifier[list] ( identifier[csv] . identifier[reader] ( identifier[file] )) identifier[m] = identifier[from_table] ( identifier[table] ) keyword[return] identifier[m]
def read_csv(filename): """Reads a CSV file containing a tabular description of a transition function, as found in Sipser. Major difference: instead of multiple header rows, only a single header row whose entries might be tuples. """ with open(filename) as file: table = list(csv.reader(file)) # depends on [control=['with'], data=['file']] m = from_table(table) return m
def size(value):
    """ValueRef : number, area of the mark in pixels

    This is the total area of a symbol. For example, a value of 500 and a
    ``shape`` of ``'circle'`` would result in circles with an area of 500
    square pixels. Only used if ``type`` is ``'symbol'``.
    """
    # Nothing to validate for a falsy/absent literal value.
    if not value.value:
        return
    _assert_is_type('size.value', value.value, int)
    if value.value < 0:
        raise ValueError('size cannot be negative')
def function[size, parameter[value]]: constant[ValueRef : number, area of the mark in pixels This is the total area of a symbol. For example, a value of 500 and a ``shape`` of ``'circle'`` would result in circles with an area of 500 square pixels. Only used if ``type`` is ``'symbol'``. ] if name[value].value begin[:] call[name[_assert_is_type], parameter[constant[size.value], name[value].value, name[int]]] if compare[name[value].value less[<] constant[0]] begin[:] <ast.Raise object at 0x7da18f58c910>
keyword[def] identifier[size] ( identifier[value] ): literal[string] keyword[if] identifier[value] . identifier[value] : identifier[_assert_is_type] ( literal[string] , identifier[value] . identifier[value] , identifier[int] ) keyword[if] identifier[value] . identifier[value] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] )
def size(value): """ValueRef : number, area of the mark in pixels This is the total area of a symbol. For example, a value of 500 and a ``shape`` of ``'circle'`` would result in circles with an area of 500 square pixels. Only used if ``type`` is ``'symbol'``. """ if value.value: _assert_is_type('size.value', value.value, int) if value.value < 0: raise ValueError('size cannot be negative') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def add_list_opt(self, opt, values):
    """
    Add an option with a list of non-file parameters.
    """
    # Emit the option flag first, then each value, in order.
    for token in (opt, *values):
        self.add_opt(token)
def function[add_list_opt, parameter[self, opt, values]]: constant[ Add an option with a list of non-file parameters. ] call[name[self].add_opt, parameter[name[opt]]] for taget[name[val]] in starred[name[values]] begin[:] call[name[self].add_opt, parameter[name[val]]]
keyword[def] identifier[add_list_opt] ( identifier[self] , identifier[opt] , identifier[values] ): literal[string] identifier[self] . identifier[add_opt] ( identifier[opt] ) keyword[for] identifier[val] keyword[in] identifier[values] : identifier[self] . identifier[add_opt] ( identifier[val] )
def add_list_opt(self, opt, values): """ Add an option with a list of non-file parameters. """ self.add_opt(opt) for val in values: self.add_opt(val) # depends on [control=['for'], data=['val']]
def _create_output_from_match(self, match_result):
    """As isort outputs full path, we change it to relative path."""
    relative = self._get_relative_path(match_result['full_path'])
    return LinterOutput(self.name, relative, match_result['msg'])
def function[_create_output_from_match, parameter[self, match_result]]: constant[As isort outputs full path, we change it to relative path.] variable[full_path] assign[=] call[name[match_result]][constant[full_path]] variable[path] assign[=] call[name[self]._get_relative_path, parameter[name[full_path]]] return[call[name[LinterOutput], parameter[name[self].name, name[path], call[name[match_result]][constant[msg]]]]]
keyword[def] identifier[_create_output_from_match] ( identifier[self] , identifier[match_result] ): literal[string] identifier[full_path] = identifier[match_result] [ literal[string] ] identifier[path] = identifier[self] . identifier[_get_relative_path] ( identifier[full_path] ) keyword[return] identifier[LinterOutput] ( identifier[self] . identifier[name] , identifier[path] , identifier[match_result] [ literal[string] ])
def _create_output_from_match(self, match_result): """As isort outputs full path, we change it to relative path.""" full_path = match_result['full_path'] path = self._get_relative_path(full_path) return LinterOutput(self.name, path, match_result['msg'])
def new_term(self, term, value, **kwargs):
    """Create a new root-level term in this section"""
    # Resolve the term class from the lower-cased name, instantiate it
    # rooted at this section, expand children, then register it.
    term_class = self.doc.get_term_class(term.lower())
    new_t = term_class(term, value, doc=self.doc, parent=None, section=self)
    new_t = new_t.new_children(**kwargs)
    self.doc.add_term(new_t)
    return new_t
def function[new_term, parameter[self, term, value]]: constant[Create a new root-level term in this section] variable[tc] assign[=] call[name[self].doc.get_term_class, parameter[call[name[term].lower, parameter[]]]] variable[t] assign[=] call[call[name[tc], parameter[name[term], name[value]]].new_children, parameter[]] call[name[self].doc.add_term, parameter[name[t]]] return[name[t]]
keyword[def] identifier[new_term] ( identifier[self] , identifier[term] , identifier[value] ,** identifier[kwargs] ): literal[string] identifier[tc] = identifier[self] . identifier[doc] . identifier[get_term_class] ( identifier[term] . identifier[lower] ()) identifier[t] = identifier[tc] ( identifier[term] , identifier[value] , identifier[doc] = identifier[self] . identifier[doc] , identifier[parent] = keyword[None] , identifier[section] = identifier[self] ). identifier[new_children] (** identifier[kwargs] ) identifier[self] . identifier[doc] . identifier[add_term] ( identifier[t] ) keyword[return] identifier[t]
def new_term(self, term, value, **kwargs): """Create a new root-level term in this section""" tc = self.doc.get_term_class(term.lower()) t = tc(term, value, doc=self.doc, parent=None, section=self).new_children(**kwargs) self.doc.add_term(t) return t
def get(path, **kwargs): """requests.get wrapper""" token = os.environ.get(BE_GITHUB_API_TOKEN) if token: kwargs["headers"] = { "Authorization": "token %s" % token } try: response = requests.get(path, verify=False, **kwargs) if response.status_code == 403: lib.echo("Patience: You can't pull more than 60 " "presets per hour without an API token.\n" "See https://github.com/mottosso/be/wiki" "/advanced#extended-preset-access") sys.exit(lib.USER_ERROR) return response except Exception as e: if self.verbose: lib.echo("ERROR: %s" % e) else: lib.echo("ERROR: Something went wrong. " "See --verbose for more information")
def function[get, parameter[path]]: constant[requests.get wrapper] variable[token] assign[=] call[name[os].environ.get, parameter[name[BE_GITHUB_API_TOKEN]]] if name[token] begin[:] call[name[kwargs]][constant[headers]] assign[=] dictionary[[<ast.Constant object at 0x7da1b11ed1b0>], [<ast.BinOp object at 0x7da1b11eff70>]] <ast.Try object at 0x7da1b11ec940>
keyword[def] identifier[get] ( identifier[path] ,** identifier[kwargs] ): literal[string] identifier[token] = identifier[os] . identifier[environ] . identifier[get] ( identifier[BE_GITHUB_API_TOKEN] ) keyword[if] identifier[token] : identifier[kwargs] [ literal[string] ]={ literal[string] : literal[string] % identifier[token] } keyword[try] : identifier[response] = identifier[requests] . identifier[get] ( identifier[path] , identifier[verify] = keyword[False] ,** identifier[kwargs] ) keyword[if] identifier[response] . identifier[status_code] == literal[int] : identifier[lib] . identifier[echo] ( literal[string] literal[string] literal[string] literal[string] ) identifier[sys] . identifier[exit] ( identifier[lib] . identifier[USER_ERROR] ) keyword[return] identifier[response] keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[if] identifier[self] . identifier[verbose] : identifier[lib] . identifier[echo] ( literal[string] % identifier[e] ) keyword[else] : identifier[lib] . identifier[echo] ( literal[string] literal[string] )
def get(path, **kwargs): """requests.get wrapper""" token = os.environ.get(BE_GITHUB_API_TOKEN) if token: kwargs['headers'] = {'Authorization': 'token %s' % token} # depends on [control=['if'], data=[]] try: response = requests.get(path, verify=False, **kwargs) if response.status_code == 403: lib.echo("Patience: You can't pull more than 60 presets per hour without an API token.\nSee https://github.com/mottosso/be/wiki/advanced#extended-preset-access") sys.exit(lib.USER_ERROR) # depends on [control=['if'], data=[]] return response # depends on [control=['try'], data=[]] except Exception as e: if self.verbose: lib.echo('ERROR: %s' % e) # depends on [control=['if'], data=[]] else: lib.echo('ERROR: Something went wrong. See --verbose for more information') # depends on [control=['except'], data=['e']]
def split_pred_string(predstr): """ Split *predstr* and return the (lemma, pos, sense, suffix) components. Examples: >>> Pred.split_pred_string('_dog_n_1_rel') ('dog', 'n', '1', 'rel') >>> Pred.split_pred_string('quant_rel') ('quant', None, None, 'rel') """ predstr = predstr.strip('"\'') # surrounding quotes don't matter rel_added = False if not predstr.lower().endswith('_rel'): logging.debug('Predicate does not end in "_rel": {}' .format(predstr)) rel_added = True predstr += '_rel' match = Pred.pred_re.search(predstr) if match is None: logging.debug('Unexpected predicate string: {}'.format(predstr)) return (predstr, None, None, None) # _lemma_pos(_sense)?_end return (match.group('lemma'), match.group('pos'), match.group('sense'), None if rel_added else match.group('end'))
def function[split_pred_string, parameter[predstr]]: constant[ Split *predstr* and return the (lemma, pos, sense, suffix) components. Examples: >>> Pred.split_pred_string('_dog_n_1_rel') ('dog', 'n', '1', 'rel') >>> Pred.split_pred_string('quant_rel') ('quant', None, None, 'rel') ] variable[predstr] assign[=] call[name[predstr].strip, parameter[constant["']]] variable[rel_added] assign[=] constant[False] if <ast.UnaryOp object at 0x7da1b0400fd0> begin[:] call[name[logging].debug, parameter[call[constant[Predicate does not end in "_rel": {}].format, parameter[name[predstr]]]]] variable[rel_added] assign[=] constant[True] <ast.AugAssign object at 0x7da1b0400af0> variable[match] assign[=] call[name[Pred].pred_re.search, parameter[name[predstr]]] if compare[name[match] is constant[None]] begin[:] call[name[logging].debug, parameter[call[constant[Unexpected predicate string: {}].format, parameter[name[predstr]]]]] return[tuple[[<ast.Name object at 0x7da1b0400d00>, <ast.Constant object at 0x7da1b04037c0>, <ast.Constant object at 0x7da1b0403280>, <ast.Constant object at 0x7da1b04003d0>]]] return[tuple[[<ast.Call object at 0x7da1b0403550>, <ast.Call object at 0x7da1b0402da0>, <ast.Call object at 0x7da1b0403760>, <ast.IfExp object at 0x7da1b0400160>]]]
keyword[def] identifier[split_pred_string] ( identifier[predstr] ): literal[string] identifier[predstr] = identifier[predstr] . identifier[strip] ( literal[string] ) identifier[rel_added] = keyword[False] keyword[if] keyword[not] identifier[predstr] . identifier[lower] (). identifier[endswith] ( literal[string] ): identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[predstr] )) identifier[rel_added] = keyword[True] identifier[predstr] += literal[string] identifier[match] = identifier[Pred] . identifier[pred_re] . identifier[search] ( identifier[predstr] ) keyword[if] identifier[match] keyword[is] keyword[None] : identifier[logging] . identifier[debug] ( literal[string] . identifier[format] ( identifier[predstr] )) keyword[return] ( identifier[predstr] , keyword[None] , keyword[None] , keyword[None] ) keyword[return] ( identifier[match] . identifier[group] ( literal[string] ), identifier[match] . identifier[group] ( literal[string] ), identifier[match] . identifier[group] ( literal[string] ), keyword[None] keyword[if] identifier[rel_added] keyword[else] identifier[match] . identifier[group] ( literal[string] ))
def split_pred_string(predstr): """ Split *predstr* and return the (lemma, pos, sense, suffix) components. Examples: >>> Pred.split_pred_string('_dog_n_1_rel') ('dog', 'n', '1', 'rel') >>> Pred.split_pred_string('quant_rel') ('quant', None, None, 'rel') """ predstr = predstr.strip('"\'') # surrounding quotes don't matter rel_added = False if not predstr.lower().endswith('_rel'): logging.debug('Predicate does not end in "_rel": {}'.format(predstr)) rel_added = True predstr += '_rel' # depends on [control=['if'], data=[]] match = Pred.pred_re.search(predstr) if match is None: logging.debug('Unexpected predicate string: {}'.format(predstr)) return (predstr, None, None, None) # depends on [control=['if'], data=[]] # _lemma_pos(_sense)?_end return (match.group('lemma'), match.group('pos'), match.group('sense'), None if rel_added else match.group('end'))
def stop(self): """Stop ZAP authentication""" if self.__task: self.__task.cancel() if self.__poller: self.__poller.unregister(self.zap_socket) self.__poller = None super().stop()
def function[stop, parameter[self]]: constant[Stop ZAP authentication] if name[self].__task begin[:] call[name[self].__task.cancel, parameter[]] if name[self].__poller begin[:] call[name[self].__poller.unregister, parameter[name[self].zap_socket]] name[self].__poller assign[=] constant[None] call[call[name[super], parameter[]].stop, parameter[]]
keyword[def] identifier[stop] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[__task] : identifier[self] . identifier[__task] . identifier[cancel] () keyword[if] identifier[self] . identifier[__poller] : identifier[self] . identifier[__poller] . identifier[unregister] ( identifier[self] . identifier[zap_socket] ) identifier[self] . identifier[__poller] = keyword[None] identifier[super] (). identifier[stop] ()
def stop(self): """Stop ZAP authentication""" if self.__task: self.__task.cancel() # depends on [control=['if'], data=[]] if self.__poller: self.__poller.unregister(self.zap_socket) self.__poller = None # depends on [control=['if'], data=[]] super().stop()
def unmet_expectations(self): ''' Assert that all expectations on the stub have been met. ''' unmet = [] for exp in self._expectations: if not exp.closed(with_counts=True): unmet.append(ExpectationNotSatisfied(exp)) return unmet
def function[unmet_expectations, parameter[self]]: constant[ Assert that all expectations on the stub have been met. ] variable[unmet] assign[=] list[[]] for taget[name[exp]] in starred[name[self]._expectations] begin[:] if <ast.UnaryOp object at 0x7da1b1b0d2d0> begin[:] call[name[unmet].append, parameter[call[name[ExpectationNotSatisfied], parameter[name[exp]]]]] return[name[unmet]]
keyword[def] identifier[unmet_expectations] ( identifier[self] ): literal[string] identifier[unmet] =[] keyword[for] identifier[exp] keyword[in] identifier[self] . identifier[_expectations] : keyword[if] keyword[not] identifier[exp] . identifier[closed] ( identifier[with_counts] = keyword[True] ): identifier[unmet] . identifier[append] ( identifier[ExpectationNotSatisfied] ( identifier[exp] )) keyword[return] identifier[unmet]
def unmet_expectations(self): """ Assert that all expectations on the stub have been met. """ unmet = [] for exp in self._expectations: if not exp.closed(with_counts=True): unmet.append(ExpectationNotSatisfied(exp)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['exp']] return unmet
def request(self, method, url, params=None, data=None, headers=None, auth=None, timeout=None, allow_redirects=False): """ Make a signed HTTP Request :param str method: The HTTP method to use :param str url: The URL to request :param dict params: Query parameters to append to the URL :param dict data: Parameters to go in the body of the HTTP request :param dict headers: HTTP Headers to send with the request :param tuple auth: Basic Auth arguments :param float timeout: Socket/Read timeout for the request :param boolean allow_redirects: Whether or not to allow redirects See the requests documentation for explanation of all these parameters :return: An http response :rtype: A :class:`Response <twilio.rest.http.response.Response>` object """ session = self.session or Session() request = Request(method.upper(), url, params=params, data=data, headers=headers, auth=auth) prepared_request = session.prepare_request(request) if 'Host' not in prepared_request.headers and 'host' not in prepared_request.headers: prepared_request.headers['Host'] = self._get_host(prepared_request) validation_payload = self._build_validation_payload(prepared_request) jwt = ClientValidationJwt(self.account_sid, self.api_key_sid, self.credential_sid, self.private_key, validation_payload) prepared_request.headers['Twilio-Client-Validation'] = jwt.to_jwt() response = session.send( prepared_request, allow_redirects=allow_redirects, timeout=timeout, ) return Response(int(response.status_code), response.text)
def function[request, parameter[self, method, url, params, data, headers, auth, timeout, allow_redirects]]: constant[ Make a signed HTTP Request :param str method: The HTTP method to use :param str url: The URL to request :param dict params: Query parameters to append to the URL :param dict data: Parameters to go in the body of the HTTP request :param dict headers: HTTP Headers to send with the request :param tuple auth: Basic Auth arguments :param float timeout: Socket/Read timeout for the request :param boolean allow_redirects: Whether or not to allow redirects See the requests documentation for explanation of all these parameters :return: An http response :rtype: A :class:`Response <twilio.rest.http.response.Response>` object ] variable[session] assign[=] <ast.BoolOp object at 0x7da1b1eae1d0> variable[request] assign[=] call[name[Request], parameter[call[name[method].upper, parameter[]], name[url]]] variable[prepared_request] assign[=] call[name[session].prepare_request, parameter[name[request]]] if <ast.BoolOp object at 0x7da1b1eae080> begin[:] call[name[prepared_request].headers][constant[Host]] assign[=] call[name[self]._get_host, parameter[name[prepared_request]]] variable[validation_payload] assign[=] call[name[self]._build_validation_payload, parameter[name[prepared_request]]] variable[jwt] assign[=] call[name[ClientValidationJwt], parameter[name[self].account_sid, name[self].api_key_sid, name[self].credential_sid, name[self].private_key, name[validation_payload]]] call[name[prepared_request].headers][constant[Twilio-Client-Validation]] assign[=] call[name[jwt].to_jwt, parameter[]] variable[response] assign[=] call[name[session].send, parameter[name[prepared_request]]] return[call[name[Response], parameter[call[name[int], parameter[name[response].status_code]], name[response].text]]]
keyword[def] identifier[request] ( identifier[self] , identifier[method] , identifier[url] , identifier[params] = keyword[None] , identifier[data] = keyword[None] , identifier[headers] = keyword[None] , identifier[auth] = keyword[None] , identifier[timeout] = keyword[None] , identifier[allow_redirects] = keyword[False] ): literal[string] identifier[session] = identifier[self] . identifier[session] keyword[or] identifier[Session] () identifier[request] = identifier[Request] ( identifier[method] . identifier[upper] (), identifier[url] , identifier[params] = identifier[params] , identifier[data] = identifier[data] , identifier[headers] = identifier[headers] , identifier[auth] = identifier[auth] ) identifier[prepared_request] = identifier[session] . identifier[prepare_request] ( identifier[request] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[prepared_request] . identifier[headers] keyword[and] literal[string] keyword[not] keyword[in] identifier[prepared_request] . identifier[headers] : identifier[prepared_request] . identifier[headers] [ literal[string] ]= identifier[self] . identifier[_get_host] ( identifier[prepared_request] ) identifier[validation_payload] = identifier[self] . identifier[_build_validation_payload] ( identifier[prepared_request] ) identifier[jwt] = identifier[ClientValidationJwt] ( identifier[self] . identifier[account_sid] , identifier[self] . identifier[api_key_sid] , identifier[self] . identifier[credential_sid] , identifier[self] . identifier[private_key] , identifier[validation_payload] ) identifier[prepared_request] . identifier[headers] [ literal[string] ]= identifier[jwt] . identifier[to_jwt] () identifier[response] = identifier[session] . identifier[send] ( identifier[prepared_request] , identifier[allow_redirects] = identifier[allow_redirects] , identifier[timeout] = identifier[timeout] , ) keyword[return] identifier[Response] ( identifier[int] ( identifier[response] . identifier[status_code] ), identifier[response] . 
identifier[text] )
def request(self, method, url, params=None, data=None, headers=None, auth=None, timeout=None, allow_redirects=False): """ Make a signed HTTP Request :param str method: The HTTP method to use :param str url: The URL to request :param dict params: Query parameters to append to the URL :param dict data: Parameters to go in the body of the HTTP request :param dict headers: HTTP Headers to send with the request :param tuple auth: Basic Auth arguments :param float timeout: Socket/Read timeout for the request :param boolean allow_redirects: Whether or not to allow redirects See the requests documentation for explanation of all these parameters :return: An http response :rtype: A :class:`Response <twilio.rest.http.response.Response>` object """ session = self.session or Session() request = Request(method.upper(), url, params=params, data=data, headers=headers, auth=auth) prepared_request = session.prepare_request(request) if 'Host' not in prepared_request.headers and 'host' not in prepared_request.headers: prepared_request.headers['Host'] = self._get_host(prepared_request) # depends on [control=['if'], data=[]] validation_payload = self._build_validation_payload(prepared_request) jwt = ClientValidationJwt(self.account_sid, self.api_key_sid, self.credential_sid, self.private_key, validation_payload) prepared_request.headers['Twilio-Client-Validation'] = jwt.to_jwt() response = session.send(prepared_request, allow_redirects=allow_redirects, timeout=timeout) return Response(int(response.status_code), response.text)
def is_unitary(self, atol=None, rtol=None): """Return True if QuantumChannel is a unitary channel.""" try: op = self.to_operator() return op.is_unitary(atol=atol, rtol=rtol) except QiskitError: return False
def function[is_unitary, parameter[self, atol, rtol]]: constant[Return True if QuantumChannel is a unitary channel.] <ast.Try object at 0x7da1b03a5f90>
keyword[def] identifier[is_unitary] ( identifier[self] , identifier[atol] = keyword[None] , identifier[rtol] = keyword[None] ): literal[string] keyword[try] : identifier[op] = identifier[self] . identifier[to_operator] () keyword[return] identifier[op] . identifier[is_unitary] ( identifier[atol] = identifier[atol] , identifier[rtol] = identifier[rtol] ) keyword[except] identifier[QiskitError] : keyword[return] keyword[False]
def is_unitary(self, atol=None, rtol=None): """Return True if QuantumChannel is a unitary channel.""" try: op = self.to_operator() return op.is_unitary(atol=atol, rtol=rtol) # depends on [control=['try'], data=[]] except QiskitError: return False # depends on [control=['except'], data=[]]
def _members_changed( sender, instance, action, reverse, model, pk_set, **kwargs): """ Hook that executes whenever the group members are changed. """ if action == "post_add": if not reverse: group = instance for person in model.objects.filter(pk__in=pk_set): log.change(person, "Added person to group %s" % group) log.change(group, "Added person %s to group" % person) _add_person_to_group(person, group) else: person = instance for group in model.objects.filter(pk__in=pk_set): log.change(person, "Added person to group %s" % group) log.change(group, "Added person %s to group" % person) _add_person_to_group(person, group) elif action == "post_remove": if not reverse: group = instance for person in model.objects.filter(pk__in=pk_set): log.change(person, "Removed person from group %s" % group) log.change(group, "Removed person %s from group" % person) _remove_person_from_group(person, group) else: person = instance for group in model.objects.filter(pk__in=pk_set): log.change(person, "Removed person from group %s" % group) log.change(group, "Removed person %s from group" % person) _remove_person_from_group(person, group) elif action == "pre_clear": # This has to occur in pre_clear, not post_clear, as otherwise # we won't see what groups need to be removed. if not reverse: group = instance log.change(group, "Removed all people from group") for person in group.members.all(): log.change(group, "Removed person %s from group" % person) _remove_person_from_group(person, group) else: person = instance log.change(person, "Removed person from all groups") for group in person.groups.all(): log.change(group, "Removed person %s from group" % person) _remove_person_from_group(person, group)
def function[_members_changed, parameter[sender, instance, action, reverse, model, pk_set]]: constant[ Hook that executes whenever the group members are changed. ] if compare[name[action] equal[==] constant[post_add]] begin[:] if <ast.UnaryOp object at 0x7da18f721690> begin[:] variable[group] assign[=] name[instance] for taget[name[person]] in starred[call[name[model].objects.filter, parameter[]]] begin[:] call[name[log].change, parameter[name[person], binary_operation[constant[Added person to group %s] <ast.Mod object at 0x7da2590d6920> name[group]]]] call[name[log].change, parameter[name[group], binary_operation[constant[Added person %s to group] <ast.Mod object at 0x7da2590d6920> name[person]]]] call[name[_add_person_to_group], parameter[name[person], name[group]]]
keyword[def] identifier[_members_changed] ( identifier[sender] , identifier[instance] , identifier[action] , identifier[reverse] , identifier[model] , identifier[pk_set] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[action] == literal[string] : keyword[if] keyword[not] identifier[reverse] : identifier[group] = identifier[instance] keyword[for] identifier[person] keyword[in] identifier[model] . identifier[objects] . identifier[filter] ( identifier[pk__in] = identifier[pk_set] ): identifier[log] . identifier[change] ( identifier[person] , literal[string] % identifier[group] ) identifier[log] . identifier[change] ( identifier[group] , literal[string] % identifier[person] ) identifier[_add_person_to_group] ( identifier[person] , identifier[group] ) keyword[else] : identifier[person] = identifier[instance] keyword[for] identifier[group] keyword[in] identifier[model] . identifier[objects] . identifier[filter] ( identifier[pk__in] = identifier[pk_set] ): identifier[log] . identifier[change] ( identifier[person] , literal[string] % identifier[group] ) identifier[log] . identifier[change] ( identifier[group] , literal[string] % identifier[person] ) identifier[_add_person_to_group] ( identifier[person] , identifier[group] ) keyword[elif] identifier[action] == literal[string] : keyword[if] keyword[not] identifier[reverse] : identifier[group] = identifier[instance] keyword[for] identifier[person] keyword[in] identifier[model] . identifier[objects] . identifier[filter] ( identifier[pk__in] = identifier[pk_set] ): identifier[log] . identifier[change] ( identifier[person] , literal[string] % identifier[group] ) identifier[log] . identifier[change] ( identifier[group] , literal[string] % identifier[person] ) identifier[_remove_person_from_group] ( identifier[person] , identifier[group] ) keyword[else] : identifier[person] = identifier[instance] keyword[for] identifier[group] keyword[in] identifier[model] . identifier[objects] . 
identifier[filter] ( identifier[pk__in] = identifier[pk_set] ): identifier[log] . identifier[change] ( identifier[person] , literal[string] % identifier[group] ) identifier[log] . identifier[change] ( identifier[group] , literal[string] % identifier[person] ) identifier[_remove_person_from_group] ( identifier[person] , identifier[group] ) keyword[elif] identifier[action] == literal[string] : keyword[if] keyword[not] identifier[reverse] : identifier[group] = identifier[instance] identifier[log] . identifier[change] ( identifier[group] , literal[string] ) keyword[for] identifier[person] keyword[in] identifier[group] . identifier[members] . identifier[all] (): identifier[log] . identifier[change] ( identifier[group] , literal[string] % identifier[person] ) identifier[_remove_person_from_group] ( identifier[person] , identifier[group] ) keyword[else] : identifier[person] = identifier[instance] identifier[log] . identifier[change] ( identifier[person] , literal[string] ) keyword[for] identifier[group] keyword[in] identifier[person] . identifier[groups] . identifier[all] (): identifier[log] . identifier[change] ( identifier[group] , literal[string] % identifier[person] ) identifier[_remove_person_from_group] ( identifier[person] , identifier[group] )
def _members_changed(sender, instance, action, reverse, model, pk_set, **kwargs): """ Hook that executes whenever the group members are changed. """ if action == 'post_add': if not reverse: group = instance for person in model.objects.filter(pk__in=pk_set): log.change(person, 'Added person to group %s' % group) log.change(group, 'Added person %s to group' % person) _add_person_to_group(person, group) # depends on [control=['for'], data=['person']] # depends on [control=['if'], data=[]] else: person = instance for group in model.objects.filter(pk__in=pk_set): log.change(person, 'Added person to group %s' % group) log.change(group, 'Added person %s to group' % person) _add_person_to_group(person, group) # depends on [control=['for'], data=['group']] # depends on [control=['if'], data=[]] elif action == 'post_remove': if not reverse: group = instance for person in model.objects.filter(pk__in=pk_set): log.change(person, 'Removed person from group %s' % group) log.change(group, 'Removed person %s from group' % person) _remove_person_from_group(person, group) # depends on [control=['for'], data=['person']] # depends on [control=['if'], data=[]] else: person = instance for group in model.objects.filter(pk__in=pk_set): log.change(person, 'Removed person from group %s' % group) log.change(group, 'Removed person %s from group' % person) _remove_person_from_group(person, group) # depends on [control=['for'], data=['group']] # depends on [control=['if'], data=[]] elif action == 'pre_clear': # This has to occur in pre_clear, not post_clear, as otherwise # we won't see what groups need to be removed. 
if not reverse: group = instance log.change(group, 'Removed all people from group') for person in group.members.all(): log.change(group, 'Removed person %s from group' % person) _remove_person_from_group(person, group) # depends on [control=['for'], data=['person']] # depends on [control=['if'], data=[]] else: person = instance log.change(person, 'Removed person from all groups') for group in person.groups.all(): log.change(group, 'Removed person %s from group' % person) _remove_person_from_group(person, group) # depends on [control=['for'], data=['group']] # depends on [control=['if'], data=[]]
def _rnd_date(start, end): """Internal random date generator. """ return date.fromordinal(random.randint(start.toordinal(), end.toordinal()))
def function[_rnd_date, parameter[start, end]]: constant[Internal random date generator. ] return[call[name[date].fromordinal, parameter[call[name[random].randint, parameter[call[name[start].toordinal, parameter[]], call[name[end].toordinal, parameter[]]]]]]]
keyword[def] identifier[_rnd_date] ( identifier[start] , identifier[end] ): literal[string] keyword[return] identifier[date] . identifier[fromordinal] ( identifier[random] . identifier[randint] ( identifier[start] . identifier[toordinal] (), identifier[end] . identifier[toordinal] ()))
def _rnd_date(start, end): """Internal random date generator. """ return date.fromordinal(random.randint(start.toordinal(), end.toordinal()))
def set_data_filters(self, data_filter_set): """Sets the data filters (non indexed argument filters) Expects a set of tuples with the type and value, e.g.: (('uint256', [12345, 54321]), ('string', ('a-single-string',))) """ self.data_filter_set = data_filter_set if any(data_filter_set): self.data_filter_set_function = match_fn(data_filter_set)
def function[set_data_filters, parameter[self, data_filter_set]]: constant[Sets the data filters (non indexed argument filters) Expects a set of tuples with the type and value, e.g.: (('uint256', [12345, 54321]), ('string', ('a-single-string',))) ] name[self].data_filter_set assign[=] name[data_filter_set] if call[name[any], parameter[name[data_filter_set]]] begin[:] name[self].data_filter_set_function assign[=] call[name[match_fn], parameter[name[data_filter_set]]]
keyword[def] identifier[set_data_filters] ( identifier[self] , identifier[data_filter_set] ): literal[string] identifier[self] . identifier[data_filter_set] = identifier[data_filter_set] keyword[if] identifier[any] ( identifier[data_filter_set] ): identifier[self] . identifier[data_filter_set_function] = identifier[match_fn] ( identifier[data_filter_set] )
def set_data_filters(self, data_filter_set): """Sets the data filters (non indexed argument filters) Expects a set of tuples with the type and value, e.g.: (('uint256', [12345, 54321]), ('string', ('a-single-string',))) """ self.data_filter_set = data_filter_set if any(data_filter_set): self.data_filter_set_function = match_fn(data_filter_set) # depends on [control=['if'], data=[]]
def get_compiler(compiler, **compiler_attrs): """get and customize a compiler""" if compiler is None or isinstance(compiler, str): cc = ccompiler.new_compiler(compiler=compiler, verbose=0) customize_compiler(cc) if cc.compiler_type == 'mingw32': customize_mingw(cc) else: cc = compiler customize_gcc(cc) for name, val in compiler_attrs.items(): setattr(cc, name, val) return cc
def function[get_compiler, parameter[compiler]]: constant[get and customize a compiler] if <ast.BoolOp object at 0x7da1b09ece80> begin[:] variable[cc] assign[=] call[name[ccompiler].new_compiler, parameter[]] call[name[customize_compiler], parameter[name[cc]]] if compare[name[cc].compiler_type equal[==] constant[mingw32]] begin[:] call[name[customize_mingw], parameter[name[cc]]] for taget[tuple[[<ast.Name object at 0x7da1b09ec280>, <ast.Name object at 0x7da1b09ef6a0>]]] in starred[call[name[compiler_attrs].items, parameter[]]] begin[:] call[name[setattr], parameter[name[cc], name[name], name[val]]] return[name[cc]]
keyword[def] identifier[get_compiler] ( identifier[compiler] ,** identifier[compiler_attrs] ): literal[string] keyword[if] identifier[compiler] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[compiler] , identifier[str] ): identifier[cc] = identifier[ccompiler] . identifier[new_compiler] ( identifier[compiler] = identifier[compiler] , identifier[verbose] = literal[int] ) identifier[customize_compiler] ( identifier[cc] ) keyword[if] identifier[cc] . identifier[compiler_type] == literal[string] : identifier[customize_mingw] ( identifier[cc] ) keyword[else] : identifier[cc] = identifier[compiler] identifier[customize_gcc] ( identifier[cc] ) keyword[for] identifier[name] , identifier[val] keyword[in] identifier[compiler_attrs] . identifier[items] (): identifier[setattr] ( identifier[cc] , identifier[name] , identifier[val] ) keyword[return] identifier[cc]
def get_compiler(compiler, **compiler_attrs): """get and customize a compiler""" if compiler is None or isinstance(compiler, str): cc = ccompiler.new_compiler(compiler=compiler, verbose=0) customize_compiler(cc) if cc.compiler_type == 'mingw32': customize_mingw(cc) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: cc = compiler customize_gcc(cc) for (name, val) in compiler_attrs.items(): setattr(cc, name, val) # depends on [control=['for'], data=[]] return cc
def idle_connections(self): """Return a list of idle connections :rtype: list """ return [c for c in self.connections.values() if not c.busy and not c.closed]
def function[idle_connections, parameter[self]]: constant[Return a list of idle connections :rtype: list ] return[<ast.ListComp object at 0x7da204346f50>]
keyword[def] identifier[idle_connections] ( identifier[self] ): literal[string] keyword[return] [ identifier[c] keyword[for] identifier[c] keyword[in] identifier[self] . identifier[connections] . identifier[values] () keyword[if] keyword[not] identifier[c] . identifier[busy] keyword[and] keyword[not] identifier[c] . identifier[closed] ]
def idle_connections(self): """Return a list of idle connections :rtype: list """ return [c for c in self.connections.values() if not c.busy and (not c.closed)]
def register_views(*args): """ Registration view for each resource from config. """ config = args[0] settings = config.get_settings() pages_config = settings[CONFIG_MODELS] resources = resources_of_config(pages_config) for resource in resources: if hasattr(resource, '__table__')\ and not hasattr(resource, 'model'): continue resource.model.pyramid_pages_template = resource.template config.add_view(resource.view, attr=resource.attr, route_name=PREFIX_PAGE, renderer=resource.template, context=resource, permission=PREFIX_PAGE)
def function[register_views, parameter[]]: constant[ Registration view for each resource from config. ] variable[config] assign[=] call[name[args]][constant[0]] variable[settings] assign[=] call[name[config].get_settings, parameter[]] variable[pages_config] assign[=] call[name[settings]][name[CONFIG_MODELS]] variable[resources] assign[=] call[name[resources_of_config], parameter[name[pages_config]]] for taget[name[resource]] in starred[name[resources]] begin[:] if <ast.BoolOp object at 0x7da20e9557e0> begin[:] continue name[resource].model.pyramid_pages_template assign[=] name[resource].template call[name[config].add_view, parameter[name[resource].view]]
keyword[def] identifier[register_views] (* identifier[args] ): literal[string] identifier[config] = identifier[args] [ literal[int] ] identifier[settings] = identifier[config] . identifier[get_settings] () identifier[pages_config] = identifier[settings] [ identifier[CONFIG_MODELS] ] identifier[resources] = identifier[resources_of_config] ( identifier[pages_config] ) keyword[for] identifier[resource] keyword[in] identifier[resources] : keyword[if] identifier[hasattr] ( identifier[resource] , literal[string] ) keyword[and] keyword[not] identifier[hasattr] ( identifier[resource] , literal[string] ): keyword[continue] identifier[resource] . identifier[model] . identifier[pyramid_pages_template] = identifier[resource] . identifier[template] identifier[config] . identifier[add_view] ( identifier[resource] . identifier[view] , identifier[attr] = identifier[resource] . identifier[attr] , identifier[route_name] = identifier[PREFIX_PAGE] , identifier[renderer] = identifier[resource] . identifier[template] , identifier[context] = identifier[resource] , identifier[permission] = identifier[PREFIX_PAGE] )
def register_views(*args): """ Registration view for each resource from config. """ config = args[0] settings = config.get_settings() pages_config = settings[CONFIG_MODELS] resources = resources_of_config(pages_config) for resource in resources: if hasattr(resource, '__table__') and (not hasattr(resource, 'model')): continue # depends on [control=['if'], data=[]] resource.model.pyramid_pages_template = resource.template config.add_view(resource.view, attr=resource.attr, route_name=PREFIX_PAGE, renderer=resource.template, context=resource, permission=PREFIX_PAGE) # depends on [control=['for'], data=['resource']]
def generate_wavelengths(minwave=500, maxwave=26000, num=10000, delta=None, log=True, wave_unit=u.AA): """Generate wavelength array to be used for spectrum sampling. .. math:: minwave \\le \\lambda < maxwave Parameters ---------- minwave, maxwave : float Lower and upper limits of the wavelengths. These must be values in linear space regardless of ``log``. num : int The number of wavelength values. This is only used when ``delta=None``. delta : float or `None` Delta between wavelength values. When ``log=True``, this is the spacing in log space. log : bool If `True`, the wavelength values are evenly spaced in log scale. Otherwise, spacing is linear. wave_unit : str or `~astropy.units.core.Unit` Wavelength unit. Default is Angstrom. Returns ------- waveset : `~astropy.units.quantity.Quantity` Generated wavelength set. waveset_str : str Info string associated with the result. """ wave_unit = units.validate_unit(wave_unit) if delta is not None: num = None waveset_str = 'Min: {0}, Max: {1}, Num: {2}, Delta: {3}, Log: {4}'.format( minwave, maxwave, num, delta, log) # Log space if log: logmin = np.log10(minwave) logmax = np.log10(maxwave) if delta is None: waveset = np.logspace(logmin, logmax, num, endpoint=False) else: waveset = 10 ** np.arange(logmin, logmax, delta) # Linear space else: if delta is None: waveset = np.linspace(minwave, maxwave, num, endpoint=False) else: waveset = np.arange(minwave, maxwave, delta) return waveset.astype(np.float64) * wave_unit, waveset_str
def function[generate_wavelengths, parameter[minwave, maxwave, num, delta, log, wave_unit]]: constant[Generate wavelength array to be used for spectrum sampling. .. math:: minwave \le \lambda < maxwave Parameters ---------- minwave, maxwave : float Lower and upper limits of the wavelengths. These must be values in linear space regardless of ``log``. num : int The number of wavelength values. This is only used when ``delta=None``. delta : float or `None` Delta between wavelength values. When ``log=True``, this is the spacing in log space. log : bool If `True`, the wavelength values are evenly spaced in log scale. Otherwise, spacing is linear. wave_unit : str or `~astropy.units.core.Unit` Wavelength unit. Default is Angstrom. Returns ------- waveset : `~astropy.units.quantity.Quantity` Generated wavelength set. waveset_str : str Info string associated with the result. ] variable[wave_unit] assign[=] call[name[units].validate_unit, parameter[name[wave_unit]]] if compare[name[delta] is_not constant[None]] begin[:] variable[num] assign[=] constant[None] variable[waveset_str] assign[=] call[constant[Min: {0}, Max: {1}, Num: {2}, Delta: {3}, Log: {4}].format, parameter[name[minwave], name[maxwave], name[num], name[delta], name[log]]] if name[log] begin[:] variable[logmin] assign[=] call[name[np].log10, parameter[name[minwave]]] variable[logmax] assign[=] call[name[np].log10, parameter[name[maxwave]]] if compare[name[delta] is constant[None]] begin[:] variable[waveset] assign[=] call[name[np].logspace, parameter[name[logmin], name[logmax], name[num]]] return[tuple[[<ast.BinOp object at 0x7da18f813160>, <ast.Name object at 0x7da18f8117e0>]]]
keyword[def] identifier[generate_wavelengths] ( identifier[minwave] = literal[int] , identifier[maxwave] = literal[int] , identifier[num] = literal[int] , identifier[delta] = keyword[None] , identifier[log] = keyword[True] , identifier[wave_unit] = identifier[u] . identifier[AA] ): literal[string] identifier[wave_unit] = identifier[units] . identifier[validate_unit] ( identifier[wave_unit] ) keyword[if] identifier[delta] keyword[is] keyword[not] keyword[None] : identifier[num] = keyword[None] identifier[waveset_str] = literal[string] . identifier[format] ( identifier[minwave] , identifier[maxwave] , identifier[num] , identifier[delta] , identifier[log] ) keyword[if] identifier[log] : identifier[logmin] = identifier[np] . identifier[log10] ( identifier[minwave] ) identifier[logmax] = identifier[np] . identifier[log10] ( identifier[maxwave] ) keyword[if] identifier[delta] keyword[is] keyword[None] : identifier[waveset] = identifier[np] . identifier[logspace] ( identifier[logmin] , identifier[logmax] , identifier[num] , identifier[endpoint] = keyword[False] ) keyword[else] : identifier[waveset] = literal[int] ** identifier[np] . identifier[arange] ( identifier[logmin] , identifier[logmax] , identifier[delta] ) keyword[else] : keyword[if] identifier[delta] keyword[is] keyword[None] : identifier[waveset] = identifier[np] . identifier[linspace] ( identifier[minwave] , identifier[maxwave] , identifier[num] , identifier[endpoint] = keyword[False] ) keyword[else] : identifier[waveset] = identifier[np] . identifier[arange] ( identifier[minwave] , identifier[maxwave] , identifier[delta] ) keyword[return] identifier[waveset] . identifier[astype] ( identifier[np] . identifier[float64] )* identifier[wave_unit] , identifier[waveset_str]
def generate_wavelengths(minwave=500, maxwave=26000, num=10000, delta=None, log=True, wave_unit=u.AA): """Generate wavelength array to be used for spectrum sampling. .. math:: minwave \\le \\lambda < maxwave Parameters ---------- minwave, maxwave : float Lower and upper limits of the wavelengths. These must be values in linear space regardless of ``log``. num : int The number of wavelength values. This is only used when ``delta=None``. delta : float or `None` Delta between wavelength values. When ``log=True``, this is the spacing in log space. log : bool If `True`, the wavelength values are evenly spaced in log scale. Otherwise, spacing is linear. wave_unit : str or `~astropy.units.core.Unit` Wavelength unit. Default is Angstrom. Returns ------- waveset : `~astropy.units.quantity.Quantity` Generated wavelength set. waveset_str : str Info string associated with the result. """ wave_unit = units.validate_unit(wave_unit) if delta is not None: num = None # depends on [control=['if'], data=[]] waveset_str = 'Min: {0}, Max: {1}, Num: {2}, Delta: {3}, Log: {4}'.format(minwave, maxwave, num, delta, log) # Log space if log: logmin = np.log10(minwave) logmax = np.log10(maxwave) if delta is None: waveset = np.logspace(logmin, logmax, num, endpoint=False) # depends on [control=['if'], data=[]] else: waveset = 10 ** np.arange(logmin, logmax, delta) # depends on [control=['if'], data=[]] # Linear space elif delta is None: waveset = np.linspace(minwave, maxwave, num, endpoint=False) # depends on [control=['if'], data=[]] else: waveset = np.arange(minwave, maxwave, delta) return (waveset.astype(np.float64) * wave_unit, waveset_str)
def ZIP(inputs, use_longest_length=None, defaults=None): """ Transposes an array of input arrays so that the first element of the output array would be an array containing, the first element of the first input array, the first element of the second input array, etc. See https://docs.mongodb.com/manual/reference/operator/aggregation/zip/ for more details :param inputs: An array of expressions that resolve to arrays. :param use_longest_length: A boolean which specifies whether the length of the longest array determines the number of arrays in the output array. :param defaults: An array of default element values to use if the input arrays have different lengths. :return: Aggregation operator """ res = {'inputs': inputs} if use_longest_length in [True, False]: res['useLongestLength'] = use_longest_length if defaults is not None: res['defaults'] = defaults return {'$zip': res}
def function[ZIP, parameter[inputs, use_longest_length, defaults]]: constant[ Transposes an array of input arrays so that the first element of the output array would be an array containing, the first element of the first input array, the first element of the second input array, etc. See https://docs.mongodb.com/manual/reference/operator/aggregation/zip/ for more details :param inputs: An array of expressions that resolve to arrays. :param use_longest_length: A boolean which specifies whether the length of the longest array determines the number of arrays in the output array. :param defaults: An array of default element values to use if the input arrays have different lengths. :return: Aggregation operator ] variable[res] assign[=] dictionary[[<ast.Constant object at 0x7da18f09cdc0>], [<ast.Name object at 0x7da18f09e080>]] if compare[name[use_longest_length] in list[[<ast.Constant object at 0x7da18f09ffd0>, <ast.Constant object at 0x7da18f09e950>]]] begin[:] call[name[res]][constant[useLongestLength]] assign[=] name[use_longest_length] if compare[name[defaults] is_not constant[None]] begin[:] call[name[res]][constant[defaults]] assign[=] name[defaults] return[dictionary[[<ast.Constant object at 0x7da1b1fa8880>], [<ast.Name object at 0x7da1b1fa88b0>]]]
keyword[def] identifier[ZIP] ( identifier[inputs] , identifier[use_longest_length] = keyword[None] , identifier[defaults] = keyword[None] ): literal[string] identifier[res] ={ literal[string] : identifier[inputs] } keyword[if] identifier[use_longest_length] keyword[in] [ keyword[True] , keyword[False] ]: identifier[res] [ literal[string] ]= identifier[use_longest_length] keyword[if] identifier[defaults] keyword[is] keyword[not] keyword[None] : identifier[res] [ literal[string] ]= identifier[defaults] keyword[return] { literal[string] : identifier[res] }
def ZIP(inputs, use_longest_length=None, defaults=None): """ Transposes an array of input arrays so that the first element of the output array would be an array containing, the first element of the first input array, the first element of the second input array, etc. See https://docs.mongodb.com/manual/reference/operator/aggregation/zip/ for more details :param inputs: An array of expressions that resolve to arrays. :param use_longest_length: A boolean which specifies whether the length of the longest array determines the number of arrays in the output array. :param defaults: An array of default element values to use if the input arrays have different lengths. :return: Aggregation operator """ res = {'inputs': inputs} if use_longest_length in [True, False]: res['useLongestLength'] = use_longest_length # depends on [control=['if'], data=['use_longest_length']] if defaults is not None: res['defaults'] = defaults # depends on [control=['if'], data=['defaults']] return {'$zip': res}
def shutdown_kernel(self): """Shutdown the kernel of the client.""" kernel_id = self.get_kernel_id() if kernel_id: delete_url = self.add_token(url_path_join(self.server_url, 'api/kernels/', kernel_id)) delete_req = requests.delete(delete_url) if delete_req.status_code != 204: QMessageBox.warning( self, _("Server error"), _("The Jupyter Notebook server " "failed to shutdown the kernel " "associated with this notebook. " "If you want to shut it down, " "you'll have to close Spyder."))
def function[shutdown_kernel, parameter[self]]: constant[Shutdown the kernel of the client.] variable[kernel_id] assign[=] call[name[self].get_kernel_id, parameter[]] if name[kernel_id] begin[:] variable[delete_url] assign[=] call[name[self].add_token, parameter[call[name[url_path_join], parameter[name[self].server_url, constant[api/kernels/], name[kernel_id]]]]] variable[delete_req] assign[=] call[name[requests].delete, parameter[name[delete_url]]] if compare[name[delete_req].status_code not_equal[!=] constant[204]] begin[:] call[name[QMessageBox].warning, parameter[name[self], call[name[_], parameter[constant[Server error]]], call[name[_], parameter[constant[The Jupyter Notebook server failed to shutdown the kernel associated with this notebook. If you want to shut it down, you'll have to close Spyder.]]]]]
keyword[def] identifier[shutdown_kernel] ( identifier[self] ): literal[string] identifier[kernel_id] = identifier[self] . identifier[get_kernel_id] () keyword[if] identifier[kernel_id] : identifier[delete_url] = identifier[self] . identifier[add_token] ( identifier[url_path_join] ( identifier[self] . identifier[server_url] , literal[string] , identifier[kernel_id] )) identifier[delete_req] = identifier[requests] . identifier[delete] ( identifier[delete_url] ) keyword[if] identifier[delete_req] . identifier[status_code] != literal[int] : identifier[QMessageBox] . identifier[warning] ( identifier[self] , identifier[_] ( literal[string] ), identifier[_] ( literal[string] literal[string] literal[string] literal[string] literal[string] ))
def shutdown_kernel(self): """Shutdown the kernel of the client.""" kernel_id = self.get_kernel_id() if kernel_id: delete_url = self.add_token(url_path_join(self.server_url, 'api/kernels/', kernel_id)) delete_req = requests.delete(delete_url) if delete_req.status_code != 204: QMessageBox.warning(self, _('Server error'), _("The Jupyter Notebook server failed to shutdown the kernel associated with this notebook. If you want to shut it down, you'll have to close Spyder.")) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def power(self): """:return: A power object modeled as a named tuple""" power = self._state['powerUsage'] return PowerUsage(power.get('avgDayValue'), power.get('avgValue'), power.get('dayCost'), power.get('dayUsage'), power.get('isSmart'), power.get('meterReading'), power.get('value'), power.get('meterReadingLow'), power.get('dayLowUsage'))
def function[power, parameter[self]]: constant[:return: A power object modeled as a named tuple] variable[power] assign[=] call[name[self]._state][constant[powerUsage]] return[call[name[PowerUsage], parameter[call[name[power].get, parameter[constant[avgDayValue]]], call[name[power].get, parameter[constant[avgValue]]], call[name[power].get, parameter[constant[dayCost]]], call[name[power].get, parameter[constant[dayUsage]]], call[name[power].get, parameter[constant[isSmart]]], call[name[power].get, parameter[constant[meterReading]]], call[name[power].get, parameter[constant[value]]], call[name[power].get, parameter[constant[meterReadingLow]]], call[name[power].get, parameter[constant[dayLowUsage]]]]]]
keyword[def] identifier[power] ( identifier[self] ): literal[string] identifier[power] = identifier[self] . identifier[_state] [ literal[string] ] keyword[return] identifier[PowerUsage] ( identifier[power] . identifier[get] ( literal[string] ), identifier[power] . identifier[get] ( literal[string] ), identifier[power] . identifier[get] ( literal[string] ), identifier[power] . identifier[get] ( literal[string] ), identifier[power] . identifier[get] ( literal[string] ), identifier[power] . identifier[get] ( literal[string] ), identifier[power] . identifier[get] ( literal[string] ), identifier[power] . identifier[get] ( literal[string] ), identifier[power] . identifier[get] ( literal[string] ))
def power(self): """:return: A power object modeled as a named tuple""" power = self._state['powerUsage'] return PowerUsage(power.get('avgDayValue'), power.get('avgValue'), power.get('dayCost'), power.get('dayUsage'), power.get('isSmart'), power.get('meterReading'), power.get('value'), power.get('meterReadingLow'), power.get('dayLowUsage'))
def tojson(self) -> str: """Serialize an Event into JSON. Returns ------- str JSON-serialized Event. """ return json.dumps({ 'event_id': str(self.id), 'event_type': self.type, 'schema_name': self.schema_name, 'table_name': self.table_name, 'row_id': self.row_id })
def function[tojson, parameter[self]]: constant[Serialize an Event into JSON. Returns ------- str JSON-serialized Event. ] return[call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da18bc71270>, <ast.Constant object at 0x7da18bc73520>, <ast.Constant object at 0x7da18bc71f60>, <ast.Constant object at 0x7da18bc73460>, <ast.Constant object at 0x7da18bc71d50>], [<ast.Call object at 0x7da18bc71360>, <ast.Attribute object at 0x7da18bc73670>, <ast.Attribute object at 0x7da18bc72a70>, <ast.Attribute object at 0x7da18bc72c80>, <ast.Attribute object at 0x7da18bc70bb0>]]]]]
keyword[def] identifier[tojson] ( identifier[self] )-> identifier[str] : literal[string] keyword[return] identifier[json] . identifier[dumps] ({ literal[string] : identifier[str] ( identifier[self] . identifier[id] ), literal[string] : identifier[self] . identifier[type] , literal[string] : identifier[self] . identifier[schema_name] , literal[string] : identifier[self] . identifier[table_name] , literal[string] : identifier[self] . identifier[row_id] })
def tojson(self) -> str: """Serialize an Event into JSON. Returns ------- str JSON-serialized Event. """ return json.dumps({'event_id': str(self.id), 'event_type': self.type, 'schema_name': self.schema_name, 'table_name': self.table_name, 'row_id': self.row_id})
async def finish_pairing(self, pin): """Finish pairing process.""" self.srp.step1(pin) pub_key, proof = self.srp.step2(self._atv_pub_key, self._atv_salt) msg = messages.crypto_pairing({ tlv8.TLV_SEQ_NO: b'\x03', tlv8.TLV_PUBLIC_KEY: pub_key, tlv8.TLV_PROOF: proof}) resp = await self.protocol.send_and_receive( msg, generate_identifier=False) pairing_data = _get_pairing_data(resp) atv_proof = pairing_data[tlv8.TLV_PROOF] log_binary(_LOGGER, 'Device', Proof=atv_proof) encrypted_data = self.srp.step3() msg = messages.crypto_pairing({ tlv8.TLV_SEQ_NO: b'\x05', tlv8.TLV_ENCRYPTED_DATA: encrypted_data}) resp = await self.protocol.send_and_receive( msg, generate_identifier=False) pairing_data = _get_pairing_data(resp) encrypted_data = pairing_data[tlv8.TLV_ENCRYPTED_DATA] return self.srp.step4(encrypted_data)
<ast.AsyncFunctionDef object at 0x7da18f7235e0>
keyword[async] keyword[def] identifier[finish_pairing] ( identifier[self] , identifier[pin] ): literal[string] identifier[self] . identifier[srp] . identifier[step1] ( identifier[pin] ) identifier[pub_key] , identifier[proof] = identifier[self] . identifier[srp] . identifier[step2] ( identifier[self] . identifier[_atv_pub_key] , identifier[self] . identifier[_atv_salt] ) identifier[msg] = identifier[messages] . identifier[crypto_pairing] ({ identifier[tlv8] . identifier[TLV_SEQ_NO] : literal[string] , identifier[tlv8] . identifier[TLV_PUBLIC_KEY] : identifier[pub_key] , identifier[tlv8] . identifier[TLV_PROOF] : identifier[proof] }) identifier[resp] = keyword[await] identifier[self] . identifier[protocol] . identifier[send_and_receive] ( identifier[msg] , identifier[generate_identifier] = keyword[False] ) identifier[pairing_data] = identifier[_get_pairing_data] ( identifier[resp] ) identifier[atv_proof] = identifier[pairing_data] [ identifier[tlv8] . identifier[TLV_PROOF] ] identifier[log_binary] ( identifier[_LOGGER] , literal[string] , identifier[Proof] = identifier[atv_proof] ) identifier[encrypted_data] = identifier[self] . identifier[srp] . identifier[step3] () identifier[msg] = identifier[messages] . identifier[crypto_pairing] ({ identifier[tlv8] . identifier[TLV_SEQ_NO] : literal[string] , identifier[tlv8] . identifier[TLV_ENCRYPTED_DATA] : identifier[encrypted_data] }) identifier[resp] = keyword[await] identifier[self] . identifier[protocol] . identifier[send_and_receive] ( identifier[msg] , identifier[generate_identifier] = keyword[False] ) identifier[pairing_data] = identifier[_get_pairing_data] ( identifier[resp] ) identifier[encrypted_data] = identifier[pairing_data] [ identifier[tlv8] . identifier[TLV_ENCRYPTED_DATA] ] keyword[return] identifier[self] . identifier[srp] . identifier[step4] ( identifier[encrypted_data] )
async def finish_pairing(self, pin): """Finish pairing process.""" self.srp.step1(pin) (pub_key, proof) = self.srp.step2(self._atv_pub_key, self._atv_salt) msg = messages.crypto_pairing({tlv8.TLV_SEQ_NO: b'\x03', tlv8.TLV_PUBLIC_KEY: pub_key, tlv8.TLV_PROOF: proof}) resp = await self.protocol.send_and_receive(msg, generate_identifier=False) pairing_data = _get_pairing_data(resp) atv_proof = pairing_data[tlv8.TLV_PROOF] log_binary(_LOGGER, 'Device', Proof=atv_proof) encrypted_data = self.srp.step3() msg = messages.crypto_pairing({tlv8.TLV_SEQ_NO: b'\x05', tlv8.TLV_ENCRYPTED_DATA: encrypted_data}) resp = await self.protocol.send_and_receive(msg, generate_identifier=False) pairing_data = _get_pairing_data(resp) encrypted_data = pairing_data[tlv8.TLV_ENCRYPTED_DATA] return self.srp.step4(encrypted_data)
def main(): """ Parse command line argument and output appropriate file type (csv or JSON) """ parser = ArgumentParser() parser.add_argument( "-c", "--clinvarfile", dest="clinvarfile", help="ClinVar VCF file (either this or -C must be specified)", metavar="CLINVARFILE") parser.add_argument( "-C", "--clinvardir", dest="clinvardir", help="ClinVar VCF directory (either this or -c must be specified). " + "This option will use vcf2clinvar.clinvar_update to automatically " + "check and import the most recent ClinVar file to this directory.", metavar="CLINVARDIR") parser.add_argument( "-i", "--input", dest="inputfile", help="Input VCF file ['.vcf', '.vcf.gz', '.vcf.bz2']. " + "Uncompressed genome data is also accepted via stdin.", metavar="INPUT") parser.add_argument( "-t", "--type", dest="type", default='csv', help="Output report type ('csv' or 'json'). Defaults to csv. " + "CSV Report: Reports all genome variants matching ClinVar records, " + "and some summary ClinVar data from these records. Header lines " + "with metadata begin with '##'.\n" + "JSON Report: Reports genome variants matching ClinVar records " + "(no record information is included).", metavar="TYPE") parser.add_argument( "-n", "--notes", dest="notes", help="Notes (JSON format) to include in report. 
(JSON report only)", metavar="NOTES") parser.add_argument( "-g", "--genome-build", dest="build", help="Genome build to include in report ('b37' or 'b38').", metavar="GENOMEBUILD") options = parser.parse_args() version = os.popen("python setup.py --version").read().strip() if options.inputfile: if options.inputfile.endswith('.vcf'): input_genome_file = open(options.inputfile) elif options.inputfile.endswith('.vcf.gz'): input_genome_file = gzip.open(options.inputfile) elif options.inputfile.endswith('.vcf.bz2'): input_genome_file = bz2.BZ2File(options.inputfile) else: raise IOError("Genome filename expected to end with ''.vcf'," + " '.vcf.gz', or '.vcf.bz2'.") elif not sys.stdin.isatty(): input_genome_file = sys.stdin else: sys.stderr.write("Provide input VCF file\n") parser.print_help() sys.exit(1) if options.build and options.build in ['b37', 'b38']: build = options.build else: raise IOError("Input VCF genome build must be 'b37' or 'b38'.") if (not (options.clinvarfile or options.clinvardir) or (options.clinvarfile and options.clinvardir)): sys.stderr.write("Please provide either a ClinVar file or directory.") parser.print_help() sys.exit(1) if options.clinvarfile: clinvarfilename = options.clinvarfile elif options.clinvardir: clinvarfilename = get_latest_vcf_file(target_dir=options.clinvardir, build=build) if clinvarfilename.endswith('.vcf'): input_clinvar_file = open(options.clinvarfile) elif clinvarfilename.endswith('.vcf.gz'): input_clinvar_file = gzip.open(clinvarfilename) elif clinvarfilename.endswith('.vcf.bz2'): input_clinvar_file = bz2.BZ2File(clinvarfilename) else: raise IOError("ClinVar filename expected to end with '.vcf'," + " '.vcf.gz', or '.vcf.bz2'.") if options.type not in ['csv', 'json']: raise IOError("Not a valid report type, must be 'csv' or 'json'.") if options.type == "csv": csv_report(input_genome_file=input_genome_file, input_clinvar_file=input_clinvar_file, build=build, version=version) elif options.type == "json": notes_json = {} if 
options.notes: notes_json["parameter"] = options.notes try: notes_json = json.loads(options.notes) except: sys.stderr.write("Could not parse JSON notes field\n") json_report(input_genome_file=input_genome_file, input_clinvar_file=input_clinvar_file, build=build, notes=notes_json, version=version)
def function[main, parameter[]]: constant[ Parse command line argument and output appropriate file type (csv or JSON) ] variable[parser] assign[=] call[name[ArgumentParser], parameter[]] call[name[parser].add_argument, parameter[constant[-c], constant[--clinvarfile]]] call[name[parser].add_argument, parameter[constant[-C], constant[--clinvardir]]] call[name[parser].add_argument, parameter[constant[-i], constant[--input]]] call[name[parser].add_argument, parameter[constant[-t], constant[--type]]] call[name[parser].add_argument, parameter[constant[-n], constant[--notes]]] call[name[parser].add_argument, parameter[constant[-g], constant[--genome-build]]] variable[options] assign[=] call[name[parser].parse_args, parameter[]] variable[version] assign[=] call[call[call[name[os].popen, parameter[constant[python setup.py --version]]].read, parameter[]].strip, parameter[]] if name[options].inputfile begin[:] if call[name[options].inputfile.endswith, parameter[constant[.vcf]]] begin[:] variable[input_genome_file] assign[=] call[name[open], parameter[name[options].inputfile]] if <ast.BoolOp object at 0x7da1afe7ba30> begin[:] variable[build] assign[=] name[options].build if <ast.BoolOp object at 0x7da1afef8c40> begin[:] call[name[sys].stderr.write, parameter[constant[Please provide either a ClinVar file or directory.]]] call[name[parser].print_help, parameter[]] call[name[sys].exit, parameter[constant[1]]] if name[options].clinvarfile begin[:] variable[clinvarfilename] assign[=] name[options].clinvarfile if call[name[clinvarfilename].endswith, parameter[constant[.vcf]]] begin[:] variable[input_clinvar_file] assign[=] call[name[open], parameter[name[options].clinvarfile]] if compare[name[options].type <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1afe78970>, <ast.Constant object at 0x7da1afe79720>]]] begin[:] <ast.Raise object at 0x7da1afe7b160> if compare[name[options].type equal[==] constant[csv]] begin[:] call[name[csv_report], parameter[]]
keyword[def] identifier[main] (): literal[string] identifier[parser] = identifier[ArgumentParser] () identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[help] = literal[string] , identifier[metavar] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[help] = literal[string] + literal[string] + literal[string] , identifier[metavar] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[help] = literal[string] + literal[string] , identifier[metavar] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] + literal[string] + literal[string] + literal[string] + literal[string] + literal[string] , identifier[metavar] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[help] = literal[string] , identifier[metavar] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[help] = literal[string] , identifier[metavar] = literal[string] ) identifier[options] = identifier[parser] . identifier[parse_args] () identifier[version] = identifier[os] . identifier[popen] ( literal[string] ). identifier[read] (). identifier[strip] () keyword[if] identifier[options] . identifier[inputfile] : keyword[if] identifier[options] . identifier[inputfile] . identifier[endswith] ( literal[string] ): identifier[input_genome_file] = identifier[open] ( identifier[options] . identifier[inputfile] ) keyword[elif] identifier[options] . identifier[inputfile] . 
identifier[endswith] ( literal[string] ): identifier[input_genome_file] = identifier[gzip] . identifier[open] ( identifier[options] . identifier[inputfile] ) keyword[elif] identifier[options] . identifier[inputfile] . identifier[endswith] ( literal[string] ): identifier[input_genome_file] = identifier[bz2] . identifier[BZ2File] ( identifier[options] . identifier[inputfile] ) keyword[else] : keyword[raise] identifier[IOError] ( literal[string] + literal[string] ) keyword[elif] keyword[not] identifier[sys] . identifier[stdin] . identifier[isatty] (): identifier[input_genome_file] = identifier[sys] . identifier[stdin] keyword[else] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] ) identifier[parser] . identifier[print_help] () identifier[sys] . identifier[exit] ( literal[int] ) keyword[if] identifier[options] . identifier[build] keyword[and] identifier[options] . identifier[build] keyword[in] [ literal[string] , literal[string] ]: identifier[build] = identifier[options] . identifier[build] keyword[else] : keyword[raise] identifier[IOError] ( literal[string] ) keyword[if] ( keyword[not] ( identifier[options] . identifier[clinvarfile] keyword[or] identifier[options] . identifier[clinvardir] ) keyword[or] ( identifier[options] . identifier[clinvarfile] keyword[and] identifier[options] . identifier[clinvardir] )): identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] ) identifier[parser] . identifier[print_help] () identifier[sys] . identifier[exit] ( literal[int] ) keyword[if] identifier[options] . identifier[clinvarfile] : identifier[clinvarfilename] = identifier[options] . identifier[clinvarfile] keyword[elif] identifier[options] . identifier[clinvardir] : identifier[clinvarfilename] = identifier[get_latest_vcf_file] ( identifier[target_dir] = identifier[options] . identifier[clinvardir] , identifier[build] = identifier[build] ) keyword[if] identifier[clinvarfilename] . 
identifier[endswith] ( literal[string] ): identifier[input_clinvar_file] = identifier[open] ( identifier[options] . identifier[clinvarfile] ) keyword[elif] identifier[clinvarfilename] . identifier[endswith] ( literal[string] ): identifier[input_clinvar_file] = identifier[gzip] . identifier[open] ( identifier[clinvarfilename] ) keyword[elif] identifier[clinvarfilename] . identifier[endswith] ( literal[string] ): identifier[input_clinvar_file] = identifier[bz2] . identifier[BZ2File] ( identifier[clinvarfilename] ) keyword[else] : keyword[raise] identifier[IOError] ( literal[string] + literal[string] ) keyword[if] identifier[options] . identifier[type] keyword[not] keyword[in] [ literal[string] , literal[string] ]: keyword[raise] identifier[IOError] ( literal[string] ) keyword[if] identifier[options] . identifier[type] == literal[string] : identifier[csv_report] ( identifier[input_genome_file] = identifier[input_genome_file] , identifier[input_clinvar_file] = identifier[input_clinvar_file] , identifier[build] = identifier[build] , identifier[version] = identifier[version] ) keyword[elif] identifier[options] . identifier[type] == literal[string] : identifier[notes_json] ={} keyword[if] identifier[options] . identifier[notes] : identifier[notes_json] [ literal[string] ]= identifier[options] . identifier[notes] keyword[try] : identifier[notes_json] = identifier[json] . identifier[loads] ( identifier[options] . identifier[notes] ) keyword[except] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] ) identifier[json_report] ( identifier[input_genome_file] = identifier[input_genome_file] , identifier[input_clinvar_file] = identifier[input_clinvar_file] , identifier[build] = identifier[build] , identifier[notes] = identifier[notes_json] , identifier[version] = identifier[version] )
def main(): """ Parse command line argument and output appropriate file type (csv or JSON) """ parser = ArgumentParser() parser.add_argument('-c', '--clinvarfile', dest='clinvarfile', help='ClinVar VCF file (either this or -C must be specified)', metavar='CLINVARFILE') parser.add_argument('-C', '--clinvardir', dest='clinvardir', help='ClinVar VCF directory (either this or -c must be specified). ' + 'This option will use vcf2clinvar.clinvar_update to automatically ' + 'check and import the most recent ClinVar file to this directory.', metavar='CLINVARDIR') parser.add_argument('-i', '--input', dest='inputfile', help="Input VCF file ['.vcf', '.vcf.gz', '.vcf.bz2']. " + 'Uncompressed genome data is also accepted via stdin.', metavar='INPUT') parser.add_argument('-t', '--type', dest='type', default='csv', help="Output report type ('csv' or 'json'). Defaults to csv. " + 'CSV Report: Reports all genome variants matching ClinVar records, ' + 'and some summary ClinVar data from these records. Header lines ' + "with metadata begin with '##'.\n" + 'JSON Report: Reports genome variants matching ClinVar records ' + '(no record information is included).', metavar='TYPE') parser.add_argument('-n', '--notes', dest='notes', help='Notes (JSON format) to include in report. 
(JSON report only)', metavar='NOTES') parser.add_argument('-g', '--genome-build', dest='build', help="Genome build to include in report ('b37' or 'b38').", metavar='GENOMEBUILD') options = parser.parse_args() version = os.popen('python setup.py --version').read().strip() if options.inputfile: if options.inputfile.endswith('.vcf'): input_genome_file = open(options.inputfile) # depends on [control=['if'], data=[]] elif options.inputfile.endswith('.vcf.gz'): input_genome_file = gzip.open(options.inputfile) # depends on [control=['if'], data=[]] elif options.inputfile.endswith('.vcf.bz2'): input_genome_file = bz2.BZ2File(options.inputfile) # depends on [control=['if'], data=[]] else: raise IOError("Genome filename expected to end with ''.vcf'," + " '.vcf.gz', or '.vcf.bz2'.") # depends on [control=['if'], data=[]] elif not sys.stdin.isatty(): input_genome_file = sys.stdin # depends on [control=['if'], data=[]] else: sys.stderr.write('Provide input VCF file\n') parser.print_help() sys.exit(1) if options.build and options.build in ['b37', 'b38']: build = options.build # depends on [control=['if'], data=[]] else: raise IOError("Input VCF genome build must be 'b37' or 'b38'.") if not (options.clinvarfile or options.clinvardir) or (options.clinvarfile and options.clinvardir): sys.stderr.write('Please provide either a ClinVar file or directory.') parser.print_help() sys.exit(1) # depends on [control=['if'], data=[]] if options.clinvarfile: clinvarfilename = options.clinvarfile # depends on [control=['if'], data=[]] elif options.clinvardir: clinvarfilename = get_latest_vcf_file(target_dir=options.clinvardir, build=build) # depends on [control=['if'], data=[]] if clinvarfilename.endswith('.vcf'): input_clinvar_file = open(options.clinvarfile) # depends on [control=['if'], data=[]] elif clinvarfilename.endswith('.vcf.gz'): input_clinvar_file = gzip.open(clinvarfilename) # depends on [control=['if'], data=[]] elif clinvarfilename.endswith('.vcf.bz2'): input_clinvar_file = 
bz2.BZ2File(clinvarfilename) # depends on [control=['if'], data=[]] else: raise IOError("ClinVar filename expected to end with '.vcf'," + " '.vcf.gz', or '.vcf.bz2'.") if options.type not in ['csv', 'json']: raise IOError("Not a valid report type, must be 'csv' or 'json'.") # depends on [control=['if'], data=[]] if options.type == 'csv': csv_report(input_genome_file=input_genome_file, input_clinvar_file=input_clinvar_file, build=build, version=version) # depends on [control=['if'], data=[]] elif options.type == 'json': notes_json = {} if options.notes: notes_json['parameter'] = options.notes try: notes_json = json.loads(options.notes) # depends on [control=['try'], data=[]] except: sys.stderr.write('Could not parse JSON notes field\n') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] json_report(input_genome_file=input_genome_file, input_clinvar_file=input_clinvar_file, build=build, notes=notes_json, version=version) # depends on [control=['if'], data=[]]
def _apply_outputter(self, func, mod): ''' Apply the __outputter__ variable to the functions ''' if hasattr(mod, '__outputter__'): outp = mod.__outputter__ if func.__name__ in outp: func.__outputter__ = outp[func.__name__]
def function[_apply_outputter, parameter[self, func, mod]]: constant[ Apply the __outputter__ variable to the functions ] if call[name[hasattr], parameter[name[mod], constant[__outputter__]]] begin[:] variable[outp] assign[=] name[mod].__outputter__ if compare[name[func].__name__ in name[outp]] begin[:] name[func].__outputter__ assign[=] call[name[outp]][name[func].__name__]
keyword[def] identifier[_apply_outputter] ( identifier[self] , identifier[func] , identifier[mod] ): literal[string] keyword[if] identifier[hasattr] ( identifier[mod] , literal[string] ): identifier[outp] = identifier[mod] . identifier[__outputter__] keyword[if] identifier[func] . identifier[__name__] keyword[in] identifier[outp] : identifier[func] . identifier[__outputter__] = identifier[outp] [ identifier[func] . identifier[__name__] ]
def _apply_outputter(self, func, mod): """ Apply the __outputter__ variable to the functions """ if hasattr(mod, '__outputter__'): outp = mod.__outputter__ if func.__name__ in outp: func.__outputter__ = outp[func.__name__] # depends on [control=['if'], data=['outp']] # depends on [control=['if'], data=[]]
def transform(self, X): ''' Transforms the time series data based on the provided function. Note this transformation must not change the number of samples in the data. Parameters ---------- X : array-like, shape [n_samples, ...] time series data and (optionally) contextual data Returns ------- Xt : array-like, shape [n_samples, ...] transformed time series data ''' if self.func is None: return X else: Xt, Xc = get_ts_data_parts(X) n_samples = len(Xt) Xt = self.func(Xt, **self.func_kwargs) if len(Xt) != n_samples: raise ValueError("FunctionTransformer changes sample number (not supported).") if Xc is not None: Xt = TS_Data(Xt, Xc) return Xt
def function[transform, parameter[self, X]]: constant[ Transforms the time series data based on the provided function. Note this transformation must not change the number of samples in the data. Parameters ---------- X : array-like, shape [n_samples, ...] time series data and (optionally) contextual data Returns ------- Xt : array-like, shape [n_samples, ...] transformed time series data ] if compare[name[self].func is constant[None]] begin[:] return[name[X]]
keyword[def] identifier[transform] ( identifier[self] , identifier[X] ): literal[string] keyword[if] identifier[self] . identifier[func] keyword[is] keyword[None] : keyword[return] identifier[X] keyword[else] : identifier[Xt] , identifier[Xc] = identifier[get_ts_data_parts] ( identifier[X] ) identifier[n_samples] = identifier[len] ( identifier[Xt] ) identifier[Xt] = identifier[self] . identifier[func] ( identifier[Xt] ,** identifier[self] . identifier[func_kwargs] ) keyword[if] identifier[len] ( identifier[Xt] )!= identifier[n_samples] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[Xc] keyword[is] keyword[not] keyword[None] : identifier[Xt] = identifier[TS_Data] ( identifier[Xt] , identifier[Xc] ) keyword[return] identifier[Xt]
def transform(self, X): """ Transforms the time series data based on the provided function. Note this transformation must not change the number of samples in the data. Parameters ---------- X : array-like, shape [n_samples, ...] time series data and (optionally) contextual data Returns ------- Xt : array-like, shape [n_samples, ...] transformed time series data """ if self.func is None: return X # depends on [control=['if'], data=[]] else: (Xt, Xc) = get_ts_data_parts(X) n_samples = len(Xt) Xt = self.func(Xt, **self.func_kwargs) if len(Xt) != n_samples: raise ValueError('FunctionTransformer changes sample number (not supported).') # depends on [control=['if'], data=[]] if Xc is not None: Xt = TS_Data(Xt, Xc) # depends on [control=['if'], data=['Xc']] return Xt
def channel_shift_multi(x, intensity, is_random=False, channel_index=2): """Shift the channels of images with the same arguments, randomly or non-randomly, see `numpy.rollaxis <https://docs.scipy.org/doc/numpy/reference/generated/numpy.rollaxis.html>`__. Usually be used for image segmentation which x=[X, Y], X and Y should be matched. Parameters ----------- x : list of numpy.array List of images with dimension of [n_images, row, col, channel] (default). others : args See ``tl.prepro.channel_shift``. Returns ------- numpy.array A list of processed images. """ if is_random: factor = np.random.uniform(-intensity, intensity) else: factor = intensity results = [] for data in x: data = np.rollaxis(data, channel_index, 0) min_x, max_x = np.min(data), np.max(data) channel_images = [np.clip(x_channel + factor, min_x, max_x) for x_channel in x] data = np.stack(channel_images, axis=0) data = np.rollaxis(x, 0, channel_index + 1) results.append(data) return np.asarray(results)
def function[channel_shift_multi, parameter[x, intensity, is_random, channel_index]]: constant[Shift the channels of images with the same arguments, randomly or non-randomly, see `numpy.rollaxis <https://docs.scipy.org/doc/numpy/reference/generated/numpy.rollaxis.html>`__. Usually be used for image segmentation which x=[X, Y], X and Y should be matched. Parameters ----------- x : list of numpy.array List of images with dimension of [n_images, row, col, channel] (default). others : args See ``tl.prepro.channel_shift``. Returns ------- numpy.array A list of processed images. ] if name[is_random] begin[:] variable[factor] assign[=] call[name[np].random.uniform, parameter[<ast.UnaryOp object at 0x7da207f99330>, name[intensity]]] variable[results] assign[=] list[[]] for taget[name[data]] in starred[name[x]] begin[:] variable[data] assign[=] call[name[np].rollaxis, parameter[name[data], name[channel_index], constant[0]]] <ast.Tuple object at 0x7da207f9abc0> assign[=] tuple[[<ast.Call object at 0x7da207f99510>, <ast.Call object at 0x7da207f98e80>]] variable[channel_images] assign[=] <ast.ListComp object at 0x7da207f9a620> variable[data] assign[=] call[name[np].stack, parameter[name[channel_images]]] variable[data] assign[=] call[name[np].rollaxis, parameter[name[x], constant[0], binary_operation[name[channel_index] + constant[1]]]] call[name[results].append, parameter[name[data]]] return[call[name[np].asarray, parameter[name[results]]]]
keyword[def] identifier[channel_shift_multi] ( identifier[x] , identifier[intensity] , identifier[is_random] = keyword[False] , identifier[channel_index] = literal[int] ): literal[string] keyword[if] identifier[is_random] : identifier[factor] = identifier[np] . identifier[random] . identifier[uniform] (- identifier[intensity] , identifier[intensity] ) keyword[else] : identifier[factor] = identifier[intensity] identifier[results] =[] keyword[for] identifier[data] keyword[in] identifier[x] : identifier[data] = identifier[np] . identifier[rollaxis] ( identifier[data] , identifier[channel_index] , literal[int] ) identifier[min_x] , identifier[max_x] = identifier[np] . identifier[min] ( identifier[data] ), identifier[np] . identifier[max] ( identifier[data] ) identifier[channel_images] =[ identifier[np] . identifier[clip] ( identifier[x_channel] + identifier[factor] , identifier[min_x] , identifier[max_x] ) keyword[for] identifier[x_channel] keyword[in] identifier[x] ] identifier[data] = identifier[np] . identifier[stack] ( identifier[channel_images] , identifier[axis] = literal[int] ) identifier[data] = identifier[np] . identifier[rollaxis] ( identifier[x] , literal[int] , identifier[channel_index] + literal[int] ) identifier[results] . identifier[append] ( identifier[data] ) keyword[return] identifier[np] . identifier[asarray] ( identifier[results] )
def channel_shift_multi(x, intensity, is_random=False, channel_index=2): """Shift the channels of images with the same arguments, randomly or non-randomly, see `numpy.rollaxis <https://docs.scipy.org/doc/numpy/reference/generated/numpy.rollaxis.html>`__. Usually be used for image segmentation which x=[X, Y], X and Y should be matched. Parameters ----------- x : list of numpy.array List of images with dimension of [n_images, row, col, channel] (default). others : args See ``tl.prepro.channel_shift``. Returns ------- numpy.array A list of processed images. """ if is_random: factor = np.random.uniform(-intensity, intensity) # depends on [control=['if'], data=[]] else: factor = intensity results = [] for data in x: data = np.rollaxis(data, channel_index, 0) (min_x, max_x) = (np.min(data), np.max(data)) channel_images = [np.clip(x_channel + factor, min_x, max_x) for x_channel in x] data = np.stack(channel_images, axis=0) data = np.rollaxis(x, 0, channel_index + 1) results.append(data) # depends on [control=['for'], data=['data']] return np.asarray(results)
def _evaluate_expression(self, expr): """Evaluate an :class:`.Expression` using :meth:`_get_value`.""" def cast_value(v): # Convert Decimal to Fraction to allow successful multiplication # by either float (most solvers) or Fraction (exact solver). # Multiplying Fraction and float results in a float, and # multiplying Fraction and Fraction result in Fraction, which are # exactly the types of results we want. if isinstance(v, Decimal): return Fraction(v) return v total = cast_value(expr.offset) for var, value in expr.values(): value = cast_value(value) if not isinstance(var, Product): total += self._get_value(var) * value else: total += reduce( operator.mul, (self._get_value(v) for v in var), value) return total
def function[_evaluate_expression, parameter[self, expr]]: constant[Evaluate an :class:`.Expression` using :meth:`_get_value`.] def function[cast_value, parameter[v]]: if call[name[isinstance], parameter[name[v], name[Decimal]]] begin[:] return[call[name[Fraction], parameter[name[v]]]] return[name[v]] variable[total] assign[=] call[name[cast_value], parameter[name[expr].offset]] for taget[tuple[[<ast.Name object at 0x7da18f812890>, <ast.Name object at 0x7da18f813700>]]] in starred[call[name[expr].values, parameter[]]] begin[:] variable[value] assign[=] call[name[cast_value], parameter[name[value]]] if <ast.UnaryOp object at 0x7da18f8108e0> begin[:] <ast.AugAssign object at 0x7da18f8107f0> return[name[total]]
keyword[def] identifier[_evaluate_expression] ( identifier[self] , identifier[expr] ): literal[string] keyword[def] identifier[cast_value] ( identifier[v] ): keyword[if] identifier[isinstance] ( identifier[v] , identifier[Decimal] ): keyword[return] identifier[Fraction] ( identifier[v] ) keyword[return] identifier[v] identifier[total] = identifier[cast_value] ( identifier[expr] . identifier[offset] ) keyword[for] identifier[var] , identifier[value] keyword[in] identifier[expr] . identifier[values] (): identifier[value] = identifier[cast_value] ( identifier[value] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[var] , identifier[Product] ): identifier[total] += identifier[self] . identifier[_get_value] ( identifier[var] )* identifier[value] keyword[else] : identifier[total] += identifier[reduce] ( identifier[operator] . identifier[mul] ,( identifier[self] . identifier[_get_value] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[var] ), identifier[value] ) keyword[return] identifier[total]
def _evaluate_expression(self, expr): """Evaluate an :class:`.Expression` using :meth:`_get_value`.""" def cast_value(v): # Convert Decimal to Fraction to allow successful multiplication # by either float (most solvers) or Fraction (exact solver). # Multiplying Fraction and float results in a float, and # multiplying Fraction and Fraction result in Fraction, which are # exactly the types of results we want. if isinstance(v, Decimal): return Fraction(v) # depends on [control=['if'], data=[]] return v total = cast_value(expr.offset) for (var, value) in expr.values(): value = cast_value(value) if not isinstance(var, Product): total += self._get_value(var) * value # depends on [control=['if'], data=[]] else: total += reduce(operator.mul, (self._get_value(v) for v in var), value) # depends on [control=['for'], data=[]] return total
def __tokenize(self,string): """Split s into tokens and update the token buffer. __tokenize(string) New tokens are appended to the token buffer, discarding white space. Based on http://effbot.org/zone/xml-scanner.htm """ for m in self.dx_regex.finditer(string.strip()): code = m.lastgroup text = m.group(m.lastgroup) tok = Token(code,text) if not tok.iscode('WHITESPACE'): self.tokens.append(tok)
def function[__tokenize, parameter[self, string]]: constant[Split s into tokens and update the token buffer. __tokenize(string) New tokens are appended to the token buffer, discarding white space. Based on http://effbot.org/zone/xml-scanner.htm ] for taget[name[m]] in starred[call[name[self].dx_regex.finditer, parameter[call[name[string].strip, parameter[]]]]] begin[:] variable[code] assign[=] name[m].lastgroup variable[text] assign[=] call[name[m].group, parameter[name[m].lastgroup]] variable[tok] assign[=] call[name[Token], parameter[name[code], name[text]]] if <ast.UnaryOp object at 0x7da1afe72080> begin[:] call[name[self].tokens.append, parameter[name[tok]]]
keyword[def] identifier[__tokenize] ( identifier[self] , identifier[string] ): literal[string] keyword[for] identifier[m] keyword[in] identifier[self] . identifier[dx_regex] . identifier[finditer] ( identifier[string] . identifier[strip] ()): identifier[code] = identifier[m] . identifier[lastgroup] identifier[text] = identifier[m] . identifier[group] ( identifier[m] . identifier[lastgroup] ) identifier[tok] = identifier[Token] ( identifier[code] , identifier[text] ) keyword[if] keyword[not] identifier[tok] . identifier[iscode] ( literal[string] ): identifier[self] . identifier[tokens] . identifier[append] ( identifier[tok] )
def __tokenize(self, string): """Split s into tokens and update the token buffer. __tokenize(string) New tokens are appended to the token buffer, discarding white space. Based on http://effbot.org/zone/xml-scanner.htm """ for m in self.dx_regex.finditer(string.strip()): code = m.lastgroup text = m.group(m.lastgroup) tok = Token(code, text) if not tok.iscode('WHITESPACE'): self.tokens.append(tok) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']]
def compute_similarity(a, b, margin=1.0, cutoff=10.0): """Compute the similarity between two molecules based on their descriptors Arguments: a -- the similarity measure of the first molecule b -- the similarity measure of the second molecule margin -- the sensitivity when comparing distances (default = 1.0) cutoff -- don't compare distances longer than the cutoff (default = 10.0 au) When comparing two distances (always between two atom pairs with identical labels), the folowing formula is used: dav = (distance1+distance2)/2 delta = abs(distance1-distance2) When the delta is within the margin and dav is below the cutoff: (1-dav/cutoff)*(cos(delta/margin/np.pi)+1)/2 and zero otherwise. The returned value is the sum of such terms over all distance pairs with matching atom types. When comparing similarities it might be useful to normalize them in some way, e.g. similarity(a, b)/(similarity(a, a)*similarity(b, b))**0.5 """ return similarity_measure( a.table_labels, a.table_distances, b.table_labels, b.table_distances, margin, cutoff )
def function[compute_similarity, parameter[a, b, margin, cutoff]]: constant[Compute the similarity between two molecules based on their descriptors Arguments: a -- the similarity measure of the first molecule b -- the similarity measure of the second molecule margin -- the sensitivity when comparing distances (default = 1.0) cutoff -- don't compare distances longer than the cutoff (default = 10.0 au) When comparing two distances (always between two atom pairs with identical labels), the folowing formula is used: dav = (distance1+distance2)/2 delta = abs(distance1-distance2) When the delta is within the margin and dav is below the cutoff: (1-dav/cutoff)*(cos(delta/margin/np.pi)+1)/2 and zero otherwise. The returned value is the sum of such terms over all distance pairs with matching atom types. When comparing similarities it might be useful to normalize them in some way, e.g. similarity(a, b)/(similarity(a, a)*similarity(b, b))**0.5 ] return[call[name[similarity_measure], parameter[name[a].table_labels, name[a].table_distances, name[b].table_labels, name[b].table_distances, name[margin], name[cutoff]]]]
keyword[def] identifier[compute_similarity] ( identifier[a] , identifier[b] , identifier[margin] = literal[int] , identifier[cutoff] = literal[int] ): literal[string] keyword[return] identifier[similarity_measure] ( identifier[a] . identifier[table_labels] , identifier[a] . identifier[table_distances] , identifier[b] . identifier[table_labels] , identifier[b] . identifier[table_distances] , identifier[margin] , identifier[cutoff] )
def compute_similarity(a, b, margin=1.0, cutoff=10.0): """Compute the similarity between two molecules based on their descriptors Arguments: a -- the similarity measure of the first molecule b -- the similarity measure of the second molecule margin -- the sensitivity when comparing distances (default = 1.0) cutoff -- don't compare distances longer than the cutoff (default = 10.0 au) When comparing two distances (always between two atom pairs with identical labels), the folowing formula is used: dav = (distance1+distance2)/2 delta = abs(distance1-distance2) When the delta is within the margin and dav is below the cutoff: (1-dav/cutoff)*(cos(delta/margin/np.pi)+1)/2 and zero otherwise. The returned value is the sum of such terms over all distance pairs with matching atom types. When comparing similarities it might be useful to normalize them in some way, e.g. similarity(a, b)/(similarity(a, a)*similarity(b, b))**0.5 """ return similarity_measure(a.table_labels, a.table_distances, b.table_labels, b.table_distances, margin, cutoff)
def verify_data_signature(self, signature_url, data_url, data): """ Verify data against it's remote signature :type signature_url: str :param signature_url: remote path to signature for data_url :type data_url: str :param data_url: url from which data was fetched :type data: str :param data: content of remote file at file_url """ req = requests.get(signature_url) if req.status_code is 200: tm = int(time.time()) datestamp = datetime.utcfromtimestamp(tm).isoformat() sigfile = "repo-{0}-tmp.sig".format(datestamp) logger.debug("writing {0} to {1}".format(signature_url, sigfile)) with open(sigfile, 'wb') as f: f.write(req.content) else: raise RepositoryMissingSignatureError(signature_url) verified = self.gpg.verify_data(sigfile, data) try: os.remove(sigfile) except OSError: pass if verified.valid is True: logger.debug("verified {0} against {1}".format(data_url, signature_url)) else: raise RepositorySignatureError(data_url, signature_url)
def function[verify_data_signature, parameter[self, signature_url, data_url, data]]: constant[ Verify data against it's remote signature :type signature_url: str :param signature_url: remote path to signature for data_url :type data_url: str :param data_url: url from which data was fetched :type data: str :param data: content of remote file at file_url ] variable[req] assign[=] call[name[requests].get, parameter[name[signature_url]]] if compare[name[req].status_code is constant[200]] begin[:] variable[tm] assign[=] call[name[int], parameter[call[name[time].time, parameter[]]]] variable[datestamp] assign[=] call[call[name[datetime].utcfromtimestamp, parameter[name[tm]]].isoformat, parameter[]] variable[sigfile] assign[=] call[constant[repo-{0}-tmp.sig].format, parameter[name[datestamp]]] call[name[logger].debug, parameter[call[constant[writing {0} to {1}].format, parameter[name[signature_url], name[sigfile]]]]] with call[name[open], parameter[name[sigfile], constant[wb]]] begin[:] call[name[f].write, parameter[name[req].content]] variable[verified] assign[=] call[name[self].gpg.verify_data, parameter[name[sigfile], name[data]]] <ast.Try object at 0x7da20c992050> if compare[name[verified].valid is constant[True]] begin[:] call[name[logger].debug, parameter[call[constant[verified {0} against {1}].format, parameter[name[data_url], name[signature_url]]]]]
keyword[def] identifier[verify_data_signature] ( identifier[self] , identifier[signature_url] , identifier[data_url] , identifier[data] ): literal[string] identifier[req] = identifier[requests] . identifier[get] ( identifier[signature_url] ) keyword[if] identifier[req] . identifier[status_code] keyword[is] literal[int] : identifier[tm] = identifier[int] ( identifier[time] . identifier[time] ()) identifier[datestamp] = identifier[datetime] . identifier[utcfromtimestamp] ( identifier[tm] ). identifier[isoformat] () identifier[sigfile] = literal[string] . identifier[format] ( identifier[datestamp] ) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[signature_url] , identifier[sigfile] )) keyword[with] identifier[open] ( identifier[sigfile] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[req] . identifier[content] ) keyword[else] : keyword[raise] identifier[RepositoryMissingSignatureError] ( identifier[signature_url] ) identifier[verified] = identifier[self] . identifier[gpg] . identifier[verify_data] ( identifier[sigfile] , identifier[data] ) keyword[try] : identifier[os] . identifier[remove] ( identifier[sigfile] ) keyword[except] identifier[OSError] : keyword[pass] keyword[if] identifier[verified] . identifier[valid] keyword[is] keyword[True] : identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[data_url] , identifier[signature_url] )) keyword[else] : keyword[raise] identifier[RepositorySignatureError] ( identifier[data_url] , identifier[signature_url] )
def verify_data_signature(self, signature_url, data_url, data): """ Verify data against it's remote signature :type signature_url: str :param signature_url: remote path to signature for data_url :type data_url: str :param data_url: url from which data was fetched :type data: str :param data: content of remote file at file_url """ req = requests.get(signature_url) if req.status_code is 200: tm = int(time.time()) datestamp = datetime.utcfromtimestamp(tm).isoformat() sigfile = 'repo-{0}-tmp.sig'.format(datestamp) logger.debug('writing {0} to {1}'.format(signature_url, sigfile)) with open(sigfile, 'wb') as f: f.write(req.content) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]] else: raise RepositoryMissingSignatureError(signature_url) verified = self.gpg.verify_data(sigfile, data) try: os.remove(sigfile) # depends on [control=['try'], data=[]] except OSError: pass # depends on [control=['except'], data=[]] if verified.valid is True: logger.debug('verified {0} against {1}'.format(data_url, signature_url)) # depends on [control=['if'], data=[]] else: raise RepositorySignatureError(data_url, signature_url)
def _GetTripSequence(self, schedule=None): """Return a list of (trip, stop_sequence) for all trips visiting this stop. A trip may be in the list multiple times with different index. stop_sequence is an integer. Args: schedule: Deprecated, do not use. """ if schedule is None: schedule = getattr(self, "_schedule", None) if schedule is None: warnings.warn("No longer supported. _schedule attribute is used to get " "stop_times table", DeprecationWarning) cursor = schedule._connection.cursor() cursor.execute("SELECT trip_id,stop_sequence FROM stop_times " "WHERE stop_id=?", (self.stop_id, )) return [(schedule.GetTrip(row[0]), row[1]) for row in cursor]
def function[_GetTripSequence, parameter[self, schedule]]: constant[Return a list of (trip, stop_sequence) for all trips visiting this stop. A trip may be in the list multiple times with different index. stop_sequence is an integer. Args: schedule: Deprecated, do not use. ] if compare[name[schedule] is constant[None]] begin[:] variable[schedule] assign[=] call[name[getattr], parameter[name[self], constant[_schedule], constant[None]]] if compare[name[schedule] is constant[None]] begin[:] call[name[warnings].warn, parameter[constant[No longer supported. _schedule attribute is used to get stop_times table], name[DeprecationWarning]]] variable[cursor] assign[=] call[name[schedule]._connection.cursor, parameter[]] call[name[cursor].execute, parameter[constant[SELECT trip_id,stop_sequence FROM stop_times WHERE stop_id=?], tuple[[<ast.Attribute object at 0x7da18bcca0b0>]]]] return[<ast.ListComp object at 0x7da18bcca710>]
keyword[def] identifier[_GetTripSequence] ( identifier[self] , identifier[schedule] = keyword[None] ): literal[string] keyword[if] identifier[schedule] keyword[is] keyword[None] : identifier[schedule] = identifier[getattr] ( identifier[self] , literal[string] , keyword[None] ) keyword[if] identifier[schedule] keyword[is] keyword[None] : identifier[warnings] . identifier[warn] ( literal[string] literal[string] , identifier[DeprecationWarning] ) identifier[cursor] = identifier[schedule] . identifier[_connection] . identifier[cursor] () identifier[cursor] . identifier[execute] ( literal[string] literal[string] , ( identifier[self] . identifier[stop_id] ,)) keyword[return] [( identifier[schedule] . identifier[GetTrip] ( identifier[row] [ literal[int] ]), identifier[row] [ literal[int] ]) keyword[for] identifier[row] keyword[in] identifier[cursor] ]
def _GetTripSequence(self, schedule=None): """Return a list of (trip, stop_sequence) for all trips visiting this stop. A trip may be in the list multiple times with different index. stop_sequence is an integer. Args: schedule: Deprecated, do not use. """ if schedule is None: schedule = getattr(self, '_schedule', None) # depends on [control=['if'], data=['schedule']] if schedule is None: warnings.warn('No longer supported. _schedule attribute is used to get stop_times table', DeprecationWarning) # depends on [control=['if'], data=[]] cursor = schedule._connection.cursor() cursor.execute('SELECT trip_id,stop_sequence FROM stop_times WHERE stop_id=?', (self.stop_id,)) return [(schedule.GetTrip(row[0]), row[1]) for row in cursor]
def register_app_activity(): """ Create watchers for models defined in settings.py. Once created, they will be passed over Activity.objects.follow_model(), which lives in managers.py """ from django.conf import settings from django.contrib.contenttypes.models import ContentType from .models import Activity # TO-DO: Add check for existence of setting if not hasattr(settings, 'ACTIVITY_MONITOR_MODELS'): return for item in settings.ACTIVITY_MONITOR_MODELS: try: app_label, model = item['model'].split('.', 1) content_type = ContentType.objects.get(app_label=app_label, model=model) model = content_type.model_class() Activity.objects.follow_model(model) except ContentType.DoesNotExist: pass
def function[register_app_activity, parameter[]]: constant[ Create watchers for models defined in settings.py. Once created, they will be passed over Activity.objects.follow_model(), which lives in managers.py ] from relative_module[django.conf] import module[settings] from relative_module[django.contrib.contenttypes.models] import module[ContentType] from relative_module[models] import module[Activity] if <ast.UnaryOp object at 0x7da20c7c9f00> begin[:] return[None] for taget[name[item]] in starred[name[settings].ACTIVITY_MONITOR_MODELS] begin[:] <ast.Try object at 0x7da20c7ca8c0>
keyword[def] identifier[register_app_activity] (): literal[string] keyword[from] identifier[django] . identifier[conf] keyword[import] identifier[settings] keyword[from] identifier[django] . identifier[contrib] . identifier[contenttypes] . identifier[models] keyword[import] identifier[ContentType] keyword[from] . identifier[models] keyword[import] identifier[Activity] keyword[if] keyword[not] identifier[hasattr] ( identifier[settings] , literal[string] ): keyword[return] keyword[for] identifier[item] keyword[in] identifier[settings] . identifier[ACTIVITY_MONITOR_MODELS] : keyword[try] : identifier[app_label] , identifier[model] = identifier[item] [ literal[string] ]. identifier[split] ( literal[string] , literal[int] ) identifier[content_type] = identifier[ContentType] . identifier[objects] . identifier[get] ( identifier[app_label] = identifier[app_label] , identifier[model] = identifier[model] ) identifier[model] = identifier[content_type] . identifier[model_class] () identifier[Activity] . identifier[objects] . identifier[follow_model] ( identifier[model] ) keyword[except] identifier[ContentType] . identifier[DoesNotExist] : keyword[pass]
def register_app_activity(): """ Create watchers for models defined in settings.py. Once created, they will be passed over Activity.objects.follow_model(), which lives in managers.py """ from django.conf import settings from django.contrib.contenttypes.models import ContentType from .models import Activity # TO-DO: Add check for existence of setting if not hasattr(settings, 'ACTIVITY_MONITOR_MODELS'): return # depends on [control=['if'], data=[]] for item in settings.ACTIVITY_MONITOR_MODELS: try: (app_label, model) = item['model'].split('.', 1) content_type = ContentType.objects.get(app_label=app_label, model=model) model = content_type.model_class() Activity.objects.follow_model(model) # depends on [control=['try'], data=[]] except ContentType.DoesNotExist: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['item']]
def add_columns(self, model, **fields): """Create new fields.""" for name, field in fields.items(): model._meta.add_field(name, field) self.ops.append(self.migrator.add_column( model._meta.table_name, field.column_name, field)) if field.unique: self.ops.append(self.migrator.add_index( model._meta.table_name, (field.column_name,), unique=True)) return model
def function[add_columns, parameter[self, model]]: constant[Create new fields.] for taget[tuple[[<ast.Name object at 0x7da20e9620e0>, <ast.Name object at 0x7da20e960ac0>]]] in starred[call[name[fields].items, parameter[]]] begin[:] call[name[model]._meta.add_field, parameter[name[name], name[field]]] call[name[self].ops.append, parameter[call[name[self].migrator.add_column, parameter[name[model]._meta.table_name, name[field].column_name, name[field]]]]] if name[field].unique begin[:] call[name[self].ops.append, parameter[call[name[self].migrator.add_index, parameter[name[model]._meta.table_name, tuple[[<ast.Attribute object at 0x7da20e961ff0>]]]]]] return[name[model]]
keyword[def] identifier[add_columns] ( identifier[self] , identifier[model] ,** identifier[fields] ): literal[string] keyword[for] identifier[name] , identifier[field] keyword[in] identifier[fields] . identifier[items] (): identifier[model] . identifier[_meta] . identifier[add_field] ( identifier[name] , identifier[field] ) identifier[self] . identifier[ops] . identifier[append] ( identifier[self] . identifier[migrator] . identifier[add_column] ( identifier[model] . identifier[_meta] . identifier[table_name] , identifier[field] . identifier[column_name] , identifier[field] )) keyword[if] identifier[field] . identifier[unique] : identifier[self] . identifier[ops] . identifier[append] ( identifier[self] . identifier[migrator] . identifier[add_index] ( identifier[model] . identifier[_meta] . identifier[table_name] ,( identifier[field] . identifier[column_name] ,), identifier[unique] = keyword[True] )) keyword[return] identifier[model]
def add_columns(self, model, **fields): """Create new fields.""" for (name, field) in fields.items(): model._meta.add_field(name, field) self.ops.append(self.migrator.add_column(model._meta.table_name, field.column_name, field)) if field.unique: self.ops.append(self.migrator.add_index(model._meta.table_name, (field.column_name,), unique=True)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return model
def create(cls, tx_signers, recipients, metadata=None, asset=None): """A simple way to generate a `CREATE` transaction. Note: This method currently supports the following Cryptoconditions use cases: - Ed25519 - ThresholdSha256 Additionally, it provides support for the following BigchainDB use cases: - Multiple inputs and outputs. Args: tx_signers (:obj:`list` of :obj:`str`): A list of keys that represent the signers of the CREATE Transaction. recipients (:obj:`list` of :obj:`tuple`): A list of ([keys],amount) that represent the recipients of this Transaction. metadata (dict): The metadata to be stored along with the Transaction. asset (dict): The metadata associated with the asset that will be created in this Transaction. Returns: :class:`~bigchaindb.common.transaction.Transaction` """ (inputs, outputs) = cls.validate_create(tx_signers, recipients, asset, metadata) return cls(cls.CREATE, {'data': asset}, inputs, outputs, metadata)
def function[create, parameter[cls, tx_signers, recipients, metadata, asset]]: constant[A simple way to generate a `CREATE` transaction. Note: This method currently supports the following Cryptoconditions use cases: - Ed25519 - ThresholdSha256 Additionally, it provides support for the following BigchainDB use cases: - Multiple inputs and outputs. Args: tx_signers (:obj:`list` of :obj:`str`): A list of keys that represent the signers of the CREATE Transaction. recipients (:obj:`list` of :obj:`tuple`): A list of ([keys],amount) that represent the recipients of this Transaction. metadata (dict): The metadata to be stored along with the Transaction. asset (dict): The metadata associated with the asset that will be created in this Transaction. Returns: :class:`~bigchaindb.common.transaction.Transaction` ] <ast.Tuple object at 0x7da1b1bed870> assign[=] call[name[cls].validate_create, parameter[name[tx_signers], name[recipients], name[asset], name[metadata]]] return[call[name[cls], parameter[name[cls].CREATE, dictionary[[<ast.Constant object at 0x7da1b1bed000>], [<ast.Name object at 0x7da1b1bed600>]], name[inputs], name[outputs], name[metadata]]]]
keyword[def] identifier[create] ( identifier[cls] , identifier[tx_signers] , identifier[recipients] , identifier[metadata] = keyword[None] , identifier[asset] = keyword[None] ): literal[string] ( identifier[inputs] , identifier[outputs] )= identifier[cls] . identifier[validate_create] ( identifier[tx_signers] , identifier[recipients] , identifier[asset] , identifier[metadata] ) keyword[return] identifier[cls] ( identifier[cls] . identifier[CREATE] ,{ literal[string] : identifier[asset] }, identifier[inputs] , identifier[outputs] , identifier[metadata] )
def create(cls, tx_signers, recipients, metadata=None, asset=None): """A simple way to generate a `CREATE` transaction. Note: This method currently supports the following Cryptoconditions use cases: - Ed25519 - ThresholdSha256 Additionally, it provides support for the following BigchainDB use cases: - Multiple inputs and outputs. Args: tx_signers (:obj:`list` of :obj:`str`): A list of keys that represent the signers of the CREATE Transaction. recipients (:obj:`list` of :obj:`tuple`): A list of ([keys],amount) that represent the recipients of this Transaction. metadata (dict): The metadata to be stored along with the Transaction. asset (dict): The metadata associated with the asset that will be created in this Transaction. Returns: :class:`~bigchaindb.common.transaction.Transaction` """ (inputs, outputs) = cls.validate_create(tx_signers, recipients, asset, metadata) return cls(cls.CREATE, {'data': asset}, inputs, outputs, metadata)
def add(cls, model, commit=True): """Adds a model instance to session and commits the transaction. Args: model: The instance to add. Examples: >>> customer = Customer.new(name="hari", email="hari@gmail.com") >>> Customer.add(customer) hari@gmail.com """ if not isinstance(model, cls): raise ValueError('%s is not of type %s' % (model, cls)) cls.session.add(model) try: if commit: cls.session.commit() return model except: cls.session.rollback() raise
def function[add, parameter[cls, model, commit]]: constant[Adds a model instance to session and commits the transaction. Args: model: The instance to add. Examples: >>> customer = Customer.new(name="hari", email="hari@gmail.com") >>> Customer.add(customer) hari@gmail.com ] if <ast.UnaryOp object at 0x7da1b24ae830> begin[:] <ast.Raise object at 0x7da1b24afdf0> call[name[cls].session.add, parameter[name[model]]] <ast.Try object at 0x7da1b24af610>
keyword[def] identifier[add] ( identifier[cls] , identifier[model] , identifier[commit] = keyword[True] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[model] , identifier[cls] ): keyword[raise] identifier[ValueError] ( literal[string] %( identifier[model] , identifier[cls] )) identifier[cls] . identifier[session] . identifier[add] ( identifier[model] ) keyword[try] : keyword[if] identifier[commit] : identifier[cls] . identifier[session] . identifier[commit] () keyword[return] identifier[model] keyword[except] : identifier[cls] . identifier[session] . identifier[rollback] () keyword[raise]
def add(cls, model, commit=True): """Adds a model instance to session and commits the transaction. Args: model: The instance to add. Examples: >>> customer = Customer.new(name="hari", email="hari@gmail.com") >>> Customer.add(customer) hari@gmail.com """ if not isinstance(model, cls): raise ValueError('%s is not of type %s' % (model, cls)) # depends on [control=['if'], data=[]] cls.session.add(model) try: if commit: cls.session.commit() # depends on [control=['if'], data=[]] return model # depends on [control=['try'], data=[]] except: cls.session.rollback() raise # depends on [control=['except'], data=[]]
def covariance_matrix(self, data, background_rms=1, exposure_map=1, noise_map=None, verbose=False): """ returns a diagonal matrix for the covariance estimation which describes the error Notes: - the exposure map must be positive definite. Values that deviate too much from the mean exposure time will be given a lower limit to not under-predict the Poisson component of the noise. - the data must be positive semi-definite for the Poisson noise estimate. Values < 0 (Possible after mean subtraction) will not have a Poisson component in their noise estimate. :param data: data array, eg in units of photons/second :param background_rms: background noise rms, eg. in units (photons/second)^2 :param exposure_map: exposure time per pixel, e.g. in units of seconds :return: len(d) x len(d) matrix that give the error of background and Poisson components; (photons/second)^2 """ if noise_map is not None: return noise_map**2 if isinstance(exposure_map, int) or isinstance(exposure_map, float): if exposure_map <= 0: exposure_map = 1 else: mean_exp_time = np.mean(exposure_map) exposure_map[exposure_map < mean_exp_time / 10] = mean_exp_time / 10 if verbose: if background_rms * np.max(exposure_map) < 1: print("WARNING! sigma_b*f %s < 1 count may introduce unstable error estimates" % (background_rms * np.max(exposure_map))) d_pos = np.zeros_like(data) #threshold = 1.5*sigma_b d_pos[data >= 0] = data[data >= 0] #d_pos[d < threshold] = 0 sigma = d_pos / exposure_map + background_rms ** 2 return sigma
def function[covariance_matrix, parameter[self, data, background_rms, exposure_map, noise_map, verbose]]: constant[ returns a diagonal matrix for the covariance estimation which describes the error Notes: - the exposure map must be positive definite. Values that deviate too much from the mean exposure time will be given a lower limit to not under-predict the Poisson component of the noise. - the data must be positive semi-definite for the Poisson noise estimate. Values < 0 (Possible after mean subtraction) will not have a Poisson component in their noise estimate. :param data: data array, eg in units of photons/second :param background_rms: background noise rms, eg. in units (photons/second)^2 :param exposure_map: exposure time per pixel, e.g. in units of seconds :return: len(d) x len(d) matrix that give the error of background and Poisson components; (photons/second)^2 ] if compare[name[noise_map] is_not constant[None]] begin[:] return[binary_operation[name[noise_map] ** constant[2]]] if <ast.BoolOp object at 0x7da20c6e6380> begin[:] if compare[name[exposure_map] less_or_equal[<=] constant[0]] begin[:] variable[exposure_map] assign[=] constant[1] if name[verbose] begin[:] if compare[binary_operation[name[background_rms] * call[name[np].max, parameter[name[exposure_map]]]] less[<] constant[1]] begin[:] call[name[print], parameter[binary_operation[constant[WARNING! sigma_b*f %s < 1 count may introduce unstable error estimates] <ast.Mod object at 0x7da2590d6920> binary_operation[name[background_rms] * call[name[np].max, parameter[name[exposure_map]]]]]]] variable[d_pos] assign[=] call[name[np].zeros_like, parameter[name[data]]] call[name[d_pos]][compare[name[data] greater_or_equal[>=] constant[0]]] assign[=] call[name[data]][compare[name[data] greater_or_equal[>=] constant[0]]] variable[sigma] assign[=] binary_operation[binary_operation[name[d_pos] / name[exposure_map]] + binary_operation[name[background_rms] ** constant[2]]] return[name[sigma]]
keyword[def] identifier[covariance_matrix] ( identifier[self] , identifier[data] , identifier[background_rms] = literal[int] , identifier[exposure_map] = literal[int] , identifier[noise_map] = keyword[None] , identifier[verbose] = keyword[False] ): literal[string] keyword[if] identifier[noise_map] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[noise_map] ** literal[int] keyword[if] identifier[isinstance] ( identifier[exposure_map] , identifier[int] ) keyword[or] identifier[isinstance] ( identifier[exposure_map] , identifier[float] ): keyword[if] identifier[exposure_map] <= literal[int] : identifier[exposure_map] = literal[int] keyword[else] : identifier[mean_exp_time] = identifier[np] . identifier[mean] ( identifier[exposure_map] ) identifier[exposure_map] [ identifier[exposure_map] < identifier[mean_exp_time] / literal[int] ]= identifier[mean_exp_time] / literal[int] keyword[if] identifier[verbose] : keyword[if] identifier[background_rms] * identifier[np] . identifier[max] ( identifier[exposure_map] )< literal[int] : identifier[print] ( literal[string] %( identifier[background_rms] * identifier[np] . identifier[max] ( identifier[exposure_map] ))) identifier[d_pos] = identifier[np] . identifier[zeros_like] ( identifier[data] ) identifier[d_pos] [ identifier[data] >= literal[int] ]= identifier[data] [ identifier[data] >= literal[int] ] identifier[sigma] = identifier[d_pos] / identifier[exposure_map] + identifier[background_rms] ** literal[int] keyword[return] identifier[sigma]
def covariance_matrix(self, data, background_rms=1, exposure_map=1, noise_map=None, verbose=False): """ returns a diagonal matrix for the covariance estimation which describes the error Notes: - the exposure map must be positive definite. Values that deviate too much from the mean exposure time will be given a lower limit to not under-predict the Poisson component of the noise. - the data must be positive semi-definite for the Poisson noise estimate. Values < 0 (Possible after mean subtraction) will not have a Poisson component in their noise estimate. :param data: data array, eg in units of photons/second :param background_rms: background noise rms, eg. in units (photons/second)^2 :param exposure_map: exposure time per pixel, e.g. in units of seconds :return: len(d) x len(d) matrix that give the error of background and Poisson components; (photons/second)^2 """ if noise_map is not None: return noise_map ** 2 # depends on [control=['if'], data=['noise_map']] if isinstance(exposure_map, int) or isinstance(exposure_map, float): if exposure_map <= 0: exposure_map = 1 # depends on [control=['if'], data=['exposure_map']] # depends on [control=['if'], data=[]] else: mean_exp_time = np.mean(exposure_map) exposure_map[exposure_map < mean_exp_time / 10] = mean_exp_time / 10 if verbose: if background_rms * np.max(exposure_map) < 1: print('WARNING! sigma_b*f %s < 1 count may introduce unstable error estimates' % (background_rms * np.max(exposure_map))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] d_pos = np.zeros_like(data) #threshold = 1.5*sigma_b d_pos[data >= 0] = data[data >= 0] #d_pos[d < threshold] = 0 sigma = d_pos / exposure_map + background_rms ** 2 return sigma
def using_env(self, env, clean=True, silent=True, filename=None): """ This context manager allows the contextual use of a different env Example of settings.toml:: [development] message = 'This is in dev' [other] message = 'this is in other env' Program:: >>> from dynaconf import settings >>> print settings.MESSAGE 'This is in dev' >>> with settings.using_env('OTHER'): ... print settings.MESSAGE 'this is in other env' :param env: Upper case name of env without any _ :param clean: If preloaded vars should be cleaned :param silent: Silence errors :param filename: Custom filename to load (optional) :return: context """ try: self.setenv(env, clean=clean, silent=silent, filename=filename) self.logger.debug("In env: %s", env) yield finally: if env.lower() != self.ENV_FOR_DYNACONF.lower(): del self.loaded_envs[-1] self.logger.debug("Out env: %s", env) self.setenv(self.current_env, clean=clean, filename=filename)
def function[using_env, parameter[self, env, clean, silent, filename]]: constant[ This context manager allows the contextual use of a different env Example of settings.toml:: [development] message = 'This is in dev' [other] message = 'this is in other env' Program:: >>> from dynaconf import settings >>> print settings.MESSAGE 'This is in dev' >>> with settings.using_env('OTHER'): ... print settings.MESSAGE 'this is in other env' :param env: Upper case name of env without any _ :param clean: If preloaded vars should be cleaned :param silent: Silence errors :param filename: Custom filename to load (optional) :return: context ] <ast.Try object at 0x7da1b1721150>
keyword[def] identifier[using_env] ( identifier[self] , identifier[env] , identifier[clean] = keyword[True] , identifier[silent] = keyword[True] , identifier[filename] = keyword[None] ): literal[string] keyword[try] : identifier[self] . identifier[setenv] ( identifier[env] , identifier[clean] = identifier[clean] , identifier[silent] = identifier[silent] , identifier[filename] = identifier[filename] ) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[env] ) keyword[yield] keyword[finally] : keyword[if] identifier[env] . identifier[lower] ()!= identifier[self] . identifier[ENV_FOR_DYNACONF] . identifier[lower] (): keyword[del] identifier[self] . identifier[loaded_envs] [- literal[int] ] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] , identifier[env] ) identifier[self] . identifier[setenv] ( identifier[self] . identifier[current_env] , identifier[clean] = identifier[clean] , identifier[filename] = identifier[filename] )
def using_env(self, env, clean=True, silent=True, filename=None): """ This context manager allows the contextual use of a different env Example of settings.toml:: [development] message = 'This is in dev' [other] message = 'this is in other env' Program:: >>> from dynaconf import settings >>> print settings.MESSAGE 'This is in dev' >>> with settings.using_env('OTHER'): ... print settings.MESSAGE 'this is in other env' :param env: Upper case name of env without any _ :param clean: If preloaded vars should be cleaned :param silent: Silence errors :param filename: Custom filename to load (optional) :return: context """ try: self.setenv(env, clean=clean, silent=silent, filename=filename) self.logger.debug('In env: %s', env) yield # depends on [control=['try'], data=[]] finally: if env.lower() != self.ENV_FOR_DYNACONF.lower(): del self.loaded_envs[-1] # depends on [control=['if'], data=[]] self.logger.debug('Out env: %s', env) self.setenv(self.current_env, clean=clean, filename=filename)
def parse_params(args): """ Parse the params file args, create and return Assembly object.""" ## check that params.txt file is correctly formatted. try: with open(args.params) as paramsin: plines = paramsin.readlines() except IOError as _: sys.exit(" No params file found") ## check header: big version changes can be distinguished by the header legacy_version = 0 try: ## try to update the Assembly ... legacy_version = 1 if not len(plines[0].split()[0]) == 7: raise IPyradWarningExit(""" Error: file '{}' is not compatible with ipyrad v.{}. Please create and update a new params file using the -n argument. For info on which parameters have changed see the changelog: (http://ipyrad.readthedocs.io/releasenotes.html) """.format(args.params, ip.__version__)) except IndexError: raise IPyradWarningExit(""" Error: Params file should not have any empty lines at the top of the file. Verify there are no blank lines and rerun ipyrad. Offending file - {} """.format(args.params)) ## update and backup if legacy_version: #which version... #update_to_6() pass ## make into a dict. Ignore blank lines at the end of file ## Really this will ignore all blank lines items = [i.split("##")[0].strip() for i in plines[1:] if not i.strip() == ""] #keys = [i.split("]")[-2][-1] for i in plines[1:]] #keys = range(len(plines)-1) keys = ip.Assembly('null', quiet=True).paramsdict.keys() parsedict = {str(i):j for i, j in zip(keys, items)} return parsedict
def function[parse_params, parameter[args]]: constant[ Parse the params file args, create and return Assembly object.] <ast.Try object at 0x7da18eb57a60> variable[legacy_version] assign[=] constant[0] <ast.Try object at 0x7da18eb56f50> if name[legacy_version] begin[:] pass variable[items] assign[=] <ast.ListComp object at 0x7da18eb56590> variable[keys] assign[=] call[call[name[ip].Assembly, parameter[constant[null]]].paramsdict.keys, parameter[]] variable[parsedict] assign[=] <ast.DictComp object at 0x7da20cabd330> return[name[parsedict]]
keyword[def] identifier[parse_params] ( identifier[args] ): literal[string] keyword[try] : keyword[with] identifier[open] ( identifier[args] . identifier[params] ) keyword[as] identifier[paramsin] : identifier[plines] = identifier[paramsin] . identifier[readlines] () keyword[except] identifier[IOError] keyword[as] identifier[_] : identifier[sys] . identifier[exit] ( literal[string] ) identifier[legacy_version] = literal[int] keyword[try] : identifier[legacy_version] = literal[int] keyword[if] keyword[not] identifier[len] ( identifier[plines] [ literal[int] ]. identifier[split] ()[ literal[int] ])== literal[int] : keyword[raise] identifier[IPyradWarningExit] ( literal[string] . identifier[format] ( identifier[args] . identifier[params] , identifier[ip] . identifier[__version__] )) keyword[except] identifier[IndexError] : keyword[raise] identifier[IPyradWarningExit] ( literal[string] . identifier[format] ( identifier[args] . identifier[params] )) keyword[if] identifier[legacy_version] : keyword[pass] identifier[items] =[ identifier[i] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] () keyword[for] identifier[i] keyword[in] identifier[plines] [ literal[int] :] keyword[if] keyword[not] identifier[i] . identifier[strip] ()== literal[string] ] identifier[keys] = identifier[ip] . identifier[Assembly] ( literal[string] , identifier[quiet] = keyword[True] ). identifier[paramsdict] . identifier[keys] () identifier[parsedict] ={ identifier[str] ( identifier[i] ): identifier[j] keyword[for] identifier[i] , identifier[j] keyword[in] identifier[zip] ( identifier[keys] , identifier[items] )} keyword[return] identifier[parsedict]
def parse_params(args): """ Parse the params file args, create and return Assembly object.""" ## check that params.txt file is correctly formatted. try: with open(args.params) as paramsin: plines = paramsin.readlines() # depends on [control=['with'], data=['paramsin']] # depends on [control=['try'], data=[]] except IOError as _: sys.exit(' No params file found') # depends on [control=['except'], data=[]] ## check header: big version changes can be distinguished by the header legacy_version = 0 try: ## try to update the Assembly ... legacy_version = 1 if not len(plines[0].split()[0]) == 7: raise IPyradWarningExit("\n Error: file '{}' is not compatible with ipyrad v.{}.\n Please create and update a new params file using the -n argument. \n For info on which parameters have changed see the changelog:\n (http://ipyrad.readthedocs.io/releasenotes.html)\n ".format(args.params, ip.__version__)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except IndexError: raise IPyradWarningExit('\n Error: Params file should not have any empty lines at the top\n of the file. Verify there are no blank lines and rerun ipyrad.\n Offending file - {}\n '.format(args.params)) # depends on [control=['except'], data=[]] ## update and backup if legacy_version: #which version... #update_to_6() pass # depends on [control=['if'], data=[]] ## make into a dict. Ignore blank lines at the end of file ## Really this will ignore all blank lines items = [i.split('##')[0].strip() for i in plines[1:] if not i.strip() == ''] #keys = [i.split("]")[-2][-1] for i in plines[1:]] #keys = range(len(plines)-1) keys = ip.Assembly('null', quiet=True).paramsdict.keys() parsedict = {str(i): j for (i, j) in zip(keys, items)} return parsedict
def _setup_bonds(self): """Derive Bond objects from the record.""" self._bonds = {} if 'bonds' not in self.record: return # Create bonds aid1s = self.record['bonds']['aid1'] aid2s = self.record['bonds']['aid2'] orders = self.record['bonds']['order'] if not len(aid1s) == len(aid2s) == len(orders): raise ResponseParseError('Error parsing bonds') for aid1, aid2, order in zip(aid1s, aid2s, orders): self._bonds[frozenset((aid1, aid2))] = Bond(aid1=aid1, aid2=aid2, order=order) # Add styles if 'coords' in self.record and 'style' in self.record['coords'][0]['conformers'][0]: aid1s = self.record['coords'][0]['conformers'][0]['style']['aid1'] aid2s = self.record['coords'][0]['conformers'][0]['style']['aid2'] styles = self.record['coords'][0]['conformers'][0]['style']['annotation'] for aid1, aid2, style in zip(aid1s, aid2s, styles): self._bonds[frozenset((aid1, aid2))].style = style
def function[_setup_bonds, parameter[self]]: constant[Derive Bond objects from the record.] name[self]._bonds assign[=] dictionary[[], []] if compare[constant[bonds] <ast.NotIn object at 0x7da2590d7190> name[self].record] begin[:] return[None] variable[aid1s] assign[=] call[call[name[self].record][constant[bonds]]][constant[aid1]] variable[aid2s] assign[=] call[call[name[self].record][constant[bonds]]][constant[aid2]] variable[orders] assign[=] call[call[name[self].record][constant[bonds]]][constant[order]] if <ast.UnaryOp object at 0x7da1b0ca4cd0> begin[:] <ast.Raise object at 0x7da1b0bcad70> for taget[tuple[[<ast.Name object at 0x7da1b0bc8c10>, <ast.Name object at 0x7da1b0bc9990>, <ast.Name object at 0x7da1b0bca710>]]] in starred[call[name[zip], parameter[name[aid1s], name[aid2s], name[orders]]]] begin[:] call[name[self]._bonds][call[name[frozenset], parameter[tuple[[<ast.Name object at 0x7da1b0bcb370>, <ast.Name object at 0x7da1b0bc8640>]]]]] assign[=] call[name[Bond], parameter[]] if <ast.BoolOp object at 0x7da1b0bc8d60> begin[:] variable[aid1s] assign[=] call[call[call[call[call[call[name[self].record][constant[coords]]][constant[0]]][constant[conformers]]][constant[0]]][constant[style]]][constant[aid1]] variable[aid2s] assign[=] call[call[call[call[call[call[name[self].record][constant[coords]]][constant[0]]][constant[conformers]]][constant[0]]][constant[style]]][constant[aid2]] variable[styles] assign[=] call[call[call[call[call[call[name[self].record][constant[coords]]][constant[0]]][constant[conformers]]][constant[0]]][constant[style]]][constant[annotation]] for taget[tuple[[<ast.Name object at 0x7da1b0bcac20>, <ast.Name object at 0x7da1b0bcbe80>, <ast.Name object at 0x7da1b0bc9fc0>]]] in starred[call[name[zip], parameter[name[aid1s], name[aid2s], name[styles]]]] begin[:] call[name[self]._bonds][call[name[frozenset], parameter[tuple[[<ast.Name object at 0x7da1b0bc8c40>, <ast.Name object at 0x7da1b0bcbdf0>]]]]].style assign[=] name[style]
keyword[def] identifier[_setup_bonds] ( identifier[self] ): literal[string] identifier[self] . identifier[_bonds] ={} keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[record] : keyword[return] identifier[aid1s] = identifier[self] . identifier[record] [ literal[string] ][ literal[string] ] identifier[aid2s] = identifier[self] . identifier[record] [ literal[string] ][ literal[string] ] identifier[orders] = identifier[self] . identifier[record] [ literal[string] ][ literal[string] ] keyword[if] keyword[not] identifier[len] ( identifier[aid1s] )== identifier[len] ( identifier[aid2s] )== identifier[len] ( identifier[orders] ): keyword[raise] identifier[ResponseParseError] ( literal[string] ) keyword[for] identifier[aid1] , identifier[aid2] , identifier[order] keyword[in] identifier[zip] ( identifier[aid1s] , identifier[aid2s] , identifier[orders] ): identifier[self] . identifier[_bonds] [ identifier[frozenset] (( identifier[aid1] , identifier[aid2] ))]= identifier[Bond] ( identifier[aid1] = identifier[aid1] , identifier[aid2] = identifier[aid2] , identifier[order] = identifier[order] ) keyword[if] literal[string] keyword[in] identifier[self] . identifier[record] keyword[and] literal[string] keyword[in] identifier[self] . identifier[record] [ literal[string] ][ literal[int] ][ literal[string] ][ literal[int] ]: identifier[aid1s] = identifier[self] . identifier[record] [ literal[string] ][ literal[int] ][ literal[string] ][ literal[int] ][ literal[string] ][ literal[string] ] identifier[aid2s] = identifier[self] . identifier[record] [ literal[string] ][ literal[int] ][ literal[string] ][ literal[int] ][ literal[string] ][ literal[string] ] identifier[styles] = identifier[self] . 
identifier[record] [ literal[string] ][ literal[int] ][ literal[string] ][ literal[int] ][ literal[string] ][ literal[string] ] keyword[for] identifier[aid1] , identifier[aid2] , identifier[style] keyword[in] identifier[zip] ( identifier[aid1s] , identifier[aid2s] , identifier[styles] ): identifier[self] . identifier[_bonds] [ identifier[frozenset] (( identifier[aid1] , identifier[aid2] ))]. identifier[style] = identifier[style]
def _setup_bonds(self): """Derive Bond objects from the record.""" self._bonds = {} if 'bonds' not in self.record: return # depends on [control=['if'], data=[]] # Create bonds aid1s = self.record['bonds']['aid1'] aid2s = self.record['bonds']['aid2'] orders = self.record['bonds']['order'] if not len(aid1s) == len(aid2s) == len(orders): raise ResponseParseError('Error parsing bonds') # depends on [control=['if'], data=[]] for (aid1, aid2, order) in zip(aid1s, aid2s, orders): self._bonds[frozenset((aid1, aid2))] = Bond(aid1=aid1, aid2=aid2, order=order) # depends on [control=['for'], data=[]] # Add styles if 'coords' in self.record and 'style' in self.record['coords'][0]['conformers'][0]: aid1s = self.record['coords'][0]['conformers'][0]['style']['aid1'] aid2s = self.record['coords'][0]['conformers'][0]['style']['aid2'] styles = self.record['coords'][0]['conformers'][0]['style']['annotation'] for (aid1, aid2, style) in zip(aid1s, aid2s, styles): self._bonds[frozenset((aid1, aid2))].style = style # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
def _snat_subnet_for_ext_net(self, context, subnet, net): """Determine if an SNAT subnet is for this external network. This method determines if a given SNAT subnet is intended for the passed external network. For APIC ML2/Neutron workflow, SNAT subnets are created on a separate network from the external network. The association with an external network is made by putting the name of the external network in the name of the SNAT network name, using a well-known prefix. """ if subnet['network_id'] == net['id']: return True network = self._core_plugin.get_network( context.elevated(), subnet['network_id']) ext_net_name = network['name'] if (APIC_SNAT_NET + '-') in ext_net_name: # This is APIC ML2 mode -- we need to strip the prefix ext_net_name = ext_net_name[len(APIC_SNAT_NET + '-'):] if net['id'] == ext_net_name: return True return False
def function[_snat_subnet_for_ext_net, parameter[self, context, subnet, net]]: constant[Determine if an SNAT subnet is for this external network. This method determines if a given SNAT subnet is intended for the passed external network. For APIC ML2/Neutron workflow, SNAT subnets are created on a separate network from the external network. The association with an external network is made by putting the name of the external network in the name of the SNAT network name, using a well-known prefix. ] if compare[call[name[subnet]][constant[network_id]] equal[==] call[name[net]][constant[id]]] begin[:] return[constant[True]] variable[network] assign[=] call[name[self]._core_plugin.get_network, parameter[call[name[context].elevated, parameter[]], call[name[subnet]][constant[network_id]]]] variable[ext_net_name] assign[=] call[name[network]][constant[name]] if compare[binary_operation[name[APIC_SNAT_NET] + constant[-]] in name[ext_net_name]] begin[:] variable[ext_net_name] assign[=] call[name[ext_net_name]][<ast.Slice object at 0x7da1b1c60640>] if compare[call[name[net]][constant[id]] equal[==] name[ext_net_name]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[_snat_subnet_for_ext_net] ( identifier[self] , identifier[context] , identifier[subnet] , identifier[net] ): literal[string] keyword[if] identifier[subnet] [ literal[string] ]== identifier[net] [ literal[string] ]: keyword[return] keyword[True] identifier[network] = identifier[self] . identifier[_core_plugin] . identifier[get_network] ( identifier[context] . identifier[elevated] (), identifier[subnet] [ literal[string] ]) identifier[ext_net_name] = identifier[network] [ literal[string] ] keyword[if] ( identifier[APIC_SNAT_NET] + literal[string] ) keyword[in] identifier[ext_net_name] : identifier[ext_net_name] = identifier[ext_net_name] [ identifier[len] ( identifier[APIC_SNAT_NET] + literal[string] ):] keyword[if] identifier[net] [ literal[string] ]== identifier[ext_net_name] : keyword[return] keyword[True] keyword[return] keyword[False]
def _snat_subnet_for_ext_net(self, context, subnet, net): """Determine if an SNAT subnet is for this external network. This method determines if a given SNAT subnet is intended for the passed external network. For APIC ML2/Neutron workflow, SNAT subnets are created on a separate network from the external network. The association with an external network is made by putting the name of the external network in the name of the SNAT network name, using a well-known prefix. """ if subnet['network_id'] == net['id']: return True # depends on [control=['if'], data=[]] network = self._core_plugin.get_network(context.elevated(), subnet['network_id']) ext_net_name = network['name'] if APIC_SNAT_NET + '-' in ext_net_name: # This is APIC ML2 mode -- we need to strip the prefix ext_net_name = ext_net_name[len(APIC_SNAT_NET + '-'):] if net['id'] == ext_net_name: return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['ext_net_name']] return False
def calcELAxi(R, vR, vT, pot, vc=1., ro=1.):
    """
    NAME:
       calcELAxi
    PURPOSE:
       calculate the energy and angular momentum
    INPUT:
       R - Galactocentric radius (/ro)
       vR - radial part of the velocity (/vc)
       vT - azimuthal part of the velocity (/vc)
       pot - potential
       vc - circular velocity
       ro - reference radius
    OUTPUT:
       (E,L)
    HISTORY:
       2010-11-30 - Written - Bovy (NYU)
    """
    # specific energy: potential term plus the two kinetic terms
    energy = potentialAxi(R, pot) + vR**2./2. + vT**2./2.
    # specific angular momentum about the symmetry axis
    angular_momentum = R*vT
    return (energy, angular_momentum)
def function[calcELAxi, parameter[R, vR, vT, pot, vc, ro]]: constant[ NAME: calcELAxi PURPOSE: calculate the energy and angular momentum INPUT: R - Galactocentric radius (/ro) vR - radial part of the velocity (/vc) vT - azimuthal part of the velocity (/vc) vc - circular velocity ro - reference radius OUTPUT: (E,L) HISTORY: 2010-11-30 - Written - Bovy (NYU) ] return[tuple[[<ast.BinOp object at 0x7da1b0cf4eb0>, <ast.BinOp object at 0x7da1b0cf7e80>]]]
keyword[def] identifier[calcELAxi] ( identifier[R] , identifier[vR] , identifier[vT] , identifier[pot] , identifier[vc] = literal[int] , identifier[ro] = literal[int] ): literal[string] keyword[return] ( identifier[potentialAxi] ( identifier[R] , identifier[pot] )+ identifier[vR] ** literal[int] / literal[int] + identifier[vT] ** literal[int] / literal[int] , identifier[R] * identifier[vT] )
def calcELAxi(R, vR, vT, pot, vc=1.0, ro=1.0): """ NAME: calcELAxi PURPOSE: calculate the energy and angular momentum INPUT: R - Galactocentric radius (/ro) vR - radial part of the velocity (/vc) vT - azimuthal part of the velocity (/vc) vc - circular velocity ro - reference radius OUTPUT: (E,L) HISTORY: 2010-11-30 - Written - Bovy (NYU) """ return (potentialAxi(R, pot) + vR ** 2.0 / 2.0 + vT ** 2.0 / 2.0, R * vT)
def pseudo_cqt(y, sr=22050, hop_length=512, fmin=None, n_bins=84,
               bins_per_octave=12, tuning=0.0, filter_scale=1,
               norm=1, sparsity=0.01, window='hann', scale=True,
               pad_mode='reflect'):
    '''Compute the pseudo constant-Q transform of an audio signal.

    A single FFT size is used: the smallest power of two that is at
    least as large as both the longest CQT filter and twice the hop
    length.

    Parameters
    ----------
    y : np.ndarray [shape=(n,)]
        audio time series

    sr : number > 0 [scalar]
        sampling rate of `y`

    hop_length : int > 0 [scalar]
        number of samples between successive CQT columns

    fmin : float > 0 [scalar]
        minimum frequency; defaults to C1 ~= 32.70 Hz

    n_bins : int > 0 [scalar]
        number of frequency bins, starting at `fmin`

    bins_per_octave : int > 0 [scalar]
        number of bins per octave

    tuning : None or float in `[-0.5, 0.5)`
        tuning offset in fractions of a bin (cents);
        if `None`, tuning is estimated from the signal

    filter_scale : float > 0
        filter scale factor; larger values use longer windows

    norm : {inf, -inf, 0, float > 0}
        type of norm used for basis-filter normalization

    sparsity : float in [0, 1)
        discard up to `sparsity` fraction of the energy in each basis
        filter; `sparsity=0` disables sparsification

    window : str, tuple, number, or function
        window specification for the basis filters
        (see `filters.get_window` for details)

    scale : bool
        if `True`, scale the output down by `sqrt(n_fft)`;
        otherwise rescale by the constant-Q filter lengths

    pad_mode : string
        padding mode for centered frame analysis
        (see `librosa.core.stft` and `np.pad`)

    Returns
    -------
    CQT : np.ndarray [shape=(n_bins, t), dtype=np.float]
        Pseudo Constant-Q energy for each frequency at each time.

    Raises
    ------
    ParameterError
        If `hop_length` is not an integer multiple of
        `2**(n_bins / bins_per_octave)`, or if `y` is too short to
        support the frequency range of the CQT.

    Notes
    -----
    This function caches at level 20.
    '''
    if fmin is None:
        # C1 by default
        fmin = note_to_hz('C1')

    if tuning is None:
        # estimate the tuning directly from the signal
        tuning = estimate_tuning(y=y, sr=sr)

    # Build the frequency-domain CQT basis, then keep only magnitudes
    # for the pseudo-CQT projection.
    basis, n_fft, _ = __cqt_filter_fft(sr, fmin, n_bins, bins_per_octave,
                                       tuning, filter_scale, norm, sparsity,
                                       hop_length=hop_length, window=window)
    basis = np.abs(basis)

    # Magnitude STFT at the matched FFT size.
    magnitude = np.abs(stft(y, n_fft=n_fft,
                            hop_length=hop_length,
                            pad_mode=pad_mode))

    # Project the spectrogram onto the pseudo-CQT basis.
    response = basis.dot(magnitude)

    if scale:
        response /= np.sqrt(n_fft)
    else:
        lengths = filters.constant_q_lengths(sr, fmin,
                                             n_bins=n_bins,
                                             bins_per_octave=bins_per_octave,
                                             tuning=tuning,
                                             window=window,
                                             filter_scale=filter_scale)
        # Undo the sqrt(n_fft) normalization per-filter.
        response *= np.sqrt(lengths[:, np.newaxis] / n_fft)

    return response
def function[pseudo_cqt, parameter[y, sr, hop_length, fmin, n_bins, bins_per_octave, tuning, filter_scale, norm, sparsity, window, scale, pad_mode]]: constant[Compute the pseudo constant-Q transform of an audio signal. This uses a single fft size that is the smallest power of 2 that is greater than or equal to the max of: 1. The longest CQT filter 2. 2x the hop_length Parameters ---------- y : np.ndarray [shape=(n,)] audio time series sr : number > 0 [scalar] sampling rate of `y` hop_length : int > 0 [scalar] number of samples between successive CQT columns. fmin : float > 0 [scalar] Minimum frequency. Defaults to C1 ~= 32.70 Hz n_bins : int > 0 [scalar] Number of frequency bins, starting at `fmin` bins_per_octave : int > 0 [scalar] Number of bins per octave tuning : None or float in `[-0.5, 0.5)` Tuning offset in fractions of a bin (cents). If `None`, tuning will be automatically estimated from the signal. filter_scale : float > 0 Filter filter_scale factor. Larger values use longer windows. sparsity : float in [0, 1) Sparsify the CQT basis by discarding up to `sparsity` fraction of the energy in each basis. Set `sparsity=0` to disable sparsification. window : str, tuple, number, or function Window specification for the basis filters. See `filters.get_window` for details. pad_mode : string Padding mode for centered frame analysis. See also: `librosa.core.stft` and `np.pad`. Returns ------- CQT : np.ndarray [shape=(n_bins, t), dtype=np.float] Pseudo Constant-Q energy for each frequency at each time. Raises ------ ParameterError If `hop_length` is not an integer multiple of `2**(n_bins / bins_per_octave)` Or if `y` is too short to support the frequency range of the CQT. Notes ----- This function caches at level 20. 
] if compare[name[fmin] is constant[None]] begin[:] variable[fmin] assign[=] call[name[note_to_hz], parameter[constant[C1]]] if compare[name[tuning] is constant[None]] begin[:] variable[tuning] assign[=] call[name[estimate_tuning], parameter[]] <ast.Tuple object at 0x7da1b0513b50> assign[=] call[name[__cqt_filter_fft], parameter[name[sr], name[fmin], name[n_bins], name[bins_per_octave], name[tuning], name[filter_scale], name[norm], name[sparsity]]] variable[fft_basis] assign[=] call[name[np].abs, parameter[name[fft_basis]]] variable[D] assign[=] call[name[np].abs, parameter[call[name[stft], parameter[name[y]]]]] variable[C] assign[=] call[name[fft_basis].dot, parameter[name[D]]] if name[scale] begin[:] <ast.AugAssign object at 0x7da1b05126b0> return[name[C]]
keyword[def] identifier[pseudo_cqt] ( identifier[y] , identifier[sr] = literal[int] , identifier[hop_length] = literal[int] , identifier[fmin] = keyword[None] , identifier[n_bins] = literal[int] , identifier[bins_per_octave] = literal[int] , identifier[tuning] = literal[int] , identifier[filter_scale] = literal[int] , identifier[norm] = literal[int] , identifier[sparsity] = literal[int] , identifier[window] = literal[string] , identifier[scale] = keyword[True] , identifier[pad_mode] = literal[string] ): literal[string] keyword[if] identifier[fmin] keyword[is] keyword[None] : identifier[fmin] = identifier[note_to_hz] ( literal[string] ) keyword[if] identifier[tuning] keyword[is] keyword[None] : identifier[tuning] = identifier[estimate_tuning] ( identifier[y] = identifier[y] , identifier[sr] = identifier[sr] ) identifier[fft_basis] , identifier[n_fft] , identifier[_] = identifier[__cqt_filter_fft] ( identifier[sr] , identifier[fmin] , identifier[n_bins] , identifier[bins_per_octave] , identifier[tuning] , identifier[filter_scale] , identifier[norm] , identifier[sparsity] , identifier[hop_length] = identifier[hop_length] , identifier[window] = identifier[window] ) identifier[fft_basis] = identifier[np] . identifier[abs] ( identifier[fft_basis] ) identifier[D] = identifier[np] . identifier[abs] ( identifier[stft] ( identifier[y] , identifier[n_fft] = identifier[n_fft] , identifier[hop_length] = identifier[hop_length] , identifier[pad_mode] = identifier[pad_mode] )) identifier[C] = identifier[fft_basis] . identifier[dot] ( identifier[D] ) keyword[if] identifier[scale] : identifier[C] /= identifier[np] . identifier[sqrt] ( identifier[n_fft] ) keyword[else] : identifier[lengths] = identifier[filters] . 
identifier[constant_q_lengths] ( identifier[sr] , identifier[fmin] , identifier[n_bins] = identifier[n_bins] , identifier[bins_per_octave] = identifier[bins_per_octave] , identifier[tuning] = identifier[tuning] , identifier[window] = identifier[window] , identifier[filter_scale] = identifier[filter_scale] ) identifier[C] *= identifier[np] . identifier[sqrt] ( identifier[lengths] [:, identifier[np] . identifier[newaxis] ]/ identifier[n_fft] ) keyword[return] identifier[C]
def pseudo_cqt(y, sr=22050, hop_length=512, fmin=None, n_bins=84, bins_per_octave=12, tuning=0.0, filter_scale=1, norm=1, sparsity=0.01, window='hann', scale=True, pad_mode='reflect'): """Compute the pseudo constant-Q transform of an audio signal. This uses a single fft size that is the smallest power of 2 that is greater than or equal to the max of: 1. The longest CQT filter 2. 2x the hop_length Parameters ---------- y : np.ndarray [shape=(n,)] audio time series sr : number > 0 [scalar] sampling rate of `y` hop_length : int > 0 [scalar] number of samples between successive CQT columns. fmin : float > 0 [scalar] Minimum frequency. Defaults to C1 ~= 32.70 Hz n_bins : int > 0 [scalar] Number of frequency bins, starting at `fmin` bins_per_octave : int > 0 [scalar] Number of bins per octave tuning : None or float in `[-0.5, 0.5)` Tuning offset in fractions of a bin (cents). If `None`, tuning will be automatically estimated from the signal. filter_scale : float > 0 Filter filter_scale factor. Larger values use longer windows. sparsity : float in [0, 1) Sparsify the CQT basis by discarding up to `sparsity` fraction of the energy in each basis. Set `sparsity=0` to disable sparsification. window : str, tuple, number, or function Window specification for the basis filters. See `filters.get_window` for details. pad_mode : string Padding mode for centered frame analysis. See also: `librosa.core.stft` and `np.pad`. Returns ------- CQT : np.ndarray [shape=(n_bins, t), dtype=np.float] Pseudo Constant-Q energy for each frequency at each time. Raises ------ ParameterError If `hop_length` is not an integer multiple of `2**(n_bins / bins_per_octave)` Or if `y` is too short to support the frequency range of the CQT. Notes ----- This function caches at level 20. 
""" if fmin is None: # C1 by default fmin = note_to_hz('C1') # depends on [control=['if'], data=['fmin']] if tuning is None: tuning = estimate_tuning(y=y, sr=sr) # depends on [control=['if'], data=['tuning']] (fft_basis, n_fft, _) = __cqt_filter_fft(sr, fmin, n_bins, bins_per_octave, tuning, filter_scale, norm, sparsity, hop_length=hop_length, window=window) fft_basis = np.abs(fft_basis) # Compute the magnitude STFT with Hann window D = np.abs(stft(y, n_fft=n_fft, hop_length=hop_length, pad_mode=pad_mode)) # Project onto the pseudo-cqt basis C = fft_basis.dot(D) if scale: C /= np.sqrt(n_fft) # depends on [control=['if'], data=[]] else: lengths = filters.constant_q_lengths(sr, fmin, n_bins=n_bins, bins_per_octave=bins_per_octave, tuning=tuning, window=window, filter_scale=filter_scale) C *= np.sqrt(lengths[:, np.newaxis] / n_fft) return C
def tree(self, in_tree):
    '''
    Assign a tree to the internal ``self._tree`` variable.

    The tree is either loaded from file (if ``in_tree`` is a path to an
    existing newick or nexus file) or assigned directly (if ``in_tree``
    is a ``Phylo.BaseTree.Tree`` instance). On success, any pre-existing
    per-node ``sequence`` attributes are removed, the original branch
    lengths are preserved on each node, and ``self.prepare_tree()`` is
    called.

    Parameters
    ----------
    in_tree : Phylo.BaseTree.Tree or str
        Tree object, or path to a newick/nexus tree file.

    Returns
    -------
    int
        ``ttconf.SUCCESS`` on success; ``ttconf.ERROR`` if the tree
        could not be loaded (``self._tree`` is then set to ``None``).
    '''
    from os.path import isfile
    if isinstance(in_tree, Phylo.BaseTree.Tree):
        self._tree = in_tree
    # isinstance (rather than ``type(in_tree) in string_types``) so that
    # subclasses of str are accepted as file paths as well.
    elif isinstance(in_tree, string_types) and isfile(in_tree):
        try:
            self._tree = Phylo.read(in_tree, 'newick')
        except Exception:
            # Newick parsing failed -- fall back to nexus when the file
            # extension suggests it. ``except Exception`` (not a bare
            # ``except:``) so KeyboardInterrupt/SystemExit still propagate.
            fmt = in_tree.split('.')[-1]
            if fmt in ['nexus', 'nex']:
                self._tree = Phylo.read(in_tree, 'nexus')
            else:
                self.logger('TreeAnc: could not load tree, format needs to be nexus or newick! input was '+str(in_tree),1)
                self._tree = None
                return ttconf.ERROR
    else:
        self.logger('TreeAnc: could not load tree! input was '+str(in_tree),0)
        self._tree = None
        return ttconf.ERROR

    # Remove all existing sequence attributes and remember the original
    # branch lengths before downstream code starts modifying them.
    for node in self._tree.find_clades():
        if hasattr(node, "sequence"):
            delattr(node, "sequence")
        node.original_length = node.branch_length
        node.mutation_length = node.branch_length
    self.prepare_tree()

    return ttconf.SUCCESS
def function[tree, parameter[self, in_tree]]: constant[ assigns a tree to the internal self._tree variable. The tree is either loaded from file (if in_tree is str) or assigned (if in_tree is a Phylo.tree) ] from relative_module[os.path] import module[isfile] if call[name[isinstance], parameter[name[in_tree], name[Phylo].BaseTree.Tree]] begin[:] name[self]._tree assign[=] name[in_tree] for taget[name[node]] in starred[call[name[self]._tree.find_clades, parameter[]]] begin[:] if call[name[hasattr], parameter[name[node], constant[sequence]]] begin[:] call[name[node].__delattr__, parameter[constant[sequence]]] name[node].original_length assign[=] name[node].branch_length name[node].mutation_length assign[=] name[node].branch_length call[name[self].prepare_tree, parameter[]] return[name[ttconf].SUCCESS]
keyword[def] identifier[tree] ( identifier[self] , identifier[in_tree] ): literal[string] keyword[from] identifier[os] . identifier[path] keyword[import] identifier[isfile] keyword[if] identifier[isinstance] ( identifier[in_tree] , identifier[Phylo] . identifier[BaseTree] . identifier[Tree] ): identifier[self] . identifier[_tree] = identifier[in_tree] keyword[elif] identifier[type] ( identifier[in_tree] ) keyword[in] identifier[string_types] keyword[and] identifier[isfile] ( identifier[in_tree] ): keyword[try] : identifier[self] . identifier[_tree] = identifier[Phylo] . identifier[read] ( identifier[in_tree] , literal[string] ) keyword[except] : identifier[fmt] = identifier[in_tree] . identifier[split] ( literal[string] )[- literal[int] ] keyword[if] identifier[fmt] keyword[in] [ literal[string] , literal[string] ]: identifier[self] . identifier[_tree] = identifier[Phylo] . identifier[read] ( identifier[in_tree] , literal[string] ) keyword[else] : identifier[self] . identifier[logger] ( literal[string] + identifier[str] ( identifier[in_tree] ), literal[int] ) identifier[self] . identifier[_tree] = keyword[None] keyword[return] identifier[ttconf] . identifier[ERROR] keyword[else] : identifier[self] . identifier[logger] ( literal[string] + identifier[str] ( identifier[in_tree] ), literal[int] ) identifier[self] . identifier[_tree] = keyword[None] keyword[return] identifier[ttconf] . identifier[ERROR] keyword[for] identifier[node] keyword[in] identifier[self] . identifier[_tree] . identifier[find_clades] (): keyword[if] identifier[hasattr] ( identifier[node] , literal[string] ): identifier[node] . identifier[__delattr__] ( literal[string] ) identifier[node] . identifier[original_length] = identifier[node] . identifier[branch_length] identifier[node] . identifier[mutation_length] = identifier[node] . identifier[branch_length] identifier[self] . identifier[prepare_tree] () keyword[return] identifier[ttconf] . identifier[SUCCESS]
def tree(self, in_tree): """ assigns a tree to the internal self._tree variable. The tree is either loaded from file (if in_tree is str) or assigned (if in_tree is a Phylo.tree) """ from os.path import isfile if isinstance(in_tree, Phylo.BaseTree.Tree): self._tree = in_tree # depends on [control=['if'], data=[]] elif type(in_tree) in string_types and isfile(in_tree): try: self._tree = Phylo.read(in_tree, 'newick') # depends on [control=['try'], data=[]] except: fmt = in_tree.split('.')[-1] if fmt in ['nexus', 'nex']: self._tree = Phylo.read(in_tree, 'nexus') # depends on [control=['if'], data=[]] else: self.logger('TreeAnc: could not load tree, format needs to be nexus or newick! input was ' + str(in_tree), 1) self._tree = None return ttconf.ERROR # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: self.logger('TreeAnc: could not load tree! input was ' + str(in_tree), 0) self._tree = None return ttconf.ERROR # remove all existing sequence attributes for node in self._tree.find_clades(): if hasattr(node, 'sequence'): node.__delattr__('sequence') # depends on [control=['if'], data=[]] node.original_length = node.branch_length node.mutation_length = node.branch_length # depends on [control=['for'], data=['node']] self.prepare_tree() return ttconf.SUCCESS
def empty(shape, ctx=None, dtype=None, stype=None):
    """Returns a new array of given shape and type, without initializing entries.

    Parameters
    ----------
    shape : int or tuple of int
        The shape of the empty array.
    ctx : Context, optional
        An optional device context (default is the current default context).
    dtype : str or numpy.dtype, optional
        An optional value type (default is `float32`).
    stype : str, optional
        An optional storage type (default is `default`).

    Returns
    -------
    NDArray, CSRNDArray or RowSparseNDArray
        A created array.

    Examples
    --------
    >>> mx.nd.empty(1)
    <NDArray 1 @cpu(0)>
    >>> mx.nd.empty((1,2), mx.gpu(0))
    <NDArray 1x2 @gpu(0)>
    >>> mx.nd.empty((1,2), mx.gpu(0), 'float16')
    <NDArray 1x2 @gpu(0)>
    >>> mx.nd.empty((1,2), stype='csr')
    <CSRNDArray 1x2 @cpu(0)>
    """
    # Dense storage is requested either implicitly (no stype) or explicitly.
    if stype in (None, 'default'):
        return _empty_ndarray(shape, ctx, dtype)
    return _empty_sparse_ndarray(stype, shape, ctx, dtype)
def function[empty, parameter[shape, ctx, dtype, stype]]: constant[Returns a new array of given shape and type, without initializing entries. Parameters ---------- shape : int or tuple of int The shape of the empty array. ctx : Context, optional An optional device context (default is the current default context). dtype : str or numpy.dtype, optional An optional value type (default is `float32`). stype : str, optional An optional storage type (default is `default`). Returns ------- NDArray, CSRNDArray or RowSparseNDArray A created array. Examples -------- >>> mx.nd.empty(1) <NDArray 1 @cpu(0)> >>> mx.nd.empty((1,2), mx.gpu(0)) <NDArray 1x2 @gpu(0)> >>> mx.nd.empty((1,2), mx.gpu(0), 'float16') <NDArray 1x2 @gpu(0)> >>> mx.nd.empty((1,2), stype='csr') <CSRNDArray 1x2 @cpu(0)> ] if <ast.BoolOp object at 0x7da1b204e110> begin[:] return[call[name[_empty_ndarray], parameter[name[shape], name[ctx], name[dtype]]]]
keyword[def] identifier[empty] ( identifier[shape] , identifier[ctx] = keyword[None] , identifier[dtype] = keyword[None] , identifier[stype] = keyword[None] ): literal[string] keyword[if] identifier[stype] keyword[is] keyword[None] keyword[or] identifier[stype] == literal[string] : keyword[return] identifier[_empty_ndarray] ( identifier[shape] , identifier[ctx] , identifier[dtype] ) keyword[else] : keyword[return] identifier[_empty_sparse_ndarray] ( identifier[stype] , identifier[shape] , identifier[ctx] , identifier[dtype] )
def empty(shape, ctx=None, dtype=None, stype=None): """Returns a new array of given shape and type, without initializing entries. Parameters ---------- shape : int or tuple of int The shape of the empty array. ctx : Context, optional An optional device context (default is the current default context). dtype : str or numpy.dtype, optional An optional value type (default is `float32`). stype : str, optional An optional storage type (default is `default`). Returns ------- NDArray, CSRNDArray or RowSparseNDArray A created array. Examples -------- >>> mx.nd.empty(1) <NDArray 1 @cpu(0)> >>> mx.nd.empty((1,2), mx.gpu(0)) <NDArray 1x2 @gpu(0)> >>> mx.nd.empty((1,2), mx.gpu(0), 'float16') <NDArray 1x2 @gpu(0)> >>> mx.nd.empty((1,2), stype='csr') <CSRNDArray 1x2 @cpu(0)> """ if stype is None or stype == 'default': return _empty_ndarray(shape, ctx, dtype) # depends on [control=['if'], data=[]] else: return _empty_sparse_ndarray(stype, shape, ctx, dtype)
def send_course_enrollment_statement(lrs_configuration, course_enrollment):
    """
    Send xAPI statement for course enrollment.

    Arguments:
        lrs_configuration (XAPILRSConfiguration): XAPILRSConfiguration instance
            where to send statements.
        course_enrollment (CourseEnrollment): Course enrollment object.
    """
    # Serialize the learner and the course into the payloads the
    # statement constructor expects.
    learner_info = LearnerInfoSerializer(course_enrollment.user)
    course_info = CourseInfoSerializer(course_enrollment.course)

    statement = LearnerCourseEnrollmentStatement(
        course_enrollment.user,
        course_enrollment.course,
        learner_info.data,
        course_info.data,
    )

    client = EnterpriseXAPIClient(lrs_configuration)
    client.save_statement(statement)
def function[send_course_enrollment_statement, parameter[lrs_configuration, course_enrollment]]: constant[ Send xAPI statement for course enrollment. Arguments: lrs_configuration (XAPILRSConfiguration): XAPILRSConfiguration instance where to send statements. course_enrollment (CourseEnrollment): Course enrollment object. ] variable[user_details] assign[=] call[name[LearnerInfoSerializer], parameter[name[course_enrollment].user]] variable[course_details] assign[=] call[name[CourseInfoSerializer], parameter[name[course_enrollment].course]] variable[statement] assign[=] call[name[LearnerCourseEnrollmentStatement], parameter[name[course_enrollment].user, name[course_enrollment].course, name[user_details].data, name[course_details].data]] call[call[name[EnterpriseXAPIClient], parameter[name[lrs_configuration]]].save_statement, parameter[name[statement]]]
keyword[def] identifier[send_course_enrollment_statement] ( identifier[lrs_configuration] , identifier[course_enrollment] ): literal[string] identifier[user_details] = identifier[LearnerInfoSerializer] ( identifier[course_enrollment] . identifier[user] ) identifier[course_details] = identifier[CourseInfoSerializer] ( identifier[course_enrollment] . identifier[course] ) identifier[statement] = identifier[LearnerCourseEnrollmentStatement] ( identifier[course_enrollment] . identifier[user] , identifier[course_enrollment] . identifier[course] , identifier[user_details] . identifier[data] , identifier[course_details] . identifier[data] , ) identifier[EnterpriseXAPIClient] ( identifier[lrs_configuration] ). identifier[save_statement] ( identifier[statement] )
def send_course_enrollment_statement(lrs_configuration, course_enrollment): """ Send xAPI statement for course enrollment. Arguments: lrs_configuration (XAPILRSConfiguration): XAPILRSConfiguration instance where to send statements. course_enrollment (CourseEnrollment): Course enrollment object. """ user_details = LearnerInfoSerializer(course_enrollment.user) course_details = CourseInfoSerializer(course_enrollment.course) statement = LearnerCourseEnrollmentStatement(course_enrollment.user, course_enrollment.course, user_details.data, course_details.data) EnterpriseXAPIClient(lrs_configuration).save_statement(statement)
def get_opus_maximum_of(self, author_cts_urn):
    """Return the author's opus maximum (None otherwise).

    Given the CTS URN of an author, this method returns its opus
    maximum. When the author has exactly one work, that work is returned
    without checking the flag; with no works (or no flagged work among
    several), None is returned.

    :param author_cts_urn: the author's CTS URN.
    :return: an instance of `surfext.HucitWork` or None
    """
    author = self.get_resource_by_urn(author_cts_urn)
    assert author is not None

    works = author.get_works()
    if len(works) == 1:
        # A single work is the opus maximum by definition.
        return works[0]
    if len(works) > 1:
        # First work explicitly flagged as opus maximum, if any.
        return next((work for work in works if work.is_opus_maximum()), None)
    return None
def function[get_opus_maximum_of, parameter[self, author_cts_urn]]: constant[Return the author's opux maximum (None otherwise). Given the CTS URN of an author, this method returns its opus maximum. If not available returns None. :param author_cts_urn: the author's CTS URN. :return: an instance of `surfext.HucitWork` or None ] variable[author] assign[=] call[name[self].get_resource_by_urn, parameter[name[author_cts_urn]]] assert[compare[name[author] is_not constant[None]]] variable[works] assign[=] call[name[author].get_works, parameter[]] if compare[call[name[len], parameter[name[works]]] greater[>] constant[1]] begin[:] for taget[name[work]] in starred[name[works]] begin[:] if call[name[work].is_opus_maximum, parameter[]] begin[:] return[name[work]]
keyword[def] identifier[get_opus_maximum_of] ( identifier[self] , identifier[author_cts_urn] ): literal[string] identifier[author] = identifier[self] . identifier[get_resource_by_urn] ( identifier[author_cts_urn] ) keyword[assert] identifier[author] keyword[is] keyword[not] keyword[None] identifier[works] = identifier[author] . identifier[get_works] () keyword[if] identifier[len] ( identifier[works] )> literal[int] : keyword[for] identifier[work] keyword[in] identifier[works] : keyword[if] identifier[work] . identifier[is_opus_maximum] (): keyword[return] identifier[work] keyword[elif] identifier[len] ( identifier[works] )== literal[int] : keyword[return] identifier[works] [ literal[int] ] keyword[else] : keyword[return] keyword[None]
def get_opus_maximum_of(self, author_cts_urn): """Return the author's opux maximum (None otherwise). Given the CTS URN of an author, this method returns its opus maximum. If not available returns None. :param author_cts_urn: the author's CTS URN. :return: an instance of `surfext.HucitWork` or None """ author = self.get_resource_by_urn(author_cts_urn) assert author is not None works = author.get_works() if len(works) > 1: for work in works: if work.is_opus_maximum(): return work # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['work']] # depends on [control=['if'], data=[]] elif len(works) == 1: return works[0] # depends on [control=['if'], data=[]] else: return None
def print_callback(val):
    """
    Internal function.

    This function is called via a call back returning from IPC to Cython
    to Python. It tries to perform incremental printing to IPython
    Notebook or Jupyter Notebook and when all else fails, just prints
    locally.

    Parameters
    ----------
    val : str
        The text to display.
    """
    success = False
    try:
        # for reasons I cannot fathom, regular printing, even directly
        # to io.stdout does not work.
        # I have to intrude rather deep into IPython to make it behave
        if have_ipython:
            if InteractiveShell.initialized():
                IPython.display.publish_display_data(
                    {'text/plain': val, 'text/html': '<pre>' + val + '</pre>'})
                success = True
    except Exception:
        # Best-effort display: fall through to plain printing below.
        # ``except Exception`` (not a bare ``except:``) so that
        # KeyboardInterrupt/SystemExit still propagate.
        pass
    if not success:
        print(val)
    sys.stdout.flush()
def function[print_callback, parameter[val]]: constant[ Internal function. This function is called via a call back returning from IPC to Cython to Python. It tries to perform incremental printing to IPython Notebook or Jupyter Notebook and when all else fails, just prints locally. ] variable[success] assign[=] constant[False] <ast.Try object at 0x7da1b1ef0fd0> if <ast.UnaryOp object at 0x7da1b1ef3fa0> begin[:] call[name[print], parameter[name[val]]] call[name[sys].stdout.flush, parameter[]]
keyword[def] identifier[print_callback] ( identifier[val] ): literal[string] identifier[success] = keyword[False] keyword[try] : keyword[if] identifier[have_ipython] : keyword[if] identifier[InteractiveShell] . identifier[initialized] (): identifier[IPython] . identifier[display] . identifier[publish_display_data] ({ literal[string] : identifier[val] , literal[string] : literal[string] + identifier[val] + literal[string] }) identifier[success] = keyword[True] keyword[except] : keyword[pass] keyword[if] keyword[not] identifier[success] : identifier[print] ( identifier[val] ) identifier[sys] . identifier[stdout] . identifier[flush] ()
def print_callback(val): """ Internal function. This function is called via a call back returning from IPC to Cython to Python. It tries to perform incremental printing to IPython Notebook or Jupyter Notebook and when all else fails, just prints locally. """ success = False try: # for reasons I cannot fathom, regular printing, even directly # to io.stdout does not work. # I have to intrude rather deep into IPython to make it behave if have_ipython: if InteractiveShell.initialized(): IPython.display.publish_display_data({'text/plain': val, 'text/html': '<pre>' + val + '</pre>'}) success = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] if not success: print(val) sys.stdout.flush() # depends on [control=['if'], data=[]]
def disarm(self, wait=True, timeout=None):
    '''Disarm the vehicle.

    If wait is True, block until the disarm operation has completed
    before returning. If timeout is not None, a TimeoutError is raised
    when the vehicle has not disarmed after timeout seconds.
    '''
    self.armed = False
    if not wait:
        return
    self.wait_for(lambda: not self.armed,
                  timeout=timeout,
                  errmsg='failed to disarm vehicle')
def function[disarm, parameter[self, wait, timeout]]: constant[Disarm the vehicle. If wait is True, wait for disarm operation to complete before returning. If timeout is nonzero, raise a TimeouTerror if the vehicle has not disarmed after timeout seconds. ] name[self].armed assign[=] constant[False] if name[wait] begin[:] call[name[self].wait_for, parameter[<ast.Lambda object at 0x7da1b1c66260>]]
keyword[def] identifier[disarm] ( identifier[self] , identifier[wait] = keyword[True] , identifier[timeout] = keyword[None] ): literal[string] identifier[self] . identifier[armed] = keyword[False] keyword[if] identifier[wait] : identifier[self] . identifier[wait_for] ( keyword[lambda] : keyword[not] identifier[self] . identifier[armed] , identifier[timeout] = identifier[timeout] , identifier[errmsg] = literal[string] )
def disarm(self, wait=True, timeout=None): """Disarm the vehicle. If wait is True, wait for disarm operation to complete before returning. If timeout is nonzero, raise a TimeouTerror if the vehicle has not disarmed after timeout seconds. """ self.armed = False if wait: self.wait_for(lambda : not self.armed, timeout=timeout, errmsg='failed to disarm vehicle') # depends on [control=['if'], data=[]]
def get_product(self, standard, key):
    """
    Query product information.

    Reference:
    http://mp.weixin.qq.com/wiki/15/7fa787701295b884410b5163e13313af.html

    :param standard: product code standard
    :param key: product code content
    :return: JSON response returned by the API
    """
    payload = {
        'keystandard': standard,
        'keystr': key,
    }
    return self._post('product/get', data=payload)
def function[get_product, parameter[self, standard, key]]: constant[ 查询商品信息 详情请参考 http://mp.weixin.qq.com/wiki/15/7fa787701295b884410b5163e13313af.html :param standard: 商品编码标准 :param key: 商品编码内容 :return: 返回的 JSON 数据包 ] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b1f48610>, <ast.Constant object at 0x7da1b1f4bf10>], [<ast.Name object at 0x7da1b1f4b340>, <ast.Name object at 0x7da1b1f4a140>]] return[call[name[self]._post, parameter[constant[product/get]]]]
keyword[def] identifier[get_product] ( identifier[self] , identifier[standard] , identifier[key] ): literal[string] identifier[data] ={ literal[string] : identifier[standard] , literal[string] : identifier[key] , } keyword[return] identifier[self] . identifier[_post] ( literal[string] , identifier[data] = identifier[data] )
def get_product(self, standard, key): """ 查询商品信息 详情请参考 http://mp.weixin.qq.com/wiki/15/7fa787701295b884410b5163e13313af.html :param standard: 商品编码标准 :param key: 商品编码内容 :return: 返回的 JSON 数据包 """ data = {'keystandard': standard, 'keystr': key} return self._post('product/get', data=data)